commit
stringlengths
40
40
old_file
stringlengths
4
106
new_file
stringlengths
4
106
old_contents
stringlengths
10
2.94k
new_contents
stringlengths
21
2.95k
subject
stringlengths
16
444
message
stringlengths
17
2.63k
lang
stringclasses
1 value
license
stringclasses
13 values
repos
stringlengths
7
43k
ndiff
stringlengths
52
3.31k
instruction
stringlengths
16
444
content
stringlengths
133
4.32k
diff
stringlengths
49
3.61k
3d7bbd37485dca4782ad7e7fdb088b22db586b66
pyscores/config.py
pyscores/config.py
BASE_URL = "http://api.football-data.org/v1" LEAGUE_IDS = { "PL": "426", "ELC": "427", "EL1": "428", "FAC": "429", "BL1": "430", "BL2": "431", "DFB": "432", "DED": "433", "FL1": "434", "FL2": "435", "PD": "436", "SD": "437", "SA": "438", "PPL": "439", "CL": "440", "SB": "441", "ENL": "442", "EL2": "443" }
BASE_URL = "http://api.football-data.org/v1" LEAGUE_IDS = { "BSA": "444", "PL": "445", "ELC": "446", "EL1": "447", "EL2": "448", "DED": "449", "FL1": "450", "FL2": "451", "BL1": "452", "BL2": "453", "PD": "455", "SA": "456", "PPL": "457", "DFB": "458", "SB": "459", "CL": "464", "AAL": "466" }
Update league codes for new season
Update league codes for new season
Python
mit
conormag94/pyscores
BASE_URL = "http://api.football-data.org/v1" LEAGUE_IDS = { + "BSA": "444", - "PL": "426", + "PL": "445", - "ELC": "427", + "ELC": "446", - "EL1": "428", + "EL1": "447", - "FAC": "429", - "BL1": "430", - "BL2": "431", + "EL2": "448", - "DFB": "432", - "DED": "433", + "DED": "449", - "FL1": "434", + "FL1": "450", - "FL2": "435", + "FL2": "451", + "BL1": "452", + "BL2": "453", - "PD": "436", + "PD": "455", - "SD": "437", - "SA": "438", + "SA": "456", - "PPL": "439", + "PPL": "457", + "DFB": "458", + "SB": "459", - "CL": "440", + "CL": "464", - "SB": "441", - "ENL": "442", - "EL2": "443" + "AAL": "466" }
Update league codes for new season
## Code Before: BASE_URL = "http://api.football-data.org/v1" LEAGUE_IDS = { "PL": "426", "ELC": "427", "EL1": "428", "FAC": "429", "BL1": "430", "BL2": "431", "DFB": "432", "DED": "433", "FL1": "434", "FL2": "435", "PD": "436", "SD": "437", "SA": "438", "PPL": "439", "CL": "440", "SB": "441", "ENL": "442", "EL2": "443" } ## Instruction: Update league codes for new season ## Code After: BASE_URL = "http://api.football-data.org/v1" LEAGUE_IDS = { "BSA": "444", "PL": "445", "ELC": "446", "EL1": "447", "EL2": "448", "DED": "449", "FL1": "450", "FL2": "451", "BL1": "452", "BL2": "453", "PD": "455", "SA": "456", "PPL": "457", "DFB": "458", "SB": "459", "CL": "464", "AAL": "466" }
BASE_URL = "http://api.football-data.org/v1" LEAGUE_IDS = { + "BSA": "444", - "PL": "426", ? ^^ + "PL": "445", ? ^^ - "ELC": "427", ? ^^ + "ELC": "446", ? ^^ - "EL1": "428", ? ^^ + "EL1": "447", ? ^^ - "FAC": "429", - "BL1": "430", - "BL2": "431", ? ^ ^^ + "EL2": "448", ? ^ ^^ - "DFB": "432", - "DED": "433", ? ^^ + "DED": "449", ? ^^ - "FL1": "434", ? ^^ + "FL1": "450", ? ^^ - "FL2": "435", ? - + "FL2": "451", ? + + "BL1": "452", + "BL2": "453", - "PD": "436", ? ^^ + "PD": "455", ? ^^ - "SD": "437", - "SA": "438", ? ^^ + "SA": "456", ? ^^ - "PPL": "439", ? ^^ + "PPL": "457", ? ^^ + "DFB": "458", + "SB": "459", - "CL": "440", ? - + "CL": "464", ? + - "SB": "441", - "ENL": "442", - "EL2": "443" ? ^ - ^^ + "AAL": "466" ? ^^ ^^ }
f8db46b40629cfdb145a4a000d47277f72090c5b
powerline/lib/memoize.py
powerline/lib/memoize.py
from functools import wraps import time def default_cache_key(**kwargs): return frozenset(kwargs.items()) class memoize(object): '''Memoization decorator with timeout.''' def __init__(self, timeout, cache_key=default_cache_key, cache_reg_func=None): self.timeout = timeout self.cache_key = cache_key self.cache = {} self.cache_reg_func = cache_reg_func def __call__(self, func): @wraps(func) def decorated_function(**kwargs): if self.cache_reg_func: self.cache_reg_func(self.cache) self.cache_reg_func = None key = self.cache_key(**kwargs) try: cached = self.cache.get(key, None) except TypeError: return func(**kwargs) if cached is None or time.time() - cached['time'] > self.timeout: cached = self.cache[key] = { 'result': func(**kwargs), 'time': time.time(), } return cached['result'] return decorated_function
from functools import wraps try: # Python>=3.3, the only valid clock source for this job from time import monotonic as time except ImportError: # System time, is affected by clock updates. from time import time def default_cache_key(**kwargs): return frozenset(kwargs.items()) class memoize(object): '''Memoization decorator with timeout.''' def __init__(self, timeout, cache_key=default_cache_key, cache_reg_func=None): self.timeout = timeout self.cache_key = cache_key self.cache = {} self.cache_reg_func = cache_reg_func def __call__(self, func): @wraps(func) def decorated_function(**kwargs): if self.cache_reg_func: self.cache_reg_func(self.cache) self.cache_reg_func = None key = self.cache_key(**kwargs) try: cached = self.cache.get(key, None) except TypeError: return func(**kwargs) # Handle case when time() appears to be less then cached['time'] due # to clock updates. Not applicable for monotonic clock, but this # case is currently rare. if cached is None or not (cached['time'] < time() < cached['time'] + self.timeout): cached = self.cache[key] = { 'result': func(**kwargs), 'time': time(), } return cached['result'] return decorated_function
Use proper clock if possible
Use proper clock if possible
Python
mit
Liangjianghao/powerline,kenrachynski/powerline,darac/powerline,darac/powerline,bezhermoso/powerline,firebitsbr/powerline,bartvm/powerline,cyrixhero/powerline,junix/powerline,prvnkumar/powerline,s0undt3ch/powerline,S0lll0s/powerline,Luffin/powerline,EricSB/powerline,dragon788/powerline,prvnkumar/powerline,wfscheper/powerline,xfumihiro/powerline,magus424/powerline,IvanAli/powerline,cyrixhero/powerline,seanfisk/powerline,bartvm/powerline,wfscheper/powerline,dragon788/powerline,xfumihiro/powerline,magus424/powerline,cyrixhero/powerline,xxxhycl2010/powerline,dragon788/powerline,lukw00/powerline,DoctorJellyface/powerline,blindFS/powerline,seanfisk/powerline,blindFS/powerline,IvanAli/powerline,IvanAli/powerline,keelerm84/powerline,Luffin/powerline,s0undt3ch/powerline,Liangjianghao/powerline,blindFS/powerline,S0lll0s/powerline,EricSB/powerline,lukw00/powerline,junix/powerline,areteix/powerline,junix/powerline,QuLogic/powerline,prvnkumar/powerline,seanfisk/powerline,bezhermoso/powerline,QuLogic/powerline,russellb/powerline,bezhermoso/powerline,russellb/powerline,bartvm/powerline,darac/powerline,lukw00/powerline,kenrachynski/powerline,firebitsbr/powerline,areteix/powerline,magus424/powerline,xfumihiro/powerline,Luffin/powerline,keelerm84/powerline,s0undt3ch/powerline,DoctorJellyface/powerline,wfscheper/powerline,xxxhycl2010/powerline,xxxhycl2010/powerline,firebitsbr/powerline,russellb/powerline,EricSB/powerline,DoctorJellyface/powerline,Liangjianghao/powerline,areteix/powerline,S0lll0s/powerline,kenrachynski/powerline,QuLogic/powerline
from functools import wraps - import time + try: + # Python>=3.3, the only valid clock source for this job + from time import monotonic as time + except ImportError: + # System time, is affected by clock updates. + from time import time def default_cache_key(**kwargs): return frozenset(kwargs.items()) class memoize(object): '''Memoization decorator with timeout.''' def __init__(self, timeout, cache_key=default_cache_key, cache_reg_func=None): self.timeout = timeout self.cache_key = cache_key self.cache = {} self.cache_reg_func = cache_reg_func def __call__(self, func): @wraps(func) def decorated_function(**kwargs): if self.cache_reg_func: self.cache_reg_func(self.cache) self.cache_reg_func = None key = self.cache_key(**kwargs) try: cached = self.cache.get(key, None) except TypeError: return func(**kwargs) - if cached is None or time.time() - cached['time'] > self.timeout: + # Handle case when time() appears to be less then cached['time'] due + # to clock updates. Not applicable for monotonic clock, but this + # case is currently rare. + if cached is None or not (cached['time'] < time() < cached['time'] + self.timeout): cached = self.cache[key] = { 'result': func(**kwargs), - 'time': time.time(), + 'time': time(), } return cached['result'] return decorated_function
Use proper clock if possible
## Code Before: from functools import wraps import time def default_cache_key(**kwargs): return frozenset(kwargs.items()) class memoize(object): '''Memoization decorator with timeout.''' def __init__(self, timeout, cache_key=default_cache_key, cache_reg_func=None): self.timeout = timeout self.cache_key = cache_key self.cache = {} self.cache_reg_func = cache_reg_func def __call__(self, func): @wraps(func) def decorated_function(**kwargs): if self.cache_reg_func: self.cache_reg_func(self.cache) self.cache_reg_func = None key = self.cache_key(**kwargs) try: cached = self.cache.get(key, None) except TypeError: return func(**kwargs) if cached is None or time.time() - cached['time'] > self.timeout: cached = self.cache[key] = { 'result': func(**kwargs), 'time': time.time(), } return cached['result'] return decorated_function ## Instruction: Use proper clock if possible ## Code After: from functools import wraps try: # Python>=3.3, the only valid clock source for this job from time import monotonic as time except ImportError: # System time, is affected by clock updates. from time import time def default_cache_key(**kwargs): return frozenset(kwargs.items()) class memoize(object): '''Memoization decorator with timeout.''' def __init__(self, timeout, cache_key=default_cache_key, cache_reg_func=None): self.timeout = timeout self.cache_key = cache_key self.cache = {} self.cache_reg_func = cache_reg_func def __call__(self, func): @wraps(func) def decorated_function(**kwargs): if self.cache_reg_func: self.cache_reg_func(self.cache) self.cache_reg_func = None key = self.cache_key(**kwargs) try: cached = self.cache.get(key, None) except TypeError: return func(**kwargs) # Handle case when time() appears to be less then cached['time'] due # to clock updates. Not applicable for monotonic clock, but this # case is currently rare. 
if cached is None or not (cached['time'] < time() < cached['time'] + self.timeout): cached = self.cache[key] = { 'result': func(**kwargs), 'time': time(), } return cached['result'] return decorated_function
from functools import wraps - import time + try: + # Python>=3.3, the only valid clock source for this job + from time import monotonic as time + except ImportError: + # System time, is affected by clock updates. + from time import time def default_cache_key(**kwargs): return frozenset(kwargs.items()) class memoize(object): '''Memoization decorator with timeout.''' def __init__(self, timeout, cache_key=default_cache_key, cache_reg_func=None): self.timeout = timeout self.cache_key = cache_key self.cache = {} self.cache_reg_func = cache_reg_func def __call__(self, func): @wraps(func) def decorated_function(**kwargs): if self.cache_reg_func: self.cache_reg_func(self.cache) self.cache_reg_func = None key = self.cache_key(**kwargs) try: cached = self.cache.get(key, None) except TypeError: return func(**kwargs) + # Handle case when time() appears to be less then cached['time'] due + # to clock updates. Not applicable for monotonic clock, but this + # case is currently rare. - if cached is None or time.time() - cached['time'] > self.timeout: ? ^ ^ ^ + if cached is None or not (cached['time'] < time() < cached['time'] + self.timeout): ? +++++++++++++ ^^^^^ ^ ^ + cached = self.cache[key] = { 'result': func(**kwargs), - 'time': time.time(), ? ----- + 'time': time(), } return cached['result'] return decorated_function
62549a211ff41e2b84a9b085e784649efc76c5d9
apps/domain/tests/conftest.py
apps/domain/tests/conftest.py
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app @pytest.fixture(scope="function", autouse=True) def app(): return create_app() @pytest.fixture def client(app): return app.test_client()
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///databasenetwork.db" return create_app(test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client()
Update unit test app() fixture
Update unit test app() fixture
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app @pytest.fixture(scope="function", autouse=True) def app(): - return create_app() + db_path = "sqlite:///databasenetwork.db" + return create_app(test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client()
Update unit test app() fixture
## Code Before: import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app @pytest.fixture(scope="function", autouse=True) def app(): return create_app() @pytest.fixture def client(app): return app.test_client() ## Instruction: Update unit test app() fixture ## Code After: import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///databasenetwork.db" return create_app(test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client()
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app @pytest.fixture(scope="function", autouse=True) def app(): - return create_app() + db_path = "sqlite:///databasenetwork.db" + return create_app(test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client()
d150db290a72590e0f7cf9dae485bf98901bb2c2
web_ui/helpers.py
web_ui/helpers.py
from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] def reset_userdata(): """Reset all userdata files""" import os for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: try: os.remove(app.config[fileid]) except OSError: app.logger.warning("RESET: {} not found".format(fileid)) else: app.logger.warning("RESET: {} deleted")
Add helper method for resetting user data
Add helper method for resetting user data
Python
apache-2.0
ciex/souma,ciex/souma,ciex/souma
from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] + + def reset_userdata(): + """Reset all userdata files""" + import os + + for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: + try: + os.remove(app.config[fileid]) + except OSError: + app.logger.warning("RESET: {} not found".format(fileid)) + else: + app.logger.warning("RESET: {} deleted") +
Add helper method for resetting user data
## Code Before: from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] ## Instruction: Add helper method for resetting user data ## Code After: from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' 
in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] def reset_userdata(): """Reset all userdata files""" import os for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: try: os.remove(app.config[fileid]) except OSError: app.logger.warning("RESET: {} not found".format(fileid)) else: app.logger.warning("RESET: {} deleted")
from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] + + + def reset_userdata(): + """Reset all userdata files""" + import os + + for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: + try: + os.remove(app.config[fileid]) + except OSError: + app.logger.warning("RESET: {} not found".format(fileid)) + else: + app.logger.warning("RESET: {} deleted")
7f79e575b9a2b5dc15ed304e2c1cb123ab39b91b
iscc_bench/metaid/shortnorm.py
iscc_bench/metaid/shortnorm.py
import unicodedata def shortest_normalization_form(): """ Find unicode normalization that generates shortest utf8 encoded text. Result NFKC """ s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky &#160; things' nfc = unicodedata.normalize('NFC', s) nfd = unicodedata.normalize('NFD', s) nfkc = unicodedata.normalize('NFKC', s) nfkd = unicodedata.normalize('NFKD', s) nfd_nfkc = unicodedata.normalize('NFKC', nfd) print('UTF-8 length of normalized strings:\n') print(f'NFC: {len(nfc.encode("utf8"))}') print(f'NFD: {len(nfd.encode("utf8"))}') print(f'NFKC: {len(nfkc.encode("utf8"))}') print(f'NFKD: {len(nfkd.encode("utf8"))}') print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}') if __name__ == '__main__': shortest_normalization_form()
import unicodedata def shortest_normalization_form(): """ Find unicode normalization that generates shortest utf8 encoded text. Result NFKC """ s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky &#160; things' nfc = unicodedata.normalize('NFC', s) nfd = unicodedata.normalize('NFD', s) nfkc = unicodedata.normalize('NFKC', s) nfkd = unicodedata.normalize('NFKD', s) nfd_nfkc = unicodedata.normalize('NFKC', nfd) nfd_nfc = unicodedata.normalize('NFC', nfd) print('UTF-8 length of normalized strings:\n') print(f'NFC: {len(nfc.encode("utf8"))}') print(f'NFD: {len(nfd.encode("utf8"))}') print(f'NFKC: {len(nfkc.encode("utf8"))}') print(f'NFKD: {len(nfkd.encode("utf8"))}') print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}') print(f'NFD_NFC: {len(nfd_nfc.encode("utf8"))}') if __name__ == '__main__': shortest_normalization_form()
Add NFD_NFC to unicode normalization comparison.
Add NFD_NFC to unicode normalization comparison.
Python
bsd-2-clause
coblo/isccbench
import unicodedata def shortest_normalization_form(): """ Find unicode normalization that generates shortest utf8 encoded text. Result NFKC """ s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky &#160; things' nfc = unicodedata.normalize('NFC', s) nfd = unicodedata.normalize('NFD', s) nfkc = unicodedata.normalize('NFKC', s) nfkd = unicodedata.normalize('NFKD', s) nfd_nfkc = unicodedata.normalize('NFKC', nfd) + nfd_nfc = unicodedata.normalize('NFC', nfd) print('UTF-8 length of normalized strings:\n') print(f'NFC: {len(nfc.encode("utf8"))}') print(f'NFD: {len(nfd.encode("utf8"))}') print(f'NFKC: {len(nfkc.encode("utf8"))}') print(f'NFKD: {len(nfkd.encode("utf8"))}') print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}') + print(f'NFD_NFC: {len(nfd_nfc.encode("utf8"))}') if __name__ == '__main__': shortest_normalization_form()
Add NFD_NFC to unicode normalization comparison.
## Code Before: import unicodedata def shortest_normalization_form(): """ Find unicode normalization that generates shortest utf8 encoded text. Result NFKC """ s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky &#160; things' nfc = unicodedata.normalize('NFC', s) nfd = unicodedata.normalize('NFD', s) nfkc = unicodedata.normalize('NFKC', s) nfkd = unicodedata.normalize('NFKD', s) nfd_nfkc = unicodedata.normalize('NFKC', nfd) print('UTF-8 length of normalized strings:\n') print(f'NFC: {len(nfc.encode("utf8"))}') print(f'NFD: {len(nfd.encode("utf8"))}') print(f'NFKC: {len(nfkc.encode("utf8"))}') print(f'NFKD: {len(nfkd.encode("utf8"))}') print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}') if __name__ == '__main__': shortest_normalization_form() ## Instruction: Add NFD_NFC to unicode normalization comparison. ## Code After: import unicodedata def shortest_normalization_form(): """ Find unicode normalization that generates shortest utf8 encoded text. Result NFKC """ s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky &#160; things' nfc = unicodedata.normalize('NFC', s) nfd = unicodedata.normalize('NFD', s) nfkc = unicodedata.normalize('NFKC', s) nfkd = unicodedata.normalize('NFKD', s) nfd_nfkc = unicodedata.normalize('NFKC', nfd) nfd_nfc = unicodedata.normalize('NFC', nfd) print('UTF-8 length of normalized strings:\n') print(f'NFC: {len(nfc.encode("utf8"))}') print(f'NFD: {len(nfd.encode("utf8"))}') print(f'NFKC: {len(nfkc.encode("utf8"))}') print(f'NFKD: {len(nfkd.encode("utf8"))}') print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}') print(f'NFD_NFC: {len(nfd_nfc.encode("utf8"))}') if __name__ == '__main__': shortest_normalization_form()
import unicodedata def shortest_normalization_form(): """ Find unicode normalization that generates shortest utf8 encoded text. Result NFKC """ s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky &#160; things' nfc = unicodedata.normalize('NFC', s) nfd = unicodedata.normalize('NFD', s) nfkc = unicodedata.normalize('NFKC', s) nfkd = unicodedata.normalize('NFKD', s) nfd_nfkc = unicodedata.normalize('NFKC', nfd) + nfd_nfc = unicodedata.normalize('NFC', nfd) print('UTF-8 length of normalized strings:\n') print(f'NFC: {len(nfc.encode("utf8"))}') print(f'NFD: {len(nfd.encode("utf8"))}') print(f'NFKC: {len(nfkc.encode("utf8"))}') print(f'NFKD: {len(nfkd.encode("utf8"))}') print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}') + print(f'NFD_NFC: {len(nfd_nfc.encode("utf8"))}') if __name__ == '__main__': shortest_normalization_form()
46cfd25a4acf075650a5471c388457cb04cd9a15
invenio_mail/api.py
invenio_mail/api.py
"""Template based messages.""" from __future__ import absolute_import, print_function from flask import render_template from flask_mail import Message class TemplatedMessage(Message): """Siplify creation of templated messages.""" def __init__(self, template_body=None, template_html=None, ctx={}, **kwargs): r"""Build message body and HTML based on provided templates. Provided templates can use keyword arguments ``body`` and ``html`` respectively. :param template_body: Path to the text template. :param template_html: Path to the html template. :param ctx: A mapping containing additional information passed to the template. :param \*\*kwargs: Keyword arguments as defined in :class:`flask_mail.Message`. """ if template_body: kwargs['body'] = render_template( template_body, body=kwargs.get('body'), **ctx ) if template_html: kwargs['html'] = render_template( template_html, html=kwargs.get('html'), **ctx ) super(TemplatedMessage, self).__init__(**kwargs)
"""Template based messages.""" from __future__ import absolute_import, print_function from flask import render_template from flask_mail import Message class TemplatedMessage(Message): """Siplify creation of templated messages.""" def __init__(self, template_body=None, template_html=None, ctx=None, **kwargs): r"""Build message body and HTML based on provided templates. Provided templates can use keyword arguments ``body`` and ``html`` respectively. :param template_body: Path to the text template. :param template_html: Path to the html template. :param ctx: A mapping containing additional information passed to the template. :param \*\*kwargs: Keyword arguments as defined in :class:`flask_mail.Message`. """ ctx = ctx if ctx else {} if template_body: kwargs['body'] = render_template( template_body, body=kwargs.get('body'), **ctx ) if template_html: kwargs['html'] = render_template( template_html, html=kwargs.get('html'), **ctx ) super(TemplatedMessage, self).__init__(**kwargs)
Use sentinel value for ctx
Use sentinel value for ctx
Python
mit
inveniosoftware/invenio-mail,inveniosoftware/invenio-mail,inveniosoftware/invenio-mail
"""Template based messages.""" from __future__ import absolute_import, print_function from flask import render_template from flask_mail import Message class TemplatedMessage(Message): """Siplify creation of templated messages.""" - def __init__(self, template_body=None, template_html=None, ctx={}, + def __init__(self, template_body=None, template_html=None, ctx=None, **kwargs): r"""Build message body and HTML based on provided templates. Provided templates can use keyword arguments ``body`` and ``html`` respectively. :param template_body: Path to the text template. :param template_html: Path to the html template. :param ctx: A mapping containing additional information passed to the template. :param \*\*kwargs: Keyword arguments as defined in :class:`flask_mail.Message`. """ + ctx = ctx if ctx else {} if template_body: kwargs['body'] = render_template( template_body, body=kwargs.get('body'), **ctx ) if template_html: kwargs['html'] = render_template( template_html, html=kwargs.get('html'), **ctx ) super(TemplatedMessage, self).__init__(**kwargs)
Use sentinel value for ctx
## Code Before: """Template based messages.""" from __future__ import absolute_import, print_function from flask import render_template from flask_mail import Message class TemplatedMessage(Message): """Siplify creation of templated messages.""" def __init__(self, template_body=None, template_html=None, ctx={}, **kwargs): r"""Build message body and HTML based on provided templates. Provided templates can use keyword arguments ``body`` and ``html`` respectively. :param template_body: Path to the text template. :param template_html: Path to the html template. :param ctx: A mapping containing additional information passed to the template. :param \*\*kwargs: Keyword arguments as defined in :class:`flask_mail.Message`. """ if template_body: kwargs['body'] = render_template( template_body, body=kwargs.get('body'), **ctx ) if template_html: kwargs['html'] = render_template( template_html, html=kwargs.get('html'), **ctx ) super(TemplatedMessage, self).__init__(**kwargs) ## Instruction: Use sentinel value for ctx ## Code After: """Template based messages.""" from __future__ import absolute_import, print_function from flask import render_template from flask_mail import Message class TemplatedMessage(Message): """Siplify creation of templated messages.""" def __init__(self, template_body=None, template_html=None, ctx=None, **kwargs): r"""Build message body and HTML based on provided templates. Provided templates can use keyword arguments ``body`` and ``html`` respectively. :param template_body: Path to the text template. :param template_html: Path to the html template. :param ctx: A mapping containing additional information passed to the template. :param \*\*kwargs: Keyword arguments as defined in :class:`flask_mail.Message`. 
""" ctx = ctx if ctx else {} if template_body: kwargs['body'] = render_template( template_body, body=kwargs.get('body'), **ctx ) if template_html: kwargs['html'] = render_template( template_html, html=kwargs.get('html'), **ctx ) super(TemplatedMessage, self).__init__(**kwargs)
"""Template based messages.""" from __future__ import absolute_import, print_function from flask import render_template from flask_mail import Message class TemplatedMessage(Message): """Siplify creation of templated messages.""" - def __init__(self, template_body=None, template_html=None, ctx={}, ? ^^ + def __init__(self, template_body=None, template_html=None, ctx=None, ? ^^^^ **kwargs): r"""Build message body and HTML based on provided templates. Provided templates can use keyword arguments ``body`` and ``html`` respectively. :param template_body: Path to the text template. :param template_html: Path to the html template. :param ctx: A mapping containing additional information passed to the template. :param \*\*kwargs: Keyword arguments as defined in :class:`flask_mail.Message`. """ + ctx = ctx if ctx else {} if template_body: kwargs['body'] = render_template( template_body, body=kwargs.get('body'), **ctx ) if template_html: kwargs['html'] = render_template( template_html, html=kwargs.get('html'), **ctx ) super(TemplatedMessage, self).__init__(**kwargs)
322997e229457bf43ee2281993ccdc30c8455244
tests/test_util.py
tests/test_util.py
from archivebox import util def test_download_url_downloads_content(): text = util.download_url("https://example.com") assert "Example Domain" in text
from archivebox import util def test_download_url_downloads_content(): text = util.download_url("http://localhost:8080/static/example.com.html") assert "Example Domain" in text
Refactor util tests to use local webserver
test: Refactor util tests to use local webserver
Python
mit
pirate/bookmark-archiver,pirate/bookmark-archiver,pirate/bookmark-archiver
from archivebox import util def test_download_url_downloads_content(): - text = util.download_url("https://example.com") + text = util.download_url("http://localhost:8080/static/example.com.html") assert "Example Domain" in text
Refactor util tests to use local webserver
## Code Before: from archivebox import util def test_download_url_downloads_content(): text = util.download_url("https://example.com") assert "Example Domain" in text ## Instruction: Refactor util tests to use local webserver ## Code After: from archivebox import util def test_download_url_downloads_content(): text = util.download_url("http://localhost:8080/static/example.com.html") assert "Example Domain" in text
from archivebox import util def test_download_url_downloads_content(): - text = util.download_url("https://example.com") ? - + text = util.download_url("http://localhost:8080/static/example.com.html") ? ++++++++++++++++++++++ +++++ assert "Example Domain" in text
bf042cbe47c9fcfc0e608ff726a73d0e562027d0
tests/test_with_hypothesis.py
tests/test_with_hypothesis.py
import pytest from aead import AEAD hypothesis = pytest.importorskip("hypothesis") @hypothesis.given(bytes, bytes) def test_round_trip_encrypt_decrypt(plaintext, associated_data): cryptor = AEAD(AEAD.generate_key()) ct = cryptor.encrypt(plaintext, associated_data) assert plaintext == cryptor.decrypt(ct, associated_data)
import pytest from aead import AEAD hypothesis = pytest.importorskip("hypothesis") @hypothesis.given( hypothesis.strategies.binary(), hypothesis.strategies.binary() ) def test_round_trip_encrypt_decrypt(plaintext, associated_data): cryptor = AEAD(AEAD.generate_key()) ct = cryptor.encrypt(plaintext, associated_data) assert plaintext == cryptor.decrypt(ct, associated_data)
Fix the Hypothesis test to work with new API.
Fix the Hypothesis test to work with new API. The Hypothesis API has since moved on from the last time we pushed a change. Fix the test suite to work with the new API.
Python
apache-2.0
Ayrx/python-aead,Ayrx/python-aead
import pytest from aead import AEAD hypothesis = pytest.importorskip("hypothesis") - @hypothesis.given(bytes, bytes) + @hypothesis.given( + hypothesis.strategies.binary(), + hypothesis.strategies.binary() + ) def test_round_trip_encrypt_decrypt(plaintext, associated_data): cryptor = AEAD(AEAD.generate_key()) ct = cryptor.encrypt(plaintext, associated_data) assert plaintext == cryptor.decrypt(ct, associated_data)
Fix the Hypothesis test to work with new API.
## Code Before: import pytest from aead import AEAD hypothesis = pytest.importorskip("hypothesis") @hypothesis.given(bytes, bytes) def test_round_trip_encrypt_decrypt(plaintext, associated_data): cryptor = AEAD(AEAD.generate_key()) ct = cryptor.encrypt(plaintext, associated_data) assert plaintext == cryptor.decrypt(ct, associated_data) ## Instruction: Fix the Hypothesis test to work with new API. ## Code After: import pytest from aead import AEAD hypothesis = pytest.importorskip("hypothesis") @hypothesis.given( hypothesis.strategies.binary(), hypothesis.strategies.binary() ) def test_round_trip_encrypt_decrypt(plaintext, associated_data): cryptor = AEAD(AEAD.generate_key()) ct = cryptor.encrypt(plaintext, associated_data) assert plaintext == cryptor.decrypt(ct, associated_data)
import pytest from aead import AEAD hypothesis = pytest.importorskip("hypothesis") - @hypothesis.given(bytes, bytes) + @hypothesis.given( + hypothesis.strategies.binary(), + hypothesis.strategies.binary() + ) def test_round_trip_encrypt_decrypt(plaintext, associated_data): cryptor = AEAD(AEAD.generate_key()) ct = cryptor.encrypt(plaintext, associated_data) assert plaintext == cryptor.decrypt(ct, associated_data)
423ea9128f01eb74790a3bb5a876c066acc9c2c1
firesim.py
firesim.py
import functools import signal import sys import logging as log from firesimgui import FireSimGUI from lib.arguments import parse_args def sig_handler(app, sig, frame): log.info("Firesim received signal %d. Shutting down.", sig) try: app.quit() except Exception: log.exception("Ignoring exception during shutdown request") def main(): log.basicConfig(level=log.WARN) log.info("Booting FireSim...") args = parse_args() sim = FireSimGUI(args) signal.signal(signal.SIGINT, functools.partial(sig_handler, sim)) sys.exit(sim.run()) if __name__ == "__main__": main()
import functools import signal import sys import logging as log from firesimgui import FireSimGUI from lib.arguments import parse_args def sig_handler(app, sig, frame): log.info("Firesim received signal %d. Shutting down.", sig) try: app.quit() except Exception: log.exception("Ignoring exception during shutdown request") def main(): log.basicConfig(level=log.WARN) log.info("Booting FireSim...") args = parse_args() sim = FireSimGUI(args) signal.signal(signal.SIGINT, functools.partial(sig_handler, sim)) sys.exit(sim.run()) if __name__ == "__main__": main()
Add shebang to main script and switch to Unix line endings
Add shebang to main script and switch to Unix line endings
Python
mit
Openlights/firesim
- import functools - import signal - import sys - import logging as log - - from firesimgui import FireSimGUI - from lib.arguments import parse_args - - def sig_handler(app, sig, frame): - log.info("Firesim received signal %d. Shutting down.", sig) - try: - app.quit() - except Exception: - log.exception("Ignoring exception during shutdown request") - - def main(): - log.basicConfig(level=log.WARN) - log.info("Booting FireSim...") - args = parse_args() - sim = FireSimGUI(args) - signal.signal(signal.SIGINT, functools.partial(sig_handler, sim)) - sys.exit(sim.run()) - - if __name__ == "__main__": - main() + import functools + import signal + import sys + import logging as log + + from firesimgui import FireSimGUI + from lib.arguments import parse_args + + def sig_handler(app, sig, frame): + log.info("Firesim received signal %d. Shutting down.", sig) + try: + app.quit() + except Exception: + log.exception("Ignoring exception during shutdown request") + + def main(): + log.basicConfig(level=log.WARN) + log.info("Booting FireSim...") + args = parse_args() + sim = FireSimGUI(args) + signal.signal(signal.SIGINT, functools.partial(sig_handler, sim)) + sys.exit(sim.run()) + + if __name__ == "__main__": + main() +
Add shebang to main script and switch to Unix line endings
## Code Before: import functools import signal import sys import logging as log from firesimgui import FireSimGUI from lib.arguments import parse_args def sig_handler(app, sig, frame): log.info("Firesim received signal %d. Shutting down.", sig) try: app.quit() except Exception: log.exception("Ignoring exception during shutdown request") def main(): log.basicConfig(level=log.WARN) log.info("Booting FireSim...") args = parse_args() sim = FireSimGUI(args) signal.signal(signal.SIGINT, functools.partial(sig_handler, sim)) sys.exit(sim.run()) if __name__ == "__main__": main() ## Instruction: Add shebang to main script and switch to Unix line endings ## Code After: import functools import signal import sys import logging as log from firesimgui import FireSimGUI from lib.arguments import parse_args def sig_handler(app, sig, frame): log.info("Firesim received signal %d. Shutting down.", sig) try: app.quit() except Exception: log.exception("Ignoring exception during shutdown request") def main(): log.basicConfig(level=log.WARN) log.info("Booting FireSim...") args = parse_args() sim = FireSimGUI(args) signal.signal(signal.SIGINT, functools.partial(sig_handler, sim)) sys.exit(sim.run()) if __name__ == "__main__": main()
+ import functools import signal import sys import logging as log from firesimgui import FireSimGUI from lib.arguments import parse_args def sig_handler(app, sig, frame): log.info("Firesim received signal %d. Shutting down.", sig) try: app.quit() except Exception: log.exception("Ignoring exception during shutdown request") def main(): log.basicConfig(level=log.WARN) log.info("Booting FireSim...") args = parse_args() sim = FireSimGUI(args) signal.signal(signal.SIGINT, functools.partial(sig_handler, sim)) sys.exit(sim.run()) if __name__ == "__main__": main()
04fbd56e647de937ceae426acb6762f1cbbcf616
cryptography/__about__.py
cryptography/__about__.py
from __future__ import absolute_import, division, print_function __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ] __title__ = "cryptography" __summary__ = ("cryptography is a package designed to expose cryptographic " "primitives and recipes to Python developers.") __uri__ = "https://github.com/alex/cryptography" __version__ = "0.1.dev1" __author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, " "Jean-Paul Calderone, Chris Heime, and Indivdual Contributors") __email__ = "cryptography-dev@python.org" __license__ = "Apache License, Version 2.0" __copyright__ = "Copyright 2013 Donald Stufft"
from __future__ import absolute_import, division, print_function __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ] __title__ = "cryptography" __summary__ = ("cryptography is a package designed to expose cryptographic " "primitives and recipes to Python developers.") __uri__ = "https://github.com/alex/cryptography" __version__ = "0.1.dev1" __author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, " "Jean-Paul Calderone, Christian Heimes, and Indivdual " "Contributors") __email__ = "cryptography-dev@python.org" __license__ = "Apache License, Version 2.0" __copyright__ = "Copyright 2013 Donald Stufft"
Fix spelling of my name
Fix spelling of my name
Python
bsd-3-clause
bwhmather/cryptography,dstufft/cryptography,glyph/cryptography,kimvais/cryptography,dstufft/cryptography,bwhmather/cryptography,Lukasa/cryptography,sholsapp/cryptography,bwhmather/cryptography,skeuomorf/cryptography,kimvais/cryptography,Ayrx/cryptography,sholsapp/cryptography,skeuomorf/cryptography,glyph/cryptography,Hasimir/cryptography,skeuomorf/cryptography,Ayrx/cryptography,dstufft/cryptography,dstufft/cryptography,Lukasa/cryptography,Lukasa/cryptography,bwhmather/cryptography,Ayrx/cryptography,Hasimir/cryptography,kimvais/cryptography,dstufft/cryptography,sholsapp/cryptography,Hasimir/cryptography,sholsapp/cryptography,Ayrx/cryptography,Hasimir/cryptography,kimvais/cryptography,skeuomorf/cryptography
from __future__ import absolute_import, division, print_function __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ] __title__ = "cryptography" __summary__ = ("cryptography is a package designed to expose cryptographic " "primitives and recipes to Python developers.") __uri__ = "https://github.com/alex/cryptography" __version__ = "0.1.dev1" __author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, " - "Jean-Paul Calderone, Chris Heime, and Indivdual Contributors") + "Jean-Paul Calderone, Christian Heimes, and Indivdual " + "Contributors") __email__ = "cryptography-dev@python.org" __license__ = "Apache License, Version 2.0" __copyright__ = "Copyright 2013 Donald Stufft"
Fix spelling of my name
## Code Before: from __future__ import absolute_import, division, print_function __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ] __title__ = "cryptography" __summary__ = ("cryptography is a package designed to expose cryptographic " "primitives and recipes to Python developers.") __uri__ = "https://github.com/alex/cryptography" __version__ = "0.1.dev1" __author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, " "Jean-Paul Calderone, Chris Heime, and Indivdual Contributors") __email__ = "cryptography-dev@python.org" __license__ = "Apache License, Version 2.0" __copyright__ = "Copyright 2013 Donald Stufft" ## Instruction: Fix spelling of my name ## Code After: from __future__ import absolute_import, division, print_function __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ] __title__ = "cryptography" __summary__ = ("cryptography is a package designed to expose cryptographic " "primitives and recipes to Python developers.") __uri__ = "https://github.com/alex/cryptography" __version__ = "0.1.dev1" __author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, " "Jean-Paul Calderone, Christian Heimes, and Indivdual " "Contributors") __email__ = "cryptography-dev@python.org" __license__ = "Apache License, Version 2.0" __copyright__ = "Copyright 2013 Donald Stufft"
from __future__ import absolute_import, division, print_function __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ] __title__ = "cryptography" __summary__ = ("cryptography is a package designed to expose cryptographic " "primitives and recipes to Python developers.") __uri__ = "https://github.com/alex/cryptography" __version__ = "0.1.dev1" __author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, " - "Jean-Paul Calderone, Chris Heime, and Indivdual Contributors") ? ------------ - + "Jean-Paul Calderone, Christian Heimes, and Indivdual " ? ++++ + + "Contributors") __email__ = "cryptography-dev@python.org" __license__ = "Apache License, Version 2.0" __copyright__ = "Copyright 2013 Donald Stufft"
cbaab510c92566ffdcc7eb65af0ec9cf1320f173
recipes/webrtc.py
recipes/webrtc.py
import sys import recipe_util # pylint: disable=F0401 # This class doesn't need an __init__ method, so we disable the warning # pylint: disable=W0232 class WebRTC(recipe_util.Recipe): """Basic Recipe class for WebRTC.""" @staticmethod def fetch_spec(props): url = 'https://chromium.googlesource.com/external/webrtc.git' spec = { 'solutions': [ { 'name': 'src', 'url': url, 'deps_file': 'DEPS', 'managed': False, 'custom_deps': {}, 'safesync_url': '', }, ], 'auto': True, # Runs git auto-svn as a part of the fetch. 'with_branch_heads': True, } if props.get('target_os'): spec['target_os'] = props['target_os'].split(',') return { 'type': 'gclient_git_svn', 'gclient_git_svn_spec': spec, } @staticmethod def expected_root(_props): return 'src' def main(argv=None): return WebRTC().handle_args(argv) if __name__ == '__main__': sys.exit(main(sys.argv))
import sys import recipe_util # pylint: disable=F0401 # This class doesn't need an __init__ method, so we disable the warning # pylint: disable=W0232 class WebRTC(recipe_util.Recipe): """Basic Recipe class for WebRTC.""" @staticmethod def fetch_spec(props): url = 'https://chromium.googlesource.com/external/webrtc.git' spec = { 'solutions': [ { 'name': 'src', 'url': url, 'deps_file': 'DEPS', 'managed': False, 'custom_deps': {}, 'safesync_url': '', }, ], 'with_branch_heads': True, } if props.get('target_os'): spec['target_os'] = props['target_os'].split(',') return { 'type': 'gclient_git', 'gclient_git_spec': spec, } @staticmethod def expected_root(_props): return 'src' def main(argv=None): return WebRTC().handle_args(argv) if __name__ == '__main__': sys.exit(main(sys.argv))
Switch WebRTC recipe to Git.
Switch WebRTC recipe to Git. BUG=412012 Review URL: https://codereview.chromium.org/765373002 git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@294546 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Python
bsd-3-clause
svn2github/chromium-depot-tools,svn2github/chromium-depot-tools,svn2github/chromium-depot-tools
import sys import recipe_util # pylint: disable=F0401 # This class doesn't need an __init__ method, so we disable the warning # pylint: disable=W0232 class WebRTC(recipe_util.Recipe): """Basic Recipe class for WebRTC.""" @staticmethod def fetch_spec(props): url = 'https://chromium.googlesource.com/external/webrtc.git' spec = { 'solutions': [ { 'name': 'src', 'url': url, 'deps_file': 'DEPS', 'managed': False, 'custom_deps': {}, 'safesync_url': '', }, ], - 'auto': True, # Runs git auto-svn as a part of the fetch. 'with_branch_heads': True, } if props.get('target_os'): spec['target_os'] = props['target_os'].split(',') return { - 'type': 'gclient_git_svn', + 'type': 'gclient_git', - 'gclient_git_svn_spec': spec, + 'gclient_git_spec': spec, } @staticmethod def expected_root(_props): return 'src' def main(argv=None): return WebRTC().handle_args(argv) if __name__ == '__main__': sys.exit(main(sys.argv))
Switch WebRTC recipe to Git.
## Code Before: import sys import recipe_util # pylint: disable=F0401 # This class doesn't need an __init__ method, so we disable the warning # pylint: disable=W0232 class WebRTC(recipe_util.Recipe): """Basic Recipe class for WebRTC.""" @staticmethod def fetch_spec(props): url = 'https://chromium.googlesource.com/external/webrtc.git' spec = { 'solutions': [ { 'name': 'src', 'url': url, 'deps_file': 'DEPS', 'managed': False, 'custom_deps': {}, 'safesync_url': '', }, ], 'auto': True, # Runs git auto-svn as a part of the fetch. 'with_branch_heads': True, } if props.get('target_os'): spec['target_os'] = props['target_os'].split(',') return { 'type': 'gclient_git_svn', 'gclient_git_svn_spec': spec, } @staticmethod def expected_root(_props): return 'src' def main(argv=None): return WebRTC().handle_args(argv) if __name__ == '__main__': sys.exit(main(sys.argv)) ## Instruction: Switch WebRTC recipe to Git. ## Code After: import sys import recipe_util # pylint: disable=F0401 # This class doesn't need an __init__ method, so we disable the warning # pylint: disable=W0232 class WebRTC(recipe_util.Recipe): """Basic Recipe class for WebRTC.""" @staticmethod def fetch_spec(props): url = 'https://chromium.googlesource.com/external/webrtc.git' spec = { 'solutions': [ { 'name': 'src', 'url': url, 'deps_file': 'DEPS', 'managed': False, 'custom_deps': {}, 'safesync_url': '', }, ], 'with_branch_heads': True, } if props.get('target_os'): spec['target_os'] = props['target_os'].split(',') return { 'type': 'gclient_git', 'gclient_git_spec': spec, } @staticmethod def expected_root(_props): return 'src' def main(argv=None): return WebRTC().handle_args(argv) if __name__ == '__main__': sys.exit(main(sys.argv))
import sys import recipe_util # pylint: disable=F0401 # This class doesn't need an __init__ method, so we disable the warning # pylint: disable=W0232 class WebRTC(recipe_util.Recipe): """Basic Recipe class for WebRTC.""" @staticmethod def fetch_spec(props): url = 'https://chromium.googlesource.com/external/webrtc.git' spec = { 'solutions': [ { 'name': 'src', 'url': url, 'deps_file': 'DEPS', 'managed': False, 'custom_deps': {}, 'safesync_url': '', }, ], - 'auto': True, # Runs git auto-svn as a part of the fetch. 'with_branch_heads': True, } if props.get('target_os'): spec['target_os'] = props['target_os'].split(',') return { - 'type': 'gclient_git_svn', ? ---- + 'type': 'gclient_git', - 'gclient_git_svn_spec': spec, ? ---- + 'gclient_git_spec': spec, } @staticmethod def expected_root(_props): return 'src' def main(argv=None): return WebRTC().handle_args(argv) if __name__ == '__main__': sys.exit(main(sys.argv))
abc95f3a10cd27ec67e982b187f7948d0dc83fe3
corgi/sql.py
corgi/sql.py
from six.moves import configparser as CP from sqlalchemy.engine.url import URL from sqlalchemy.engine import create_engine import os import pandas as pd def get_odbc_engine(name, odbc_filename='/etc/odbc.ini', database=None): """ Looks up the connection details in an odbc file and returns a SQLAlchemy engine initialized with those details. """ parser = CP.ConfigParser() parser.read(odbc_filename) cfg_dict = dict(parser.items(name)) if database: cfg_dict['database'] = database connection_href = str(URL(**cfg_dict)) engine = create_engine(connection_href) return engine def cached_read_sql(name, engine, sql_loc='sql', out_data_loc='data', refresh=False): sql_fname = '%s/%s.sql' % (sql_loc, name) data_fname = '%s/%s.csv' % (out_data_loc, name) if os.path.isfile(data_fname): return pd.read_csv(data_fname) with open(sql_fname) as f: df = pd.read_sql(f.read(), engine) df.to_csv(data_fname, index=False) return df
import os from pathlib import Path from six.moves import configparser as CP import pandas as pd from sqlalchemy.engine import create_engine from sqlalchemy.engine.url import URL home = str(Path.home()) def get_odbc_engine(name, odbc_filename=None, database=None): """ Looks up the connection details in an odbc file and returns a SQLAlchemy engine initialized with those details. """ possible_locations = [] if odbc_filename: possible_locations += [odbc_filename] possible_locations += [ '/etc/odbc.ini', '%s/odbc.ini' % home, ] odbc_loc = None for loc in possible_locations: if os.path.exists(loc): odbc_loc = loc break if not odbc_loc: raise Exception('Could not find an odbc config file. Checked: \n%s' % "\n".join(possible_locations)) parser = CP.ConfigParser() parser.read(odbc_loc) cfg_dict = dict(parser.items(name)) if database: cfg_dict['database'] = database connection_href = str(URL(**cfg_dict)) engine = create_engine(connection_href) return engine def cached_read_sql(name, engine, sql_loc='sql', out_data_loc='data', refresh=False): sql_fname = '%s/%s.sql' % (sql_loc, name) data_fname = '%s/%s.csv' % (out_data_loc, name) if os.path.isfile(data_fname): return pd.read_csv(data_fname) with open(sql_fname) as f: df = pd.read_sql(f.read(), engine) df.to_csv(data_fname, index=False) return df
Set the get_odbc_engine function to check etc, then user home for odbc file by default
Set the get_odbc_engine function to check etc, then user home for odbc file by default
Python
mit
log0ymxm/corgi
+ import os + from pathlib import Path + from six.moves import configparser as CP + + import pandas as pd + from sqlalchemy.engine import create_engine from sqlalchemy.engine.url import URL - from sqlalchemy.engine import create_engine - import os - import pandas as pd + home = str(Path.home()) + - def get_odbc_engine(name, odbc_filename='/etc/odbc.ini', database=None): + def get_odbc_engine(name, odbc_filename=None, database=None): """ Looks up the connection details in an odbc file and returns a SQLAlchemy engine initialized with those details. """ + possible_locations = [] + if odbc_filename: + possible_locations += [odbc_filename] + possible_locations += [ + '/etc/odbc.ini', + '%s/odbc.ini' % home, + ] + + odbc_loc = None + for loc in possible_locations: + if os.path.exists(loc): + odbc_loc = loc + break + if not odbc_loc: + raise Exception('Could not find an odbc config file. Checked: \n%s' % "\n".join(possible_locations)) parser = CP.ConfigParser() - parser.read(odbc_filename) + parser.read(odbc_loc) cfg_dict = dict(parser.items(name)) if database: cfg_dict['database'] = database connection_href = str(URL(**cfg_dict)) engine = create_engine(connection_href) return engine def cached_read_sql(name, engine, sql_loc='sql', out_data_loc='data', refresh=False): sql_fname = '%s/%s.sql' % (sql_loc, name) data_fname = '%s/%s.csv' % (out_data_loc, name) if os.path.isfile(data_fname): return pd.read_csv(data_fname) with open(sql_fname) as f: df = pd.read_sql(f.read(), engine) df.to_csv(data_fname, index=False) return df
Set the get_odbc_engine function to check etc, then user home for odbc file by default
## Code Before: from six.moves import configparser as CP from sqlalchemy.engine.url import URL from sqlalchemy.engine import create_engine import os import pandas as pd def get_odbc_engine(name, odbc_filename='/etc/odbc.ini', database=None): """ Looks up the connection details in an odbc file and returns a SQLAlchemy engine initialized with those details. """ parser = CP.ConfigParser() parser.read(odbc_filename) cfg_dict = dict(parser.items(name)) if database: cfg_dict['database'] = database connection_href = str(URL(**cfg_dict)) engine = create_engine(connection_href) return engine def cached_read_sql(name, engine, sql_loc='sql', out_data_loc='data', refresh=False): sql_fname = '%s/%s.sql' % (sql_loc, name) data_fname = '%s/%s.csv' % (out_data_loc, name) if os.path.isfile(data_fname): return pd.read_csv(data_fname) with open(sql_fname) as f: df = pd.read_sql(f.read(), engine) df.to_csv(data_fname, index=False) return df ## Instruction: Set the get_odbc_engine function to check etc, then user home for odbc file by default ## Code After: import os from pathlib import Path from six.moves import configparser as CP import pandas as pd from sqlalchemy.engine import create_engine from sqlalchemy.engine.url import URL home = str(Path.home()) def get_odbc_engine(name, odbc_filename=None, database=None): """ Looks up the connection details in an odbc file and returns a SQLAlchemy engine initialized with those details. """ possible_locations = [] if odbc_filename: possible_locations += [odbc_filename] possible_locations += [ '/etc/odbc.ini', '%s/odbc.ini' % home, ] odbc_loc = None for loc in possible_locations: if os.path.exists(loc): odbc_loc = loc break if not odbc_loc: raise Exception('Could not find an odbc config file. 
Checked: \n%s' % "\n".join(possible_locations)) parser = CP.ConfigParser() parser.read(odbc_loc) cfg_dict = dict(parser.items(name)) if database: cfg_dict['database'] = database connection_href = str(URL(**cfg_dict)) engine = create_engine(connection_href) return engine def cached_read_sql(name, engine, sql_loc='sql', out_data_loc='data', refresh=False): sql_fname = '%s/%s.sql' % (sql_loc, name) data_fname = '%s/%s.csv' % (out_data_loc, name) if os.path.isfile(data_fname): return pd.read_csv(data_fname) with open(sql_fname) as f: df = pd.read_sql(f.read(), engine) df.to_csv(data_fname, index=False) return df
+ import os + from pathlib import Path + from six.moves import configparser as CP + + import pandas as pd + from sqlalchemy.engine import create_engine from sqlalchemy.engine.url import URL - from sqlalchemy.engine import create_engine - import os - import pandas as pd + home = str(Path.home()) + - def get_odbc_engine(name, odbc_filename='/etc/odbc.ini', database=None): ? ^^ ------------ + def get_odbc_engine(name, odbc_filename=None, database=None): ? ^^^ """ Looks up the connection details in an odbc file and returns a SQLAlchemy engine initialized with those details. """ + possible_locations = [] + if odbc_filename: + possible_locations += [odbc_filename] + possible_locations += [ + '/etc/odbc.ini', + '%s/odbc.ini' % home, + ] + + odbc_loc = None + for loc in possible_locations: + if os.path.exists(loc): + odbc_loc = loc + break + if not odbc_loc: + raise Exception('Could not find an odbc config file. Checked: \n%s' % "\n".join(possible_locations)) parser = CP.ConfigParser() - parser.read(odbc_filename) ? -- ^^^^^ + parser.read(odbc_loc) ? ^^ cfg_dict = dict(parser.items(name)) if database: cfg_dict['database'] = database connection_href = str(URL(**cfg_dict)) engine = create_engine(connection_href) return engine def cached_read_sql(name, engine, sql_loc='sql', out_data_loc='data', refresh=False): sql_fname = '%s/%s.sql' % (sql_loc, name) data_fname = '%s/%s.csv' % (out_data_loc, name) if os.path.isfile(data_fname): return pd.read_csv(data_fname) with open(sql_fname) as f: df = pd.read_sql(f.read(), engine) df.to_csv(data_fname, index=False) return df
9216224d96770e32778c46b4959731ac70cb2c88
london_commute_alert.py
london_commute_alert.py
import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_command.sh') as f: raw_command = f.read() if lines: subject = 'Tube delays for commute' body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items()) else: subject = 'Good service for commute' body = 'Good service on all lines' # We must have this running on PythonAnywhere - Monday to Sunday. # Ignore Saturday and Sunday if datetime.date.today().isoweekday() in range(1, 6): os.system(raw_command.format(subject=subject, body=body)) def main(): commute_lines = ['metropolitan', 'jubilee', 'central'] status = update() delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'} email(delays) if __name__ == '__main__': main()
import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_command.sh') as f: raw_command = f.read() if lines: subject = 'Tube delays for commute' body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items()) else: subject = 'Good service for commute' body = 'Good service on all lines' os.system(raw_command.format(subject=subject, body=body)) def main(): commute_lines = ['metropolitan', 'jubilee', 'central'] status = update() delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'} email(delays) if __name__ == '__main__': main()
Move from python anywhere to webfaction
Move from python anywhere to webfaction
Python
mit
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_command.sh') as f: raw_command = f.read() if lines: subject = 'Tube delays for commute' body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items()) else: subject = 'Good service for commute' body = 'Good service on all lines' - # We must have this running on PythonAnywhere - Monday to Sunday. - # Ignore Saturday and Sunday - if datetime.date.today().isoweekday() in range(1, 6): - os.system(raw_command.format(subject=subject, body=body)) + os.system(raw_command.format(subject=subject, body=body)) def main(): commute_lines = ['metropolitan', 'jubilee', 'central'] status = update() delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'} email(delays) if __name__ == '__main__': main()
Move from python anywhere to webfaction
## Code Before: import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_command.sh') as f: raw_command = f.read() if lines: subject = 'Tube delays for commute' body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items()) else: subject = 'Good service for commute' body = 'Good service on all lines' # We must have this running on PythonAnywhere - Monday to Sunday. # Ignore Saturday and Sunday if datetime.date.today().isoweekday() in range(1, 6): os.system(raw_command.format(subject=subject, body=body)) def main(): commute_lines = ['metropolitan', 'jubilee', 'central'] status = update() delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'} email(delays) if __name__ == '__main__': main() ## Instruction: Move from python anywhere to webfaction ## Code After: import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_command.sh') as f: raw_command = f.read() if lines: subject = 'Tube delays for commute' body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items()) else: subject = 'Good service for commute' body = 'Good service on all lines' os.system(raw_command.format(subject=subject, body=body)) def main(): commute_lines = ['metropolitan', 'jubilee', 'central'] status = update() delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'} email(delays) if __name__ == '__main__': main()
import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_command.sh') as f: raw_command = f.read() if lines: subject = 'Tube delays for commute' body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items()) else: subject = 'Good service for commute' body = 'Good service on all lines' - # We must have this running on PythonAnywhere - Monday to Sunday. - # Ignore Saturday and Sunday - if datetime.date.today().isoweekday() in range(1, 6): - os.system(raw_command.format(subject=subject, body=body)) ? ---- + os.system(raw_command.format(subject=subject, body=body)) def main(): commute_lines = ['metropolitan', 'jubilee', 'central'] status = update() delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'} email(delays) if __name__ == '__main__': main()
703861029a2d3a36bbb18ed9e56c55064323478c
account_banking_payment_export/migrations/8.0.0.1.166/pre-migrate.py
account_banking_payment_export/migrations/8.0.0.1.166/pre-migrate.py
def migrate(cr, version): cr.execute('alter table payment_order add column total numeric') cr.execute( 'update payment_order ' 'set total=totals.total ' 'from ' '(select order_id, sum(amount_currency) total ' 'from payment_line group by order_id) totals ' 'where payment_order.id=totals.order_id')
def migrate(cr, version): cr.execute( 'SELECT count(attname) FROM pg_attribute ' 'WHERE attrelid = ' '( SELECT oid FROM pg_class WHERE relname = %s ) ' 'AND attname = %s', ('payment_order', 'total')) if cr.fetchone()[0] == 0: cr.execute('alter table payment_order add column total numeric') cr.execute( 'update payment_order ' 'set total=totals.total ' 'from ' '(select order_id, sum(amount_currency) total ' 'from payment_line group by order_id) totals ' 'where payment_order.id=totals.order_id')
Check if column exists before creating it
[FIX] account_banking_payment_export: Check if column exists before creating it
Python
agpl-3.0
incaser/bank-payment,damdam-s/bank-payment,sergiocorato/bank-payment,Antiun/bank-payment,acsone/bank-payment,damdam-s/bank-payment,CompassionCH/bank-payment,diagramsoftware/bank-payment,sergiocorato/bank-payment,open-synergy/bank-payment,hbrunn/bank-payment,Antiun/bank-payment,CompassionCH/bank-payment
def migrate(cr, version): + cr.execute( + 'SELECT count(attname) FROM pg_attribute ' + 'WHERE attrelid = ' + '( SELECT oid FROM pg_class WHERE relname = %s ) ' + 'AND attname = %s', + ('payment_order', 'total')) + if cr.fetchone()[0] == 0: - cr.execute('alter table payment_order add column total numeric') + cr.execute('alter table payment_order add column total numeric') cr.execute( 'update payment_order ' 'set total=totals.total ' 'from ' '(select order_id, sum(amount_currency) total ' 'from payment_line group by order_id) totals ' 'where payment_order.id=totals.order_id')
Check if column exists before creating it
## Code Before: def migrate(cr, version): cr.execute('alter table payment_order add column total numeric') cr.execute( 'update payment_order ' 'set total=totals.total ' 'from ' '(select order_id, sum(amount_currency) total ' 'from payment_line group by order_id) totals ' 'where payment_order.id=totals.order_id') ## Instruction: Check if column exists before creating it ## Code After: def migrate(cr, version): cr.execute( 'SELECT count(attname) FROM pg_attribute ' 'WHERE attrelid = ' '( SELECT oid FROM pg_class WHERE relname = %s ) ' 'AND attname = %s', ('payment_order', 'total')) if cr.fetchone()[0] == 0: cr.execute('alter table payment_order add column total numeric') cr.execute( 'update payment_order ' 'set total=totals.total ' 'from ' '(select order_id, sum(amount_currency) total ' 'from payment_line group by order_id) totals ' 'where payment_order.id=totals.order_id')
def migrate(cr, version): + cr.execute( + 'SELECT count(attname) FROM pg_attribute ' + 'WHERE attrelid = ' + '( SELECT oid FROM pg_class WHERE relname = %s ) ' + 'AND attname = %s', + ('payment_order', 'total')) + if cr.fetchone()[0] == 0: - cr.execute('alter table payment_order add column total numeric') + cr.execute('alter table payment_order add column total numeric') ? ++++ cr.execute( 'update payment_order ' 'set total=totals.total ' 'from ' '(select order_id, sum(amount_currency) total ' 'from payment_line group by order_id) totals ' 'where payment_order.id=totals.order_id')
6c8122be60b25bbe9ba4ff8a714370e801e6ae70
cufflinks/offline.py
cufflinks/offline.py
import plotly.offline as py_offline ### Offline Mode def go_offline(connected=False): try: py_offline.init_notebook_mode(connected) except TypeError: #For older versions of plotly py_offline.init_notebook_mode() py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url)
import plotly.offline as py_offline ### Offline Mode def run_from_ipython(): try: __IPYTHON__ return True except NameError: return False def go_offline(connected=False): if run_from_ipython(): try: py_offline.init_notebook_mode(connected) except TypeError: #For older versions of plotly py_offline.init_notebook_mode() py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url)
Call init_notebook_mode only if inside IPython
Call init_notebook_mode only if inside IPython
Python
mit
santosjorge/cufflinks
import plotly.offline as py_offline ### Offline Mode + def run_from_ipython(): + try: + __IPYTHON__ + return True + except NameError: + return False + + def go_offline(connected=False): - try: + if run_from_ipython(): + try: - py_offline.init_notebook_mode(connected) + py_offline.init_notebook_mode(connected) - except TypeError: + except TypeError: - #For older versions of plotly + #For older versions of plotly - py_offline.init_notebook_mode() + py_offline.init_notebook_mode() - py_offline.__PLOTLY_OFFLINE_INITIALIZED=True + py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url)
Call init_notebook_mode only if inside IPython
## Code Before: import plotly.offline as py_offline ### Offline Mode def go_offline(connected=False): try: py_offline.init_notebook_mode(connected) except TypeError: #For older versions of plotly py_offline.init_notebook_mode() py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url) ## Instruction: Call init_notebook_mode only if inside IPython ## Code After: import plotly.offline as py_offline ### Offline Mode def run_from_ipython(): try: __IPYTHON__ return True except NameError: return False def go_offline(connected=False): if run_from_ipython(): try: py_offline.init_notebook_mode(connected) except TypeError: #For older versions of plotly py_offline.init_notebook_mode() py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url)
import plotly.offline as py_offline ### Offline Mode + def run_from_ipython(): + try: + __IPYTHON__ + return True + except NameError: + return False + + def go_offline(connected=False): - try: + if run_from_ipython(): + try: - py_offline.init_notebook_mode(connected) ? ^^ + py_offline.init_notebook_mode(connected) ? ^^^^^^^^^^^^ - except TypeError: ? ^ + except TypeError: ? ^^^^^^^^ - #For older versions of plotly ? ^^ + #For older versions of plotly ? ^^^^^^^^^^^^ - py_offline.init_notebook_mode() ? ^^ + py_offline.init_notebook_mode() ? ^^^^^^^^^^^^ - py_offline.__PLOTLY_OFFLINE_INITIALIZED=True ? ^ + py_offline.__PLOTLY_OFFLINE_INITIALIZED=True ? ^^^^^^^^ def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url)
f3ec19e0893db4fbbad8848dec8f63a09d7ffd06
alg_sum_list.py
alg_sum_list.py
from __future__ import absolute_import from __future__ import print_function from __future__ import division def sum_list_iter(num_ls): """Sum number list by for loop.""" _sum = 0 for num in num_ls: _sum += num return _sum def sum_list_recur(num_ls): """Sum number list by recursion.""" if len(num_ls) == 1: return num_ls[0] else: return num_ls[0] + sum_list_recur(num_ls[1:]) def main(): import time num_ls = range(100) start_time = time.time() print('By iteration: {}'.format(sum_list_iter(num_ls))) print('Time: {}'.format(time.time() - start_time)) start_time = time.time() print('By recursion: {}'.format(sum_list_recur(num_ls))) print('Time: {}'.format(time.time() - start_time)) if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import print_function from __future__ import division def sum_list_recur(a_list): """Sum list by recursion. Time complexity: O(n), where n is the list length. Space complexity: O(n). """ if len(a_list) == 1: return a_list[0] else: return a_list[0] + sum_list_recur(a_list[1:]) def sum_list_dp(a_list): """Sum list by bottom-up dynamic programming. Time complexity: O(n). Space complexity: O(1). """ s = 0 for x in a_list: s += x return s def main(): import time import random a_list = [random.randint(0, 1000) for _ in range(100)] start_time = time.time() print('By recursion: {}'.format(sum_list_recur(a_list))) print('Time: {}'.format(time.time() - start_time)) start_time = time.time() print('By DP: {}'.format(sum_list_dp(a_list))) print('Time: {}'.format(time.time() - start_time)) if __name__ == '__main__': main()
Add time/space complexity; revise var's
Add time/space complexity; revise var's
Python
bsd-2-clause
bowen0701/algorithms_data_structures
from __future__ import absolute_import from __future__ import print_function from __future__ import division - def sum_list_iter(num_ls): + def sum_list_recur(a_list): - """Sum number list by for loop.""" - _sum = 0 - for num in num_ls: - _sum += num - return _sum + """Sum list by recursion. + + Time complexity: O(n), where n is the list length. + Space complexity: O(n). + """ + if len(a_list) == 1: + return a_list[0] + else: + return a_list[0] + sum_list_recur(a_list[1:]) - def sum_list_recur(num_ls): - """Sum number list by recursion.""" - if len(num_ls) == 1: - return num_ls[0] - else: - return num_ls[0] + sum_list_recur(num_ls[1:]) + def sum_list_dp(a_list): + """Sum list by bottom-up dynamic programming. + + Time complexity: O(n). + Space complexity: O(1). + """ + s = 0 + for x in a_list: + s += x + return s def main(): import time + import random - num_ls = range(100) + a_list = [random.randint(0, 1000) for _ in range(100)] + + start_time = time.time() + print('By recursion: {}'.format(sum_list_recur(a_list))) + print('Time: {}'.format(time.time() - start_time)) + start_time = time.time() - print('By iteration: {}'.format(sum_list_iter(num_ls))) + print('By DP: {}'.format(sum_list_dp(a_list))) - print('Time: {}'.format(time.time() - start_time)) - - start_time = time.time() - print('By recursion: {}'.format(sum_list_recur(num_ls))) print('Time: {}'.format(time.time() - start_time)) if __name__ == '__main__': main()
Add time/space complexity; revise var's
## Code Before: from __future__ import absolute_import from __future__ import print_function from __future__ import division def sum_list_iter(num_ls): """Sum number list by for loop.""" _sum = 0 for num in num_ls: _sum += num return _sum def sum_list_recur(num_ls): """Sum number list by recursion.""" if len(num_ls) == 1: return num_ls[0] else: return num_ls[0] + sum_list_recur(num_ls[1:]) def main(): import time num_ls = range(100) start_time = time.time() print('By iteration: {}'.format(sum_list_iter(num_ls))) print('Time: {}'.format(time.time() - start_time)) start_time = time.time() print('By recursion: {}'.format(sum_list_recur(num_ls))) print('Time: {}'.format(time.time() - start_time)) if __name__ == '__main__': main() ## Instruction: Add time/space complexity; revise var's ## Code After: from __future__ import absolute_import from __future__ import print_function from __future__ import division def sum_list_recur(a_list): """Sum list by recursion. Time complexity: O(n), where n is the list length. Space complexity: O(n). """ if len(a_list) == 1: return a_list[0] else: return a_list[0] + sum_list_recur(a_list[1:]) def sum_list_dp(a_list): """Sum list by bottom-up dynamic programming. Time complexity: O(n). Space complexity: O(1). """ s = 0 for x in a_list: s += x return s def main(): import time import random a_list = [random.randint(0, 1000) for _ in range(100)] start_time = time.time() print('By recursion: {}'.format(sum_list_recur(a_list))) print('Time: {}'.format(time.time() - start_time)) start_time = time.time() print('By DP: {}'.format(sum_list_dp(a_list))) print('Time: {}'.format(time.time() - start_time)) if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import print_function from __future__ import division - def sum_list_iter(num_ls): ? ^^ ^^^ + def sum_list_recur(a_list): ? ^ ++ ^ + + - """Sum number list by for loop.""" - _sum = 0 - for num in num_ls: - _sum += num - return _sum + """Sum list by recursion. + + Time complexity: O(n), where n is the list length. + Space complexity: O(n). + """ + if len(a_list) == 1: + return a_list[0] + else: + return a_list[0] + sum_list_recur(a_list[1:]) - def sum_list_recur(num_ls): - """Sum number list by recursion.""" - if len(num_ls) == 1: - return num_ls[0] - else: - return num_ls[0] + sum_list_recur(num_ls[1:]) + def sum_list_dp(a_list): + """Sum list by bottom-up dynamic programming. + + Time complexity: O(n). + Space complexity: O(1). + """ + s = 0 + for x in a_list: + s += x + return s def main(): import time + import random - num_ls = range(100) + a_list = [random.randint(0, 1000) for _ in range(100)] + + start_time = time.time() + print('By recursion: {}'.format(sum_list_recur(a_list))) + print('Time: {}'.format(time.time() - start_time)) + start_time = time.time() - print('By iteration: {}'.format(sum_list_iter(num_ls))) ? ^^^^^^^^^ ^^^^ ^^^ + print('By DP: {}'.format(sum_list_dp(a_list))) ? ^^ ^^ ^ + + - print('Time: {}'.format(time.time() - start_time)) - - start_time = time.time() - print('By recursion: {}'.format(sum_list_recur(num_ls))) print('Time: {}'.format(time.time() - start_time)) if __name__ == '__main__': main()
5188561f7de7f6762e1820a6b447f144f963b1d0
common/spaces.py
common/spaces.py
"""Digital Ocean Spaces interaction""" import boto3 from django.conf import settings class SpacesBucket(): """Interact with Spaces buckets""" def __init__(self): session = boto3.session.Session() self._client = session.client('s3', region_name='nyc3', endpoint_url='https://nyc3.digitaloceanspaces.com', aws_access_key_id=settings.SPACES_ACCESS_KEY_ID, aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET) def create(self, name="new-space-name"): """Create a new Space""" self._client.create_bucket(Bucket=name) def list(self): """List all buckets on your account""" response = self._client.list_buckets() spaces = [space['Name'] for space in response['Buckets']] print("Spaces List: %s" % spaces)
"""Digital Ocean Spaces interaction""" import boto3 from django.conf import settings class SpacesBucket(): """Interact with Spaces buckets""" def __init__(self, space_name="lutris"): session = boto3.session.Session() self._client = session.client('s3', region_name='nyc3', endpoint_url='https://nyc3.digitaloceanspaces.com', aws_access_key_id=settings.SPACES_ACCESS_KEY_ID, aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET) self.space_name = space_name def create(self, name="new-space-name"): """Create a new Space""" self._client.create_bucket(Bucket=name) def list_spaces(self): """List all buckets on your account""" response = self._client.list_buckets() return [space['Name'] for space in response['Buckets']] def upload(self, local_path, dest_path, public=False): """Upload a file to Spaces""" self._client.upload_file(local_path, self.space_name, dest_path) if public: self._client.put_object_acl( ACL="public-read", Bucket=self.space_name, Key=dest_path )
Add upload to Spaces API client
Add upload to Spaces API client
Python
agpl-3.0
lutris/website,lutris/website,lutris/website,lutris/website
"""Digital Ocean Spaces interaction""" import boto3 from django.conf import settings class SpacesBucket(): """Interact with Spaces buckets""" - def __init__(self): + def __init__(self, space_name="lutris"): session = boto3.session.Session() self._client = session.client('s3', region_name='nyc3', endpoint_url='https://nyc3.digitaloceanspaces.com', aws_access_key_id=settings.SPACES_ACCESS_KEY_ID, aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET) + self.space_name = space_name def create(self, name="new-space-name"): """Create a new Space""" self._client.create_bucket(Bucket=name) - def list(self): + def list_spaces(self): """List all buckets on your account""" response = self._client.list_buckets() - spaces = [space['Name'] for space in response['Buckets']] + return [space['Name'] for space in response['Buckets']] - print("Spaces List: %s" % spaces) + def upload(self, local_path, dest_path, public=False): + """Upload a file to Spaces""" + self._client.upload_file(local_path, self.space_name, dest_path) + if public: + self._client.put_object_acl( + ACL="public-read", + Bucket=self.space_name, + Key=dest_path + ) +
Add upload to Spaces API client
## Code Before: """Digital Ocean Spaces interaction""" import boto3 from django.conf import settings class SpacesBucket(): """Interact with Spaces buckets""" def __init__(self): session = boto3.session.Session() self._client = session.client('s3', region_name='nyc3', endpoint_url='https://nyc3.digitaloceanspaces.com', aws_access_key_id=settings.SPACES_ACCESS_KEY_ID, aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET) def create(self, name="new-space-name"): """Create a new Space""" self._client.create_bucket(Bucket=name) def list(self): """List all buckets on your account""" response = self._client.list_buckets() spaces = [space['Name'] for space in response['Buckets']] print("Spaces List: %s" % spaces) ## Instruction: Add upload to Spaces API client ## Code After: """Digital Ocean Spaces interaction""" import boto3 from django.conf import settings class SpacesBucket(): """Interact with Spaces buckets""" def __init__(self, space_name="lutris"): session = boto3.session.Session() self._client = session.client('s3', region_name='nyc3', endpoint_url='https://nyc3.digitaloceanspaces.com', aws_access_key_id=settings.SPACES_ACCESS_KEY_ID, aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET) self.space_name = space_name def create(self, name="new-space-name"): """Create a new Space""" self._client.create_bucket(Bucket=name) def list_spaces(self): """List all buckets on your account""" response = self._client.list_buckets() return [space['Name'] for space in response['Buckets']] def upload(self, local_path, dest_path, public=False): """Upload a file to Spaces""" self._client.upload_file(local_path, self.space_name, dest_path) if public: self._client.put_object_acl( ACL="public-read", Bucket=self.space_name, Key=dest_path )
"""Digital Ocean Spaces interaction""" import boto3 from django.conf import settings class SpacesBucket(): """Interact with Spaces buckets""" - def __init__(self): + def __init__(self, space_name="lutris"): session = boto3.session.Session() self._client = session.client('s3', region_name='nyc3', endpoint_url='https://nyc3.digitaloceanspaces.com', aws_access_key_id=settings.SPACES_ACCESS_KEY_ID, aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET) + self.space_name = space_name def create(self, name="new-space-name"): """Create a new Space""" self._client.create_bucket(Bucket=name) - def list(self): + def list_spaces(self): ? +++++++ """List all buckets on your account""" response = self._client.list_buckets() - spaces = [space['Name'] for space in response['Buckets']] ? ^^^^ ^^^ + return [space['Name'] for space in response['Buckets']] ? ^ ^^^^ - print("Spaces List: %s" % spaces) + + def upload(self, local_path, dest_path, public=False): + """Upload a file to Spaces""" + self._client.upload_file(local_path, self.space_name, dest_path) + if public: + self._client.put_object_acl( + ACL="public-read", + Bucket=self.space_name, + Key=dest_path + )
e7b7c93efe20ac50256c33ac7b37e4e51151123f
OIPA/api/region/urls.py
OIPA/api/region/urls.py
from django.conf.urls import patterns, url from api.region import views urlpatterns = patterns( '', url(r'^$', views.RegionList.as_view(), name='region-list'), url( r'^/(?P<pk>[0-9]+)$', views.RegionDetail.as_view(), name='region-detail' ), url( r'^/(?P<pk>[0-9]+)/countries$', views.RegionCountries.as_view(), name='region-countries' ), url( r'^/(?P<pk>[0-9]+)/activities$', views.RegionActivities.as_view(), name='region-activities' ), )
from django.conf.urls import patterns, url from api.region import views urlpatterns = patterns( '', url(r'^$', views.RegionList.as_view(), name='region-list'), url( r'^/(?P<pk>[A-Za-z0-9]+)$', views.RegionDetail.as_view(), name='region-detail' ), url( r'^/(?P<pk>[A-Za-z0-9]+)/countries$', views.RegionCountries.as_view(), name='region-countries' ), url( r'^/(?P<pk>[A-Za-z0-9]+)/activities$', views.RegionActivities.as_view(), name='region-activities' ), )
Fix region resolve on non Integer
Fix region resolve on non Integer
Python
agpl-3.0
openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA
from django.conf.urls import patterns, url from api.region import views urlpatterns = patterns( '', url(r'^$', views.RegionList.as_view(), name='region-list'), url( - r'^/(?P<pk>[0-9]+)$', + r'^/(?P<pk>[A-Za-z0-9]+)$', views.RegionDetail.as_view(), name='region-detail' ), url( - r'^/(?P<pk>[0-9]+)/countries$', + r'^/(?P<pk>[A-Za-z0-9]+)/countries$', views.RegionCountries.as_view(), name='region-countries' ), url( - r'^/(?P<pk>[0-9]+)/activities$', + r'^/(?P<pk>[A-Za-z0-9]+)/activities$', views.RegionActivities.as_view(), name='region-activities' ), )
Fix region resolve on non Integer
## Code Before: from django.conf.urls import patterns, url from api.region import views urlpatterns = patterns( '', url(r'^$', views.RegionList.as_view(), name='region-list'), url( r'^/(?P<pk>[0-9]+)$', views.RegionDetail.as_view(), name='region-detail' ), url( r'^/(?P<pk>[0-9]+)/countries$', views.RegionCountries.as_view(), name='region-countries' ), url( r'^/(?P<pk>[0-9]+)/activities$', views.RegionActivities.as_view(), name='region-activities' ), ) ## Instruction: Fix region resolve on non Integer ## Code After: from django.conf.urls import patterns, url from api.region import views urlpatterns = patterns( '', url(r'^$', views.RegionList.as_view(), name='region-list'), url( r'^/(?P<pk>[A-Za-z0-9]+)$', views.RegionDetail.as_view(), name='region-detail' ), url( r'^/(?P<pk>[A-Za-z0-9]+)/countries$', views.RegionCountries.as_view(), name='region-countries' ), url( r'^/(?P<pk>[A-Za-z0-9]+)/activities$', views.RegionActivities.as_view(), name='region-activities' ), )
from django.conf.urls import patterns, url from api.region import views urlpatterns = patterns( '', url(r'^$', views.RegionList.as_view(), name='region-list'), url( - r'^/(?P<pk>[0-9]+)$', + r'^/(?P<pk>[A-Za-z0-9]+)$', ? ++++++ views.RegionDetail.as_view(), name='region-detail' ), url( - r'^/(?P<pk>[0-9]+)/countries$', + r'^/(?P<pk>[A-Za-z0-9]+)/countries$', ? ++++++ views.RegionCountries.as_view(), name='region-countries' ), url( - r'^/(?P<pk>[0-9]+)/activities$', + r'^/(?P<pk>[A-Za-z0-9]+)/activities$', ? ++++++ views.RegionActivities.as_view(), name='region-activities' ), )
2d64c01daebd918c3e6196b1eb3ad62f105c56e0
django_google_charts/charts.py
django_google_charts/charts.py
import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), )
import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe from django.utils.encoding import python_2_unicode_compatible CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) @python_2_unicode_compatible class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), )
Make this Python 2.x compatible
Make this Python 2.x compatible
Python
mit
danpalmer/django-google-charts,danpalmer/django-google-charts
import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe + from django.utils.encoding import python_2_unicode_compatible CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) + @python_2_unicode_compatible class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), )
Make this Python 2.x compatible
## Code Before: import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), ) ## Instruction: Make this Python 2.x compatible ## Code After: import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe from django.utils.encoding import python_2_unicode_compatible CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) @python_2_unicode_compatible class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), )
import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe + from django.utils.encoding import python_2_unicode_compatible CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) + @python_2_unicode_compatible class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), )
22e82e3fb6949efe862216feafaedb2da9b19c62
filehandler.py
filehandler.py
import csv import sys import urllib from scheduleitem import ScheduleItem from team import Team def read(uri): """Open a File or a Web URL""" if uri.startswith('http://') or uri.startswith('https://'): return open_url(uri) else: return open_file(uri) def open_url(url): """Return the games file data as an array""" try: with urllib.request.urlopen(url) as response: return response.read() except urllib.HTTPError as e: msg = "Could Not Open URL {}.\nThe Code is: {} " print(msg.format(url, e.code)) sys.exit(1) except urllib.URLError as e: msg = "Could Not Open URL {}.\nThe Reason is: {} " print(msg.format(url.url, e.reason)) sys.exit(1) def open_file(uri): """Return the games file data as an array""" try: with open(uri, 'r') as f: return f.read() except IOError: msg = "Could not open file: `{}`" print(msg.format(uri)) sys.exit(1) def load_schedules(games_file): with open(games_file, 'r') as f: return [ScheduleItem.from_str(line) for line in f.readlines()] def load_teams_data(data_file): with open(data_file, 'r') as csv_file: reader = csv.reader(csv_file) # Skip the header row next(reader) return [Team(row[0], row[2], row[3]) for row in reader]
import csv import sys import urllib.error import urllib.request from scheduleitem import ScheduleItem from team import Team def read(uri): """Open a File or a Web URL""" if uri.startswith('http://') or uri.startswith('https://'): return open_url(uri) else: return open_local_file(uri) def open_url(url): """Return the game file data.""" with urllib.request.urlopen(url) as response: if response.status != 200: msg = 'Status {}. Could Not Open URL {}. Reason: {}' raise urllib.error.HTTPError( msg.format(response.status, url, response.msg) ) encoding = sys.getdefaultencoding() return [line.decode(encoding) for line in response.readlines()] def open_local_file(uri): """Return the games file data as an array""" with open(uri, 'r') as f: return f.readlines() def load_schedules(uri): data = read(uri) return [ScheduleItem.from_str(line) for line in data] def load_teams_data(data_file): with open(data_file, 'r') as csv_file: reader = csv.reader(csv_file) next(reader) # Skip the header row return [Team(row[0], row[2], row[3]) for row in reader]
Update file handlers to use Python3 urllib
Update file handlers to use Python3 urllib
Python
mit
brianjbuck/robie
import csv import sys - import urllib + import urllib.error + import urllib.request + from scheduleitem import ScheduleItem from team import Team def read(uri): """Open a File or a Web URL""" if uri.startswith('http://') or uri.startswith('https://'): return open_url(uri) else: - return open_file(uri) + return open_local_file(uri) def open_url(url): - """Return the games file data as an array""" + """Return the game file data.""" - try: - with urllib.request.urlopen(url) as response: + with urllib.request.urlopen(url) as response: + if response.status != 200: - return response.read() - except urllib.HTTPError as e: - msg = "Could Not Open URL {}.\nThe Code is: {} " - print(msg.format(url, e.code)) - sys.exit(1) - except urllib.URLError as e: - msg = "Could Not Open URL {}.\nThe Reason is: {} " + msg = 'Status {}. Could Not Open URL {}. Reason: {}' - print(msg.format(url.url, e.reason)) - sys.exit(1) + raise urllib.error.HTTPError( + msg.format(response.status, url, response.msg) + ) + encoding = sys.getdefaultencoding() + return [line.decode(encoding) for line in response.readlines()] - def open_file(uri): + def open_local_file(uri): """Return the games file data as an array""" - try: - with open(uri, 'r') as f: + with open(uri, 'r') as f: - return f.read() + return f.readlines() - except IOError: - msg = "Could not open file: `{}`" - print(msg.format(uri)) - sys.exit(1) - def load_schedules(games_file): + def load_schedules(uri): - with open(games_file, 'r') as f: + data = read(uri) - return [ScheduleItem.from_str(line) for line in f.readlines()] + return [ScheduleItem.from_str(line) for line in data] def load_teams_data(data_file): with open(data_file, 'r') as csv_file: reader = csv.reader(csv_file) - # Skip the header row + next(reader) # Skip the header row - next(reader) return [Team(row[0], row[2], row[3]) for row in reader]
Update file handlers to use Python3 urllib
## Code Before: import csv import sys import urllib from scheduleitem import ScheduleItem from team import Team def read(uri): """Open a File or a Web URL""" if uri.startswith('http://') or uri.startswith('https://'): return open_url(uri) else: return open_file(uri) def open_url(url): """Return the games file data as an array""" try: with urllib.request.urlopen(url) as response: return response.read() except urllib.HTTPError as e: msg = "Could Not Open URL {}.\nThe Code is: {} " print(msg.format(url, e.code)) sys.exit(1) except urllib.URLError as e: msg = "Could Not Open URL {}.\nThe Reason is: {} " print(msg.format(url.url, e.reason)) sys.exit(1) def open_file(uri): """Return the games file data as an array""" try: with open(uri, 'r') as f: return f.read() except IOError: msg = "Could not open file: `{}`" print(msg.format(uri)) sys.exit(1) def load_schedules(games_file): with open(games_file, 'r') as f: return [ScheduleItem.from_str(line) for line in f.readlines()] def load_teams_data(data_file): with open(data_file, 'r') as csv_file: reader = csv.reader(csv_file) # Skip the header row next(reader) return [Team(row[0], row[2], row[3]) for row in reader] ## Instruction: Update file handlers to use Python3 urllib ## Code After: import csv import sys import urllib.error import urllib.request from scheduleitem import ScheduleItem from team import Team def read(uri): """Open a File or a Web URL""" if uri.startswith('http://') or uri.startswith('https://'): return open_url(uri) else: return open_local_file(uri) def open_url(url): """Return the game file data.""" with urllib.request.urlopen(url) as response: if response.status != 200: msg = 'Status {}. Could Not Open URL {}. 
Reason: {}' raise urllib.error.HTTPError( msg.format(response.status, url, response.msg) ) encoding = sys.getdefaultencoding() return [line.decode(encoding) for line in response.readlines()] def open_local_file(uri): """Return the games file data as an array""" with open(uri, 'r') as f: return f.readlines() def load_schedules(uri): data = read(uri) return [ScheduleItem.from_str(line) for line in data] def load_teams_data(data_file): with open(data_file, 'r') as csv_file: reader = csv.reader(csv_file) next(reader) # Skip the header row return [Team(row[0], row[2], row[3]) for row in reader]
import csv import sys - import urllib + import urllib.error ? ++++++ + import urllib.request + from scheduleitem import ScheduleItem from team import Team def read(uri): """Open a File or a Web URL""" if uri.startswith('http://') or uri.startswith('https://'): return open_url(uri) else: - return open_file(uri) + return open_local_file(uri) ? ++++++ def open_url(url): - """Return the games file data as an array""" ? - ^^^^^^^^^^^^ + """Return the game file data.""" ? ^ - try: - with urllib.request.urlopen(url) as response: ? ---- + with urllib.request.urlopen(url) as response: + if response.status != 200: - return response.read() - except urllib.HTTPError as e: - msg = "Could Not Open URL {}.\nThe Code is: {} " - print(msg.format(url, e.code)) - sys.exit(1) - except urllib.URLError as e: - msg = "Could Not Open URL {}.\nThe Reason is: {} " ? ^ ----- --- ^^ + msg = 'Status {}. Could Not Open URL {}. Reason: {}' ? ++++ ^^^^^^^^^^^^ ^ - print(msg.format(url.url, e.reason)) - sys.exit(1) + raise urllib.error.HTTPError( + msg.format(response.status, url, response.msg) + ) + encoding = sys.getdefaultencoding() + return [line.decode(encoding) for line in response.readlines()] - def open_file(uri): + def open_local_file(uri): ? ++++++ """Return the games file data as an array""" - try: - with open(uri, 'r') as f: ? ---- + with open(uri, 'r') as f: - return f.read() ? ---- + return f.readlines() ? +++++ - except IOError: - msg = "Could not open file: `{}`" - print(msg.format(uri)) - sys.exit(1) - def load_schedules(games_file): ? ^^^^^^^ -- + def load_schedules(uri): ? ^^ - with open(games_file, 'r') as f: + data = read(uri) - return [ScheduleItem.from_str(line) for line in f.readlines()] ? ---- ^^^^ ^^^^^^^^ + return [ScheduleItem.from_str(line) for line in data] ? ^ ^^ def load_teams_data(data_file): with open(data_file, 'r') as csv_file: reader = csv.reader(csv_file) - # Skip the header row + next(reader) # Skip the header row ? 
++++++++++++++ - next(reader) return [Team(row[0], row[2], row[3]) for row in reader]
c15dab903d3759578449279cc034d766d362d41f
rest_framework/authtoken/serializers.py
rest_framework/authtoken/serializers.py
from django.contrib.auth import authenticate from rest_framework import serializers class AuthTokenSerializer(serializers.Serializer): username = serializers.CharField() password = serializers.CharField() def validate(self, attrs): username = attrs.get('username') password = attrs.get('password') if username and password: user = authenticate(username=username, password=password) if user: if not user.is_active: raise serializers.ValidationError('User account is disabled.') attrs['user'] = user return attrs else: raise serializers.ValidationError('Unable to login with provided credentials.') else: raise serializers.ValidationError('Must include "username" and "password"')
from django.contrib.auth import authenticate from django.utils.translation import ugettext_lazy as _ from rest_framework import serializers class AuthTokenSerializer(serializers.Serializer): username = serializers.CharField() password = serializers.CharField() def validate(self, attrs): username = attrs.get('username') password = attrs.get('password') if username and password: user = authenticate(username=username, password=password) if user: if not user.is_active: msg = _('User account is disabled.') raise serializers.ValidationError() attrs['user'] = user return attrs else: msg = _('Unable to login with provided credentials.') raise serializers.ValidationError(msg) else: msg = _('Must include "username" and "password"') raise serializers.ValidationError(msg)
Mark strings in AuthTokenSerializer as translatable
Mark strings in AuthTokenSerializer as translatable
Python
bsd-2-clause
linovia/django-rest-framework,nhorelik/django-rest-framework,rafaelang/django-rest-framework,iheitlager/django-rest-framework,fishky/django-rest-framework,bluedazzle/django-rest-framework,damycra/django-rest-framework,HireAnEsquire/django-rest-framework,jerryhebert/django-rest-framework,gregmuellegger/django-rest-framework,ashishfinoit/django-rest-framework,kennydude/django-rest-framework,rafaelcaricio/django-rest-framework,YBJAY00000/django-rest-framework,potpath/django-rest-framework,edx/django-rest-framework,elim/django-rest-framework,rhblind/django-rest-framework,krinart/django-rest-framework,James1345/django-rest-framework,kezabelle/django-rest-framework,elim/django-rest-framework,jpadilla/django-rest-framework,d0ugal/django-rest-framework,davesque/django-rest-framework,hnakamur/django-rest-framework,jpulec/django-rest-framework,edx/django-rest-framework,canassa/django-rest-framework,jpadilla/django-rest-framework,thedrow/django-rest-framework-1,arpheno/django-rest-framework,kennydude/django-rest-framework,sehmaschine/django-rest-framework,vstoykov/django-rest-framework,akalipetis/django-rest-framework,maryokhin/django-rest-framework,qsorix/django-rest-framework,iheitlager/django-rest-framework,jness/django-rest-framework,jtiai/django-rest-framework,aericson/django-rest-framework,simudream/django-rest-framework,hunter007/django-rest-framework,AlexandreProenca/django-rest-framework,justanr/django-rest-framework,ossanna16/django-rest-framework,MJafarMashhadi/django-rest-framework,andriy-s/django-rest-framework,dmwyatt/django-rest-framework,fishky/django-rest-framework,jerryhebert/django-rest-framework,canassa/django-rest-framework,jpadilla/django-rest-framework,davesque/django-rest-framework,hnakamur/django-rest-framework,wangpanjun/django-rest-framework,simudream/django-rest-framework,maryokhin/django-rest-framework,tomchristie/django-rest-framework,pombredanne/django-rest-framework,lubomir/django-rest-framework,jtiai/django-rest-framework,hnakamur/django-rest-f
ramework,delinhabit/django-rest-framework,AlexandreProenca/django-rest-framework,paolopaolopaolo/django-rest-framework,atombrella/django-rest-framework,fishky/django-rest-framework,mgaitan/django-rest-framework,delinhabit/django-rest-framework,vstoykov/django-rest-framework,tigeraniya/django-rest-framework,ambivalentno/django-rest-framework,uploadcare/django-rest-framework,davesque/django-rest-framework,wedaly/django-rest-framework,adambain-vokal/django-rest-framework,sheppard/django-rest-framework,xiaotangyuan/django-rest-framework,ashishfinoit/django-rest-framework,thedrow/django-rest-framework-1,kylefox/django-rest-framework,paolopaolopaolo/django-rest-framework,buptlsl/django-rest-framework,zeldalink0515/django-rest-framework,maryokhin/django-rest-framework,sbellem/django-rest-framework,kgeorgy/django-rest-framework,MJafarMashhadi/django-rest-framework,atombrella/django-rest-framework,alacritythief/django-rest-framework,tomchristie/django-rest-framework,wwj718/django-rest-framework,qsorix/django-rest-framework,brandoncazander/django-rest-framework,hunter007/django-rest-framework,pombredanne/django-rest-framework,callorico/django-rest-framework,sbellem/django-rest-framework,nryoung/django-rest-framework,justanr/django-rest-framework,qsorix/django-rest-framework,andriy-s/django-rest-framework,ebsaral/django-rest-framework,linovia/django-rest-framework,raphaelmerx/django-rest-framework,dmwyatt/django-rest-framework,ambivalentno/django-rest-framework,kezabelle/django-rest-framework,James1345/django-rest-framework,waytai/django-rest-framework,bluedazzle/django-rest-framework,jness/django-rest-framework,ossanna16/django-rest-framework,tcroiset/django-rest-framework,delinhabit/django-rest-framework,douwevandermeij/django-rest-framework,zeldalink0515/django-rest-framework,wwj718/django-rest-framework,d0ugal/django-rest-framework,wwj718/django-rest-framework,leeahoward/django-rest-framework,werthen/django-rest-framework,justanr/django-rest-framework,nryoung/django-rest-f
ramework,tomchristie/django-rest-framework,sbellem/django-rest-framework,sehmaschine/django-rest-framework,buptlsl/django-rest-framework,damycra/django-rest-framework,kylefox/django-rest-framework,andriy-s/django-rest-framework,VishvajitP/django-rest-framework,callorico/django-rest-framework,jtiai/django-rest-framework,uruz/django-rest-framework,callorico/django-rest-framework,kgeorgy/django-rest-framework,hnarayanan/django-rest-framework,akalipetis/django-rest-framework,cyberj/django-rest-framework,jerryhebert/django-rest-framework,uploadcare/django-rest-framework,sheppard/django-rest-framework,MJafarMashhadi/django-rest-framework,antonyc/django-rest-framework,HireAnEsquire/django-rest-framework,cheif/django-rest-framework,werthen/django-rest-framework,ezheidtmann/django-rest-framework,rubendura/django-rest-framework,cyberj/django-rest-framework,nhorelik/django-rest-framework,ticosax/django-rest-framework,johnraz/django-rest-framework,James1345/django-rest-framework,canassa/django-rest-framework,antonyc/django-rest-framework,wzbozon/django-rest-framework,hnarayanan/django-rest-framework,cheif/django-rest-framework,potpath/django-rest-framework,YBJAY00000/django-rest-framework,leeahoward/django-rest-framework,krinart/django-rest-framework,gregmuellegger/django-rest-framework,kezabelle/django-rest-framework,arpheno/django-rest-framework,abdulhaq-e/django-rest-framework,werthen/django-rest-framework,ezheidtmann/django-rest-framework,raphaelmerx/django-rest-framework,mgaitan/django-rest-framework,cyberj/django-rest-framework,aericson/django-rest-framework,elim/django-rest-framework,hnarayanan/django-rest-framework,VishvajitP/django-rest-framework,ambivalentno/django-rest-framework,rafaelcaricio/django-rest-framework,ezheidtmann/django-rest-framework,cheif/django-rest-framework,d0ugal/django-rest-framework,akalipetis/django-rest-framework,alacritythief/django-rest-framework,kylefox/django-rest-framework,agconti/django-rest-framework,lubomir/django-rest-framework,brandon
cazander/django-rest-framework,atombrella/django-rest-framework,douwevandermeij/django-rest-framework,jpulec/django-rest-framework,VishvajitP/django-rest-framework,xiaotangyuan/django-rest-framework,ajaali/django-rest-framework,adambain-vokal/django-rest-framework,rafaelang/django-rest-framework,wangpanjun/django-rest-framework,mgaitan/django-rest-framework,thedrow/django-rest-framework-1,pombredanne/django-rest-framework,nryoung/django-rest-framework,wedaly/django-rest-framework,wangpanjun/django-rest-framework,yiyocx/django-rest-framework,damycra/django-rest-framework,uruz/django-rest-framework,rhblind/django-rest-framework,sheppard/django-rest-framework,yiyocx/django-rest-framework,linovia/django-rest-framework,zeldalink0515/django-rest-framework,iheitlager/django-rest-framework,sehmaschine/django-rest-framework,lubomir/django-rest-framework,abdulhaq-e/django-rest-framework,HireAnEsquire/django-rest-framework,johnraz/django-rest-framework,YBJAY00000/django-rest-framework,rubendura/django-rest-framework,jness/django-rest-framework,potpath/django-rest-framework,rafaelang/django-rest-framework,jpulec/django-rest-framework,ticosax/django-rest-framework,leeahoward/django-rest-framework,arpheno/django-rest-framework,ebsaral/django-rest-framework,xiaotangyuan/django-rest-framework,yiyocx/django-rest-framework,ajaali/django-rest-framework,abdulhaq-e/django-rest-framework,douwevandermeij/django-rest-framework,krinart/django-rest-framework,buptlsl/django-rest-framework,brandoncazander/django-rest-framework,kgeorgy/django-rest-framework,ebsaral/django-rest-framework,tigeraniya/django-rest-framework,agconti/django-rest-framework,rubendura/django-rest-framework,uploadcare/django-rest-framework,adambain-vokal/django-rest-framework,alacritythief/django-rest-framework,bluedazzle/django-rest-framework,agconti/django-rest-framework,gregmuellegger/django-rest-framework,aericson/django-rest-framework,rhblind/django-rest-framework,ossanna16/django-rest-framework,johnraz/django-rest-f
ramework,wedaly/django-rest-framework,simudream/django-rest-framework,edx/django-rest-framework,rafaelcaricio/django-rest-framework,paolopaolopaolo/django-rest-framework,antonyc/django-rest-framework,tcroiset/django-rest-framework,nhorelik/django-rest-framework,waytai/django-rest-framework,ashishfinoit/django-rest-framework,vstoykov/django-rest-framework,wzbozon/django-rest-framework,dmwyatt/django-rest-framework,tcroiset/django-rest-framework,uruz/django-rest-framework,ticosax/django-rest-framework,hunter007/django-rest-framework,AlexandreProenca/django-rest-framework,tigeraniya/django-rest-framework,ajaali/django-rest-framework,waytai/django-rest-framework,raphaelmerx/django-rest-framework,kennydude/django-rest-framework,wzbozon/django-rest-framework
from django.contrib.auth import authenticate + from django.utils.translation import ugettext_lazy as _ + from rest_framework import serializers class AuthTokenSerializer(serializers.Serializer): username = serializers.CharField() password = serializers.CharField() def validate(self, attrs): username = attrs.get('username') password = attrs.get('password') if username and password: user = authenticate(username=username, password=password) if user: if not user.is_active: + msg = _('User account is disabled.') - raise serializers.ValidationError('User account is disabled.') + raise serializers.ValidationError() attrs['user'] = user return attrs else: - raise serializers.ValidationError('Unable to login with provided credentials.') + msg = _('Unable to login with provided credentials.') + raise serializers.ValidationError(msg) else: - raise serializers.ValidationError('Must include "username" and "password"') + msg = _('Must include "username" and "password"') + raise serializers.ValidationError(msg)
Mark strings in AuthTokenSerializer as translatable
## Code Before: from django.contrib.auth import authenticate from rest_framework import serializers class AuthTokenSerializer(serializers.Serializer): username = serializers.CharField() password = serializers.CharField() def validate(self, attrs): username = attrs.get('username') password = attrs.get('password') if username and password: user = authenticate(username=username, password=password) if user: if not user.is_active: raise serializers.ValidationError('User account is disabled.') attrs['user'] = user return attrs else: raise serializers.ValidationError('Unable to login with provided credentials.') else: raise serializers.ValidationError('Must include "username" and "password"') ## Instruction: Mark strings in AuthTokenSerializer as translatable ## Code After: from django.contrib.auth import authenticate from django.utils.translation import ugettext_lazy as _ from rest_framework import serializers class AuthTokenSerializer(serializers.Serializer): username = serializers.CharField() password = serializers.CharField() def validate(self, attrs): username = attrs.get('username') password = attrs.get('password') if username and password: user = authenticate(username=username, password=password) if user: if not user.is_active: msg = _('User account is disabled.') raise serializers.ValidationError() attrs['user'] = user return attrs else: msg = _('Unable to login with provided credentials.') raise serializers.ValidationError(msg) else: msg = _('Must include "username" and "password"') raise serializers.ValidationError(msg)
from django.contrib.auth import authenticate + from django.utils.translation import ugettext_lazy as _ + from rest_framework import serializers class AuthTokenSerializer(serializers.Serializer): username = serializers.CharField() password = serializers.CharField() def validate(self, attrs): username = attrs.get('username') password = attrs.get('password') if username and password: user = authenticate(username=username, password=password) if user: if not user.is_active: + msg = _('User account is disabled.') - raise serializers.ValidationError('User account is disabled.') ? --------------------------- + raise serializers.ValidationError() attrs['user'] = user return attrs else: - raise serializers.ValidationError('Unable to login with provided credentials.') ? ^^^ ^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + msg = _('Unable to login with provided credentials.') ? ^ ^ ^^^ + raise serializers.ValidationError(msg) else: - raise serializers.ValidationError('Must include "username" and "password"') ? ^^^ ^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + msg = _('Must include "username" and "password"') ? ^ ^ ^^^ + raise serializers.ValidationError(msg)
51caae36a10cf5616982c78506c5dcec593259a3
test_suite.py
test_suite.py
import os os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management apps = [ 'test', 'core', 'exporting', 'formatters', 'lexicon', 'events', 'history', 'models', 'query', 'sets', 'stats', 'search', 'subcommands', 'validation', ] management.call_command(*apps)
import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management apps = sys.argv[1:] if not apps: apps = [ 'core', 'exporting', 'formatters', 'lexicon', 'events', 'history', 'models', 'query', 'sets', 'stats', 'search', 'subcommands', 'validation', ] management.call_command('test', *apps)
Allow apps to be specified from the command line
Allow apps to be specified from the command line
Python
bsd-2-clause
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
import os + import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management + apps = sys.argv[1:] - apps = [ - 'test', - 'core', - 'exporting', - 'formatters', - 'lexicon', - 'events', - 'history', - 'models', - 'query', - 'sets', - 'stats', - 'search', - 'subcommands', - 'validation', - ] - management.call_command(*apps) + if not apps: + apps = [ + 'core', + 'exporting', + 'formatters', + 'lexicon', + 'events', + 'history', + 'models', + 'query', + 'sets', + 'stats', + 'search', + 'subcommands', + 'validation', + ] + management.call_command('test', *apps) +
Allow apps to be specified from the command line
## Code Before: import os os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management apps = [ 'test', 'core', 'exporting', 'formatters', 'lexicon', 'events', 'history', 'models', 'query', 'sets', 'stats', 'search', 'subcommands', 'validation', ] management.call_command(*apps) ## Instruction: Allow apps to be specified from the command line ## Code After: import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management apps = sys.argv[1:] if not apps: apps = [ 'core', 'exporting', 'formatters', 'lexicon', 'events', 'history', 'models', 'query', 'sets', 'stats', 'search', 'subcommands', 'validation', ] management.call_command('test', *apps)
import os + import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management + apps = sys.argv[1:] - apps = [ - 'test', - 'core', - 'exporting', - 'formatters', - 'lexicon', - 'events', - 'history', - 'models', - 'query', - 'sets', - 'stats', - 'search', - 'subcommands', - 'validation', - ] + if not apps: + apps = [ + 'core', + 'exporting', + 'formatters', + 'lexicon', + 'events', + 'history', + 'models', + 'query', + 'sets', + 'stats', + 'search', + 'subcommands', + 'validation', + ] + - management.call_command(*apps) + management.call_command('test', *apps) ? ++++++++
0e2d792cfe8d7afff08e08f5eaecdc126c369f54
asyncio/compat.py
asyncio/compat.py
import sys # Python 2.6 or older? PY26 = (sys.version_info < (2, 7)) # Python 3.0 or newer? PY3 = (sys.version_info >= (3,)) # Python 3.3 or newer? PY33 = (sys.version_info >= (3, 3)) # Python 3.4 or newer? PY34 = sys.version_info >= (3, 4) if PY3: BYTES_TYPES = (bytes, bytearray, memoryview) elif PY26: BYTES_TYPES = (bytes, bytearray, buffer) else: BYTES_TYPES = (bytes, bytearray, memoryview, buffer) def flatten_bytes(data): """ Convert bytes-like objects (bytes, bytearray, memoryview, buffer) to a bytes string. """ if not isinstance(data, BYTES_TYPES): raise TypeError('data argument must be byte-ish (%r)', type(data)) if PY34: # In Python 3.4, socket.send() and bytes.join() accept memoryview # and bytearray return data if not data: return b'' if not PY3 and isinstance(data, (buffer, bytearray)): return bytes(data) elif not PY26 and isinstance(data, memoryview): return data.tobytes() else: return data
import sys # Python 2.6 or older? PY26 = (sys.version_info < (2, 7)) # Python 3.0 or newer? PY3 = (sys.version_info >= (3,)) # Python 3.3 or newer? PY33 = (sys.version_info >= (3, 3)) # Python 3.4 or newer? PY34 = sys.version_info >= (3, 4) if PY3: BYTES_TYPES = (bytes, bytearray, memoryview) elif PY26: BYTES_TYPES = (str, bytearray, buffer) else: # Python 2.7 BYTES_TYPES = (str, bytearray, memoryview, buffer) def flatten_bytes(data): """ Convert bytes-like objects (bytes, bytearray, memoryview, buffer) to a bytes string. """ if not isinstance(data, BYTES_TYPES): raise TypeError('data argument must be byte-ish (%r)', type(data)) if PY34: # In Python 3.4, socket.send() and bytes.join() accept memoryview # and bytearray return data if not data: return b'' if not PY3 and isinstance(data, (buffer, bytearray)): return str(data) elif not PY26 and isinstance(data, memoryview): return data.tobytes() else: return data
Use str type instead of bytes in Python 2
Use str type instead of bytes in Python 2
Python
apache-2.0
overcastcloud/trollius,overcastcloud/trollius,overcastcloud/trollius
import sys # Python 2.6 or older? PY26 = (sys.version_info < (2, 7)) # Python 3.0 or newer? PY3 = (sys.version_info >= (3,)) # Python 3.3 or newer? PY33 = (sys.version_info >= (3, 3)) # Python 3.4 or newer? PY34 = sys.version_info >= (3, 4) if PY3: BYTES_TYPES = (bytes, bytearray, memoryview) elif PY26: - BYTES_TYPES = (bytes, bytearray, buffer) + BYTES_TYPES = (str, bytearray, buffer) - else: + else: # Python 2.7 - BYTES_TYPES = (bytes, bytearray, memoryview, buffer) + BYTES_TYPES = (str, bytearray, memoryview, buffer) def flatten_bytes(data): """ Convert bytes-like objects (bytes, bytearray, memoryview, buffer) to a bytes string. """ if not isinstance(data, BYTES_TYPES): raise TypeError('data argument must be byte-ish (%r)', type(data)) if PY34: # In Python 3.4, socket.send() and bytes.join() accept memoryview # and bytearray return data if not data: return b'' if not PY3 and isinstance(data, (buffer, bytearray)): - return bytes(data) + return str(data) elif not PY26 and isinstance(data, memoryview): return data.tobytes() else: return data
Use str type instead of bytes in Python 2
## Code Before: import sys # Python 2.6 or older? PY26 = (sys.version_info < (2, 7)) # Python 3.0 or newer? PY3 = (sys.version_info >= (3,)) # Python 3.3 or newer? PY33 = (sys.version_info >= (3, 3)) # Python 3.4 or newer? PY34 = sys.version_info >= (3, 4) if PY3: BYTES_TYPES = (bytes, bytearray, memoryview) elif PY26: BYTES_TYPES = (bytes, bytearray, buffer) else: BYTES_TYPES = (bytes, bytearray, memoryview, buffer) def flatten_bytes(data): """ Convert bytes-like objects (bytes, bytearray, memoryview, buffer) to a bytes string. """ if not isinstance(data, BYTES_TYPES): raise TypeError('data argument must be byte-ish (%r)', type(data)) if PY34: # In Python 3.4, socket.send() and bytes.join() accept memoryview # and bytearray return data if not data: return b'' if not PY3 and isinstance(data, (buffer, bytearray)): return bytes(data) elif not PY26 and isinstance(data, memoryview): return data.tobytes() else: return data ## Instruction: Use str type instead of bytes in Python 2 ## Code After: import sys # Python 2.6 or older? PY26 = (sys.version_info < (2, 7)) # Python 3.0 or newer? PY3 = (sys.version_info >= (3,)) # Python 3.3 or newer? PY33 = (sys.version_info >= (3, 3)) # Python 3.4 or newer? PY34 = sys.version_info >= (3, 4) if PY3: BYTES_TYPES = (bytes, bytearray, memoryview) elif PY26: BYTES_TYPES = (str, bytearray, buffer) else: # Python 2.7 BYTES_TYPES = (str, bytearray, memoryview, buffer) def flatten_bytes(data): """ Convert bytes-like objects (bytes, bytearray, memoryview, buffer) to a bytes string. """ if not isinstance(data, BYTES_TYPES): raise TypeError('data argument must be byte-ish (%r)', type(data)) if PY34: # In Python 3.4, socket.send() and bytes.join() accept memoryview # and bytearray return data if not data: return b'' if not PY3 and isinstance(data, (buffer, bytearray)): return str(data) elif not PY26 and isinstance(data, memoryview): return data.tobytes() else: return data
import sys # Python 2.6 or older? PY26 = (sys.version_info < (2, 7)) # Python 3.0 or newer? PY3 = (sys.version_info >= (3,)) # Python 3.3 or newer? PY33 = (sys.version_info >= (3, 3)) # Python 3.4 or newer? PY34 = sys.version_info >= (3, 4) if PY3: BYTES_TYPES = (bytes, bytearray, memoryview) elif PY26: - BYTES_TYPES = (bytes, bytearray, buffer) ? ^^ ^^ + BYTES_TYPES = (str, bytearray, buffer) ? ^ ^ - else: + else: # Python 2.7 - BYTES_TYPES = (bytes, bytearray, memoryview, buffer) ? ^^ ^^ + BYTES_TYPES = (str, bytearray, memoryview, buffer) ? ^ ^ def flatten_bytes(data): """ Convert bytes-like objects (bytes, bytearray, memoryview, buffer) to a bytes string. """ if not isinstance(data, BYTES_TYPES): raise TypeError('data argument must be byte-ish (%r)', type(data)) if PY34: # In Python 3.4, socket.send() and bytes.join() accept memoryview # and bytearray return data if not data: return b'' if not PY3 and isinstance(data, (buffer, bytearray)): - return bytes(data) ? ^^ ^^ + return str(data) ? ^ ^ elif not PY26 and isinstance(data, memoryview): return data.tobytes() else: return data
b38647ef390ed6c78c2d55d706bac2f6a396ad39
errors.py
errors.py
import moira_constants class MoiraBaseError(Exception): """Any exception thrown by the library is inhereted from this""" pass class MoiraConnectionError(MoiraBaseError): """An error which prevents the client from having or continuing a meaningful dialogue with a server (parsing failure, connection failure, etc)""" pass class MoiraError(MoiraBaseError): """An error returned from Moira server itself which has a Moira error code.""" def __init__(self, code): if code in moira_constants.errors: MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code]) else: MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code) class MoiraUnavailableError(MoiraBaseError): """An error raised in case when Moira MOTD is not empty.""" pass
import moira_constants class MoiraBaseError(Exception): """Any exception thrown by the library is inhereted from this""" pass class MoiraConnectionError(MoiraBaseError): """An error which prevents the client from having or continuing a meaningful dialogue with a server (parsing failure, connection failure, etc)""" pass class MoiraError(MoiraBaseError): """An error returned from Moira server itself which has a Moira error code.""" def __init__(self, code): if code in moira_constants.errors: MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code]) else: MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code) class MoiraUnavailableError(MoiraBaseError): """An error raised in case when Moira MOTD is not empty.""" pass class MoiraUserError(MoiraBaseError): """An error related to Moira but not returned from the server.""" pass
Introduce a new error class.
Introduce a new error class.
Python
mit
vasilvv/pymoira
import moira_constants class MoiraBaseError(Exception): """Any exception thrown by the library is inhereted from this""" pass class MoiraConnectionError(MoiraBaseError): """An error which prevents the client from having or continuing a meaningful dialogue with a server (parsing failure, connection failure, etc)""" pass class MoiraError(MoiraBaseError): """An error returned from Moira server itself which has a Moira error code.""" def __init__(self, code): if code in moira_constants.errors: MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code]) else: MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code) class MoiraUnavailableError(MoiraBaseError): """An error raised in case when Moira MOTD is not empty.""" pass + class MoiraUserError(MoiraBaseError): + """An error related to Moira but not returned from the server.""" + + pass +
Introduce a new error class.
## Code Before: import moira_constants class MoiraBaseError(Exception): """Any exception thrown by the library is inhereted from this""" pass class MoiraConnectionError(MoiraBaseError): """An error which prevents the client from having or continuing a meaningful dialogue with a server (parsing failure, connection failure, etc)""" pass class MoiraError(MoiraBaseError): """An error returned from Moira server itself which has a Moira error code.""" def __init__(self, code): if code in moira_constants.errors: MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code]) else: MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code) class MoiraUnavailableError(MoiraBaseError): """An error raised in case when Moira MOTD is not empty.""" pass ## Instruction: Introduce a new error class. ## Code After: import moira_constants class MoiraBaseError(Exception): """Any exception thrown by the library is inhereted from this""" pass class MoiraConnectionError(MoiraBaseError): """An error which prevents the client from having or continuing a meaningful dialogue with a server (parsing failure, connection failure, etc)""" pass class MoiraError(MoiraBaseError): """An error returned from Moira server itself which has a Moira error code.""" def __init__(self, code): if code in moira_constants.errors: MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code]) else: MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code) class MoiraUnavailableError(MoiraBaseError): """An error raised in case when Moira MOTD is not empty.""" pass class MoiraUserError(MoiraBaseError): """An error related to Moira but not returned from the server.""" pass
import moira_constants class MoiraBaseError(Exception): """Any exception thrown by the library is inhereted from this""" pass class MoiraConnectionError(MoiraBaseError): """An error which prevents the client from having or continuing a meaningful dialogue with a server (parsing failure, connection failure, etc)""" pass class MoiraError(MoiraBaseError): """An error returned from Moira server itself which has a Moira error code.""" def __init__(self, code): if code in moira_constants.errors: MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code]) else: MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code) class MoiraUnavailableError(MoiraBaseError): """An error raised in case when Moira MOTD is not empty.""" pass + + class MoiraUserError(MoiraBaseError): + """An error related to Moira but not returned from the server.""" + + pass
36d3c2f81ea39968bc58bab172e6bf035147ae3c
mpld3/test_plots/test_logscale.py
mpld3/test_plots/test_logscale.py
"""Plot to test logscale""" import matplotlib.pyplot as plt import numpy as np import mpld3 def create_plot(): fig = plt.figure() fig.subplots_adjust(hspace=0.4, wspace=0.4) ax1 = fig.add_subplot(2, 2, 1) ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log') ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log') ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3) x = np.linspace(1, 1e2) y = x ** 2 for ax in [ax1, ax2, ax3, ax4]: ax.plot(x, y) return fig def test_logscale(): fig = create_plot() html = mpld3.fig_to_html(fig) plt.close(fig) if __name__ == "__main__": mpld3.show(create_plot())
import matplotlib.pyplot as plt import numpy as np import mpld3 def create_plot(): fig = plt.figure() fig.subplots_adjust(hspace=0.4, wspace=0.4) ax1 = fig.add_subplot(2, 2, 1) ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log') ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log') ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3) x = np.linspace(1, 1e2) y = x ** 2 for ax in [ax1, ax2, ax3, ax4]: ax.plot(x, y) return fig def test_logscale(): fig = create_plot() html = mpld3.fig_to_html(fig) plt.close(fig) if __name__ == "__main__": mpld3.show(create_plot())
Add TODO to broken test
Add TODO to broken test
Python
bsd-3-clause
mpld3/mpld3,jakevdp/mpld3,jakevdp/mpld3,mpld3/mpld3
- """Plot to test logscale""" import matplotlib.pyplot as plt import numpy as np import mpld3 def create_plot(): fig = plt.figure() fig.subplots_adjust(hspace=0.4, wspace=0.4) ax1 = fig.add_subplot(2, 2, 1) ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log') ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log') ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3) x = np.linspace(1, 1e2) y = x ** 2 for ax in [ax1, ax2, ax3, ax4]: ax.plot(x, y) return fig def test_logscale(): fig = create_plot() html = mpld3.fig_to_html(fig) plt.close(fig) if __name__ == "__main__": mpld3.show(create_plot())
Add TODO to broken test
## Code Before: """Plot to test logscale""" import matplotlib.pyplot as plt import numpy as np import mpld3 def create_plot(): fig = plt.figure() fig.subplots_adjust(hspace=0.4, wspace=0.4) ax1 = fig.add_subplot(2, 2, 1) ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log') ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log') ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3) x = np.linspace(1, 1e2) y = x ** 2 for ax in [ax1, ax2, ax3, ax4]: ax.plot(x, y) return fig def test_logscale(): fig = create_plot() html = mpld3.fig_to_html(fig) plt.close(fig) if __name__ == "__main__": mpld3.show(create_plot()) ## Instruction: Add TODO to broken test ## Code After: import matplotlib.pyplot as plt import numpy as np import mpld3 def create_plot(): fig = plt.figure() fig.subplots_adjust(hspace=0.4, wspace=0.4) ax1 = fig.add_subplot(2, 2, 1) ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log') ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log') ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3) x = np.linspace(1, 1e2) y = x ** 2 for ax in [ax1, ax2, ax3, ax4]: ax.plot(x, y) return fig def test_logscale(): fig = create_plot() html = mpld3.fig_to_html(fig) plt.close(fig) if __name__ == "__main__": mpld3.show(create_plot())
- """Plot to test logscale""" import matplotlib.pyplot as plt import numpy as np import mpld3 def create_plot(): fig = plt.figure() fig.subplots_adjust(hspace=0.4, wspace=0.4) ax1 = fig.add_subplot(2, 2, 1) ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log') ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log') ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3) x = np.linspace(1, 1e2) y = x ** 2 for ax in [ax1, ax2, ax3, ax4]: ax.plot(x, y) return fig def test_logscale(): fig = create_plot() html = mpld3.fig_to_html(fig) plt.close(fig) if __name__ == "__main__": mpld3.show(create_plot())
ed4666b0d1bf5b8f82e298dfb043cce158c4ba2f
morepath/tests/fixtures/template_unknown_extension_no_render.py
morepath/tests/fixtures/template_unknown_extension_no_render.py
import morepath import os from .template_engine import FormatLoader class App(morepath.App): pass @App.path(path='{name}') class Person(object): def __init__(self, name): self.name = name @App.template_loader(extension='.unknown') def get_template_loader(template_directories, settings): return FormatLoader(template_directories) @App.html(model=Person, template='person.unknown') def person_default(self, request): return { 'name': self.name }
import morepath import os from .template_engine import FormatLoader class App(morepath.App): pass @App.path(path='{name}') class Person(object): def __init__(self, name): self.name = name @App.template_loader(extension='.unknown') def get_template_loader(template_directories, settings): return FormatLoader(template_directories) @App.template_directory() def get_template_directory(): return 'templates' @App.html(model=Person, template='person.unknown') def person_default(self, request): return { 'name': self.name }
Fix so that error under test actually gets triggered.
Fix so that error under test actually gets triggered.
Python
bsd-3-clause
taschini/morepath,faassen/morepath,morepath/morepath
import morepath import os from .template_engine import FormatLoader class App(morepath.App): pass @App.path(path='{name}') class Person(object): def __init__(self, name): self.name = name @App.template_loader(extension='.unknown') def get_template_loader(template_directories, settings): return FormatLoader(template_directories) + @App.template_directory() + def get_template_directory(): + return 'templates' + + @App.html(model=Person, template='person.unknown') def person_default(self, request): return { 'name': self.name }
Fix so that error under test actually gets triggered.
## Code Before: import morepath import os from .template_engine import FormatLoader class App(morepath.App): pass @App.path(path='{name}') class Person(object): def __init__(self, name): self.name = name @App.template_loader(extension='.unknown') def get_template_loader(template_directories, settings): return FormatLoader(template_directories) @App.html(model=Person, template='person.unknown') def person_default(self, request): return { 'name': self.name } ## Instruction: Fix so that error under test actually gets triggered. ## Code After: import morepath import os from .template_engine import FormatLoader class App(morepath.App): pass @App.path(path='{name}') class Person(object): def __init__(self, name): self.name = name @App.template_loader(extension='.unknown') def get_template_loader(template_directories, settings): return FormatLoader(template_directories) @App.template_directory() def get_template_directory(): return 'templates' @App.html(model=Person, template='person.unknown') def person_default(self, request): return { 'name': self.name }
import morepath import os from .template_engine import FormatLoader class App(morepath.App): pass @App.path(path='{name}') class Person(object): def __init__(self, name): self.name = name @App.template_loader(extension='.unknown') def get_template_loader(template_directories, settings): return FormatLoader(template_directories) + @App.template_directory() + def get_template_directory(): + return 'templates' + + @App.html(model=Person, template='person.unknown') def person_default(self, request): return { 'name': self.name }
8b88ca952ff562eb692f25cba54263afcbbcfafd
auth/models.py
auth/models.py
from google.appengine.ext import db import bcrypt class User(db.Model): email = db.EmailProperty() first_name = db.StringProperty() last_name = db.StringProperty() password_hash = db.StringProperty() created = db.DateTimeProperty(auto_now_add=True) updated = db.DateTimeProperty(auto_now=True) def set_password(self, password): self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt(log_rounds=1)) def check_password(self, password): return bcrypt.hashpw(password, self.password_hash) == self.password_hash @classmethod def authenticate(cls, email, password): user = cls.all().filter('email =', email).get() if user is None: return None if user.check_password(password): return user return None
from google.appengine.ext import db import bcrypt class User(db.Model): email = db.EmailProperty() first_name = db.StringProperty() last_name = db.StringProperty() password_hash = db.StringProperty() created = db.DateTimeProperty(auto_now_add=True) updated = db.DateTimeProperty(auto_now=True) def __init__(self, *args, **kwds): db.Model.__init__(self, *args, **kwds) password = kwds.pop('password', None) if password: self.set_password(password) def set_password(self, password): self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt(log_rounds=1)) def check_password(self, password): return bcrypt.hashpw(password, self.password_hash) == self.password_hash @classmethod def authenticate(cls, email, password): user = cls.all().filter('email =', email).get() if user is None: return None if user.check_password(password): return user return None
Set password in user ini
Set password in user ini
Python
mit
haldun/optimyser2,haldun/optimyser2,haldun/tornado-gae-auth
from google.appengine.ext import db import bcrypt class User(db.Model): email = db.EmailProperty() first_name = db.StringProperty() last_name = db.StringProperty() password_hash = db.StringProperty() created = db.DateTimeProperty(auto_now_add=True) updated = db.DateTimeProperty(auto_now=True) + + def __init__(self, *args, **kwds): + db.Model.__init__(self, *args, **kwds) + password = kwds.pop('password', None) + if password: + self.set_password(password) def set_password(self, password): self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt(log_rounds=1)) def check_password(self, password): return bcrypt.hashpw(password, self.password_hash) == self.password_hash @classmethod def authenticate(cls, email, password): user = cls.all().filter('email =', email).get() if user is None: return None if user.check_password(password): return user return None
Set password in user ini
## Code Before: from google.appengine.ext import db import bcrypt class User(db.Model): email = db.EmailProperty() first_name = db.StringProperty() last_name = db.StringProperty() password_hash = db.StringProperty() created = db.DateTimeProperty(auto_now_add=True) updated = db.DateTimeProperty(auto_now=True) def set_password(self, password): self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt(log_rounds=1)) def check_password(self, password): return bcrypt.hashpw(password, self.password_hash) == self.password_hash @classmethod def authenticate(cls, email, password): user = cls.all().filter('email =', email).get() if user is None: return None if user.check_password(password): return user return None ## Instruction: Set password in user ini ## Code After: from google.appengine.ext import db import bcrypt class User(db.Model): email = db.EmailProperty() first_name = db.StringProperty() last_name = db.StringProperty() password_hash = db.StringProperty() created = db.DateTimeProperty(auto_now_add=True) updated = db.DateTimeProperty(auto_now=True) def __init__(self, *args, **kwds): db.Model.__init__(self, *args, **kwds) password = kwds.pop('password', None) if password: self.set_password(password) def set_password(self, password): self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt(log_rounds=1)) def check_password(self, password): return bcrypt.hashpw(password, self.password_hash) == self.password_hash @classmethod def authenticate(cls, email, password): user = cls.all().filter('email =', email).get() if user is None: return None if user.check_password(password): return user return None
from google.appengine.ext import db import bcrypt class User(db.Model): email = db.EmailProperty() first_name = db.StringProperty() last_name = db.StringProperty() password_hash = db.StringProperty() created = db.DateTimeProperty(auto_now_add=True) updated = db.DateTimeProperty(auto_now=True) + + def __init__(self, *args, **kwds): + db.Model.__init__(self, *args, **kwds) + password = kwds.pop('password', None) + if password: + self.set_password(password) def set_password(self, password): self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt(log_rounds=1)) def check_password(self, password): return bcrypt.hashpw(password, self.password_hash) == self.password_hash @classmethod def authenticate(cls, email, password): user = cls.all().filter('email =', email).get() if user is None: return None if user.check_password(password): return user return None
5b702de914f55ee936292576394b2ea06ad15680
tests/utils.py
tests/utils.py
from __future__ import unicode_literals import contextlib from django.core.urlresolvers import reverse from django.test import TestCase from rest_framework.test import APIClient from rest_framework_simplejwt.settings import api_settings def client_action_wrapper(action): def wrapper_method(self, *args, **kwargs): if self.view_name is None: raise ValueError('Must give value for `view_name` property') reverse_args = kwargs.pop('reverse_args', tuple()) reverse_kwargs = kwargs.pop('reverse_kwargs', dict()) query_string = kwargs.pop('query_string', None) url = reverse(self.view_name, args=reverse_args, kwargs=reverse_kwargs) if query_string is not None: url = url + '?{0}'.format(query_string) return getattr(self.client, action)(url, *args, **kwargs) return wrapper_method class APIViewTestCase(TestCase): client_class = APIClient def authenticate_with_token(self, type, token): """ Authenticates requests with the given token. """ self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(type, token)) view_name = None view_post = client_action_wrapper('post') view_get = client_action_wrapper('get') @contextlib.contextmanager def override_api_settings(**settings): for k, v in settings.items(): setattr(api_settings, k, v) yield for k in settings.keys(): delattr(api_settings, k)
from __future__ import unicode_literals import contextlib from django.test import TestCase from rest_framework.test import APIClient from rest_framework_simplejwt.compat import reverse from rest_framework_simplejwt.settings import api_settings def client_action_wrapper(action): def wrapper_method(self, *args, **kwargs): if self.view_name is None: raise ValueError('Must give value for `view_name` property') reverse_args = kwargs.pop('reverse_args', tuple()) reverse_kwargs = kwargs.pop('reverse_kwargs', dict()) query_string = kwargs.pop('query_string', None) url = reverse(self.view_name, args=reverse_args, kwargs=reverse_kwargs) if query_string is not None: url = url + '?{0}'.format(query_string) return getattr(self.client, action)(url, *args, **kwargs) return wrapper_method class APIViewTestCase(TestCase): client_class = APIClient def authenticate_with_token(self, type, token): """ Authenticates requests with the given token. """ self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(type, token)) view_name = None view_post = client_action_wrapper('post') view_get = client_action_wrapper('get') @contextlib.contextmanager def override_api_settings(**settings): for k, v in settings.items(): setattr(api_settings, k, v) yield for k in settings.keys(): delattr(api_settings, k)
Fix broken tests in django master
Fix broken tests in django master
Python
mit
davesque/django-rest-framework-simplejwt,davesque/django-rest-framework-simplejwt
from __future__ import unicode_literals import contextlib - from django.core.urlresolvers import reverse from django.test import TestCase from rest_framework.test import APIClient + from rest_framework_simplejwt.compat import reverse from rest_framework_simplejwt.settings import api_settings def client_action_wrapper(action): def wrapper_method(self, *args, **kwargs): if self.view_name is None: raise ValueError('Must give value for `view_name` property') reverse_args = kwargs.pop('reverse_args', tuple()) reverse_kwargs = kwargs.pop('reverse_kwargs', dict()) query_string = kwargs.pop('query_string', None) url = reverse(self.view_name, args=reverse_args, kwargs=reverse_kwargs) if query_string is not None: url = url + '?{0}'.format(query_string) return getattr(self.client, action)(url, *args, **kwargs) return wrapper_method class APIViewTestCase(TestCase): client_class = APIClient def authenticate_with_token(self, type, token): """ Authenticates requests with the given token. """ self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(type, token)) view_name = None view_post = client_action_wrapper('post') view_get = client_action_wrapper('get') @contextlib.contextmanager def override_api_settings(**settings): for k, v in settings.items(): setattr(api_settings, k, v) yield for k in settings.keys(): delattr(api_settings, k)
Fix broken tests in django master
## Code Before: from __future__ import unicode_literals import contextlib from django.core.urlresolvers import reverse from django.test import TestCase from rest_framework.test import APIClient from rest_framework_simplejwt.settings import api_settings def client_action_wrapper(action): def wrapper_method(self, *args, **kwargs): if self.view_name is None: raise ValueError('Must give value for `view_name` property') reverse_args = kwargs.pop('reverse_args', tuple()) reverse_kwargs = kwargs.pop('reverse_kwargs', dict()) query_string = kwargs.pop('query_string', None) url = reverse(self.view_name, args=reverse_args, kwargs=reverse_kwargs) if query_string is not None: url = url + '?{0}'.format(query_string) return getattr(self.client, action)(url, *args, **kwargs) return wrapper_method class APIViewTestCase(TestCase): client_class = APIClient def authenticate_with_token(self, type, token): """ Authenticates requests with the given token. """ self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(type, token)) view_name = None view_post = client_action_wrapper('post') view_get = client_action_wrapper('get') @contextlib.contextmanager def override_api_settings(**settings): for k, v in settings.items(): setattr(api_settings, k, v) yield for k in settings.keys(): delattr(api_settings, k) ## Instruction: Fix broken tests in django master ## Code After: from __future__ import unicode_literals import contextlib from django.test import TestCase from rest_framework.test import APIClient from rest_framework_simplejwt.compat import reverse from rest_framework_simplejwt.settings import api_settings def client_action_wrapper(action): def wrapper_method(self, *args, **kwargs): if self.view_name is None: raise ValueError('Must give value for `view_name` property') reverse_args = kwargs.pop('reverse_args', tuple()) reverse_kwargs = kwargs.pop('reverse_kwargs', dict()) query_string = kwargs.pop('query_string', None) url = reverse(self.view_name, args=reverse_args, 
kwargs=reverse_kwargs) if query_string is not None: url = url + '?{0}'.format(query_string) return getattr(self.client, action)(url, *args, **kwargs) return wrapper_method class APIViewTestCase(TestCase): client_class = APIClient def authenticate_with_token(self, type, token): """ Authenticates requests with the given token. """ self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(type, token)) view_name = None view_post = client_action_wrapper('post') view_get = client_action_wrapper('get') @contextlib.contextmanager def override_api_settings(**settings): for k, v in settings.items(): setattr(api_settings, k, v) yield for k in settings.keys(): delattr(api_settings, k)
from __future__ import unicode_literals import contextlib - from django.core.urlresolvers import reverse from django.test import TestCase from rest_framework.test import APIClient + from rest_framework_simplejwt.compat import reverse from rest_framework_simplejwt.settings import api_settings def client_action_wrapper(action): def wrapper_method(self, *args, **kwargs): if self.view_name is None: raise ValueError('Must give value for `view_name` property') reverse_args = kwargs.pop('reverse_args', tuple()) reverse_kwargs = kwargs.pop('reverse_kwargs', dict()) query_string = kwargs.pop('query_string', None) url = reverse(self.view_name, args=reverse_args, kwargs=reverse_kwargs) if query_string is not None: url = url + '?{0}'.format(query_string) return getattr(self.client, action)(url, *args, **kwargs) return wrapper_method class APIViewTestCase(TestCase): client_class = APIClient def authenticate_with_token(self, type, token): """ Authenticates requests with the given token. """ self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(type, token)) view_name = None view_post = client_action_wrapper('post') view_get = client_action_wrapper('get') @contextlib.contextmanager def override_api_settings(**settings): for k, v in settings.items(): setattr(api_settings, k, v) yield for k in settings.keys(): delattr(api_settings, k)
b2e0a123631d326f06192a01758ebe581284dbdf
src/pip/_internal/operations/generate_metadata.py
src/pip/_internal/operations/generate_metadata.py
def get_metadata_generator(install_req): if install_req.use_pep517: return install_req.prepare_pep517_metadata else: return install_req.run_egg_info
def get_metadata_generator(install_req): if not install_req.use_pep517: return install_req.run_egg_info return install_req.prepare_pep517_metadata
Return early for legacy processes
Return early for legacy processes
Python
mit
xavfernandez/pip,pfmoore/pip,rouge8/pip,rouge8/pip,pradyunsg/pip,rouge8/pip,pfmoore/pip,sbidoul/pip,xavfernandez/pip,pypa/pip,pradyunsg/pip,xavfernandez/pip,pypa/pip,sbidoul/pip
def get_metadata_generator(install_req): - if install_req.use_pep517: + if not install_req.use_pep517: - return install_req.prepare_pep517_metadata - else: return install_req.run_egg_info + return install_req.prepare_pep517_metadata +
Return early for legacy processes
## Code Before: def get_metadata_generator(install_req): if install_req.use_pep517: return install_req.prepare_pep517_metadata else: return install_req.run_egg_info ## Instruction: Return early for legacy processes ## Code After: def get_metadata_generator(install_req): if not install_req.use_pep517: return install_req.run_egg_info return install_req.prepare_pep517_metadata
def get_metadata_generator(install_req): - if install_req.use_pep517: + if not install_req.use_pep517: ? ++++ - return install_req.prepare_pep517_metadata - else: return install_req.run_egg_info + + return install_req.prepare_pep517_metadata
d9b5a78b36729bdb3ce11c8626d00b57555fb356
core/views.py
core/views.py
from django_filters.rest_framework import DjangoFilterBackend from rest_framework import viewsets, mixins, routers from rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.viewsets import GenericViewSet from core import serializers as api from core.models import Image, Pin from core.permissions import IsOwnerOrReadOnly from users.models import User class UserViewSet(viewsets.ModelViewSet): queryset = User.objects.all() serializer_class = api.UserSerializer class ImageViewSet(mixins.CreateModelMixin, GenericViewSet): queryset = Image.objects.all() serializer_class = api.ImageSerializer def create(self, request, *args, **kwargs): return super(ImageViewSet, self).create(request, *args, **kwargs) class PinViewSet(viewsets.ModelViewSet): queryset = Pin.objects.all() serializer_class = api.PinSerializer filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) filter_fields = ("submitter__username", 'tags__name', ) ordering_fields = ('-id', ) ordering = ('-id', ) permission_classes = [IsOwnerOrReadOnly("submitter"), ] drf_router = routers.DefaultRouter() drf_router.register(r'users', UserViewSet) drf_router.register(r'pins', PinViewSet) drf_router.register(r'images', ImageViewSet)
from django_filters.rest_framework import DjangoFilterBackend from rest_framework import viewsets, mixins, routers from rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.viewsets import GenericViewSet from core import serializers as api from core.models import Image, Pin from core.permissions import IsOwnerOrReadOnly from users.models import User class UserViewSet(mixins.RetrieveModelMixin, GenericViewSet): queryset = User.objects.all() serializer_class = api.UserSerializer class ImageViewSet(mixins.CreateModelMixin, GenericViewSet): queryset = Image.objects.all() serializer_class = api.ImageSerializer def create(self, request, *args, **kwargs): return super(ImageViewSet, self).create(request, *args, **kwargs) class PinViewSet(viewsets.ModelViewSet): queryset = Pin.objects.all() serializer_class = api.PinSerializer filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) filter_fields = ("submitter__username", 'tags__name', ) ordering_fields = ('-id', ) ordering = ('-id', ) permission_classes = [IsOwnerOrReadOnly("submitter"), ] drf_router = routers.DefaultRouter() drf_router.register(r'users', UserViewSet) drf_router.register(r'pins', PinViewSet) drf_router.register(r'images', ImageViewSet)
Allow only the user-data fetching
Refactor: Allow only the user-data fetching
Python
bsd-2-clause
pinry/pinry,pinry/pinry,lapo-luchini/pinry,lapo-luchini/pinry,lapo-luchini/pinry,lapo-luchini/pinry,pinry/pinry,pinry/pinry
from django_filters.rest_framework import DjangoFilterBackend from rest_framework import viewsets, mixins, routers from rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.viewsets import GenericViewSet from core import serializers as api from core.models import Image, Pin from core.permissions import IsOwnerOrReadOnly from users.models import User - class UserViewSet(viewsets.ModelViewSet): + class UserViewSet(mixins.RetrieveModelMixin, GenericViewSet): queryset = User.objects.all() serializer_class = api.UserSerializer class ImageViewSet(mixins.CreateModelMixin, GenericViewSet): queryset = Image.objects.all() serializer_class = api.ImageSerializer def create(self, request, *args, **kwargs): return super(ImageViewSet, self).create(request, *args, **kwargs) class PinViewSet(viewsets.ModelViewSet): queryset = Pin.objects.all() serializer_class = api.PinSerializer filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) filter_fields = ("submitter__username", 'tags__name', ) ordering_fields = ('-id', ) ordering = ('-id', ) permission_classes = [IsOwnerOrReadOnly("submitter"), ] drf_router = routers.DefaultRouter() drf_router.register(r'users', UserViewSet) drf_router.register(r'pins', PinViewSet) drf_router.register(r'images', ImageViewSet)
Allow only the user-data fetching
## Code Before: from django_filters.rest_framework import DjangoFilterBackend from rest_framework import viewsets, mixins, routers from rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.viewsets import GenericViewSet from core import serializers as api from core.models import Image, Pin from core.permissions import IsOwnerOrReadOnly from users.models import User class UserViewSet(viewsets.ModelViewSet): queryset = User.objects.all() serializer_class = api.UserSerializer class ImageViewSet(mixins.CreateModelMixin, GenericViewSet): queryset = Image.objects.all() serializer_class = api.ImageSerializer def create(self, request, *args, **kwargs): return super(ImageViewSet, self).create(request, *args, **kwargs) class PinViewSet(viewsets.ModelViewSet): queryset = Pin.objects.all() serializer_class = api.PinSerializer filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) filter_fields = ("submitter__username", 'tags__name', ) ordering_fields = ('-id', ) ordering = ('-id', ) permission_classes = [IsOwnerOrReadOnly("submitter"), ] drf_router = routers.DefaultRouter() drf_router.register(r'users', UserViewSet) drf_router.register(r'pins', PinViewSet) drf_router.register(r'images', ImageViewSet) ## Instruction: Allow only the user-data fetching ## Code After: from django_filters.rest_framework import DjangoFilterBackend from rest_framework import viewsets, mixins, routers from rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.viewsets import GenericViewSet from core import serializers as api from core.models import Image, Pin from core.permissions import IsOwnerOrReadOnly from users.models import User class UserViewSet(mixins.RetrieveModelMixin, GenericViewSet): queryset = User.objects.all() serializer_class = api.UserSerializer class ImageViewSet(mixins.CreateModelMixin, GenericViewSet): queryset = Image.objects.all() serializer_class = api.ImageSerializer def create(self, request, *args, **kwargs): 
return super(ImageViewSet, self).create(request, *args, **kwargs) class PinViewSet(viewsets.ModelViewSet): queryset = Pin.objects.all() serializer_class = api.PinSerializer filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) filter_fields = ("submitter__username", 'tags__name', ) ordering_fields = ('-id', ) ordering = ('-id', ) permission_classes = [IsOwnerOrReadOnly("submitter"), ] drf_router = routers.DefaultRouter() drf_router.register(r'users', UserViewSet) drf_router.register(r'pins', PinViewSet) drf_router.register(r'images', ImageViewSet)
from django_filters.rest_framework import DjangoFilterBackend from rest_framework import viewsets, mixins, routers from rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.viewsets import GenericViewSet from core import serializers as api from core.models import Image, Pin from core.permissions import IsOwnerOrReadOnly from users.models import User - class UserViewSet(viewsets.ModelViewSet): + class UserViewSet(mixins.RetrieveModelMixin, GenericViewSet): queryset = User.objects.all() serializer_class = api.UserSerializer class ImageViewSet(mixins.CreateModelMixin, GenericViewSet): queryset = Image.objects.all() serializer_class = api.ImageSerializer def create(self, request, *args, **kwargs): return super(ImageViewSet, self).create(request, *args, **kwargs) class PinViewSet(viewsets.ModelViewSet): queryset = Pin.objects.all() serializer_class = api.PinSerializer filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) filter_fields = ("submitter__username", 'tags__name', ) ordering_fields = ('-id', ) ordering = ('-id', ) permission_classes = [IsOwnerOrReadOnly("submitter"), ] drf_router = routers.DefaultRouter() drf_router.register(r'users', UserViewSet) drf_router.register(r'pins', PinViewSet) drf_router.register(r'images', ImageViewSet)
6c157525bc32f1e6005be69bd6fde61d0d002ad3
wizard/post_function.py
wizard/post_function.py
from openerp import pooler def call_post_function(cr, uid, context): """This functionality allows users of module account.move.reversal to call a function of the desired openerp model, after the reversal of the move. The call automatically sends at least the database cursor (cr) and the user id (uid) for security reasons. Two key parameters are required in the context to do so: - 'post_function_obj': the osv model where the function is defined, - 'post_function_name': the name of the function to call, And two optional key parameters: - 'post_function_args': an iterable object listing the required arguments to pass after 'cr, uid', - 'post_function_kwargs': a dictionary object listing the optionnal keyword args to pass. """ if 'post_function_obj' in context: # We get the function addr by its name, # and call it with (cr, uid, *args, **kwargs) getattr( pooler.get_pool(cr.dbname)[context['post_function_obj']], context['post_function_name'] )( cr, uid, *context['post_function_args'], **context['post_function_kwargs'] ) # We clean the context to avoid multiple calls of the function. context.pop('post_function_obj') context.pop('post_function_name') context.pop('post_function_args') context.pop('post_function_kwargs')
from openerp import pooler def call_post_function(cr, uid, context): """This functionality allows users of module account.move.reversal to call a function of the desired openerp model, after the reversal of the move. The call automatically sends at least the database cursor (cr) and the user id (uid) for security reasons. Two key parameters are required in the context to do so: - 'post_function_obj': the osv model where the function is defined, - 'post_function_name': the name of the function to call, And two optional key parameters: - 'post_function_args': an iterable object listing the required arguments to pass after 'cr, uid', - 'post_function_kwargs': a dictionary object listing the optionnal keyword args to pass. """ if 'post_function_obj' in context: # We get the function addr by its name, # and call it with (cr, uid, *args, **kwargs) getattr( pooler.get_pool(cr.dbname)[context['post_function_obj']], context['post_function_name'] )( cr, uid, *context.get('post_function_args', []), **context.get('post_function_kwargs', {}) ) # We clean the context to avoid multiple calls of the function. context.pop('post_function_obj') context.pop('post_function_name') context.pop('post_function_args') context.pop('post_function_kwargs')
Remove some required arguments in post function context
Remove some required arguments in post function context
Python
agpl-3.0
xcgd/account_move_reversal
from openerp import pooler def call_post_function(cr, uid, context): """This functionality allows users of module account.move.reversal to call a function of the desired openerp model, after the reversal of the move. The call automatically sends at least the database cursor (cr) and the user id (uid) for security reasons. Two key parameters are required in the context to do so: - 'post_function_obj': the osv model where the function is defined, - 'post_function_name': the name of the function to call, And two optional key parameters: - 'post_function_args': an iterable object listing the required arguments to pass after 'cr, uid', - 'post_function_kwargs': a dictionary object listing the optionnal keyword args to pass. """ if 'post_function_obj' in context: # We get the function addr by its name, # and call it with (cr, uid, *args, **kwargs) getattr( pooler.get_pool(cr.dbname)[context['post_function_obj']], context['post_function_name'] )( cr, uid, - *context['post_function_args'], + *context.get('post_function_args', []), - **context['post_function_kwargs'] + **context.get('post_function_kwargs', {}) ) # We clean the context to avoid multiple calls of the function. context.pop('post_function_obj') context.pop('post_function_name') context.pop('post_function_args') context.pop('post_function_kwargs')
Remove some required arguments in post function context
## Code Before: from openerp import pooler def call_post_function(cr, uid, context): """This functionality allows users of module account.move.reversal to call a function of the desired openerp model, after the reversal of the move. The call automatically sends at least the database cursor (cr) and the user id (uid) for security reasons. Two key parameters are required in the context to do so: - 'post_function_obj': the osv model where the function is defined, - 'post_function_name': the name of the function to call, And two optional key parameters: - 'post_function_args': an iterable object listing the required arguments to pass after 'cr, uid', - 'post_function_kwargs': a dictionary object listing the optionnal keyword args to pass. """ if 'post_function_obj' in context: # We get the function addr by its name, # and call it with (cr, uid, *args, **kwargs) getattr( pooler.get_pool(cr.dbname)[context['post_function_obj']], context['post_function_name'] )( cr, uid, *context['post_function_args'], **context['post_function_kwargs'] ) # We clean the context to avoid multiple calls of the function. context.pop('post_function_obj') context.pop('post_function_name') context.pop('post_function_args') context.pop('post_function_kwargs') ## Instruction: Remove some required arguments in post function context ## Code After: from openerp import pooler def call_post_function(cr, uid, context): """This functionality allows users of module account.move.reversal to call a function of the desired openerp model, after the reversal of the move. The call automatically sends at least the database cursor (cr) and the user id (uid) for security reasons. 
Two key parameters are required in the context to do so: - 'post_function_obj': the osv model where the function is defined, - 'post_function_name': the name of the function to call, And two optional key parameters: - 'post_function_args': an iterable object listing the required arguments to pass after 'cr, uid', - 'post_function_kwargs': a dictionary object listing the optionnal keyword args to pass. """ if 'post_function_obj' in context: # We get the function addr by its name, # and call it with (cr, uid, *args, **kwargs) getattr( pooler.get_pool(cr.dbname)[context['post_function_obj']], context['post_function_name'] )( cr, uid, *context.get('post_function_args', []), **context.get('post_function_kwargs', {}) ) # We clean the context to avoid multiple calls of the function. context.pop('post_function_obj') context.pop('post_function_name') context.pop('post_function_args') context.pop('post_function_kwargs')
from openerp import pooler def call_post_function(cr, uid, context): """This functionality allows users of module account.move.reversal to call a function of the desired openerp model, after the reversal of the move. The call automatically sends at least the database cursor (cr) and the user id (uid) for security reasons. Two key parameters are required in the context to do so: - 'post_function_obj': the osv model where the function is defined, - 'post_function_name': the name of the function to call, And two optional key parameters: - 'post_function_args': an iterable object listing the required arguments to pass after 'cr, uid', - 'post_function_kwargs': a dictionary object listing the optionnal keyword args to pass. """ if 'post_function_obj' in context: # We get the function addr by its name, # and call it with (cr, uid, *args, **kwargs) getattr( pooler.get_pool(cr.dbname)[context['post_function_obj']], context['post_function_name'] )( cr, uid, - *context['post_function_args'], ? ^ + *context.get('post_function_args', []), ? ^^^^^ +++ + - **context['post_function_kwargs'] ? ^ ^ + **context.get('post_function_kwargs', {}) ? ^^^^^ ^^^^^ ) # We clean the context to avoid multiple calls of the function. context.pop('post_function_obj') context.pop('post_function_name') context.pop('post_function_args') context.pop('post_function_kwargs')
e2f118ea3d1f9e092567802610915d76d083e9f7
tests/scoring_engine/test_worker.py
tests/scoring_engine/test_worker.py
import sys import os sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../scoring_engine')) from worker import Worker from worker_queue import WorkerQueue from job import Job class TestWorker(object): def test_init(self): worker = Worker() assert isinstance(worker.worker_queue, WorkerQueue) is True def test_execute_simple_cmd(self): worker = Worker() job = Job(service_id="12345", command="echo 'HELLO'") updated_job = worker.execute_cmd(job) assert updated_job.output == "HELLO\n" assert updated_job.completed() is True assert updated_job.passed() is False def test_execute_cmd_trigger_timeout(self): worker = Worker() timeout_time = 1 sleep_time = timeout_time + 1 job = Job(service_id="12345", command="sleep " + str(sleep_time)) updated_job = worker.execute_cmd(job, timeout_time) assert updated_job.output is None assert updated_job.reason == "Command Timed Out" assert updated_job.passed() is False assert updated_job.completed() is True
import sys import os sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../scoring_engine')) from worker import Worker from worker_queue import WorkerQueue from job import Job class TestWorker(object): def setup(self): self.worker = Worker() def test_init(self): assert isinstance(self.worker.worker_queue, WorkerQueue) is True def test_execute_simple_cmd(self): job = Job(service_id="12345", command="echo 'HELLO'") updated_job = self.worker.execute_cmd(job) assert updated_job.output == "HELLO\n" assert updated_job.completed() is True assert updated_job.passed() is False def test_execute_cmd_trigger_timeout(self): timeout_time = 1 sleep_time = timeout_time + 1 job = Job(service_id="12345", command="sleep " + str(sleep_time)) updated_job = self.worker.execute_cmd(job, timeout_time) assert updated_job.output is None assert updated_job.reason == "Command Timed Out" assert updated_job.passed() is False assert updated_job.completed() is True
Modify test worker unit test
Modify test worker unit test Signed-off-by: Brandon Myers <9cda508be11a1ae7ceef912b85c196946f0ec5f3@mozilla.com>
Python
mit
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
import sys import os sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../scoring_engine')) from worker import Worker from worker_queue import WorkerQueue from job import Job class TestWorker(object): + def setup(self): + self.worker = Worker() + def test_init(self): - worker = Worker() - assert isinstance(worker.worker_queue, WorkerQueue) is True + assert isinstance(self.worker.worker_queue, WorkerQueue) is True def test_execute_simple_cmd(self): - worker = Worker() job = Job(service_id="12345", command="echo 'HELLO'") - updated_job = worker.execute_cmd(job) + updated_job = self.worker.execute_cmd(job) assert updated_job.output == "HELLO\n" assert updated_job.completed() is True assert updated_job.passed() is False def test_execute_cmd_trigger_timeout(self): - worker = Worker() timeout_time = 1 sleep_time = timeout_time + 1 job = Job(service_id="12345", command="sleep " + str(sleep_time)) - updated_job = worker.execute_cmd(job, timeout_time) + updated_job = self.worker.execute_cmd(job, timeout_time) assert updated_job.output is None assert updated_job.reason == "Command Timed Out" assert updated_job.passed() is False assert updated_job.completed() is True -
Modify test worker unit test
## Code Before: import sys import os sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../scoring_engine')) from worker import Worker from worker_queue import WorkerQueue from job import Job class TestWorker(object): def test_init(self): worker = Worker() assert isinstance(worker.worker_queue, WorkerQueue) is True def test_execute_simple_cmd(self): worker = Worker() job = Job(service_id="12345", command="echo 'HELLO'") updated_job = worker.execute_cmd(job) assert updated_job.output == "HELLO\n" assert updated_job.completed() is True assert updated_job.passed() is False def test_execute_cmd_trigger_timeout(self): worker = Worker() timeout_time = 1 sleep_time = timeout_time + 1 job = Job(service_id="12345", command="sleep " + str(sleep_time)) updated_job = worker.execute_cmd(job, timeout_time) assert updated_job.output is None assert updated_job.reason == "Command Timed Out" assert updated_job.passed() is False assert updated_job.completed() is True ## Instruction: Modify test worker unit test ## Code After: import sys import os sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../scoring_engine')) from worker import Worker from worker_queue import WorkerQueue from job import Job class TestWorker(object): def setup(self): self.worker = Worker() def test_init(self): assert isinstance(self.worker.worker_queue, WorkerQueue) is True def test_execute_simple_cmd(self): job = Job(service_id="12345", command="echo 'HELLO'") updated_job = self.worker.execute_cmd(job) assert updated_job.output == "HELLO\n" assert updated_job.completed() is True assert updated_job.passed() is False def test_execute_cmd_trigger_timeout(self): timeout_time = 1 sleep_time = timeout_time + 1 job = Job(service_id="12345", command="sleep " + str(sleep_time)) updated_job = self.worker.execute_cmd(job, timeout_time) assert updated_job.output is None assert updated_job.reason == "Command Timed Out" assert updated_job.passed() is False assert 
updated_job.completed() is True
import sys import os sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../scoring_engine')) from worker import Worker from worker_queue import WorkerQueue from job import Job class TestWorker(object): + def setup(self): + self.worker = Worker() + def test_init(self): - worker = Worker() - assert isinstance(worker.worker_queue, WorkerQueue) is True + assert isinstance(self.worker.worker_queue, WorkerQueue) is True ? +++++ def test_execute_simple_cmd(self): - worker = Worker() job = Job(service_id="12345", command="echo 'HELLO'") - updated_job = worker.execute_cmd(job) + updated_job = self.worker.execute_cmd(job) ? +++++ assert updated_job.output == "HELLO\n" assert updated_job.completed() is True assert updated_job.passed() is False def test_execute_cmd_trigger_timeout(self): - worker = Worker() timeout_time = 1 sleep_time = timeout_time + 1 job = Job(service_id="12345", command="sleep " + str(sleep_time)) - updated_job = worker.execute_cmd(job, timeout_time) + updated_job = self.worker.execute_cmd(job, timeout_time) ? +++++ assert updated_job.output is None assert updated_job.reason == "Command Timed Out" assert updated_job.passed() is False assert updated_job.completed() is True -
9bb7dc9c8f7b5208c332017df8b1501315e2601f
py/gaarf/utils.py
py/gaarf/utils.py
from .query_editor import QuerySpecification from .query_executor import AdsReportFetcher def get_customer_ids(ads_client, customer_id): query = """ SELECT customer_client.id FROM customer_client WHERE customer_client.manager = FALSE """ query_specification = QuerySpecification(query).generate() report_fetcher = AdsReportFetcher(ads_client) return report_fetcher.fetch(query_specification, customer_id)
from .query_editor import QuerySpecification from .query_executor import AdsReportFetcher def get_customer_ids(ads_client, customer_id): query = """ SELECT customer_client.id FROM customer_client WHERE customer_client.manager = FALSE """ query_specification = QuerySpecification(query).generate() report_fetcher = AdsReportFetcher(ads_client, customer_id) return report_fetcher.fetch(query_specification)
Fix incorrect signature for get_customer_ids function
Fix incorrect signature for get_customer_ids function Change-Id: Ib44af3ac6437ad9fa4cbfd9fda9b055b7eff4547
Python
apache-2.0
google/ads-api-report-fetcher,google/ads-api-report-fetcher,google/ads-api-report-fetcher,google/ads-api-report-fetcher
from .query_editor import QuerySpecification from .query_executor import AdsReportFetcher def get_customer_ids(ads_client, customer_id): query = """ SELECT customer_client.id FROM customer_client WHERE customer_client.manager = FALSE """ query_specification = QuerySpecification(query).generate() - report_fetcher = AdsReportFetcher(ads_client) + report_fetcher = AdsReportFetcher(ads_client, customer_id) - return report_fetcher.fetch(query_specification, customer_id) + return report_fetcher.fetch(query_specification)
Fix incorrect signature for get_customer_ids function
## Code Before: from .query_editor import QuerySpecification from .query_executor import AdsReportFetcher def get_customer_ids(ads_client, customer_id): query = """ SELECT customer_client.id FROM customer_client WHERE customer_client.manager = FALSE """ query_specification = QuerySpecification(query).generate() report_fetcher = AdsReportFetcher(ads_client) return report_fetcher.fetch(query_specification, customer_id) ## Instruction: Fix incorrect signature for get_customer_ids function ## Code After: from .query_editor import QuerySpecification from .query_executor import AdsReportFetcher def get_customer_ids(ads_client, customer_id): query = """ SELECT customer_client.id FROM customer_client WHERE customer_client.manager = FALSE """ query_specification = QuerySpecification(query).generate() report_fetcher = AdsReportFetcher(ads_client, customer_id) return report_fetcher.fetch(query_specification)
from .query_editor import QuerySpecification from .query_executor import AdsReportFetcher def get_customer_ids(ads_client, customer_id): query = """ SELECT customer_client.id FROM customer_client WHERE customer_client.manager = FALSE """ query_specification = QuerySpecification(query).generate() - report_fetcher = AdsReportFetcher(ads_client) + report_fetcher = AdsReportFetcher(ads_client, customer_id) ? +++++++++++++ - return report_fetcher.fetch(query_specification, customer_id) ? ------------- + return report_fetcher.fetch(query_specification)
dd171296a980dcc0349cf54b2afd6d2399cfb981
numba/tests/matmul_usecase.py
numba/tests/matmul_usecase.py
import sys try: import scipy.linalg.cython_blas has_blas = True except ImportError: has_blas = False import numba.unittest_support as unittest # The "@" operator only compiles on Python 3.5+. has_matmul = sys.version_info >= (3, 5) if has_matmul: code = """if 1: def matmul_usecase(x, y): return x @ y def imatmul_usecase(x, y): x @= y return x """ co = compile(code, "<string>", "exec") ns = {} eval(co, globals(), ns) globals().update(ns) del code, co, ns else: matmul_usecase = None imatmul_usecase = None needs_matmul = unittest.skipUnless( has_matmul, "the matrix multiplication operator needs Python 3.5+") needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+") class DumbMatrix(object): def __init__(self, value): self.value = value def __matmul__(self, other): if isinstance(other, DumbMatrix): return DumbMatrix(self.value * other.value) return NotImplemented def __imatmul__(self, other): if isinstance(other, DumbMatrix): self.value *= other.value return self return NotImplemented
import sys try: import scipy.linalg.cython_blas has_blas = True except ImportError: has_blas = False import numba.unittest_support as unittest from numba.numpy_support import version as numpy_version # The "@" operator only compiles on Python 3.5+. # It is only supported by Numpy 1.10+. has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10) if has_matmul: code = """if 1: def matmul_usecase(x, y): return x @ y def imatmul_usecase(x, y): x @= y return x """ co = compile(code, "<string>", "exec") ns = {} eval(co, globals(), ns) globals().update(ns) del code, co, ns else: matmul_usecase = None imatmul_usecase = None needs_matmul = unittest.skipUnless( has_matmul, "the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+") needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+") class DumbMatrix(object): def __init__(self, value): self.value = value def __matmul__(self, other): if isinstance(other, DumbMatrix): return DumbMatrix(self.value * other.value) return NotImplemented def __imatmul__(self, other): if isinstance(other, DumbMatrix): self.value *= other.value return self return NotImplemented
Fix test failure on Numpy 1.9 and Python 3.5
Fix test failure on Numpy 1.9 and Python 3.5 The "@" operator between arrays is only supported by Numpy 1.10+.
Python
bsd-2-clause
numba/numba,cpcloud/numba,stuartarchibald/numba,numba/numba,stefanseefeld/numba,gmarkall/numba,sklam/numba,stefanseefeld/numba,stefanseefeld/numba,jriehl/numba,IntelLabs/numba,seibert/numba,IntelLabs/numba,seibert/numba,stuartarchibald/numba,stonebig/numba,cpcloud/numba,sklam/numba,cpcloud/numba,stefanseefeld/numba,sklam/numba,seibert/numba,gmarkall/numba,sklam/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,stonebig/numba,stonebig/numba,stuartarchibald/numba,seibert/numba,seibert/numba,jriehl/numba,stuartarchibald/numba,stonebig/numba,numba/numba,IntelLabs/numba,jriehl/numba,numba/numba,sklam/numba,gmarkall/numba,cpcloud/numba,cpcloud/numba,stonebig/numba,jriehl/numba,stefanseefeld/numba,numba/numba,stuartarchibald/numba,IntelLabs/numba,gmarkall/numba
import sys try: import scipy.linalg.cython_blas has_blas = True except ImportError: has_blas = False import numba.unittest_support as unittest + from numba.numpy_support import version as numpy_version # The "@" operator only compiles on Python 3.5+. - has_matmul = sys.version_info >= (3, 5) + # It is only supported by Numpy 1.10+. + has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10) if has_matmul: code = """if 1: def matmul_usecase(x, y): return x @ y def imatmul_usecase(x, y): x @= y return x """ co = compile(code, "<string>", "exec") ns = {} eval(co, globals(), ns) globals().update(ns) del code, co, ns else: matmul_usecase = None imatmul_usecase = None needs_matmul = unittest.skipUnless( + has_matmul, - has_matmul, "the matrix multiplication operator needs Python 3.5+") + "the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+") needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+") class DumbMatrix(object): def __init__(self, value): self.value = value def __matmul__(self, other): if isinstance(other, DumbMatrix): return DumbMatrix(self.value * other.value) return NotImplemented def __imatmul__(self, other): if isinstance(other, DumbMatrix): self.value *= other.value return self return NotImplemented
Fix test failure on Numpy 1.9 and Python 3.5
## Code Before: import sys try: import scipy.linalg.cython_blas has_blas = True except ImportError: has_blas = False import numba.unittest_support as unittest # The "@" operator only compiles on Python 3.5+. has_matmul = sys.version_info >= (3, 5) if has_matmul: code = """if 1: def matmul_usecase(x, y): return x @ y def imatmul_usecase(x, y): x @= y return x """ co = compile(code, "<string>", "exec") ns = {} eval(co, globals(), ns) globals().update(ns) del code, co, ns else: matmul_usecase = None imatmul_usecase = None needs_matmul = unittest.skipUnless( has_matmul, "the matrix multiplication operator needs Python 3.5+") needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+") class DumbMatrix(object): def __init__(self, value): self.value = value def __matmul__(self, other): if isinstance(other, DumbMatrix): return DumbMatrix(self.value * other.value) return NotImplemented def __imatmul__(self, other): if isinstance(other, DumbMatrix): self.value *= other.value return self return NotImplemented ## Instruction: Fix test failure on Numpy 1.9 and Python 3.5 ## Code After: import sys try: import scipy.linalg.cython_blas has_blas = True except ImportError: has_blas = False import numba.unittest_support as unittest from numba.numpy_support import version as numpy_version # The "@" operator only compiles on Python 3.5+. # It is only supported by Numpy 1.10+. 
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10) if has_matmul: code = """if 1: def matmul_usecase(x, y): return x @ y def imatmul_usecase(x, y): x @= y return x """ co = compile(code, "<string>", "exec") ns = {} eval(co, globals(), ns) globals().update(ns) del code, co, ns else: matmul_usecase = None imatmul_usecase = None needs_matmul = unittest.skipUnless( has_matmul, "the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+") needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+") class DumbMatrix(object): def __init__(self, value): self.value = value def __matmul__(self, other): if isinstance(other, DumbMatrix): return DumbMatrix(self.value * other.value) return NotImplemented def __imatmul__(self, other): if isinstance(other, DumbMatrix): self.value *= other.value return self return NotImplemented
import sys try: import scipy.linalg.cython_blas has_blas = True except ImportError: has_blas = False import numba.unittest_support as unittest + from numba.numpy_support import version as numpy_version # The "@" operator only compiles on Python 3.5+. - has_matmul = sys.version_info >= (3, 5) + # It is only supported by Numpy 1.10+. + has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10) if has_matmul: code = """if 1: def matmul_usecase(x, y): return x @ y def imatmul_usecase(x, y): x @= y return x """ co = compile(code, "<string>", "exec") ns = {} eval(co, globals(), ns) globals().update(ns) del code, co, ns else: matmul_usecase = None imatmul_usecase = None needs_matmul = unittest.skipUnless( + has_matmul, - has_matmul, "the matrix multiplication operator needs Python 3.5+") ? ------------ + "the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+") ? ++++++++++++++++ needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+") class DumbMatrix(object): def __init__(self, value): self.value = value def __matmul__(self, other): if isinstance(other, DumbMatrix): return DumbMatrix(self.value * other.value) return NotImplemented def __imatmul__(self, other): if isinstance(other, DumbMatrix): self.value *= other.value return self return NotImplemented
4f754ad3177eb0fcdc10ccf7804349a9453e9ff0
asyncio/__init__.py
asyncio/__init__.py
"""The asyncio package, tracking PEP 3156.""" import sys # The selectors module is in the stdlib in Python 3.4 but not in 3.3. # Do this first, so the other submodules can use "from . import selectors". # Prefer asyncio/selectors.py over the stdlib one, as ours may be newer. try: from . import selectors except ImportError: import selectors # Will also be exported. if sys.platform == 'win32': # Similar thing for _overlapped. try: from . import _overlapped except ImportError: import _overlapped # Will also be exported. # This relies on each of the submodules having an __all__ variable. from .coroutines import * from .events import * from .futures import * from .locks import * from .protocols import * from .queues import * from .streams import * from .subprocess import * from .tasks import * from .transports import * if sys.platform == 'win32': # pragma: no cover from .windows_events import * else: from .unix_events import * # pragma: no cover __all__ = (coroutines.__all__ + events.__all__ + futures.__all__ + locks.__all__ + protocols.__all__ + queues.__all__ + streams.__all__ + subprocess.__all__ + tasks.__all__ + transports.__all__)
"""The asyncio package, tracking PEP 3156.""" import sys # The selectors module is in the stdlib in Python 3.4 but not in 3.3. # Do this first, so the other submodules can use "from . import selectors". # Prefer asyncio/selectors.py over the stdlib one, as ours may be newer. try: from . import selectors except ImportError: import selectors # Will also be exported. if sys.platform == 'win32': # Similar thing for _overlapped. try: from . import _overlapped except ImportError: import _overlapped # Will also be exported. # This relies on each of the submodules having an __all__ variable. from .coroutines import * from .events import * from .futures import * from .locks import * from .protocols import * from .queues import * from .streams import * from .subprocess import * from .tasks import * from .transports import * __all__ = (coroutines.__all__ + events.__all__ + futures.__all__ + locks.__all__ + protocols.__all__ + queues.__all__ + streams.__all__ + subprocess.__all__ + tasks.__all__ + transports.__all__) if sys.platform == 'win32': # pragma: no cover from .windows_events import * __all__ += windows_events.__all__ else: from .unix_events import * # pragma: no cover __all__ += unix_events.__all__
Fix asyncio.__all__: export also unix_events and windows_events symbols
Fix asyncio.__all__: export also unix_events and windows_events symbols For example, on Windows, it was not possible to get ProactorEventLoop or DefaultEventLoopPolicy using "from asyncio import *".
Python
apache-2.0
vxgmichel/asyncio,gvanrossum/asyncio,manipopopo/asyncio,jashandeep-sohi/asyncio,Martiusweb/asyncio,ajdavis/asyncio,Martiusweb/asyncio,jashandeep-sohi/asyncio,1st1/asyncio,haypo/trollius,fallen/asyncio,vxgmichel/asyncio,gvanrossum/asyncio,Martiusweb/asyncio,jashandeep-sohi/asyncio,fallen/asyncio,haypo/trollius,haypo/trollius,ajdavis/asyncio,manipopopo/asyncio,fallen/asyncio,vxgmichel/asyncio,gsb-eng/asyncio,gsb-eng/asyncio,manipopopo/asyncio,gvanrossum/asyncio,ajdavis/asyncio,1st1/asyncio,gsb-eng/asyncio,1st1/asyncio
"""The asyncio package, tracking PEP 3156.""" import sys # The selectors module is in the stdlib in Python 3.4 but not in 3.3. # Do this first, so the other submodules can use "from . import selectors". # Prefer asyncio/selectors.py over the stdlib one, as ours may be newer. try: from . import selectors except ImportError: import selectors # Will also be exported. if sys.platform == 'win32': # Similar thing for _overlapped. try: from . import _overlapped except ImportError: import _overlapped # Will also be exported. # This relies on each of the submodules having an __all__ variable. from .coroutines import * from .events import * from .futures import * from .locks import * from .protocols import * from .queues import * from .streams import * from .subprocess import * from .tasks import * from .transports import * - if sys.platform == 'win32': # pragma: no cover - from .windows_events import * - else: - from .unix_events import * # pragma: no cover - - __all__ = (coroutines.__all__ + events.__all__ + futures.__all__ + locks.__all__ + protocols.__all__ + queues.__all__ + streams.__all__ + subprocess.__all__ + tasks.__all__ + transports.__all__) + if sys.platform == 'win32': # pragma: no cover + from .windows_events import * + __all__ += windows_events.__all__ + else: + from .unix_events import * # pragma: no cover + __all__ += unix_events.__all__ +
Fix asyncio.__all__: export also unix_events and windows_events symbols
## Code Before: """The asyncio package, tracking PEP 3156.""" import sys # The selectors module is in the stdlib in Python 3.4 but not in 3.3. # Do this first, so the other submodules can use "from . import selectors". # Prefer asyncio/selectors.py over the stdlib one, as ours may be newer. try: from . import selectors except ImportError: import selectors # Will also be exported. if sys.platform == 'win32': # Similar thing for _overlapped. try: from . import _overlapped except ImportError: import _overlapped # Will also be exported. # This relies on each of the submodules having an __all__ variable. from .coroutines import * from .events import * from .futures import * from .locks import * from .protocols import * from .queues import * from .streams import * from .subprocess import * from .tasks import * from .transports import * if sys.platform == 'win32': # pragma: no cover from .windows_events import * else: from .unix_events import * # pragma: no cover __all__ = (coroutines.__all__ + events.__all__ + futures.__all__ + locks.__all__ + protocols.__all__ + queues.__all__ + streams.__all__ + subprocess.__all__ + tasks.__all__ + transports.__all__) ## Instruction: Fix asyncio.__all__: export also unix_events and windows_events symbols ## Code After: """The asyncio package, tracking PEP 3156.""" import sys # The selectors module is in the stdlib in Python 3.4 but not in 3.3. # Do this first, so the other submodules can use "from . import selectors". # Prefer asyncio/selectors.py over the stdlib one, as ours may be newer. try: from . import selectors except ImportError: import selectors # Will also be exported. if sys.platform == 'win32': # Similar thing for _overlapped. try: from . import _overlapped except ImportError: import _overlapped # Will also be exported. # This relies on each of the submodules having an __all__ variable. 
from .coroutines import * from .events import * from .futures import * from .locks import * from .protocols import * from .queues import * from .streams import * from .subprocess import * from .tasks import * from .transports import * __all__ = (coroutines.__all__ + events.__all__ + futures.__all__ + locks.__all__ + protocols.__all__ + queues.__all__ + streams.__all__ + subprocess.__all__ + tasks.__all__ + transports.__all__) if sys.platform == 'win32': # pragma: no cover from .windows_events import * __all__ += windows_events.__all__ else: from .unix_events import * # pragma: no cover __all__ += unix_events.__all__
"""The asyncio package, tracking PEP 3156.""" import sys # The selectors module is in the stdlib in Python 3.4 but not in 3.3. # Do this first, so the other submodules can use "from . import selectors". # Prefer asyncio/selectors.py over the stdlib one, as ours may be newer. try: from . import selectors except ImportError: import selectors # Will also be exported. if sys.platform == 'win32': # Similar thing for _overlapped. try: from . import _overlapped except ImportError: import _overlapped # Will also be exported. # This relies on each of the submodules having an __all__ variable. from .coroutines import * from .events import * from .futures import * from .locks import * from .protocols import * from .queues import * from .streams import * from .subprocess import * from .tasks import * from .transports import * - if sys.platform == 'win32': # pragma: no cover - from .windows_events import * - else: - from .unix_events import * # pragma: no cover - - __all__ = (coroutines.__all__ + events.__all__ + futures.__all__ + locks.__all__ + protocols.__all__ + queues.__all__ + streams.__all__ + subprocess.__all__ + tasks.__all__ + transports.__all__) + + if sys.platform == 'win32': # pragma: no cover + from .windows_events import * + __all__ += windows_events.__all__ + else: + from .unix_events import * # pragma: no cover + __all__ += unix_events.__all__
6023fa1fdec83c0a4568529982a69ae64801ad5f
src/cli/_actions/_pool.py
src/cli/_actions/_pool.py
from __future__ import print_function from .._errors import StratisCliValueUnimplementedError def create_pool(dbus_thing, namespace): """ Create a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) if namespace.redundancy != 'none': raise StratisCliValueUnimplementedError( namespace.redundancy, "namespace.redundancy" ) (_, rc, message) = dbus_thing.CreatePool( namespace.name, namespace.device, len(namespace.device) ) return (rc, message) def list_pools(dbus_thing, namespace): """ List all stratis pools. :param Interface dbus_thing: the interface to the stratis manager """ # pylint: disable=unused-argument (result, rc, message) = dbus_thing.ListPools() if rc != 0: return (rc, message) for item in result: print(item) return (rc, message) def destroy_pool(dbus_thing, namespace): """ Destroy a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) (_, rc, message) = dbus_thing.DestroyPool( namespace.name ) return (rc, message)
from __future__ import print_function from .._errors import StratisCliValueUnimplementedError def create_pool(dbus_thing, namespace): """ Create a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) if namespace.redundancy != 'none': raise StratisCliValueUnimplementedError( namespace.redundancy, "namespace.redundancy" ) (result, rc, message) = dbus_thing.CreatePool( namespace.name, namespace.device, len(namespace.device) ) if rc == 0: print("New pool with object path: %s" % result) return (rc, message) def list_pools(dbus_thing, namespace): """ List all stratis pools. :param Interface dbus_thing: the interface to the stratis manager """ # pylint: disable=unused-argument (result, rc, message) = dbus_thing.ListPools() if rc != 0: return (rc, message) for item in result: print(item) return (rc, message) def destroy_pool(dbus_thing, namespace): """ Destroy a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) (result, rc, message) = dbus_thing.DestroyPool( namespace.name ) if rc == 0: print("Deleted pool with object path: %s" % result) return (rc, message)
Add some extra output for create and destroy.
Add some extra output for create and destroy. Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com>
Python
apache-2.0
stratis-storage/stratis-cli,stratis-storage/stratis-cli
from __future__ import print_function from .._errors import StratisCliValueUnimplementedError def create_pool(dbus_thing, namespace): """ Create a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) if namespace.redundancy != 'none': raise StratisCliValueUnimplementedError( namespace.redundancy, "namespace.redundancy" ) - (_, rc, message) = dbus_thing.CreatePool( + (result, rc, message) = dbus_thing.CreatePool( namespace.name, namespace.device, len(namespace.device) ) + + if rc == 0: + print("New pool with object path: %s" % result) return (rc, message) def list_pools(dbus_thing, namespace): """ List all stratis pools. :param Interface dbus_thing: the interface to the stratis manager """ # pylint: disable=unused-argument (result, rc, message) = dbus_thing.ListPools() if rc != 0: return (rc, message) for item in result: print(item) return (rc, message) def destroy_pool(dbus_thing, namespace): """ Destroy a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) - (_, rc, message) = dbus_thing.DestroyPool( + (result, rc, message) = dbus_thing.DestroyPool( namespace.name ) + if rc == 0: + print("Deleted pool with object path: %s" % result) + return (rc, message)
Add some extra output for create and destroy.
## Code Before: from __future__ import print_function from .._errors import StratisCliValueUnimplementedError def create_pool(dbus_thing, namespace): """ Create a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) if namespace.redundancy != 'none': raise StratisCliValueUnimplementedError( namespace.redundancy, "namespace.redundancy" ) (_, rc, message) = dbus_thing.CreatePool( namespace.name, namespace.device, len(namespace.device) ) return (rc, message) def list_pools(dbus_thing, namespace): """ List all stratis pools. :param Interface dbus_thing: the interface to the stratis manager """ # pylint: disable=unused-argument (result, rc, message) = dbus_thing.ListPools() if rc != 0: return (rc, message) for item in result: print(item) return (rc, message) def destroy_pool(dbus_thing, namespace): """ Destroy a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) (_, rc, message) = dbus_thing.DestroyPool( namespace.name ) return (rc, message) ## Instruction: Add some extra output for create and destroy. ## Code After: from __future__ import print_function from .._errors import StratisCliValueUnimplementedError def create_pool(dbus_thing, namespace): """ Create a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) if namespace.redundancy != 'none': raise StratisCliValueUnimplementedError( namespace.redundancy, "namespace.redundancy" ) (result, rc, message) = dbus_thing.CreatePool( namespace.name, namespace.device, len(namespace.device) ) if rc == 0: print("New pool with object path: %s" % result) return (rc, message) def list_pools(dbus_thing, namespace): """ List all stratis pools. 
:param Interface dbus_thing: the interface to the stratis manager """ # pylint: disable=unused-argument (result, rc, message) = dbus_thing.ListPools() if rc != 0: return (rc, message) for item in result: print(item) return (rc, message) def destroy_pool(dbus_thing, namespace): """ Destroy a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) (result, rc, message) = dbus_thing.DestroyPool( namespace.name ) if rc == 0: print("Deleted pool with object path: %s" % result) return (rc, message)
from __future__ import print_function from .._errors import StratisCliValueUnimplementedError def create_pool(dbus_thing, namespace): """ Create a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) if namespace.redundancy != 'none': raise StratisCliValueUnimplementedError( namespace.redundancy, "namespace.redundancy" ) - (_, rc, message) = dbus_thing.CreatePool( ? ^ + (result, rc, message) = dbus_thing.CreatePool( ? ^^^^^^ namespace.name, namespace.device, len(namespace.device) ) + + if rc == 0: + print("New pool with object path: %s" % result) return (rc, message) def list_pools(dbus_thing, namespace): """ List all stratis pools. :param Interface dbus_thing: the interface to the stratis manager """ # pylint: disable=unused-argument (result, rc, message) = dbus_thing.ListPools() if rc != 0: return (rc, message) for item in result: print(item) return (rc, message) def destroy_pool(dbus_thing, namespace): """ Destroy a stratis pool. """ if namespace.force: raise StratisCliValueUnimplementedError( namespace.force, "namespace.force" ) - (_, rc, message) = dbus_thing.DestroyPool( ? ^ + (result, rc, message) = dbus_thing.DestroyPool( ? ^^^^^^ namespace.name ) + if rc == 0: + print("Deleted pool with object path: %s" % result) + return (rc, message)
5eb3f2c61c2b61e1bad7faa006e5503bd9a20edf
uni_form/util.py
uni_form/util.py
from django import forms from django.forms.widgets import Input class SubmitButtonWidget(Input): """ A widget that handles a submit button. """ input_type = 'submit' def render(self, name, value, attrs=None): return super(SubmitButtonWidget, self).render(name, self.attrs['value'], attrs) class BaseInput(forms.Field): """ An base Input class to reduce the amount of code in the Input classes. """ widget = SubmitButtonWidget def __init__(self, **kwargs): if not 'label' in kwargs: kwargs['label'] = '' if not 'required' in kwargs: kwargs['required'] = False if 'value' in kwargs: self._widget_attrs = {'value': kwargs['value']} del kwargs['value'] else: self._widget_attrs = {'value': 'Submit'} super(BaseInput, self).__init__(**kwargs) def widget_attrs(self, widget): return self._widget_attrs class Toggle(object): """ A container for holder toggled items such as fields and buttons. """ fields = []
class BaseInput(object): """ An base Input class to reduce the amount of code in the Input classes. """ def __init__(self,name,value): self.name = name self.value = value class Toggle(object): """ A container for holder toggled items such as fields and buttons. """ fields = []
Revert "Made BaseInput inherit from forms.Field so inputs can be used in layouts. Added a SubmitButtonWidget."
Revert "Made BaseInput inherit from forms.Field so inputs can be used in layouts. Added a SubmitButtonWidget." This reverts commit aa571b2e1fd177491895cc263b192467431b90c2.
Python
mit
HungryCloud/django-crispy-forms,spectras/django-crispy-forms,iris-edu-int/django-crispy-forms,scuml/django-crispy-forms,ngenovictor/django-crispy-forms,CashStar/django-uni-form,PetrDlouhy/django-crispy-forms,RamezIssac/django-crispy-forms,jcomeauictx/django-crispy-forms,tarunlnmiit/django-crispy-forms,CashStar/django-uni-form,pydanny/django-uni-form,avsd/django-crispy-forms,agepoly/django-crispy-forms,django-crispy-forms/django-crispy-forms,carltongibson/django-crispy-forms,pjdelport/django-crispy-forms,iris-edu/django-crispy-forms,IanLee1521/django-crispy-forms,dzhuang/django-crispy-forms,IanLee1521/django-crispy-forms,zixan/django-crispy-forms,ionelmc/django-uni-form,spectras/django-crispy-forms,dessibelle/django-crispy-forms,bouttier/django-crispy-forms,HungryCloud/django-crispy-forms,jcomeauictx/django-crispy-forms,tarunlnmiit/django-crispy-forms,eykanal/django-crispy-forms,HungryCloud/django-crispy-forms,maraujop/django-crispy-forms,iedparis8/django-crispy-forms,VishvajitP/django-crispy-forms,damienjones/django-crispy-forms,saydulk/django-crispy-forms,impulse-cloud/django-crispy-forms,VishvajitP/django-crispy-forms,saydulk/django-crispy-forms,scuml/django-crispy-forms,carltongibson/django-crispy-forms,alanwj/django-crispy-forms,rfleschenberg/django-crispy-forms,eykanal/django-crispy-forms,alanwj/django-crispy-forms,uranusjr/django-crispy-forms-ng,iris-edu-int/django-crispy-forms,zixan/django-crispy-forms,treyhunner/django-crispy-forms,maraujop/django-crispy-forms,davidszotten/django-crispy-forms,iedparis8/django-crispy-forms,avsd/django-crispy-forms,ngenovictor/django-crispy-forms,jtyoung/django-crispy-forms,davidszotten/django-crispy-forms,Stranger6667/django-crispy-forms,dessibelle/django-crispy-forms,PetrDlouhy/django-crispy-forms,dzhuang/django-crispy-forms,treyhunner/django-crispy-forms,agepoly/django-crispy-forms,uranusjr/django-crispy-forms-ng,django-crispy-forms/django-crispy-forms,RamezIssac/django-crispy-forms,damienjones/django-crispy-forms,bouttier/
django-crispy-forms,rfleschenberg/django-crispy-forms,smirolo/django-crispy-forms,jtyoung/django-crispy-forms,iris-edu/django-crispy-forms,schrd/django-crispy-forms,Stranger6667/django-crispy-forms,pydanny/django-uni-form,impulse-cloud/django-crispy-forms,smirolo/django-crispy-forms,schrd/django-crispy-forms
- from django import forms - from django.forms.widgets import Input - - class SubmitButtonWidget(Input): - """ - A widget that handles a submit button. - """ - input_type = 'submit' - - def render(self, name, value, attrs=None): - return super(SubmitButtonWidget, self).render(name, - self.attrs['value'], attrs) - - - class BaseInput(forms.Field): + class BaseInput(object): """ An base Input class to reduce the amount of code in the Input classes. """ + + def __init__(self,name,value): + self.name = name + self.value = value - widget = SubmitButtonWidget - - def __init__(self, **kwargs): - if not 'label' in kwargs: - kwargs['label'] = '' - if not 'required' in kwargs: - kwargs['required'] = False - if 'value' in kwargs: - self._widget_attrs = {'value': kwargs['value']} - del kwargs['value'] - else: + - self._widget_attrs = {'value': 'Submit'} - super(BaseInput, self).__init__(**kwargs) - - def widget_attrs(self, widget): - return self._widget_attrs class Toggle(object): """ A container for holder toggled items such as fields and buttons. """ fields = []
Revert "Made BaseInput inherit from forms.Field so inputs can be used in layouts. Added a SubmitButtonWidget."
## Code Before: from django import forms from django.forms.widgets import Input class SubmitButtonWidget(Input): """ A widget that handles a submit button. """ input_type = 'submit' def render(self, name, value, attrs=None): return super(SubmitButtonWidget, self).render(name, self.attrs['value'], attrs) class BaseInput(forms.Field): """ An base Input class to reduce the amount of code in the Input classes. """ widget = SubmitButtonWidget def __init__(self, **kwargs): if not 'label' in kwargs: kwargs['label'] = '' if not 'required' in kwargs: kwargs['required'] = False if 'value' in kwargs: self._widget_attrs = {'value': kwargs['value']} del kwargs['value'] else: self._widget_attrs = {'value': 'Submit'} super(BaseInput, self).__init__(**kwargs) def widget_attrs(self, widget): return self._widget_attrs class Toggle(object): """ A container for holder toggled items such as fields and buttons. """ fields = [] ## Instruction: Revert "Made BaseInput inherit from forms.Field so inputs can be used in layouts. Added a SubmitButtonWidget." ## Code After: class BaseInput(object): """ An base Input class to reduce the amount of code in the Input classes. """ def __init__(self,name,value): self.name = name self.value = value class Toggle(object): """ A container for holder toggled items such as fields and buttons. """ fields = []
- from django import forms - from django.forms.widgets import Input - - class SubmitButtonWidget(Input): - """ - A widget that handles a submit button. - """ - input_type = 'submit' - - def render(self, name, value, attrs=None): - return super(SubmitButtonWidget, self).render(name, - self.attrs['value'], attrs) - - - class BaseInput(forms.Field): ? - ^^^^^^ ^^ + class BaseInput(object): ? ^^ ^^ """ An base Input class to reduce the amount of code in the Input classes. """ + + def __init__(self,name,value): + self.name = name + self.value = value - widget = SubmitButtonWidget - - def __init__(self, **kwargs): - if not 'label' in kwargs: - kwargs['label'] = '' - if not 'required' in kwargs: - kwargs['required'] = False - if 'value' in kwargs: - self._widget_attrs = {'value': kwargs['value']} - del kwargs['value'] - else: ? ----- + - self._widget_attrs = {'value': 'Submit'} - super(BaseInput, self).__init__(**kwargs) - - def widget_attrs(self, widget): - return self._widget_attrs class Toggle(object): """ A container for holder toggled items such as fields and buttons. """ fields = []
c259e42ea95fdc43ad9345d702d3cab901d88f93
rx/core/__init__.py
rx/core/__init__.py
from .typing import Observer, Scheduler from .disposable import Disposable from .anonymousobserver import AnonymousObserver from . import observerextensions from .pipe import pipe from .observable import Observable from .observable import AnonymousObservable, ConnectableObservable from .observable import GroupedObservable, BlockingObservable from .observerbase import ObserverBase
from .typing import Observer, Scheduler from .disposable import Disposable from .anonymousobserver import AnonymousObserver from .pipe import pipe from .observable import Observable from .observable import AnonymousObservable, ConnectableObservable from .observable import GroupedObservable, BlockingObservable from .observerbase import ObserverBase
Remove observer extension from init
Remove observer extension from init
Python
mit
ReactiveX/RxPY,ReactiveX/RxPY
from .typing import Observer, Scheduler + from .disposable import Disposable - from .disposable import Disposable from .anonymousobserver import AnonymousObserver - from . import observerextensions from .pipe import pipe from .observable import Observable from .observable import AnonymousObservable, ConnectableObservable from .observable import GroupedObservable, BlockingObservable from .observerbase import ObserverBase
Remove observer extension from init
## Code Before: from .typing import Observer, Scheduler from .disposable import Disposable from .anonymousobserver import AnonymousObserver from . import observerextensions from .pipe import pipe from .observable import Observable from .observable import AnonymousObservable, ConnectableObservable from .observable import GroupedObservable, BlockingObservable from .observerbase import ObserverBase ## Instruction: Remove observer extension from init ## Code After: from .typing import Observer, Scheduler from .disposable import Disposable from .anonymousobserver import AnonymousObserver from .pipe import pipe from .observable import Observable from .observable import AnonymousObservable, ConnectableObservable from .observable import GroupedObservable, BlockingObservable from .observerbase import ObserverBase
from .typing import Observer, Scheduler + from .disposable import Disposable - from .disposable import Disposable from .anonymousobserver import AnonymousObserver - from . import observerextensions from .pipe import pipe from .observable import Observable from .observable import AnonymousObservable, ConnectableObservable from .observable import GroupedObservable, BlockingObservable from .observerbase import ObserverBase
1c3c092afae1946e72a87cca8792bd012bee23e4
ktbs_bench/utils/decorators.py
ktbs_bench/utils/decorators.py
from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Times a function given specific arguments.""" # TODO mettre args (n_repeat, func) qui execute n_repeat fois et applique un reduce(res, func) @wraps(f) def wrapped(*args, **kwargs): timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() res = [call_signature(f, *args, **kwargs), timer.get_times()['real']] # TODO penser a quel temps garder return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call.""" call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Decorator to time a function. Parameters ---------- f : function The function to time. Returns ------- call_signature : str The signature of the function call, with parameter names and values. time : float The real time taken to execute the function, in second. Examples -------- >>> @bench ... def square_list(numbers): ... for ind_num in range(len(numbers)): ... numbers[ind_num] *= numbers[ind_num] ... return numbers >>> call_sig, time = square_list(range(10)) >>> call_sig 'numbers=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]' >>> 0 < time < 1 # benched function is not computationally intensive so time should be less than 1 s True """ @wraps(f) def wrapped(*args, **kwargs): """Actual benchmark takes place here.""" call_sig = call_signature(f, *args, **kwargs) timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() res = [call_sig, timer.get_times()['real']] return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call. Parameters ---------- f : function The function to get the call signature from. args : list List of arguments. kwargs : dict Dictionary of argument names and values. Returns ------- out : str String representation of the function call Examples -------- >>> def square(num): ... return num*num >>> call_signature(square, 4) 'num=4' """ call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
Add docstrings and fix call of call_signature.
Add docstrings and fix call of call_signature. For the fix: call_signature has been moved before executing the actual call, if the call is made before then it might change arguments if they are references.
Python
mit
ktbs/ktbs-bench,ktbs/ktbs-bench
from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): - """Times a function given specific arguments.""" + """Decorator to time a function. - # TODO mettre args (n_repeat, func) qui execute n_repeat fois et applique un reduce(res, func) + Parameters + ---------- + f : function + The function to time. + + Returns + ------- + call_signature : str + The signature of the function call, with parameter names and values. + time : float + The real time taken to execute the function, in second. + + Examples + -------- + >>> @bench + ... def square_list(numbers): + ... for ind_num in range(len(numbers)): + ... numbers[ind_num] *= numbers[ind_num] + ... return numbers + >>> call_sig, time = square_list(range(10)) + >>> call_sig + 'numbers=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]' + >>> 0 < time < 1 # benched function is not computationally intensive so time should be less than 1 s + True + """ + @wraps(f) def wrapped(*args, **kwargs): + """Actual benchmark takes place here.""" + call_sig = call_signature(f, *args, **kwargs) + timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() + res = [call_sig, timer.get_times()['real']] - res = [call_signature(f, *args, **kwargs), - timer.get_times()['real']] # TODO penser a quel temps garder return res return wrapped def call_signature(f, *args, **kwargs): - """Return a string representation of a function call.""" + """Return a string representation of a function call. + + Parameters + ---------- + f : function + The function to get the call signature from. + args : list + List of arguments. + kwargs : dict + Dictionary of argument names and values. + + Returns + ------- + out : str + String representation of the function call + + Examples + -------- + >>> def square(num): + ... return num*num + >>> call_signature(square, 4) + 'num=4' + """ call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
Add docstrings and fix call of call_signature.
## Code Before: from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Times a function given specific arguments.""" # TODO mettre args (n_repeat, func) qui execute n_repeat fois et applique un reduce(res, func) @wraps(f) def wrapped(*args, **kwargs): timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() res = [call_signature(f, *args, **kwargs), timer.get_times()['real']] # TODO penser a quel temps garder return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call.""" call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()]) ## Instruction: Add docstrings and fix call of call_signature. ## Code After: from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Decorator to time a function. Parameters ---------- f : function The function to time. Returns ------- call_signature : str The signature of the function call, with parameter names and values. time : float The real time taken to execute the function, in second. Examples -------- >>> @bench ... def square_list(numbers): ... for ind_num in range(len(numbers)): ... numbers[ind_num] *= numbers[ind_num] ... return numbers >>> call_sig, time = square_list(range(10)) >>> call_sig 'numbers=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]' >>> 0 < time < 1 # benched function is not computationally intensive so time should be less than 1 s True """ @wraps(f) def wrapped(*args, **kwargs): """Actual benchmark takes place here.""" call_sig = call_signature(f, *args, **kwargs) timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() res = [call_sig, timer.get_times()['real']] return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call. Parameters ---------- f : function The function to get the call signature from. args : list List of arguments. 
kwargs : dict Dictionary of argument names and values. Returns ------- out : str String representation of the function call Examples -------- >>> def square(num): ... return num*num >>> call_signature(square, 4) 'num=4' """ call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): - """Times a function given specific arguments.""" + """Decorator to time a function. - # TODO mettre args (n_repeat, func) qui execute n_repeat fois et applique un reduce(res, func) + Parameters + ---------- + f : function + The function to time. + + Returns + ------- + call_signature : str + The signature of the function call, with parameter names and values. + time : float + The real time taken to execute the function, in second. + + Examples + -------- + >>> @bench + ... def square_list(numbers): + ... for ind_num in range(len(numbers)): + ... numbers[ind_num] *= numbers[ind_num] + ... return numbers + >>> call_sig, time = square_list(range(10)) + >>> call_sig + 'numbers=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]' + >>> 0 < time < 1 # benched function is not computationally intensive so time should be less than 1 s + True + """ + @wraps(f) def wrapped(*args, **kwargs): + """Actual benchmark takes place here.""" + call_sig = call_signature(f, *args, **kwargs) + timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() + res = [call_sig, timer.get_times()['real']] - res = [call_signature(f, *args, **kwargs), - timer.get_times()['real']] # TODO penser a quel temps garder return res return wrapped def call_signature(f, *args, **kwargs): - """Return a string representation of a function call.""" ? --- + """Return a string representation of a function call. + + Parameters + ---------- + f : function + The function to get the call signature from. + args : list + List of arguments. + kwargs : dict + Dictionary of argument names and values. + + Returns + ------- + out : str + String representation of the function call + + Examples + -------- + >>> def square(num): + ... return num*num + >>> call_signature(square, 4) + 'num=4' + """ call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
bd540e3a0bcc13c6c50c1d72f1982084ab5cb87e
django_enumfield/fields.py
django_enumfield/fields.py
from django.db import models class EnumField(models.Field): __metaclass__ = models.SubfieldBase def __init__(self, enumeration, *args, **kwargs): self.enumeration = enumeration kwargs.setdefault('choices', enumeration.get_choices()) super(EnumField, self).__init__(*args, **kwargs) def get_internal_type(self): return 'IntegerField' def to_python(self, value): return self.enumeration.to_item(value) def get_db_prep_save(self, value, connection=None): if hasattr(value, 'value'): return value.value return value def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False): def prepare(value): v = self.to_python(value) return self.get_db_prep_save(v, connection=connection) if lookup_type == 'exact': return [prepare(value)] elif lookup_type == 'in': return [prepare(v) for v in value] elif lookup_type == 'isnull': return [] raise TypeError("Lookup type %r not supported." % lookup_type)
from django.db import models class EnumField(models.Field): __metaclass__ = models.SubfieldBase def __init__(self, enumeration, *args, **kwargs): self.enumeration = enumeration kwargs.setdefault('choices', enumeration.get_choices()) super(EnumField, self).__init__(*args, **kwargs) def get_internal_type(self): return 'IntegerField' def to_python(self, value): return self.enumeration.to_item(value) def get_db_prep_save(self, value, connection=None): return self.to_python(value).value def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False): def prepare(value): v = self.to_python(value) return self.get_db_prep_save(v, connection=connection) if lookup_type == 'exact': return [prepare(value)] elif lookup_type == 'in': return [prepare(v) for v in value] elif lookup_type == 'isnull': return [] raise TypeError("Lookup type %r not supported." % lookup_type)
Allow string arguments (as slugs) when saving/updating EnumFields
Allow string arguments (as slugs) when saving/updating EnumFields This fixes issues where: MyModel.objects.update(my_enum_field='slug') would result in SQL like: UPDATE app_mymodel SET my_enum_field = 'slug' .. instead of what that's slug's integer value is. Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>
Python
bsd-3-clause
playfire/django-enumfield
from django.db import models class EnumField(models.Field): __metaclass__ = models.SubfieldBase def __init__(self, enumeration, *args, **kwargs): self.enumeration = enumeration kwargs.setdefault('choices', enumeration.get_choices()) super(EnumField, self).__init__(*args, **kwargs) def get_internal_type(self): return 'IntegerField' def to_python(self, value): return self.enumeration.to_item(value) def get_db_prep_save(self, value, connection=None): + return self.to_python(value).value - if hasattr(value, 'value'): - return value.value - return value def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False): def prepare(value): v = self.to_python(value) return self.get_db_prep_save(v, connection=connection) if lookup_type == 'exact': return [prepare(value)] elif lookup_type == 'in': return [prepare(v) for v in value] elif lookup_type == 'isnull': return [] raise TypeError("Lookup type %r not supported." % lookup_type)
Allow string arguments (as slugs) when saving/updating EnumFields
## Code Before: from django.db import models class EnumField(models.Field): __metaclass__ = models.SubfieldBase def __init__(self, enumeration, *args, **kwargs): self.enumeration = enumeration kwargs.setdefault('choices', enumeration.get_choices()) super(EnumField, self).__init__(*args, **kwargs) def get_internal_type(self): return 'IntegerField' def to_python(self, value): return self.enumeration.to_item(value) def get_db_prep_save(self, value, connection=None): if hasattr(value, 'value'): return value.value return value def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False): def prepare(value): v = self.to_python(value) return self.get_db_prep_save(v, connection=connection) if lookup_type == 'exact': return [prepare(value)] elif lookup_type == 'in': return [prepare(v) for v in value] elif lookup_type == 'isnull': return [] raise TypeError("Lookup type %r not supported." % lookup_type) ## Instruction: Allow string arguments (as slugs) when saving/updating EnumFields ## Code After: from django.db import models class EnumField(models.Field): __metaclass__ = models.SubfieldBase def __init__(self, enumeration, *args, **kwargs): self.enumeration = enumeration kwargs.setdefault('choices', enumeration.get_choices()) super(EnumField, self).__init__(*args, **kwargs) def get_internal_type(self): return 'IntegerField' def to_python(self, value): return self.enumeration.to_item(value) def get_db_prep_save(self, value, connection=None): return self.to_python(value).value def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False): def prepare(value): v = self.to_python(value) return self.get_db_prep_save(v, connection=connection) if lookup_type == 'exact': return [prepare(value)] elif lookup_type == 'in': return [prepare(v) for v in value] elif lookup_type == 'isnull': return [] raise TypeError("Lookup type %r not supported." % lookup_type)
from django.db import models class EnumField(models.Field): __metaclass__ = models.SubfieldBase def __init__(self, enumeration, *args, **kwargs): self.enumeration = enumeration kwargs.setdefault('choices', enumeration.get_choices()) super(EnumField, self).__init__(*args, **kwargs) def get_internal_type(self): return 'IntegerField' def to_python(self, value): return self.enumeration.to_item(value) def get_db_prep_save(self, value, connection=None): + return self.to_python(value).value - if hasattr(value, 'value'): - return value.value - return value def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False): def prepare(value): v = self.to_python(value) return self.get_db_prep_save(v, connection=connection) if lookup_type == 'exact': return [prepare(value)] elif lookup_type == 'in': return [prepare(v) for v in value] elif lookup_type == 'isnull': return [] raise TypeError("Lookup type %r not supported." % lookup_type)
b2b939e13a5bcdabe09e85d7f940052f4fec8f27
events/urls.py
events/urls.py
from django.conf.urls.defaults import * from django.views.generic import list_detail from django.views.generic import date_based from agenda.events.models import Event general_info = { "queryset" : Event.objects.filter(moderated=True), "template_object_name" : "event", } list_info = { "paginate_by": 25, } month_list_info = { "month_format": "%m", "date_field": "start_time", "allow_future": True, } event_info = general_info event_list_info = dict(general_info, **list_info) event_list_month_info = dict(general_info, **month_list_info) urlpatterns = patterns('', (r'^$', list_detail.object_list, event_list_info), (r'^(?P<object_id>\d+)/$', list_detail.object_detail, event_info), (r'^(?P<year>\d+)/(?P<month>\d+)/$', date_based.archive_month, event_list_month_info), )
from django.conf.urls.defaults import * from django.views.generic import list_detail from django.views.generic import date_based from agenda.events.models import Event general_info = { "queryset" : Event.objects.filter(moderated=True), "template_object_name" : "event", } list_info = { "paginate_by": 25, } month_list_info = { "month_format": "%m", "date_field": "start_time", "allow_future": True, "allow_empty": True, } event_info = general_info event_list_info = dict(general_info, **list_info) event_list_month_info = dict(general_info, **month_list_info) urlpatterns = patterns('', (r'^$', list_detail.object_list, event_list_info), (r'^(?P<object_id>\d+)/$', list_detail.object_detail, event_info), (r'^(?P<year>\d+)/(?P<month>\d+)/$', date_based.archive_month, event_list_month_info), )
Allow empty calendar to be drawn
Allow empty calendar to be drawn
Python
agpl-3.0
mlhamel/agendadulibre,mlhamel/agendadulibre,mlhamel/agendadulibre,vcorreze/agendaEteAccoord,vcorreze/agendaEteAccoord,vcorreze/agendaEteAccoord
from django.conf.urls.defaults import * from django.views.generic import list_detail from django.views.generic import date_based from agenda.events.models import Event general_info = { "queryset" : Event.objects.filter(moderated=True), "template_object_name" : "event", } list_info = { "paginate_by": 25, } month_list_info = { "month_format": "%m", "date_field": "start_time", "allow_future": True, + "allow_empty": True, } event_info = general_info event_list_info = dict(general_info, **list_info) event_list_month_info = dict(general_info, **month_list_info) urlpatterns = patterns('', (r'^$', list_detail.object_list, event_list_info), (r'^(?P<object_id>\d+)/$', list_detail.object_detail, event_info), (r'^(?P<year>\d+)/(?P<month>\d+)/$', date_based.archive_month, event_list_month_info), )
Allow empty calendar to be drawn
## Code Before: from django.conf.urls.defaults import * from django.views.generic import list_detail from django.views.generic import date_based from agenda.events.models import Event general_info = { "queryset" : Event.objects.filter(moderated=True), "template_object_name" : "event", } list_info = { "paginate_by": 25, } month_list_info = { "month_format": "%m", "date_field": "start_time", "allow_future": True, } event_info = general_info event_list_info = dict(general_info, **list_info) event_list_month_info = dict(general_info, **month_list_info) urlpatterns = patterns('', (r'^$', list_detail.object_list, event_list_info), (r'^(?P<object_id>\d+)/$', list_detail.object_detail, event_info), (r'^(?P<year>\d+)/(?P<month>\d+)/$', date_based.archive_month, event_list_month_info), ) ## Instruction: Allow empty calendar to be drawn ## Code After: from django.conf.urls.defaults import * from django.views.generic import list_detail from django.views.generic import date_based from agenda.events.models import Event general_info = { "queryset" : Event.objects.filter(moderated=True), "template_object_name" : "event", } list_info = { "paginate_by": 25, } month_list_info = { "month_format": "%m", "date_field": "start_time", "allow_future": True, "allow_empty": True, } event_info = general_info event_list_info = dict(general_info, **list_info) event_list_month_info = dict(general_info, **month_list_info) urlpatterns = patterns('', (r'^$', list_detail.object_list, event_list_info), (r'^(?P<object_id>\d+)/$', list_detail.object_detail, event_info), (r'^(?P<year>\d+)/(?P<month>\d+)/$', date_based.archive_month, event_list_month_info), )
from django.conf.urls.defaults import * from django.views.generic import list_detail from django.views.generic import date_based from agenda.events.models import Event general_info = { "queryset" : Event.objects.filter(moderated=True), "template_object_name" : "event", } list_info = { "paginate_by": 25, } month_list_info = { "month_format": "%m", "date_field": "start_time", "allow_future": True, + "allow_empty": True, } event_info = general_info event_list_info = dict(general_info, **list_info) event_list_month_info = dict(general_info, **month_list_info) urlpatterns = patterns('', (r'^$', list_detail.object_list, event_list_info), (r'^(?P<object_id>\d+)/$', list_detail.object_detail, event_info), (r'^(?P<year>\d+)/(?P<month>\d+)/$', date_based.archive_month, event_list_month_info), )
2b70b4d2ca40cfbf36265a650ca04855999c5a03
elm_open_in_browser.py
elm_open_in_browser.py
import sublime import os.path as fs if int(sublime.version()) < 3000: from elm_project import ElmProject from ViewInBrowserCommand import ViewInBrowserCommand else: from .elm_project import ElmProject ViewInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand class ElmOpenInBrowserCommand(ViewInBrowserCommand): def run(self, edit): super(ElmOpenInBrowserCommand, self).run(edit) def is_enabled(self): self.project = ElmProject(self.view.file_name()) return self.project.exists def normalizePath(self, fileToOpen): # ViewInBrowserCommand norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path)) return super(ElmOpenInBrowserCommand, self).normalizePath(fs.abspath(norm_path))
import sublime import os.path as fs if int(sublime.version()) < 3000: from elm_project import ElmProject from ViewInBrowserCommand import ViewInBrowserCommand as OpenInBrowserCommand else: from .elm_project import ElmProject try: from SideBarEnhancements.SideBar import SideBarOpenInBrowserCommand as OpenInBrowserCommand except: OpenInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand class ElmOpenInBrowserCommand(OpenInBrowserCommand): def run(self, edit=None): if edit: # ViewInBrowserCommand super(ElmOpenInBrowserCommand, self).run(edit) else: # SideBarOpenInBrowserCommand super(ElmOpenInBrowserCommand, self).run([self.html_path()]) def is_enabled(self): try: # ViewInBrowserCommand self.project = ElmProject(self.view.file_name()) except: # SideBarOpenInBrowserCommand self.project = ElmProject(self.window.active_view().file_name()) return self.project.exists def normalizePath(self, fileToOpen): # ViewInBrowserCommand return super(ElmOpenInBrowserCommand, self).normalizePath(self.html_path()) def html_path(self): norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path)) return fs.abspath(norm_path)
Add alternative support for open in browser
Add alternative support for open in browser Integrate SideBarEnhancements for ST3 for poplarity and browser detection
Python
mit
deadfoxygrandpa/Elm.tmLanguage,deadfoxygrandpa/Elm.tmLanguage,rtfeldman/Elm.tmLanguage,rtfeldman/Elm.tmLanguage,sekjun9878/Elm.tmLanguage,sekjun9878/Elm.tmLanguage
import sublime import os.path as fs if int(sublime.version()) < 3000: from elm_project import ElmProject - from ViewInBrowserCommand import ViewInBrowserCommand + from ViewInBrowserCommand import ViewInBrowserCommand as OpenInBrowserCommand else: from .elm_project import ElmProject + try: + from SideBarEnhancements.SideBar import SideBarOpenInBrowserCommand as OpenInBrowserCommand + except: - ViewInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand + OpenInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand - class ElmOpenInBrowserCommand(ViewInBrowserCommand): + class ElmOpenInBrowserCommand(OpenInBrowserCommand): - def run(self, edit): + def run(self, edit=None): + if edit: # ViewInBrowserCommand - super(ElmOpenInBrowserCommand, self).run(edit) + super(ElmOpenInBrowserCommand, self).run(edit) + else: # SideBarOpenInBrowserCommand + super(ElmOpenInBrowserCommand, self).run([self.html_path()]) def is_enabled(self): + try: # ViewInBrowserCommand - self.project = ElmProject(self.view.file_name()) + self.project = ElmProject(self.view.file_name()) + except: # SideBarOpenInBrowserCommand + self.project = ElmProject(self.window.active_view().file_name()) return self.project.exists def normalizePath(self, fileToOpen): # ViewInBrowserCommand + return super(ElmOpenInBrowserCommand, self).normalizePath(self.html_path()) + + def html_path(self): norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path)) - return super(ElmOpenInBrowserCommand, self).normalizePath(fs.abspath(norm_path)) + return fs.abspath(norm_path)
Add alternative support for open in browser
## Code Before: import sublime import os.path as fs if int(sublime.version()) < 3000: from elm_project import ElmProject from ViewInBrowserCommand import ViewInBrowserCommand else: from .elm_project import ElmProject ViewInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand class ElmOpenInBrowserCommand(ViewInBrowserCommand): def run(self, edit): super(ElmOpenInBrowserCommand, self).run(edit) def is_enabled(self): self.project = ElmProject(self.view.file_name()) return self.project.exists def normalizePath(self, fileToOpen): # ViewInBrowserCommand norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path)) return super(ElmOpenInBrowserCommand, self).normalizePath(fs.abspath(norm_path)) ## Instruction: Add alternative support for open in browser ## Code After: import sublime import os.path as fs if int(sublime.version()) < 3000: from elm_project import ElmProject from ViewInBrowserCommand import ViewInBrowserCommand as OpenInBrowserCommand else: from .elm_project import ElmProject try: from SideBarEnhancements.SideBar import SideBarOpenInBrowserCommand as OpenInBrowserCommand except: OpenInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand class ElmOpenInBrowserCommand(OpenInBrowserCommand): def run(self, edit=None): if edit: # ViewInBrowserCommand super(ElmOpenInBrowserCommand, self).run(edit) else: # SideBarOpenInBrowserCommand super(ElmOpenInBrowserCommand, self).run([self.html_path()]) def is_enabled(self): try: # ViewInBrowserCommand self.project = ElmProject(self.view.file_name()) except: # SideBarOpenInBrowserCommand self.project = ElmProject(self.window.active_view().file_name()) return self.project.exists def normalizePath(self, fileToOpen): # ViewInBrowserCommand return super(ElmOpenInBrowserCommand, self).normalizePath(self.html_path()) def html_path(self): norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path)) return 
fs.abspath(norm_path)
import sublime import os.path as fs if int(sublime.version()) < 3000: from elm_project import ElmProject - from ViewInBrowserCommand import ViewInBrowserCommand + from ViewInBrowserCommand import ViewInBrowserCommand as OpenInBrowserCommand ? ++++++++++++++++++++++++ else: from .elm_project import ElmProject + try: + from SideBarEnhancements.SideBar import SideBarOpenInBrowserCommand as OpenInBrowserCommand + except: - ViewInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand ? ^^ ^ + OpenInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand ? ^^^^^^ ^ - class ElmOpenInBrowserCommand(ViewInBrowserCommand): ? ^^ ^ + class ElmOpenInBrowserCommand(OpenInBrowserCommand): ? ^^ ^ - def run(self, edit): + def run(self, edit=None): ? +++++ + if edit: # ViewInBrowserCommand - super(ElmOpenInBrowserCommand, self).run(edit) + super(ElmOpenInBrowserCommand, self).run(edit) ? ++++ + else: # SideBarOpenInBrowserCommand + super(ElmOpenInBrowserCommand, self).run([self.html_path()]) def is_enabled(self): + try: # ViewInBrowserCommand - self.project = ElmProject(self.view.file_name()) + self.project = ElmProject(self.view.file_name()) ? ++++ + except: # SideBarOpenInBrowserCommand + self.project = ElmProject(self.window.active_view().file_name()) return self.project.exists def normalizePath(self, fileToOpen): # ViewInBrowserCommand + return super(ElmOpenInBrowserCommand, self).normalizePath(self.html_path()) + + def html_path(self): norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path)) - return super(ElmOpenInBrowserCommand, self).normalizePath(fs.abspath(norm_path)) + return fs.abspath(norm_path)
aebc3440c98ee2b4cc5f880d648e106e1f9d6b9d
source/urls.py
source/urls.py
from django.conf.urls import url, include from django.contrib import admin from rest_framework import routers from task.views import * from userprofile.views import * router = routers.DefaultRouter() router.register(r'tasks', TaskListViewSet) router.register(r'tolausers', TolaUserViewset) router.register(r'countries', CountryViewSet) router.register(r'organizations', OrganizationViewset) urlpatterns = [ url(r'^admin/', admin.site.urls), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^api/auth/', include('userprofile.urls')), #url(r'^api/', include('task.urls')), #rest framework url(r'^api/', include(router.urls)), ]
from django.conf.urls import url, include from django.contrib import admin from rest_framework import routers from task.views import * from userprofile.views import * router = routers.DefaultRouter() router.register(r'tasks', TaskListViewSet, base_name="my_task") router.register(r'tolausers', TolaUserViewset) router.register(r'countries', CountryViewSet) router.register(r'organizations', OrganizationViewset) urlpatterns = [ url(r'^admin/', admin.site.urls), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^api/auth/', include('userprofile.urls')), #url(r'^api/', include('task.urls')), #rest framework url(r'^api/', include(router.urls)), ]
Add the base_name to the API routers for the custom query_set
Add the base_name to the API routers for the custom query_set
Python
apache-2.0
toladata/TolaProfile,toladata/TolaProfile,toladata/TolaProfile,toladata/TolaProfile
from django.conf.urls import url, include from django.contrib import admin from rest_framework import routers from task.views import * from userprofile.views import * router = routers.DefaultRouter() - router.register(r'tasks', TaskListViewSet) + router.register(r'tasks', TaskListViewSet, base_name="my_task") router.register(r'tolausers', TolaUserViewset) router.register(r'countries', CountryViewSet) router.register(r'organizations', OrganizationViewset) urlpatterns = [ url(r'^admin/', admin.site.urls), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^api/auth/', include('userprofile.urls')), #url(r'^api/', include('task.urls')), #rest framework url(r'^api/', include(router.urls)), ]
Add the base_name to the API routers for the custom query_set
## Code Before: from django.conf.urls import url, include from django.contrib import admin from rest_framework import routers from task.views import * from userprofile.views import * router = routers.DefaultRouter() router.register(r'tasks', TaskListViewSet) router.register(r'tolausers', TolaUserViewset) router.register(r'countries', CountryViewSet) router.register(r'organizations', OrganizationViewset) urlpatterns = [ url(r'^admin/', admin.site.urls), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^api/auth/', include('userprofile.urls')), #url(r'^api/', include('task.urls')), #rest framework url(r'^api/', include(router.urls)), ] ## Instruction: Add the base_name to the API routers for the custom query_set ## Code After: from django.conf.urls import url, include from django.contrib import admin from rest_framework import routers from task.views import * from userprofile.views import * router = routers.DefaultRouter() router.register(r'tasks', TaskListViewSet, base_name="my_task") router.register(r'tolausers', TolaUserViewset) router.register(r'countries', CountryViewSet) router.register(r'organizations', OrganizationViewset) urlpatterns = [ url(r'^admin/', admin.site.urls), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^api/auth/', include('userprofile.urls')), #url(r'^api/', include('task.urls')), #rest framework url(r'^api/', include(router.urls)), ]
from django.conf.urls import url, include from django.contrib import admin from rest_framework import routers from task.views import * from userprofile.views import * router = routers.DefaultRouter() - router.register(r'tasks', TaskListViewSet) + router.register(r'tasks', TaskListViewSet, base_name="my_task") ? +++++++++++++++++++++ router.register(r'tolausers', TolaUserViewset) router.register(r'countries', CountryViewSet) router.register(r'organizations', OrganizationViewset) urlpatterns = [ url(r'^admin/', admin.site.urls), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^api/auth/', include('userprofile.urls')), #url(r'^api/', include('task.urls')), #rest framework url(r'^api/', include(router.urls)), ]
aaa8743c8610eb4b5ae7d08167715f3c1181d4d5
app/sessions.py
app/sessions.py
from functools import wraps from flask import request, abort, redirect, url_for, render_template from flask.ext.login import LoginManager, login_user, logout_user, login_required from app import app, db from app.models import User login_manager = LoginManager() login_manager.init_app(app) # required function for flask-login to function @login_manager.user_loader def user_loader(id): return User.query.get(id) # testing: automatically make an admin user if not User.query.first(): u = User('admin', 'password') db.session.add(u) db.session.commit() @app.route('/login/', methods=['GET', 'POST']) def login(): if request.method == 'POST': if request.form['user'] == 'admin' and request.form['password'] == 'password': u = User.query.filter_by(handle=request.form['user']).first() login_user(u) return redirect(url_for('admin_index')) return render_template('login.html') @app.route('/logout/') def logout(): logout_user() return redirect(url_for('index'))
from functools import wraps from flask import request, abort, redirect, url_for, render_template from flask.ext.login import LoginManager, login_user, logout_user, login_required from app import app, db from app.models import User login_manager = LoginManager() login_manager.init_app(app) # required function for flask-login to function @login_manager.user_loader def user_loader(id): return User.query.get(id) @app.route('/login/', methods=['GET', 'POST']) def login(): if request.method == 'POST': if request.form['user'] == 'admin' and request.form['password'] == 'password': u = User.query.filter_by(handle=request.form['user']).first() login_user(u) return redirect(url_for('admin_index')) return render_template('login.html') @app.route('/logout/') def logout(): logout_user() return redirect(url_for('index'))
Remove development auto admin user creation
Remove development auto admin user creation
Python
mit
tjgavlick/whiskey-blog,tjgavlick/whiskey-blog,tjgavlick/whiskey-blog,tjgavlick/whiskey-blog
from functools import wraps from flask import request, abort, redirect, url_for, render_template from flask.ext.login import LoginManager, login_user, logout_user, login_required from app import app, db from app.models import User login_manager = LoginManager() login_manager.init_app(app) # required function for flask-login to function @login_manager.user_loader def user_loader(id): return User.query.get(id) - # testing: automatically make an admin user - if not User.query.first(): - u = User('admin', 'password') - db.session.add(u) - db.session.commit() - - @app.route('/login/', methods=['GET', 'POST']) def login(): if request.method == 'POST': if request.form['user'] == 'admin' and request.form['password'] == 'password': u = User.query.filter_by(handle=request.form['user']).first() login_user(u) return redirect(url_for('admin_index')) return render_template('login.html') @app.route('/logout/') def logout(): logout_user() return redirect(url_for('index'))
Remove development auto admin user creation
## Code Before: from functools import wraps from flask import request, abort, redirect, url_for, render_template from flask.ext.login import LoginManager, login_user, logout_user, login_required from app import app, db from app.models import User login_manager = LoginManager() login_manager.init_app(app) # required function for flask-login to function @login_manager.user_loader def user_loader(id): return User.query.get(id) # testing: automatically make an admin user if not User.query.first(): u = User('admin', 'password') db.session.add(u) db.session.commit() @app.route('/login/', methods=['GET', 'POST']) def login(): if request.method == 'POST': if request.form['user'] == 'admin' and request.form['password'] == 'password': u = User.query.filter_by(handle=request.form['user']).first() login_user(u) return redirect(url_for('admin_index')) return render_template('login.html') @app.route('/logout/') def logout(): logout_user() return redirect(url_for('index')) ## Instruction: Remove development auto admin user creation ## Code After: from functools import wraps from flask import request, abort, redirect, url_for, render_template from flask.ext.login import LoginManager, login_user, logout_user, login_required from app import app, db from app.models import User login_manager = LoginManager() login_manager.init_app(app) # required function for flask-login to function @login_manager.user_loader def user_loader(id): return User.query.get(id) @app.route('/login/', methods=['GET', 'POST']) def login(): if request.method == 'POST': if request.form['user'] == 'admin' and request.form['password'] == 'password': u = User.query.filter_by(handle=request.form['user']).first() login_user(u) return redirect(url_for('admin_index')) return render_template('login.html') @app.route('/logout/') def logout(): logout_user() return redirect(url_for('index'))
from functools import wraps from flask import request, abort, redirect, url_for, render_template from flask.ext.login import LoginManager, login_user, logout_user, login_required from app import app, db from app.models import User login_manager = LoginManager() login_manager.init_app(app) # required function for flask-login to function @login_manager.user_loader def user_loader(id): return User.query.get(id) - # testing: automatically make an admin user - if not User.query.first(): - u = User('admin', 'password') - db.session.add(u) - db.session.commit() - - @app.route('/login/', methods=['GET', 'POST']) def login(): if request.method == 'POST': if request.form['user'] == 'admin' and request.form['password'] == 'password': u = User.query.filter_by(handle=request.form['user']).first() login_user(u) return redirect(url_for('admin_index')) return render_template('login.html') @app.route('/logout/') def logout(): logout_user() return redirect(url_for('index'))
c5103eea181455afded264528bb97ac8a9982db0
enable/__init__.py
enable/__init__.py
from __future__ import absolute_import from ._version import full_version as __version__ __requires__ = [ 'traitsui', 'PIL', 'kiwisolver', ]
from enable._version import full_version as __version__ __requires__ = [ 'traitsui', 'PIL', 'kiwisolver', ]
Use an absolute import to avoid breaking the docs build.
Use an absolute import to avoid breaking the docs build.
Python
bsd-3-clause
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
- from __future__ import absolute_import - - from ._version import full_version as __version__ + from enable._version import full_version as __version__ __requires__ = [ 'traitsui', 'PIL', 'kiwisolver', ]
Use an absolute import to avoid breaking the docs build.
## Code Before: from __future__ import absolute_import from ._version import full_version as __version__ __requires__ = [ 'traitsui', 'PIL', 'kiwisolver', ] ## Instruction: Use an absolute import to avoid breaking the docs build. ## Code After: from enable._version import full_version as __version__ __requires__ = [ 'traitsui', 'PIL', 'kiwisolver', ]
- from __future__ import absolute_import - - from ._version import full_version as __version__ + from enable._version import full_version as __version__ ? ++++++ __requires__ = [ 'traitsui', 'PIL', 'kiwisolver', ]
1e0327c852b851f867d21a182ba7604b42d15331
examples/charts/file/stacked_bar.py
examples/charts/file/stacked_bar.py
from bokeh.charts import Bar, output_file, show from bokeh.charts.operations import blend from bokeh.charts.attributes import cat, color from bokeh.charts.utils import df_from_json from bokeh.sampledata.olympics2014 import data from bokeh.models.tools import HoverTool # utilize utility to make it easy to get json/dict data converted to a dataframe df = df_from_json(data) # filter by countries with at least one medal and sort by total medals df = df[df['total'] > 0] df = df.sort("total", ascending=False) bar = Bar(df, values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'), label=cat(columns='abbr', sort=False), stack=cat(columns='medal', sort=False), color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'], sort=False), legend='top_right', title="Medals per Country, Sorted by Total Medals") bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')])) output_file("stacked_bar.html") show(bar)
from bokeh.charts import Bar, output_file, show from bokeh.charts.operations import blend from bokeh.charts.attributes import cat, color from bokeh.charts.utils import df_from_json from bokeh.sampledata.olympics2014 import data # utilize utility to make it easy to get json/dict data converted to a dataframe df = df_from_json(data) # filter by countries with at least one medal and sort by total medals df = df[df['total'] > 0] df = df.sort("total", ascending=False) bar = Bar(df, values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'), label=cat(columns='abbr', sort=False), stack=cat(columns='medal', sort=False), color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'], sort=False), legend='top_right', title="Medals per Country, Sorted by Total Medals", hover=[('medal', '@medal'), ('country', '@abbr')]) output_file("stacked_bar.html") show(bar)
Update stacked bar example to use the hover kwarg.
Update stacked bar example to use the hover kwarg.
Python
bsd-3-clause
Karel-van-de-Plassche/bokeh,rs2/bokeh,jakirkham/bokeh,msarahan/bokeh,DuCorey/bokeh,schoolie/bokeh,schoolie/bokeh,quasiben/bokeh,timsnyder/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,stonebig/bokeh,bokeh/bokeh,ericmjl/bokeh,bokeh/bokeh,aavanian/bokeh,dennisobrien/bokeh,clairetang6/bokeh,DuCorey/bokeh,ericmjl/bokeh,azjps/bokeh,Karel-van-de-Plassche/bokeh,stonebig/bokeh,justacec/bokeh,ptitjano/bokeh,rs2/bokeh,timsnyder/bokeh,draperjames/bokeh,msarahan/bokeh,ptitjano/bokeh,dennisobrien/bokeh,justacec/bokeh,aiguofer/bokeh,draperjames/bokeh,ptitjano/bokeh,jakirkham/bokeh,azjps/bokeh,mindriot101/bokeh,philippjfr/bokeh,mindriot101/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,philippjfr/bokeh,timsnyder/bokeh,ptitjano/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh,jakirkham/bokeh,dennisobrien/bokeh,aiguofer/bokeh,schoolie/bokeh,phobson/bokeh,mindriot101/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh,rs2/bokeh,draperjames/bokeh,bokeh/bokeh,phobson/bokeh,aavanian/bokeh,philippjfr/bokeh,ericmjl/bokeh,clairetang6/bokeh,draperjames/bokeh,percyfal/bokeh,percyfal/bokeh,aavanian/bokeh,mindriot101/bokeh,azjps/bokeh,KasperPRasmussen/bokeh,rs2/bokeh,bokeh/bokeh,percyfal/bokeh,philippjfr/bokeh,ptitjano/bokeh,aavanian/bokeh,quasiben/bokeh,percyfal/bokeh,msarahan/bokeh,DuCorey/bokeh,aiguofer/bokeh,Karel-van-de-Plassche/bokeh,phobson/bokeh,phobson/bokeh,timsnyder/bokeh,timsnyder/bokeh,phobson/bokeh,rs2/bokeh,bokeh/bokeh,justacec/bokeh,schoolie/bokeh,stonebig/bokeh,KasperPRasmussen/bokeh,justacec/bokeh,clairetang6/bokeh,azjps/bokeh,jakirkham/bokeh,draperjames/bokeh,Karel-van-de-Plassche/bokeh,msarahan/bokeh,dennisobrien/bokeh,quasiben/bokeh,percyfal/bokeh,aiguofer/bokeh,jakirkham/bokeh,DuCorey/bokeh,DuCorey/bokeh,philippjfr/bokeh,clairetang6/bokeh,stonebig/bokeh,azjps/bokeh,dennisobrien/bokeh,aiguofer/bokeh
from bokeh.charts import Bar, output_file, show from bokeh.charts.operations import blend from bokeh.charts.attributes import cat, color from bokeh.charts.utils import df_from_json from bokeh.sampledata.olympics2014 import data - - from bokeh.models.tools import HoverTool # utilize utility to make it easy to get json/dict data converted to a dataframe df = df_from_json(data) # filter by countries with at least one medal and sort by total medals df = df[df['total'] > 0] df = df.sort("total", ascending=False) bar = Bar(df, values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'), label=cat(columns='abbr', sort=False), stack=cat(columns='medal', sort=False), color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'], sort=False), legend='top_right', - title="Medals per Country, Sorted by Total Medals") + title="Medals per Country, Sorted by Total Medals", + hover=[('medal', '@medal'), ('country', '@abbr')]) - bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')])) output_file("stacked_bar.html") show(bar)
Update stacked bar example to use the hover kwarg.
## Code Before: from bokeh.charts import Bar, output_file, show from bokeh.charts.operations import blend from bokeh.charts.attributes import cat, color from bokeh.charts.utils import df_from_json from bokeh.sampledata.olympics2014 import data from bokeh.models.tools import HoverTool # utilize utility to make it easy to get json/dict data converted to a dataframe df = df_from_json(data) # filter by countries with at least one medal and sort by total medals df = df[df['total'] > 0] df = df.sort("total", ascending=False) bar = Bar(df, values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'), label=cat(columns='abbr', sort=False), stack=cat(columns='medal', sort=False), color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'], sort=False), legend='top_right', title="Medals per Country, Sorted by Total Medals") bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')])) output_file("stacked_bar.html") show(bar) ## Instruction: Update stacked bar example to use the hover kwarg. ## Code After: from bokeh.charts import Bar, output_file, show from bokeh.charts.operations import blend from bokeh.charts.attributes import cat, color from bokeh.charts.utils import df_from_json from bokeh.sampledata.olympics2014 import data # utilize utility to make it easy to get json/dict data converted to a dataframe df = df_from_json(data) # filter by countries with at least one medal and sort by total medals df = df[df['total'] > 0] df = df.sort("total", ascending=False) bar = Bar(df, values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'), label=cat(columns='abbr', sort=False), stack=cat(columns='medal', sort=False), color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'], sort=False), legend='top_right', title="Medals per Country, Sorted by Total Medals", hover=[('medal', '@medal'), ('country', '@abbr')]) output_file("stacked_bar.html") show(bar)
from bokeh.charts import Bar, output_file, show from bokeh.charts.operations import blend from bokeh.charts.attributes import cat, color from bokeh.charts.utils import df_from_json from bokeh.sampledata.olympics2014 import data - - from bokeh.models.tools import HoverTool # utilize utility to make it easy to get json/dict data converted to a dataframe df = df_from_json(data) # filter by countries with at least one medal and sort by total medals df = df[df['total'] > 0] df = df.sort("total", ascending=False) bar = Bar(df, values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'), label=cat(columns='abbr', sort=False), stack=cat(columns='medal', sort=False), color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'], sort=False), legend='top_right', - title="Medals per Country, Sorted by Total Medals") ? ^ + title="Medals per Country, Sorted by Total Medals", ? ^ + hover=[('medal', '@medal'), ('country', '@abbr')]) - bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')])) output_file("stacked_bar.html") show(bar)
f498eba42dd3d35e9ff0b5240f44fe8df25332ed
law/contrib/cms/tasks.py
law/contrib/cms/tasks.py
__all__ = ["BundleCMSSW"] import os import luigi from law import Task, LocalFileTarget, NO_STR from law.decorator import log from law.util import rel_path, interruptable_popen class BundleCMSSW(Task): task_namespace = "law.cms" path = luigi.Parameter(description="the path to the CMSSW checkout to bundle") exclude = luigi.Parameter(default=NO_STR, description="regular expression for excluding files " "or directories, relative to the CMSSW checkout path") def __init__(self, *args, **kwargs): super(BundleCMSSW, self).__init__(*args, **kwargs) self.path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.path))) def output(self): return LocalFileTarget("{}.tgz".format(os.path.basename(self.path))) @log def run(self): with self.output().localize("w") as tmp: self.bundle(tmp.path) def bundle(self, dst_path): cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.path, dst_path] if self.exclude != NO_STR: cmd += [self.exclude] code = interruptable_popen(cmd)[0] if code != 0: raise Exception("cmssw bundling failed")
__all__ = ["BundleCMSSW"] import os import luigi from law import Task, LocalFileTarget, NO_STR from law.decorator import log from law.util import rel_path, interruptable_popen class BundleCMSSW(Task): task_namespace = "law.cms" cmssw_path = luigi.Parameter(description="the path to the CMSSW checkout to bundle") exclude = luigi.Parameter(default=NO_STR, description="regular expression for excluding files " "or directories, relative to the CMSSW checkout path") def __init__(self, *args, **kwargs): super(BundleCMSSW, self).__init__(*args, **kwargs) self.cmssw_path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.cmssw_path))) def output(self): return LocalFileTarget("{}.tgz".format(os.path.basename(self.cmssw_path))) @log def run(self): with self.output().localize("w") as tmp: self.bundle(tmp.path) def bundle(self, dst_path): cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.cmssw_path, dst_path] if self.exclude != NO_STR: cmd += [self.exclude] code = interruptable_popen(cmd)[0] if code != 0: raise Exception("cmssw bundling failed")
Rename path parameter in contrib.cms.BundleCMSSW.
Rename path parameter in contrib.cms.BundleCMSSW.
Python
bsd-3-clause
riga/law,riga/law
__all__ = ["BundleCMSSW"] import os import luigi from law import Task, LocalFileTarget, NO_STR from law.decorator import log from law.util import rel_path, interruptable_popen class BundleCMSSW(Task): task_namespace = "law.cms" - path = luigi.Parameter(description="the path to the CMSSW checkout to bundle") + cmssw_path = luigi.Parameter(description="the path to the CMSSW checkout to bundle") exclude = luigi.Parameter(default=NO_STR, description="regular expression for excluding files " "or directories, relative to the CMSSW checkout path") def __init__(self, *args, **kwargs): super(BundleCMSSW, self).__init__(*args, **kwargs) - self.path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.path))) + self.cmssw_path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.cmssw_path))) def output(self): - return LocalFileTarget("{}.tgz".format(os.path.basename(self.path))) + return LocalFileTarget("{}.tgz".format(os.path.basename(self.cmssw_path))) @log def run(self): with self.output().localize("w") as tmp: self.bundle(tmp.path) def bundle(self, dst_path): - cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.path, dst_path] + cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.cmssw_path, dst_path] if self.exclude != NO_STR: cmd += [self.exclude] code = interruptable_popen(cmd)[0] if code != 0: raise Exception("cmssw bundling failed")
Rename path parameter in contrib.cms.BundleCMSSW.
## Code Before: __all__ = ["BundleCMSSW"] import os import luigi from law import Task, LocalFileTarget, NO_STR from law.decorator import log from law.util import rel_path, interruptable_popen class BundleCMSSW(Task): task_namespace = "law.cms" path = luigi.Parameter(description="the path to the CMSSW checkout to bundle") exclude = luigi.Parameter(default=NO_STR, description="regular expression for excluding files " "or directories, relative to the CMSSW checkout path") def __init__(self, *args, **kwargs): super(BundleCMSSW, self).__init__(*args, **kwargs) self.path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.path))) def output(self): return LocalFileTarget("{}.tgz".format(os.path.basename(self.path))) @log def run(self): with self.output().localize("w") as tmp: self.bundle(tmp.path) def bundle(self, dst_path): cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.path, dst_path] if self.exclude != NO_STR: cmd += [self.exclude] code = interruptable_popen(cmd)[0] if code != 0: raise Exception("cmssw bundling failed") ## Instruction: Rename path parameter in contrib.cms.BundleCMSSW. 
## Code After: __all__ = ["BundleCMSSW"] import os import luigi from law import Task, LocalFileTarget, NO_STR from law.decorator import log from law.util import rel_path, interruptable_popen class BundleCMSSW(Task): task_namespace = "law.cms" cmssw_path = luigi.Parameter(description="the path to the CMSSW checkout to bundle") exclude = luigi.Parameter(default=NO_STR, description="regular expression for excluding files " "or directories, relative to the CMSSW checkout path") def __init__(self, *args, **kwargs): super(BundleCMSSW, self).__init__(*args, **kwargs) self.cmssw_path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.cmssw_path))) def output(self): return LocalFileTarget("{}.tgz".format(os.path.basename(self.cmssw_path))) @log def run(self): with self.output().localize("w") as tmp: self.bundle(tmp.path) def bundle(self, dst_path): cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.cmssw_path, dst_path] if self.exclude != NO_STR: cmd += [self.exclude] code = interruptable_popen(cmd)[0] if code != 0: raise Exception("cmssw bundling failed")
__all__ = ["BundleCMSSW"] import os import luigi from law import Task, LocalFileTarget, NO_STR from law.decorator import log from law.util import rel_path, interruptable_popen class BundleCMSSW(Task): task_namespace = "law.cms" - path = luigi.Parameter(description="the path to the CMSSW checkout to bundle") + cmssw_path = luigi.Parameter(description="the path to the CMSSW checkout to bundle") ? ++++++ exclude = luigi.Parameter(default=NO_STR, description="regular expression for excluding files " "or directories, relative to the CMSSW checkout path") def __init__(self, *args, **kwargs): super(BundleCMSSW, self).__init__(*args, **kwargs) - self.path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.path))) + self.cmssw_path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.cmssw_path))) ? ++++++ ++++++ def output(self): - return LocalFileTarget("{}.tgz".format(os.path.basename(self.path))) + return LocalFileTarget("{}.tgz".format(os.path.basename(self.cmssw_path))) ? ++++++ @log def run(self): with self.output().localize("w") as tmp: self.bundle(tmp.path) def bundle(self, dst_path): - cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.path, dst_path] + cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.cmssw_path, dst_path] ? ++++++ if self.exclude != NO_STR: cmd += [self.exclude] code = interruptable_popen(cmd)[0] if code != 0: raise Exception("cmssw bundling failed")
83dce279fcf157f9ca4cc2e7dbdad55db9f1f857
play.py
play.py
import readline import random import shelve import sys from src import parser from src import locations from src import classes player = classes.Player(locations, locations.start) previousNoun = '' turns = 0 while True: try: command = parser.parseCommand(input('> ')) if command is not None: hasNoun = True action = command[0] if len(command) >= 2: noun = command[1] else: hasNoun = False noun = None if previousNoun != '' and noun == 'it': noun = previousNoun try: commandResult = getattr(player, action)(action, noun, hasNoun) except AttributeError: print('You can\'t do that here.') if noun is not None: previousNoun = noun else: previousNoun = '' turns += 1 except KeyboardInterrupt: player.die()
import os import readline import random import shelve import sys os.chdir(os.path.dirname(os.path.abspath(__file__))) from src import parser from src import locations from src import classes player = classes.Player(locations, locations.start) previousNoun = '' turns = 0 while True: try: command = parser.parseCommand(input('> ')) if command is not None: hasNoun = True action = command[0] if len(command) >= 2: noun = command[1] else: hasNoun = False noun = None if previousNoun != '' and noun == 'it': noun = previousNoun try: commandResult = getattr(player, action)(action, noun, hasNoun) except AttributeError: print('You can\'t do that here.') if noun is not None: previousNoun = noun else: previousNoun = '' turns += 1 except KeyboardInterrupt: player.die()
Fix issue where game could be in wrong cwd
Fix issue where game could be in wrong cwd
Python
mit
disorientedperson/python-adventure-game,allanburleson/python-adventure-game
+ import os import readline import random import shelve import sys + + os.chdir(os.path.dirname(os.path.abspath(__file__))) from src import parser from src import locations from src import classes player = classes.Player(locations, locations.start) previousNoun = '' turns = 0 while True: try: command = parser.parseCommand(input('> ')) if command is not None: hasNoun = True action = command[0] if len(command) >= 2: noun = command[1] else: hasNoun = False noun = None if previousNoun != '' and noun == 'it': noun = previousNoun try: commandResult = getattr(player, action)(action, noun, hasNoun) except AttributeError: print('You can\'t do that here.') if noun is not None: previousNoun = noun else: previousNoun = '' turns += 1 except KeyboardInterrupt: player.die()
Fix issue where game could be in wrong cwd
## Code Before: import readline import random import shelve import sys from src import parser from src import locations from src import classes player = classes.Player(locations, locations.start) previousNoun = '' turns = 0 while True: try: command = parser.parseCommand(input('> ')) if command is not None: hasNoun = True action = command[0] if len(command) >= 2: noun = command[1] else: hasNoun = False noun = None if previousNoun != '' and noun == 'it': noun = previousNoun try: commandResult = getattr(player, action)(action, noun, hasNoun) except AttributeError: print('You can\'t do that here.') if noun is not None: previousNoun = noun else: previousNoun = '' turns += 1 except KeyboardInterrupt: player.die() ## Instruction: Fix issue where game could be in wrong cwd ## Code After: import os import readline import random import shelve import sys os.chdir(os.path.dirname(os.path.abspath(__file__))) from src import parser from src import locations from src import classes player = classes.Player(locations, locations.start) previousNoun = '' turns = 0 while True: try: command = parser.parseCommand(input('> ')) if command is not None: hasNoun = True action = command[0] if len(command) >= 2: noun = command[1] else: hasNoun = False noun = None if previousNoun != '' and noun == 'it': noun = previousNoun try: commandResult = getattr(player, action)(action, noun, hasNoun) except AttributeError: print('You can\'t do that here.') if noun is not None: previousNoun = noun else: previousNoun = '' turns += 1 except KeyboardInterrupt: player.die()
+ import os import readline import random import shelve import sys + + os.chdir(os.path.dirname(os.path.abspath(__file__))) from src import parser from src import locations from src import classes player = classes.Player(locations, locations.start) previousNoun = '' turns = 0 while True: try: command = parser.parseCommand(input('> ')) if command is not None: hasNoun = True action = command[0] if len(command) >= 2: noun = command[1] else: hasNoun = False noun = None if previousNoun != '' and noun == 'it': noun = previousNoun try: commandResult = getattr(player, action)(action, noun, hasNoun) except AttributeError: print('You can\'t do that here.') if noun is not None: previousNoun = noun else: previousNoun = '' turns += 1 except KeyboardInterrupt: player.die()
f36baf09fbbe62ff2fef97528f2d00df43797b43
flow/__init__.py
flow/__init__.py
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData, NumpyFeature from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
Add NumpyFeature to top-level exports
Add NumpyFeature to top-level exports
Python
mit
JohnVinyard/featureflow,JohnVinyard/featureflow
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter - from nmpy import StreamingNumpyDecoder, NumpyMetaData + from nmpy import StreamingNumpyDecoder, NumpyMetaData, NumpyFeature from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
Add NumpyFeature to top-level exports
## Code Before: from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings ## Instruction: Add NumpyFeature to top-level exports ## Code After: from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData, NumpyFeature from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter - from nmpy import StreamingNumpyDecoder, NumpyMetaData + from nmpy import StreamingNumpyDecoder, NumpyMetaData, NumpyFeature ? ++++++++++++++ from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
a5f34a8011718ba31dc3d70d761bc4583112f133
common/morse_parse.py
common/morse_parse.py
f = open("morse_table.txt") morse_table = f.read() morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")]) f.close()
import inspect, os common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory f = open(os.path.join(common_dir, "morse_table.txt")) morse_table = f.read() morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")]) f.close()
Make morse parser not assume that the current working directory is common/
Make morse parser not assume that the current working directory is common/
Python
mit
nickodell/morse-code
- f = open("morse_table.txt") + import inspect, os + common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory + + f = open(os.path.join(common_dir, "morse_table.txt")) morse_table = f.read() morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")]) f.close()
Make morse parser not assume that the current working directory is common/
## Code Before: f = open("morse_table.txt") morse_table = f.read() morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")]) f.close() ## Instruction: Make morse parser not assume that the current working directory is common/ ## Code After: import inspect, os common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory f = open(os.path.join(common_dir, "morse_table.txt")) morse_table = f.read() morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")]) f.close()
- f = open("morse_table.txt") + import inspect, os + common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory + + f = open(os.path.join(common_dir, "morse_table.txt")) morse_table = f.read() morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")]) f.close()
f9332afe031f4d7875b8c6dd53392a46a198fc9e
evaluation/packages/utils.py
evaluation/packages/utils.py
def distanceToPrimitives(cloud, assign, primitives): return [ [primVar.distanceTo(cloud[a[0]]) for primVar in primitives if primVar.uid == a[1]] for a in assign]
def distanceToPrimitives(cloud, assign, primitives): return [ [primVar.distanceTo(cloud[a[0]]) for primVar in primitives if primVar.uid == a[1]] for a in assign] import packages.orderedSet as orderedSet def parseAngles(strAngle): angles = orderedSet.OrderedSet() angles.add(0.) if len(strAngle) == 1: strAngle = strAngle[0].split(',') for genAngle in strAngle: a = float(genAngle) while a <= 180.: angles.add(a) a+= float(genAngle) return angles
Add method to parse angle command line arguments
Add method to parse angle command line arguments
Python
apache-2.0
amonszpart/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,amonszpart/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,NUAAXXY/globOpt
def distanceToPrimitives(cloud, assign, primitives): return [ [primVar.distanceTo(cloud[a[0]]) for primVar in primitives if primVar.uid == a[1]] for a in assign] + + + + import packages.orderedSet as orderedSet + def parseAngles(strAngle): + angles = orderedSet.OrderedSet() + angles.add(0.) + if len(strAngle) == 1: + strAngle = strAngle[0].split(',') + for genAngle in strAngle: + a = float(genAngle) + while a <= 180.: + angles.add(a) + a+= float(genAngle) + + return angles + +
Add method to parse angle command line arguments
## Code Before: def distanceToPrimitives(cloud, assign, primitives): return [ [primVar.distanceTo(cloud[a[0]]) for primVar in primitives if primVar.uid == a[1]] for a in assign] ## Instruction: Add method to parse angle command line arguments ## Code After: def distanceToPrimitives(cloud, assign, primitives): return [ [primVar.distanceTo(cloud[a[0]]) for primVar in primitives if primVar.uid == a[1]] for a in assign] import packages.orderedSet as orderedSet def parseAngles(strAngle): angles = orderedSet.OrderedSet() angles.add(0.) if len(strAngle) == 1: strAngle = strAngle[0].split(',') for genAngle in strAngle: a = float(genAngle) while a <= 180.: angles.add(a) a+= float(genAngle) return angles
def distanceToPrimitives(cloud, assign, primitives): return [ [primVar.distanceTo(cloud[a[0]]) for primVar in primitives if primVar.uid == a[1]] for a in assign] + + + + import packages.orderedSet as orderedSet + def parseAngles(strAngle): + angles = orderedSet.OrderedSet() + angles.add(0.) + if len(strAngle) == 1: + strAngle = strAngle[0].split(',') + + for genAngle in strAngle: + a = float(genAngle) + while a <= 180.: + angles.add(a) + a+= float(genAngle) + + return angles +
c62b42eb528babebf96e56738031dcda97868e80
flowfairy/app.py
flowfairy/app.py
import tensorflow as tf import numpy as np import itertools as it import importlib from flowfairy.conf import settings from flowfairy.utils import take from flowfairy import data from flowfairy.feature import FeatureManager from flowfairy.core.queue import FlowQueue from flowfairy.core.stage import stage def load_net(): net = importlib.import_module(settings.NET).Net() return net def run(*args, **options): coord = tf.train.Coordinator() net = load_net() queues = [] with tf.variable_scope('network') as scope: for data_loader in data.provider: fts = FeatureManager(data_loader) queue = FlowQueue(fts, coord) queues.append(queue) X = queue.dequeue() func = getattr(net, data_loader.name) func(**dict(zip(fts.fields, X))) scope.reuse_variables() with tf.Session() as sess: stage.before(sess, net) for queue in queues: queue.start(sess) sess.run(tf.global_variables_initializer()) try: step = 1 while not coord.should_stop() and not net.should_stop(): stage.run(sess, step) step += 1 except KeyboardInterrupt: pass coord.request_stop() queue.stop() coord.join(stop_grace_period_secs=5)
import tensorflow as tf import numpy as np import itertools as it import importlib from flowfairy.conf import settings from flowfairy.utils import take from flowfairy import data from flowfairy.feature import FeatureManager from flowfairy.core.queue import FlowQueue from flowfairy.core.stage import stage def load_net(): net = importlib.import_module(settings.NET).Net() return net def run(*args, **options): coord = tf.train.Coordinator() net = load_net() queues = [] for data_loader in data.provider: with tf.variable_scope(data_loader.name) as scope: fts = FeatureManager(data_loader) queue = FlowQueue(fts, coord) queues.append(queue) X = queue.dequeue() func = getattr(net, data_loader.name) func(**dict(zip(fts.fields, X))) scope.reuse_variables() with tf.Session() as sess: stage.before(sess, net) for queue in queues: queue.start(sess) sess.run(tf.global_variables_initializer()) try: step = 1 while not coord.should_stop() and not net.should_stop(): stage.run(sess, step) step += 1 except KeyboardInterrupt: pass coord.request_stop() queue.stop() coord.join(stop_grace_period_secs=5)
Set name_scope of entire network to the dataset it handles
Set name_scope of entire network to the dataset it handles
Python
mit
WhatDo/FlowFairy
import tensorflow as tf import numpy as np import itertools as it import importlib from flowfairy.conf import settings from flowfairy.utils import take from flowfairy import data from flowfairy.feature import FeatureManager from flowfairy.core.queue import FlowQueue from flowfairy.core.stage import stage def load_net(): net = importlib.import_module(settings.NET).Net() return net def run(*args, **options): coord = tf.train.Coordinator() net = load_net() queues = [] - with tf.variable_scope('network') as scope: - for data_loader in data.provider: + for data_loader in data.provider: + with tf.variable_scope(data_loader.name) as scope: fts = FeatureManager(data_loader) queue = FlowQueue(fts, coord) queues.append(queue) X = queue.dequeue() func = getattr(net, data_loader.name) func(**dict(zip(fts.fields, X))) scope.reuse_variables() with tf.Session() as sess: stage.before(sess, net) for queue in queues: queue.start(sess) sess.run(tf.global_variables_initializer()) try: step = 1 while not coord.should_stop() and not net.should_stop(): stage.run(sess, step) step += 1 except KeyboardInterrupt: pass coord.request_stop() queue.stop() coord.join(stop_grace_period_secs=5)
Set name_scope of entire network to the dataset it handles
## Code Before: import tensorflow as tf import numpy as np import itertools as it import importlib from flowfairy.conf import settings from flowfairy.utils import take from flowfairy import data from flowfairy.feature import FeatureManager from flowfairy.core.queue import FlowQueue from flowfairy.core.stage import stage def load_net(): net = importlib.import_module(settings.NET).Net() return net def run(*args, **options): coord = tf.train.Coordinator() net = load_net() queues = [] with tf.variable_scope('network') as scope: for data_loader in data.provider: fts = FeatureManager(data_loader) queue = FlowQueue(fts, coord) queues.append(queue) X = queue.dequeue() func = getattr(net, data_loader.name) func(**dict(zip(fts.fields, X))) scope.reuse_variables() with tf.Session() as sess: stage.before(sess, net) for queue in queues: queue.start(sess) sess.run(tf.global_variables_initializer()) try: step = 1 while not coord.should_stop() and not net.should_stop(): stage.run(sess, step) step += 1 except KeyboardInterrupt: pass coord.request_stop() queue.stop() coord.join(stop_grace_period_secs=5) ## Instruction: Set name_scope of entire network to the dataset it handles ## Code After: import tensorflow as tf import numpy as np import itertools as it import importlib from flowfairy.conf import settings from flowfairy.utils import take from flowfairy import data from flowfairy.feature import FeatureManager from flowfairy.core.queue import FlowQueue from flowfairy.core.stage import stage def load_net(): net = importlib.import_module(settings.NET).Net() return net def run(*args, **options): coord = tf.train.Coordinator() net = load_net() queues = [] for data_loader in data.provider: with tf.variable_scope(data_loader.name) as scope: fts = FeatureManager(data_loader) queue = FlowQueue(fts, coord) queues.append(queue) X = queue.dequeue() func = getattr(net, data_loader.name) func(**dict(zip(fts.fields, X))) scope.reuse_variables() with tf.Session() as sess: stage.before(sess, net) 
for queue in queues: queue.start(sess) sess.run(tf.global_variables_initializer()) try: step = 1 while not coord.should_stop() and not net.should_stop(): stage.run(sess, step) step += 1 except KeyboardInterrupt: pass coord.request_stop() queue.stop() coord.join(stop_grace_period_secs=5)
import tensorflow as tf import numpy as np import itertools as it import importlib from flowfairy.conf import settings from flowfairy.utils import take from flowfairy import data from flowfairy.feature import FeatureManager from flowfairy.core.queue import FlowQueue from flowfairy.core.stage import stage def load_net(): net = importlib.import_module(settings.NET).Net() return net def run(*args, **options): coord = tf.train.Coordinator() net = load_net() queues = [] - with tf.variable_scope('network') as scope: - for data_loader in data.provider: ? ---- + for data_loader in data.provider: + with tf.variable_scope(data_loader.name) as scope: fts = FeatureManager(data_loader) queue = FlowQueue(fts, coord) queues.append(queue) X = queue.dequeue() func = getattr(net, data_loader.name) func(**dict(zip(fts.fields, X))) scope.reuse_variables() with tf.Session() as sess: stage.before(sess, net) for queue in queues: queue.start(sess) sess.run(tf.global_variables_initializer()) try: step = 1 while not coord.should_stop() and not net.should_stop(): stage.run(sess, step) step += 1 except KeyboardInterrupt: pass coord.request_stop() queue.stop() coord.join(stop_grace_period_secs=5)
6629a3a238432522d77f840b465eb99a3745593f
django_base64field/tests/models.py
django_base64field/tests/models.py
from django.db import models from django.utils.baseconv import base64 from django_base64field.fields import Base64Field class Planet(models.Model): ek = Base64Field() name = models.CharField( default='Fucker', max_length=103 ) class Continent(models.Model): ek = Base64Field() name = models.CharField( default='Suckers!', max_length=13 ) planet = models.ForeignKey(Planet, to_field='ek') class Helper(models.Model): """ base64 encoded value won't be available at first time creation. It can ve accessible by getting the object from database after creation mean when it get saved completely, But what if we don't want to get our base64 encoded key from our sweet model by retrieving it again from database? It's easy, efficient, holly and molly! """ ek = Base64Field() def _ek(self): if self.ek: return self.ek if not self.ek and self.pk: return base64.encode(self.pk) return self.ek class CustomReceiver(models.Model): """ Passing custom receiver to generate `youyouid` with a custom receiver. """ youyouid = Base64Field( encode_receiver='django_base64field.tests.receivers:custom_receiver' )
from django.db import models from django.utils.baseconv import base64 from django_base64field.fields import Base64Field class Planet(models.Model): # Making `ek` unique just because it will be used as `FK` # in other models. ek = Base64Field(unique=True) name = models.CharField( default='Fucker', max_length=103 ) class Continent(models.Model): ek = Base64Field() name = models.CharField( default='Suckers!', max_length=13 ) planet = models.ForeignKey(Planet, to_field='ek') class Helper(models.Model): """ base64 encoded value won't be available at first time creation. It can ve accessible by getting the object from database after creation mean when it get saved completely, But what if we don't want to get our base64 encoded key from our sweet model by retrieving it again from database? It's easy, efficient, holly and molly! """ ek = Base64Field() def _ek(self): if self.ek: return self.ek if not self.ek and self.pk: return base64.encode(self.pk) return self.ek class CustomReceiver(models.Model): """ Passing custom receiver to generate `youyouid` with a custom receiver. """ youyouid = Base64Field( encode_receiver='django_base64field.tests.receivers:custom_receiver' )
Add little bit comments for Planet model
Add little bit comments for Planet model
Python
bsd-3-clause
Alir3z4/django-base64field
from django.db import models from django.utils.baseconv import base64 from django_base64field.fields import Base64Field class Planet(models.Model): + # Making `ek` unique just because it will be used as `FK` + # in other models. - ek = Base64Field() + ek = Base64Field(unique=True) name = models.CharField( default='Fucker', max_length=103 ) class Continent(models.Model): ek = Base64Field() name = models.CharField( default='Suckers!', max_length=13 ) planet = models.ForeignKey(Planet, to_field='ek') class Helper(models.Model): """ base64 encoded value won't be available at first time creation. It can ve accessible by getting the object from database after creation mean when it get saved completely, But what if we don't want to get our base64 encoded key from our sweet model by retrieving it again from database? It's easy, efficient, holly and molly! """ ek = Base64Field() def _ek(self): if self.ek: return self.ek if not self.ek and self.pk: return base64.encode(self.pk) return self.ek class CustomReceiver(models.Model): """ Passing custom receiver to generate `youyouid` with a custom receiver. """ youyouid = Base64Field( encode_receiver='django_base64field.tests.receivers:custom_receiver' )
Add little bit comments for Planet model
## Code Before: from django.db import models from django.utils.baseconv import base64 from django_base64field.fields import Base64Field class Planet(models.Model): ek = Base64Field() name = models.CharField( default='Fucker', max_length=103 ) class Continent(models.Model): ek = Base64Field() name = models.CharField( default='Suckers!', max_length=13 ) planet = models.ForeignKey(Planet, to_field='ek') class Helper(models.Model): """ base64 encoded value won't be available at first time creation. It can ve accessible by getting the object from database after creation mean when it get saved completely, But what if we don't want to get our base64 encoded key from our sweet model by retrieving it again from database? It's easy, efficient, holly and molly! """ ek = Base64Field() def _ek(self): if self.ek: return self.ek if not self.ek and self.pk: return base64.encode(self.pk) return self.ek class CustomReceiver(models.Model): """ Passing custom receiver to generate `youyouid` with a custom receiver. """ youyouid = Base64Field( encode_receiver='django_base64field.tests.receivers:custom_receiver' ) ## Instruction: Add little bit comments for Planet model ## Code After: from django.db import models from django.utils.baseconv import base64 from django_base64field.fields import Base64Field class Planet(models.Model): # Making `ek` unique just because it will be used as `FK` # in other models. ek = Base64Field(unique=True) name = models.CharField( default='Fucker', max_length=103 ) class Continent(models.Model): ek = Base64Field() name = models.CharField( default='Suckers!', max_length=13 ) planet = models.ForeignKey(Planet, to_field='ek') class Helper(models.Model): """ base64 encoded value won't be available at first time creation. It can ve accessible by getting the object from database after creation mean when it get saved completely, But what if we don't want to get our base64 encoded key from our sweet model by retrieving it again from database? 
It's easy, efficient, holly and molly! """ ek = Base64Field() def _ek(self): if self.ek: return self.ek if not self.ek and self.pk: return base64.encode(self.pk) return self.ek class CustomReceiver(models.Model): """ Passing custom receiver to generate `youyouid` with a custom receiver. """ youyouid = Base64Field( encode_receiver='django_base64field.tests.receivers:custom_receiver' )
from django.db import models from django.utils.baseconv import base64 from django_base64field.fields import Base64Field class Planet(models.Model): + # Making `ek` unique just because it will be used as `FK` + # in other models. - ek = Base64Field() + ek = Base64Field(unique=True) ? +++++++++++ name = models.CharField( default='Fucker', max_length=103 ) class Continent(models.Model): ek = Base64Field() name = models.CharField( default='Suckers!', max_length=13 ) planet = models.ForeignKey(Planet, to_field='ek') class Helper(models.Model): """ base64 encoded value won't be available at first time creation. It can ve accessible by getting the object from database after creation mean when it get saved completely, But what if we don't want to get our base64 encoded key from our sweet model by retrieving it again from database? It's easy, efficient, holly and molly! """ ek = Base64Field() def _ek(self): if self.ek: return self.ek if not self.ek and self.pk: return base64.encode(self.pk) return self.ek class CustomReceiver(models.Model): """ Passing custom receiver to generate `youyouid` with a custom receiver. """ youyouid = Base64Field( encode_receiver='django_base64field.tests.receivers:custom_receiver' )
0ab7d60f02abe3bd4509c3377ebc6cb11f0a5e0f
ydf/templating.py
ydf/templating.py
import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
Add global for default template name.
Add global for default template name.
Python
apache-2.0
ahawker/ydf
import jinja2 import os from ydf import instructions, __version__ + DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
Add global for default template name.
## Code Before: import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env ## Instruction: Add global for default template name. ## Code After: import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. 
:param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
import jinja2 import os from ydf import instructions, __version__ + DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
7064916ddd2913856b9493670ca2d525fd412b06
crmapp/urls.py
crmapp/urls.py
from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() from marketing.views import HomePage urlpatterns = patterns('', # Marketing pages url(r'^$', HomePage.as_view(), name="home"), # Subscriber related URLs url(r'^signup/$', 'crmapp.subscribers.views.subscriber_new', name='sub_new' ), # Admin URL (r'^admin/', include(admin.site.urls)), # Login/Logout URLs # Account related URLs # Contact related URLS # Communication related URLs )
from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() from marketing.views import HomePage urlpatterns = patterns('', # Marketing pages url(r'^$', HomePage.as_view(), name="home"), # Subscriber related URLs url(r'^signup/$', 'crmapp.subscribers.views.subscriber_new', name='sub_new' ), # Admin URL (r'^admin/', include(admin.site.urls)), # Login/Logout URLs (r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'login.html'} ), (r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login/'} ), # Account related URLs # Contact related URLS # Communication related URLs )
Create the Login Page > Create the Login & Logout URLs
Create the Login Page > Create the Login & Logout URLs
Python
mit
deenaariff/Django,tabdon/crmeasyapp,tabdon/crmeasyapp
from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() from marketing.views import HomePage urlpatterns = patterns('', # Marketing pages url(r'^$', HomePage.as_view(), name="home"), # Subscriber related URLs url(r'^signup/$', 'crmapp.subscribers.views.subscriber_new', name='sub_new' ), # Admin URL (r'^admin/', include(admin.site.urls)), # Login/Logout URLs - + (r'^login/$', + 'django.contrib.auth.views.login', {'template_name': 'login.html'} + ), + (r'^logout/$', + 'django.contrib.auth.views.logout', {'next_page': '/login/'} + ), # Account related URLs # Contact related URLS # Communication related URLs )
Create the Login Page > Create the Login & Logout URLs
## Code Before: from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() from marketing.views import HomePage urlpatterns = patterns('', # Marketing pages url(r'^$', HomePage.as_view(), name="home"), # Subscriber related URLs url(r'^signup/$', 'crmapp.subscribers.views.subscriber_new', name='sub_new' ), # Admin URL (r'^admin/', include(admin.site.urls)), # Login/Logout URLs # Account related URLs # Contact related URLS # Communication related URLs ) ## Instruction: Create the Login Page > Create the Login & Logout URLs ## Code After: from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() from marketing.views import HomePage urlpatterns = patterns('', # Marketing pages url(r'^$', HomePage.as_view(), name="home"), # Subscriber related URLs url(r'^signup/$', 'crmapp.subscribers.views.subscriber_new', name='sub_new' ), # Admin URL (r'^admin/', include(admin.site.urls)), # Login/Logout URLs (r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'login.html'} ), (r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login/'} ), # Account related URLs # Contact related URLS # Communication related URLs )
from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() from marketing.views import HomePage urlpatterns = patterns('', # Marketing pages url(r'^$', HomePage.as_view(), name="home"), # Subscriber related URLs url(r'^signup/$', 'crmapp.subscribers.views.subscriber_new', name='sub_new' ), # Admin URL (r'^admin/', include(admin.site.urls)), # Login/Logout URLs - + (r'^login/$', + 'django.contrib.auth.views.login', {'template_name': 'login.html'} + ), + (r'^logout/$', + 'django.contrib.auth.views.logout', {'next_page': '/login/'} + ), # Account related URLs # Contact related URLS # Communication related URLs )
d7b7f157fd5758c1de22810d871642768f4eac68
trunk/metpy/__init__.py
trunk/metpy/__init__.py
import bl import readers import vis import tools import constants from calc import * import version __version__ = version.get_version()
import bl import readers import vis import tools import constants #What do we want to pull into the top-level namespace from calc import * from readers.mesonet import * import version __version__ = version.get_version()
Add mesonet readers to top level namespace.
Add mesonet readers to top level namespace. git-svn-id: acf0ef94bfce630b1a882387fc03ab8593ec6522@150 150532fb-1d5b-0410-a8ab-efec50f980d4
Python
bsd-3-clause
dopplershift/MetPy,deeplycloudy/MetPy,dopplershift/MetPy,Unidata/MetPy,Unidata/MetPy,ahaberlie/MetPy,ahaberlie/MetPy,jrleeman/MetPy,jrleeman/MetPy,ShawnMurd/MetPy,ahill818/MetPy
import bl import readers import vis import tools import constants + + #What do we want to pull into the top-level namespace from calc import * + from readers.mesonet import * import version __version__ = version.get_version()
Add mesonet readers to top level namespace.
## Code Before: import bl import readers import vis import tools import constants from calc import * import version __version__ = version.get_version() ## Instruction: Add mesonet readers to top level namespace. ## Code After: import bl import readers import vis import tools import constants #What do we want to pull into the top-level namespace from calc import * from readers.mesonet import * import version __version__ = version.get_version()
import bl import readers import vis import tools import constants + + #What do we want to pull into the top-level namespace from calc import * + from readers.mesonet import * import version __version__ = version.get_version()
2c41bfe7da9644b3a76adc5d2f1744107a3c40f4
core/git_mixins/rewrite.py
core/git_mixins/rewrite.py
from types import SimpleNamespace class ChangeTemplate(SimpleNamespace): # orig_hash do_commit = True msg = None datetime = None author = None class RewriteMixin(): ChangeTemplate = ChangeTemplate def rewrite_active_branch(self, base_commit, commit_chain): branch_name = self.get_current_branch_name() # Detach HEAD to base commit. self.checkout_ref(base_commit) # Apply each commit to HEAD in order. try: for commit in commit_chain: self.git( "cherry-pick", "--allow-empty", "--no-commit", commit.orig_hash ) # If squashing one commit into the next, do_commit should be # False so that it's changes are included in the next commit. if commit.do_commit: self.git( "commit", "--author", commit.author, "--date", commit.datetime, "-F", "-", stdin=commit.msg ) self.git("branch", "-f", branch_name, "HEAD") except Exception as e: raise e finally: # Whether on success or failure, always re-checkout the branch. On success, # this will be the re-written branch. On failure, this will be the original # branch (since re-defining the branch ref is the last step). self.git("checkout", branch_name)
from types import SimpleNamespace class ChangeTemplate(SimpleNamespace): # orig_hash do_commit = True msg = None datetime = None author = None class RewriteMixin(): ChangeTemplate = ChangeTemplate def rewrite_active_branch(self, base_commit, commit_chain): branch_name = self.get_current_branch_name() # Detach HEAD to base commit. self.checkout_ref(base_commit) # Apply each commit to HEAD in order. try: for commit in commit_chain: self.git( "cherry-pick", "--allow-empty", "--allow-empty-message", "--no-commit", commit.orig_hash ) # If squashing one commit into the next, do_commit should be # False so that it's changes are included in the next commit. if commit.do_commit: self.git( "commit", "--author", commit.author, "--date", commit.datetime, "-F", "-", stdin=commit.msg ) self.git("branch", "-f", branch_name, "HEAD") except Exception as e: raise e finally: # Whether on success or failure, always re-checkout the branch. On success, # this will be the re-written branch. On failure, this will be the original # branch (since re-defining the branch ref is the last step). self.git("checkout", branch_name)
Allow empty commit messages if explictly specified.
Allow empty commit messages if explictly specified.
Python
mit
theiviaxx/GitSavvy,jmanuel1/GitSavvy,dreki/GitSavvy,dvcrn/GitSavvy,dvcrn/GitSavvy,asfaltboy/GitSavvy,jmanuel1/GitSavvy,ddevlin/GitSavvy,ddevlin/GitSavvy,divmain/GitSavvy,stoivo/GitSavvy,stoivo/GitSavvy,divmain/GitSavvy,dreki/GitSavvy,ddevlin/GitSavvy,theiviaxx/GitSavvy,stoivo/GitSavvy,asfaltboy/GitSavvy,ralic/GitSavvy,divmain/GitSavvy,ralic/GitSavvy,asfaltboy/GitSavvy
from types import SimpleNamespace class ChangeTemplate(SimpleNamespace): # orig_hash do_commit = True msg = None datetime = None author = None class RewriteMixin(): ChangeTemplate = ChangeTemplate def rewrite_active_branch(self, base_commit, commit_chain): branch_name = self.get_current_branch_name() # Detach HEAD to base commit. self.checkout_ref(base_commit) # Apply each commit to HEAD in order. try: for commit in commit_chain: self.git( "cherry-pick", "--allow-empty", + "--allow-empty-message", "--no-commit", commit.orig_hash ) # If squashing one commit into the next, do_commit should be # False so that it's changes are included in the next commit. if commit.do_commit: self.git( "commit", "--author", commit.author, "--date", commit.datetime, "-F", "-", stdin=commit.msg ) self.git("branch", "-f", branch_name, "HEAD") except Exception as e: raise e finally: # Whether on success or failure, always re-checkout the branch. On success, # this will be the re-written branch. On failure, this will be the original # branch (since re-defining the branch ref is the last step). self.git("checkout", branch_name)
Allow empty commit messages if explictly specified.
## Code Before: from types import SimpleNamespace class ChangeTemplate(SimpleNamespace): # orig_hash do_commit = True msg = None datetime = None author = None class RewriteMixin(): ChangeTemplate = ChangeTemplate def rewrite_active_branch(self, base_commit, commit_chain): branch_name = self.get_current_branch_name() # Detach HEAD to base commit. self.checkout_ref(base_commit) # Apply each commit to HEAD in order. try: for commit in commit_chain: self.git( "cherry-pick", "--allow-empty", "--no-commit", commit.orig_hash ) # If squashing one commit into the next, do_commit should be # False so that it's changes are included in the next commit. if commit.do_commit: self.git( "commit", "--author", commit.author, "--date", commit.datetime, "-F", "-", stdin=commit.msg ) self.git("branch", "-f", branch_name, "HEAD") except Exception as e: raise e finally: # Whether on success or failure, always re-checkout the branch. On success, # this will be the re-written branch. On failure, this will be the original # branch (since re-defining the branch ref is the last step). self.git("checkout", branch_name) ## Instruction: Allow empty commit messages if explictly specified. ## Code After: from types import SimpleNamespace class ChangeTemplate(SimpleNamespace): # orig_hash do_commit = True msg = None datetime = None author = None class RewriteMixin(): ChangeTemplate = ChangeTemplate def rewrite_active_branch(self, base_commit, commit_chain): branch_name = self.get_current_branch_name() # Detach HEAD to base commit. self.checkout_ref(base_commit) # Apply each commit to HEAD in order. try: for commit in commit_chain: self.git( "cherry-pick", "--allow-empty", "--allow-empty-message", "--no-commit", commit.orig_hash ) # If squashing one commit into the next, do_commit should be # False so that it's changes are included in the next commit. 
if commit.do_commit: self.git( "commit", "--author", commit.author, "--date", commit.datetime, "-F", "-", stdin=commit.msg ) self.git("branch", "-f", branch_name, "HEAD") except Exception as e: raise e finally: # Whether on success or failure, always re-checkout the branch. On success, # this will be the re-written branch. On failure, this will be the original # branch (since re-defining the branch ref is the last step). self.git("checkout", branch_name)
from types import SimpleNamespace class ChangeTemplate(SimpleNamespace): # orig_hash do_commit = True msg = None datetime = None author = None class RewriteMixin(): ChangeTemplate = ChangeTemplate def rewrite_active_branch(self, base_commit, commit_chain): branch_name = self.get_current_branch_name() # Detach HEAD to base commit. self.checkout_ref(base_commit) # Apply each commit to HEAD in order. try: for commit in commit_chain: self.git( "cherry-pick", "--allow-empty", + "--allow-empty-message", "--no-commit", commit.orig_hash ) # If squashing one commit into the next, do_commit should be # False so that it's changes are included in the next commit. if commit.do_commit: self.git( "commit", "--author", commit.author, "--date", commit.datetime, "-F", "-", stdin=commit.msg ) self.git("branch", "-f", branch_name, "HEAD") except Exception as e: raise e finally: # Whether on success or failure, always re-checkout the branch. On success, # this will be the re-written branch. On failure, this will be the original # branch (since re-defining the branch ref is the last step). self.git("checkout", branch_name)
d82111c5415176ea07674723151f14445e4b52ab
fire_rs/firemodel/test_propagation.py
fire_rs/firemodel/test_propagation.py
import unittest import fire_rs.firemodel.propagation as propagation class TestPropagation(unittest.TestCase): def test_propagate(self): env = propagation.Environment([[475060.0, 477060.0], [6200074.0, 6202074.0]], wind_speed=4.11, wind_dir=0) prop = propagation.propagate(env, 10, 20) # prop.plot(blocking=True)
import unittest import fire_rs.firemodel.propagation as propagation class TestPropagation(unittest.TestCase): def test_propagate(self): env = propagation.Environment([[480060.0, 490060.0], [6210074.0, 6220074.0]], wind_speed=4.11, wind_dir=0) prop = propagation.propagate(env, 10, 20, horizon=3*3600) # prop.plot(blocking=True)
Set test area to a burnable one.
[fire-models] Set test area to a burnable one.
Python
bsd-2-clause
fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop
import unittest import fire_rs.firemodel.propagation as propagation class TestPropagation(unittest.TestCase): def test_propagate(self): - env = propagation.Environment([[475060.0, 477060.0], [6200074.0, 6202074.0]], wind_speed=4.11, wind_dir=0) + env = propagation.Environment([[480060.0, 490060.0], [6210074.0, 6220074.0]], wind_speed=4.11, wind_dir=0) - prop = propagation.propagate(env, 10, 20) + prop = propagation.propagate(env, 10, 20, horizon=3*3600) # prop.plot(blocking=True)
Set test area to a burnable one.
## Code Before: import unittest import fire_rs.firemodel.propagation as propagation class TestPropagation(unittest.TestCase): def test_propagate(self): env = propagation.Environment([[475060.0, 477060.0], [6200074.0, 6202074.0]], wind_speed=4.11, wind_dir=0) prop = propagation.propagate(env, 10, 20) # prop.plot(blocking=True) ## Instruction: Set test area to a burnable one. ## Code After: import unittest import fire_rs.firemodel.propagation as propagation class TestPropagation(unittest.TestCase): def test_propagate(self): env = propagation.Environment([[480060.0, 490060.0], [6210074.0, 6220074.0]], wind_speed=4.11, wind_dir=0) prop = propagation.propagate(env, 10, 20, horizon=3*3600) # prop.plot(blocking=True)
import unittest import fire_rs.firemodel.propagation as propagation class TestPropagation(unittest.TestCase): def test_propagate(self): - env = propagation.Environment([[475060.0, 477060.0], [6200074.0, 6202074.0]], wind_speed=4.11, wind_dir=0) ? ^^ ^^ ^ - + env = propagation.Environment([[480060.0, 490060.0], [6210074.0, 6220074.0]], wind_speed=4.11, wind_dir=0) ? ^^ ^^ ^ + - prop = propagation.propagate(env, 10, 20) + prop = propagation.propagate(env, 10, 20, horizon=3*3600) ? ++++++++++++++++ # prop.plot(blocking=True)
eab1de115f010922531a5a2c5f023bf2294f2af4
sendgrid/__init__.py
sendgrid/__init__.py
"""A small django app around sendgrid and its webhooks""" from utils import SendgridEmailMessage, SendgridEmailMultiAlternatives from models import Email from signals import email_event __version__ = '0.1.0' __all__ = ('SendgridEmailMessage', 'SendgridEmailMultiAlternatives', 'Email', 'email_event')
"""A small django app around sendgrid and its webhooks""" __version__ = '0.1.0'
Revert "add __all__ parameter to main module"
Revert "add __all__ parameter to main module" This reverts commit bc9e574206e75b1a50bd1b8eb4bd56f96a18cf51.
Python
bsd-2-clause
resmio/django-sendgrid
"""A small django app around sendgrid and its webhooks""" + __version__ = '0.1.0' - from utils import SendgridEmailMessage, SendgridEmailMultiAlternatives - from models import Email - from signals import email_event - __version__ = '0.1.0' - __all__ = ('SendgridEmailMessage', 'SendgridEmailMultiAlternatives', 'Email', 'email_event') -
Revert "add __all__ parameter to main module"
## Code Before: """A small django app around sendgrid and its webhooks""" from utils import SendgridEmailMessage, SendgridEmailMultiAlternatives from models import Email from signals import email_event __version__ = '0.1.0' __all__ = ('SendgridEmailMessage', 'SendgridEmailMultiAlternatives', 'Email', 'email_event') ## Instruction: Revert "add __all__ parameter to main module" ## Code After: """A small django app around sendgrid and its webhooks""" __version__ = '0.1.0'
"""A small django app around sendgrid and its webhooks""" - from utils import SendgridEmailMessage, SendgridEmailMultiAlternatives - from models import Email - from signals import email_event - __version__ = '0.1.0' - __all__ = ('SendgridEmailMessage', 'SendgridEmailMultiAlternatives', 'Email', 'email_event')
413ba364dc35a7186953d02bb7cc8cf705371873
contentious/constants.py
contentious/constants.py
from django.conf import settings SELF_CLOSING_HTML_TAGS = getattr(settings, 'CONTENTIOUS_SELF_CLOSING_HTML_TAGS', ['img', 'br', 'hr', 'meta']) #Note, the Javascript plugin has its own seprate copy of this: TREAT_CONTENT_AS_HTML_TAGS = getattr(settings, 'CONTENTIOUS_TREAT_CONTENT_AS_HTML_TAGS', ['div', 'select', 'ul'])
from django.conf import settings SELF_CLOSING_HTML_TAGS = ['img', 'br', 'hr', 'meta'] #Note, the Javascript plugin has its own seprate copy of this: TREAT_CONTENT_AS_HTML_TAGS = getattr(settings, 'CONTENTIOUS_TREAT_CONTENT_AS_HTML_TAGS', ['div', 'select', 'ul'])
Remove SELF_CLOSING_HTML_TAGS as a configurable option
Remove SELF_CLOSING_HTML_TAGS as a configurable option
Python
bsd-2-clause
potatolondon/contentious,potatolondon/contentious
from django.conf import settings - SELF_CLOSING_HTML_TAGS = getattr(settings, - 'CONTENTIOUS_SELF_CLOSING_HTML_TAGS', ['img', 'br', 'hr', 'meta']) + SELF_CLOSING_HTML_TAGS = ['img', 'br', 'hr', 'meta'] #Note, the Javascript plugin has its own seprate copy of this: TREAT_CONTENT_AS_HTML_TAGS = getattr(settings, 'CONTENTIOUS_TREAT_CONTENT_AS_HTML_TAGS', ['div', 'select', 'ul'])
Remove SELF_CLOSING_HTML_TAGS as a configurable option
## Code Before: from django.conf import settings SELF_CLOSING_HTML_TAGS = getattr(settings, 'CONTENTIOUS_SELF_CLOSING_HTML_TAGS', ['img', 'br', 'hr', 'meta']) #Note, the Javascript plugin has its own seprate copy of this: TREAT_CONTENT_AS_HTML_TAGS = getattr(settings, 'CONTENTIOUS_TREAT_CONTENT_AS_HTML_TAGS', ['div', 'select', 'ul']) ## Instruction: Remove SELF_CLOSING_HTML_TAGS as a configurable option ## Code After: from django.conf import settings SELF_CLOSING_HTML_TAGS = ['img', 'br', 'hr', 'meta'] #Note, the Javascript plugin has its own seprate copy of this: TREAT_CONTENT_AS_HTML_TAGS = getattr(settings, 'CONTENTIOUS_TREAT_CONTENT_AS_HTML_TAGS', ['div', 'select', 'ul'])
from django.conf import settings - SELF_CLOSING_HTML_TAGS = getattr(settings, - 'CONTENTIOUS_SELF_CLOSING_HTML_TAGS', ['img', 'br', 'hr', 'meta']) ? ----------------- ^^ - + SELF_CLOSING_HTML_TAGS = ['img', 'br', 'hr', 'meta'] ? ^^ #Note, the Javascript plugin has its own seprate copy of this: TREAT_CONTENT_AS_HTML_TAGS = getattr(settings, 'CONTENTIOUS_TREAT_CONTENT_AS_HTML_TAGS', ['div', 'select', 'ul'])
6f0d09ff5f81518daf30b00311ce4ac052e08c14
admission_notes/models.py
admission_notes/models.py
import datetime #from labsys.patients import Patient from django.db import models class AdmissionNote(models.Model): id_gal = models.CharField(max_length=30) requester = models.CharField( max_length=255, help_text="LACEN ou instituto que solicitou o exame", ) health_unit = models.CharField( max_length=255, help_text="unidade onde o paciente foi avaliado", ) state = models.CharField(max_length=2) city = models.CharField(max_length=255) admission_date = models.DateField( verbose_name="Data de entrada (LVRS)", help_text="quando a amostra chegou no LVRS", null=True, blank=True, ) def __str__(self): return "ID Gal: {}".format(self.id_gal)
import datetime #from labsys.patients import Patient from django.db import models class AdmissionNote(models.Model): id_gal = models.CharField( 'Número da requisição (GAL interno)', max_length=30 ) requester = models.CharField( 'Instituto solicitante', max_length=255, help_text='LACEN ou instituto que solicitou o exame', ) health_unit = models.CharField( 'Unidade de saúde', max_length=255, help_text='unidade onde o paciente foi avaliado', ) state = models.CharField(max_length=2) city = models.CharField(max_length=255) admission_date = models.DateField( 'Data de entrada (LVRS)', help_text='quando a amostra chegou no LVRS', null=True, blank=True, ) def __str__(self): return "ID Gal: {}".format(self.id_gal)
Rename fields and remove verbose_name attr (1st arg is it by default)
:art: Rename fields and remove verbose_name attr (1st arg is it by default)
Python
mit
gems-uff/labsys,gems-uff/labsys,gems-uff/labsys
import datetime #from labsys.patients import Patient from django.db import models + class AdmissionNote(models.Model): - id_gal = models.CharField(max_length=30) + id_gal = models.CharField( + 'Número da requisição (GAL interno)', + max_length=30 + ) requester = models.CharField( + 'Instituto solicitante', max_length=255, - help_text="LACEN ou instituto que solicitou o exame", + help_text='LACEN ou instituto que solicitou o exame', ) health_unit = models.CharField( + 'Unidade de saúde', max_length=255, - help_text="unidade onde o paciente foi avaliado", + help_text='unidade onde o paciente foi avaliado', ) state = models.CharField(max_length=2) city = models.CharField(max_length=255) admission_date = models.DateField( - verbose_name="Data de entrada (LVRS)", + 'Data de entrada (LVRS)', - help_text="quando a amostra chegou no LVRS", + help_text='quando a amostra chegou no LVRS', null=True, blank=True, ) def __str__(self): return "ID Gal: {}".format(self.id_gal)
Rename fields and remove verbose_name attr (1st arg is it by default)
## Code Before: import datetime #from labsys.patients import Patient from django.db import models class AdmissionNote(models.Model): id_gal = models.CharField(max_length=30) requester = models.CharField( max_length=255, help_text="LACEN ou instituto que solicitou o exame", ) health_unit = models.CharField( max_length=255, help_text="unidade onde o paciente foi avaliado", ) state = models.CharField(max_length=2) city = models.CharField(max_length=255) admission_date = models.DateField( verbose_name="Data de entrada (LVRS)", help_text="quando a amostra chegou no LVRS", null=True, blank=True, ) def __str__(self): return "ID Gal: {}".format(self.id_gal) ## Instruction: Rename fields and remove verbose_name attr (1st arg is it by default) ## Code After: import datetime #from labsys.patients import Patient from django.db import models class AdmissionNote(models.Model): id_gal = models.CharField( 'Número da requisição (GAL interno)', max_length=30 ) requester = models.CharField( 'Instituto solicitante', max_length=255, help_text='LACEN ou instituto que solicitou o exame', ) health_unit = models.CharField( 'Unidade de saúde', max_length=255, help_text='unidade onde o paciente foi avaliado', ) state = models.CharField(max_length=2) city = models.CharField(max_length=255) admission_date = models.DateField( 'Data de entrada (LVRS)', help_text='quando a amostra chegou no LVRS', null=True, blank=True, ) def __str__(self): return "ID Gal: {}".format(self.id_gal)
import datetime #from labsys.patients import Patient from django.db import models + class AdmissionNote(models.Model): - id_gal = models.CharField(max_length=30) ? -------------- + id_gal = models.CharField( + 'Número da requisição (GAL interno)', + max_length=30 + ) requester = models.CharField( + 'Instituto solicitante', max_length=255, - help_text="LACEN ou instituto que solicitou o exame", ? ^ ^ + help_text='LACEN ou instituto que solicitou o exame', ? ^ ^ ) health_unit = models.CharField( + 'Unidade de saúde', max_length=255, - help_text="unidade onde o paciente foi avaliado", ? ^ ^ + help_text='unidade onde o paciente foi avaliado', ? ^ ^ ) state = models.CharField(max_length=2) city = models.CharField(max_length=255) admission_date = models.DateField( - verbose_name="Data de entrada (LVRS)", ? ^^^^^^^^^^^^^^ ^ + 'Data de entrada (LVRS)', ? ^ ^ - help_text="quando a amostra chegou no LVRS", ? ^ ^ + help_text='quando a amostra chegou no LVRS', ? ^ ^ null=True, blank=True, ) def __str__(self): return "ID Gal: {}".format(self.id_gal)
8433fe04ad1230329de2c209a8625cd4b36b63f8
src/sentry/api/serializers/models/grouptagvalue.py
src/sentry/api/serializers/models/grouptagvalue.py
from __future__ import absolute_import from sentry.api.serializers import Serializer, register from sentry.models import GroupTagValue @register(GroupTagValue) class GroupTagValueSerializer(Serializer): def serialize(self, obj, attrs, user): d = { 'key': obj.key, 'value': obj.value, 'count': obj.times_seen, 'lastSeen': obj.last_seen, 'firstSeen': obj.first_seen, } return d
from __future__ import absolute_import from sentry.api.serializers import Serializer, register from sentry.models import GroupTagValue, TagValue @register(GroupTagValue) class GroupTagValueSerializer(Serializer): def get_attrs(self, item_list, user): assert len(set(i.key for i in item_list)) < 2 tagvalues = dict( (t.value, t) for t in TagValue.objects.filter( project=item_list[0].project, key=item_list[0].key, value__in=[i.value for i in item_list] ) ) result = {} for item in item_list: result[item] = { 'name': tagvalues[item.value].get_label(), } return result def serialize(self, obj, attrs, user): d = { 'name': attrs['name'], 'key': obj.key, 'value': obj.value, 'count': obj.times_seen, 'lastSeen': obj.last_seen, 'firstSeen': obj.first_seen, } return d
Implement labels on group tag values
Implement labels on group tag values
Python
bsd-3-clause
gencer/sentry,drcapulet/sentry,vperron/sentry,pauloschilling/sentry,kevinlondon/sentry,ifduyue/sentry,zenefits/sentry,JamesMura/sentry,jean/sentry,fotinakis/sentry,gencer/sentry,ngonzalvez/sentry,gg7/sentry,mvaled/sentry,JTCunning/sentry,alexm92/sentry,hongliang5623/sentry,Kryz/sentry,JackDanger/sentry,gg7/sentry,TedaLIEz/sentry,imankulov/sentry,vperron/sentry,imankulov/sentry,felixbuenemann/sentry,mvaled/sentry,Natim/sentry,BayanGroup/sentry,wong2/sentry,ewdurbin/sentry,wujuguang/sentry,jean/sentry,beeftornado/sentry,JTCunning/sentry,beeftornado/sentry,pauloschilling/sentry,ifduyue/sentry,BuildingLink/sentry,Natim/sentry,gencer/sentry,mitsuhiko/sentry,alexm92/sentry,songyi199111/sentry,kevinlondon/sentry,JackDanger/sentry,kevinastone/sentry,jean/sentry,beeftornado/sentry,fuziontech/sentry,kevinlondon/sentry,looker/sentry,JackDanger/sentry,mitsuhiko/sentry,fotinakis/sentry,1tush/sentry,boneyao/sentry,JamesMura/sentry,mvaled/sentry,korealerts1/sentry,zenefits/sentry,BuildingLink/sentry,BuildingLink/sentry,felixbuenemann/sentry,JamesMura/sentry,korealerts1/sentry,ifduyue/sentry,daevaorn/sentry,ngonzalvez/sentry,TedaLIEz/sentry,fotinakis/sentry,JTCunning/sentry,daevaorn/sentry,boneyao/sentry,zenefits/sentry,TedaLIEz/sentry,nicholasserra/sentry,jean/sentry,drcapulet/sentry,songyi199111/sentry,mvaled/sentry,BuildingLink/sentry,kevinastone/sentry,alexm92/sentry,BayanGroup/sentry,daevaorn/sentry,BuildingLink/sentry,gencer/sentry,drcapulet/sentry,wong2/sentry,looker/sentry,nicholasserra/sentry,JamesMura/sentry,kevinastone/sentry,wujuguang/sentry,fotinakis/sentry,jean/sentry,boneyao/sentry,fuziontech/sentry,imankulov/sentry,daevaorn/sentry,mvaled/sentry,vperron/sentry,gencer/sentry,looker/sentry,wong2/sentry,Natim/sentry,1tush/sentry,korealerts1/sentry,zenefits/sentry,zenefits/sentry,nicholasserra/sentry,ewdurbin/sentry,looker/sentry,Kryz/sentry,Kryz/sentry,mvaled/sentry,felixbuenemann/sentry,gg7/sentry,ifduyue/sentry,hongliang5623/sentry,looker/sentry,pauloschilling/sentry,
ewdurbin/sentry,fuziontech/sentry,songyi199111/sentry,JamesMura/sentry,BayanGroup/sentry,1tush/sentry,hongliang5623/sentry,ngonzalvez/sentry,ifduyue/sentry,wujuguang/sentry
from __future__ import absolute_import from sentry.api.serializers import Serializer, register - from sentry.models import GroupTagValue + from sentry.models import GroupTagValue, TagValue @register(GroupTagValue) class GroupTagValueSerializer(Serializer): + def get_attrs(self, item_list, user): + assert len(set(i.key for i in item_list)) < 2 + + tagvalues = dict( + (t.value, t) + for t in TagValue.objects.filter( + project=item_list[0].project, + key=item_list[0].key, + value__in=[i.value for i in item_list] + ) + ) + + result = {} + for item in item_list: + result[item] = { + 'name': tagvalues[item.value].get_label(), + } + return result + def serialize(self, obj, attrs, user): d = { + 'name': attrs['name'], 'key': obj.key, 'value': obj.value, 'count': obj.times_seen, 'lastSeen': obj.last_seen, 'firstSeen': obj.first_seen, } return d
Implement labels on group tag values
## Code Before: from __future__ import absolute_import from sentry.api.serializers import Serializer, register from sentry.models import GroupTagValue @register(GroupTagValue) class GroupTagValueSerializer(Serializer): def serialize(self, obj, attrs, user): d = { 'key': obj.key, 'value': obj.value, 'count': obj.times_seen, 'lastSeen': obj.last_seen, 'firstSeen': obj.first_seen, } return d ## Instruction: Implement labels on group tag values ## Code After: from __future__ import absolute_import from sentry.api.serializers import Serializer, register from sentry.models import GroupTagValue, TagValue @register(GroupTagValue) class GroupTagValueSerializer(Serializer): def get_attrs(self, item_list, user): assert len(set(i.key for i in item_list)) < 2 tagvalues = dict( (t.value, t) for t in TagValue.objects.filter( project=item_list[0].project, key=item_list[0].key, value__in=[i.value for i in item_list] ) ) result = {} for item in item_list: result[item] = { 'name': tagvalues[item.value].get_label(), } return result def serialize(self, obj, attrs, user): d = { 'name': attrs['name'], 'key': obj.key, 'value': obj.value, 'count': obj.times_seen, 'lastSeen': obj.last_seen, 'firstSeen': obj.first_seen, } return d
from __future__ import absolute_import from sentry.api.serializers import Serializer, register - from sentry.models import GroupTagValue + from sentry.models import GroupTagValue, TagValue ? ++++++++++ @register(GroupTagValue) class GroupTagValueSerializer(Serializer): + def get_attrs(self, item_list, user): + assert len(set(i.key for i in item_list)) < 2 + + tagvalues = dict( + (t.value, t) + for t in TagValue.objects.filter( + project=item_list[0].project, + key=item_list[0].key, + value__in=[i.value for i in item_list] + ) + ) + + result = {} + for item in item_list: + result[item] = { + 'name': tagvalues[item.value].get_label(), + } + return result + def serialize(self, obj, attrs, user): d = { + 'name': attrs['name'], 'key': obj.key, 'value': obj.value, 'count': obj.times_seen, 'lastSeen': obj.last_seen, 'firstSeen': obj.first_seen, } return d
db0253a228b3253e23bb5190fba9930a2f313d66
basictracer/context.py
basictracer/context.py
from __future__ import absolute_import import opentracing class SpanContext(opentracing.SpanContext): """SpanContext satisfies the opentracing.SpanContext contract. trace_id and span_id are uint64's, so their range is [0, 2^64). """ def __init__( self, trace_id=None, span_id=None, baggage=None, sampled=True): self.trace_id = trace_id self.span_id = span_id self.sampled = sampled self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE @property def baggage(self): return self._baggage or opentracing.SpanContext.EMPTY_BAGGAGE def with_baggage_item(self, key, value): new_baggage = self._baggage.copy() new_baggage[key] = value return SpanContext( trace_id=self.trace_id, span_id=self.span_id, sampled=self.sampled, baggage=new_baggage)
from __future__ import absolute_import import opentracing class SpanContext(opentracing.SpanContext): """SpanContext satisfies the opentracing.SpanContext contract. trace_id and span_id are uint64's, so their range is [0, 2^64). """ def __init__( self, trace_id=None, span_id=None, baggage=None, sampled=True): self.trace_id = trace_id self.span_id = span_id self.sampled = sampled self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE @property def baggage(self): return self._baggage def with_baggage_item(self, key, value): new_baggage = self._baggage.copy() new_baggage[key] = value return SpanContext( trace_id=self.trace_id, span_id=self.span_id, sampled=self.sampled, baggage=new_baggage)
Remove superfluous check for None baggage
Remove superfluous check for None baggage
Python
apache-2.0
opentracing/basictracer-python
from __future__ import absolute_import import opentracing class SpanContext(opentracing.SpanContext): """SpanContext satisfies the opentracing.SpanContext contract. trace_id and span_id are uint64's, so their range is [0, 2^64). """ def __init__( self, trace_id=None, span_id=None, baggage=None, sampled=True): self.trace_id = trace_id self.span_id = span_id self.sampled = sampled self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE @property def baggage(self): - return self._baggage or opentracing.SpanContext.EMPTY_BAGGAGE + return self._baggage def with_baggage_item(self, key, value): new_baggage = self._baggage.copy() new_baggage[key] = value return SpanContext( trace_id=self.trace_id, span_id=self.span_id, sampled=self.sampled, baggage=new_baggage)
Remove superfluous check for None baggage
## Code Before: from __future__ import absolute_import import opentracing class SpanContext(opentracing.SpanContext): """SpanContext satisfies the opentracing.SpanContext contract. trace_id and span_id are uint64's, so their range is [0, 2^64). """ def __init__( self, trace_id=None, span_id=None, baggage=None, sampled=True): self.trace_id = trace_id self.span_id = span_id self.sampled = sampled self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE @property def baggage(self): return self._baggage or opentracing.SpanContext.EMPTY_BAGGAGE def with_baggage_item(self, key, value): new_baggage = self._baggage.copy() new_baggage[key] = value return SpanContext( trace_id=self.trace_id, span_id=self.span_id, sampled=self.sampled, baggage=new_baggage) ## Instruction: Remove superfluous check for None baggage ## Code After: from __future__ import absolute_import import opentracing class SpanContext(opentracing.SpanContext): """SpanContext satisfies the opentracing.SpanContext contract. trace_id and span_id are uint64's, so their range is [0, 2^64). """ def __init__( self, trace_id=None, span_id=None, baggage=None, sampled=True): self.trace_id = trace_id self.span_id = span_id self.sampled = sampled self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE @property def baggage(self): return self._baggage def with_baggage_item(self, key, value): new_baggage = self._baggage.copy() new_baggage[key] = value return SpanContext( trace_id=self.trace_id, span_id=self.span_id, sampled=self.sampled, baggage=new_baggage)
from __future__ import absolute_import import opentracing class SpanContext(opentracing.SpanContext): """SpanContext satisfies the opentracing.SpanContext contract. trace_id and span_id are uint64's, so their range is [0, 2^64). """ def __init__( self, trace_id=None, span_id=None, baggage=None, sampled=True): self.trace_id = trace_id self.span_id = span_id self.sampled = sampled self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE @property def baggage(self): - return self._baggage or opentracing.SpanContext.EMPTY_BAGGAGE + return self._baggage def with_baggage_item(self, key, value): new_baggage = self._baggage.copy() new_baggage[key] = value return SpanContext( trace_id=self.trace_id, span_id=self.span_id, sampled=self.sampled, baggage=new_baggage)
104c136488d468f26c7fe247d0548636cbf3c6fe
random_4.py
random_4.py
""" How to generate a random 4 digit number not starting with 0 and having unique digits in python? """ import random l = [0,1,2,3,4,5,6,7,8,9] random.shuffle(l) if l[0] == 0: print(''.join(map(str, l[1:5]))) else: print(''.join(map(str, l[0:4])))
""" How to generate a random 4 digit number not starting with 0 and having unique digits in python? """ import random # 1. l = [0,1,2,3,4,5,6,7,8,9] random.shuffle(l) if l[0] == 0: pos = random.choice(range(1, len(l))) l[0], l[pos] = l[pos], l[0] print(''.join(map(str, l[0:4]))) # 2. # We create a set of digits: {0, 1, .... 9} digits = set(range(10)) # We generate a random integer, 1 <= first <= 9 first = random.randint(1, 9) # We remove it from our set, then take a sample of # 3 distinct elements from the remaining values last_3 = random.sample(digits - {first}, 3) print(str(first) + ''.join(map(str, last_3))) # 3. numbers = [0] while numbers[0] == 0: numbers = random.sample(range(10), 4) print(''.join(map(str, numbers)))
Fix of shuffle. There should be random swap of leading zero with one from nine (non-zero) positions.
Fix of shuffle. There should be random swap of leading zero with one from nine (non-zero) positions.
Python
mit
foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard
""" How to generate a random 4 digit number not starting with 0 and having unique digits in python? """ import random + + # 1. l = [0,1,2,3,4,5,6,7,8,9] random.shuffle(l) if l[0] == 0: - print(''.join(map(str, l[1:5]))) - else: + pos = random.choice(range(1, len(l))) + l[0], l[pos] = l[pos], l[0] - print(''.join(map(str, l[0:4]))) + print(''.join(map(str, l[0:4]))) + # 2. + # We create a set of digits: {0, 1, .... 9} + digits = set(range(10)) + # We generate a random integer, 1 <= first <= 9 + first = random.randint(1, 9) + # We remove it from our set, then take a sample of + # 3 distinct elements from the remaining values + last_3 = random.sample(digits - {first}, 3) + print(str(first) + ''.join(map(str, last_3))) + + # 3. + numbers = [0] + while numbers[0] == 0: + numbers = random.sample(range(10), 4) + print(''.join(map(str, numbers))) + +
Fix of shuffle. There should be random swap of leading zero with one from nine (non-zero) positions.
## Code Before: """ How to generate a random 4 digit number not starting with 0 and having unique digits in python? """ import random l = [0,1,2,3,4,5,6,7,8,9] random.shuffle(l) if l[0] == 0: print(''.join(map(str, l[1:5]))) else: print(''.join(map(str, l[0:4]))) ## Instruction: Fix of shuffle. There should be random swap of leading zero with one from nine (non-zero) positions. ## Code After: """ How to generate a random 4 digit number not starting with 0 and having unique digits in python? """ import random # 1. l = [0,1,2,3,4,5,6,7,8,9] random.shuffle(l) if l[0] == 0: pos = random.choice(range(1, len(l))) l[0], l[pos] = l[pos], l[0] print(''.join(map(str, l[0:4]))) # 2. # We create a set of digits: {0, 1, .... 9} digits = set(range(10)) # We generate a random integer, 1 <= first <= 9 first = random.randint(1, 9) # We remove it from our set, then take a sample of # 3 distinct elements from the remaining values last_3 = random.sample(digits - {first}, 3) print(str(first) + ''.join(map(str, last_3))) # 3. numbers = [0] while numbers[0] == 0: numbers = random.sample(range(10), 4) print(''.join(map(str, numbers)))
""" How to generate a random 4 digit number not starting with 0 and having unique digits in python? """ import random + + # 1. l = [0,1,2,3,4,5,6,7,8,9] random.shuffle(l) if l[0] == 0: - print(''.join(map(str, l[1:5]))) - else: + pos = random.choice(range(1, len(l))) + l[0], l[pos] = l[pos], l[0] - print(''.join(map(str, l[0:4]))) ? ---- + print(''.join(map(str, l[0:4]))) + + # 2. + # We create a set of digits: {0, 1, .... 9} + digits = set(range(10)) + # We generate a random integer, 1 <= first <= 9 + first = random.randint(1, 9) + # We remove it from our set, then take a sample of + # 3 distinct elements from the remaining values + last_3 = random.sample(digits - {first}, 3) + print(str(first) + ''.join(map(str, last_3))) + + # 3. + numbers = [0] + while numbers[0] == 0: + numbers = random.sample(range(10), 4) + print(''.join(map(str, numbers))) +
131f266e73139f1148ee3e9fcce8db40842afb88
sale_channel/models/account.py
sale_channel/models/account.py
from openerp import models, fields, api, _, exceptions class AccountTax(models.Model): _inherit = 'account.tax' @api.model def _get_sales_channel_domain(self): ids = self.env.ref('res_partner_category.sales_channel').ids return [('category_id', 'in', ids)] sales_channel_id = fields.Many2one('res.partner', string="Sales channel", ondelete='set null', domain=_get_sales_channel_domain)
from openerp import models, fields, api, _, exceptions class AccountTax(models.Model): _inherit = 'account.tax' @api.model def _get_sales_channel_domain(self): ids = self.env.ref('res_partner_category.sales_channel').ids return [('category_id', 'in', ids)] sales_channel_id = fields.Many2one('res.partner', string="Sales channel", ondelete='set null', domain=_get_sales_channel_domain) _sql_constraints = [ ('name_company_uniq', 'unique(name, company_id, sales_channel_id)', 'Tax Name must be unique per company and sales channel!'), ]
Add constraint, tax name must be unique for each company and sales channel
[IMP] Add constraint, tax name must be unique for each company and sales channel
Python
agpl-3.0
1200wd/1200wd_addons,1200wd/1200wd_addons
from openerp import models, fields, api, _, exceptions class AccountTax(models.Model): _inherit = 'account.tax' @api.model def _get_sales_channel_domain(self): ids = self.env.ref('res_partner_category.sales_channel').ids return [('category_id', 'in', ids)] sales_channel_id = fields.Many2one('res.partner', string="Sales channel", ondelete='set null', domain=_get_sales_channel_domain) + _sql_constraints = [ + ('name_company_uniq', 'unique(name, company_id, sales_channel_id)', 'Tax Name must be unique per company and sales channel!'), + ] +
Add constraint, tax name must be unique for each company and sales channel
## Code Before: from openerp import models, fields, api, _, exceptions class AccountTax(models.Model): _inherit = 'account.tax' @api.model def _get_sales_channel_domain(self): ids = self.env.ref('res_partner_category.sales_channel').ids return [('category_id', 'in', ids)] sales_channel_id = fields.Many2one('res.partner', string="Sales channel", ondelete='set null', domain=_get_sales_channel_domain) ## Instruction: Add constraint, tax name must be unique for each company and sales channel ## Code After: from openerp import models, fields, api, _, exceptions class AccountTax(models.Model): _inherit = 'account.tax' @api.model def _get_sales_channel_domain(self): ids = self.env.ref('res_partner_category.sales_channel').ids return [('category_id', 'in', ids)] sales_channel_id = fields.Many2one('res.partner', string="Sales channel", ondelete='set null', domain=_get_sales_channel_domain) _sql_constraints = [ ('name_company_uniq', 'unique(name, company_id, sales_channel_id)', 'Tax Name must be unique per company and sales channel!'), ]
from openerp import models, fields, api, _, exceptions class AccountTax(models.Model): _inherit = 'account.tax' @api.model def _get_sales_channel_domain(self): ids = self.env.ref('res_partner_category.sales_channel').ids return [('category_id', 'in', ids)] sales_channel_id = fields.Many2one('res.partner', string="Sales channel", ondelete='set null', domain=_get_sales_channel_domain) + + _sql_constraints = [ + ('name_company_uniq', 'unique(name, company_id, sales_channel_id)', 'Tax Name must be unique per company and sales channel!'), + ]
96d7a2a3a3250993084c1847436711ceaea988fc
app/database.py
app/database.py
from app import app from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.script import Manager, prompt_bool from datetime import datetime db = SQLAlchemy(app) manager = Manager(usage="Manage the database") @manager.command def create(): "Create the database" db.create_all() @manager.command def drop(): "Empty the database" if prompt_bool("Are you sure you want to drop all tables from the database?"): db.drop_all() @manager.command def recreate(): "Recreate the database" drop() create() class Urls(db.Model): __tablename__ = 'urls' id = db.Column(db.Integer, primary_key=True) url = db.Column(db.Text, unique=True) code = db.Column(db.Text, unique=True) clicks = db.Column(db.Integer, default=0) created = db.Column(db.DateTime(timezone=True), default=datetime.utcnow) def __init__(self, url, code): self.url = url self.code = code def __repr__(self): return "<Url ('%r', '%r')>" % (self.url, self.code)
from app import app from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.script import Manager, prompt_bool from datetime import datetime db = SQLAlchemy(app) manager = Manager(usage="Manage the database") @manager.command def create(): "Create the database" db.create_all() @manager.command def drop(): "Empty the database" if prompt_bool("Are you sure you want to drop all tables from the database?"): db.drop_all() @manager.command def recreate(): "Recreate the database" drop() create() class Urls(db.Model): __tablename__ = 'urls' id = db.Column(db.Integer, primary_key=True) url = db.Column(db.VARCHAR(length=255), unique=True) code = db.Column(db.VARCHAR(length=255), unique=True) clicks = db.Column(db.Integer, default=0) created = db.Column(db.DateTime(timezone=True), default=datetime.utcnow) def __init__(self, url, code): self.url = url self.code = code def __repr__(self): return "<Url ('%r', '%r')>" % (self.url, self.code)
Change unuique keys to MySQL varchar
Change unuique keys to MySQL varchar
Python
mit
taeram/idiocy,taeram/idiocy,taeram/idiocy
from app import app from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.script import Manager, prompt_bool from datetime import datetime db = SQLAlchemy(app) manager = Manager(usage="Manage the database") @manager.command def create(): "Create the database" db.create_all() @manager.command def drop(): "Empty the database" if prompt_bool("Are you sure you want to drop all tables from the database?"): db.drop_all() @manager.command def recreate(): "Recreate the database" drop() create() class Urls(db.Model): __tablename__ = 'urls' id = db.Column(db.Integer, primary_key=True) - url = db.Column(db.Text, unique=True) + url = db.Column(db.VARCHAR(length=255), unique=True) - code = db.Column(db.Text, unique=True) + code = db.Column(db.VARCHAR(length=255), unique=True) clicks = db.Column(db.Integer, default=0) created = db.Column(db.DateTime(timezone=True), default=datetime.utcnow) def __init__(self, url, code): self.url = url self.code = code def __repr__(self): return "<Url ('%r', '%r')>" % (self.url, self.code)
Change unuique keys to MySQL varchar
## Code Before: from app import app from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.script import Manager, prompt_bool from datetime import datetime db = SQLAlchemy(app) manager = Manager(usage="Manage the database") @manager.command def create(): "Create the database" db.create_all() @manager.command def drop(): "Empty the database" if prompt_bool("Are you sure you want to drop all tables from the database?"): db.drop_all() @manager.command def recreate(): "Recreate the database" drop() create() class Urls(db.Model): __tablename__ = 'urls' id = db.Column(db.Integer, primary_key=True) url = db.Column(db.Text, unique=True) code = db.Column(db.Text, unique=True) clicks = db.Column(db.Integer, default=0) created = db.Column(db.DateTime(timezone=True), default=datetime.utcnow) def __init__(self, url, code): self.url = url self.code = code def __repr__(self): return "<Url ('%r', '%r')>" % (self.url, self.code) ## Instruction: Change unuique keys to MySQL varchar ## Code After: from app import app from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.script import Manager, prompt_bool from datetime import datetime db = SQLAlchemy(app) manager = Manager(usage="Manage the database") @manager.command def create(): "Create the database" db.create_all() @manager.command def drop(): "Empty the database" if prompt_bool("Are you sure you want to drop all tables from the database?"): db.drop_all() @manager.command def recreate(): "Recreate the database" drop() create() class Urls(db.Model): __tablename__ = 'urls' id = db.Column(db.Integer, primary_key=True) url = db.Column(db.VARCHAR(length=255), unique=True) code = db.Column(db.VARCHAR(length=255), unique=True) clicks = db.Column(db.Integer, default=0) created = db.Column(db.DateTime(timezone=True), default=datetime.utcnow) def __init__(self, url, code): self.url = url self.code = code def __repr__(self): return "<Url ('%r', '%r')>" % (self.url, self.code)
from app import app from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.script import Manager, prompt_bool from datetime import datetime db = SQLAlchemy(app) manager = Manager(usage="Manage the database") @manager.command def create(): "Create the database" db.create_all() @manager.command def drop(): "Empty the database" if prompt_bool("Are you sure you want to drop all tables from the database?"): db.drop_all() @manager.command def recreate(): "Recreate the database" drop() create() class Urls(db.Model): __tablename__ = 'urls' id = db.Column(db.Integer, primary_key=True) - url = db.Column(db.Text, unique=True) ? ^ ^ + url = db.Column(db.VARCHAR(length=255), unique=True) ? ^^^^^^^^^ ^^ ++++++ - code = db.Column(db.Text, unique=True) ? ^ ^ + code = db.Column(db.VARCHAR(length=255), unique=True) ? ^^^^^^^^^ ^^ ++++++ clicks = db.Column(db.Integer, default=0) created = db.Column(db.DateTime(timezone=True), default=datetime.utcnow) def __init__(self, url, code): self.url = url self.code = code def __repr__(self): return "<Url ('%r', '%r')>" % (self.url, self.code)
e621b9f03b19e38dc6754dd1a4cb7b172e4891e7
tests/test_extended_tests.py
tests/test_extended_tests.py
import pytest import glob from html2kirby import HTML2Kirby files = [] for f in glob.glob("extended_tests/*.html"): html = f txt = f.replace(".html", ".txt") files.append((html, txt)) @pytest.mark.parametrize("html,kirby", files) def test_file(html, kirby): formatter = HTML2Kirby() with open(html, 'r') as html_file: formatter.feed(html_file.read()) with open(kirby, 'r') as kirby_file: expected_result = kirby_file.read() assert formatter.markdown.strip() == expected_result.strip()
import pytest import glob import os from html2kirby import HTML2Kirby files = [] path = os.path.dirname(os.path.abspath(__file__)) extended_tests_path = os.path.join(path, "extended_tests/*.html") for f in glob.glob(extended_tests_path): html = f txt = f.replace(".html", ".txt") files.append((html, txt)) @pytest.mark.parametrize("html,kirby", files) def test_file(html, kirby): formatter = HTML2Kirby() with open(html, 'r') as html_file: formatter.feed(html_file.read()) with open(kirby, 'r') as kirby_file: expected_result = kirby_file.read() assert formatter.markdown.strip() == expected_result.strip()
Fix the extended test search
Fix the extended test search
Python
mit
liip/html2kirby,liip/html2kirby
import pytest import glob + import os from html2kirby import HTML2Kirby files = [] + path = os.path.dirname(os.path.abspath(__file__)) + + extended_tests_path = os.path.join(path, "extended_tests/*.html") + - for f in glob.glob("extended_tests/*.html"): + for f in glob.glob(extended_tests_path): html = f txt = f.replace(".html", ".txt") files.append((html, txt)) @pytest.mark.parametrize("html,kirby", files) def test_file(html, kirby): formatter = HTML2Kirby() with open(html, 'r') as html_file: formatter.feed(html_file.read()) with open(kirby, 'r') as kirby_file: expected_result = kirby_file.read() assert formatter.markdown.strip() == expected_result.strip()
Fix the extended test search
## Code Before: import pytest import glob from html2kirby import HTML2Kirby files = [] for f in glob.glob("extended_tests/*.html"): html = f txt = f.replace(".html", ".txt") files.append((html, txt)) @pytest.mark.parametrize("html,kirby", files) def test_file(html, kirby): formatter = HTML2Kirby() with open(html, 'r') as html_file: formatter.feed(html_file.read()) with open(kirby, 'r') as kirby_file: expected_result = kirby_file.read() assert formatter.markdown.strip() == expected_result.strip() ## Instruction: Fix the extended test search ## Code After: import pytest import glob import os from html2kirby import HTML2Kirby files = [] path = os.path.dirname(os.path.abspath(__file__)) extended_tests_path = os.path.join(path, "extended_tests/*.html") for f in glob.glob(extended_tests_path): html = f txt = f.replace(".html", ".txt") files.append((html, txt)) @pytest.mark.parametrize("html,kirby", files) def test_file(html, kirby): formatter = HTML2Kirby() with open(html, 'r') as html_file: formatter.feed(html_file.read()) with open(kirby, 'r') as kirby_file: expected_result = kirby_file.read() assert formatter.markdown.strip() == expected_result.strip()
import pytest import glob + import os from html2kirby import HTML2Kirby files = [] + path = os.path.dirname(os.path.abspath(__file__)) + + extended_tests_path = os.path.join(path, "extended_tests/*.html") + - for f in glob.glob("extended_tests/*.html"): ? - ^^^ ---- + for f in glob.glob(extended_tests_path): ? ^^^^ html = f txt = f.replace(".html", ".txt") files.append((html, txt)) @pytest.mark.parametrize("html,kirby", files) def test_file(html, kirby): formatter = HTML2Kirby() with open(html, 'r') as html_file: formatter.feed(html_file.read()) with open(kirby, 'r') as kirby_file: expected_result = kirby_file.read() assert formatter.markdown.strip() == expected_result.strip()
e4ab52fc36b9d4e0805fb134d43bf63fb73a62d8
shcol/cli.py
shcol/cli.py
from __future__ import print_function import argparse import shcol __all__ = ['main'] def main(cmd_args): parser = argparse.ArgumentParser( description='Generate columnized output for given string items.', version='shcol {}'.format(shcol.__version__) ) parser.add_argument('items', nargs='+', help='the items to columnize') parser.add_argument( '-s', '--spacing', metavar='N', type=int, default=2, help='number of blanks between two columns (default: 2)' ) parser.add_argument( '-w', '--width', metavar='N', type=int, default=80, help='maximal amount of characters per line (default: 80)' ) parser.add_argument( '-S', '--sort', help='sort the items', action='store_true', default=False ) print(shcol.columnize(args.items, args.spacing, args.width, args.sort))
from __future__ import print_function import argparse import shcol import sys __all__ = ['main'] def main(cmd_args): parser = argparse.ArgumentParser( description='Generate columnized output for given string items.', version='shcol {}'.format(shcol.__version__) ) parser.add_argument( 'items', nargs='*', metavar='item', help='an item to columnize' ) parser.add_argument( '-s', '--spacing', metavar='N', type=int, default=2, help='number of blanks between two columns (default: 2)' ) parser.add_argument( '-w', '--width', metavar='N', type=int, default=80, help='maximal amount of characters per line (default: 80)' ) parser.add_argument( '-S', '--sort', action='store_true', default=False, help='sort the items' ) args = parser.parse_args(cmd_args[1:]) items = args.items or [line.rstrip('\n') for line in sys.stdin] print(shcol.columnize(items, args.spacing, args.width, args.sort))
Read from Stdin when no items are passed.
Read from Stdin when no items are passed.
Python
bsd-2-clause
seblin/shcol
from __future__ import print_function import argparse import shcol + import sys __all__ = ['main'] def main(cmd_args): parser = argparse.ArgumentParser( description='Generate columnized output for given string items.', version='shcol {}'.format(shcol.__version__) ) - parser.add_argument('items', nargs='+', help='the items to columnize') + parser.add_argument( + 'items', nargs='*', metavar='item', help='an item to columnize' + ) parser.add_argument( '-s', '--spacing', metavar='N', type=int, default=2, help='number of blanks between two columns (default: 2)' ) parser.add_argument( '-w', '--width', metavar='N', type=int, default=80, help='maximal amount of characters per line (default: 80)' ) parser.add_argument( - '-S', '--sort', help='sort the items', - action='store_true', default=False + '-S', '--sort', action='store_true', default=False, + help='sort the items' ) + args = parser.parse_args(cmd_args[1:]) + items = args.items or [line.rstrip('\n') for line in sys.stdin] - print(shcol.columnize(args.items, args.spacing, args.width, args.sort)) + print(shcol.columnize(items, args.spacing, args.width, args.sort))
Read from Stdin when no items are passed.
## Code Before: from __future__ import print_function import argparse import shcol __all__ = ['main'] def main(cmd_args): parser = argparse.ArgumentParser( description='Generate columnized output for given string items.', version='shcol {}'.format(shcol.__version__) ) parser.add_argument('items', nargs='+', help='the items to columnize') parser.add_argument( '-s', '--spacing', metavar='N', type=int, default=2, help='number of blanks between two columns (default: 2)' ) parser.add_argument( '-w', '--width', metavar='N', type=int, default=80, help='maximal amount of characters per line (default: 80)' ) parser.add_argument( '-S', '--sort', help='sort the items', action='store_true', default=False ) print(shcol.columnize(args.items, args.spacing, args.width, args.sort)) ## Instruction: Read from Stdin when no items are passed. ## Code After: from __future__ import print_function import argparse import shcol import sys __all__ = ['main'] def main(cmd_args): parser = argparse.ArgumentParser( description='Generate columnized output for given string items.', version='shcol {}'.format(shcol.__version__) ) parser.add_argument( 'items', nargs='*', metavar='item', help='an item to columnize' ) parser.add_argument( '-s', '--spacing', metavar='N', type=int, default=2, help='number of blanks between two columns (default: 2)' ) parser.add_argument( '-w', '--width', metavar='N', type=int, default=80, help='maximal amount of characters per line (default: 80)' ) parser.add_argument( '-S', '--sort', action='store_true', default=False, help='sort the items' ) args = parser.parse_args(cmd_args[1:]) items = args.items or [line.rstrip('\n') for line in sys.stdin] print(shcol.columnize(items, args.spacing, args.width, args.sort))
from __future__ import print_function import argparse import shcol + import sys __all__ = ['main'] def main(cmd_args): parser = argparse.ArgumentParser( description='Generate columnized output for given string items.', version='shcol {}'.format(shcol.__version__) ) - parser.add_argument('items', nargs='+', help='the items to columnize') + parser.add_argument( + 'items', nargs='*', metavar='item', help='an item to columnize' + ) parser.add_argument( '-s', '--spacing', metavar='N', type=int, default=2, help='number of blanks between two columns (default: 2)' ) parser.add_argument( '-w', '--width', metavar='N', type=int, default=80, help='maximal amount of characters per line (default: 80)' ) parser.add_argument( - '-S', '--sort', help='sort the items', - action='store_true', default=False + '-S', '--sort', action='store_true', default=False, ? ++++++++++++++++ + + help='sort the items' ) + args = parser.parse_args(cmd_args[1:]) + items = args.items or [line.rstrip('\n') for line in sys.stdin] - print(shcol.columnize(args.items, args.spacing, args.width, args.sort)) ? ----- + print(shcol.columnize(items, args.spacing, args.width, args.sort))
662046497abfa6f7f6553aeb266a261637ba6407
numba/postpasses.py
numba/postpasses.py
from __future__ import print_function, division, absolute_import from numba.support.math_support import math_support, libs default_postpasses = {} def register_default(name): def dec(f): default_postpasses[name] = f return f return dec # ______________________________________________________________________ # Postpasses @register_default('math') def postpass_link_math(env, ee, lmod, lfunc): "numba.math.* -> mathcode.*" replacements = {} for lf in lmod.functions: if lf.name.startswith('numba.math.'): _, _, name = lf.name.rpartition('.') replacements[lf.name] = name del lf # this is dead after linking below math_support.link_llvm_math_intrinsics(ee, lmod, libs.math_library, math_support.LLVMLinker(), replacements) return lfunc
from __future__ import print_function, division, absolute_import from numba.support.math_support import linking, libs default_postpasses = {} def register_default(name): def dec(f): default_postpasses[name] = f return f return dec # ______________________________________________________________________ # Postpasses @register_default('math') def postpass_link_math(env, ee, lmod, lfunc): "numba.math.* -> mathcode.*" replacements = {} for lf in lmod.functions: if lf.name.startswith('numba.math.'): _, _, name = lf.name.rpartition('.') replacements[lf.name] = name del lf # this is dead after linking below linking.link_llvm_math_intrinsics(ee, lmod, libs.math_library, linking.LLVMLinker(), replacements) return lfunc
Clean up old test, pass all tests
Clean up old test, pass all tests
Python
bsd-2-clause
jriehl/numba,jriehl/numba,stefanseefeld/numba,seibert/numba,gdementen/numba,sklam/numba,pitrou/numba,stefanseefeld/numba,stuartarchibald/numba,stonebig/numba,numba/numba,GaZ3ll3/numba,ssarangi/numba,sklam/numba,cpcloud/numba,sklam/numba,ssarangi/numba,stonebig/numba,gdementen/numba,pombredanne/numba,GaZ3ll3/numba,seibert/numba,seibert/numba,cpcloud/numba,gmarkall/numba,pombredanne/numba,sklam/numba,stefanseefeld/numba,pombredanne/numba,stonebig/numba,gmarkall/numba,pitrou/numba,GaZ3ll3/numba,ssarangi/numba,gdementen/numba,GaZ3ll3/numba,gdementen/numba,cpcloud/numba,jriehl/numba,stefanseefeld/numba,stuartarchibald/numba,gmarkall/numba,pitrou/numba,stonebig/numba,numba/numba,stuartarchibald/numba,pombredanne/numba,pombredanne/numba,stuartarchibald/numba,cpcloud/numba,jriehl/numba,stuartarchibald/numba,GaZ3ll3/numba,pitrou/numba,numba/numba,IntelLabs/numba,gmarkall/numba,stonebig/numba,ssarangi/numba,seibert/numba,stefanseefeld/numba,IntelLabs/numba,sklam/numba,seibert/numba,IntelLabs/numba,jriehl/numba,numba/numba,gdementen/numba,pitrou/numba,gmarkall/numba,cpcloud/numba,IntelLabs/numba,IntelLabs/numba,numba/numba,ssarangi/numba
from __future__ import print_function, division, absolute_import - from numba.support.math_support import math_support, libs + from numba.support.math_support import linking, libs default_postpasses = {} def register_default(name): def dec(f): default_postpasses[name] = f return f return dec # ______________________________________________________________________ # Postpasses @register_default('math') def postpass_link_math(env, ee, lmod, lfunc): "numba.math.* -> mathcode.*" replacements = {} for lf in lmod.functions: if lf.name.startswith('numba.math.'): _, _, name = lf.name.rpartition('.') replacements[lf.name] = name del lf # this is dead after linking below - math_support.link_llvm_math_intrinsics(ee, lmod, libs.math_library, + linking.link_llvm_math_intrinsics(ee, lmod, libs.math_library, - math_support.LLVMLinker(), + linking.LLVMLinker(), replacements) return lfunc
Clean up old test, pass all tests
## Code Before: from __future__ import print_function, division, absolute_import from numba.support.math_support import math_support, libs default_postpasses = {} def register_default(name): def dec(f): default_postpasses[name] = f return f return dec # ______________________________________________________________________ # Postpasses @register_default('math') def postpass_link_math(env, ee, lmod, lfunc): "numba.math.* -> mathcode.*" replacements = {} for lf in lmod.functions: if lf.name.startswith('numba.math.'): _, _, name = lf.name.rpartition('.') replacements[lf.name] = name del lf # this is dead after linking below math_support.link_llvm_math_intrinsics(ee, lmod, libs.math_library, math_support.LLVMLinker(), replacements) return lfunc ## Instruction: Clean up old test, pass all tests ## Code After: from __future__ import print_function, division, absolute_import from numba.support.math_support import linking, libs default_postpasses = {} def register_default(name): def dec(f): default_postpasses[name] = f return f return dec # ______________________________________________________________________ # Postpasses @register_default('math') def postpass_link_math(env, ee, lmod, lfunc): "numba.math.* -> mathcode.*" replacements = {} for lf in lmod.functions: if lf.name.startswith('numba.math.'): _, _, name = lf.name.rpartition('.') replacements[lf.name] = name del lf # this is dead after linking below linking.link_llvm_math_intrinsics(ee, lmod, libs.math_library, linking.LLVMLinker(), replacements) return lfunc
from __future__ import print_function, division, absolute_import - from numba.support.math_support import math_support, libs ? ^^^^^^^^^^^^ + from numba.support.math_support import linking, libs ? ^^^^^^^ default_postpasses = {} def register_default(name): def dec(f): default_postpasses[name] = f return f return dec # ______________________________________________________________________ # Postpasses @register_default('math') def postpass_link_math(env, ee, lmod, lfunc): "numba.math.* -> mathcode.*" replacements = {} for lf in lmod.functions: if lf.name.startswith('numba.math.'): _, _, name = lf.name.rpartition('.') replacements[lf.name] = name del lf # this is dead after linking below - math_support.link_llvm_math_intrinsics(ee, lmod, libs.math_library, ? ^^^^^^^^^^^^ + linking.link_llvm_math_intrinsics(ee, lmod, libs.math_library, ? ^^^^^^^ - math_support.LLVMLinker(), ? ^^^^^^^^^^^^ + linking.LLVMLinker(), ? ^^^^^^^ replacements) return lfunc
cd2ecd3bede2886c384e4761f7052cfacb7d24ae
modules/serialize.py
modules/serialize.py
import sublime import json import os from ..json import encoder from ..json import decoder from . import settings _DEFAULT_PATH = os.path.join('User', 'sessions') _DEFAULT_EXTENSION = 'json' def dump(name, session): session_path = _generate_path(name) with open(session_path, 'w') as f: json.dump(session, f, cls=encoder.SessionEncoder) def load(name): session_path = _generate_path(name) with open(session_path, 'r') as f: return json.load(f, cls=decoder.SessionDecoder) def _generate_path(name): path = settings.get('session_path') if not path: path = _DEFAULT_PATH folder = os.path.join(sublime.packages_path(), path) # Ensure the folder exists os.makedirs(folder, exist_ok=True) return os.path.join(folder, _generate_name(name)) def _generate_name(name, extension=_DEFAULT_EXTENSION): return '.'.join([name, extension])
import sublime import json import os from ..json import encoder from ..json import decoder from . import settings _DEFAULT_PATH = os.path.join('User', 'sessions') _DEFAULT_EXTENSION = '.sublime-session' def dump(name, session): session_path = _generate_path(name) with open(session_path, 'w') as f: json.dump(session, f, cls=encoder.SessionEncoder) def load(name): session_path = _generate_path(name) with open(session_path, 'r') as f: return json.load(f, cls=decoder.SessionDecoder) def _generate_path(name): return os.path.join(_generate_folder(), _generate_name(name)) def _generate_folder(): folder = settings.get('session_path') if folder: folder = os.path.normpath(folder) else: folder = os.path.join(sublime.packages_path(), _DEFAULT_PATH) # Ensure the folder exists os.makedirs(folder, exist_ok=True) return folder def _generate_name(name, extension=_DEFAULT_EXTENSION): return ''.join([name, extension])
Use "sublime-session" as file extension
Use "sublime-session" as file extension Furthermore fix some bugs in serialize.py
Python
mit
Zeeker/sublime-SessionManager
import sublime import json import os from ..json import encoder from ..json import decoder from . import settings _DEFAULT_PATH = os.path.join('User', 'sessions') - _DEFAULT_EXTENSION = 'json' + _DEFAULT_EXTENSION = '.sublime-session' def dump(name, session): session_path = _generate_path(name) with open(session_path, 'w') as f: json.dump(session, f, cls=encoder.SessionEncoder) def load(name): session_path = _generate_path(name) with open(session_path, 'r') as f: return json.load(f, cls=decoder.SessionDecoder) def _generate_path(name): + return os.path.join(_generate_folder(), _generate_name(name)) - path = settings.get('session_path') - if not path: - path = _DEFAULT_PATH + + def _generate_folder(): + folder = settings.get('session_path') + if folder: + folder = os.path.normpath(folder) + else: - folder = os.path.join(sublime.packages_path(), path) + folder = os.path.join(sublime.packages_path(), _DEFAULT_PATH) # Ensure the folder exists os.makedirs(folder, exist_ok=True) - return os.path.join(folder, _generate_name(name)) + return folder def _generate_name(name, extension=_DEFAULT_EXTENSION): - return '.'.join([name, extension]) + return ''.join([name, extension])
Use "sublime-session" as file extension
## Code Before: import sublime import json import os from ..json import encoder from ..json import decoder from . import settings _DEFAULT_PATH = os.path.join('User', 'sessions') _DEFAULT_EXTENSION = 'json' def dump(name, session): session_path = _generate_path(name) with open(session_path, 'w') as f: json.dump(session, f, cls=encoder.SessionEncoder) def load(name): session_path = _generate_path(name) with open(session_path, 'r') as f: return json.load(f, cls=decoder.SessionDecoder) def _generate_path(name): path = settings.get('session_path') if not path: path = _DEFAULT_PATH folder = os.path.join(sublime.packages_path(), path) # Ensure the folder exists os.makedirs(folder, exist_ok=True) return os.path.join(folder, _generate_name(name)) def _generate_name(name, extension=_DEFAULT_EXTENSION): return '.'.join([name, extension]) ## Instruction: Use "sublime-session" as file extension ## Code After: import sublime import json import os from ..json import encoder from ..json import decoder from . import settings _DEFAULT_PATH = os.path.join('User', 'sessions') _DEFAULT_EXTENSION = '.sublime-session' def dump(name, session): session_path = _generate_path(name) with open(session_path, 'w') as f: json.dump(session, f, cls=encoder.SessionEncoder) def load(name): session_path = _generate_path(name) with open(session_path, 'r') as f: return json.load(f, cls=decoder.SessionDecoder) def _generate_path(name): return os.path.join(_generate_folder(), _generate_name(name)) def _generate_folder(): folder = settings.get('session_path') if folder: folder = os.path.normpath(folder) else: folder = os.path.join(sublime.packages_path(), _DEFAULT_PATH) # Ensure the folder exists os.makedirs(folder, exist_ok=True) return folder def _generate_name(name, extension=_DEFAULT_EXTENSION): return ''.join([name, extension])
import sublime import json import os from ..json import encoder from ..json import decoder from . import settings _DEFAULT_PATH = os.path.join('User', 'sessions') - _DEFAULT_EXTENSION = 'json' ? ^ + _DEFAULT_EXTENSION = '.sublime-session' ? ^ ++++++++++++ def dump(name, session): session_path = _generate_path(name) with open(session_path, 'w') as f: json.dump(session, f, cls=encoder.SessionEncoder) def load(name): session_path = _generate_path(name) with open(session_path, 'r') as f: return json.load(f, cls=decoder.SessionDecoder) def _generate_path(name): + return os.path.join(_generate_folder(), _generate_name(name)) - path = settings.get('session_path') - if not path: - path = _DEFAULT_PATH + + def _generate_folder(): + folder = settings.get('session_path') + if folder: + folder = os.path.normpath(folder) + else: - folder = os.path.join(sublime.packages_path(), path) ? ^^^^ + folder = os.path.join(sublime.packages_path(), _DEFAULT_PATH) ? ++++ ^^^^^^^^^^^^^ # Ensure the folder exists os.makedirs(folder, exist_ok=True) - return os.path.join(folder, _generate_name(name)) + return folder def _generate_name(name, extension=_DEFAULT_EXTENSION): - return '.'.join([name, extension]) ? - + return ''.join([name, extension])
f48c15a6b0c09db26a0f1b0e8846acf1c5e8cc62
plyer/platforms/ios/gyroscope.py
plyer/platforms/ios/gyroscope.py
''' iOS Gyroscope --------------------- ''' from plyer.facades import Gyroscope from pyobjus import autoclass from pyobjus.dylib_manager import load_framework load_framework('/System/Library/Frameworks/UIKit.framework') UIDevice = autoclass('UIDevice') device = UIDevice.currentDevice() class IosGyroscope(Gyroscope): def __init__(self): super(IosGyroscope, self).__init__() self.bridge = autoclass('bridge').alloc().init() if int(device.systemVersion.UTF8String().split('.')[0]) <= 4: self.bridge.motionManager.setGyroscopeUpdateInterval_(0.1) else: self.bridge.motionManager.setGyroUpdateInterval_(0.1) def _enable(self): self.bridge.startGyroscope() def _disable(self): self.bridge.stopGyroscope() def _get_orientation(self): return ( self.bridge.gy_x, self.bridge.gy_y, self.bridge.gy_z) def instance(): return IosGyroscope()
''' iOS Gyroscope --------------------- ''' from plyer.facades import Gyroscope from pyobjus import autoclass from pyobjus.dylib_manager import load_framework load_framework('/System/Library/Frameworks/UIKit.framework') UIDevice = autoclass('UIDevice') device = UIDevice.currentDevice() class IosGyroscope(Gyroscope): def __init__(self): super(IosGyroscope, self).__init__() self.bridge = autoclass('bridge').alloc().init() if int(device.systemVersion.UTF8String().split('.')[0]) <= 4: self.bridge.motionManager.setGyroscopeUpdateInterval_(0.1) else: self.bridge.motionManager.setGyroUpdateInterval_(0.1) self.bridge.motionManager.setDeviceMotionUpdateInterval_(0.1) def _enable(self): self.bridge.startGyroscope() self.bridge.startDeviceMotion() def _disable(self): self.bridge.stopGyroscope() self.bridge.stopDeviceMotion() def _get_orientation(self): return ( self.bridge.rotation_rate_x, self.bridge.rotation_rate_y, self.bridge.rotation_rate_z) def _get_rotation_uncalib(self): return ( self.bridge.gy_x, self.bridge.gy_y, self.bridge.gy_z, self.bridge.gy_x - self.bridge.rotation_rate_x, self.bridge.gy_y - self.bridge.rotation_rate_y, self.bridge.gy_z - self.bridge.rotation_rate_z) def instance(): return IosGyroscope()
Add method for uncalibrated values of iOS Gyroscope
Add method for uncalibrated values of iOS Gyroscope
Python
mit
KeyWeeUsr/plyer,KeyWeeUsr/plyer,kivy/plyer,KeyWeeUsr/plyer,kivy/plyer,kivy/plyer
''' iOS Gyroscope --------------------- ''' from plyer.facades import Gyroscope from pyobjus import autoclass from pyobjus.dylib_manager import load_framework load_framework('/System/Library/Frameworks/UIKit.framework') UIDevice = autoclass('UIDevice') device = UIDevice.currentDevice() class IosGyroscope(Gyroscope): def __init__(self): super(IosGyroscope, self).__init__() self.bridge = autoclass('bridge').alloc().init() if int(device.systemVersion.UTF8String().split('.')[0]) <= 4: self.bridge.motionManager.setGyroscopeUpdateInterval_(0.1) else: self.bridge.motionManager.setGyroUpdateInterval_(0.1) + self.bridge.motionManager.setDeviceMotionUpdateInterval_(0.1) + def _enable(self): self.bridge.startGyroscope() + self.bridge.startDeviceMotion() def _disable(self): self.bridge.stopGyroscope() + self.bridge.stopDeviceMotion() def _get_orientation(self): return ( + self.bridge.rotation_rate_x, + self.bridge.rotation_rate_y, + self.bridge.rotation_rate_z) + + def _get_rotation_uncalib(self): + return ( self.bridge.gy_x, self.bridge.gy_y, - self.bridge.gy_z) + self.bridge.gy_z, + self.bridge.gy_x - self.bridge.rotation_rate_x, + self.bridge.gy_y - self.bridge.rotation_rate_y, + self.bridge.gy_z - self.bridge.rotation_rate_z) def instance(): return IosGyroscope()
Add method for uncalibrated values of iOS Gyroscope
## Code Before: ''' iOS Gyroscope --------------------- ''' from plyer.facades import Gyroscope from pyobjus import autoclass from pyobjus.dylib_manager import load_framework load_framework('/System/Library/Frameworks/UIKit.framework') UIDevice = autoclass('UIDevice') device = UIDevice.currentDevice() class IosGyroscope(Gyroscope): def __init__(self): super(IosGyroscope, self).__init__() self.bridge = autoclass('bridge').alloc().init() if int(device.systemVersion.UTF8String().split('.')[0]) <= 4: self.bridge.motionManager.setGyroscopeUpdateInterval_(0.1) else: self.bridge.motionManager.setGyroUpdateInterval_(0.1) def _enable(self): self.bridge.startGyroscope() def _disable(self): self.bridge.stopGyroscope() def _get_orientation(self): return ( self.bridge.gy_x, self.bridge.gy_y, self.bridge.gy_z) def instance(): return IosGyroscope() ## Instruction: Add method for uncalibrated values of iOS Gyroscope ## Code After: ''' iOS Gyroscope --------------------- ''' from plyer.facades import Gyroscope from pyobjus import autoclass from pyobjus.dylib_manager import load_framework load_framework('/System/Library/Frameworks/UIKit.framework') UIDevice = autoclass('UIDevice') device = UIDevice.currentDevice() class IosGyroscope(Gyroscope): def __init__(self): super(IosGyroscope, self).__init__() self.bridge = autoclass('bridge').alloc().init() if int(device.systemVersion.UTF8String().split('.')[0]) <= 4: self.bridge.motionManager.setGyroscopeUpdateInterval_(0.1) else: self.bridge.motionManager.setGyroUpdateInterval_(0.1) self.bridge.motionManager.setDeviceMotionUpdateInterval_(0.1) def _enable(self): self.bridge.startGyroscope() self.bridge.startDeviceMotion() def _disable(self): self.bridge.stopGyroscope() self.bridge.stopDeviceMotion() def _get_orientation(self): return ( self.bridge.rotation_rate_x, self.bridge.rotation_rate_y, self.bridge.rotation_rate_z) def _get_rotation_uncalib(self): return ( self.bridge.gy_x, self.bridge.gy_y, self.bridge.gy_z, self.bridge.gy_x - 
self.bridge.rotation_rate_x, self.bridge.gy_y - self.bridge.rotation_rate_y, self.bridge.gy_z - self.bridge.rotation_rate_z) def instance(): return IosGyroscope()
''' iOS Gyroscope --------------------- ''' from plyer.facades import Gyroscope from pyobjus import autoclass from pyobjus.dylib_manager import load_framework load_framework('/System/Library/Frameworks/UIKit.framework') UIDevice = autoclass('UIDevice') device = UIDevice.currentDevice() class IosGyroscope(Gyroscope): def __init__(self): super(IosGyroscope, self).__init__() self.bridge = autoclass('bridge').alloc().init() if int(device.systemVersion.UTF8String().split('.')[0]) <= 4: self.bridge.motionManager.setGyroscopeUpdateInterval_(0.1) else: self.bridge.motionManager.setGyroUpdateInterval_(0.1) + self.bridge.motionManager.setDeviceMotionUpdateInterval_(0.1) + def _enable(self): self.bridge.startGyroscope() + self.bridge.startDeviceMotion() def _disable(self): self.bridge.stopGyroscope() + self.bridge.stopDeviceMotion() def _get_orientation(self): return ( + self.bridge.rotation_rate_x, + self.bridge.rotation_rate_y, + self.bridge.rotation_rate_z) + + def _get_rotation_uncalib(self): + return ( self.bridge.gy_x, self.bridge.gy_y, - self.bridge.gy_z) ? ^ + self.bridge.gy_z, ? ^ + self.bridge.gy_x - self.bridge.rotation_rate_x, + self.bridge.gy_y - self.bridge.rotation_rate_y, + self.bridge.gy_z - self.bridge.rotation_rate_z) def instance(): return IosGyroscope()
5a47ca87858bb08fcaac4a38322dc04eaf74cac2
src/foremast/utils/get_sns_topic_arn.py
src/foremast/utils/get_sns_topic_arn.py
"""SNS Topic functions.""" import logging import boto3 from ..exceptions import SNSTopicNotFound LOG = logging.getLogger(__name__) def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic
"""SNS Topic functions.""" import logging import boto3 from ..exceptions import SNSTopicNotFound LOG = logging.getLogger(__name__) def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ if topic_name.count(':') == 5: return topic_name session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic
Return ARN directly if topic name appears to be an ARN
Return ARN directly if topic name appears to be an ARN
Python
apache-2.0
gogoair/foremast,gogoair/foremast
"""SNS Topic functions.""" import logging import boto3 from ..exceptions import SNSTopicNotFound LOG = logging.getLogger(__name__) def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ + if topic_name.count(':') == 5: + return topic_name session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic
Return ARN directly if topic name appears to be an ARN
## Code Before: """SNS Topic functions.""" import logging import boto3 from ..exceptions import SNSTopicNotFound LOG = logging.getLogger(__name__) def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic ## Instruction: Return ARN directly if topic name appears to be an ARN ## Code After: """SNS Topic functions.""" import logging import boto3 from ..exceptions import SNSTopicNotFound LOG = logging.getLogger(__name__) def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ if topic_name.count(':') == 5: return topic_name session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic
"""SNS Topic functions.""" import logging import boto3 from ..exceptions import SNSTopicNotFound LOG = logging.getLogger(__name__) def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ + if topic_name.count(':') == 5: + return topic_name session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic
dfe84075109620481cac493c1d0dba69d9ca19df
vesper/tests/test_case_mixin.py
vesper/tests/test_case_mixin.py
import vesper.util.numpy_utils as numpy_utils class TestCaseMixin: def assert_raises(self, exception_class, function, *args, **kwargs): self.assertRaises(exception_class, function, *args, **kwargs) try: function(*args, **kwargs) except exception_class as e: pass # print(str(e)) def assert_arrays_equal(self, x, y): self.assertTrue(numpy_utils.arrays_equal(x, y)) def assert_arrays_close(self, x, y): self.assertTrue(numpy_utils.arrays_close(x, y))
import vesper.util.numpy_utils as numpy_utils SHOW_EXCEPTION_MESSAGES = False class TestCaseMixin: def assert_raises(self, exception_class, function, *args, **kwargs): try: function(*args, **kwargs) except exception_class as e: if SHOW_EXCEPTION_MESSAGES: print(str(e)) else: raise AssertionError( f'{exception_class.__name__} not raised by ' f'{function.__name__}') async def assert_raises_async( self, exception_class, function, *args, **kwargs): try: await function(*args, **kwargs) except exception_class as e: if SHOW_EXCEPTION_MESSAGES: print(str(e)) else: raise AssertionError( f'{exception_class.__name__} not raised by ' f'{function.__name__}') def assert_arrays_equal(self, x, y): self.assertTrue(numpy_utils.arrays_equal(x, y)) def assert_arrays_close(self, x, y): self.assertTrue(numpy_utils.arrays_close(x, y))
Add method for testing async function errors.
Add method for testing async function errors.
Python
mit
HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper
import vesper.util.numpy_utils as numpy_utils + + SHOW_EXCEPTION_MESSAGES = False class TestCaseMixin: def assert_raises(self, exception_class, function, *args, **kwargs): - self.assertRaises(exception_class, function, *args, **kwargs) - try: function(*args, **kwargs) + + except exception_class as e: + if SHOW_EXCEPTION_MESSAGES: + print(str(e)) + + else: + raise AssertionError( + f'{exception_class.__name__} not raised by ' + f'{function.__name__}') + + async def assert_raises_async( + self, exception_class, function, *args, **kwargs): + + try: + await function(*args, **kwargs) + except exception_class as e: - pass + if SHOW_EXCEPTION_MESSAGES: - # print(str(e)) + print(str(e)) - - + + else: + raise AssertionError( + f'{exception_class.__name__} not raised by ' + f'{function.__name__}') + + def assert_arrays_equal(self, x, y): self.assertTrue(numpy_utils.arrays_equal(x, y)) def assert_arrays_close(self, x, y): self.assertTrue(numpy_utils.arrays_close(x, y))
Add method for testing async function errors.
## Code Before: import vesper.util.numpy_utils as numpy_utils class TestCaseMixin: def assert_raises(self, exception_class, function, *args, **kwargs): self.assertRaises(exception_class, function, *args, **kwargs) try: function(*args, **kwargs) except exception_class as e: pass # print(str(e)) def assert_arrays_equal(self, x, y): self.assertTrue(numpy_utils.arrays_equal(x, y)) def assert_arrays_close(self, x, y): self.assertTrue(numpy_utils.arrays_close(x, y)) ## Instruction: Add method for testing async function errors. ## Code After: import vesper.util.numpy_utils as numpy_utils SHOW_EXCEPTION_MESSAGES = False class TestCaseMixin: def assert_raises(self, exception_class, function, *args, **kwargs): try: function(*args, **kwargs) except exception_class as e: if SHOW_EXCEPTION_MESSAGES: print(str(e)) else: raise AssertionError( f'{exception_class.__name__} not raised by ' f'{function.__name__}') async def assert_raises_async( self, exception_class, function, *args, **kwargs): try: await function(*args, **kwargs) except exception_class as e: if SHOW_EXCEPTION_MESSAGES: print(str(e)) else: raise AssertionError( f'{exception_class.__name__} not raised by ' f'{function.__name__}') def assert_arrays_equal(self, x, y): self.assertTrue(numpy_utils.arrays_equal(x, y)) def assert_arrays_close(self, x, y): self.assertTrue(numpy_utils.arrays_close(x, y))
import vesper.util.numpy_utils as numpy_utils + + SHOW_EXCEPTION_MESSAGES = False class TestCaseMixin: def assert_raises(self, exception_class, function, *args, **kwargs): - self.assertRaises(exception_class, function, *args, **kwargs) - try: function(*args, **kwargs) + + except exception_class as e: + if SHOW_EXCEPTION_MESSAGES: + print(str(e)) + + else: + raise AssertionError( + f'{exception_class.__name__} not raised by ' + f'{function.__name__}') + + async def assert_raises_async( + self, exception_class, function, *args, **kwargs): + + try: + await function(*args, **kwargs) + except exception_class as e: - pass + if SHOW_EXCEPTION_MESSAGES: - # print(str(e)) ? ^ + print(str(e)) ? ^^^ - - + + else: + raise AssertionError( + f'{exception_class.__name__} not raised by ' + f'{function.__name__}') + + def assert_arrays_equal(self, x, y): self.assertTrue(numpy_utils.arrays_equal(x, y)) def assert_arrays_close(self, x, y): self.assertTrue(numpy_utils.arrays_close(x, y))
9bc7d09e9abf79f6af7f7fd3cdddbfacd91ba9d3
run.py
run.py
import os import argparse def run(): """ Reuse the Procfile to start the dev server """ with open("Procfile", "r") as f: command = f.read().strip() command = command.replace("web: ", "") command += " --reload" os.system(command) def deploy(): os.system("git push dokku master") def main(): parser = argparse.ArgumentParser() parser.add_argument('--deploy', action="store_true", required=False) args = parser.parse_args() if args.deploy: deploy() else: run() if __name__ == '__main__': main()
import os import argparse def run(): """ Reuse the Procfile to start the dev server """ with open("Procfile", "r") as f: command = f.read().strip() command = command.replace("web: ", "") command += " --reload" os.system(command) def deploy(): os.system("git push dokku master") def dependencies(): os.system("pip-compile --upgrade requirements.in") os.system("pip-compile --upgrade requirements-dev.in") os.system("pip-sync requirements-dev.txt") def main(): parser = argparse.ArgumentParser() parser.add_argument('--deploy', action="store_true", required=False) parser.add_argument('--deps', action="store_true", required=False) args = parser.parse_args() if args.deploy: deploy() elif args.deps: dependencies() else: run() if __name__ == '__main__': main()
Add command to update dependencies.
Add command to update dependencies.
Python
mit
EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger
import os import argparse def run(): """ Reuse the Procfile to start the dev server """ with open("Procfile", "r") as f: command = f.read().strip() command = command.replace("web: ", "") command += " --reload" os.system(command) def deploy(): os.system("git push dokku master") + def dependencies(): + os.system("pip-compile --upgrade requirements.in") + os.system("pip-compile --upgrade requirements-dev.in") + os.system("pip-sync requirements-dev.txt") + def main(): parser = argparse.ArgumentParser() parser.add_argument('--deploy', action="store_true", required=False) + parser.add_argument('--deps', action="store_true", required=False) args = parser.parse_args() if args.deploy: deploy() + elif args.deps: + dependencies() else: run() + if __name__ == '__main__': main()
Add command to update dependencies.
## Code Before: import os import argparse def run(): """ Reuse the Procfile to start the dev server """ with open("Procfile", "r") as f: command = f.read().strip() command = command.replace("web: ", "") command += " --reload" os.system(command) def deploy(): os.system("git push dokku master") def main(): parser = argparse.ArgumentParser() parser.add_argument('--deploy', action="store_true", required=False) args = parser.parse_args() if args.deploy: deploy() else: run() if __name__ == '__main__': main() ## Instruction: Add command to update dependencies. ## Code After: import os import argparse def run(): """ Reuse the Procfile to start the dev server """ with open("Procfile", "r") as f: command = f.read().strip() command = command.replace("web: ", "") command += " --reload" os.system(command) def deploy(): os.system("git push dokku master") def dependencies(): os.system("pip-compile --upgrade requirements.in") os.system("pip-compile --upgrade requirements-dev.in") os.system("pip-sync requirements-dev.txt") def main(): parser = argparse.ArgumentParser() parser.add_argument('--deploy', action="store_true", required=False) parser.add_argument('--deps', action="store_true", required=False) args = parser.parse_args() if args.deploy: deploy() elif args.deps: dependencies() else: run() if __name__ == '__main__': main()
import os import argparse def run(): """ Reuse the Procfile to start the dev server """ with open("Procfile", "r") as f: command = f.read().strip() command = command.replace("web: ", "") command += " --reload" os.system(command) def deploy(): os.system("git push dokku master") + def dependencies(): + os.system("pip-compile --upgrade requirements.in") + os.system("pip-compile --upgrade requirements-dev.in") + os.system("pip-sync requirements-dev.txt") + def main(): parser = argparse.ArgumentParser() parser.add_argument('--deploy', action="store_true", required=False) + parser.add_argument('--deps', action="store_true", required=False) args = parser.parse_args() if args.deploy: deploy() + elif args.deps: + dependencies() else: run() + if __name__ == '__main__': main()
bbb5fa95cd4b9d7fac6ac45546eedcd8a5d14162
s4v1.py
s4v1.py
from s3v3 import *
from s3v3 import * import csv def write_to_file(filename, data_sample): example = csv.write(open(filename, 'w', encoding='utf-8'), dialect='excel') # example is the variable of the new file that is open and which we can write to (using utf-8 encoding and an excel dialect). example.writerows(data_sample) # write rows is going to take the rows in the data sample and write them to the example (i.e. the file name we passed in) write_to_file("_data/s4-silk_ties.csv", silk_ties) # this is going to create a new csv located in the _data directory, named s4-silk_ties.csv and it is going to contain all of that data from the silk_ties list which we created in s3v2 (silk_ties = filter_col_by_string(data_from_csv, "material", "_silk"))
Create write to file function
Create write to file function
Python
mit
alexmilesyounger/ds_basics
from s3v3 import * + import csv + + def write_to_file(filename, data_sample): + example = csv.write(open(filename, 'w', encoding='utf-8'), dialect='excel') # example is the variable of the new file that is open and which we can write to (using utf-8 encoding and an excel dialect). + example.writerows(data_sample) # write rows is going to take the rows in the data sample and write them to the example (i.e. the file name we passed in) + + write_to_file("_data/s4-silk_ties.csv", silk_ties) # this is going to create a new csv located in the _data directory, named s4-silk_ties.csv and it is going to contain all of that data from the silk_ties list which we created in s3v2 (silk_ties = filter_col_by_string(data_from_csv, "material", "_silk"))
Create write to file function
## Code Before: from s3v3 import * ## Instruction: Create write to file function ## Code After: from s3v3 import * import csv def write_to_file(filename, data_sample): example = csv.write(open(filename, 'w', encoding='utf-8'), dialect='excel') # example is the variable of the new file that is open and which we can write to (using utf-8 encoding and an excel dialect). example.writerows(data_sample) # write rows is going to take the rows in the data sample and write them to the example (i.e. the file name we passed in) write_to_file("_data/s4-silk_ties.csv", silk_ties) # this is going to create a new csv located in the _data directory, named s4-silk_ties.csv and it is going to contain all of that data from the silk_ties list which we created in s3v2 (silk_ties = filter_col_by_string(data_from_csv, "material", "_silk"))
from s3v3 import * + import csv + def write_to_file(filename, data_sample): + example = csv.write(open(filename, 'w', encoding='utf-8'), dialect='excel') # example is the variable of the new file that is open and which we can write to (using utf-8 encoding and an excel dialect). + example.writerows(data_sample) # write rows is going to take the rows in the data sample and write them to the example (i.e. the file name we passed in) + + write_to_file("_data/s4-silk_ties.csv", silk_ties) # this is going to create a new csv located in the _data directory, named s4-silk_ties.csv and it is going to contain all of that data from the silk_ties list which we created in s3v2 (silk_ties = filter_col_by_string(data_from_csv, "material", "_silk")) +
81b7089633b9d43b05566a1e23f93fb59678fe1e
plugins/unicode_plugin.py
plugins/unicode_plugin.py
import string import textwrap import binascii from veryprettytable import VeryPrettyTable from plugins import BasePlugin __author__ = 'peter' class DecodeHexPlugin(BasePlugin): short_description = 'Decode hex string to encodings:' default = True description = textwrap.dedent(''' This plugin tries to decode the given hexstring with some common encodings, then print it '''.strip()) def sentinel(self): return all(not len(x) % 2 for x in self.args['STRING']) def handle(self): result = '' for s in self.args['STRING']: if len(self.args['STRING']) > 1: result += '{0}:\n'.format(s) binary = binascii.unhexlify(s) result += self._decode('UTF8', 'utf8', binary) result += self._decode('iso-8859-1 (Cyrillic)', 'iso-8859-1', binary) return result @staticmethod def _decode(name, encoding, binary): try: s = binary.decode(encoding) except UnicodeDecodeError: s = '<invalid>' return '{0}: "{1}"\n'.format(name, s)
import string import textwrap import binascii import unicodedata from veryprettytable import VeryPrettyTable from plugins import BasePlugin __author__ = 'peter' class DecodeHexPlugin(BasePlugin): short_description = 'Decode hex string to encodings:' default = True description = textwrap.dedent(''' This plugin tries to decode the given hexstring with some common encodings, then print it. It tries to remove control characters from the string after decoding to prevent terminal breakage. '''.strip()) def sentinel(self): return all(not len(x) % 2 for x in self.args['STRING']) def handle(self): result = '' for s in self.args['STRING']: if len(self.args['STRING']) > 1: result += '{0}:\n'.format(s) binary = binascii.unhexlify(s) result += self._decode('UTF8', 'utf8', binary) result += self._decode('iso-8859-1 (Cyrillic)', 'iso-8859-1', binary) return result def _decode(self, name, encoding, binary): try: s = self._clean(binary.decode(encoding)) except UnicodeDecodeError: s = '<invalid>' return '{0}: "{1}"\n'.format(name, s) @staticmethod def _clean(s): return "".join(ch for ch in s if unicodedata.category(ch)[0] != "C")
Remove control characters from printed string to prevent terminal breakage
Remove control characters from printed string to prevent terminal breakage
Python
mit
Sakartu/stringinfo
import string import textwrap import binascii + import unicodedata from veryprettytable import VeryPrettyTable from plugins import BasePlugin __author__ = 'peter' class DecodeHexPlugin(BasePlugin): short_description = 'Decode hex string to encodings:' default = True description = textwrap.dedent(''' - This plugin tries to decode the given hexstring with some common encodings, then print it + This plugin tries to decode the given hexstring with some common encodings, then print it. + It tries to remove control characters from the string after decoding to prevent terminal breakage. '''.strip()) def sentinel(self): return all(not len(x) % 2 for x in self.args['STRING']) def handle(self): result = '' for s in self.args['STRING']: if len(self.args['STRING']) > 1: result += '{0}:\n'.format(s) binary = binascii.unhexlify(s) result += self._decode('UTF8', 'utf8', binary) result += self._decode('iso-8859-1 (Cyrillic)', 'iso-8859-1', binary) return result - @staticmethod - def _decode(name, encoding, binary): + def _decode(self, name, encoding, binary): try: - s = binary.decode(encoding) + s = self._clean(binary.decode(encoding)) except UnicodeDecodeError: s = '<invalid>' return '{0}: "{1}"\n'.format(name, s) + + @staticmethod + def _clean(s): + return "".join(ch for ch in s if unicodedata.category(ch)[0] != "C") +
Remove control characters from printed string to prevent terminal breakage
## Code Before: import string import textwrap import binascii from veryprettytable import VeryPrettyTable from plugins import BasePlugin __author__ = 'peter' class DecodeHexPlugin(BasePlugin): short_description = 'Decode hex string to encodings:' default = True description = textwrap.dedent(''' This plugin tries to decode the given hexstring with some common encodings, then print it '''.strip()) def sentinel(self): return all(not len(x) % 2 for x in self.args['STRING']) def handle(self): result = '' for s in self.args['STRING']: if len(self.args['STRING']) > 1: result += '{0}:\n'.format(s) binary = binascii.unhexlify(s) result += self._decode('UTF8', 'utf8', binary) result += self._decode('iso-8859-1 (Cyrillic)', 'iso-8859-1', binary) return result @staticmethod def _decode(name, encoding, binary): try: s = binary.decode(encoding) except UnicodeDecodeError: s = '<invalid>' return '{0}: "{1}"\n'.format(name, s) ## Instruction: Remove control characters from printed string to prevent terminal breakage ## Code After: import string import textwrap import binascii import unicodedata from veryprettytable import VeryPrettyTable from plugins import BasePlugin __author__ = 'peter' class DecodeHexPlugin(BasePlugin): short_description = 'Decode hex string to encodings:' default = True description = textwrap.dedent(''' This plugin tries to decode the given hexstring with some common encodings, then print it. It tries to remove control characters from the string after decoding to prevent terminal breakage. '''.strip()) def sentinel(self): return all(not len(x) % 2 for x in self.args['STRING']) def handle(self): result = '' for s in self.args['STRING']: if len(self.args['STRING']) > 1: result += '{0}:\n'.format(s) binary = binascii.unhexlify(s) result += self._decode('UTF8', 'utf8', binary) result += self._decode('iso-8859-1 (Cyrillic)', 'iso-8859-1', binary) return result def _decode(self, name, encoding, binary): try: s = self._clean(binary.decode(encoding)) except UnicodeDecodeError: s = '<invalid>' return '{0}: "{1}"\n'.format(name, s) @staticmethod def _clean(s): return "".join(ch for ch in s if unicodedata.category(ch)[0] != "C")
import string import textwrap import binascii + import unicodedata from veryprettytable import VeryPrettyTable from plugins import BasePlugin __author__ = 'peter' class DecodeHexPlugin(BasePlugin): short_description = 'Decode hex string to encodings:' default = True description = textwrap.dedent(''' - This plugin tries to decode the given hexstring with some common encodings, then print it + This plugin tries to decode the given hexstring with some common encodings, then print it. ? + + It tries to remove control characters from the string after decoding to prevent terminal breakage. '''.strip()) def sentinel(self): return all(not len(x) % 2 for x in self.args['STRING']) def handle(self): result = '' for s in self.args['STRING']: if len(self.args['STRING']) > 1: result += '{0}:\n'.format(s) binary = binascii.unhexlify(s) result += self._decode('UTF8', 'utf8', binary) result += self._decode('iso-8859-1 (Cyrillic)', 'iso-8859-1', binary) return result - @staticmethod - def _decode(name, encoding, binary): + def _decode(self, name, encoding, binary): ? ++++++ try: - s = binary.decode(encoding) + s = self._clean(binary.decode(encoding)) ? ++++++++++++ + except UnicodeDecodeError: s = '<invalid>' return '{0}: "{1}"\n'.format(name, s) + + @staticmethod + def _clean(s): + return "".join(ch for ch in s if unicodedata.category(ch)[0] != "C")
533b4c090547389054934ea88388512399b568c9
filter_plugins/custom_plugins.py
filter_plugins/custom_plugins.py
def vault(encrypted, env): method = """ from keyczar import keyczar import os.path import sys keydir = '.vault' if not os.path.isdir(keydir): keydir = os.path.expanduser('~/.decrypted_openconext_keystore_{env}') crypter = keyczar.Crypter.Read(keydir) sys.stdout.write(crypter.Decrypt("%s")) """.format(env=env) % encrypted import subprocess from ansible import errors (out, err) = subprocess.Popen(['python', '-c', method], stdout=subprocess.PIPE).communicate() if (err != None): raise errors.AnsibleFilterError("Unable to decrypt, aborting. Error: {error}".format(error = err)) else: return out class FilterModule(object): def filters(self): return { 'vault': vault }
def vault(encrypted, env): method = """ from keyczar import keyczar import os.path import sys keydir = '.vault' if not os.path.isdir(keydir): keydir = os.path.expanduser('~/.decrypted_openconext_keystore_{env}') crypter = keyczar.Crypter.Read(keydir) sys.stdout.write(crypter.Decrypt("%s")) """.format(env=env) % encrypted from subprocess import check_output return check_output(["python", "-c", method]) class FilterModule(object): def filters(self): return { 'vault': vault }
Use simpler invocation that actually fails. Leave it to @thijskh to use Popen-type of invocation
Use simpler invocation that actually fails. Leave it to @thijskh to use Popen-type of invocation
Python
apache-2.0
baszoetekouw/OpenConext-deploy,remold/OpenConext-deploy,OpenConext/OpenConext-deploy,baszoetekouw/OpenConext-deploy,baszoetekouw/OpenConext-deploy,OpenConext/OpenConext-deploy,OpenConext/OpenConext-deploy,remold/OpenConext-deploy,remold/OpenConext-deploy,baszoetekouw/OpenConext-deploy,OpenConext/OpenConext-deploy,baszoetekouw/OpenConext-deploy,OpenConext/OpenConext-deploy
def vault(encrypted, env): method = """ from keyczar import keyczar import os.path import sys keydir = '.vault' if not os.path.isdir(keydir): keydir = os.path.expanduser('~/.decrypted_openconext_keystore_{env}') crypter = keyczar.Crypter.Read(keydir) sys.stdout.write(crypter.Decrypt("%s")) """.format(env=env) % encrypted - import subprocess - from ansible import errors + from subprocess import check_output + return check_output(["python", "-c", method]) - (out, err) = subprocess.Popen(['python', '-c', method], stdout=subprocess.PIPE).communicate() - if (err != None): - raise errors.AnsibleFilterError("Unable to decrypt, aborting. Error: {error}".format(error = err)) - else: - return out class FilterModule(object): def filters(self): return { 'vault': vault }
Use simpler invocation that actually fails. Leave it to @thijskh to use Popen-type of invocation
## Code Before: def vault(encrypted, env): method = """ from keyczar import keyczar import os.path import sys keydir = '.vault' if not os.path.isdir(keydir): keydir = os.path.expanduser('~/.decrypted_openconext_keystore_{env}') crypter = keyczar.Crypter.Read(keydir) sys.stdout.write(crypter.Decrypt("%s")) """.format(env=env) % encrypted import subprocess from ansible import errors (out, err) = subprocess.Popen(['python', '-c', method], stdout=subprocess.PIPE).communicate() if (err != None): raise errors.AnsibleFilterError("Unable to decrypt, aborting. Error: {error}".format(error = err)) else: return out class FilterModule(object): def filters(self): return { 'vault': vault } ## Instruction: Use simpler invocation that actually fails. Leave it to @thijskh to use Popen-type of invocation ## Code After: def vault(encrypted, env): method = """ from keyczar import keyczar import os.path import sys keydir = '.vault' if not os.path.isdir(keydir): keydir = os.path.expanduser('~/.decrypted_openconext_keystore_{env}') crypter = keyczar.Crypter.Read(keydir) sys.stdout.write(crypter.Decrypt("%s")) """.format(env=env) % encrypted from subprocess import check_output return check_output(["python", "-c", method]) class FilterModule(object): def filters(self): return { 'vault': vault }
def vault(encrypted, env): method = """ from keyczar import keyczar import os.path import sys keydir = '.vault' if not os.path.isdir(keydir): keydir = os.path.expanduser('~/.decrypted_openconext_keystore_{env}') crypter = keyczar.Crypter.Read(keydir) sys.stdout.write(crypter.Decrypt("%s")) """.format(env=env) % encrypted - import subprocess - from ansible import errors + from subprocess import check_output + return check_output(["python", "-c", method]) - (out, err) = subprocess.Popen(['python', '-c', method], stdout=subprocess.PIPE).communicate() - if (err != None): - raise errors.AnsibleFilterError("Unable to decrypt, aborting. Error: {error}".format(error = err)) - else: - return out class FilterModule(object): def filters(self): return { 'vault': vault }
3446db734ce669e98f8cdeedbabf13dac62c777f
edgedb/lang/build.py
edgedb/lang/build.py
import os.path from distutils.command import build class build(build.build): def _compile_parsers(self): import parsing import edgedb import edgedb.server.main edgedb.server.main.init_import_system() import edgedb.lang.edgeql.parser.grammar.single as edgeql_spec import edgedb.lang.edgeql.parser.grammar.block as edgeql_spec2 import edgedb.server.pgsql.parser.pgsql as pgsql_spec import edgedb.lang.schema.parser.grammar.declarations as schema_spec import edgedb.lang.graphql.parser.grammar.document as graphql_spec base_path = os.path.dirname( os.path.dirname(os.path.dirname(__file__))) for spec in (edgeql_spec, edgeql_spec2, pgsql_spec, schema_spec, graphql_spec): subpath = os.path.dirname(spec.__file__)[len(base_path) + 1:] cache_dir = os.path.join(self.build_lib, subpath) os.makedirs(cache_dir) cache = os.path.join( cache_dir, spec.__name__.rpartition('.')[2] + '.pickle') parsing.Spec(spec, pickleFile=cache, verbose=True) def run(self, *args, **kwargs): super().run(*args, **kwargs) self._compile_parsers()
import os.path from distutils.command import build class build(build.build): def _compile_parsers(self): import parsing import edgedb import edgedb.server.main edgedb.server.main.init_import_system() import edgedb.lang.edgeql.parser.grammar.single as edgeql_spec import edgedb.lang.edgeql.parser.grammar.block as edgeql_spec2 import edgedb.server.pgsql.parser.pgsql as pgsql_spec import edgedb.lang.schema.parser.grammar.declarations as schema_spec import edgedb.lang.graphql.parser.grammar.document as graphql_spec base_path = os.path.dirname( os.path.dirname(os.path.dirname(__file__))) for spec in (edgeql_spec, edgeql_spec2, pgsql_spec, schema_spec, graphql_spec): subpath = os.path.dirname(spec.__file__)[len(base_path) + 1:] cache_dir = os.path.join(self.build_lib, subpath) os.makedirs(cache_dir, exist_ok=True) cache = os.path.join( cache_dir, spec.__name__.rpartition('.')[2] + '.pickle') parsing.Spec(spec, pickleFile=cache, verbose=True) def run(self, *args, **kwargs): super().run(*args, **kwargs) self._compile_parsers()
Fix the creation of parser cache directory
setup.py: Fix the creation of parser cache directory
Python
apache-2.0
edgedb/edgedb,edgedb/edgedb,edgedb/edgedb
import os.path from distutils.command import build class build(build.build): def _compile_parsers(self): import parsing import edgedb import edgedb.server.main edgedb.server.main.init_import_system() import edgedb.lang.edgeql.parser.grammar.single as edgeql_spec import edgedb.lang.edgeql.parser.grammar.block as edgeql_spec2 import edgedb.server.pgsql.parser.pgsql as pgsql_spec import edgedb.lang.schema.parser.grammar.declarations as schema_spec import edgedb.lang.graphql.parser.grammar.document as graphql_spec base_path = os.path.dirname( os.path.dirname(os.path.dirname(__file__))) for spec in (edgeql_spec, edgeql_spec2, pgsql_spec, schema_spec, graphql_spec): subpath = os.path.dirname(spec.__file__)[len(base_path) + 1:] cache_dir = os.path.join(self.build_lib, subpath) - os.makedirs(cache_dir) + os.makedirs(cache_dir, exist_ok=True) cache = os.path.join( cache_dir, spec.__name__.rpartition('.')[2] + '.pickle') parsing.Spec(spec, pickleFile=cache, verbose=True) def run(self, *args, **kwargs): super().run(*args, **kwargs) self._compile_parsers()
Fix the creation of parser cache directory
## Code Before: import os.path from distutils.command import build class build(build.build): def _compile_parsers(self): import parsing import edgedb import edgedb.server.main edgedb.server.main.init_import_system() import edgedb.lang.edgeql.parser.grammar.single as edgeql_spec import edgedb.lang.edgeql.parser.grammar.block as edgeql_spec2 import edgedb.server.pgsql.parser.pgsql as pgsql_spec import edgedb.lang.schema.parser.grammar.declarations as schema_spec import edgedb.lang.graphql.parser.grammar.document as graphql_spec base_path = os.path.dirname( os.path.dirname(os.path.dirname(__file__))) for spec in (edgeql_spec, edgeql_spec2, pgsql_spec, schema_spec, graphql_spec): subpath = os.path.dirname(spec.__file__)[len(base_path) + 1:] cache_dir = os.path.join(self.build_lib, subpath) os.makedirs(cache_dir) cache = os.path.join( cache_dir, spec.__name__.rpartition('.')[2] + '.pickle') parsing.Spec(spec, pickleFile=cache, verbose=True) def run(self, *args, **kwargs): super().run(*args, **kwargs) self._compile_parsers() ## Instruction: Fix the creation of parser cache directory ## Code After: import os.path from distutils.command import build class build(build.build): def _compile_parsers(self): import parsing import edgedb import edgedb.server.main edgedb.server.main.init_import_system() import edgedb.lang.edgeql.parser.grammar.single as edgeql_spec import edgedb.lang.edgeql.parser.grammar.block as edgeql_spec2 import edgedb.server.pgsql.parser.pgsql as pgsql_spec import edgedb.lang.schema.parser.grammar.declarations as schema_spec import edgedb.lang.graphql.parser.grammar.document as graphql_spec base_path = os.path.dirname( os.path.dirname(os.path.dirname(__file__))) for spec in (edgeql_spec, edgeql_spec2, pgsql_spec, schema_spec, graphql_spec): subpath = os.path.dirname(spec.__file__)[len(base_path) + 1:] cache_dir = os.path.join(self.build_lib, subpath) os.makedirs(cache_dir, exist_ok=True) cache = os.path.join( cache_dir, spec.__name__.rpartition('.')[2] + '.pickle') parsing.Spec(spec, pickleFile=cache, verbose=True) def run(self, *args, **kwargs): super().run(*args, **kwargs) self._compile_parsers()
import os.path from distutils.command import build class build(build.build): def _compile_parsers(self): import parsing import edgedb import edgedb.server.main edgedb.server.main.init_import_system() import edgedb.lang.edgeql.parser.grammar.single as edgeql_spec import edgedb.lang.edgeql.parser.grammar.block as edgeql_spec2 import edgedb.server.pgsql.parser.pgsql as pgsql_spec import edgedb.lang.schema.parser.grammar.declarations as schema_spec import edgedb.lang.graphql.parser.grammar.document as graphql_spec base_path = os.path.dirname( os.path.dirname(os.path.dirname(__file__))) for spec in (edgeql_spec, edgeql_spec2, pgsql_spec, schema_spec, graphql_spec): subpath = os.path.dirname(spec.__file__)[len(base_path) + 1:] cache_dir = os.path.join(self.build_lib, subpath) - os.makedirs(cache_dir) + os.makedirs(cache_dir, exist_ok=True) ? +++++++++++++++ cache = os.path.join( cache_dir, spec.__name__.rpartition('.')[2] + '.pickle') parsing.Spec(spec, pickleFile=cache, verbose=True) def run(self, *args, **kwargs): super().run(*args, **kwargs) self._compile_parsers()
ae689c9de698daeaf8ab5275c384183cb665c903
neutron_classifier/common/constants.py
neutron_classifier/common/constants.py
CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier', 'transport_classifier', 'ethernet_classifier', 'encapsulation_classifier', 'neutron_port_classifier'] # TODO(sc68cal) add more protocols` PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6'] ENCAPSULATION_TYPES = ['vxlan', 'gre'] NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6'] ENCAPSULATION_TYPES = ['vxlan', 'gre'] NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
Remove CLASSIFIER_TYPES constant - it was never used
Remove CLASSIFIER_TYPES constant - it was never used Change-Id: Ia6ba4453f6bc9b9de0da1e83d2dc75147fb91882
Python
apache-2.0
openstack/neutron-classifier,openstack/neutron-classifier
- - CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier', - 'transport_classifier', 'ethernet_classifier', - 'encapsulation_classifier', 'neutron_port_classifier'] - - # TODO(sc68cal) add more protocols` PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6'] ENCAPSULATION_TYPES = ['vxlan', 'gre'] NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
Remove CLASSIFIER_TYPES constant - it was never used
## Code Before: CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier', 'transport_classifier', 'ethernet_classifier', 'encapsulation_classifier', 'neutron_port_classifier'] # TODO(sc68cal) add more protocols` PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6'] ENCAPSULATION_TYPES = ['vxlan', 'gre'] NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group'] ## Instruction: Remove CLASSIFIER_TYPES constant - it was never used ## Code After: PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6'] ENCAPSULATION_TYPES = ['vxlan', 'gre'] NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
- - CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier', - 'transport_classifier', 'ethernet_classifier', - 'encapsulation_classifier', 'neutron_port_classifier'] - - # TODO(sc68cal) add more protocols` PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6'] ENCAPSULATION_TYPES = ['vxlan', 'gre'] NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
778df70d5e755d0681636cb401bbf33f17f247bc
uniqueids/admin.py
uniqueids/admin.py
from django.contrib import admin from .models import Record from .tasks import send_personnel_code class RecordAdmin(admin.ModelAdmin): list_display = [ "id", "identity", "write_to", "created_at", "updated_at"] list_filter = ["write_to", "created_at"] search_fields = ["identity", "write_to"] actions = ["resend_personnel_code"] def resend_personnel_code(self, request, queryset): created = 0 skipped = 0 for record in queryset: if record.write_to != "personnel_code": skipped += 1 continue send_personnel_code.apply_async(kwargs={ "identity": str(record.identity), "personnel_code": record.id}) created += 1 if created == 1: created_text = "%s Record was" % created else: created_text = "%s Records were" % created if skipped == 1: skipped_text = "%s Record was" % skipped else: skipped_text = "%s Records were" % skipped self.message_user( request, "%s successfully changed. %s skipped because they are " "not a HCW." % (created_text, skipped_text)) resend_personnel_code.short_description = "Send code by SMS (personnel "\ "code only)" admin.site.register(Record, RecordAdmin)
from django.contrib import admin from .models import Record from .tasks import send_personnel_code class RecordAdmin(admin.ModelAdmin): list_display = [ "id", "identity", "write_to", "created_at", "updated_at"] list_filter = ["write_to", "created_at"] search_fields = ["identity", "write_to"] actions = ["resend_personnel_code"] def resend_personnel_code(self, request, queryset): created = 0 skipped = 0 for record in queryset.iterator(): if record.write_to != "personnel_code": skipped += 1 continue send_personnel_code.apply_async(kwargs={ "identity": str(record.identity), "personnel_code": record.id}) created += 1 if created == 1: created_text = "%s Record was" % created else: created_text = "%s Records were" % created if skipped == 1: skipped_text = "%s Record was" % skipped else: skipped_text = "%s Records were" % skipped self.message_user( request, "%s successfully changed. %s skipped because they are " "not a HCW." % (created_text, skipped_text)) resend_personnel_code.short_description = "Send code by SMS (personnel "\ "code only)" admin.site.register(Record, RecordAdmin)
Use Iterator to iterate through records
Use Iterator to iterate through records
Python
bsd-3-clause
praekelt/hellomama-registration,praekelt/hellomama-registration
from django.contrib import admin from .models import Record from .tasks import send_personnel_code class RecordAdmin(admin.ModelAdmin): list_display = [ "id", "identity", "write_to", "created_at", "updated_at"] list_filter = ["write_to", "created_at"] search_fields = ["identity", "write_to"] actions = ["resend_personnel_code"] def resend_personnel_code(self, request, queryset): created = 0 skipped = 0 - for record in queryset: + for record in queryset.iterator(): if record.write_to != "personnel_code": skipped += 1 continue send_personnel_code.apply_async(kwargs={ "identity": str(record.identity), "personnel_code": record.id}) created += 1 if created == 1: created_text = "%s Record was" % created else: created_text = "%s Records were" % created if skipped == 1: skipped_text = "%s Record was" % skipped else: skipped_text = "%s Records were" % skipped self.message_user( request, "%s successfully changed. %s skipped because they are " "not a HCW." % (created_text, skipped_text)) resend_personnel_code.short_description = "Send code by SMS (personnel "\ "code only)" admin.site.register(Record, RecordAdmin)
Use Iterator to iterate through records
## Code Before: from django.contrib import admin from .models import Record from .tasks import send_personnel_code class RecordAdmin(admin.ModelAdmin): list_display = [ "id", "identity", "write_to", "created_at", "updated_at"] list_filter = ["write_to", "created_at"] search_fields = ["identity", "write_to"] actions = ["resend_personnel_code"] def resend_personnel_code(self, request, queryset): created = 0 skipped = 0 for record in queryset: if record.write_to != "personnel_code": skipped += 1 continue send_personnel_code.apply_async(kwargs={ "identity": str(record.identity), "personnel_code": record.id}) created += 1 if created == 1: created_text = "%s Record was" % created else: created_text = "%s Records were" % created if skipped == 1: skipped_text = "%s Record was" % skipped else: skipped_text = "%s Records were" % skipped self.message_user( request, "%s successfully changed. %s skipped because they are " "not a HCW." % (created_text, skipped_text)) resend_personnel_code.short_description = "Send code by SMS (personnel "\ "code only)" admin.site.register(Record, RecordAdmin) ## Instruction: Use Iterator to iterate through records ## Code After: from django.contrib import admin from .models import Record from .tasks import send_personnel_code class RecordAdmin(admin.ModelAdmin): list_display = [ "id", "identity", "write_to", "created_at", "updated_at"] list_filter = ["write_to", "created_at"] search_fields = ["identity", "write_to"] actions = ["resend_personnel_code"] def resend_personnel_code(self, request, queryset): created = 0 skipped = 0 for record in queryset.iterator(): if record.write_to != "personnel_code": skipped += 1 continue send_personnel_code.apply_async(kwargs={ "identity": str(record.identity), "personnel_code": record.id}) created += 1 if created == 1: created_text = "%s Record was" % created else: created_text = "%s Records were" % created if skipped == 1: skipped_text = "%s Record was" % skipped else: skipped_text = "%s Records were" % 
skipped self.message_user( request, "%s successfully changed. %s skipped because they are " "not a HCW." % (created_text, skipped_text)) resend_personnel_code.short_description = "Send code by SMS (personnel "\ "code only)" admin.site.register(Record, RecordAdmin)
from django.contrib import admin from .models import Record from .tasks import send_personnel_code class RecordAdmin(admin.ModelAdmin): list_display = [ "id", "identity", "write_to", "created_at", "updated_at"] list_filter = ["write_to", "created_at"] search_fields = ["identity", "write_to"] actions = ["resend_personnel_code"] def resend_personnel_code(self, request, queryset): created = 0 skipped = 0 - for record in queryset: + for record in queryset.iterator(): ? +++++++++++ if record.write_to != "personnel_code": skipped += 1 continue send_personnel_code.apply_async(kwargs={ "identity": str(record.identity), "personnel_code": record.id}) created += 1 if created == 1: created_text = "%s Record was" % created else: created_text = "%s Records were" % created if skipped == 1: skipped_text = "%s Record was" % skipped else: skipped_text = "%s Records were" % skipped self.message_user( request, "%s successfully changed. %s skipped because they are " "not a HCW." % (created_text, skipped_text)) resend_personnel_code.short_description = "Send code by SMS (personnel "\ "code only)" admin.site.register(Record, RecordAdmin)
fac8f1af6bd3eb46fe2a26689b0d85f358934f7a
network_checker/url_access_checker/cli.py
network_checker/url_access_checker/cli.py
import sys from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
import os import sys # fixed in cmd2 >=0.6.6 os.environ['EDITOR'] = '/usr/bin/nano' from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
Add EDITOR variable in urlaccesschecker
Add EDITOR variable in urlaccesschecker This variable required by cmd2 library to work. Without - it fails on bootstrap with traceback: File "/usr/lib/python2.6/site-packages/cmd2.py", line 424, in Cmd if subprocess.Popen(['which', editor]) Change-Id: I061f88b65d7bc7181752cd076da4067df2f84131 Related-Bug: 1439686
Python
apache-2.0
prmtl/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,huntxu/fuel-web,prmtl/fuel-web,eayunstack/fuel-web,stackforge/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-web,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,huntxu/fuel-web,prmtl/fuel-web,huntxu/fuel-web,huntxu/fuel-web,eayunstack/fuel-web,huntxu/fuel-web,nebril/fuel-web,prmtl/fuel-web,stackforge/fuel-web,stackforge/fuel-web,prmtl/fuel-web,nebril/fuel-web
+ import os import sys + # fixed in cmd2 >=0.6.6 + os.environ['EDITOR'] = '/usr/bin/nano' from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
Add EDITOR variable in urlaccesschecker
## Code Before: import sys from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:])) ## Instruction: Add EDITOR variable in urlaccesschecker ## Code After: import os import sys # fixed in cmd2 >=0.6.6 os.environ['EDITOR'] = '/usr/bin/nano' from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
+ import os import sys + # fixed in cmd2 >=0.6.6 + os.environ['EDITOR'] = '/usr/bin/nano' from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
d63d6070576bf22d60bf9684e417163201814353
webapp/worker.py
webapp/worker.py
"""Run a worker for the job queue.""" import os import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context if __name__ == '__main__': config_name = os.environ.get('ENVIRONMENT') if config_name is None: sys.stderr.write("Environment variable 'ENVIRONMENT' must be set but isn't.") sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
"""Run a worker for the job queue.""" import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context, get_config_name_from_env if __name__ == '__main__': try: config_name = get_config_name_from_env() except Exception as e: sys.stderr.write(str(e) + '\n') sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
Use bootstrap utility to retrieve the configuration name from the environment.
Use bootstrap utility to retrieve the configuration name from the environment.
Python
bsd-3-clause
m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
"""Run a worker for the job queue.""" - import os import sys from redis import StrictRedis from rq import Connection, Queue, Worker - from bootstrap.util import app_context + from bootstrap.util import app_context, get_config_name_from_env if __name__ == '__main__': - config_name = os.environ.get('ENVIRONMENT') - if config_name is None: - sys.stderr.write("Environment variable 'ENVIRONMENT' must be set but isn't.") + try: + config_name = get_config_name_from_env() + except Exception as e: + sys.stderr.write(str(e) + '\n') sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
Use bootstrap utility to retrieve the configuration name from the environment.
## Code Before: """Run a worker for the job queue.""" import os import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context if __name__ == '__main__': config_name = os.environ.get('ENVIRONMENT') if config_name is None: sys.stderr.write("Environment variable 'ENVIRONMENT' must be set but isn't.") sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work() ## Instruction: Use bootstrap utility to retrieve the configuration name from the environment. ## Code After: """Run a worker for the job queue.""" import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context, get_config_name_from_env if __name__ == '__main__': try: config_name = get_config_name_from_env() except Exception as e: sys.stderr.write(str(e) + '\n') sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
"""Run a worker for the job queue.""" - import os import sys from redis import StrictRedis from rq import Connection, Queue, Worker - from bootstrap.util import app_context + from bootstrap.util import app_context, get_config_name_from_env if __name__ == '__main__': - config_name = os.environ.get('ENVIRONMENT') - if config_name is None: - sys.stderr.write("Environment variable 'ENVIRONMENT' must be set but isn't.") + try: + config_name = get_config_name_from_env() + except Exception as e: + sys.stderr.write(str(e) + '\n') sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
423e4cc4b73e7c13d0796069733ee37aaad4c2e4
taar/recommenders/__init__.py
taar/recommenders/__init__.py
from .collaborative_recommender import CollaborativeRecommender from .locale_recommender import LocaleRecommender from .legacy_recommender import LegacyRecommender from .recommendation_manager import RecommendationManager __all__ = [ 'CollaborativeRecommender', 'LegacyRecommender', 'LocaleRecommender', 'RecommendationManager', ]
from .collaborative_recommender import CollaborativeRecommender from .locale_recommender import LocaleRecommender from .legacy_recommender import LegacyRecommender from .similarity_recommender import SimilarityRecommender from .recommendation_manager import RecommendationManager __all__ = [ 'CollaborativeRecommender', 'LegacyRecommender', 'LocaleRecommender', 'SimilarityRecommender', 'RecommendationManager', ]
Add SimilarityRecommender to init file
Add SimilarityRecommender to init file
Python
mpl-2.0
maurodoglio/taar
from .collaborative_recommender import CollaborativeRecommender from .locale_recommender import LocaleRecommender from .legacy_recommender import LegacyRecommender + from .similarity_recommender import SimilarityRecommender from .recommendation_manager import RecommendationManager __all__ = [ 'CollaborativeRecommender', 'LegacyRecommender', 'LocaleRecommender', + 'SimilarityRecommender', 'RecommendationManager', ]
Add SimilarityRecommender to init file
## Code Before: from .collaborative_recommender import CollaborativeRecommender from .locale_recommender import LocaleRecommender from .legacy_recommender import LegacyRecommender from .recommendation_manager import RecommendationManager __all__ = [ 'CollaborativeRecommender', 'LegacyRecommender', 'LocaleRecommender', 'RecommendationManager', ] ## Instruction: Add SimilarityRecommender to init file ## Code After: from .collaborative_recommender import CollaborativeRecommender from .locale_recommender import LocaleRecommender from .legacy_recommender import LegacyRecommender from .similarity_recommender import SimilarityRecommender from .recommendation_manager import RecommendationManager __all__ = [ 'CollaborativeRecommender', 'LegacyRecommender', 'LocaleRecommender', 'SimilarityRecommender', 'RecommendationManager', ]
from .collaborative_recommender import CollaborativeRecommender from .locale_recommender import LocaleRecommender from .legacy_recommender import LegacyRecommender + from .similarity_recommender import SimilarityRecommender from .recommendation_manager import RecommendationManager __all__ = [ 'CollaborativeRecommender', 'LegacyRecommender', 'LocaleRecommender', + 'SimilarityRecommender', 'RecommendationManager', ]
6b73de9fea31b7a5176601d7f19370291ba4e130
tests/test_transpiler.py
tests/test_transpiler.py
import os import unittest import transpiler class TestTranspiler: def test_transpiler_creates_files_without_format(self): transpiler.main(["--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h") def test_transpiler_creates_files_with_format(self): transpiler.main(["--format", "--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h")
import os import unittest import transpiler class TestTranspiler: def test_transpiler_creates_files_without_format(self): try: os.remove("/tmp/auto_functions.cpp") os.remove("/tmp/auto_functions.h") except FileNotFoundError: pass transpiler.main(["--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h") def test_transpiler_creates_files_with_format(self): try: os.remove("/tmp/auto_functions.cpp") os.remove("/tmp/auto_functions.h") except FileNotFoundError: pass transpiler.main(["--format", "--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h")
Make transpiler test remove files if they already exist
Make transpiler test remove files if they already exist
Python
mit
WesleyAC/lemonscript-transpiler,WesleyAC/lemonscript-transpiler,WesleyAC/lemonscript-transpiler
import os import unittest import transpiler class TestTranspiler: def test_transpiler_creates_files_without_format(self): + try: + os.remove("/tmp/auto_functions.cpp") + os.remove("/tmp/auto_functions.h") + except FileNotFoundError: + pass + transpiler.main(["--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h") def test_transpiler_creates_files_with_format(self): + try: + os.remove("/tmp/auto_functions.cpp") + os.remove("/tmp/auto_functions.h") + except FileNotFoundError: + pass + transpiler.main(["--format", "--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h")
Make transpiler test remove files if they already exist
## Code Before: import os import unittest import transpiler class TestTranspiler: def test_transpiler_creates_files_without_format(self): transpiler.main(["--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h") def test_transpiler_creates_files_with_format(self): transpiler.main(["--format", "--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h") ## Instruction: Make transpiler test remove files if they already exist ## Code After: import os import unittest import transpiler class TestTranspiler: def test_transpiler_creates_files_without_format(self): try: os.remove("/tmp/auto_functions.cpp") os.remove("/tmp/auto_functions.h") except FileNotFoundError: pass transpiler.main(["--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h") def test_transpiler_creates_files_with_format(self): try: os.remove("/tmp/auto_functions.cpp") os.remove("/tmp/auto_functions.h") except FileNotFoundError: pass transpiler.main(["--format", "--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h")
import os import unittest import transpiler class TestTranspiler: def test_transpiler_creates_files_without_format(self): + try: + os.remove("/tmp/auto_functions.cpp") + os.remove("/tmp/auto_functions.h") + except FileNotFoundError: + pass + transpiler.main(["--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h") def test_transpiler_creates_files_with_format(self): + try: + os.remove("/tmp/auto_functions.cpp") + os.remove("/tmp/auto_functions.h") + except FileNotFoundError: + pass + transpiler.main(["--format", "--output-dir", "/tmp"]) assert os.path.isfile("/tmp/auto_functions.cpp") assert os.path.isfile("/tmp/auto_functions.h")
759e22f8d629f76d7fca0d0567603c9ae6835fa6
api_v3/serializers/profile.py
api_v3/serializers/profile.py
from django.conf import settings from rest_framework import fields from rest_framework_json_api import serializers from api_v3.models import Profile, Ticket class ProfileSerializer(serializers.ModelSerializer): tickets_count = fields.SerializerMethodField() class Meta: model = Profile read_only_fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'locale' ) fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count' ) def get_tickets_count(self, obj): if obj.is_superuser: return Ticket.objects.count() else: return Ticket.filter_by_user(obj).count() def to_representation(self, obj): request = self.context.get('request', None) data = super(ProfileSerializer, self).to_representation(obj) if request and request.user and request.user.is_superuser: return data # For regular users, make sure others email is not displayed if request and request.user != obj: data.pop('email') return data # Adds extra application related metas. def get_root_meta(self, resource, many): if not self.context.get('add_misc', None): return {} return { 'member_centers': settings.MEMBER_CENTERS, 'expense_scopes': settings.EXPENSE_SCOPES }
from django.conf import settings from rest_framework import fields from rest_framework_json_api import serializers from api_v3.models import Profile, Ticket class ProfileSerializer(serializers.ModelSerializer): tickets_count = fields.SerializerMethodField() class Meta: model = Profile read_only_fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'locale' ) fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count' ) def get_tickets_count(self, obj): if obj.is_superuser: return Ticket.objects.count() else: return Ticket.filter_by_user(obj).count() def to_representation(self, obj): request = self.context.get('request', None) data = super(ProfileSerializer, self).to_representation(obj) if request and request.user and request.user.is_superuser: return data # For regular users, make sure others email is not displayed if request and request.user != obj: data.pop('email') return data # Adds extra application related metas. def get_root_meta(self, resource, many): if not self.context.get('add_misc', None): return {} return { 'member_centers': sorted(settings.MEMBER_CENTERS), 'expense_scopes': sorted(settings.EXPENSE_SCOPES) }
Return sorted member centers and expense scopes.
Return sorted member centers and expense scopes.
Python
mit
occrp/id-backend
from django.conf import settings from rest_framework import fields from rest_framework_json_api import serializers from api_v3.models import Profile, Ticket class ProfileSerializer(serializers.ModelSerializer): tickets_count = fields.SerializerMethodField() class Meta: model = Profile read_only_fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'locale' ) fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count' ) def get_tickets_count(self, obj): if obj.is_superuser: return Ticket.objects.count() else: return Ticket.filter_by_user(obj).count() def to_representation(self, obj): request = self.context.get('request', None) data = super(ProfileSerializer, self).to_representation(obj) if request and request.user and request.user.is_superuser: return data # For regular users, make sure others email is not displayed if request and request.user != obj: data.pop('email') return data # Adds extra application related metas. def get_root_meta(self, resource, many): if not self.context.get('add_misc', None): return {} return { - 'member_centers': settings.MEMBER_CENTERS, + 'member_centers': sorted(settings.MEMBER_CENTERS), - 'expense_scopes': settings.EXPENSE_SCOPES + 'expense_scopes': sorted(settings.EXPENSE_SCOPES) }
Return sorted member centers and expense scopes.
## Code Before: from django.conf import settings from rest_framework import fields from rest_framework_json_api import serializers from api_v3.models import Profile, Ticket class ProfileSerializer(serializers.ModelSerializer): tickets_count = fields.SerializerMethodField() class Meta: model = Profile read_only_fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'locale' ) fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count' ) def get_tickets_count(self, obj): if obj.is_superuser: return Ticket.objects.count() else: return Ticket.filter_by_user(obj).count() def to_representation(self, obj): request = self.context.get('request', None) data = super(ProfileSerializer, self).to_representation(obj) if request and request.user and request.user.is_superuser: return data # For regular users, make sure others email is not displayed if request and request.user != obj: data.pop('email') return data # Adds extra application related metas. def get_root_meta(self, resource, many): if not self.context.get('add_misc', None): return {} return { 'member_centers': settings.MEMBER_CENTERS, 'expense_scopes': settings.EXPENSE_SCOPES } ## Instruction: Return sorted member centers and expense scopes. 
## Code After: from django.conf import settings from rest_framework import fields from rest_framework_json_api import serializers from api_v3.models import Profile, Ticket class ProfileSerializer(serializers.ModelSerializer): tickets_count = fields.SerializerMethodField() class Meta: model = Profile read_only_fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'locale' ) fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count' ) def get_tickets_count(self, obj): if obj.is_superuser: return Ticket.objects.count() else: return Ticket.filter_by_user(obj).count() def to_representation(self, obj): request = self.context.get('request', None) data = super(ProfileSerializer, self).to_representation(obj) if request and request.user and request.user.is_superuser: return data # For regular users, make sure others email is not displayed if request and request.user != obj: data.pop('email') return data # Adds extra application related metas. def get_root_meta(self, resource, many): if not self.context.get('add_misc', None): return {} return { 'member_centers': sorted(settings.MEMBER_CENTERS), 'expense_scopes': sorted(settings.EXPENSE_SCOPES) }
from django.conf import settings from rest_framework import fields from rest_framework_json_api import serializers from api_v3.models import Profile, Ticket class ProfileSerializer(serializers.ModelSerializer): tickets_count = fields.SerializerMethodField() class Meta: model = Profile read_only_fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'locale' ) fields = ( 'id', 'email', 'first_name', 'last_name', 'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count' ) def get_tickets_count(self, obj): if obj.is_superuser: return Ticket.objects.count() else: return Ticket.filter_by_user(obj).count() def to_representation(self, obj): request = self.context.get('request', None) data = super(ProfileSerializer, self).to_representation(obj) if request and request.user and request.user.is_superuser: return data # For regular users, make sure others email is not displayed if request and request.user != obj: data.pop('email') return data # Adds extra application related metas. def get_root_meta(self, resource, many): if not self.context.get('add_misc', None): return {} return { - 'member_centers': settings.MEMBER_CENTERS, + 'member_centers': sorted(settings.MEMBER_CENTERS), ? +++++++ + - 'expense_scopes': settings.EXPENSE_SCOPES + 'expense_scopes': sorted(settings.EXPENSE_SCOPES) ? +++++++ + }
649bea9ce3ebaf4ba44919097b731ba915703852
alembic/versions/30d0a626888_add_username.py
alembic/versions/30d0a626888_add_username.py
# revision identifiers, used by Alembic. revision = '30d0a626888' down_revision = '51375067b45' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True)) op.create_unique_constraint( '_user_email_key', '_user', ['email']) def downgrade(): op.drop_column('_user', 'username') op.drop_constraint( '_user_email_key', table_name='_user', type_='unique')
# revision identifiers, used by Alembic. revision = '30d0a626888' down_revision = '51375067b45' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): """ SQL That equal to the following ALTER TABLE app_name._user ADD COLUMN username varchar(255); ALTER TABLE app_name._user ADD CONSTRAINT '_user_email_key' UNIQUE('email'); UPDATE app_name._version set version_num = '30d0a626888; """ op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True)) op.create_unique_constraint( '_user_email_key', '_user', ['email']) def downgrade(): op.drop_column('_user', 'username') op.drop_constraint( '_user_email_key', table_name='_user', type_='unique')
Add generate sql example as comment
Add generate sql example as comment
Python
apache-2.0
SkygearIO/skygear-server,rickmak/skygear-server,rickmak/skygear-server,SkygearIO/skygear-server,rickmak/skygear-server,SkygearIO/skygear-server,SkygearIO/skygear-server
# revision identifiers, used by Alembic. revision = '30d0a626888' down_revision = '51375067b45' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): + """ + SQL That equal to the following + ALTER TABLE app_name._user ADD COLUMN username varchar(255); + ALTER TABLE app_name._user ADD CONSTRAINT '_user_email_key' UNIQUE('email'); + UPDATE app_name._version set version_num = '30d0a626888; + """ op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True)) op.create_unique_constraint( '_user_email_key', '_user', ['email']) def downgrade(): op.drop_column('_user', 'username') op.drop_constraint( '_user_email_key', table_name='_user', type_='unique')
Add generate sql example as comment
## Code Before: # revision identifiers, used by Alembic. revision = '30d0a626888' down_revision = '51375067b45' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True)) op.create_unique_constraint( '_user_email_key', '_user', ['email']) def downgrade(): op.drop_column('_user', 'username') op.drop_constraint( '_user_email_key', table_name='_user', type_='unique') ## Instruction: Add generate sql example as comment ## Code After: # revision identifiers, used by Alembic. revision = '30d0a626888' down_revision = '51375067b45' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): """ SQL That equal to the following ALTER TABLE app_name._user ADD COLUMN username varchar(255); ALTER TABLE app_name._user ADD CONSTRAINT '_user_email_key' UNIQUE('email'); UPDATE app_name._version set version_num = '30d0a626888; """ op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True)) op.create_unique_constraint( '_user_email_key', '_user', ['email']) def downgrade(): op.drop_column('_user', 'username') op.drop_constraint( '_user_email_key', table_name='_user', type_='unique')
# revision identifiers, used by Alembic. revision = '30d0a626888' down_revision = '51375067b45' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): + """ + SQL That equal to the following + ALTER TABLE app_name._user ADD COLUMN username varchar(255); + ALTER TABLE app_name._user ADD CONSTRAINT '_user_email_key' UNIQUE('email'); + UPDATE app_name._version set version_num = '30d0a626888; + """ op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True)) op.create_unique_constraint( '_user_email_key', '_user', ['email']) def downgrade(): op.drop_column('_user', 'username') op.drop_constraint( '_user_email_key', table_name='_user', type_='unique')
697d30430fa908c6e2baf88285f0a464993d6636
formapi/compat.py
formapi/compat.py
import sys if sys.version_info[0] == 3: from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u # noinspection PyUnresolvedReferences from urllib.parse import quote ifilter = filter b_str = bytes u_str = str iteritems = lambda dic: dic.items() else: from django.utils.encoding import smart_str as smart_b, force_unicode as force_u, smart_text as smart_u # noinspection PyUnresolvedReferences from urllib2 import quote # noinspection PyUnresolvedReferences from itertools import ifilter b_str = str # noinspection PyUnresolvedReferences u_str = unicode iteritems = lambda dic: dic.iteritems() try: from django.conf.urls import patterns, url, include except ImportError: # noinspection PyUnresolvedReferences from django.conf.urls.defaults import patterns, url, include # Calm down unused import warnings: assert [smart_b, smart_u, force_u, quote, ifilter]
import sys if sys.version_info[0] == 3: from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u # noinspection PyUnresolvedReferences from urllib.parse import quote ifilter = filter b_str = bytes u_str = str iteritems = lambda dic: dic.items() else: from django.utils.encoding import smart_str as smart_b, force_unicode as force_u try: from django.utils.encoding import smart_text as smart_u except: # Django 1.3 from django.utils.encoding import smart_unicode as smart_u # noinspection PyUnresolvedReferences from urllib2 import quote # noinspection PyUnresolvedReferences from itertools import ifilter b_str = str # noinspection PyUnresolvedReferences u_str = unicode iteritems = lambda dic: dic.iteritems() try: from django.conf.urls import patterns, url, include except ImportError: # noinspection PyUnresolvedReferences from django.conf.urls.defaults import patterns, url, include # Calm down unused import warnings: assert [smart_b, smart_u, force_u, quote, ifilter]
Fix smart_u for Django 1.3
Fix smart_u for Django 1.3
Python
mit
5monkeys/django-formapi,andreif/django-formapi,5monkeys/django-formapi,andreif/django-formapi
import sys if sys.version_info[0] == 3: from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u # noinspection PyUnresolvedReferences from urllib.parse import quote ifilter = filter b_str = bytes u_str = str iteritems = lambda dic: dic.items() else: - from django.utils.encoding import smart_str as smart_b, force_unicode as force_u, smart_text as smart_u + from django.utils.encoding import smart_str as smart_b, force_unicode as force_u + try: + from django.utils.encoding import smart_text as smart_u + except: + # Django 1.3 + from django.utils.encoding import smart_unicode as smart_u # noinspection PyUnresolvedReferences from urllib2 import quote # noinspection PyUnresolvedReferences from itertools import ifilter b_str = str # noinspection PyUnresolvedReferences u_str = unicode iteritems = lambda dic: dic.iteritems() try: from django.conf.urls import patterns, url, include except ImportError: # noinspection PyUnresolvedReferences from django.conf.urls.defaults import patterns, url, include # Calm down unused import warnings: assert [smart_b, smart_u, force_u, quote, ifilter]
Fix smart_u for Django 1.3
## Code Before: import sys if sys.version_info[0] == 3: from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u # noinspection PyUnresolvedReferences from urllib.parse import quote ifilter = filter b_str = bytes u_str = str iteritems = lambda dic: dic.items() else: from django.utils.encoding import smart_str as smart_b, force_unicode as force_u, smart_text as smart_u # noinspection PyUnresolvedReferences from urllib2 import quote # noinspection PyUnresolvedReferences from itertools import ifilter b_str = str # noinspection PyUnresolvedReferences u_str = unicode iteritems = lambda dic: dic.iteritems() try: from django.conf.urls import patterns, url, include except ImportError: # noinspection PyUnresolvedReferences from django.conf.urls.defaults import patterns, url, include # Calm down unused import warnings: assert [smart_b, smart_u, force_u, quote, ifilter] ## Instruction: Fix smart_u for Django 1.3 ## Code After: import sys if sys.version_info[0] == 3: from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u # noinspection PyUnresolvedReferences from urllib.parse import quote ifilter = filter b_str = bytes u_str = str iteritems = lambda dic: dic.items() else: from django.utils.encoding import smart_str as smart_b, force_unicode as force_u try: from django.utils.encoding import smart_text as smart_u except: # Django 1.3 from django.utils.encoding import smart_unicode as smart_u # noinspection PyUnresolvedReferences from urllib2 import quote # noinspection PyUnresolvedReferences from itertools import ifilter b_str = str # noinspection PyUnresolvedReferences u_str = unicode iteritems = lambda dic: dic.iteritems() try: from django.conf.urls import patterns, url, include except ImportError: # noinspection PyUnresolvedReferences from django.conf.urls.defaults import patterns, url, include # Calm down unused import warnings: assert [smart_b, smart_u, force_u, quote, ifilter]
import sys if sys.version_info[0] == 3: from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u # noinspection PyUnresolvedReferences from urllib.parse import quote ifilter = filter b_str = bytes u_str = str iteritems = lambda dic: dic.items() else: - from django.utils.encoding import smart_str as smart_b, force_unicode as force_u, smart_text as smart_u ? ----------------------- + from django.utils.encoding import smart_str as smart_b, force_unicode as force_u + try: + from django.utils.encoding import smart_text as smart_u + except: + # Django 1.3 + from django.utils.encoding import smart_unicode as smart_u # noinspection PyUnresolvedReferences from urllib2 import quote # noinspection PyUnresolvedReferences from itertools import ifilter b_str = str # noinspection PyUnresolvedReferences u_str = unicode iteritems = lambda dic: dic.iteritems() try: from django.conf.urls import patterns, url, include except ImportError: # noinspection PyUnresolvedReferences from django.conf.urls.defaults import patterns, url, include # Calm down unused import warnings: assert [smart_b, smart_u, force_u, quote, ifilter]
8ca16832b54c887e6e3a84d7018181bf7e55fba0
comrade/core/context_processors.py
comrade/core/context_processors.py
from django.conf import settings from django.contrib.sites.models import Site from settings import DeploymentType def default(request): context = {} context['DEPLOYMENT'] = settings.DEPLOYMENT context['site'] = Site.objects.get_current() if settings.DEPLOYMENT != DeploymentType.PRODUCTION: context['GIT_COMMIT'] = settings.GIT_COMMIT return context
from django.conf import settings from django.contrib.sites.models import Site from settings import DeploymentType def default(request): context = {} context['DEPLOYMENT'] = settings.DEPLOYMENT context['site'] = Site.objects.get_current() if settings.DEPLOYMENT != DeploymentType.PRODUCTION: context['GIT_COMMIT'] = settings.GIT_COMMIT return context def ssl_media(request): if request.is_secure(): ssl_media_url = settings.MEDIA_URL.replace('http://','https://') else: ssl_media_url = settings.MEDIA_URL return {'MEDIA_URL': ssl_media_url}
Add SSL media context processor.
Add SSL media context processor.
Python
mit
bueda/django-comrade
from django.conf import settings from django.contrib.sites.models import Site from settings import DeploymentType def default(request): context = {} context['DEPLOYMENT'] = settings.DEPLOYMENT context['site'] = Site.objects.get_current() if settings.DEPLOYMENT != DeploymentType.PRODUCTION: context['GIT_COMMIT'] = settings.GIT_COMMIT return context + def ssl_media(request): + if request.is_secure(): + ssl_media_url = settings.MEDIA_URL.replace('http://','https://') + else: + ssl_media_url = settings.MEDIA_URL + return {'MEDIA_URL': ssl_media_url} +
Add SSL media context processor.
## Code Before: from django.conf import settings from django.contrib.sites.models import Site from settings import DeploymentType def default(request): context = {} context['DEPLOYMENT'] = settings.DEPLOYMENT context['site'] = Site.objects.get_current() if settings.DEPLOYMENT != DeploymentType.PRODUCTION: context['GIT_COMMIT'] = settings.GIT_COMMIT return context ## Instruction: Add SSL media context processor. ## Code After: from django.conf import settings from django.contrib.sites.models import Site from settings import DeploymentType def default(request): context = {} context['DEPLOYMENT'] = settings.DEPLOYMENT context['site'] = Site.objects.get_current() if settings.DEPLOYMENT != DeploymentType.PRODUCTION: context['GIT_COMMIT'] = settings.GIT_COMMIT return context def ssl_media(request): if request.is_secure(): ssl_media_url = settings.MEDIA_URL.replace('http://','https://') else: ssl_media_url = settings.MEDIA_URL return {'MEDIA_URL': ssl_media_url}
from django.conf import settings from django.contrib.sites.models import Site from settings import DeploymentType def default(request): context = {} context['DEPLOYMENT'] = settings.DEPLOYMENT context['site'] = Site.objects.get_current() if settings.DEPLOYMENT != DeploymentType.PRODUCTION: context['GIT_COMMIT'] = settings.GIT_COMMIT return context + + def ssl_media(request): + if request.is_secure(): + ssl_media_url = settings.MEDIA_URL.replace('http://','https://') + else: + ssl_media_url = settings.MEDIA_URL + return {'MEDIA_URL': ssl_media_url}
d9e7816da08a3d66e63356ea6f4474cc5f7d6b26
bush/main.py
bush/main.py
from bush import option from bush.spinner import Spinner from bush.aws.ec2 import EC2 from bush.aws.iam import IAM def run(): (options, args) = option.parse_args("bush") output = '' spinner = Spinner() spinner.start() if args[0] == 'ec2': ec2 = EC2(options) if args[1] == 'ls': output = ec2.ls() elif args[1] == "images": output = ec2.images() if args[0] == 'iam': iam = IAM(options) if args[1] == 'users': output = iam.list_users() if args[1] == 'keys': output = iam.list_access_keys() spinner.stop() if output: print("\n".join(output))
import sys import traceback from bush import option from bush.spinner import Spinner from bush.aws.ec2 import EC2 from bush.aws.iam import IAM def run(): (options, args) = option.parse_args("bush") output = '' spinner = Spinner() spinner.start() try: output = run_aws(options, args) except: spinner.stop() traceback.print_exc() sys.exit(2) spinner.stop() if output: print("\n".join(output)) def run_aws(options, args): if args[0] == 'ec2': ec2 = EC2(options) if args[1] == 'ls': output = ec2.ls() elif args[1] == "images": output = ec2.images() if args[0] == 'iam': iam = IAM(options) if args[1] == 'users': output = iam.list_users() if args[1] == 'keys': output = iam.list_access_keys() return output
Fix keep turning spinner when error occurred
Fix keep turning spinner when error occurred
Python
mit
okamos/bush
+ import sys + import traceback + from bush import option from bush.spinner import Spinner from bush.aws.ec2 import EC2 from bush.aws.iam import IAM def run(): (options, args) = option.parse_args("bush") output = '' spinner = Spinner() spinner.start() + try: + output = run_aws(options, args) + except: + spinner.stop() + traceback.print_exc() + sys.exit(2) + + spinner.stop() + if output: + print("\n".join(output)) + + def run_aws(options, args): if args[0] == 'ec2': ec2 = EC2(options) if args[1] == 'ls': output = ec2.ls() elif args[1] == "images": output = ec2.images() if args[0] == 'iam': iam = IAM(options) if args[1] == 'users': output = iam.list_users() if args[1] == 'keys': output = iam.list_access_keys() + return output - spinner.stop() - if output: - print("\n".join(output))
Fix keep turning spinner when error occurred
## Code Before: from bush import option from bush.spinner import Spinner from bush.aws.ec2 import EC2 from bush.aws.iam import IAM def run(): (options, args) = option.parse_args("bush") output = '' spinner = Spinner() spinner.start() if args[0] == 'ec2': ec2 = EC2(options) if args[1] == 'ls': output = ec2.ls() elif args[1] == "images": output = ec2.images() if args[0] == 'iam': iam = IAM(options) if args[1] == 'users': output = iam.list_users() if args[1] == 'keys': output = iam.list_access_keys() spinner.stop() if output: print("\n".join(output)) ## Instruction: Fix keep turning spinner when error occurred ## Code After: import sys import traceback from bush import option from bush.spinner import Spinner from bush.aws.ec2 import EC2 from bush.aws.iam import IAM def run(): (options, args) = option.parse_args("bush") output = '' spinner = Spinner() spinner.start() try: output = run_aws(options, args) except: spinner.stop() traceback.print_exc() sys.exit(2) spinner.stop() if output: print("\n".join(output)) def run_aws(options, args): if args[0] == 'ec2': ec2 = EC2(options) if args[1] == 'ls': output = ec2.ls() elif args[1] == "images": output = ec2.images() if args[0] == 'iam': iam = IAM(options) if args[1] == 'users': output = iam.list_users() if args[1] == 'keys': output = iam.list_access_keys() return output
+ import sys + import traceback + from bush import option from bush.spinner import Spinner from bush.aws.ec2 import EC2 from bush.aws.iam import IAM def run(): (options, args) = option.parse_args("bush") output = '' spinner = Spinner() spinner.start() + try: + output = run_aws(options, args) + except: + spinner.stop() + traceback.print_exc() + sys.exit(2) + + spinner.stop() + if output: + print("\n".join(output)) + + def run_aws(options, args): if args[0] == 'ec2': ec2 = EC2(options) if args[1] == 'ls': output = ec2.ls() elif args[1] == "images": output = ec2.images() if args[0] == 'iam': iam = IAM(options) if args[1] == 'users': output = iam.list_users() if args[1] == 'keys': output = iam.list_access_keys() + return output - spinner.stop() - if output: - print("\n".join(output))
3a5fb18a385ffd0533da94632d917e3c0bcfb051
tests/test_nulls.py
tests/test_nulls.py
from tests.models import EventWithNulls, EventWithNoNulls import pytest @pytest.mark.django_db def test_recurs_can_be_explicitly_none_if_none_is_allowed(): # Check we can save None correctly event = EventWithNulls.objects.create(recurs=None) assert event.recurs is None # Check we can deserialize None correctly reloaded = EventWithNulls.objects.get(pk=event.pk) assert reloaded.recurs is None @pytest.mark.django_db def test_recurs_cannot_be_explicitly_none_if_none_is_disallowed(): with pytest.raises(ValueError): EventWithNoNulls.objects.create(recurs=None)
from recurrence import Recurrence from tests.models import EventWithNulls, EventWithNoNulls import pytest @pytest.mark.django_db def test_recurs_can_be_explicitly_none_if_none_is_allowed(): # Check we can save None correctly event = EventWithNulls.objects.create(recurs=None) assert event.recurs is None # Check we can deserialize None correctly reloaded = EventWithNulls.objects.get(pk=event.pk) assert reloaded.recurs is None @pytest.mark.django_db def test_recurs_cannot_be_explicitly_none_if_none_is_disallowed(): with pytest.raises(ValueError): EventWithNoNulls.objects.create(recurs=None) @pytest.mark.django_db def test_recurs_can_be_empty_even_if_none_is_disallowed(): event = EventWithNoNulls.objects.create(recurs=Recurrence()) assert event.recurs == Recurrence()
Add a test for saving an empty recurrence object
Add a test for saving an empty recurrence object I wasn't sure whether this would fail on models which don't accept null values. Turns out it's allowed, so we should make sure it stays allowed.
Python
bsd-3-clause
linux2400/django-recurrence,linux2400/django-recurrence,django-recurrence/django-recurrence,Nikola-K/django-recurrence,FrankSalad/django-recurrence,Nikola-K/django-recurrence,FrankSalad/django-recurrence,django-recurrence/django-recurrence
+ from recurrence import Recurrence from tests.models import EventWithNulls, EventWithNoNulls import pytest @pytest.mark.django_db def test_recurs_can_be_explicitly_none_if_none_is_allowed(): # Check we can save None correctly event = EventWithNulls.objects.create(recurs=None) assert event.recurs is None # Check we can deserialize None correctly reloaded = EventWithNulls.objects.get(pk=event.pk) assert reloaded.recurs is None @pytest.mark.django_db def test_recurs_cannot_be_explicitly_none_if_none_is_disallowed(): with pytest.raises(ValueError): EventWithNoNulls.objects.create(recurs=None) + + @pytest.mark.django_db + def test_recurs_can_be_empty_even_if_none_is_disallowed(): + event = EventWithNoNulls.objects.create(recurs=Recurrence()) + assert event.recurs == Recurrence() +
Add a test for saving an empty recurrence object
## Code Before: from tests.models import EventWithNulls, EventWithNoNulls import pytest @pytest.mark.django_db def test_recurs_can_be_explicitly_none_if_none_is_allowed(): # Check we can save None correctly event = EventWithNulls.objects.create(recurs=None) assert event.recurs is None # Check we can deserialize None correctly reloaded = EventWithNulls.objects.get(pk=event.pk) assert reloaded.recurs is None @pytest.mark.django_db def test_recurs_cannot_be_explicitly_none_if_none_is_disallowed(): with pytest.raises(ValueError): EventWithNoNulls.objects.create(recurs=None) ## Instruction: Add a test for saving an empty recurrence object ## Code After: from recurrence import Recurrence from tests.models import EventWithNulls, EventWithNoNulls import pytest @pytest.mark.django_db def test_recurs_can_be_explicitly_none_if_none_is_allowed(): # Check we can save None correctly event = EventWithNulls.objects.create(recurs=None) assert event.recurs is None # Check we can deserialize None correctly reloaded = EventWithNulls.objects.get(pk=event.pk) assert reloaded.recurs is None @pytest.mark.django_db def test_recurs_cannot_be_explicitly_none_if_none_is_disallowed(): with pytest.raises(ValueError): EventWithNoNulls.objects.create(recurs=None) @pytest.mark.django_db def test_recurs_can_be_empty_even_if_none_is_disallowed(): event = EventWithNoNulls.objects.create(recurs=Recurrence()) assert event.recurs == Recurrence()
+ from recurrence import Recurrence from tests.models import EventWithNulls, EventWithNoNulls import pytest @pytest.mark.django_db def test_recurs_can_be_explicitly_none_if_none_is_allowed(): # Check we can save None correctly event = EventWithNulls.objects.create(recurs=None) assert event.recurs is None # Check we can deserialize None correctly reloaded = EventWithNulls.objects.get(pk=event.pk) assert reloaded.recurs is None @pytest.mark.django_db def test_recurs_cannot_be_explicitly_none_if_none_is_disallowed(): with pytest.raises(ValueError): EventWithNoNulls.objects.create(recurs=None) + + + @pytest.mark.django_db + def test_recurs_can_be_empty_even_if_none_is_disallowed(): + event = EventWithNoNulls.objects.create(recurs=Recurrence()) + assert event.recurs == Recurrence()
6a7a61d514ac738f8de29efe280ecfedfaf72685
ttrss/auth.py
ttrss/auth.py
from requests.auth import AuthBase import requests import json from exceptions import raise_on_error class TTRAuth(AuthBase): def __init__(self, user, password): self.user = user self.password = password def response_hook(self, r, **kwargs): j = json.loads(r.content) if int(j['status']) == 0: return r sid = None if r.headers['set-cookie']: sid = r.headers['set-cookie'].split(';')[0].split('=')[1] r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid) else: sid = r.request.headers['Cookie'].split('=')[1] res = requests.post(r.request.url, json.dumps({ 'sid': sid, 'op': 'login', 'user': self.user, 'password': self.password })) raise_on_error(res) r.request.deregister_hook('response', self.response_hook) _r = requests.Session().send(r.request) _r.cookies = r.cookies raise_on_error(_r) return _r def __call__(self, r): r.register_hook('response', self.response_hook) return r
from requests.auth import AuthBase import requests import json from exceptions import raise_on_error class TTRAuth(AuthBase): def __init__(self, user, password): self.user = user self.password = password def response_hook(self, r, **kwargs): j = json.loads(r.content) if int(j['status']) == 0: return r sid = None if 'ttrss_api_sid' in r.cookies: sid = r.cookies['ttrss_api_sid'] r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid) else: sid = r.request.headers['Cookie'].split('=')[1] res = requests.post(r.request.url, json.dumps({ 'sid': sid, 'op': 'login', 'user': self.user, 'password': self.password })) raise_on_error(res) r.request.deregister_hook('response', self.response_hook) _r = requests.Session().send(r.request) _r.cookies = r.cookies raise_on_error(_r) return _r def __call__(self, r): r.register_hook('response', self.response_hook) return r
Clean up cookie lookup in TTRAuth
Clean up cookie lookup in TTRAuth
Python
mit
Vassius/ttrss-python
from requests.auth import AuthBase import requests import json from exceptions import raise_on_error class TTRAuth(AuthBase): def __init__(self, user, password): self.user = user self.password = password def response_hook(self, r, **kwargs): j = json.loads(r.content) if int(j['status']) == 0: return r sid = None - if r.headers['set-cookie']: - sid = r.headers['set-cookie'].split(';')[0].split('=')[1] + if 'ttrss_api_sid' in r.cookies: + sid = r.cookies['ttrss_api_sid'] r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid) else: sid = r.request.headers['Cookie'].split('=')[1] res = requests.post(r.request.url, json.dumps({ 'sid': sid, 'op': 'login', 'user': self.user, 'password': self.password })) raise_on_error(res) r.request.deregister_hook('response', self.response_hook) _r = requests.Session().send(r.request) _r.cookies = r.cookies raise_on_error(_r) return _r def __call__(self, r): r.register_hook('response', self.response_hook) return r
Clean up cookie lookup in TTRAuth
## Code Before: from requests.auth import AuthBase import requests import json from exceptions import raise_on_error class TTRAuth(AuthBase): def __init__(self, user, password): self.user = user self.password = password def response_hook(self, r, **kwargs): j = json.loads(r.content) if int(j['status']) == 0: return r sid = None if r.headers['set-cookie']: sid = r.headers['set-cookie'].split(';')[0].split('=')[1] r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid) else: sid = r.request.headers['Cookie'].split('=')[1] res = requests.post(r.request.url, json.dumps({ 'sid': sid, 'op': 'login', 'user': self.user, 'password': self.password })) raise_on_error(res) r.request.deregister_hook('response', self.response_hook) _r = requests.Session().send(r.request) _r.cookies = r.cookies raise_on_error(_r) return _r def __call__(self, r): r.register_hook('response', self.response_hook) return r ## Instruction: Clean up cookie lookup in TTRAuth ## Code After: from requests.auth import AuthBase import requests import json from exceptions import raise_on_error class TTRAuth(AuthBase): def __init__(self, user, password): self.user = user self.password = password def response_hook(self, r, **kwargs): j = json.loads(r.content) if int(j['status']) == 0: return r sid = None if 'ttrss_api_sid' in r.cookies: sid = r.cookies['ttrss_api_sid'] r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid) else: sid = r.request.headers['Cookie'].split('=')[1] res = requests.post(r.request.url, json.dumps({ 'sid': sid, 'op': 'login', 'user': self.user, 'password': self.password })) raise_on_error(res) r.request.deregister_hook('response', self.response_hook) _r = requests.Session().send(r.request) _r.cookies = r.cookies raise_on_error(_r) return _r def __call__(self, r): r.register_hook('response', self.response_hook) return r
from requests.auth import AuthBase import requests import json from exceptions import raise_on_error class TTRAuth(AuthBase): def __init__(self, user, password): self.user = user self.password = password def response_hook(self, r, **kwargs): j = json.loads(r.content) if int(j['status']) == 0: return r sid = None - if r.headers['set-cookie']: - sid = r.headers['set-cookie'].split(';')[0].split('=')[1] + if 'ttrss_api_sid' in r.cookies: + sid = r.cookies['ttrss_api_sid'] r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid) else: sid = r.request.headers['Cookie'].split('=')[1] res = requests.post(r.request.url, json.dumps({ 'sid': sid, 'op': 'login', 'user': self.user, 'password': self.password })) raise_on_error(res) r.request.deregister_hook('response', self.response_hook) _r = requests.Session().send(r.request) _r.cookies = r.cookies raise_on_error(_r) return _r def __call__(self, r): r.register_hook('response', self.response_hook) return r
d18703237300f0e6b7d2a1ca88fbfa884e77c1b5
partner_event/models/res_partner.py
partner_event/models/res_partner.py
from openerp import models, fields, api class ResPartner(models.Model): _inherit = 'res.partner' registrations = fields.One2many( string="Event registrations", comodel_name='event.registration', inverse_name="partner_id") registration_count = fields.Integer( string='Event registrations number', compute='_count_registration', store=True) attended_registration_count = fields.Integer( string='Event attended registrations number', compute='_count_attended_registration', store=True) @api.one @api.depends('registrations') def _count_registration(self): self.registration_count = len(self.registrations) @api.one @api.depends('registrations') def _count_attended_registration(self): self.attended_registration_count = len(self.registrations.filtered( lambda x: x.state == 'done'))
from openerp import models, fields, api class ResPartner(models.Model): _inherit = 'res.partner' registrations = fields.One2many( string="Event registrations", comodel_name='event.registration', inverse_name="partner_id") registration_count = fields.Integer( string='Event registrations number', compute='_count_registration', store=True) attended_registration_count = fields.Integer( string='Event attended registrations number', compute='_count_registration', store=True) @api.one @api.depends('registrations') def _count_registration(self): self.registration_count = len(self.registrations) self.attended_registration_count = len(self.registrations.filtered( lambda x: x.state == 'done'))
Use only one method to recalculate event counters
Use only one method to recalculate event counters
Python
agpl-3.0
open-synergy/event,Endika/event,Antiun/event,open-synergy/event
from openerp import models, fields, api class ResPartner(models.Model): _inherit = 'res.partner' registrations = fields.One2many( string="Event registrations", comodel_name='event.registration', inverse_name="partner_id") registration_count = fields.Integer( string='Event registrations number', compute='_count_registration', store=True) attended_registration_count = fields.Integer( string='Event attended registrations number', - compute='_count_attended_registration', store=True) + compute='_count_registration', store=True) @api.one @api.depends('registrations') def _count_registration(self): self.registration_count = len(self.registrations) - - @api.one - @api.depends('registrations') - def _count_attended_registration(self): self.attended_registration_count = len(self.registrations.filtered( lambda x: x.state == 'done'))
Use only one method to recalculate event counters
## Code Before: from openerp import models, fields, api class ResPartner(models.Model): _inherit = 'res.partner' registrations = fields.One2many( string="Event registrations", comodel_name='event.registration', inverse_name="partner_id") registration_count = fields.Integer( string='Event registrations number', compute='_count_registration', store=True) attended_registration_count = fields.Integer( string='Event attended registrations number', compute='_count_attended_registration', store=True) @api.one @api.depends('registrations') def _count_registration(self): self.registration_count = len(self.registrations) @api.one @api.depends('registrations') def _count_attended_registration(self): self.attended_registration_count = len(self.registrations.filtered( lambda x: x.state == 'done')) ## Instruction: Use only one method to recalculate event counters ## Code After: from openerp import models, fields, api class ResPartner(models.Model): _inherit = 'res.partner' registrations = fields.One2many( string="Event registrations", comodel_name='event.registration', inverse_name="partner_id") registration_count = fields.Integer( string='Event registrations number', compute='_count_registration', store=True) attended_registration_count = fields.Integer( string='Event attended registrations number', compute='_count_registration', store=True) @api.one @api.depends('registrations') def _count_registration(self): self.registration_count = len(self.registrations) self.attended_registration_count = len(self.registrations.filtered( lambda x: x.state == 'done'))
from openerp import models, fields, api class ResPartner(models.Model): _inherit = 'res.partner' registrations = fields.One2many( string="Event registrations", comodel_name='event.registration', inverse_name="partner_id") registration_count = fields.Integer( string='Event registrations number', compute='_count_registration', store=True) attended_registration_count = fields.Integer( string='Event attended registrations number', - compute='_count_attended_registration', store=True) ? --------- + compute='_count_registration', store=True) @api.one @api.depends('registrations') def _count_registration(self): self.registration_count = len(self.registrations) - - @api.one - @api.depends('registrations') - def _count_attended_registration(self): self.attended_registration_count = len(self.registrations.filtered( lambda x: x.state == 'done'))
656c0a9b91ee6f6f3f9811b16ab75dc8003402ad
altair/examples/line_chart_with_generator.py
altair/examples/line_chart_with_generator.py
# category: line charts import altair as alt source = alt.sequence(start=0, stop=12.7, step=0.1, as_='x') alt.Chart(source).mark_line().transform_calculate( sin='sin(datum.x)' ).transform_calculate( cos='cos(datum.x)' ).transform_fold( ['sin', 'cos'] ).encode( x='x:Q', y='value:Q', color='key:N' )
# category: line charts import altair as alt source = alt.sequence(start=0, stop=12.7, step=0.1, as_='x') alt.Chart(source).mark_line().transform_calculate( sin='sin(datum.x)', cos='cos(datum.x)' ).transform_fold( ['sin', 'cos'] ).encode( x='x:Q', y='value:Q', color='key:N' )
Modify generator example to use single calculation transform
DOC: Modify generator example to use single calculation transform
Python
bsd-3-clause
jakevdp/altair,altair-viz/altair
# category: line charts import altair as alt source = alt.sequence(start=0, stop=12.7, step=0.1, as_='x') alt.Chart(source).mark_line().transform_calculate( - sin='sin(datum.x)' + sin='sin(datum.x)', - ).transform_calculate( cos='cos(datum.x)' ).transform_fold( ['sin', 'cos'] ).encode( x='x:Q', y='value:Q', color='key:N' )
Modify generator example to use single calculation transform
## Code Before: # category: line charts import altair as alt source = alt.sequence(start=0, stop=12.7, step=0.1, as_='x') alt.Chart(source).mark_line().transform_calculate( sin='sin(datum.x)' ).transform_calculate( cos='cos(datum.x)' ).transform_fold( ['sin', 'cos'] ).encode( x='x:Q', y='value:Q', color='key:N' ) ## Instruction: Modify generator example to use single calculation transform ## Code After: # category: line charts import altair as alt source = alt.sequence(start=0, stop=12.7, step=0.1, as_='x') alt.Chart(source).mark_line().transform_calculate( sin='sin(datum.x)', cos='cos(datum.x)' ).transform_fold( ['sin', 'cos'] ).encode( x='x:Q', y='value:Q', color='key:N' )
# category: line charts import altair as alt source = alt.sequence(start=0, stop=12.7, step=0.1, as_='x') alt.Chart(source).mark_line().transform_calculate( - sin='sin(datum.x)' + sin='sin(datum.x)', ? + - ).transform_calculate( cos='cos(datum.x)' ).transform_fold( ['sin', 'cos'] ).encode( x='x:Q', y='value:Q', color='key:N' )
c99f5ffe9e23eeeb8f168c54d5f4b419e553d3b3
wizd/gnotifier.py
wizd/gnotifier.py
import pyinotify import gobject class GNotifier(pyinotify.Notifier): """ A notifier that can be attached to a mainloop """ def __init__(self, watch_manager, default_proc_fun=ProcessEvent()): """ Initialization. @param watch_manager: Watch Manager. @type watch_manager: WatchManager instance @param default_proc_fun: Default processing method. @type default_proc_fun: instance of ProcessEvent """ pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun) self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io) def _process_io(self): self.read_events() self.process_events() def stop(self): gobject.source_remove(self._handler) pyinotify.Notifier.stop(self)
import pyinotify import gobject class GNotifier(pyinotify.Notifier): """ A notifier that can be attached to a mainloop """ def __init__(self, watch_manager, default_proc_fun=pyinotify.ProcessEvent()): """ Initialization. @param watch_manager: Watch Manager. @type watch_manager: WatchManager instance @param default_proc_fun: Default processing method. @type default_proc_fun: instance of ProcessEvent """ pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun) self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io) def _process_io(self, foo, bar): self.read_events() self.process_events() return True def stop(self): gobject.source_remove(self._handler) pyinotify.Notifier.stop(self) if __name__ == "__main__": import sys wm = pyinotify.WatchManager() n = GNotifier(wm) if len(sys.argv) > 1: name = sys.argv[1] else: name = "/tmp" wm.add_watch(name, pyinotify.EventsCodes.ALL_EVENTS, rec=True, auto_add=True) mainloop = gobject.MainLoop() try: mainloop.run() except KeyboardInterrupt: pass
Add test for GNotifier, fix up bugs
Add test for GNotifier, fix up bugs
Python
lgpl-2.1
wizbit-archive/wizbit,wizbit-archive/wizbit
import pyinotify import gobject class GNotifier(pyinotify.Notifier): """ A notifier that can be attached to a mainloop """ - def __init__(self, watch_manager, default_proc_fun=ProcessEvent()): + def __init__(self, watch_manager, default_proc_fun=pyinotify.ProcessEvent()): """ Initialization. @param watch_manager: Watch Manager. @type watch_manager: WatchManager instance @param default_proc_fun: Default processing method. @type default_proc_fun: instance of ProcessEvent """ pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun) self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io) - def _process_io(self): + def _process_io(self, foo, bar): self.read_events() self.process_events() + return True def stop(self): gobject.source_remove(self._handler) pyinotify.Notifier.stop(self) + if __name__ == "__main__": + import sys + + wm = pyinotify.WatchManager() + n = GNotifier(wm) + + if len(sys.argv) > 1: + name = sys.argv[1] + else: + name = "/tmp" + wm.add_watch(name, pyinotify.EventsCodes.ALL_EVENTS, rec=True, auto_add=True) + + mainloop = gobject.MainLoop() + try: + mainloop.run() + except KeyboardInterrupt: + pass +
Add test for GNotifier, fix up bugs
## Code Before: import pyinotify import gobject class GNotifier(pyinotify.Notifier): """ A notifier that can be attached to a mainloop """ def __init__(self, watch_manager, default_proc_fun=ProcessEvent()): """ Initialization. @param watch_manager: Watch Manager. @type watch_manager: WatchManager instance @param default_proc_fun: Default processing method. @type default_proc_fun: instance of ProcessEvent """ pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun) self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io) def _process_io(self): self.read_events() self.process_events() def stop(self): gobject.source_remove(self._handler) pyinotify.Notifier.stop(self) ## Instruction: Add test for GNotifier, fix up bugs ## Code After: import pyinotify import gobject class GNotifier(pyinotify.Notifier): """ A notifier that can be attached to a mainloop """ def __init__(self, watch_manager, default_proc_fun=pyinotify.ProcessEvent()): """ Initialization. @param watch_manager: Watch Manager. @type watch_manager: WatchManager instance @param default_proc_fun: Default processing method. @type default_proc_fun: instance of ProcessEvent """ pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun) self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io) def _process_io(self, foo, bar): self.read_events() self.process_events() return True def stop(self): gobject.source_remove(self._handler) pyinotify.Notifier.stop(self) if __name__ == "__main__": import sys wm = pyinotify.WatchManager() n = GNotifier(wm) if len(sys.argv) > 1: name = sys.argv[1] else: name = "/tmp" wm.add_watch(name, pyinotify.EventsCodes.ALL_EVENTS, rec=True, auto_add=True) mainloop = gobject.MainLoop() try: mainloop.run() except KeyboardInterrupt: pass
import pyinotify import gobject class GNotifier(pyinotify.Notifier): """ A notifier that can be attached to a mainloop """ - def __init__(self, watch_manager, default_proc_fun=ProcessEvent()): + def __init__(self, watch_manager, default_proc_fun=pyinotify.ProcessEvent()): ? ++++++++++ """ Initialization. @param watch_manager: Watch Manager. @type watch_manager: WatchManager instance @param default_proc_fun: Default processing method. @type default_proc_fun: instance of ProcessEvent """ pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun) self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io) - def _process_io(self): + def _process_io(self, foo, bar): ? ++++++++++ self.read_events() self.process_events() + return True def stop(self): gobject.source_remove(self._handler) pyinotify.Notifier.stop(self) + + if __name__ == "__main__": + import sys + + wm = pyinotify.WatchManager() + n = GNotifier(wm) + + if len(sys.argv) > 1: + name = sys.argv[1] + else: + name = "/tmp" + wm.add_watch(name, pyinotify.EventsCodes.ALL_EVENTS, rec=True, auto_add=True) + + mainloop = gobject.MainLoop() + try: + mainloop.run() + except KeyboardInterrupt: + pass
ef285377df575e101dd59d11bfec9f5b5d12ab2e
gears/asset_handler.py
gears/asset_handler.py
from functools import wraps class BaseAssetHandler(object): def __call__(self, asset): raise NotImplementedError @classmethod def as_handler(cls, **initkwargs): @wraps(cls, updated=()) def handler(asset): return handler.handler_class(**initkwargs)(asset) handler.handler_class = cls return handler
from functools import wraps class BaseAssetHandler(object): """Base class for all asset handlers (processors, compilers and compressors). A subclass has to implement :meth:`__call__` which is called with asset as argument. """ def __call__(self, asset): """Subclasses have to override this method to implement the actual handler function code. This method is called with asset as argument. Depending on the type of the handler, this method must change asset state (as it does in :class:`~gears.processors.Directivesprocessor`) or return some value (in case of asset compressors). """ raise NotImplementedError @classmethod def as_handler(cls, **initkwargs): """Converts the class into an actual handler function that can be used when registering different types of processors in :class:`~gears.enviroment.Environment` class instance. The arguments passed to :meth:`as_handler` are forwarded to the constructor of the class. """ @wraps(cls, updated=()) def handler(asset): return handler.handler_class(**initkwargs)(asset) handler.handler_class = cls return handler
Add docstrings to AssetHandler class
Add docstrings to AssetHandler class
Python
isc
gears/gears,gears/gears,gears/gears
+ from functools import wraps class BaseAssetHandler(object): + """Base class for all asset handlers (processors, compilers and + compressors). A subclass has to implement :meth:`__call__` which is called + with asset as argument. + """ def __call__(self, asset): + """Subclasses have to override this method to implement the actual + handler function code. This method is called with asset as argument. + Depending on the type of the handler, this method must change asset + state (as it does in :class:`~gears.processors.Directivesprocessor`) + or return some value (in case of asset compressors). + """ raise NotImplementedError @classmethod def as_handler(cls, **initkwargs): + """Converts the class into an actual handler function that can be used + when registering different types of processors in + :class:`~gears.enviroment.Environment` class instance. + + The arguments passed to :meth:`as_handler` are forwarded to the + constructor of the class. + """ @wraps(cls, updated=()) def handler(asset): return handler.handler_class(**initkwargs)(asset) handler.handler_class = cls return handler
Add docstrings to AssetHandler class
## Code Before: from functools import wraps class BaseAssetHandler(object): def __call__(self, asset): raise NotImplementedError @classmethod def as_handler(cls, **initkwargs): @wraps(cls, updated=()) def handler(asset): return handler.handler_class(**initkwargs)(asset) handler.handler_class = cls return handler ## Instruction: Add docstrings to AssetHandler class ## Code After: from functools import wraps class BaseAssetHandler(object): """Base class for all asset handlers (processors, compilers and compressors). A subclass has to implement :meth:`__call__` which is called with asset as argument. """ def __call__(self, asset): """Subclasses have to override this method to implement the actual handler function code. This method is called with asset as argument. Depending on the type of the handler, this method must change asset state (as it does in :class:`~gears.processors.Directivesprocessor`) or return some value (in case of asset compressors). """ raise NotImplementedError @classmethod def as_handler(cls, **initkwargs): """Converts the class into an actual handler function that can be used when registering different types of processors in :class:`~gears.enviroment.Environment` class instance. The arguments passed to :meth:`as_handler` are forwarded to the constructor of the class. """ @wraps(cls, updated=()) def handler(asset): return handler.handler_class(**initkwargs)(asset) handler.handler_class = cls return handler
+ from functools import wraps class BaseAssetHandler(object): + """Base class for all asset handlers (processors, compilers and + compressors). A subclass has to implement :meth:`__call__` which is called + with asset as argument. + """ def __call__(self, asset): + """Subclasses have to override this method to implement the actual + handler function code. This method is called with asset as argument. + Depending on the type of the handler, this method must change asset + state (as it does in :class:`~gears.processors.Directivesprocessor`) + or return some value (in case of asset compressors). + """ raise NotImplementedError @classmethod def as_handler(cls, **initkwargs): + """Converts the class into an actual handler function that can be used + when registering different types of processors in + :class:`~gears.enviroment.Environment` class instance. + + The arguments passed to :meth:`as_handler` are forwarded to the + constructor of the class. + """ @wraps(cls, updated=()) def handler(asset): return handler.handler_class(**initkwargs)(asset) handler.handler_class = cls return handler
12c1ee81843e3e163756a39b68671cf5c1842df2
scrapi/harvesters/mason_archival.py
scrapi/harvesters/mason_archival.py
from __future__ import unicode_literals from scrapi.base import OAIHarvester class MasonArchival(OAIHarvester): short_name = 'mason' long_name = 'Mason Archival Repository Service' url = 'http://mars.gmu.edu/' base_url = 'http://mars.gmu.edu/oai/request' timezone_granularity = True property_list = [ 'type', 'source', 'setSpec', 'format', 'identifier' ] approved_sets = [ 'col_1920_6102', 'col_1920_6039', 'com_1920_262', 'com_1920_466', 'com_1920_1320', 'com_1920_2852', 'com_1920_2869', 'com_1920_2883', 'com_1920_3011', 'com_1920_7520', 'com_1920_8132', 'com_1920_8138', 'col_1920_13' ]
from __future__ import unicode_literals from scrapi.base import OAIHarvester class MasonArchival(OAIHarvester): short_name = 'mason' long_name = 'Mason Archival Repository Service' url = 'http://mars.gmu.edu/' base_url = 'http://mars.gmu.edu/oai/request' timezone_granularity = True property_list = [ 'type', 'source', 'setSpec', 'format', 'identifier' ] approved_sets = [ 'col_1920_6102', 'col_1920_6039', 'com_1920_262', 'com_1920_466', 'com_1920_1320', 'com_1920_2852', 'com_1920_2869', 'com_1920_2883', 'com_1920_3011', 'com_1920_7520', 'com_1920_8132', 'com_1920_8138', 'col_1920_13', 'com_1920_2811' ]
Add approved set to gmu
Add approved set to gmu
Python
apache-2.0
CenterForOpenScience/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,erinspace/scrapi,erinspace/scrapi
from __future__ import unicode_literals from scrapi.base import OAIHarvester class MasonArchival(OAIHarvester): short_name = 'mason' long_name = 'Mason Archival Repository Service' url = 'http://mars.gmu.edu/' base_url = 'http://mars.gmu.edu/oai/request' timezone_granularity = True property_list = [ 'type', 'source', 'setSpec', 'format', 'identifier' ] approved_sets = [ 'col_1920_6102', 'col_1920_6039', 'com_1920_262', 'com_1920_466', 'com_1920_1320', 'com_1920_2852', 'com_1920_2869', 'com_1920_2883', 'com_1920_3011', 'com_1920_7520', 'com_1920_8132', 'com_1920_8138', - 'col_1920_13' + 'col_1920_13', + 'com_1920_2811' ]
Add approved set to gmu
## Code Before: from __future__ import unicode_literals from scrapi.base import OAIHarvester class MasonArchival(OAIHarvester): short_name = 'mason' long_name = 'Mason Archival Repository Service' url = 'http://mars.gmu.edu/' base_url = 'http://mars.gmu.edu/oai/request' timezone_granularity = True property_list = [ 'type', 'source', 'setSpec', 'format', 'identifier' ] approved_sets = [ 'col_1920_6102', 'col_1920_6039', 'com_1920_262', 'com_1920_466', 'com_1920_1320', 'com_1920_2852', 'com_1920_2869', 'com_1920_2883', 'com_1920_3011', 'com_1920_7520', 'com_1920_8132', 'com_1920_8138', 'col_1920_13' ] ## Instruction: Add approved set to gmu ## Code After: from __future__ import unicode_literals from scrapi.base import OAIHarvester class MasonArchival(OAIHarvester): short_name = 'mason' long_name = 'Mason Archival Repository Service' url = 'http://mars.gmu.edu/' base_url = 'http://mars.gmu.edu/oai/request' timezone_granularity = True property_list = [ 'type', 'source', 'setSpec', 'format', 'identifier' ] approved_sets = [ 'col_1920_6102', 'col_1920_6039', 'com_1920_262', 'com_1920_466', 'com_1920_1320', 'com_1920_2852', 'com_1920_2869', 'com_1920_2883', 'com_1920_3011', 'com_1920_7520', 'com_1920_8132', 'com_1920_8138', 'col_1920_13', 'com_1920_2811' ]
from __future__ import unicode_literals from scrapi.base import OAIHarvester class MasonArchival(OAIHarvester): short_name = 'mason' long_name = 'Mason Archival Repository Service' url = 'http://mars.gmu.edu/' base_url = 'http://mars.gmu.edu/oai/request' timezone_granularity = True property_list = [ 'type', 'source', 'setSpec', 'format', 'identifier' ] approved_sets = [ 'col_1920_6102', 'col_1920_6039', 'com_1920_262', 'com_1920_466', 'com_1920_1320', 'com_1920_2852', 'com_1920_2869', 'com_1920_2883', 'com_1920_3011', 'com_1920_7520', 'com_1920_8132', 'com_1920_8138', - 'col_1920_13' + 'col_1920_13', ? + + 'com_1920_2811' ]
59651470489a4479db6d9a79de3aacee6b9d7cd8
travis/wait-until-cluster-initialised.py
travis/wait-until-cluster-initialised.py
import json import sys import time from urllib.request import urlopen STATS_URL = "http://localhost:18001/stats" MAXIMUM_TIME_SECONDS = 2 * 60 SLEEPING_INTERVAL_SECONDS = 1 STATUS_CODE_OK = 200 def is_initialised(): try: response = urlopen(STATS_URL) if (response.getcode() == STATUS_CODE_OK): encoding = response.info().get_content_charset('utf-8') content = response.read().decode(encoding) return json.loads(content)['initialised'] else: return False except Exception as e: return False def wait_until_cluster_initialised(): start = time.time() elapsed = 0.0 while elapsed < MAXIMUM_TIME_SECONDS: if is_initialised(): print("Cluster initialised!") break elapsed = time.time() - start print("Cluster not initialised... keep waiting... elapsed time: {1:.2f} seconds.".format( SLEEPING_INTERVAL_SECONDS, elapsed)) time.sleep(SLEEPING_INTERVAL_SECONDS) elapsed = time.time() - start else: sys.exit("Cluster not initialised after {} seconds. I give up!".format(MAXIMUM_TIME_SECONDS)) if __name__ == "__main__": wait_until_cluster_initialised()
import json import sys import time from urllib.request import urlopen STATS_URL = "http://localhost:18001/stats" MAXIMUM_TIME_SECONDS = 2 * 60 SLEEPING_INTERVAL_SECONDS = 1 STATUS_CODE_OK = 200 def is_initialised(): try: response = urlopen(STATS_URL) if (response.getcode() == STATUS_CODE_OK): encoding = response.info().get_content_charset('utf-8') content = response.read().decode(encoding) return json.loads(content)['initialised'] else: return False except Exception as e: return False def wait_until_cluster_initialised(): start = time.time() elapsed = 0.0 while elapsed < MAXIMUM_TIME_SECONDS: if is_initialised(): print("Cluster initialised!") break elapsed = time.time() - start print("Cluster not initialised... keep waiting... elapsed time: {0:.2f} seconds.".format(elapsed)) time.sleep(SLEEPING_INTERVAL_SECONDS) elapsed = time.time() - start else: sys.exit("Cluster not initialised after {} seconds. I give up!".format(MAXIMUM_TIME_SECONDS)) if __name__ == "__main__": wait_until_cluster_initialised()
Remove unused left over parameter
Remove unused left over parameter
Python
apache-2.0
codiply/barrio,codiply/barrio
import json import sys import time from urllib.request import urlopen STATS_URL = "http://localhost:18001/stats" MAXIMUM_TIME_SECONDS = 2 * 60 SLEEPING_INTERVAL_SECONDS = 1 STATUS_CODE_OK = 200 def is_initialised(): try: response = urlopen(STATS_URL) if (response.getcode() == STATUS_CODE_OK): encoding = response.info().get_content_charset('utf-8') content = response.read().decode(encoding) return json.loads(content)['initialised'] else: return False except Exception as e: return False def wait_until_cluster_initialised(): start = time.time() elapsed = 0.0 while elapsed < MAXIMUM_TIME_SECONDS: if is_initialised(): print("Cluster initialised!") break elapsed = time.time() - start - print("Cluster not initialised... keep waiting... elapsed time: {1:.2f} seconds.".format( + print("Cluster not initialised... keep waiting... elapsed time: {0:.2f} seconds.".format(elapsed)) - SLEEPING_INTERVAL_SECONDS, elapsed)) time.sleep(SLEEPING_INTERVAL_SECONDS) elapsed = time.time() - start else: sys.exit("Cluster not initialised after {} seconds. I give up!".format(MAXIMUM_TIME_SECONDS)) if __name__ == "__main__": wait_until_cluster_initialised() +
Remove unused left over parameter
## Code Before: import json import sys import time from urllib.request import urlopen STATS_URL = "http://localhost:18001/stats" MAXIMUM_TIME_SECONDS = 2 * 60 SLEEPING_INTERVAL_SECONDS = 1 STATUS_CODE_OK = 200 def is_initialised(): try: response = urlopen(STATS_URL) if (response.getcode() == STATUS_CODE_OK): encoding = response.info().get_content_charset('utf-8') content = response.read().decode(encoding) return json.loads(content)['initialised'] else: return False except Exception as e: return False def wait_until_cluster_initialised(): start = time.time() elapsed = 0.0 while elapsed < MAXIMUM_TIME_SECONDS: if is_initialised(): print("Cluster initialised!") break elapsed = time.time() - start print("Cluster not initialised... keep waiting... elapsed time: {1:.2f} seconds.".format( SLEEPING_INTERVAL_SECONDS, elapsed)) time.sleep(SLEEPING_INTERVAL_SECONDS) elapsed = time.time() - start else: sys.exit("Cluster not initialised after {} seconds. I give up!".format(MAXIMUM_TIME_SECONDS)) if __name__ == "__main__": wait_until_cluster_initialised() ## Instruction: Remove unused left over parameter ## Code After: import json import sys import time from urllib.request import urlopen STATS_URL = "http://localhost:18001/stats" MAXIMUM_TIME_SECONDS = 2 * 60 SLEEPING_INTERVAL_SECONDS = 1 STATUS_CODE_OK = 200 def is_initialised(): try: response = urlopen(STATS_URL) if (response.getcode() == STATUS_CODE_OK): encoding = response.info().get_content_charset('utf-8') content = response.read().decode(encoding) return json.loads(content)['initialised'] else: return False except Exception as e: return False def wait_until_cluster_initialised(): start = time.time() elapsed = 0.0 while elapsed < MAXIMUM_TIME_SECONDS: if is_initialised(): print("Cluster initialised!") break elapsed = time.time() - start print("Cluster not initialised... keep waiting... 
elapsed time: {0:.2f} seconds.".format(elapsed)) time.sleep(SLEEPING_INTERVAL_SECONDS) elapsed = time.time() - start else: sys.exit("Cluster not initialised after {} seconds. I give up!".format(MAXIMUM_TIME_SECONDS)) if __name__ == "__main__": wait_until_cluster_initialised()
import json import sys import time from urllib.request import urlopen STATS_URL = "http://localhost:18001/stats" MAXIMUM_TIME_SECONDS = 2 * 60 SLEEPING_INTERVAL_SECONDS = 1 STATUS_CODE_OK = 200 def is_initialised(): try: response = urlopen(STATS_URL) if (response.getcode() == STATUS_CODE_OK): encoding = response.info().get_content_charset('utf-8') content = response.read().decode(encoding) return json.loads(content)['initialised'] else: return False except Exception as e: return False def wait_until_cluster_initialised(): start = time.time() elapsed = 0.0 while elapsed < MAXIMUM_TIME_SECONDS: if is_initialised(): print("Cluster initialised!") break elapsed = time.time() - start - print("Cluster not initialised... keep waiting... elapsed time: {1:.2f} seconds.".format( ? ^ + print("Cluster not initialised... keep waiting... elapsed time: {0:.2f} seconds.".format(elapsed)) ? ^ +++++++++ - SLEEPING_INTERVAL_SECONDS, elapsed)) time.sleep(SLEEPING_INTERVAL_SECONDS) elapsed = time.time() - start else: sys.exit("Cluster not initialised after {} seconds. I give up!".format(MAXIMUM_TIME_SECONDS)) if __name__ == "__main__": wait_until_cluster_initialised()
9dc6de1a97c18fa03787349ed64c1a4100b5d170
datapackage_pipelines_od4tj/processors/fix-numbers.py
datapackage_pipelines_od4tj/processors/fix-numbers.py
from datapackage_pipelines.wrapper import process def process_row(row, row_index, spec, resource_index, parameters, stats): for f in spec['schema']['fields']: if 'factor' in f: factor = { '1m': 1000000 }[f['factor']] v = row[f['name']] if v: row[f['name']] = v * factor return row process(process_row=process_row)
from datapackage_pipelines.wrapper import process def process_row(row, row_index, spec, resource_index, parameters, stats): for f in spec['schema']['fields']: if 'factor' in f: factor = { '1m': 1000000 }[f['factor']] v = row[f['name']] if v: row[f['name']] = v * factor return row process(process_row=process_row)
Fix bad indentation in processor
Fix bad indentation in processor
Python
mit
okfn/datapackage_pipelines_od4tj
from datapackage_pipelines.wrapper import process def process_row(row, row_index, spec, resource_index, parameters, stats): for f in spec['schema']['fields']: if 'factor' in f: factor = { '1m': 1000000 }[f['factor']] - v = row[f['name']] + v = row[f['name']] - if v: + if v: - row[f['name']] = v * factor + row[f['name']] = v * factor return row process(process_row=process_row)
Fix bad indentation in processor
## Code Before: from datapackage_pipelines.wrapper import process def process_row(row, row_index, spec, resource_index, parameters, stats): for f in spec['schema']['fields']: if 'factor' in f: factor = { '1m': 1000000 }[f['factor']] v = row[f['name']] if v: row[f['name']] = v * factor return row process(process_row=process_row) ## Instruction: Fix bad indentation in processor ## Code After: from datapackage_pipelines.wrapper import process def process_row(row, row_index, spec, resource_index, parameters, stats): for f in spec['schema']['fields']: if 'factor' in f: factor = { '1m': 1000000 }[f['factor']] v = row[f['name']] if v: row[f['name']] = v * factor return row process(process_row=process_row)
from datapackage_pipelines.wrapper import process def process_row(row, row_index, spec, resource_index, parameters, stats): for f in spec['schema']['fields']: if 'factor' in f: factor = { '1m': 1000000 }[f['factor']] - v = row[f['name']] + v = row[f['name']] ? ++++ - if v: + if v: ? ++++ - row[f['name']] = v * factor + row[f['name']] = v * factor ? ++++ return row process(process_row=process_row)
db537ab80444b9e4cc22f332577c2cba640fca0a
tasks/factory_utils.py
tasks/factory_utils.py
from factory import enums from collections import namedtuple import gc # Factoryboy uses "__" and Salesforce uses "__". Luckily Factoryboy makes # theirs easy to override! enums.SPLITTER = "____" # More flexible than FactoryBoy's sequences because you can create and # destroy them where-ever you want. class Adder: def __init__(self, x=0): self.x = x def __call__(self, value): self.x += value return int(self.x) def reset(self, x): self.x = x # Boilerplate that every factory would need to deal with. def SessionBase(session): class BaseMeta: sqlalchemy_session = session sqlalchemy_session_persistence = "commit" return BaseMeta # Thin collector for the factories and a place to try to achieve better # scalability than the create_batch function from FactoryBoy. class Factories: unflushed_record_counter = 0 def __init__(self, session, namespace): self.session = session self.factory_classes = { key: value for key, value in namespace.items() if hasattr(value, "generate_batch") } def create_batch(self, classname, batchsize, **kwargs): cls = self.factory_classes.get(classname, None) assert cls, f"Cannot find a factory class named {classname}. Did you misspell it?" for _ in range(batchsize): cls.create(**kwargs)
from factory import enums from collections import namedtuple import gc # Factoryboy uses "__" and Salesforce uses "__". Luckily Factoryboy makes # theirs easy to override! enums.SPLITTER = "____" # More flexible than FactoryBoy's sequences because you can create and # destroy them where-ever you want. class Adder: def __init__(self, x=0): self.x = x def __call__(self, value): self.x += value return int(self.x) def reset(self, x): self.x = x # Boilerplate that every factory would need to deal with. def SessionBase(session): class BaseMeta: sqlalchemy_session = session sqlalchemy_session_persistence = "commit" return BaseMeta # Thin collector for the factories and a place to try to achieve better # scalability than the create_batch function from FactoryBoy. class Factories: unflushed_record_counter = 0 def __init__(self, session, namespace): self.session = session self.factory_classes = { key: value for key, value in namespace.items() if hasattr(value, "generate_batch") } def create_batch(self, classname, batchsize, **kwargs): cls = self.factory_classes.get(classname, None) assert cls, f"Cannot find a factory class named {classname}. Did you misspell it?" for _ in range(batchsize): cls.create(**kwargs) def __getitem__(self, name): return self.factory_classes[name]
Make it easy to get a single item.
Make it easy to get a single item.
Python
bsd-3-clause
SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus
from factory import enums from collections import namedtuple import gc # Factoryboy uses "__" and Salesforce uses "__". Luckily Factoryboy makes # theirs easy to override! enums.SPLITTER = "____" # More flexible than FactoryBoy's sequences because you can create and # destroy them where-ever you want. class Adder: def __init__(self, x=0): self.x = x def __call__(self, value): self.x += value return int(self.x) def reset(self, x): self.x = x # Boilerplate that every factory would need to deal with. def SessionBase(session): class BaseMeta: sqlalchemy_session = session sqlalchemy_session_persistence = "commit" return BaseMeta # Thin collector for the factories and a place to try to achieve better # scalability than the create_batch function from FactoryBoy. class Factories: unflushed_record_counter = 0 def __init__(self, session, namespace): self.session = session self.factory_classes = { key: value for key, value in namespace.items() if hasattr(value, "generate_batch") } def create_batch(self, classname, batchsize, **kwargs): cls = self.factory_classes.get(classname, None) assert cls, f"Cannot find a factory class named {classname}. Did you misspell it?" for _ in range(batchsize): cls.create(**kwargs) + def __getitem__(self, name): + return self.factory_classes[name] +
Make it easy to get a single item.
## Code Before: from factory import enums from collections import namedtuple import gc # Factoryboy uses "__" and Salesforce uses "__". Luckily Factoryboy makes # theirs easy to override! enums.SPLITTER = "____" # More flexible than FactoryBoy's sequences because you can create and # destroy them where-ever you want. class Adder: def __init__(self, x=0): self.x = x def __call__(self, value): self.x += value return int(self.x) def reset(self, x): self.x = x # Boilerplate that every factory would need to deal with. def SessionBase(session): class BaseMeta: sqlalchemy_session = session sqlalchemy_session_persistence = "commit" return BaseMeta # Thin collector for the factories and a place to try to achieve better # scalability than the create_batch function from FactoryBoy. class Factories: unflushed_record_counter = 0 def __init__(self, session, namespace): self.session = session self.factory_classes = { key: value for key, value in namespace.items() if hasattr(value, "generate_batch") } def create_batch(self, classname, batchsize, **kwargs): cls = self.factory_classes.get(classname, None) assert cls, f"Cannot find a factory class named {classname}. Did you misspell it?" for _ in range(batchsize): cls.create(**kwargs) ## Instruction: Make it easy to get a single item. ## Code After: from factory import enums from collections import namedtuple import gc # Factoryboy uses "__" and Salesforce uses "__". Luckily Factoryboy makes # theirs easy to override! enums.SPLITTER = "____" # More flexible than FactoryBoy's sequences because you can create and # destroy them where-ever you want. class Adder: def __init__(self, x=0): self.x = x def __call__(self, value): self.x += value return int(self.x) def reset(self, x): self.x = x # Boilerplate that every factory would need to deal with. 
def SessionBase(session): class BaseMeta: sqlalchemy_session = session sqlalchemy_session_persistence = "commit" return BaseMeta # Thin collector for the factories and a place to try to achieve better # scalability than the create_batch function from FactoryBoy. class Factories: unflushed_record_counter = 0 def __init__(self, session, namespace): self.session = session self.factory_classes = { key: value for key, value in namespace.items() if hasattr(value, "generate_batch") } def create_batch(self, classname, batchsize, **kwargs): cls = self.factory_classes.get(classname, None) assert cls, f"Cannot find a factory class named {classname}. Did you misspell it?" for _ in range(batchsize): cls.create(**kwargs) def __getitem__(self, name): return self.factory_classes[name]
from factory import enums from collections import namedtuple import gc # Factoryboy uses "__" and Salesforce uses "__". Luckily Factoryboy makes # theirs easy to override! enums.SPLITTER = "____" # More flexible than FactoryBoy's sequences because you can create and # destroy them where-ever you want. class Adder: def __init__(self, x=0): self.x = x def __call__(self, value): self.x += value return int(self.x) def reset(self, x): self.x = x # Boilerplate that every factory would need to deal with. def SessionBase(session): class BaseMeta: sqlalchemy_session = session sqlalchemy_session_persistence = "commit" return BaseMeta # Thin collector for the factories and a place to try to achieve better # scalability than the create_batch function from FactoryBoy. class Factories: unflushed_record_counter = 0 def __init__(self, session, namespace): self.session = session self.factory_classes = { key: value for key, value in namespace.items() if hasattr(value, "generate_batch") } def create_batch(self, classname, batchsize, **kwargs): cls = self.factory_classes.get(classname, None) assert cls, f"Cannot find a factory class named {classname}. Did you misspell it?" for _ in range(batchsize): cls.create(**kwargs) + + def __getitem__(self, name): + return self.factory_classes[name]
cbb90d03b83a495b1c46514a583538f2cfc0d29c
test/functional/test_manager.py
test/functional/test_manager.py
from osmviz.manager import PILImageManager, OSMManager import PIL.Image as Image def test_pil(): imgr = PILImageManager("RGB") osm = OSMManager(image_manager=imgr) image, bnds = osm.createOSMImage((30, 35, -117, -112), 9) wh_ratio = float(image.size[0]) / image.size[1] image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS) del image image2.show() if __name__ == "__main__": test_pil() # End of file
from osmviz.manager import PILImageManager, OSMManager import PIL.Image as Image def test_pil(): image_manager = PILImageManager("RGB") osm = OSMManager(image_manager=image_manager) image, bounds = osm.createOSMImage((30, 31, -117, -116), 9) wh_ratio = float(image.size[0]) / image.size[1] image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS) del image image2.show() if __name__ == "__main__": test_pil() # End of file
Reduce number of tiles downloaded
Reduce number of tiles downloaded
Python
mit
hugovk/osmviz,hugovk/osmviz
from osmviz.manager import PILImageManager, OSMManager import PIL.Image as Image def test_pil(): - imgr = PILImageManager("RGB") + image_manager = PILImageManager("RGB") - osm = OSMManager(image_manager=imgr) + osm = OSMManager(image_manager=image_manager) - image, bnds = osm.createOSMImage((30, 35, -117, -112), 9) + image, bounds = osm.createOSMImage((30, 31, -117, -116), 9) wh_ratio = float(image.size[0]) / image.size[1] image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS) del image image2.show() if __name__ == "__main__": test_pil() # End of file
Reduce number of tiles downloaded
## Code Before: from osmviz.manager import PILImageManager, OSMManager import PIL.Image as Image def test_pil(): imgr = PILImageManager("RGB") osm = OSMManager(image_manager=imgr) image, bnds = osm.createOSMImage((30, 35, -117, -112), 9) wh_ratio = float(image.size[0]) / image.size[1] image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS) del image image2.show() if __name__ == "__main__": test_pil() # End of file ## Instruction: Reduce number of tiles downloaded ## Code After: from osmviz.manager import PILImageManager, OSMManager import PIL.Image as Image def test_pil(): image_manager = PILImageManager("RGB") osm = OSMManager(image_manager=image_manager) image, bounds = osm.createOSMImage((30, 31, -117, -116), 9) wh_ratio = float(image.size[0]) / image.size[1] image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS) del image image2.show() if __name__ == "__main__": test_pil() # End of file
from osmviz.manager import PILImageManager, OSMManager import PIL.Image as Image def test_pil(): - imgr = PILImageManager("RGB") + image_manager = PILImageManager("RGB") ? + ++++++++ - osm = OSMManager(image_manager=imgr) + osm = OSMManager(image_manager=image_manager) ? + ++++++++ - image, bnds = osm.createOSMImage((30, 35, -117, -112), 9) ? ^ ^ + image, bounds = osm.createOSMImage((30, 31, -117, -116), 9) ? ++ ^ ^ wh_ratio = float(image.size[0]) / image.size[1] image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS) del image image2.show() if __name__ == "__main__": test_pil() # End of file