Instruction
stringlengths
14
778
input_code
stringlengths
0
4.24k
output_code
stringlengths
1
5.44k
Remove warning for long messages
import logging logger = logging.getLogger(__name__) def strspn(source, allowed): newchrs = [] for c in source: if c in allowed: newchrs.append(c) return u''.join(newchrs) def check_cell_phone_number(number): cleaned_number = strspn(number, u'+0123456789') if not u'+' in cleane...
import logging logger = logging.getLogger(__name__) def strspn(source, allowed): newchrs = [] for c in source: if c in allowed: newchrs.append(c) return u''.join(newchrs) def check_cell_phone_number(number): cleaned_number = strspn(number, u'+0123456789') if not u'+' in cleane...
Fix deoplete source compete position sent to language servers.
from .base import Base import re CompleteOutputs = "g:LanguageClient_omniCompleteResults" class Source(Base): def __init__(self, vim): super().__init__(vim) self.name = "LanguageClient" self.mark = "[LC]" self.rank = 1000 self.min_pattern_length = 1 self.filetype...
from .base import Base import re CompleteOutputs = "g:LanguageClient_omniCompleteResults" class Source(Base): def __init__(self, vim): super().__init__(vim) self.name = "LanguageClient" self.mark = "[LC]" self.rank = 1000 self.min_pattern_length = 0 self.filetype...
Update to 0.5.4 pre-alpha (preparing next build version)
VERSION = (0, 5, 3, 'final', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%s pre-alpha' % version else: if VERSION[3] != 'final': version = '%s %...
VERSION = (0, 5, 4, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%s pre-alpha' % version else: if VERSION[3] != 'final': version = '%s %...
Change PDF font to Helvetica
from ..converter import KnowledgePostConverter from .html import HTMLConverter class PDFConverter(KnowledgePostConverter): ''' Use this as a template for new KnowledgePostConverters. ''' _registry_keys = ['pdf'] @property def dependencies(self): # Dependencies required for this conve...
from ..converter import KnowledgePostConverter from .html import HTMLConverter class PDFConverter(KnowledgePostConverter): ''' Use this as a template for new KnowledgePostConverters. ''' _registry_keys = ['pdf'] @property def dependencies(self): # Dependencies required for this conve...
Change test file to support different parsers
import socket client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) client_socket.connect(("localhost", 5002)) with open("resources/Matrix.java", "r") as java_file: source = java_file.read() + "\nEOS_BITSHIFT" client_socket.send("%d\n%s" % (len(source), source)); data = '' while True: data = ...
import socket, sys file_name = 'resources/<name>.c' server_socket_number = 5001 if __name__ == '__main__': if len(sys.argv) == 1: print "Please input a parser to test." elif len(sys.argv) > 2: print "Too many arguments." else: if sys.argv[1] == 'c': pass elif...
Use connection settings from conf file.
import sys from rq import Queue, Connection, Worker import cli import worker class RunWorkerCli(cli.BaseCli): ''' A wrapper for RQ workers. Wrapping RQ is the only way to generate notifications when a job fails. ''' def _get_args(self, arg_parser): ''' Customize arguments. ''' ...
import sys from redis import Redis from rq import Queue, Connection, Worker import cli import worker class RunWorkerCli(cli.BaseCli): ''' A wrapper for RQ workers. Wrapping RQ is the only way to generate notifications when a job fails. ''' def _get_args(self, arg_parser): ''' Customize ...
Add a fallback for when pypandoc is not present
#!/usr/bin/env python from setuptools import setup import pypandoc setup( name='SVN-Ignore', py_modules=['sr', 'src.cli', 'src.svn_ignore'], version='1.1.1', description='An utility that provides .svnignore functionality similar to GIT', long_description=pypandoc.convert('README.md','rst',format=...
#!/usr/bin/env python from setuptools import setup def get_long_description(): try: import pypandoc long_description = pypandoc.convert('README.md','rst',format='markdown') except Exception: print('WARNING: Failed to convert README.md to rst, pypandoc was not present') f = open...
Add python2 specifically to classifier list.
import sys try: from setuptools import setup except ImportError: from distutils import setup if sys.version_info[0] == 2: base_dir = 'python2' elif sys.version_info[0] == 3: base_dir = 'python3' readme = open('README.rst', 'r') README_TEXT = readme.read() readme.close() setup( name='aniso8601', ...
import sys try: from setuptools import setup except ImportError: from distutils import setup if sys.version_info[0] == 2: base_dir = 'python2' elif sys.version_info[0] == 3: base_dir = 'python3' readme = open('README.rst', 'r') README_TEXT = readme.read() readme.close() setup( name='aniso8601', ...
Update download URL to match current version / tag.
from setuptools import setup setup(name='pagerduty_events_api', version='0.2.0', description='Python wrapper for Pagerduty Events API', url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api', download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.1.1', ...
from setuptools import setup setup(name='pagerduty_events_api', version='0.2.0', description='Python wrapper for Pagerduty Events API', url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api', download_url='https://github.com/BlasiusVonSzerencsi/pagerduty-events-api/tarball/0.2.0', ...
Replace phone number and avoid sending SMS for load testing
# coding: utf-8 # pylint: disable=W7936 from locust import HttpLocust, TaskSet, task from random import randint class SmsSponsorWorkflow(TaskSet): @task(1) def send_sms(self): url = "/sms/mnc?sender=%2B41789364{}&service=compassion".format( randint(100, 999)) self.client.get(url) ...
# coding: utf-8 # pylint: disable=W7936 from locust import HttpLocust, TaskSet, task from random import randint class SmsSponsorWorkflow(TaskSet): @task(1) def send_sms(self): url = "/sms/mnc?sender=%2B4199{}&service=compassion&text=test".format( randint(1000000, 9999999)) self.cl...
Increment version number to 0.5.0
__version__ = '0.5.0.dev0' # this should ensure that we can still import constants.py as climlab.constants from climlab.utils import constants from climlab.utils import thermo, legendre # some more useful shorcuts from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel from climlab.m...
__version__ = '0.5.0' # this should ensure that we can still import constants.py as climlab.constants from climlab.utils import constants from climlab.utils import thermo, legendre # some more useful shorcuts from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel from climlab.model....
Add unit test, update documentation
from unittest import TestCase from phi import math from phi.geom import Box from phi import field from phi.physics import Domain class TestFieldMath(TestCase): def test_gradient(self): domain = Domain(x=4, y=3) phi = domain.grid() * (1, 2) grad = field.gradient(phi, stack_dim='gradient')...
from unittest import TestCase from phi import math from phi.field import StaggeredGrid, CenteredGrid from phi.geom import Box from phi import field from phi.physics import Domain class TestFieldMath(TestCase): def test_gradient(self): domain = Domain(x=4, y=3) phi = domain.grid() * (1, 2) ...
Update import syntax to fit python3
from plugin import websocket from server import GeventWebSocketServer __all__ = ['websocket', 'GeventWebSocketServer'] __version__ = '0.2.8'
from .plugin import websocket from .server import GeventWebSocketServer __all__ = ['websocket', 'GeventWebSocketServer'] __version__ = '0.2.8'
Correct function name and put report_timing_summary at end of script.
""" Utility for generating TCL script to output timing information from a design checkpoint. """ import argparse def create_runme(f_out, args): print( """ report_timing_summary source {util_tcl} write_timing_info timing_{name}.json5 """.format(name=args.name, util_tcl=args.util_tcl), file=f_out ...
""" Utility for generating TCL script to output timing information from a design checkpoint. """ import argparse def create_output_timing(f_out, args): print( """ source {util_tcl} write_timing_info timing_{name}.json5 report_timing_summary """.format(name=args.name, util_tcl=args.util_tcl), file...
Correct import behavior to prevent Runtime error
""" sentry.utils.imports ~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import import pkgutil import six class ModuleProxyCache(dict): def __missing__(self, key): if '.' not...
""" sentry.utils.imports ~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import import pkgutil import six class ModuleProxyCache(dict): def __missing__(self, key): if '.' not...
Rewrite URLs for playbook app
from django.conf.urls import url from ansible.forms import AnsibleForm1, AnsibleForm2 from ansible.views import ( PlaybookWizard, PlaybookListView, PlaybookDetailView, PlaybookFileCreateView, PlaybookFileEditView, PlaybookFileView ) from . import views urlpatterns = [ url(r'^create/$', PlaybookWizard.as_v...
from django.conf.urls import url from ansible.forms import AnsibleForm1, AnsibleForm2 from ansible.views import ( PlaybookWizard, PlaybookListView, PlaybookDetailView, PlaybookFileCreateView, PlaybookFileEditView, PlaybookFileView ) from . import views urlpatterns = [ url(r'^create/$', PlaybookWizard.as_v...
Add ScrollableDropdown import to ipywidgets
from .widget import Widget, CallbackDispatcher, register, widget_serialization from .domwidget import DOMWidget from .trait_types import Color, EventfulDict, EventfulList from .widget_bool import Checkbox, ToggleButton, Valid from .widget_button import Button from .widget_box import Box, FlexBox, Proxy, PlaceProxy, H...
from .widget import Widget, CallbackDispatcher, register, widget_serialization from .domwidget import DOMWidget from .trait_types import Color, EventfulDict, EventfulList from .widget_bool import Checkbox, ToggleButton, Valid from .widget_button import Button from .widget_box import Box, FlexBox, Proxy, PlaceProxy, H...
Update search index to look for all objects.
import datetime from haystack.indexes import * from haystack import site from models import Paste, Commit class CommitIndex(RealTimeSearchIndex): text = CharField(document=True, use_template=True) commit = CharField(model_attr='commit') created = DateField(model_attr='created') user = CharField(model_a...
import datetime from haystack.indexes import * from haystack import site from models import Paste, Commit class CommitIndex(RealTimeSearchIndex): text = CharField(document=True, use_template=True) commit = CharField(model_attr='commit') created = DateField(model_attr='created') user = CharField(model_a...
Fix a warning in 1.8
from django.db import models from djangae import patches class CounterShard(models.Model): count = models.PositiveIntegerField() # Apply our django patches patches.patch()
from django.db import models from djangae import patches class CounterShard(models.Model): count = models.PositiveIntegerField() class Meta: app_label = "djangae" # Apply our django patches patches.patch()
Use gtk ui by default
# Copyright (c) 2012 John Reese # Licensed under the MIT License from __future__ import absolute_import, division engine = None ui = None def async_engine_command(command, network=None, params=None): """Send a command to the current backend engine.""" return engine.async_command(command, network, params) de...
# Copyright (c) 2012 John Reese # Licensed under the MIT License from __future__ import absolute_import, division engine = None ui = None def async_engine_command(command, network=None, params=None): """Send a command to the current backend engine.""" return engine.async_command(command, network, params) de...
Add static method for email address
from passlib.hash import pbkdf2_sha512 class Utils: @staticmethod def hash_password(password): """ Hashes a password using sha512 -> pbkdf2_sha512 encrypted password """ return pbkdf2_sha512.encrypt(password) @staticmethod def check_hashed_password(password, hashed_password): """ Checks the password ...
import re from passlib.hash import pbkdf2_sha512 class Utils: @staticmethod def email_is_valid(email): email_address_matcher = re.compile('^[\w-]+@([\w-]+\.)+[\w]+$') return True if email_address_matcher.match(email) else False @staticmethod def hash_password(password): """ Hashes a password using sha512...
Tweak PayPal output to use Payee and Memo fields
#!/usr/bin/env python3 import argparse import csv parser = argparse.ArgumentParser() parser.add_argument('--config', help='path to file containing column header mappings', required=True) parser.add_argument('--csv-file', help='path to CSV file', required=True) parser.add_argument('--skip-headers', help='skip first li...
#!/usr/bin/env python3 import argparse import csv parser = argparse.ArgumentParser() parser.add_argument('--config', help='path to file containing column header mappings', required=True) parser.add_argument('--csv-file', help='path to CSV file', required=True) parser.add_argument('--skip-headers', help='skip first li...
Set pool_recycle to deal with MySQL closing idle connections.
from sqlalchemy import create_engine from inbox.sqlalchemy_ext.util import ForceStrictMode from inbox.config import db_uri, config DB_POOL_SIZE = config.get_required('DB_POOL_SIZE') def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5): engine = create_engine(db_uri(), listeners=[Fo...
from sqlalchemy import create_engine from inbox.sqlalchemy_ext.util import ForceStrictMode from inbox.config import db_uri, config DB_POOL_SIZE = config.get_required('DB_POOL_SIZE') def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5): engine = create_engine(db_uri(), listeners=[Fo...
Remove unused imports from AppConfig module.
"""Django DDP app config.""" from __future__ import print_function from django.apps import AppConfig from django.conf import settings, ImproperlyConfigured from django.db import DatabaseError from django.db.models import signals from dddp import autodiscover from dddp.models import Connection class DjangoDDPConfig...
"""Django DDP app config.""" from __future__ import print_function from django.apps import AppConfig from django.conf import settings, ImproperlyConfigured from dddp import autodiscover class DjangoDDPConfig(AppConfig): """Django app config for django-ddp.""" api = None name = 'dddp' verbose_name...
Add missing facebook and google verif codes
""" Extra context processors for the CarnetDuMaker app. """ from django.contrib.sites.shortcuts import get_current_site from django.utils.translation import ugettext_lazy as _ def app_constants(request): """ Constants context processor. :param request: the current request. :return: All constants for ...
""" Extra context processors for the CarnetDuMaker app. """ from django.contrib.sites.shortcuts import get_current_site from django.utils.translation import ugettext_lazy as _ def app_constants(request): """ Constants context processor. :param request: the current request. :return: All constants for ...
Add new fields on LogConfiguration model
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin import logging LOG = logging.getLogger(__name__) class LogConfigurationAdmin(admin.ModelAdmin): list_filter = ("environment", "engine_type") list_display = ("environment", "engine_type", "rete...
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin import logging LOG = logging.getLogger(__name__) class LogConfigurationAdmin(admin.ModelAdmin): list_filter = ("environment", "engine_type") list_display = ("environment", "engine_type", "rete...
Check APP_ENGINE env var before using hard-coded path to Google AppEngine SDK.
#!/usr/bin/env python import optparse import sys from os import path from os.path import expanduser import unittest import argparse # Simple stand-alone test runner # - Runs independently of appengine runner # - So we need to find the GAE library # - Looks for tests as ./tests/test*.py # - Use --skipbasics to skip th...
#!/usr/bin/env python import argparse import optparse from os import getenv, path from os.path import expanduser import sys import unittest # Simple stand-alone test runner # - Runs independently of appengine runner # - So we need to find the GAE library # - Looks for tests as ./tests/test*.py # - Use --skipbasics to...
Add extra code to PostGIS migration to only create extensions if they're not already there. Drop on rollback only if extensions exist.
"""Enable PostGIS Revision ID: 4ef20b76cab1 Revises: 55004b0f00d6 Create Date: 2015-02-11 20:49:42.303864 """ # revision identifiers, used by Alembic. revision = '4ef20b76cab1' down_revision = '55004b0f00d6' from alembic import op import sqlalchemy as sa def upgrade(): op.execute("CREATE EXTENSION postgis;") ...
"""Enable PostGIS Revision ID: 4ef20b76cab1 Revises: 55004b0f00d6 Create Date: 2015-02-11 20:49:42.303864 """ # revision identifiers, used by Alembic. revision = '4ef20b76cab1' down_revision = '55004b0f00d6' from alembic import op import sqlalchemy as sa def upgrade(): op.execute("CREATE EXTENSION IF NOT EXIS...
Break get_images up to match get_tags
from __future__ import unicode_literals from catsnap import Client, HASH_KEY from boto.dynamodb.batch import BatchList import json MAX_ITEMS_TO_REQUEST = 99 def get_images(filenames): if not filenames: raise StopIteration filenames = list(filenames) unprocessed_keys = filenames[MAX_ITEMS_TO_REQUE...
from __future__ import unicode_literals from catsnap import Client, HASH_KEY from boto.dynamodb.batch import BatchList import json MAX_ITEMS_TO_REQUEST = 99 def get_image_items(filenames): if not filenames: raise StopIteration filenames = list(filenames) unprocessed_keys = filenames[MAX_ITEMS_TO_...
Test the presence of various tools
""" Core tests. Test general environment. """ import subprocess as sp from django.test import TestCase from django.test.client import Client from django.contrib.auth.models import User from django.conf import settings class CoreTest(TestCase): def setUp(self): pass def tearDown(self): pass ...
""" Core tests. Test general environment. """ import subprocess as sp from django.test import TestCase from django.test.client import Client from django.contrib.auth.models import User from django.conf import settings class CoreTest(TestCase): def setUp(self): pass def tearDown(self): pass ...
Set EAV form class name to match EAV model name
from django import forms from django.core.exceptions import ValidationError from cyder.base.eav.constants import ATTRIBUTE_TYPES from cyder.base.eav.models import Attribute def get_eav_form(eav_model, entity_model): class EAVForm(forms.ModelForm): def __init__(self, *args, **kwargs): if 'inst...
from django import forms from django.core.exceptions import ValidationError from cyder.base.eav.constants import ATTRIBUTE_TYPES from cyder.base.eav.models import Attribute def get_eav_form(eav_model, entity_model): class EAVForm(forms.ModelForm): def __init__(self, *args, **kwargs): if 'inst...
Move version num to own line
# -*- coding: utf-8 -*- # vim: sw=4:ts=4:expandtab """ ckanny ~~~~~~ Miscellaneous CKAN utility scripts Examples: literal blocks:: python example_google.py Attributes: module_level_variable1 (int): Module level variables may be documented in """ from __future__ import ( absolute_import, divisi...
# -*- coding: utf-8 -*- # vim: sw=4:ts=4:expandtab """ ckanny ~~~~~~ Miscellaneous CKAN utility scripts Examples: literal blocks:: python example_google.py Attributes: module_level_variable1 (int): Module level variables may be documented in """ from __future__ import ( absolute_import, divisi...
Add test logger if not added by nose
# Copyright 2013-2014 DataStax, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writi...
# Copyright 2013-2014 DataStax, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writi...
Fix in output to help command.
# The client of DDuplicated tool. from os import path as opath, getcwd from pprint import pprint from sys import argv from dduplicated import commands def get_paths(params): paths = [] for param in params: path = opath.join(getcwd(), param) if opath.exists(path) and opath.isdir(path) and not opath.islink(path):...
# The client of DDuplicated tool. from os import path as opath, getcwd from pprint import pprint from sys import argv from dduplicated import commands def get_paths(params): paths = [] for param in params: path = opath.join(getcwd(), param) if opath.exists(path) and opath.isdir(path) and not opath.islink(path):...
Update in output to terminal.
# The client of DDuplicated tool. from os import path as opath, getcwd from pprint import pprint from sys import argv from dduplicated import commands def get_paths(params): paths = [] for param in params: path = opath.join(getcwd(), param) if opath.exists(path) and opath.isdir(path) and not opath.islink(path): ...
# The client of DDuplicated tool. from os import path as opath, getcwd from pprint import pprint from sys import argv from dduplicated import commands def get_paths(params): paths = [] for param in params: path = opath.join(getcwd(), param) if opath.exists(path) and opath.isdir(path) and not opath.islink(path):...
Use allclose for dataprep test
import os import pandas as pd import numpy.testing as npt from gypsy import DATA_DIR from gypsy.data_prep import prep_standtable def test_prep_standtable(): data_file_name = 'raw_standtable.csv' plot_data = pd.read_csv(os.path.join(DATA_DIR, data_file_name)) expected_data_path = os.path.join( DAT...
import os import pandas as pd import numpy.testing as npt from gypsy import DATA_DIR from gypsy.data_prep import prep_standtable def test_prep_standtable(): data_file_name = 'raw_standtable.csv' plot_data = pd.read_csv(os.path.join(DATA_DIR, data_file_name)) expected_data_path = os.path.join( DAT...
Fix pad_tx off by one error + nits
from .cdefs import MIN_TX_SIZE, MAX_TXOUT_PUBKEY_SCRIPT from .mininode import CTransaction, FromHex, ToHex, CTxOut from .script import OP_RETURN, CScript import random from binascii import hexlify, unhexlify # Pad outputs until it reaches at least min_size def pad_tx(tx, min_size=None): if min_size is None: ...
from .cdefs import MIN_TX_SIZE, MAX_TXOUT_PUBKEY_SCRIPT from .mininode import CTransaction, FromHex, ToHex, CTxOut from .script import OP_RETURN, CScript import random from binascii import hexlify, unhexlify # Pad outputs until it reaches at least min_size def pad_tx(tx, min_size=None): if min_size is None: ...
Add import in regression test
from __future__ import unicode_literals from ...tokens import Doc from ...vocab import Vocab def test_issue600(): doc = Doc(Vocab(tag_map={'NN': {'pos': 'NOUN'}}), words=['hello']) doc[0].tag_ = u'NN'
from __future__ import unicode_literals from ...tokens import Doc from ...vocab import Vocab from ...attrs import POS def test_issue600(): doc = Doc(Vocab(tag_map={'NN': {'pos': 'NOUN'}}), words=['hello']) doc[0].tag_ = u'NN'
Fix unit tests for arg_parser
import unittest from tailorscad.arg_parser import parse_args class TestArgParser(unittest.TestCase): def test_parse_args_none(self): args = [] argv = [] args = parse_args(argv) self.assertFalse(args) def test_parse_args_inknown(self): args = [] argv = ['...
import unittest from tailorscad.arg_parser import parse_args class TestArgParser(unittest.TestCase): def test_parse_args_none(self): args = [] argv = [] args = parse_args(argv) self.assertFalse(args.config) def test_parse_args_inknown(self): args = [] ar...
Fix add_idea when multiple States have no previous
from idea.models import State def get_first_state(): """ Get the first state for an idea. """ return State.objects.get(previous__isnull=True)
from idea.models import State def get_first_state(): """ Get the first state for an idea. """ #return State.objects.get(previous__isnull=True) # previous__isnull breaks functionality if someone creates a new state # without a previous state set. since we know the initial state # is id=1 per fixtur...
Put seasons and subreddit into a response object for the api
from flask import Flask, jsonify, render_template import test, stats, os app = Flask(__name__) cache = {} @app.route('/') def hello_world(): return 'Hello World!' @app.route('/r/<string:subreddit>') def episodes(subreddit): seasonsAndEpisodes = _getEpisodes(subreddit) return render_template('index.html',...
from flask import Flask, jsonify, render_template import test, stats, os app = Flask(__name__) cache = {} @app.route('/') def hello_world(): return 'Hello World!' @app.route('/r/<string:subreddit>') def episodes(subreddit): seasonsAndEpisodes = _getEpisodes(subreddit) return render_template('index.html',...
Add option to create/drop the database
import argparse from config import app_config as cfg from libraries.database_init import DataBase from libraries.tweetimporter import TweetImporter from libraries.twitterclient import TwitterClient # Twitter API configuration consumer_key = cfg.twitter["consumer_key"] consumer_secret = cfg.twitter["consumer_secret"]...
import argparse from config import app_config as cfg from libraries.database_init import DataBase from libraries.tweetimporter import TweetImporter from libraries.twitterclient import TwitterClient # Twitter API configuration consumer_key = cfg.twitter["consumer_key"] consumer_secret = cfg.twitter["consumer_secret"]...
Mark wiki edits as bot edit
import ConfigParser import datetime from wikitools import wiki from wikitools import category from plugin import Plugin class MediaWiki(Plugin): def __init__(self, config=None): if config: try: self.site = wiki.Wiki(config.get('MediaWiki', 'wikiapiurl')) self.site.login(config.get('MediaWik...
import ConfigParser import datetime from wikitools import wiki from wikitools import category from plugin import Plugin class MediaWiki(Plugin): def __init__(self, config=None): if config: try: self.site = wiki.Wiki(config.get('MediaWiki', 'wikiapiurl')) self.site.login(config.get('MediaWik...
Print out an array of points
import re import numpy as np infile = "shapes.txt" filt = re.compile(r'^"?([^"]*)"?$') converter = lambda x: filt.match(x.strip()).group(1) data = np.recfromcsv(infile, delimiter=',') shapes = np.array(map(int, [converter(x) for x in data["shape_id"]])) lats = np.array(map(float, [converter(x) for x in data["shape_pt_l...
import re, sys import numpy as np infile = sys.argv[1] filt = re.compile(r'^"?([^"]*)"?$') converter = lambda x: filt.match(x.strip()).group(1) data = np.recfromcsv(infile, delimiter=',') shapes = np.array(map(int, [converter(x) for x in data["shape_id"]])) lats = np.array(map(float, [converter(x) for x in data["shape_...
Move TODO items to GitHub issues
#!/usr/bin/env python3 import argparse import yaml import jinja2 import weasyprint parser = argparse.ArgumentParser() parser.add_argument('--data', help='path to data directory', required=True) parser.add_argument('--number', help='Invoice number', type=int, required=True) args = parser.parse_args() data_directory =...
#!/usr/bin/env python3 import argparse import yaml import jinja2 import weasyprint parser = argparse.ArgumentParser() parser.add_argument('--data', help='path to data directory', required=True) parser.add_argument('--number', help='Invoice number', type=int, required=True) args = parser.parse_args() data_directory =...
Add a unit test for ignored nicks in _parse_irc_message
import unittest from cobe.commands import LearnIrcLogCommand class testIrcLogParsing(unittest.TestCase): def setUp(self): self.command = LearnIrcLogCommand() def testNonPubmsg(self): msg = "this is some non-pubmsg text found in a log" cmd = self.command self.assertEqual(None,...
import unittest from cobe.commands import LearnIrcLogCommand class testIrcLogParsing(unittest.TestCase): def setUp(self): self.command = LearnIrcLogCommand() def testNonPubmsg(self): msg = "this is some non-pubmsg text found in a log" cmd = self.command self.assertEqual(None,...
Fix broken reference in documentation
"""This module provides an abstract base class for invocation plugins""" from abc import abstractmethod, ABCMeta class BasePlugin(object): """ Abstract base class for invocation plugins. Plugin developers can either derive their objects directly from this class or from :class:`orges.plugins.dummy.D...
"""This module provides an abstract base class for invocation plugins""" from abc import abstractmethod, ABCMeta class BasePlugin(object): """ Abstract base class for invocation plugins. Plugin developers can either derive their objects directly from this class or from :class:`orges.plugins.dummy.D...
Support environment variables for the extraction
from subgraph_extractor.cli import extract_from_config import click from cloudpathlib import AnyPath @click.command() @click.option( "--subgraph-config-folder", help="The folder containing the subgraph config files", default='config', ) @click.option( "--database-string", default="postgresql://grap...
from subgraph_extractor.cli import extract_from_config import click from cloudpathlib import AnyPath import os @click.command() @click.option( "--subgraph-config-folder", help="The folder containing the subgraph config files", default="config", ) @click.option( "--database-string", default=os.envi...
Test run speedup by changing password hasher
from familyconnect_registration.settings import * # flake8: noqa # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'TESTSEKRET' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True CELERY_EAGER_PROPAGATES_EXCEPTIONS = True CELERY_ALWAYS_...
from familyconnect_registration.settings import * # flake8: noqa # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'TESTSEKRET' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True CELERY_EAGER_PROPAGATES_EXCEPTIONS = True CELERY_ALWAYS_...
Add MIT LICENSE. Fix test dir removing issue - re-raise exception after delete this dir.
#!/usr/bin/env python3 from passwd_change import passwd_change, shadow_change, mails_delete from unittest import TestCase, TestLoader, TextTestRunner import os import subprocess class PasswdChange_Test(TestCase): def setUp(self): """ Preconditions """ subprocess.call(['mkdir', 't...
#!/usr/bin/env python3 from passwd_change import passwd_change, shadow_change, mails_delete from unittest import TestCase, TestLoader, TextTestRunner import os import subprocess class PasswdChange_Test(TestCase): def setUp(self): """ Preconditions """ subprocess.call(['mkdir', 't...
Adjust POST_LOGIN_VIEW and POST_LOGOUT_VIEW test
# -*- coding: utf-8 -*- """ test_configuration ~~~~~~~~~~~~~~~~~~ Basic configuration tests """ import base64 import pytest from utils import authenticate, logout @pytest.mark.settings( logout_url='/custom_logout', login_url='/custom_login', post_login_view='/post_login', post_logout_v...
# -*- coding: utf-8 -*- """ test_configuration ~~~~~~~~~~~~~~~~~~ Basic configuration tests """ import base64 import pytest from utils import authenticate, logout @pytest.mark.settings( logout_url='/custom_logout', login_url='/custom_login', post_login_view='/post_login', post_logout_v...
Fix a bug introduced in rebasing
"""Test the 20news downloader, if the data is available.""" import numpy as np from nose.tools import assert_equal from nose.tools import assert_true from nose.plugins.skip import SkipTest from scikits.learn import datasets def test_20news(): try: data = datasets.fetch_20newsgroups(subset='all', ...
"""Test the 20news downloader, if the data is available.""" import numpy as np from nose.tools import assert_equal from nose.plugins.skip import SkipTest from scikits.learn import datasets def test_20news(): try: data = datasets.fetch_20newsgroups(subset='all', download_if_missing=...
Move pyclamav import inside of clean method on RWValidatedFileField so that it doesn't get imported by streamscript or unless as needed for field validation
from django.forms import forms from south.modelsinspector import add_introspection_rules from validatedfile.fields import ValidatedFileField import pyclamav class RWValidatedFileField(ValidatedFileField): """ Same as FileField, but you can specify: * content_types - list containing allowed content_typ...
from django.forms import forms from south.modelsinspector import add_introspection_rules from validatedfile.fields import ValidatedFileField class RWValidatedFileField(ValidatedFileField): """ Same as FileField, but you can specify: * content_types - list containing allowed content_types. Exa...
Remove Django 1.8/1.9 warnings; much better supported now.
# django-salesforce # # by Phil Christensen # (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org) # See LICENSE.md for details # """ A database backend for the Django ORM. Allows access to all Salesforce objects accessible via the SOQL API. """ import logging import warnings import django DJANGO_18_PLU...
# django-salesforce # # by Phil Christensen # (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org) # See LICENSE.md for details # """ A database backend for the Django ORM. Allows access to all Salesforce objects accessible via the SOQL API. """ import logging import warnings import django DJANGO_18_PLU...
Add a simple search controller which wraps around the Github code search API
import requests import simplejson as json class GithubSearch(object): def __init__(self): self.api_url = "https://api.github.com/search/code?q=" self.repo = "OpenTreeOfLife/treenexus" def search(self,term): search_url = "%s+repo:%s" % (self.api_url, self.repo) r = requests.ge...
import requests import simplejson as json class GithubSearch(object): def __init__(self): self.api_url = "https://api.github.com/search/code?q=" self.repo = "OpenTreeOfLife/treenexus" def search(self,term): search_url = "%s%s+repo:%s" % (self.api_url, term, self.repo) print "...
Move from python anywhere to webfaction
import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_c...
import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_c...
Attach app to SQLAlchemy properly
""" Database Module """ from flask_sqlalchemy import SQLAlchemy db = SQLAlchemy()
""" Database Module """ from flask_sqlalchemy import SQLAlchemy from penelophant import app db = SQLAlchemy(app)
Add empty middleware setting to quiet warning
DEBUG = True TEMPLATE_DEBUG = DEBUG SECRET_KEY = 'fake_secret' ROOT_URLCONF = 'tests.test_urls' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'irrelevant.db' } } INSTALLED_APPS = ( 'djproxy', ) STATIC_ROOT = '' STATIC_URL = '/' APPEND_SLASH = False
DEBUG = True TEMPLATE_DEBUG = DEBUG SECRET_KEY = 'fake_secret' ROOT_URLCONF = 'tests.test_urls' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'irrelevant.db' } } MIDDLEWARE_CLASSES = [] INSTALLED_APPS = ( 'djproxy', ) STATIC_ROOT = '' STATIC_URL = '/' APPE...
Fix FAT constant for TI-83+
import os import requests from sys import stderr, exit from resources import get_resource_root def get_key(platform): if platform == "TI73": return 0x02 if platform == "TI83p" or platform == "TI83pSE": return 0x04 if platform == "TI84p" or platform == "TI84pSE": return 0x0A if platform == "TI84pCSE": r...
import os import requests from sys import stderr, exit from resources import get_resource_root def get_key(platform): if platform == "TI73": return 0x02 if platform == "TI83p" or platform == "TI83pSE": return 0x04 if platform == "TI84p" or platform == "TI84pSE": return 0x0A if platform == "TI84pCSE": r...
Implement high def pictures for python tutorials.
#! /usr/bin/env python import ROOT import shutil import os def makeimage(MacroName, ImageName, OutDir, cp, py, batch): '''Generates the ImageName output of the macro MacroName''' if batch: ROOT.gROOT.SetBatch(1) if py: execfile(MacroName) else: ROOT.gInterpreter.ProcessLine(".x " + MacroName...
#! /usr/bin/env python import ROOT import shutil import os def makeimage(MacroName, ImageName, OutDir, cp, py, batch): '''Generates the ImageName output of the macro MacroName''' ROOT.gStyle.SetImageScaling(3.) if batch: ROOT.gROOT.SetBatch(1) if py: execfile(MacroName) else: ROOT.gInte...
Add example of date type in Python
# Many built-in types have built-in names assert(type(5) == int) assert(type(True) == bool) assert(type(5.7) == float) assert(type(9 + 5j) == complex) assert(type((8, 'dog', False)) == tuple) assert(type('hello') == str) assert(type(b'hello') == bytes) assert(type([1, '', False]) == list) assert(type(range(1,10)) == ra...
# Many built-in types have built-in names assert(type(5) == int) assert(type(True) == bool) assert(type(5.7) == float) assert(type(9 + 5j) == complex) assert(type((8, 'dog', False)) == tuple) assert(type('hello') == str) assert(type(b'hello') == bytes) assert(type([1, '', False]) == list) assert(type(range(1,10)) == ra...
Fix python 2 error when dumping expected test results
from __future__ import print_function, absolute_import import json import os import sys from imp import reload from io import StringIO import pytest import yaml from adr import query from adr.main import run_recipe class new_run_query(object): def __init__(self, test): self.test = test def __call_...
from __future__ import print_function, absolute_import import json import os import sys from imp import reload from io import BytesIO, StringIO import pytest import yaml from adr import query from adr.main import run_recipe class new_run_query(object): def __init__(self, test): self.test = test de...
Fix up statsd work to support python 2.6
# -*- coding: utf-8 -*- from __future__ import absolute_import import logging from functools import wraps from django_statsd.clients import statsd logger = logging.getLogger(__name__) def task_timer(fn): @wraps(fn) def __wrapped__(self, *args, **kwargs): statsd.incr('tasks.{}.{}.count'.format( ...
# -*- coding: utf-8 -*- from __future__ import absolute_import import logging from functools import wraps from django_statsd.clients import statsd logger = logging.getLogger(__name__) def task_timer(fn): @wraps(fn) def __wrapped__(self, *args, **kwargs): statsd.incr('tasks.{0}.{1}.count'.format( ...
Use integer distances to avoid so many events.
import threading import time class SensorThread(object): def __init__(self, notify, delay=0): self.notify = notify self.delay = delay self.interval = 1 self.distance = -1 def start(self, robot): self.robot = robot thread = threading.Thread(target=self.run, args...
import threading import time class SensorThread(object): def __init__(self, notify, delay=0): self.notify = notify self.delay = delay self.interval = 1 self.distance = -1 def start(self, robot): self.robot = robot thread = threading.Thread(target=self.run, args...
Use file patterns compiled to regular expressions to match hidden files.
class DirTreeFilter(object): def __init__(self, show_hidden=False, show_files=True, show_dirs=True): self.show_hidden = show_hidden self.show_files = show_files self.show_dirs = show_dirs self.hidden_exts = [".pyc", ".pyo", ".o", ".a", ".obj", ".lib", ".swp", "~"] self.hidden...
import re def compile_file_patterns(patterns): return re.compile("$%s^" % "|".join("(%s)" % re.escape(p).replace("\\*", ".*") for p in patterns)) hidden_files = [".*", "*~", "*.swp", "*.pyc", "*.pyo", "*.o", "*.a", "*.obj", "*.lib", "*.class"] hidden_dirs = ["CVS", "__pycache__"] class DirTreeFilter(object): ...
Allow "-" chars in the resync view
# Amara, universalsubtitles.org # # Copyright (C) 2013 Participatory Culture Foundation # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your op...
# Amara, universalsubtitles.org # # Copyright (C) 2013 Participatory Culture Foundation # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your op...
FIX sample data load for Windows
import cPickle from os.path import dirname from os.path import join import numpy as np def load_letters(): """Load the OCR letters dataset. This is a chain classification task. Each example consists of a word, segmented into letters. The first letter of each word is ommited from the data, as it ...
import cPickle from os.path import dirname from os.path import join import numpy as np def load_letters(): """Load the OCR letters dataset. This is a chain classification task. Each example consists of a word, segmented into letters. The first letter of each word is ommited from the data, as it ...
Add to_struct to attribute table
# -*- coding: utf-8 -*- """ Extension attribute table type. Supports ordered (struct) fields, or unordered (hash-based) fields. """ from numba.typesystem import * from numba.typesystem.exttypes import ordering #------------------------------------------------------------------------ # Extension Attributes Type #----...
# -*- coding: utf-8 -*- """ Extension attribute table type. Supports ordered (struct) fields, or unordered (hash-based) fields. """ import numba from numba.typesystem import NumbaType, is_obj from numba.typesystem.exttypes import ordering #------------------------------------------------------------------------ # Ex...
Add scope_types to token revocation policies
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under t...
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under t...
Rename new sequence search url
""" Copyright [2009-2014] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or a...
""" Copyright [2009-2014] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or a...
Raise our own ImportError if all fail. Looks better than complaining about django when that happens
""" Get the best JSON encoder/decoder available on this system. """ __version__ = "0.1" __author__ = "Rune Halvorsen <runefh@gmail.com>" __homepage__ = "http://bitbucket.org/runeh/anyjson/" __docformat__ = "restructuredtext" """ .. function:: serialize(obj) Serialize the object to JSON. .. function:: deseriali...
""" Get the best JSON encoder/decoder available on this system. """ __version__ = "0.1" __author__ = "Rune Halvorsen <runefh@gmail.com>" __homepage__ = "http://bitbucket.org/runeh/anyjson/" __docformat__ = "restructuredtext" """ .. function:: serialize(obj) Serialize the object to JSON. .. function:: deseriali...
Remove unused ability to use custom newrelic.ini
# flake8: noqa # newrelic import & initialization must come first # https://docs.newrelic.com/docs/agents/python-agent/installation/python-agent-advanced-integration#manual-integration try: import newrelic.agent except ImportError: newrelic = False if newrelic: newrelic_ini = config('NEWRELIC_PYTHON_INI_F...
# flake8: noqa # newrelic import & initialization must come first # https://docs.newrelic.com/docs/agents/python-agent/installation/python-agent-advanced-integration#manual-integration try: import newrelic.agent except ImportError: newrelic = False else: newrelic.agent.initialize() import os from bedrock...
Add js2xml.parse() method that wraps the slimit visitor/xml-builder
import lxml.etree from slimit.parser import Parser from js2xml.xmlvisitor import XmlVisitor _parser = Parser() _visitor = XmlVisitor() def parse(text, debug=False): tree = _parser.parse(text, debug=debug) xml = _visitor.visit(tree) return xml
Use better type definitions for the array API custom types
""" This file defines the types for type annotations. These names aren't part of the module namespace, but they are used in the annotations in the function signatures. The functions in the module are only valid for inputs that match the given type annotations. """ __all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPac...
""" This file defines the types for type annotations. These names aren't part of the module namespace, but they are used in the annotations in the function signatures. The functions in the module are only valid for inputs that match the given type annotations. """ __all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPac...
Add a missing test for param-parsing
from django.test import TestCase from django.db import OperationalError class ApiTestUtils(TestCase): def test_db_timeout(self): from api.view_utils import db_timeout @db_timeout(1) def do_long_running_query(): from django.db import connection cursor = conn...
from django.test import TestCase from django.db import OperationalError class ApiTestUtils(TestCase): def test_db_timeout(self): from api.view_utils import db_timeout @db_timeout(1) def do_long_running_query(): from django.db import connection cursor = conn...
Set session cookie flag `SameSite` to `Lax` (instead of `None`)
""" byceps.config_defaults ~~~~~~~~~~~~~~~~~~~~~~ Default configuration values :Copyright: 2006-2021 Jochen Kupperschmidt :License: Revised BSD (see `LICENSE` file for details) """ from datetime import timedelta from pathlib import Path # database connection SQLALCHEMY_ECHO = False # Avoid connection errors after...
""" byceps.config_defaults ~~~~~~~~~~~~~~~~~~~~~~ Default configuration values :Copyright: 2006-2021 Jochen Kupperschmidt :License: Revised BSD (see `LICENSE` file for details) """ from datetime import timedelta from pathlib import Path # database connection SQLALCHEMY_ECHO = False # Avoid connection errors after...
Update expected number of tas statements in test
from indra.sources.tas import process_from_web def test_processor(): tp = process_from_web(affinity_class_limit=10) assert tp assert tp.statements num_stmts = len(tp.statements) # This is the total number of statements about human genes assert num_stmts == 51722, num_stmts assert all(len(s...
from indra.sources.tas import process_from_web def test_processor(): tp = process_from_web(affinity_class_limit=10) assert tp assert tp.statements num_stmts = len(tp.statements) # This is the total number of statements about human genes assert num_stmts == 1601159, num_stmts assert all(len...
Update usage string for min
"""calc.py: A simple calculator.""" import sys def add_all(nums): return sum(nums) def multiply_all(nums): return reduce(lambda a, b: a * b, nums) if __name__ == '__main__': command = sys.argv[1] nums = map(float, sys.argv[2:]) if command == 'add': print(add_all(nums)) elif command =...
"""calc.py: A simple calculator.""" import sys def add_all(nums): return sum(nums) def multiply_all(nums): return reduce(lambda a, b: a * b, nums) if __name__ == '__main__': command = sys.argv[1] nums = map(float, sys.argv[2:]) if command == 'add': print(add_all(nums)) elif command =...
Set def arguments to immutable to avoid a nasty side effect.
# Last Change: Sat Jun 09 07:00 PM 2007 J #======================================================== # Constants used throughout the module (def args, etc...) #======================================================== # This is the default dimension for representing confidence ellipses DEF_VIS_DIM = [0, 1] DEF_ELL_NP = ...
# Last Change: Sat Jun 09 08:00 PM 2007 J #======================================================== # Constants used throughout the module (def args, etc...) #======================================================== # This is the default dimension for representing confidence ellipses DEF_VIS_DIM = (0, 1) DEF_ELL_NP = ...
Fix exception handling in management command. Clean up.
''' Creates an admin user if there aren't any existing superusers ''' from django.core.management.base import BaseCommand, CommandError from django.contrib.auth.models import User from optparse import make_option class Command(BaseCommand): help = 'Creates/Updates an Admin user' def add_arguments(self, pars...
"""Creates an admin user if there aren't any existing superusers.""" from optparse import make_option from django.contrib.auth.models import User from django.core.management.base import BaseCommand, CommandError class Command(BaseCommand): help = 'Creates/Updates an Admin user' def add_arguments(self, par...
Make `test_except` swift test pass
#!/usr/bin/env python3.5 import parsl from parsl import * parsl.set_stream_logger() from parsl.executors.swift_t import * def foo(x, y): return x * y def slow_foo(x, y): import time time.sleep(x) return x * y def bad_foo(x, y): time.sleep(x) return x * y def test_simple(): print("...
#!/usr/bin/env python3.5 from nose.tools import assert_raises import parsl from parsl import * parsl.set_stream_logger() from parsl.executors.swift_t import * def foo(x, y): return x * y def slow_foo(x, y): import time time.sleep(x) return x * y def bad_foo(x, y): time.sleep(x) return x ...
Add newline at the end of file
#!/usr/bin/env python import unittest import glob import sys def create_test_suite(mod): if mod is None: test_file_strings = glob.glob('jinger/test/test_*.py') module_strings = [str[0:len(str)-3].replace('/', '.') for str in test_file_strings] else: module_strings = ['jinger.test.test...
#!/usr/bin/env python import unittest import glob import sys def create_test_suite(mod): if mod is None: test_file_strings = glob.glob('jinger/test/test_*.py') module_strings = [str[0:len(str)-3].replace('/', '.') for str in test_file_strings] else: module_strings = ['jinger.test.test...
Add a test for json retrieval.
import unittest from flask import Flask from flask.ext.autodoc import Autodoc class TestAutodocWithFlask(unittest.TestCase): def setUp(self): self.app = Flask(__name__) self.autodoc = Autodoc(self.app) @self.app.route('/') @self.autodoc.doc() def index(): """R...
import unittest from flask import Flask from flask.ext.autodoc import Autodoc class TestAutodocWithFlask(unittest.TestCase): def setUp(self): self.app = Flask(__name__) self.autodoc = Autodoc(self.app) @self.app.route('/') @self.autodoc.doc() def index(): """R...
Comment out pandas because its unused
from matplotlib import pyplot as plt import numpy as np import os import pandas as pd data_files = "./data" def movingAv(interval, window_size): window = np.ones(int(window_size))/float(window_size) return np.convolve(interval, window, 'same') fig,ax = plt.subplots() for subdir, dirs, files in os.walk(data_files...
from matplotlib import pyplot as plt import numpy as np import os # import pandas as pd data_files = "./data" def movingAv(interval, window_size): window = np.ones(int(window_size))/float(window_size) return np.convolve(interval, window, 'same') fig,ax = plt.subplots() for subdir, dirs, files in os.walk(data_fil...
Allow hooks to return values (and simplify the code)
# -*- coding: utf-8 -*- # # Copyright (c) 2014 Université Catholique de Louvain. # # This file is part of INGInious. # # INGInious is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the...
# -*- coding: utf-8 -*- # # Copyright (c) 2014 Université Catholique de Louvain. # # This file is part of INGInious. # # INGInious is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the...
Use logging directly in main
import logging import os from apscheduler.schedulers.blocking import BlockingScheduler from raven.base import Client as RavenClient import warner import archiver import announcer import flagger raven_client = RavenClient() logger = logging.getLogger(__name__) # When testing changes, set the "TEST_SCHEDULE" envvar...
import logging import os from apscheduler.schedulers.blocking import BlockingScheduler from raven.base import Client as RavenClient import warner import archiver import announcer import flagger raven_client = RavenClient() # When testing changes, set the "TEST_SCHEDULE" envvar to run more often if os.getenv("TEST_...
Hide comments meant to be unseen
from ckan.plugins import toolkit as tk def archiver_resource_show(resource_id): data_dict = {'id': resource_id} return tk.get_action('archiver_resource_show')(data_dict) def archiver_is_resource_broken_html(resource): archival = resource.get('archiver') if not archival: return '<!-- No archi...
from ckan.plugins import toolkit as tk def archiver_resource_show(resource_id): data_dict = {'id': resource_id} return tk.get_action('archiver_resource_show')(data_dict) def archiver_is_resource_broken_html(resource): archival = resource.get('archiver') if not archival: return tk.literal('<!...
Handle chapters as sequence modules
from setuptools import setup, find_packages setup( name="XModule", version="0.1", packages=find_packages(), install_requires=['distribute'], package_data={ '': ['js/*'] }, # See http://guide.python-distribute.org/creation.html#entry-points # for a description of entry_points ...
from setuptools import setup, find_packages setup( name="XModule", version="0.1", packages=find_packages(), install_requires=['distribute'], package_data={ '': ['js/*'] }, # See http://guide.python-distribute.org/creation.html#entry-points # for a description of entry_points ...
Remove test_single_gene_deletion (renamed in cobra=0.4.0b1)
# Copyright 2014 Novo Nordisk Foundation Center for Biosustainability, DTU. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless requi...
# Copyright 2014 Novo Nordisk Foundation Center for Biosustainability, DTU. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless requi...
Remove useless code from LinearColorMapper
from __future__ import absolute_import import numpy as np from six import string_types from ..plot_object import PlotObject from ..properties import Any, Float, Color from .. import palettes class ColorMapper(PlotObject): ''' Base class for color mapper objects. ''' pass class LinearColorMapper(ColorMapper)...
from __future__ import absolute_import import numpy as np from six import string_types from ..plot_object import PlotObject from ..properties import Any, Float, Color from .. import palettes class ColorMapper(PlotObject): ''' Base class for color mapper objects. ''' pass class LinearColorMapper(ColorMapper)...
Add utility function for retrieving the active registration backend.
from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module def get_backend(): """ Return an instance of the registration backend for use on this site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise ``django.cor...
Add `clowder save` logic to Cement controller
from cement.ext.ext_argparse import expose from clowder.cli.abstract_base_controller import AbstractBaseController class SaveController(AbstractBaseController): class Meta: label = 'save' stacked_on = 'base' stacked_type = 'nested' description = 'Create version of clowder.yaml for...
import os import sys from cement.ext.ext_argparse import expose import clowder.util.formatting as fmt from clowder.cli.abstract_base_controller import AbstractBaseController from clowder.util.decorators import valid_clowder_yaml_required from clowder.commands.util import ( validate_groups, validate_projects_e...
Reduce MAX_FLIGHTS on screen to 10.
# Maximum airports to return as JSON. MAX_AIRPORTS = 100 # Maximum flights to return as JSON. MAX_FLIGHTS = 100 # Pixel size of gravatar icons. GRAVATAR_SIZE = 32
# Maximum airports to return as JSON. MAX_AIRPORTS = 100 # Maximum flights to return as JSON. MAX_FLIGHTS = 10 # Pixel size of gravatar icons. GRAVATAR_SIZE = 32
Use imperative import to avoid lint (import order) and as a good convention when side effects are the intention.
from .api import ( Distribution, PackageNotFoundError, distribution, distributions, entry_points, files, metadata, requires, version) # Import for installation side-effects. from . import _hooks # noqa: F401 __all__ = [ 'Distribution', 'PackageNotFoundError', 'distribution', 'distributions',...
from .api import ( Distribution, PackageNotFoundError, distribution, distributions, entry_points, files, metadata, requires, version) # Import for installation side-effects. __import__('importlib_metadata._hooks') __all__ = [ 'Distribution', 'PackageNotFoundError', 'distribution', 'distributi...
Change to cmake to 3.4 and test sys.platform to choose lib extension to resolve import error on MacOSX
import ctypes import numpy as np import os __all__ = ['square'] lib = ctypes.cdll.LoadLibrary("libfoo.so") lib.square.restype = ctypes.c_int lib.square.argtypes = [ctypes.c_int] def square(value): """ Parameters ---------- value: int Returns -------- value square """ return lib....
import ctypes import numpy as np import os import sys __all__ = ['square'] _path = os.path.dirname(__file__) libname = None if sys.platform.startswith('linux'): libname = 'libfoo.so' elif sys.platform == 'darwin': libname = 'libfoo.dylib' elif sys.platform.startswith('win'): libname = 'foo.dll' if libname ==None:...
Revert "Remove apparently superfluous call to fill_recommended_bugs_cache."
import datetime import logging from django.core.management.base import BaseCommand import mysite.profile.tasks import mysite.search.models import mysite.search.tasks ## FIXME: Move to a search management command? def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch(): logging.info("Checking if bu...
import datetime import logging from django.core.management.base import BaseCommand import mysite.profile.tasks import mysite.search.models import mysite.search.tasks ## FIXME: Move to a search management command? def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch(): logging.info("Checking if bu...
Use the object instead of the class
from django.db import models def register(cls, admin_cls): cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True)) def getter(): if not cls._prepared_date: try: return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull...
from django.db import models def register(cls, admin_cls): cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True)) def getter(obj): if not obj._prepared_date: try: return obj.get_ancestors(ascending=True).filter(_prepared_date__isn...
Make code comply with PEP 8
# # -*- encoding: utf-8 -*- # import re import config import surllib import semail import datetime import urllib URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME URL_MAIN = URL_PREFIX + 'Ugeplaner.asp' def docFindWeekplans(bs): trs = bs.findAll('tr') for line in trs: if not line.has_key('clas...
# # -*- encoding: utf-8 -*- # import re import config import surllib import semail import datetime import urllib URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME URL_MAIN = URL_PREFIX + 'Ugeplaner.asp' def docFindWeekplans(bs): trs = bs.findAll('tr') for line in trs: if not line.has_key('cla...
Store field acc_type on res.partner.bank, so that we can search and groupby on it
# -*- coding: utf-8 -*- # © 2016 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>) # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp import models, fields class ResPartnerBank(models.Model): _inherit = 'res.partner.bank' # TODO: It doesn't work, I don't understand wh...
# -*- coding: utf-8 -*- # © 2016 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>) # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp import models, fields class ResPartnerBank(models.Model): _inherit = 'res.partner.bank' # I also have to change the label of the field...
Rename the package to "nyaa"
#!/usr/bin/env python from setuptools import setup, find_packages def install(): desc = 'A Python client library for nyaa.se!', setup( name='py-nyaa', version='1.0', description=desc, long_description=desc, author='SuHun Han', author_email='ssut@ssut.me', ...
#!/usr/bin/env python from setuptools import setup, find_packages def install(): desc = 'A Python client library for nyaa.se!', setup( name='nyaa', version='1.0', description=desc, long_description=desc, author='SuHun Han', author_email='ssut@ssut.me', ur...