commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
8b96a3189f744820763b77075a08f67c898075d4 | Remove default log handlers | NewAcropolis/api,NewAcropolis/api,NewAcropolis/api | app/__init__.py | app/__init__.py | import os
import logging
from logging.handlers import RotatingFileHandler
from flask import Blueprint, Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
application = Flask(__name__)
def create_app(**kwargs):
from app.config import configs
environment_state = get_env()
application.config.from_object(configs[environment_state])
application.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
application.config.update(kwargs)
configure_logging()
application.logger.debug("connected to db: {}".format(application.config.get('SQLALCHEMY_DATABASE_URI')))
db.init_app(application)
register_blueprint()
return application
def register_blueprint():
from app.events.rest import events_blueprint
from app.fees.rest import fees_blueprint
application.register_blueprint(events_blueprint)
application.register_blueprint(fees_blueprint)
def get_env():
if 'www-preview' in get_root_path():
return 'preview'
elif 'www-live' in get_root_path():
return 'live'
else:
return os.environ.get('ENVIRONMENT', 'development')
def get_root_path():
return application.root_path
def configure_logging():
del application.logger.handlers[:]
f = logging.Formatter("%(asctime)s;%(levelname)s;%(message)s", "%Y-%m-%d %H:%M:%S")
rfh = RotatingFileHandler('logs/app.log', maxBytes=10000, backupCount=3)
rfh.setLevel(logging.DEBUG)
rfh.setFormatter(f)
application.logger.addHandler(rfh)
ch = logging.StreamHandler()
ch.setFormatter(f)
if ch not in application.logger.handlers:
application.logger.addHandler(ch)
log = logging.getLogger('werkzeug')
log.setLevel(logging.DEBUG)
if rfh not in log.handlers:
log.addHandler(rfh)
if ch not in log.handlers:
log.addHandler(ch)
application.logger.info('Logging configured')
| import os
import logging
from logging.handlers import RotatingFileHandler
from flask import Blueprint, Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
application = Flask(__name__)
def create_app(**kwargs):
from app.config import configs
environment_state = get_env()
application.config.from_object(configs[environment_state])
application.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
application.config.update(kwargs)
configure_logging()
application.logger.debug("connected to db: {}".format(application.config.get('SQLALCHEMY_DATABASE_URI')))
db.init_app(application)
register_blueprint(application)
return application
def register_blueprint(application):
from app.events.rest import events_blueprint
from app.fees.rest import fees_blueprint
application.register_blueprint(events_blueprint)
application.register_blueprint(fees_blueprint)
def get_env():
if 'www-preview' in get_root_path():
return 'preview'
elif 'www-live' in get_root_path():
return 'live'
else:
return os.environ.get('ENVIRONMENT', 'development')
def get_root_path():
return application.root_path
def configure_logging():
f = logging.Formatter("%(asctime)s;%(levelname)s;%(message)s", "%Y-%m-%d %H:%M:%S")
rfh = RotatingFileHandler('logs/app.log', maxBytes=10000, backupCount=1)
rfh.setLevel(logging.DEBUG)
rfh.setFormatter(f)
if rfh not in application.logger.handlers:
application.logger.addHandler(rfh)
ch = logging.StreamHandler()
ch.setFormatter(f)
if ch not in application.logger.handlers:
application.logger.addHandler(ch)
log = logging.getLogger('werkzeug')
log.setLevel(logging.DEBUG)
if rfh not in log.handlers:
log.addHandler(rfh)
| mit | Python |
2fe22f80d2a75dec50d2af56df16669149b1197d | change version | kontron/robotframework-ipmilibrary | src/IpmiLibrary/version.py | src/IpmiLibrary/version.py | # This file was autogenerated by setup.py
__version__ = '0.2.2'
| # This file was autogenerated by setup.py
__version__ = '0.2.1-dirty'
| apache-2.0 | Python |
88de184c1d9daa79e47873b0bd8912ea67b32ec1 | Change the VCAP_SERVICE key for elasticsearch | alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api | app/__init__.py | app/__init__.py | from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
application.config['ELASTICSEARCH_HOST'] = \
cf_services['elasticsearch-compose'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
es_certfile.write(
base64.b64decode(cf_services['elasticsearch-compose'][0]['credentials']['ca_certificate_base64'])
)
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application
| from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application
| mit | Python |
15f1abef288411539b512f6bdb572c4a54aa5447 | Correct down_revision dag_id/state index creation | lyft/incubator-airflow,artwr/airflow,mrkm4ntr/incubator-airflow,stverhae/incubator-airflow,hamedhsn/incubator-airflow,OpringaoDoTurno/airflow,dgies/incubator-airflow,preete-dixit-ck/incubator-airflow,AllisonWang/incubator-airflow,gilt/incubator-airflow,mtagle/airflow,malmiron/incubator-airflow,sekikn/incubator-airflow,owlabs/incubator-airflow,dmitry-r/incubator-airflow,wndhydrnt/airflow,lxneng/incubator-airflow,Fokko/incubator-airflow,edgarRd/incubator-airflow,andrewmchen/incubator-airflow,subodhchhabra/airflow,bolkedebruin/airflow,Twistbioscience/incubator-airflow,aminghadersohi/airflow,dhuang/incubator-airflow,jesusfcr/airflow,zoyahav/incubator-airflow,asnir/airflow,mrares/incubator-airflow,artwr/airflow,brandsoulmates/incubator-airflow,preete-dixit-ck/incubator-airflow,wileeam/airflow,cfei18/incubator-airflow,skudriashev/incubator-airflow,r39132/airflow,N3da/incubator-airflow,Tagar/incubator-airflow,criccomini/airflow,N3da/incubator-airflow,Twistbioscience/incubator-airflow,wileeam/airflow,sid88in/incubator-airflow,vijaysbhat/incubator-airflow,jiwang576/incubator-airflow,N3da/incubator-airflow,edgarRd/incubator-airflow,lyft/incubator-airflow,lyft/incubator-airflow,mrares/incubator-airflow,fenglu-g/incubator-airflow,hgrif/incubator-airflow,holygits/incubator-airflow,DinoCow/airflow,jgao54/airflow,apache/airflow,edgarRd/incubator-airflow,zack3241/incubator-airflow,rishibarve/incubator-airflow,jfantom/incubator-airflow,aminghadersohi/airflow,mistercrunch/airflow,andyxhadji/incubator-airflow,apache/airflow,andrewmchen/incubator-airflow,spektom/incubator-airflow,Fokko/incubator-airflow,jfantom/incubator-airflow,sekikn/incubator-airflow,holygits/incubator-airflow,lxneng/incubator-airflow,adamhaney/airflow,brandsoulmates/incubator-airflow,dmitry-r/incubator-airflow,adrpar/incubator-airflow,owlabs/incubator-airflow,apache/incubator-airflow,NielsZeilemaker/incubator-airflow,bolkedebruin/airflow,mrkm4ntr/incubator-airflow,CloverHealth/airflow,malmiron/incubator-airflow,cjqian/incubator-airflow,zack3241/incubator-airflow,dhuang/incubator-airflow,zoyahav/incubator-airflow,aminghadersohi/airflow,NielsZeilemaker/incubator-airflow,nathanielvarona/airflow,cjqian/incubator-airflow,malmiron/incubator-airflow,hamedhsn/incubator-airflow,rishibarve/incubator-airflow,wolfier/incubator-airflow,subodhchhabra/airflow,NielsZeilemaker/incubator-airflow,zack3241/incubator-airflow,gritlogic/incubator-airflow,wooga/airflow,cjqian/incubator-airflow,malmiron/incubator-airflow,OpringaoDoTurno/airflow,dmitry-r/incubator-airflow,spektom/incubator-airflow,lxneng/incubator-airflow,Acehaidrey/incubator-airflow,yk5/incubator-airflow,sekikn/incubator-airflow,mtagle/airflow,stverhae/incubator-airflow,AllisonWang/incubator-airflow,dmitry-r/incubator-airflow,lxneng/incubator-airflow,danielvdende/incubator-airflow,MetrodataTeam/incubator-airflow,alexvanboxel/airflow,apache/incubator-airflow,dhuang/incubator-airflow,nathanielvarona/airflow,airbnb/airflow,vijaysbhat/incubator-airflow,sdiazb/airflow,jgao54/airflow,danielvdende/incubator-airflow,danielvdende/incubator-airflow,nathanielvarona/airflow,adrpar/incubator-airflow,Acehaidrey/incubator-airflow,jgao54/airflow,AllisonWang/incubator-airflow,zodiac/incubator-airflow,janczak10/incubator-airflow,sid88in/incubator-airflow,r39132/airflow,lyft/incubator-airflow,jesusfcr/airflow,aminghadersohi/airflow,jiwang576/incubator-airflow,airbnb/airflow,hgrif/incubator-airflow,CloverHealth/airflow,preete-dixit-ck/incubator-airflow,adrpar/incubator-airflow,yk5/incubator-airflow,Acehaidrey/incubator-airflow,janczak10/incubator-airflow,jlowin/airflow,MortalViews/incubator-airflow,MetrodataTeam/incubator-airflow,mattuuh7/incubator-airflow,ProstoMaxim/incubator-airflow,nathanielvarona/airflow,stverhae/incubator-airflow,ProstoMaxim/incubator-airflow,mtagle/airflow,MetrodataTeam/incubator-airflow,wndhydrnt/airflow,preete-dixit-ck/incubator-airflow,andyxhadji/incubator-airflow,dgies/incubator-airflow,MortalViews/incubator-airflow,jgao54/airflow,mrkm4ntr/incubator-airflow,hgrif/incubator-airflow,yati-sagade/incubator-airflow,RealImpactAnalytics/airflow,wndhydrnt/airflow,jhsenjaliya/incubator-airflow,mtagle/airflow,wooga/airflow,MortalViews/incubator-airflow,ronfung/incubator-airflow,saguziel/incubator-airflow,saguziel/incubator-airflow,gritlogic/incubator-airflow,yati-sagade/incubator-airflow,jiwang576/incubator-airflow,mistercrunch/airflow,skudriashev/incubator-airflow,akosel/incubator-airflow,andyxhadji/incubator-airflow,wileeam/airflow,apache/airflow,adamhaney/airflow,vijaysbhat/incubator-airflow,KL-WLCR/incubator-airflow,asnir/airflow,yati-sagade/incubator-airflow,akosel/incubator-airflow,jesusfcr/airflow,holygits/incubator-airflow,gilt/incubator-airflow,gtoonstra/airflow,RealImpactAnalytics/airflow,CloverHealth/airflow,sdiazb/airflow,r39132/airflow,mattuuh7/incubator-airflow,saguziel/incubator-airflow,NielsZeilemaker/incubator-airflow,artwr/airflow,artwr/airflow,danielvdende/incubator-airflow,apache/incubator-airflow,adamhaney/airflow,Acehaidrey/incubator-airflow,vijaysbhat/incubator-airflow,bolkedebruin/airflow,zodiac/incubator-airflow,jesusfcr/airflow,Fokko/incubator-airflow,janczak10/incubator-airflow,asnir/airflow,easytaxibr/airflow,andrewmchen/incubator-airflow,sergiohgz/incubator-airflow,mrares/incubator-airflow,gritlogic/incubator-airflow,nathanielvarona/airflow,Fokko/incubator-airflow,sdiazb/airflow,KL-WLCR/incubator-airflow,jlowin/airflow,asnir/airflow,wolfier/incubator-airflow,easytaxibr/airflow,rishibarve/incubator-airflow,mistercrunch/airflow,mrares/incubator-airflow,wileeam/airflow,sergiohgz/incubator-airflow,cjqian/incubator-airflow,spektom/incubator-airflow,sid88in/incubator-airflow,gtoonstra/airflow,stverhae/incubator-airflow,sekikn/incubator-airflow,gilt/incubator-airflow,gtoonstra/airflow,apache/airflow,yk5/incubator-airflow,adrpar/incubator-airflow,criccomini/airflow,RealImpactAnalytics/airflow,airbnb/airflow,wndhydrnt/airflow,dgies/incubator-airflow,brandsoulmates/incubator-airflow,sdiazb/airflow,bolkedebruin/airflow,CloverHealth/airflow,zoyahav/incubator-airflow,owlabs/incubator-airflow,cfei18/incubator-airflow,yk5/incubator-airflow,wooga/airflow,easytaxibr/airflow,mattuuh7/incubator-airflow,janczak10/incubator-airflow,Acehaidrey/incubator-airflow,sergiohgz/incubator-airflow,jlowin/airflow,dhuang/incubator-airflow,jfantom/incubator-airflow,DinoCow/airflow,danielvdende/incubator-airflow,spektom/incubator-airflow,andrewmchen/incubator-airflow,fenglu-g/incubator-airflow,criccomini/airflow,DinoCow/airflow,ProstoMaxim/incubator-airflow,DinoCow/airflow,danielvdende/incubator-airflow,zack3241/incubator-airflow,airbnb/airflow,Acehaidrey/incubator-airflow,subodhchhabra/airflow,mattuuh7/incubator-airflow,andyxhadji/incubator-airflow,zoyahav/incubator-airflow,edgarRd/incubator-airflow,alexvanboxel/airflow,jiwang576/incubator-airflow,skudriashev/incubator-airflow,bolkedebruin/airflow,ronfung/incubator-airflow,apache/airflow,criccomini/airflow,wooga/airflow,Twistbioscience/incubator-airflow,wolfier/incubator-airflow,Tagar/incubator-airflow,nathanielvarona/airflow,saguziel/incubator-airflow,mistercrunch/airflow,ronfung/incubator-airflow,apache/incubator-airflow,owlabs/incubator-airflow,hgrif/incubator-airflow,Tagar/incubator-airflow,jfantom/incubator-airflow,fenglu-g/incubator-airflow,subodhchhabra/airflow,brandsoulmates/incubator-airflow,cfei18/incubator-airflow,OpringaoDoTurno/airflow,jhsenjaliya/incubator-airflow,RealImpactAnalytics/airflow,cfei18/incubator-airflow,AllisonWang/incubator-airflow,alexvanboxel/airflow,ProstoMaxim/incubator-airflow,KL-WLCR/incubator-airflow,alexvanboxel/airflow,gritlogic/incubator-airflow,akosel/incubator-airflow,hamedhsn/incubator-airflow,skudriashev/incubator-airflow,MortalViews/incubator-airflow,gilt/incubator-airflow,yati-sagade/incubator-airflow,akosel/incubator-airflow,gtoonstra/airflow,sergiohgz/incubator-airflow,OpringaoDoTurno/airflow,r39132/airflow,apache/airflow,MetrodataTeam/incubator-airflow,jhsenjaliya/incubator-airflow,adamhaney/airflow,cfei18/incubator-airflow,wolfier/incubator-airflow,jlowin/airflow,fenglu-g/incubator-airflow,zodiac/incubator-airflow,dgies/incubator-airflow,jhsenjaliya/incubator-airflow,hamedhsn/incubator-airflow,ronfung/incubator-airflow,zodiac/incubator-airflow,Tagar/incubator-airflow,KL-WLCR/incubator-airflow,cfei18/incubator-airflow,N3da/incubator-airflow,Twistbioscience/incubator-airflow,easytaxibr/airflow,mrkm4ntr/incubator-airflow,sid88in/incubator-airflow,holygits/incubator-airflow,rishibarve/incubator-airflow | airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py | airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add dag_id/state index on dag_run table
Revision ID: 127d2bf2dfa7
Revises: 5e7d17757c7a
Create Date: 2017-01-25 11:43:51.635667
"""
# revision identifiers, used by Alembic.
revision = '127d2bf2dfa7'
down_revision = '5e7d17757c7a'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_index('dag_id_state', 'dag_run', ['dag_id', 'state'], unique=False)
def downgrade():
op.drop_index('dag_id_state', table_name='dag_run')
| #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add dag_id/state index on dag_run table
Revision ID: 127d2bf2dfa7
Revises: 1a5a9e6bf2b5
Create Date: 2017-01-25 11:43:51.635667
"""
# revision identifiers, used by Alembic.
revision = '127d2bf2dfa7'
down_revision = '1a5a9e6bf2b5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_index('dag_id_state', 'dag_run', ['dag_id', 'state'], unique=False)
def downgrade():
op.drop_index('dag_id_state', table_name='dag_run')
| apache-2.0 | Python |
3321b1bbaf6f68a823eb625f8921d14b8caf574a | Fix user reference on admin membership page | wagnerand/olympia,mozilla/olympia,diox/olympia,kumar303/addons-server,mozilla/addons-server,kumar303/addons-server,atiqueahmedziad/addons-server,psiinon/addons-server,wagnerand/olympia,eviljeff/olympia,diox/olympia,bqbn/addons-server,aviarypl/mozilla-l10n-addons-server,wagnerand/addons-server,eviljeff/olympia,kumar303/olympia,kumar303/olympia,bqbn/addons-server,kumar303/addons-server,aviarypl/mozilla-l10n-addons-server,mozilla/addons-server,mozilla/addons-server,wagnerand/addons-server,bqbn/addons-server,wagnerand/olympia,diox/olympia,aviarypl/mozilla-l10n-addons-server,mozilla/olympia,mozilla/olympia,kumar303/olympia,kumar303/addons-server,diox/olympia,atiqueahmedziad/addons-server,kumar303/olympia,lavish205/olympia,wagnerand/addons-server,aviarypl/mozilla-l10n-addons-server,atiqueahmedziad/addons-server,wagnerand/olympia,psiinon/addons-server,atiqueahmedziad/addons-server,wagnerand/addons-server,bqbn/addons-server,mozilla/olympia,lavish205/olympia,lavish205/olympia,mozilla/addons-server,eviljeff/olympia,eviljeff/olympia,psiinon/addons-server,psiinon/addons-server,lavish205/olympia | src/olympia/access/admin.py | src/olympia/access/admin.py | from django.core.urlresolvers import reverse
from django.contrib import admin
from django.utils.html import format_html
from .models import Group, GroupUser
class GroupUserInline(admin.TabularInline):
model = GroupUser
raw_id_fields = ('user',)
readonly_fields = ('user_profile_link',)
def user_profile_link(self, obj):
if obj.pk:
return format_html(
'<a href="{}">Admin User Profile</a>',
reverse('admin:users_userprofile_change', args=(obj.user.pk,)))
else:
return ''
user_profile_link.short_description = 'User Profile'
class GroupAdmin(admin.ModelAdmin):
raw_id_fields = ('users',)
ordering = ('name',)
list_display = ('name', 'rules', 'notes')
inlines = (GroupUserInline,)
admin.site.register(Group, GroupAdmin)
| from django.core.urlresolvers import reverse
from django.contrib import admin
from django.utils.html import format_html
from .models import Group, GroupUser
class GroupUserInline(admin.TabularInline):
model = GroupUser
raw_id_fields = ('user',)
readonly_fields = ('user_profile_link',)
def user_profile_link(self, obj):
if obj.pk:
return format_html(
'<a href="{}">Admin User Profile</a>',
reverse('admin:users_userprofile_change', args=(obj.pk,)))
else:
return ''
user_profile_link.short_description = 'User Profile'
class GroupAdmin(admin.ModelAdmin):
raw_id_fields = ('users',)
ordering = ('name',)
list_display = ('name', 'rules', 'notes')
inlines = (GroupUserInline,)
admin.site.register(Group, GroupAdmin)
| bsd-3-clause | Python |
a026662fb1ba7a99c4323fa4a1d9731f437cda3a | Update makedb | jasonsbrooks/ysniff-software,jasonsbrooks/ysniff-software | devops/makedb.py | devops/makedb.py | import boto.dynamodb
dynamoconn = boto.dynamodb.connect_to_region('us-east-1')
table_schema_macs = dynamoconn.create_schema(hash_key_name='MAC',hash_key_proto_value=str)
table_schema_ips = dynamoconn.create_schema(hash_key_name='LOCATION',hash_key_proto_value=str)
dynamoconn.create_table(name='prod-ysniff',schema=table_schema_macs,read_units=5,write_units=40)
dynamoconn.create_table(name='prod-ysniff-ips',schema=table_schema_ips,read_units=4,write_units=1)
| import boto.dynamodb
dynamoconn = boto.dynamodb.connect_to_region('us-east-1')
table_schema_macs = dynamoconn.create_schema(hash_key_name='MAC',hash_key_proto_value=str)
table_schema_ips = dynamoconn.create_schema(hash_key_name='LOCATION',hash_key_proto_value=str)
dynamoconn.create_table(name='dev3-ysniff',schema=table_schema_macs,read_units=5,write_units=40)
#dynamoconn.create_table(name='dev-ysniff-ips',schema=table_schema_ips,read_units=4,write_units=1)
| mit | Python |
1547ab8ccfcd7db4e1f2f15e40f482a2adb0d94e | Fix for: https://github.com/TechEmpower/FrameworkBenchmarks/pull/688#issuecomment-32546800 | methane/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,denkab/FrameworkBenchmarks,testn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,leafo/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,khellang/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sxend/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,herloct/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,methane/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,leafo/FrameworkBenchmarks,actframework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,testn/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,grob/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sgml/FrameworkBenchmarks,joshk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,dmacd/FB-try1,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,joshk/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sgml/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,joshk/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,torhve/FrameworkBenchmarks,herloct/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sgml/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sgml/FrameworkBenchmarks,torhve/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Verber/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zloster/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,methane/FrameworkBenchmarks,actframework/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,methane/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,leafo/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,doom369/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,doom369/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,leafo/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,khellang/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,testn/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,herloct/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,grob/FrameworkBenchmarks,khellang/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,methane/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,khellang/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,khellang/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zloster/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zapov/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,methane/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,sxend/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sgml/FrameworkBenchmarks,herloct/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,torhve/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,valyala/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,dmacd/FB-try1,donovanmuller/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sxend/FrameworkBenchmarks,khellang/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,actframework/FrameworkBenchmarks,denkab/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Verber/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,testn/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,grob/FrameworkBenchmarks,grob/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,grob/FrameworkBenchmarks,dmacd/FB-try1,Rayne/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,joshk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,testn/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,valyala/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,doom369/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sxend/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,testn/FrameworkBenchmarks,methane/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jamming/FrameworkBenchmarks,valyala/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sxend/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,dmacd/FB-try1,ratpack/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zapov/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,doom369/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zloster/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,dmacd/FB-try1,grob/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sxend/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Verber/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zloster/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,methane/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,actframework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,grob/FrameworkBenchmarks,joshk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,testn/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,dmacd/FB-try1,xitrum-framework/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sgml/FrameworkBenchmarks,joshk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jamming/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,dmacd/FB-try1,steveklabnik/FrameworkBenchmarks,zloster/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,denkab/FrameworkBenchmarks,testn/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,denkab/FrameworkBenchmarks,herloct/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,joshk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,leafo/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,sgml/FrameworkBenchmarks,dmacd/FB-try1,greenlaw110/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,grob/FrameworkBenchmarks,Verber/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Verber/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,dmacd/FB-try1,martin-g/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sxend/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,doom369/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zapov/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,methane/FrameworkBenchmarks,torhve/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,testn/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,leafo/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,denkab/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,torhve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,herloct/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,dmacd/FB-try1,jetty-project/FrameworkBenchmarks,valyala/FrameworkBenchmarks,methane/FrameworkBenchmarks,methane/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,valyala/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,grob/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zapov/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,leafo/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,joshk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,actframework/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jamming/FrameworkBenchmarks,methane/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zloster/FrameworkBenchmarks,actframework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,grob/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zloster/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zapov/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,herloct/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zloster/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,grob/FrameworkBenchmarks,jamming/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,methane/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,actframework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sxend/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,khellang/FrameworkBenchmarks,testn/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,denkab/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,grob/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,herloct/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,torhve/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,methane/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zloster/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,testn/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,leafo/FrameworkBenchmarks,khellang/FrameworkBenchmarks,grob/FrameworkBenchmarks,torhve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,doom369/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,grob/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,actframework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,denkab/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,leafo/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sxend/FrameworkBenchmarks,herloct/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,testn/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,dmacd/FB-try1,valyala/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,leafo/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,dmacd/FB-try1,herloct/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zapov/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sxend/FrameworkBenchmarks,torhve/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,denkab/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Verber/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jamming/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Verber/FrameworkBenchmarks,actframework/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,doom369/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,torhve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zloster/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,testn/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zapov/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zloster/FrameworkBenchmarks,khellang/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sxend/FrameworkBenchmarks,khellang/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,zapov/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zapov/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,herloct/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,testn/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks | ninja-standalone/setup.py | ninja-standalone/setup.py | import subprocess
import sys
import setup_util
import os
def start(args, logfile, errfile):
setup_util.replace_text("ninja-standalone/src/main/java/conf/application.conf", "mysql:\/\/.*:3306", "mysql://" + args.database_host + ":3306")
try:
subprocess.check_call("mvn clean compile assembly:single", shell=True, cwd="ninja-standalone", stderr=errfile, stdout=logfile)
subprocess.Popen("java -Dninja.port=8080 -jar target/ninja-standalone-0.0.1-SNAPSHOT-jar-with-dependencies.jar", cwd="ninja-standalone", shell=True, stderr=errfile, stdout=logfile)
return 0
except subprocess.CalledProcessError:
return 1
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'ninja-standalone' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
return 0
| import subprocess
import sys
import setup_util
import os
def start(args, logfile, errfile):
setup_util.replace_text("ninja-standalone/src/main/java/conf/application.conf", "mysql:\/\/.*:3306", "mysql://" + args.database_host + ":3306")
try:
subprocess.check_call("mvn clean compile assembly:single", shell=True, cwd="ninja-standalone", stderr=errfile, stdout=logfile)
subprocess.check_call("java -Dninja.port=8080 -jar target/ninja-standalone-0.0.1-SNAPSHOT-jar-with-dependencies.jar", cwd="ninja-standalone", shell=True, stderr=errfile, stdout=logfile)
return 0
except subprocess.CalledProcessError:
return 1
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'ninja-standalone' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
return 0
| bsd-3-clause | Python |
e3059c66541946afaf7e40776d7fb921bf56073b | Bump version for PyPi | rkhleics/wagtailmodeladmin,rkhleics/wagtailmodeladmin,ababic/wagtailmodeladmin,ababic/wagtailmodeladmin | wagtailmodeladmin/__init__.py | wagtailmodeladmin/__init__.py | __version__ = '2.4.6'
| __version__ = '2.4.5'
| mit | Python |
1ecc56995405e9fe734f185e4a56e07a289fd4f6 | Allow for different computers to analyse different ensemble members. | markmuetz/stormtracks,markmuetz/stormtracks | ipython_setup.py | ipython_setup.py | import time
import datetime as dt
import socket
import detect as d
import load as l
import match as m
import plotting as pl
#num_ensemble_members = 56
num_ensemble_members = 3
start = time.time()
print(start)
short_name = socket.gethostname().split('.')[0]
if short_name == 'linz':
ensemble_member_range = range(0, 3)
elif short_name == 'athens':
ensemble_member_range = range(3, 6)
elif short_name == 'madrid':
ensemble_member_range = range(6, 9)
elif short_name == 'madrid':
ensemble_member_range = range(6, 9)
elif short_name == 'determinist-mint':
ensemble_member_range = range(9, 12)
tracks, cou = l.load_ibtracks_year(2005)
ncdata = d.NCData(2005, verbose=False)
gdatas = []
all_good_matches = []
for i in ensemble_member_range:
gdata = d.GlobalCyclones(ncdata, i)
#gdata.track_vort_maxima(dt.datetime(2005, 6, 1), dt.datetime(2005, 7, 1))
gdata.track_vort_maxima(dt.datetime(2005, 6, 1), dt.datetime(2005, 12, 1))
matches = m.match2(gdata.vort_tracks_by_date, tracks)
good_matches = [ma for ma in matches.values() if ma.av_dist() < 5 and ma.overlap > 6]
gdatas.append(gdata)
all_good_matches.append(good_matches)
end = time.time()
print('{0} - {1}'.format(short_name, ensemble_member_range))
print('Start: {0}, end: {1}, duration: {2}'.format(start, end, end - start))
| import time
import datetime as dt
import detect as d
import load as l
import match as m
import plotting as pl
#num_ensemble_members = 56
num_ensemble_members = 3
start = time.time()
print(start)
tracks, cou = l.load_ibtracks_year(2005)
ncdata = d.NCData(2005)
gdatas = []
all_good_matches = []
for i in range(num_ensemble_members):
gdata = d.GlobalCyclones(ncdata, i)
#gdata.track_vort_maxima(dt.datetime(2005, 6, 1), dt.datetime(2005, 7, 1))
gdata.track_vort_maxima(dt.datetime(2005, 6, 1), dt.datetime(2005, 12, 1))
matches = m.match2(gdata.vort_tracks_by_date, tracks)
good_matches = [ma for ma in matches.values() if ma.av_dist() < 5 and ma.overlap > 6]
gdatas.append(gdata)
all_good_matches.append(good_matches)
end = time.time()
print('Start: {0}, end: {1}, duration: {2}'.format(start, end, end - start))
| mit | Python |
c037f405de773a3c9e9a7affedf2ee154a3c1766 | Remove and replace task.id field, instead of Alter | Koed00/django-q | django_q/migrations/0003_auto_20150708_1326.py | django_q/migrations/0003_auto_20150708_1326.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('django_q', '0002_auto_20150630_1624'),
]
operations = [
migrations.AlterModelOptions(
name='failure',
options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
),
migrations.AlterModelOptions(
name='schedule',
options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
),
migrations.AlterModelOptions(
name='success',
options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
),
migrations.RemoveField(
model_name='task',
name='id',
),
migrations.AddField(
model_name='task',
name='id',
field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('django_q', '0002_auto_20150630_1624'),
]
operations = [
migrations.AlterModelOptions(
name='failure',
options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
),
migrations.AlterModelOptions(
name='schedule',
options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
),
migrations.AlterModelOptions(
name='success',
options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
),
migrations.AlterField(
model_name='task',
name='id',
field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
),
]
| mit | Python |
576e53e729992dddad5f51f8116848719a6f0d23 | use insert() rather than a new variable | keon/algorithms,amaozhao/algorithms | array/plus_one.py | array/plus_one.py | # Given a non-negative number represented as an array of digits,
# plus one to the number.
# The digits are stored such that the most significant
# digit is at the head of the list.
def plusOne(digits):
"""
:type digits: List[int]
:rtype: List[int]
"""
digits[-1] = digits[-1] + 1
res = []
ten = 0
i = len(digits)-1
while i >= 0 or ten == 1:
sum = 0
if i >= 0:
sum += digits[i]
if ten:
sum += 1
res.append(sum % 10)
ten = sum / 10
i -= 1
return res[::-1]
def plus_one(digits):
n = len(digits)
for i in range(n-1, -1, -1):
if digits[i] < 9:
digits[i] += 1
return digits
digits[i] = 0
digits.insert(0, 1)
return digits
def plus_1(num_arr):
for idx, digit in reversed(list(enumerate(num_arr))):
num_arr[idx] = (num_arr[idx] + 1) % 10
if num_arr[idx]:
return num_arr
return [1] + num_arr
| # Given a non-negative number represented as an array of digits,
# plus one to the number.
# The digits are stored such that the most significant
# digit is at the head of the list.
def plusOne(digits):
"""
:type digits: List[int]
:rtype: List[int]
"""
digits[-1] = digits[-1] + 1
res = []
ten = 0
i = len(digits)-1
while i >= 0 or ten == 1:
sum = 0
if i >= 0:
sum += digits[i]
if ten:
sum += 1
res.append(sum % 10)
ten = sum / 10
i -= 1
return res[::-1]
def plus_one(digits):
n = len(digits)
for i in range(n-1, -1, -1):
if digits[i] < 9:
digits[i] += 1
return digits
digits[i] = 0
new_num = [0] * (n+1)
new_num[0] = 1
return new_num
def plus_1(num_arr):
for idx, digit in reversed(list(enumerate(num_arr))):
num_arr[idx] = (num_arr[idx] + 1) % 10
if num_arr[idx]:
return num_arr
return [1] + num_arr
| mit | Python |
4f54aed65d0717e0512797964588fab31660fc6c | 생성자 () 안했었음 | gomjellie/SoongSiri,gomjellie/SoongSiri | app/managers.py | app/managers.py | from .message import *
class Singleton(type):
instance = None
def __call__(cls, *args, **kwargs):
if not cls.instance:
cls.instance = super(Singleton, cls).__call__(*args, **kwargs)
return cls.instance
class APIManager(metaclass=Singleton):
def process(self, stat, req=None):
if stat is 'home':
home_message = MessageAdmin.get_home_message()
return home_message
else:
content = req['content']
if content == u'밥':
print('food')
return MessageAdmin.get_food_message()
elif content in ['학식', '교식']:
if content == '학식':
return PupilFoodMessage()
elif content == '교식':
return FacultyFoodMessage()
elif content == '버스':
return BusMessage()
elif content == '도서관':
return LibMessage()
elif content == '지하철':
return SubMessage()
elif content == 'fail':
fail_message = MessageAdmin.get_fail_message()
return fail_message
else:
return MessageAdmin.get_on_going_message()
class MessageManager(metaclass=Singleton):
def get_home_message(self):
return HomeMessage()
def get_food_message(self):
return FoodMessage()
def get_fail_message(self):
return FailMessage()
def get_on_going_message(self):
return OnGoingMessage()
class KeyboardManager(metaclass=Singleton):
pass
APIAdmin = APIManager()
MessageAdmin = MessageManager()
| from .message import *
class Singleton(type):
instance = None
def __call__(cls, *args, **kwargs):
if not cls.instance:
cls.instance = super(Singleton, cls).__call__(*args, **kwargs)
return cls.instance
class APIManager(metaclass=Singleton):
def process(self, stat, req=None):
if stat is 'home':
home_message = MessageAdmin.get_home_message()
return home_message
else:
content = req['content']
if content == u'밥':
print('food')
return MessageAdmin.get_food_message()
elif content in ['학식', '교식']:
if content == '학식':
return PupilFoodMessage()
elif content == '교식':
return FacultyFoodMessage()
elif content == '버스':
return BusMessage()
elif content == '도서관':
return LibMessage
elif content == '지하철':
return SubMessage
elif content == 'fail':
fail_message = MessageAdmin.get_fail_message()
return fail_message
else:
return MessageAdmin.get_on_going_message()
class MessageManager(metaclass=Singleton):
def get_home_message(self):
return HomeMessage()
def get_food_message(self):
return FoodMessage()
def get_fail_message(self):
return FailMessage()
def get_on_going_message(self):
return OnGoingMessage()
class KeyboardManager(metaclass=Singleton):
pass
APIAdmin = APIManager()
MessageAdmin = MessageManager()
| mit | Python |
69c18f28f4c5c1ad7c7469b1ac214b58d70a01fd | Update logpm.py | TingPing/plugins,TingPing/plugins | HexChat/logpm.py | HexChat/logpm.py | import hexchat
__module_name__ = "LogPMs"
__module_author__ = "TingPing"
__module_version__ = "1"
__module_description__ = "Auto log pm's"
def open_cb(word, word_eol, userdata):
chan = hexchat.get_info('channel')
if chan[0] != '#' and chan not in hexchat.get_prefs('irc_no_hilight'):
hexchat.command('chanopt text_logging on')
hexchat.hook_print("Open Context", open_cb)
| import hexchat
__module_name__ = "LogPM"
__module_author__ = "TingPing"
__module_version__ = "0"
__module_description__ = "Auto log pm's"
def open_cb(word, word_eol, userdata):
if hexchat.get_info('channel')[0] != '#':
hexchat.command('chanopt text_logging on')
hexchat.hook_print("Open Context", open_cb)
| mit | Python |
f22752aacbac9400bda207a5199322b1d1f709d6 | Update landing fields names. | 1flow/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow | oneflow/landing/models.py | oneflow/landing/models.py | # -*- coding: utf-8 -*-
from transmeta import TransMeta
from django.utils.translation import ugettext_lazy as _
from django.db import models
class LandingContent(models.Model):
__metaclass__ = TransMeta
name = models.CharField(_('Template variable name'),
max_length=128, unique=True)
content = models.TextField(_('Template variable content'))
def __unicode__(self):
return _(u'{field_name}: {truncated_field_value}').format(
field_name=self.name, truncated_field_value=self.content[:30]
+ (self.content[30:] and u'…'))
class Meta:
translate = ('content', )
verbose_name = _(u'Landing page content')
verbose_name_plural = _(u'Landing page contents')
| # -*- coding: utf-8 -*-
from transmeta import TransMeta
from django.utils.translation import ugettext_lazy as _
from django.db import models
class LandingContent(models.Model):
__metaclass__ = TransMeta
name = models.CharField(_('Name'), max_length=128)
content = models.TextField(_('Content'))
def __unicode__(self):
return _(u'{field_name}: {truncated_field_value}').format(
field_name=self.name, truncated_field_value=self.content[:30]
+ (self.content[30:] and u'…'))
class Meta:
translate = ('content', )
verbose_name = _(u'Landing page content')
verbose_name_plural = _(u'Landing page contents')
| agpl-3.0 | Python |
3880e4cd73630b19863ecf9bb500fa168cba2722 | Bump the version missed in the 0.0.76 prep CR. | baroquebobcat/pants,15Dkatz/pants,benjyw/pants,foursquare/pants,manasapte/pants,landism/pants,tdyas/pants,ericzundel/pants,twitter/pants,wisechengyi/pants,lahosken/pants,lahosken/pants,peiyuwang/pants,kwlzn/pants,landism/pants,peiyuwang/pants,gmalmquist/pants,tdyas/pants,manasapte/pants,landism/pants,ericzundel/pants,gmalmquist/pants,benjyw/pants,UnrememberMe/pants,benjyw/pants,ity/pants,landism/pants,15Dkatz/pants,baroquebobcat/pants,wisechengyi/pants,benjyw/pants,wisechengyi/pants,pantsbuild/pants,foursquare/pants,15Dkatz/pants,gmalmquist/pants,UnrememberMe/pants,ericzundel/pants,lahosken/pants,twitter/pants,cevaris/pants,UnrememberMe/pants,jsirois/pants,benjyw/pants,fkorotkov/pants,baroquebobcat/pants,ity/pants,gmalmquist/pants,kwlzn/pants,peiyuwang/pants,15Dkatz/pants,peiyuwang/pants,pantsbuild/pants,kwlzn/pants,gmalmquist/pants,twitter/pants,pantsbuild/pants,fkorotkov/pants,mateor/pants,cevaris/pants,kwlzn/pants,wisechengyi/pants,mateor/pants,dbentley/pants,15Dkatz/pants,fkorotkov/pants,tdyas/pants,pombredanne/pants,UnrememberMe/pants,pantsbuild/pants,cevaris/pants,pombredanne/pants,dbentley/pants,mateor/pants,baroquebobcat/pants,ity/pants,pombredanne/pants,dbentley/pants,ity/pants,peiyuwang/pants,lahosken/pants,UnrememberMe/pants,foursquare/pants,foursquare/pants,twitter/pants,pantsbuild/pants,pantsbuild/pants,fkorotkov/pants,jsirois/pants,ity/pants,twitter/pants,tdyas/pants,tdyas/pants,wisechengyi/pants,peiyuwang/pants,15Dkatz/pants,fkorotkov/pants,cevaris/pants,wisechengyi/pants,foursquare/pants,twitter/pants,dbentley/pants,fkorotkov/pants,manasapte/pants,peiyuwang/pants,foursquare/pants,kwlzn/pants,fkorotkov/pants,mateor/pants,tdyas/pants,foursquare/pants,15Dkatz/pants,cevaris/pants,mateor/pants,twitter/pants,pombredanne/pants,kwlzn/pants,tdyas/pants,benjyw/pants,baroquebobcat/pants,UnrememberMe/pants,dbentley/pants,lahosken/pants,benjyw/pants,pantsbuild/pants,tdyas/pants,twitter/pants,pombredanne/pants,landism/pants,wisechengyi/pants,kwlzn/pants,ericzundel/pants,manasapte/pants,baroquebobcat/pants,jsirois/pants,twitter/pants,baroquebobcat/pants,foursquare/pants,lahosken/pants,cevaris/pants,wisechengyi/pants,manasapte/pants,ericzundel/pants,gmalmquist/pants,pombredanne/pants,landism/pants,wisechengyi/pants,landism/pants,15Dkatz/pants,peiyuwang/pants,mateor/pants,ericzundel/pants,manasapte/pants,mateor/pants,lahosken/pants,pombredanne/pants,lahosken/pants,UnrememberMe/pants,ericzundel/pants,ericzundel/pants,cevaris/pants,fkorotkov/pants,mateor/pants,tdyas/pants,manasapte/pants,baroquebobcat/pants,landism/pants,foursquare/pants,UnrememberMe/pants,dbentley/pants,ity/pants,dbentley/pants,baroquebobcat/pants,gmalmquist/pants,UnrememberMe/pants,ity/pants | src/python/pants/version.py | src/python/pants/version.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.revision import Revision
VERSION = '0.0.76'
PANTS_SEMVER = Revision.semver(VERSION)
| # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.revision import Revision
VERSION = '0.0.75'
PANTS_SEMVER = Revision.semver(VERSION)
| apache-2.0 | Python |
7d62bea75a4d75546475eeea10ccc12cc8c408bc | Add color to robot | anassinator/dqn-obstacle-avoidance | simulator.py | simulator.py | # -*- coding: utf-8 -*-
from robot import Robot
from world import World
from PythonQt import QtGui
from director import applogic
from director import objectmodel as om
from director import visualization as vis
from director.consoleapp import ConsoleApp
class Simulator(object):
"""Simulator."""
def __init__(self, robot, world):
"""Constructs the simulator.
Args:
robot: Robot.
world: World.
"""
self._robot = robot
self._world = world
self._app = ConsoleApp()
self._view = self._app.createView(useGrid=False)
self._initialize()
def _initialize(self):
"""Initializes the world."""
# Add world to view.
om.removeFromObjectModel(om.findObjectByName("world"))
vis.showPolyData(self._world.to_polydata(), "world")
# Add robot to view.
robot_color = [0.4, 0.85098039, 0.9372549]
om.removeFromObjectModel(om.findObjectByName("robot"))
vis.showPolyData(self._robot.to_polydata(), "robot", color=robot_color)
def display(self):
"""Launches and displays the simulator."""
widget = QtGui.QWidget()
layout = QtGui.QVBoxLayout(widget)
layout.addWidget(self._view)
widget.showMaximized()
# Set camera.
applogic.resetCamera(viewDirection=[0.2, 0, -1])
self._app.start()
if __name__ == "__main__":
robot = Robot()
world = World(120, 100).add_obstacles()
sim = Simulator(robot, world)
sim.display()
| # -*- coding: utf-8 -*-
from robot import Robot
from world import World
from PythonQt import QtGui
from director import applogic
from director import objectmodel as om
from director import visualization as vis
from director.consoleapp import ConsoleApp
class Simulator(object):
"""Simulator."""
def __init__(self, robot, world):
"""Constructs the simulator.
Args:
robot: Robot.
world: World.
"""
self._robot = robot
self._world = world
self._app = ConsoleApp()
self._view = self._app.createView(useGrid=False)
self._initialize()
def _initialize(self):
"""Initializes the world."""
# Add world to view.
om.removeFromObjectModel(om.findObjectByName("world"))
vis.showPolyData(self._world.to_polydata(), "world")
# Add robot to view.
om.removeFromObjectModel(om.findObjectByName("robot"))
vis.showPolyData(self._robot.to_polydata(), "robot")
def display(self):
"""Launches and displays the simulator."""
widget = QtGui.QWidget()
layout = QtGui.QVBoxLayout(widget)
layout.addWidget(self._view)
widget.showMaximized()
# Set camera.
applogic.resetCamera(viewDirection=[0.2, 0, -1])
self._app.start()
if __name__ == "__main__":
robot = Robot()
world = World(120, 100).add_obstacles()
sim = Simulator(robot, world)
sim.display()
| mit | Python |
92f1dcee29cdfaff49953b3035d5d7127885cc23 | fix editing runs in admin | scottbecker/autolims,scottbecker/autolims,scottbecker/autolims | autolims/admin.py | autolims/admin.py | from django.contrib import admin
from django.apps import apps
app = apps.get_app_config('autolims')
class RunAdmin(admin.ModelAdmin):
#autoprotocol can only be set on creation
def get_readonly_fields(self, request, obj=None):
if obj is not None:
return ['protocol']
return []
for model_name, model in app.models.items():
if model_name=='run':
admin.site.register(model,RunAdmin)
else:
admin.site.register(model) | from django.contrib import admin
from django.apps import apps
app = apps.get_app_config('autolims')
class RunAdmin(admin.ModelAdmin):
#autoprotocol can only be set on creation
def get_readonly_fields(self, request, obj=None):
if obj is not None:
return ['autoprotocol']
return []
for model_name, model in app.models.items():
if model_name=='run':
admin.site.register(model,RunAdmin)
else:
admin.site.register(model) | mit | Python |
fecba5b596180b403cefd5a8072079fcce6012d3 | Add a more useful name that shows this is a task. | jessicalucci/TaskManagement,citrix-openstack-build/taskflow,junneyang/taskflow,citrix-openstack-build/taskflow,jimbobhickville/taskflow,varunarya10/taskflow,jessicalucci/TaskManagement,openstack/taskflow,jimbobhickville/taskflow,varunarya10/taskflow,pombredanne/taskflow-1,openstack/taskflow,pombredanne/taskflow-1,junneyang/taskflow | taskflow/task.py | taskflow/task.py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
class Task(object):
"""An abstraction that defines a potential piece of work that can be
applied and can be reverted to undo the work as a single unit.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, name=None):
if name is None:
name = "%s: %s" % (self.__class__.__name__, id(self))
self.name = name
def __str__(self):
return "Task: %s" % (self.name)
def requires(self):
"""Return any input 'resource' names this task depends on existing
before this task can be applied."""
return set()
def provides(self):
"""Return any output 'resource' names this task produces that other
tasks may depend on this task providing."""
return set()
@abc.abstractmethod
def apply(self, context, *args, **kwargs):
"""Activate a given task which will perform some operation and return.
This method can be used to apply some given context and given set
of args and kwargs to accomplish some goal. Note that the result
that is returned needs to be serializable so that it can be passed
back into this task if reverting is triggered."""
raise NotImplementedError()
def revert(self, context, result, cause):
"""Revert this task using the given context, result that the apply
provided as well as any information which may have caused
said reversion."""
pass
| # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
class Task(object):
"""An abstraction that defines a potential piece of work that can be
applied and can be reverted to undo the work as a single unit.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, name=None):
if name is None:
name = "%s: %s" % (self.__class__.__name__, id(self))
self.name = name
def __str__(self):
return self.name
def requires(self):
"""Return any input 'resource' names this task depends on existing
before this task can be applied."""
return set()
def provides(self):
"""Return any output 'resource' names this task produces that other
tasks may depend on this task providing."""
return set()
@abc.abstractmethod
def apply(self, context, *args, **kwargs):
"""Activate a given task which will perform some operation and return.
This method can be used to apply some given context and given set
of args and kwargs to accomplish some goal. Note that the result
that is returned needs to be serializable so that it can be passed
back into this task if reverting is triggered."""
raise NotImplementedError()
def revert(self, context, result, cause):
"""Revert this task using the given context, result that the apply
provided as well as any information which may have caused
said reversion."""
pass
| apache-2.0 | Python |
168e0128ff09de95fb3946da29a8244dd2baf26e | Correct user type hint | m-ober/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps | byceps/services/authentication/session/models/current_user.py | byceps/services/authentication/session/models/current_user.py | """
byceps.services.authentication.session.models.current_user
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from enum import Enum
from typing import Optional, Set, Union
from .....services.user.models.user import AnonymousUser, User as DbUser
from .....services.user import service as user_service
from .....services.user.transfer.models import User
class CurrentUser:
def __init__(self, user: Union[AnonymousUser, DbUser], is_anonymous: bool,
avatar_url: Optional[str], permissions: Set[Enum]) -> None:
self._user = user
self.id = user.id
self.screen_name = user.screen_name if not is_anonymous else None
self.is_active = user.enabled if not is_anonymous else False
self.is_anonymous = is_anonymous
self.avatar_url = avatar_url
self.permissions = permissions
@classmethod
def create_anonymous(self) -> 'CurrentUser':
user = user_service.get_anonymous_user()
is_anonymous = True
avatar_url = None
permissions = frozenset()
return CurrentUser(user, is_anonymous, avatar_url, permissions)
@classmethod
def create_from_user(self, user: DbUser, avatar_url: Optional[str],
permissions: Set[Enum]) -> 'CurrentUser':
is_anonymous = False
return CurrentUser(user, is_anonymous, avatar_url, permissions)
@property
def is_orga(self) -> bool:
return self._user.is_orga
def has_permission(self, permission: Enum) -> bool:
return permission in self.permissions
def has_any_permission(self, *permissions: Set[Enum]) -> bool:
return any(map(self.has_permission, permissions))
def to_dto(self) -> User:
is_orga = False # Information is deliberately not obtained here.
return User(
self.id,
self.screen_name,
self._user.suspended,
self._user.deleted,
self.avatar_url,
is_orga,
)
def __eq__(self, other) -> bool:
return (other is not None) and (self.id == other.id)
def __hash__(self) -> str:
return hash(self._user)
| """
byceps.services.authentication.session.models.current_user
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from enum import Enum
from typing import Optional, Set
from .....services.user.models.user import User as DbUser
from .....services.user import service as user_service
from .....services.user.transfer.models import User
class CurrentUser:
def __init__(self, user: DbUser, is_anonymous: bool,
avatar_url: Optional[str], permissions: Set[Enum]) -> None:
self._user = user
self.id = user.id
self.screen_name = user.screen_name if not is_anonymous else None
self.is_active = user.enabled if not is_anonymous else False
self.is_anonymous = is_anonymous
self.avatar_url = avatar_url
self.permissions = permissions
@classmethod
def create_anonymous(self) -> 'CurrentUser':
user = user_service.get_anonymous_user()
is_anonymous = True
avatar_url = None
permissions = frozenset()
return CurrentUser(user, is_anonymous, avatar_url, permissions)
@classmethod
def create_from_user(self, user: DbUser, avatar_url: Optional[str],
permissions: Set[Enum]) -> 'CurrentUser':
is_anonymous = False
return CurrentUser(user, is_anonymous, avatar_url, permissions)
@property
def is_orga(self) -> bool:
return self._user.is_orga
def has_permission(self, permission: Enum) -> bool:
return permission in self.permissions
def has_any_permission(self, *permissions: Set[Enum]) -> bool:
return any(map(self.has_permission, permissions))
def to_dto(self) -> User:
is_orga = False # Information is deliberately not obtained here.
return User(
self.id,
self.screen_name,
self._user.suspended,
self._user.deleted,
self.avatar_url,
is_orga,
)
def __eq__(self, other) -> bool:
return (other is not None) and (self.id == other.id)
def __hash__(self) -> str:
return hash(self._user)
| bsd-3-clause | Python |
d577545431c1e41a8987497ee116472f20404252 | Change PyZ3950 to use git+git | mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject | molly/installer/__init__.py | molly/installer/__init__.py | # Packages which Molly needs, but Pip can't handle
PIP_PACKAGES = [
('PyZ3950', 'git+git://github.com/oucs/PyZ3950.git'), # Custom PyZ3950, contains some bug fixes
('django-compress', 'git+git://github.com/mollyproject/django-compress.git#egg=django-compress'), # Fork of django-compress contains some extra features we need
('PIL', 'PIL'), # Because it doesn't install properly when called using setuptools...
] | # Packages which Molly needs, but Pip can't handle
PIP_PACKAGES = [
('PyZ3950', 'git+http://github.com/oucs/PyZ3950.git'), # Custom PyZ3950, contains some bug fixes
('django-compress', 'git+git://github.com/mollyproject/django-compress.git#egg=django-compress'), # Fork of django-compress contains some extra features we need
('PIL', 'PIL'), # Because it doesn't install properly when called using setuptools...
] | apache-2.0 | Python |
1d8592d3d958b50e726654e3a3c95c6957a605d3 | Correct crypto calls to sign | alkadis/vcv,SysTheron/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,liqd/adhocracy,SysTheron/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy,alkadis/vcv,liqd/adhocracy,phihag/adhocracy,alkadis/vcv,phihag/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,alkadis/vcv,phihag/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy,SysTheron/adhocracy,DanielNeugebauer/adhocracy | src/adhocracy/lib/crypto.py | src/adhocracy/lib/crypto.py | import hashlib
import hmac
from pylons import config
try:
from hmac import compare_digest
except ImportError: # Python < 3.3
def compare_digest(a, b):
# We'll just try emulating it here and hope that the network noise
# is sufficient and the Python interpreter isn't too clever
if type(a) != type(b) or len(a) != len(b):
# This conforms to the doc, which says:
# > If a and b are of different lengths, or if an error occurs, a
# > timing attack could theoretically reveal information about the
# > types and lengths of a and b - but not their values.
return False
res = 1
for achar, bchar in zip(a, b):
# The "and" operator short-circuits!
res = res & int(achar == bchar)
return res == 1
def get_secret(config=config, key=None):
search_keys = [
'adhocracy.crypto.secret',
'beaker.session.secret',
'adhocracy.auth.secret',
]
if key is not None:
search_keys.insert(0, key)
for k in search_keys:
if config.get(k):
assert config[k] != 'autogenerated'
res = config[k]
if not isinstance(res, bytes):
res = res.encode('ascii')
return res
raise Exception('No secret configured!')
def _sign(val, secret, salt):
hm = hmac.new(secret + salt, val, hashlib.sha256)
digest = hm.hexdigest()
return digest.encode('ascii')
def sign(val, secret=None, salt=b''):
if secret is None:
secret = get_secret()
assert isinstance(secret, bytes)
assert isinstance(val, bytes)
assert isinstance(salt, bytes)
return _sign(val, secret, salt) + b'!' + val
def verify(signed, secret=None, salt=b''):
if secret is None:
secret = get_secret()
assert isinstance(secret, bytes)
assert isinstance(signed, bytes)
assert isinstance(salt, bytes)
signature, _, val = signed.partition(b'!')
correct_signature = _sign(val, secret, salt)
if compare_digest(signature, correct_signature):
return val
else:
raise ValueError(salt.decode('ascii') + u' MAC verification failed')
| import hashlib
import hmac
from pylons import config
try:
from hmac import compare_digest
except ImportError: # Python < 3.3
def compare_digest(a, b):
# We'll just try emulating it here and hope that the network noise
# is sufficient and the Python interpreter isn't too clever
if type(a) != type(b) or len(a) != len(b):
# This conforms to the doc, which says:
# > If a and b are of different lengths, or if an error occurs, a
# > timing attack could theoretically reveal information about the
# > types and lengths of a and b - but not their values.
return False
res = 1
for achar, bchar in zip(a, b):
# The "and" operator short-circuits!
res = res & int(achar == bchar)
return res == 1
def get_secret(config=config, key=None):
search_keys = [
'adhocracy.crypto.secret',
'beaker.session.secret',
'adhocracy.auth.secret',
]
if key is not None:
search_keys.insert(0, key)
for k in search_keys:
if config.get(k):
assert config[k] != 'autogenerated'
res = config[k]
if not isinstance(res, bytes):
res = res.encode('ascii')
return res
raise Exception('No secret configured!')
def _sign(val, secret, salt):
hm = hmac.new(secret + salt, val, hashlib.sha256)
digest = hm.hexdigest()
return digest.encode('ascii')
def sign(val, secret=None, salt=b''):
if secret is None:
secret = get_secret()
assert isinstance(secret, bytes)
assert isinstance(val, bytes)
assert isinstance(salt, bytes)
return _sign(secret, val, salt) + b'!' + val
def verify(signed, secret=None, salt=b''):
if secret is None:
secret = get_secret()
assert isinstance(secret, bytes)
assert isinstance(signed, bytes)
assert isinstance(salt, bytes)
signature, _, val = signed.partition(b'!')
correct_signature = _sign(secret, val, salt)
if compare_digest(signature, correct_signature):
return val
else:
raise ValueError(salt.decode('ascii') + u' MAC verification failed')
| agpl-3.0 | Python |
6183054ac61ca69e71a8ee5821f97e163d29a3fb | Make path to TODO file configurable | tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status | bumblebee/modules/todo.py | bumblebee/modules/todo.py | # pylint: disable=C0111,R0903
"""Displays the number of todo items from a text file
Parameters:
* todo.file: File to read TODOs from (defaults to ~/Documents/todo.txt)
"""
import platform
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.output)
)
self._todos = self.count_items()
def output(self, widget):
self._todos = self.count_items()
return str(self._todos)
def state(self, widgets):
if self._todos == 0 :
return "empty"
return "items"
def count_items(filename):
try:
i = -1
doc = self.parameter("file", "~/Documents/todo.txt")
with open(doc) as f:
for i, l in enumerate(f):
pass
return i+1
except Exception:
return 0
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| # pylint: disable=C0111,R0903
"""Displays the number of todo items in ~/Documents/todo.txt"""
import platform
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.output)
)
self._todos = self.count_items()
def output(self, widget):
self._todos = self.count_items()
return str(self._todos)
def state(self, widgets):
if self._todos == 0 :
return "empty"
return "items"
def count_items(filename):
try:
i=-1
with open('~/Documents/todo.txt') as f:
for i, l in enumerate(f):
pass
return i+1
except Exception:
return 0
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | Python |
e79facf3688cf6b98d18c475b5f41ced6248cc64 | Document rename_mp3.py | jleung51/scripts,jleung51/scripts,jleung51/scripts | mp3-formatter/rename_mp3.py | mp3-formatter/rename_mp3.py | #!/usr/bin/python3
# This Python 3 file reads (from stdin) a list of tracks, each separated by
# a newline, and renames the MP3 files in the current folder to the tracklist.
import ID3
import os
import sys
def read_tracklist():
"""Return list of tracks from stdin.
"""
tracklist = []
for line in sys.stdin:
tracklist.append(line)
return tracklist
def match_length(files, tracklist):
"""Raise error if the two lists have different lengths.
"""
if len(files) != len(tracklist):
raise RuntimeError(
str(len(tracklist)) +
" file names were given but " +
str(len(files)) +
" files were found.")
tracklist = read_tracklist()
mp3_extension = ".mp3"
files_all = os.listdir('.')
files = []
for f in files_all:
# Prune directories
if not os.path.isfile(f):
continue
# Prune non-MP3 files
filename, extension = os.path.splitext(f)
if extension != mp3_extension:
continue
# Prune this file
f_temp = os.path.abspath(f)
if f_temp == os.path.abspath(__file__):
continue
files.append(f)
match_length(files, tracklist)
files.sort()
i = 0
for f in files:
os.rename(f, tracklist[i] + mp3_extension)
i += 1
| #!/usr/bin/python3
import ID3
import os
import sys
def read_tracklist():
tracklist = []
for line in sys.stdin:
tracklist.append(line)
return tracklist
def match_length(files, tracklist):
if len(files) != len(tracklist):
raise RuntimeError(
str(len(tracklist)) +
" file names were given but " +
str(len(files)) +
" files were found.")
tracklist = read_tracklist()
mp3_extension = ".mp3"
files_all = os.listdir('.')
files = []
for f in files_all:
# Prune directories
if not os.path.isfile(f):
continue
# Prune non-MP3 files
filename, extension = os.path.splitext(f)
if extension != mp3_extension:
continue
# Prune this file
f_temp = os.path.abspath(f)
if f_temp == os.path.abspath(__file__):
continue
files.append(f)
match_length(files, tracklist)
files.sort()
i = 0
for f in files:
os.rename(f, tracklist[i] + mp3_extension)
i += 1
| mit | Python |
8c177c84d0b0ea6e63fdaa50d317cfad4528ac57 | add todo | opencivicdata/scrapers-ca,opencivicdata/scrapers-ca | ca_on_lambton/__init__.py | ca_on_lambton/__init__.py | from __future__ import unicode_literals
from utils import CanadianJurisdiction
from pupa.scrape import Organization
class Lambton(CanadianJurisdiction):
classification = 'legislature'
division_id = 'ocd-division/country:ca/cd:3538'
division_name = 'Lambton'
name = 'Lambton County Council'
url = 'http://www.lambtononline.ca/home/government/accessingcountycouncil/countycouncillors/Pages/default.aspx'
def get_organizations(self): # @todo Fix labels along the lines of Waterloo Region.
organization = Organization(self.name, classification=self.classification)
organization.add_post(role='Warden', label='Lambton')
organization.add_post(role='Deputy Warden', label='Lambton')
for i in range(15):
organization.add_post(role='Councillor', label='Lambton (seat %d)' % (i + 1))
yield organization
| from __future__ import unicode_literals
from utils import CanadianJurisdiction
from pupa.scrape import Organization
class Lambton(CanadianJurisdiction):
classification = 'legislature'
division_id = 'ocd-division/country:ca/cd:3538'
division_name = 'Lambton'
name = 'Lambton County Council'
url = 'http://www.lambtononline.ca/home/government/accessingcountycouncil/countycouncillors/Pages/default.aspx'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
organization.add_post(role='Warden', label='Lambton')
organization.add_post(role='Deputy Warden', label='Lambton')
for i in range(15):
organization.add_post(role='Councillor', label='Lambton (seat %d)' % (i + 1))
yield organization
| mit | Python |
022a9ee685c317a43482034257937defc726c36e | use metasite delimiter for sites with multiple IDs | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | cgi-bin/request/recent.py | cgi-bin/request/recent.py | #!/usr/bin/env python
"""
Return a simple CSV of recent observations from the database
"""
import psycopg2
import cgi
import sys
import memcache
def run(sid):
""" run() """
dbconn = psycopg2.connect(database='iem', host='iemdb', user='nobody')
cursor = dbconn.cursor()
cursor.execute("""SELECT valid at time zone 'UTC', tmpf, dwpf, raw,
x(geom), y(geom) , tmpf, dwpf, drct, sknt, phour, alti, mslp, vsby, gust
from current_log c JOIN
stations t on (t.iemid = c.iemid) WHERE t.id = %s and t.metasite = 'f'
ORDER by valid ASC""", (sid,))
res = "station,utcvalid,lon,lat,tmpf,dwpf,drct,sknt,p01i,alti,mslp,vsby,gust,raw\n"
for row in cursor:
res += "%s,%s,%.4f,%.4f,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n" % (sid, row[0].strftime("%Y-%m-%d %H:%M"),
row[4], row[5], row[6], row[7], row[8],
row[9], row[10], row[11], row[12],
row[13], row[14], row[3])
return res.replace("None", "M")
if __name__ == '__main__':
sys.stdout.write("Content-type: text/plain\n\n")
form = cgi.FieldStorage()
sid = form.getfirst('station', 'AMW')[:5]
mckey = "/cgi-bin/request/recent.py|%s" % (sid,)
mc = memcache.Client(['iem-memcached:11211'], debug=0)
res = mc.get(mckey)
if not res:
res = run(sid)
sys.stdout.write(res)
mc.set(mckey, res, 300)
else:
sys.stdout.write( res )
| #!/usr/bin/env python
"""
Return a simple CSV of recent observations from the database
"""
import psycopg2
import cgi
import sys
import memcache
def run(sid):
""" run() """
dbconn = psycopg2.connect(database='iem', host='iemdb', user='nobody')
cursor = dbconn.cursor()
cursor.execute("""SELECT valid at time zone 'UTC', tmpf, dwpf, raw,
x(geom), y(geom) , tmpf, dwpf, drct, sknt, phour, alti, mslp, vsby, gust
from current_log c JOIN
stations t on (t.iemid = c.iemid) WHERE t.id = %s and
(t.network ~* 'ASOS' or t.network = 'AWOS')
ORDER by valid ASC""", (sid,))
res = "station,utcvalid,lon,lat,tmpf,dwpf,drct,sknt,p01i,alti,mslp,vsby,gust,raw\n"
for row in cursor:
res += "%s,%s,%.4f,%.4f,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n" % (sid, row[0].strftime("%Y-%m-%d %H:%M"),
row[4], row[5], row[6], row[7], row[8],
row[9], row[10], row[11], row[12],
row[13], row[14], row[3])
return res.replace("None", "M")
if __name__ == '__main__':
sys.stdout.write("Content-type: text/plain\n\n")
form = cgi.FieldStorage()
sid = form.getfirst('station', 'AMW')[:5]
mckey = "/cgi-bin/request/recent.py|%s" % (sid,)
mc = memcache.Client(['iem-memcached:11211'], debug=0)
res = mc.get(mckey)
if not res:
res = run(sid)
sys.stdout.write(res)
mc.set(mckey, res, 300)
else:
sys.stdout.write( res )
| mit | Python |
5109e5dcea8364182bfbccb6c616d0b2d9a7e4be | Update test.py | inkenbrandt/WellApplication | test/test.py | test/test.py | # -*- coding: utf-8 -*-
"""
Created on Sat Jan 23 13:03:00 2016
@author: p
"""
import wellapplication as wa
import pandas as pd
import matplotlib
def test_getelev():
x = [-111.21, 41.4]
m = wa.getelev(x)
assert m > 100.0
def test_gethuc():
x = [-111.21, 41.4]
huc_data = wa.get_huc(x)
assert len(huc_data[0])>0
def test_USGSID():
x = [-111.21, 41.4]
usgs_id = wa.USGSID(x)
assert usgs_id == '412400111123601'
def test_nwis():
nw = wa.nwis('dv', '01585200', 'sites')
def test_mktest():
x = range(0,100)
trend = wa.MannKendall.mk_test(x,0.05)
assert trend.trend == 'increasing'
def test_pipe():
pipr = wa.piper()
Chem = {'Type':[1,2,2,3], 'Cl':[1.72,0.90,4.09,1.52], 'HCO3':[4.02,1.28,4.29,3.04],
'SO4':[0.58,0.54,0.38,0.46], 'NaK':[1.40,0.90,3.38,2.86], 'Ca':[4.53,None,4.74,1.90],
'Mg':[0.79,0.74,0.72,0.66], 'EC':[672.0,308.0,884.0,542.0], 'NO3':[0.4,0.36,0.08,0.40],
'Sicc':[0.21,0.56,None,-0.41]}
chem = pd.DataFrame(Chem)
pipr.piperplot(chem)
assert type(pipr.plot) == matplotlib.figure.Figure
def test_new_xle_imp():
xle = 'docs/20160919_LittleHobble.xle'
xle_df = wa.new_xle_imp(xle)
assert len(xle_df) > 0
def test_xle_head_table():
xle_dir = 'docs/'
dir_df = wa.xle_head_table(xle_dir)
assert len(xle_dir) > 0
def test_dataendclean():
xle = 'docs/20160919_LittleHobble.xle'
df = wa.new_xle_imp(xle)
x = Value
xle1 = wa.dataendclean(df, x)
assert xle != xle1
def test_smoother():
xle = 'docs/20160919_LittleHobble.xle'
df = wa.new_xle_imp(xle)
x = Value
xle1 = wa.smoother(df, x, std=1)
assert xle != xle1
def test_hourly_resample():
xle = 'docs/20160919_LittleHobble.xle'
df = wa.new_xle_imp(xle)
xle1 = wa.smoother(df, minutes=30)
| # -*- coding: utf-8 -*-
"""
Created on Sat Jan 23 13:03:00 2016
@author: p
"""
import wellapplication as wa
import pandas as pd
import matplotlib
def test_getelev():
x = [-111.21, 41.4]
m = wa.getelev(x)
assert m > 100.0
def test_gethuc():
x = [-111.21, 41.4]
huc_data = wa.get_huc(x)
assert len(huc_data[0])>0
def test_USGSID():
x = [-111.21, 41.4]
usgs_id = wa.USGSID(x)
assert usgs_id == '412400111123601'
def test_nwis():
nw = wa.nwis('dv', '01585200', 'sites')
def test_mktest():
x = range(0,100)
trend = wa.MannKendall.mk_test(x,0.05)
assert trend.trend == 'increasing'
def test_pipe():
pipr = wa.piper()
Chem = {'Type':[1,2,2,3], 'Cl':[1.72,0.90,4.09,1.52], 'HCO3':[4.02,1.28,4.29,3.04],
'SO4':[0.58,0.54,0.38,0.46], 'NaK':[1.40,0.90,3.38,2.86], 'Ca':[4.53,None,4.74,1.90],
'Mg':[0.79,0.74,0.72,0.66], 'EC':[672.0,308.0,884.0,542.0], 'NO3':[0.4,0.36,0.08,0.40],
'Sicc':[0.21,0.56,None,-0.41]}
chem = pd.DataFrame(Chem)
pipr.piperplot(chem)
assert type(pipr.plot) == matplotlib.figure.Figure
def test_new_xle_imp():
xle = 'docs/20160919_LittleHobble.xle'
xle_df = wa.new_xle_imp(xle)
assert len(xle_df) > 0
def test_xle_head_table():
xle_dir = 'docs/'
dir_df = wa.xle_head_table(xle_dir)
assert len(xle_dir) > 0
| mit | Python |
239ae541caa0f8ddcb3b26b91289669c69e15cdb | Support python2-like sorting in python3 | oneklc/dimod,oneklc/dimod | dimod/compatibility23.py | dimod/compatibility23.py | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
import sys
import inspect
try:
import collections.abc as abc
except ImportError:
import collections as abc
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
def getargspec(f):
return inspect.getargspec(f)
def SortKey(obj):
return obj
else:
_ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
def getargspec(f):
argspec = inspect.getfullargspec(f)
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
return _ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
# Based on an answer https://stackoverflow.com/a/34757114/8766655
# by kindall https://stackoverflow.com/users/416467/kindall
class SortKey(object):
def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
try:
return self.obj < other.obj
except TypeError:
pass
if isinstance(self.obj, type(other.obj)):
if not isinstance(self.obj, abc.Sequence):
raise TypeError("cannot compare types")
for v0, v1 in zip(self.obj, other.obj):
if SortKey(v0) < SortKey(v1):
return True
return len(self.obj) < len(other.obj)
return type(self.obj).__name__ < type(other.obj).__name__
| # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
import sys
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
def getargspec(f):
return inspect.getargspec(f)
else:
_ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
def getargspec(f):
argspec = inspect.getfullargspec(f)
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
return _ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
| apache-2.0 | Python |
e3835b3b28f21262719c8ffe63e390c12963a69f | make tiles accessable at /layername/ and /tiles/layername/ | trailbehind/EasyTileServer,trailbehind/EasyTileServer | webApp/easyTileServer/urls.py | webApp/easyTileServer/urls.py | from django.conf.urls import patterns, include, url
from rest_framework import routers
from django.contrib.auth.decorators import login_required
from layers import views
from django.contrib import admin
admin.autodiscover()
router = routers.DefaultRouter()
router.register(r'layeradmin', views.LayerAdmin)
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^$', views.IndexView.as_view()),
url(r'^', include(router.urls)),
url(r'^layers/$', views.TileJson.as_view({'get': 'list'})),
url(r'^layers.(?P<format>[^/\.]+)$', views.TileJson.as_view({'get': 'list'})),
url(r'^layers/(?P<layerName>[^/\.]+).(?P<format>[\w]+)$', views.TileJson.as_view({'get': 'retrieve'})),
url(r'^layers/(?P<layerName>[^/]+)/$', views.TileJson.as_view({'get': 'retrieve'})),
url(r'^preview/(?P<layer_name>[^/\.]+)/$', views.LayerPreviewView.as_view()),
url(r'^upload/', login_required(views.UploadFileView.as_view(success_url="/layeradmin/"))),
url(r'^tiles/(?P<layer_name>[^/]+)/(?P<z>[^/]+)/(?P<x>[^/]+)/(?P<y>[^/]+)\.(?P<extension>.+)$', 'layers.views.tiles', name='tiles_url'),
url(r'^(?P<layer_name>[^/]+)/(?P<z>[^/]+)/(?P<x>[^/]+)/(?P<y>[^/]+)\.(?P<extension>.+)$', 'layers.views.tiles', name='tiles_url'),
)
| from django.conf.urls import patterns, include, url
from rest_framework import routers
from django.contrib.auth.decorators import login_required
from layers import views
from django.contrib import admin
admin.autodiscover()
router = routers.DefaultRouter()
router.register(r'layeradmin', views.LayerAdmin)
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^$', views.IndexView.as_view()),
url(r'^', include(router.urls)),
url(r'^layers/$', views.TileJson.as_view({'get': 'list'})),
url(r'^layers.(?P<format>[^/\.]+)$', views.TileJson.as_view({'get': 'list'})),
url(r'^layers/(?P<layerName>[^/\.]+).(?P<format>[\w]+)$', views.TileJson.as_view({'get': 'retrieve'})),
url(r'^layers/(?P<layerName>[^/]+)/$', views.TileJson.as_view({'get': 'retrieve'})),
url(r'^preview/(?P<layer_name>[^/\.]+)/$', views.LayerPreviewView.as_view()),
url(r'^upload/', login_required(views.UploadFileView.as_view(success_url="/layeradmin/"))),
url(r'^tiles/(?P<layer_name>[^/]+)/(?P<z>[^/]+)/(?P<x>[^/]+)/(?P<y>[^/]+)\.(?P<extension>.+)$', 'layers.views.tiles', name='tiles_url'),
)
| bsd-3-clause | Python |
d1ac8994ac19d59860e305409221e5f93ff8a148 | Include a default DATABASES setting in settings_base | wangjun/djangae,jscissr/djangae,jscissr/djangae,chargrizzle/djangae,nealedj/djangae,leekchan/djangae,SiPiggles/djangae,leekchan/djangae,grzes/djangae,asendecka/djangae,trik/djangae,potatolondon/djangae,chargrizzle/djangae,kirberich/djangae,armirusco/djangae,pablorecio/djangae,stucox/djangae,stucox/djangae,pablorecio/djangae,nealedj/djangae,grzes/djangae,leekchan/djangae,potatolondon/djangae,b-cannon/my_djae,chargrizzle/djangae,armirusco/djangae,asendecka/djangae,SiPiggles/djangae,trik/djangae,wangjun/djangae,kirberich/djangae,stucox/djangae,jscissr/djangae,SiPiggles/djangae,pablorecio/djangae,grzes/djangae,martinogden/djangae,wangjun/djangae,trik/djangae,martinogden/djangae,asendecka/djangae,kirberich/djangae,martinogden/djangae,armirusco/djangae,nealedj/djangae | djangae/settings_base.py | djangae/settings_base.py |
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
} |
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
| bsd-3-clause | Python |
d755d53fdacc686200abfc0dd0409f4233af510d | Fix bug with error while run django-admin loaddata. | Dybov/real_estate_agency,Dybov/real_estate_agency,Dybov/real_estate_agency | real_estate_agency/new_buildings/signals.py | real_estate_agency/new_buildings/signals.py | from django.db.models.signals import post_save, m2m_changed
from django.dispatch import receiver
from .models import NewBuilding, NewApartment, ResidentalComplex
@receiver(
post_save,
sender=NewBuilding,
dispatch_uid="save_apartment_after_building_saved"
)
def newbuilding_post_saver(sender, instance, created, **kwargs):
"""Set date of construction to NewApartment and RC objects
if building changes date_of_construction"""
related_apartments = NewApartment.objects.filter(buildings=instance)
if not hasattr(instance, 'instance'):
return
residental_complex = instance.residental_complex
for apartment in related_apartments:
if apartment._set_date_of_construction():
apartment.save()
if residental_complex._set_date_of_construction():
residental_complex.save()
@receiver(m2m_changed,
sender=NewApartment.buildings.through,
dispatch_uid="save_apartment_if_m2m_changed")
def apartment_m2m_changer(sender, instance, action, reverse, **kwargs):
""" Set date of construction to NewApartment obj
if it changes building field and these date changed beacuse of that"""
if not reverse and action == "post_add":
if instance._set_date_of_construction():
instance.save()
if instance.residental_complex._set_lowest_price():
instance.residental_complex.save()
@receiver(
post_save,
sender=ResidentalComplex,
dispatch_uid="set_prices_to_rc"
)
def residental_complex_price_setter(sender, instance, **kwargs):
if instance._set_lowest_price():
instance.save()
| from django.db.models.signals import post_save, m2m_changed
from django.dispatch import receiver
from .models import NewBuilding, NewApartment, ResidentalComplex
@receiver(post_save,
sender=NewBuilding,
dispatch_uid="save_apartment_after_building_saved")
def newbuilding_post_saver(sender, instance, created, **kwargs):
"""Set date of construction to NewApartment and RC objects
if building changes date_of_construction"""
related_apartments = NewApartment.objects.filter(buildings=instance)
residental_complex = instance.residental_complex
for apartment in related_apartments:
if apartment._set_date_of_construction():
apartment.save()
if residental_complex._set_date_of_construction():
residental_complex.save()
@receiver(m2m_changed,
sender=NewApartment.buildings.through,
dispatch_uid="save_apartment_if_m2m_changed")
def apartment_m2m_changer(sender, instance, action, reverse, **kwargs):
""" Set date of construction to NewApartment obj
if it changes building field and these date changed beacuse of that"""
if not reverse and action == "post_add":
if instance._set_date_of_construction():
instance.save()
if instance.residental_complex._set_lowest_price():
instance.residental_complex.save()
@receiver(post_save,
sender=ResidentalComplex,
dispatch_uid="set_prices_to_rc")
def residental_complex_price_setter(sender, instance, **kwargs):
if instance._set_lowest_price():
instance.save()
'''
@receiver(post_save,
sender=NewApartment,
dispatch_uid="set_prices_to_rc_after_apartment_saved")
def residental_complex_price_setter_after_apartment_saved(sender, instance, **kwargs):
if instance.residental_complex._set_lowest_price():
instance.residental_complex.save()
''' | mit | Python |
1df3cb78ef27b61eb85f6570b75bb2d7c6b17e03 | Allow complete paths for script | nojhan/weboob-devel,Konubinix/weboob,RouxRC/weboob,sputnick-dev/weboob,yannrouillard/weboob,nojhan/weboob-devel,frankrousseau/weboob,Konubinix/weboob,yannrouillard/weboob,yannrouillard/weboob,Boussadia/weboob,sputnick-dev/weboob,Boussadia/weboob,laurent-george/weboob,willprice/weboob,Konubinix/weboob,laurent-george/weboob,nojhan/weboob-devel,frankrousseau/weboob,willprice/weboob,Boussadia/weboob,RouxRC/weboob,sputnick-dev/weboob,frankrousseau/weboob,RouxRC/weboob,laurent-george/weboob,willprice/weboob,Boussadia/weboob | tools/local_run.py | tools/local_run.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
import sys
import os
if len(sys.argv) < 2:
print "Usage: %s SCRIPTNAME [args]" % sys.argv[0]
sys.exit(1)
else:
script = sys.argv[1]
args = sys.argv[2:]
project = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
wd = os.path.join(project, 'localconfig')
if not os.path.isdir(wd):
os.makedirs(wd)
env = os.environ.copy()
env['PYTHONPATH'] = project
env['WEBOOB_WORKDIR'] = wd
env['WEBOOB_BACKENDS'] = os.path.expanduser('~/.config/weboob/backends')
with open(os.path.join(wd, 'sources.list'), 'w') as f:
f.write("file://%s\n" % os.path.join(project, 'modules'))
# Hide output unless there is an error
p = subprocess.Popen(
[sys.executable, os.path.join(project, 'scripts', 'weboob-config'), 'update'],
env=env,
stdout=subprocess.PIPE)
s = p.communicate()
if p.returncode != 0:
print s[0]
sys.exit(p.returncode)
if os.path.exists(script):
spath = script
else:
spath = os.path.join(project, 'scripts', script)
os.execvpe(
sys.executable,
['-Wall', spath] + args,
env)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
import sys
import os
if len(sys.argv) < 2:
print "Usage: %s SCRIPTNAME [args]" % sys.argv[0]
sys.exit(1)
else:
script = sys.argv[1]
args = sys.argv[2:]
project = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
wd = os.path.join(project, 'localconfig')
if not os.path.isdir(wd):
os.makedirs(wd)
env = os.environ.copy()
env['PYTHONPATH'] = project
env['WEBOOB_WORKDIR'] = wd
env['WEBOOB_BACKENDS'] = os.path.expanduser('~/.config/weboob/backends')
with open(os.path.join(wd, 'sources.list'), 'w') as f:
f.write("file://%s\n" % os.path.join(project, 'modules'))
# Hide output unless there is an error
p = subprocess.Popen(
[sys.executable, os.path.join(project, 'scripts', 'weboob-config'), 'update'],
env=env,
stdout=subprocess.PIPE)
s = p.communicate()
if p.returncode != 0:
print s[0]
sys.exit(p.returncode)
os.execvpe(
sys.executable,
['-Wall', os.path.join(project, 'scripts', script)] + args,
env)
| agpl-3.0 | Python |
d606c8053cea1d3d23e4858b31cd8a3120869dcb | add libraries to system via PYTHONPATH | linkmax91/bitquant,linkmax91/bitquant,joequant/bitquant,linkmax91/bitquant,joequant/bitquant,linkmax91/bitquant,joequant/bitquant,joequant/bitquant,joequant/bitquant,linkmax91/bitquant,linkmax91/bitquant | web/scripts/ipython_config.py | web/scripts/ipython_config.py | def init_ipython():
from os.path import expanduser
import sys
import os.path
import os
home = expanduser("~")
os.environ['PYTHONPATH'] = \
':'.join( [os.path.join(home, "ipython"),
os.path.join(home, "git", "bitquant", "web", "scripts")] )
init_ipython()
| def init_ipython():
from os.path import expanduser
import sys
import os.path
import os
home = expanduser("~")
sys.path.append(os.path.join(home, "ipython"))
sys.path.append(os.path.join(home, "git", "bitquant", "web", "scripts"))
init_ipython()
| bsd-2-clause | Python |
bf8e5410afed79c243466e06c61bc5c994dda00f | Use the object.__new__ decorator to create a singleton instance of the YES object. | PyCQA/astroid | astroid/util.py | astroid/util.py | # copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
#
# The code in this file was originally part of logilab-common, licensed under
# the same license.
import sys
import six
def reraise(exception):
'''Reraises an exception with the traceback from the current exception
block.'''
six.reraise(type(exception), exception, sys.exc_info()[2])
@object.__new__
class YES(object):
"""Special inference object, which is returned when inference fails."""
def __repr__(self):
return 'YES'
def __getattribute__(self, name):
if name == 'next':
raise AttributeError('next method should not be called')
if name.startswith('__') and name.endswith('__'):
return object.__getattribute__(self, name)
return self
def __call__(self, *args, **kwargs):
return self
| # copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
#
# The code in this file was originally part of logilab-common, licensed under
# the same license.
import sys
import six
def reraise(exception):
'''Reraises an exception with the traceback from the current exception
block.'''
six.reraise(type(exception), exception, sys.exc_info()[2])
class _Yes(object):
"""Special inference object, which is returned when inference fails."""
def __repr__(self):
return 'YES'
def __getattribute__(self, name):
if name == 'next':
raise AttributeError('next method should not be called')
if name.startswith('__') and name.endswith('__'):
return super(_Yes, self).__getattribute__(name)
return self
def __call__(self, *args, **kwargs):
return self
YES = _Yes()
| lgpl-2.1 | Python |
983487ea68e7e285d9d7329c6e52d9454f46ee95 | format fixed and debugging done | mitenjain/nanopore,mitenjain/nanopore,isovic/marginAlign,benedictpaten/marginAlign,mitenjain/nanopore,mitenjain/nanopore,mitenjain/nanopore,mitenjain/nanopore,benedictpaten/marginAlign,benedictpaten/marginAlign,mitenjain/nanopore,mitenjain/nanopore,isovic/marginAlign | nanopore/analyses/fastqc.py | nanopore/analyses/fastqc.py | from nanopore.analyses.abstractAnalysis import AbstractAnalysis
from sonLib.bioio import system
import os
class FastQC(AbstractAnalysis):
def run(self):
system("fastqc %s --outdir=%s" % (self.readFastqFile, self.outputDir))
| from nanopore.analyses.abstractAnalysis import AbstractAnalysis
from sonLib.bioio import system
import os
class FastQC(AbstractAnalysis):
def run(self):
system("fastqc %s --outdir=%s" % (self.readFastQFile, self.outputDir))
| mit | Python |
423d9b9e294ef20fafbb1cb67a6c54c38112cddb | Improve Worker resistance against external code exceptions | alvarogzp/telegram-bot,alvarogzp/telegram-bot | bot/multithreading/worker.py | bot/multithreading/worker.py | import queue
import threading
class Worker:
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
self.name = name
self.queue = work_queue
# using an event instead of a boolean flag to avoid race conditions between threads
self.end = threading.Event()
self.error_handler = error_handler
def run(self):
while self._should_run():
work = self.queue.get()
self._work(work)
def _should_run(self):
return not self.end.is_set()
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.end.set()
class Work:
def __init__(self, func: callable, name: str):
self.func = func
self.name = name
def do_work(self):
self.func()
| import queue
import threading
class Worker:
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
self.name = name
self.queue = work_queue
# using an event instead of a boolean flag to avoid race conditions between threads
self.end = threading.Event()
self.error_handler = error_handler
def run(self):
while self._should_run():
work = self.queue.get()
self._work(work)
def _should_run(self):
return not self.end.is_set()
def _work(self, work: Work):
try:
work.do_work()
except Exception as e:
self.error_handler(e, work, self)
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.end.set()
class Work:
def __init__(self, func: callable, name: str):
self.func = func
self.name = name
def do_work(self):
self.func()
| agpl-3.0 | Python |
5cbc98cf59ab1d81faaa07b392bf92b282f044fc | add result count param to expand() | StephanieMak/CS3245PatentSearchEngine,weikengary/CS3245PatentSearchEngine | QueryExpansion.py | QueryExpansion.py | '''
This module expands the given patent query with the terms collected from
the top 10 of Google patent search.
'''
import nltk
import json
import urllib
import urllib2
import HTMLParser
import string
def expand(query, google_result_count = 3):
# Maximum value for google_result_count allowed is only 8
if google_result_count > 8:
google_result_count = 8
url = 'https://ajax.googleapis.com/ajax/services/search/patent?v=1.0' + \
'&rsz=' + str(google_result_count) + \
'&q=' + urllib.quote(query)
response = json.load(urllib2.urlopen(url))
results = response['responseData']['results']
for result in results:
output = result['titleNoFormatting'].encode('ascii', 'ignore')
output = ''.join(ch for ch in output if ch not in string.punctuation)
query += ' ' + output
return query
def get_nouns(sentence):
nouns = []
results = nltk.pos_tag(sentence.strip().split())
for result in results:
if result[1][:1] == 'N' and result[0] != 'documents':
nouns.append(result[0])
return ' '.join(nouns)
# print expand('Washers that clean laundry with bubbles') | '''
This module expands the given patent query with the terms collected from
the top 10 of Google patent search.
'''
import nltk
import json
import urllib
import urllib2
import HTMLParser
import string
def expand(query):
url = 'https://ajax.googleapis.com/ajax/services/search/patent?v=1.0&rsz=3&q=' + urllib.quote(query)
response = json.load(urllib2.urlopen(url))
results = response['responseData']['results']
for result in results:
output = result['titleNoFormatting'].encode('ascii', 'ignore')
output = ''.join(ch for ch in output if ch not in string.punctuation)
query += ' ' + output
return query
def get_nouns(sentence):
nouns = []
results = nltk.pos_tag(sentence.strip().split())
for result in results:
if result[1][:1] == 'N' and result[0] != 'documents':
nouns.append(result[0])
return ' '.join(nouns)
#print get_nouns('Washers that clean laundry with bubbles elevant documents will describe washing technologies that clean or induce using bubbles, foam, by means of vacuuming, swirling, inducing flow or other mechanisms') | apache-2.0 | Python |
46c63fea860217fecf4ca334149970e8df7fd149 | Change init param of wordnet | nlesc-sherlock/concept-search,nlesc-sherlock/concept-search,nlesc-sherlock/concept-search,nlesc-sherlock/concept-search | webserver/webTermSuggester.py | webserver/webTermSuggester.py | #!/usr/bin/env python
################################################################################
# Created by Oscar Martinez #
# o.rubi@esciencecenter.nl #
################################################################################
from flask import Flask, request, jsonify
from TermSuggester import TermSuggester, SearchMethodAggregation
from elsearch import ELSearch
from wnsearch import WNSearch
app = Flask(__name__)
searchMethodClasses = (ELSearch, WNSearch)
initializeParameters = ((None, False),([]))
ts = TermSuggester(searchMethodClasses, initializeParameters)
@app.route("/suggester", methods = ['GET',])
def api_term():
if request.method == 'GET':
if 'term' in request.args:
data = ts.getSuggestions(str(request.args['term']), SearchMethodAggregation.SumMethod)
resp = jsonify(data)
resp.status_code = 200
return resp
else:
return 'Error: Need to specif a term!'
if __name__ == "__main__":
app.run(debug=True) | #!/usr/bin/env python
################################################################################
# Created by Oscar Martinez #
# o.rubi@esciencecenter.nl #
################################################################################
from flask import Flask, request, jsonify
from TermSuggester import TermSuggester, SearchMethodAggregation
from elsearch import ELSearch
from wnsearch import WNSearch
app = Flask(__name__)
searchMethodClasses = (ELSearch, WNSearch)
initializeParameters = ((None, False),('/home/oscarr/concept-search-wd/data/wordnet', False))
ts = TermSuggester(searchMethodClasses, initializeParameters)
@app.route("/suggester", methods = ['GET',])
def api_term():
if request.method == 'GET':
if 'term' in request.args:
data = ts.getSuggestions(str(request.args['term']), SearchMethodAggregation.SumMethod)
resp = jsonify(data)
resp.status_code = 200
return resp
else:
return 'Error: Need to specif a term!'
if __name__ == "__main__":
app.run(debug=True) | apache-2.0 | Python |
e6988b51017890f1398de56e2870b439a928381d | clean up install_modules.py | joejulian/openstack-guest-agents-unix,prometheanfire/openstack-guest-agents-unix,prometheanfire/openstack-guest-agents-unix,joejulian/openstack-guest-agents-unix,rackerlabs/openstack-guest-agents-unix,coreos/openstack-guest-agents-unix,rackerlabs/openstack-guest-agents-unix,rackerlabs/openstack-guest-agents-unix,prometheanfire/openstack-guest-agents-unix,coreos/openstack-guest-agents-unix,coreos/openstack-guest-agents-unix,coreos/openstack-guest-agents-unix,joejulian/openstack-guest-agents-unix | src/unix/install_modules.py | src/unix/install_modules.py |
import os
import shutil
import subprocess
import sys
# For nova_agent binary
test_mode = True
def install_plugins(destdir):
import plugins
to_install = set()
for modname in sys.modules:
try:
mod_fn = __import__(modname).__file__
except:
continue
(mod_dir, mod_file) = mod_fn.rsplit('/', 1)
if mod_dir == "%s/%s" % (sys.path[0], "plugins"):
# Skip our plugins.
continue
if mod_dir in sys.path:
to_install.add(mod_fn)
else:
to_install.add(mod_dir)
try:
os.mkdir(destdir)
except:
pass
for i in to_install:
if os.path.isdir(i):
subdir = i.rsplit('/', 1)[1]
shutil.copytree(i, "%s/%s" % (destdir, subdir))
else:
shutil.copy2(i, destdir)
if len(sys.argv) != 2:
print "Usage: install_modules.py <dest_dir>"
sys.exit(1)
destdir = sys.argv[1]
if not os.path.exists(destdir):
os.mkdir(destdir)
elif not os.path.isdir(destdir):
print "Error: '%s' exists and is not a directory" % destdir
sys.exit(1)
install_plugins(destdir)
|
import sys
import os
import shutil
import plugins
test_mode = True
if len(sys.argv) < 2:
print "No destination directory specified"
sys.exit(1)
dest_dir = sys.argv[1]
to_install = set()
for modname in sys.modules:
try:
mod_fn = __import__(modname).__file__
except:
continue
(mod_dir, mod_file) = mod_fn.rsplit('/', 1)
if mod_dir == "%s/%s" % (sys.path[0], "plugins"):
# Skip our plugins.
continue
if mod_dir in sys.path:
to_install.add(mod_fn)
else:
to_install.add(mod_dir)
try:
os.mkdir(dest_dir)
except:
pass
for i in to_install:
if os.path.isdir(i):
subdir = i.rsplit('/', 1)[1]
shutil.copytree(i, "%s/%s" % (dest_dir, subdir))
else:
shutil.copy2(i, dest_dir)
| apache-2.0 | Python |
3cb6484231841e0125ca456fc15cae5e20d625d9 | bump version to 0.3 | dpranke/pyjson5 | json5/version.py | json5/version.py | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = '0.3'
| # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = '0.2.4'
| apache-2.0 | Python |
b912374b96fac67e213e65ea980e402c214fa54f | check the finance book via the journal entry | gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext | erpnext/accounts/doctype/finance_book/test_finance_book.py | erpnext/accounts/doctype/finance_book/test_finance_book.py | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
from erpnext.accounts.doctype.journal_entry.test_journal_entry import make_journal_entry
import frappe
import unittest
class TestFinanceBook(unittest.TestCase):
def create_finance_book(self):
if not frappe.db.exists("Finance Book", "_Test Finance Book"):
finance_book = frappe.get_doc({
"doctype": "Finance Book",
"finance_book_name": "_Test Finance Book"
}).insert()
else:
finance_book = frappe.get_doc("Finance Book", "_Test Finance Book")
return finance_book
def test_finance_book(self):
finance_book = self.create_finance_book()
# create jv entry
jv = make_journal_entry("_Test Bank - _TC",
"_Test Receivable - _TC", 100, save=False)
jv.accounts[1].update({
"party_type": "Customer",
"party": "_Test Customer USD"
})
jv.finance_book = finance_book.finance_book_name
jv.submit()
# check the Finance Book in the GL Entry
gl_entries = frappe.get_all("GL Entry", fields=["name", "finance_book"],
filters={"voucher_type": "Journal Entry", "voucher_no": jv.name})
for gl_entry in gl_entries:
self.assertEqual(gl_entry.finance_book, finance_book.name) | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestFinanceBook(unittest.TestCase):
pass
| agpl-3.0 | Python |
94ac3f9efb1bc76f6d418d549349d2f3caec4114 | Bump 0.3.0 | shin-/dockerpy-creds,shin-/dockerpy-creds | dockerpycreds/version.py | dockerpycreds/version.py | version = "0.3.0"
version_info = tuple([int(d) for d in version.split("-")[0].split(".")])
| version = "0.2.3"
version_info = tuple([int(d) for d in version.split("-")[0].split(".")])
| apache-2.0 | Python |
0358809377d7217fbef7c000ac772387fa4a9249 | Rename notifications to messages. | whatsthehubbub/rippleeffect,whatsthehubbub/rippleeffect,whatsthehubbub/rippleeffect,whatsthehubbub/rippleeffect,whatsthehubbub/rippleeffect | riskgame/urls.py | riskgame/urls.py | from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
urlpatterns = patterns('',
url(r'^$', 'riskgame.views.index', name='index'),
# url(r'^pre/launch/$', 'riskgame.views.pre_launch', name='pre_launch'),
url(r'^teams/$', 'riskgame.views.teams', name='teams'),
url(r'^teams/(?P<pk>\d+)/$', 'riskgame.views.team_detail', name='team_detail'),
url(r'^teams/your/$', 'riskgame.views.team_your', name="team_your"),
# url(r'^teams/create/$', 'riskgame.views.team_create', name='team_create'),
# url(r'^team/(?P<pk>\d+)/join/request/$', 'riskgame.views.request_team_join', name='request_team_join'),
# url(r'^team/(?P<pk>\d+)/join/accept/$', 'riskgame.views.accept_team_join', name='accept_team_join'),
url(r'^dummy/$', TemplateView.as_view(template_name='riskgame/dummy.html'), name='dummy'),
url(r'^players/$', 'riskgame.views.players', name='players'),
url(r'^messages/$', 'riskgame.views.notifications', name='notifications'),
url(r'^players/(?P<pk>\d+)/$', 'riskgame.views.player_profile', name='player_profile'),
url(r'^players/you/$', 'riskgame.views.player_profile_own', name='player_profile_own'),
url(r'^players/you/edit/$', 'riskgame.views.player_profile_edit', name='player_profile_edit'),
url(r'^players/(\S+?)/unsubscribe/$', 'riskgame.views.player_unsubscribe', name='player_unsubscribe'),
url(r'^home/$', 'riskgame.views.home', name='home'),
url(r'^game/start/$', 'riskgame.views.game_start', name='game_start'),
url(r'^play/inspect/$', 'riskgame.views.play_inspect', name='play_inspect'),
url(r'^play/improve/$', 'riskgame.views.play_invest', name='play_invest'),
url(r'^play/plan/$', 'riskgame.views.play_gather', name='play_gather'),
url(r'^play/barrier/$', 'riskgame.views.play_prevent', name='play_prevent'),
url(r'^play/confirm-production/$', 'riskgame.views.play_confirm_pump', name='play_confirm_pump'),
url(r'^play/produce/$', 'riskgame.views.play_pump', name='play_pump'),
url(r'^frontline/safety/$', 'riskgame.views.inspect_risks', name='frontline_risks'),
url(r'^frontline/event/$', 'riskgame.views.inspect_event', name='frontline_event'),
)
| from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
urlpatterns = patterns('',
url(r'^$', 'riskgame.views.index', name='index'),
# url(r'^pre/launch/$', 'riskgame.views.pre_launch', name='pre_launch'),
url(r'^teams/$', 'riskgame.views.teams', name='teams'),
url(r'^teams/(?P<pk>\d+)/$', 'riskgame.views.team_detail', name='team_detail'),
url(r'^teams/your/$', 'riskgame.views.team_your', name="team_your"),
# url(r'^teams/create/$', 'riskgame.views.team_create', name='team_create'),
# url(r'^team/(?P<pk>\d+)/join/request/$', 'riskgame.views.request_team_join', name='request_team_join'),
# url(r'^team/(?P<pk>\d+)/join/accept/$', 'riskgame.views.accept_team_join', name='accept_team_join'),
url(r'^dummy/$', TemplateView.as_view(template_name='riskgame/dummy.html'), name='dummy'),
url(r'^players/$', 'riskgame.views.players', name='players'),
url(r'^notifications/$', 'riskgame.views.notifications', name='notifications'),
url(r'^players/(?P<pk>\d+)/$', 'riskgame.views.player_profile', name='player_profile'),
url(r'^players/you/$', 'riskgame.views.player_profile_own', name='player_profile_own'),
url(r'^players/you/edit/$', 'riskgame.views.player_profile_edit', name='player_profile_edit'),
url(r'^players/(\S+?)/unsubscribe/$', 'riskgame.views.player_unsubscribe', name='player_unsubscribe'),
url(r'^home/$', 'riskgame.views.home', name='home'),
url(r'^game/start/$', 'riskgame.views.game_start', name='game_start'),
url(r'^play/inspect/$', 'riskgame.views.play_inspect', name='play_inspect'),
url(r'^play/improve/$', 'riskgame.views.play_invest', name='play_invest'),
url(r'^play/plan/$', 'riskgame.views.play_gather', name='play_gather'),
url(r'^play/barrier/$', 'riskgame.views.play_prevent', name='play_prevent'),
url(r'^play/confirm-production/$', 'riskgame.views.play_confirm_pump', name='play_confirm_pump'),
url(r'^play/produce/$', 'riskgame.views.play_pump', name='play_pump'),
url(r'^frontline/safety/$', 'riskgame.views.inspect_risks', name='frontline_risks'),
url(r'^frontline/event/$', 'riskgame.views.inspect_event', name='frontline_event'),
)
| mit | Python |
a683d160267ee4d8e139a19b9adacdf8f82f370c | bump version number of rmgpy to 1.0.4 | nyee/RMG-Py,pierrelb/RMG-Py,nyee/RMG-Py,pierrelb/RMG-Py,nickvandewiele/RMG-Py,nickvandewiele/RMG-Py | rmgpy/version.py | rmgpy/version.py | # This file describes the version of RMG-Py
__version__ = '1.0.4' | # This file describes the version of RMG-Py
__version__ = '1.0.3' | mit | Python |
66e2e3bee9996a0cb55c7b802a638e42bc72ccbe | Use formatted flag on astyle to simplify code | stopthatcow/zazu,stopthatcow/zazu | zazu/plugins/astyle_styler.py | zazu/plugins/astyle_styler.py | # -*- coding: utf-8 -*-
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
args = ['astyle', '--formatted'] + self.options
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
return file, bool(output)
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
| # -*- coding: utf-8 -*-
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
args = ['astyle', '-v'] + self.options
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
fix_needed = output.startswith('Formatted ')
return file, fix_needed
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
| mit | Python |
3342c5c82960274b0ed2ba9500e33d305f1b4399 | Correct ESLint case | stopthatcow/zazu,stopthatcow/zazu | zazu/plugins/eslint_styler.py | zazu/plugins/eslint_styler.py | # -*- coding: utf-8 -*-
"""ESLint plugin for zazu."""
import zazu.styler
zazu.util.lazy_import(locals(), [
'subprocess',
'os',
'tempfile'
])
__author__ = "Patrick Moore"
__copyright__ = "Copyright 2018"
class ESLintStyler(zazu.styler.Styler):
"""ESLint plugin for code styling."""
def style_string(self, string):
"""Fix a string to be within style guidelines."""
temp = tempfile.NamedTemporaryFile(delete=False, suffix=".js")
temp_path = temp.name
args = ['eslint', '--fix'] + self.options + [temp_path]
temp.write(string)
temp.close()
try:
subprocess.check_output(args)
except subprocess.CalledProcessError:
pass
with open(temp_path, "r") as f:
ret = f.read()
os.remove(temp_path)
return ret
@staticmethod
def default_extensions():
"""Return the list of file extensions that are compatible with this Styler."""
return ['*.js']
@staticmethod
def type():
"""Return the string type of this Styler."""
return 'eslint'
| # -*- coding: utf-8 -*-
"""eslint plugin for zazu."""
import zazu.styler
zazu.util.lazy_import(locals(), [
'subprocess',
'os',
'tempfile'
])
__author__ = "Patrick Moore"
__copyright__ = "Copyright 2018"
class eslintStyler(zazu.styler.Styler):
"""ESLint plugin for code styling."""
def style_string(self, string):
"""Fix a string to be within style guidelines."""
temp = tempfile.NamedTemporaryFile(delete=False, suffix=".js")
temp_path = temp.name
args = ['eslint', '--fix'] + self.options + [temp_path]
temp.write(string)
temp.close()
try:
subprocess.check_output(args)
except subprocess.CalledProcessError:
pass
with open(temp_path, "r") as f:
ret = f.read()
os.remove(temp_path)
return ret
@staticmethod
def default_extensions():
"""Return the list of file extensions that are compatible with this Styler."""
return ['*.js']
@staticmethod
def type():
"""Return the string type of this Styler."""
return 'eslint'
| mit | Python |
807aa2ec34441e919f27079eb4ed075f76fe17e5 | Add log_bernoulli shortname | RuiShu/tensorbayes | tensorbayes/distributions.py | tensorbayes/distributions.py | """ Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
| """ Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
# return -tf.reduce_sum(
# tf.nn.sigmoid_cross_entropy_with_logits(logits, x), axis)
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
| mit | Python |
979424d0510834abb3918777f8b6a46b344d2bee | Bump version number | johanvdw/niche_vlaanderen | niche_vlaanderen/version.py | niche_vlaanderen/version.py | __version__ = "1.0a6"
| __version__ = "1.0a5"
| mit | Python |
45cc00d2f4bf1ec5dfec60301e48c984af9acb64 | Add validator json to PACKAGE_DATA | INCF/pybids | bids/version.py | bids/version.py | from __future__ import absolute_import, division, print_function
import os
CLASSIFIERS = ["Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering"]
# Description should be a one-liner:
description = "bids: interface with datasets conforming BIDS"
# Long description will go up on the pypi page
long_description = """
PyBIDS
======
PyBIDS is a Python module to interface with datasets conforming BIDS.
See BIDS paper_ and http://bids.neuroimaging.io website for more information.
.. paper_: http://www.nature.com/articles/sdata201644
License
=======
``pybids`` is licensed under the terms of the MIT license. See the file
"LICENSE" for information on the history of this software, terms & conditions
for usage, and a DISCLAIMER OF ALL WARRANTIES.
All trademarks referenced herein are property of their respective holders.
Copyright (c) 2016--, PyBIDS developers, Planet Earth
"""
NAME = "pybids"
MAINTAINER = "PyBIDS Developers"
MAINTAINER_EMAIL = "bids-discussion@googlegroups.com"
DESCRIPTION = description
LONG_DESCRIPTION = long_description
URL = "http://github.com/bids-standard/pybids"
DOWNLOAD_URL = ""
LICENSE = "MIT"
AUTHOR = "PyBIDS developers"
AUTHOR_EMAIL = "bids-discussion@googlegroups.com"
PLATFORMS = "OS Independent"
# No data for now
REQUIRES = ["grabbit==0.2.4", "six", "num2words", "numpy", "scipy", "pandas",
"nibabel", "patsy"]
EXTRAS_REQUIRE = {
# Just to not break compatibility with externals requiring
# now deprecated installation schemes
'analysis': []
}
TESTS_REQUIRE = ["pytest>=3.3.0"]
def package_files(directory):
# from https://stackoverflow.com/questions/27664504/how-to-add-package-data-recursively-in-python-setup-py
paths = []
for (path, directories, filenames) in os.walk(directory):
for filename in filenames:
paths.append(os.path.join('..', path, filename))
return paths
extra_files = package_files('path_to/extra_files_dir')
PACKAGE_DATA = {
'bids.layout': ['config/*.json'],
'bids.reports': ['config/*.json'],
'bids.validator': ['config/validator/*.json'],
'bids': package_files('bids/tests/data')
}
| from __future__ import absolute_import, division, print_function
import os
CLASSIFIERS = ["Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering"]
# Description should be a one-liner:
description = "bids: interface with datasets conforming BIDS"
# Long description will go up on the pypi page
long_description = """
PyBIDS
======
PyBIDS is a Python module to interface with datasets conforming BIDS.
See BIDS paper_ and http://bids.neuroimaging.io website for more information.
.. paper_: http://www.nature.com/articles/sdata201644
License
=======
``pybids`` is licensed under the terms of the MIT license. See the file
"LICENSE" for information on the history of this software, terms & conditions
for usage, and a DISCLAIMER OF ALL WARRANTIES.
All trademarks referenced herein are property of their respective holders.
Copyright (c) 2016--, PyBIDS developers, Planet Earth
"""
NAME = "pybids"
MAINTAINER = "PyBIDS Developers"
MAINTAINER_EMAIL = "bids-discussion@googlegroups.com"
DESCRIPTION = description
LONG_DESCRIPTION = long_description
URL = "http://github.com/bids-standard/pybids"
DOWNLOAD_URL = ""
LICENSE = "MIT"
AUTHOR = "PyBIDS developers"
AUTHOR_EMAIL = "bids-discussion@googlegroups.com"
PLATFORMS = "OS Independent"
# No data for now
REQUIRES = ["grabbit==0.2.4", "six", "num2words", "numpy", "scipy", "pandas",
"nibabel", "patsy"]
EXTRAS_REQUIRE = {
# Just to not break compatibility with externals requiring
# now deprecated installation schemes
'analysis': []
}
TESTS_REQUIRE = ["pytest>=3.3.0"]
def package_files(directory):
# from https://stackoverflow.com/questions/27664504/how-to-add-package-data-recursively-in-python-setup-py
paths = []
for (path, directories, filenames) in os.walk(directory):
for filename in filenames:
paths.append(os.path.join('..', path, filename))
return paths
extra_files = package_files('path_to/extra_files_dir')
PACKAGE_DATA = {
'bids.layout': ['config/*.json'],
'bids.reports': ['config/*.json'],
'bids': package_files('bids/tests/data')
}
| mit | Python |
887cb1b1a021b6d4a1952fdeb178e602d8cabfdc | Fix `clifford.test.run_all_tests` to use pytest | arsenovic/clifford,arsenovic/clifford | clifford/test/__init__.py | clifford/test/__init__.py | import os
import pytest
def run_all_tests(*args):
""" Invoke pytest, forwarding options to pytest.main """
pytest.main([os.path.dirname(__file__)] + list(args))
| from .test_algebra_initialisation import *
from .test_clifford import *
from .test_io import *
from .test_g3c_tools import *
from .test_tools import *
from .test_g3c_CUDA import *
import unittest
def run_all_tests():
unittest.main()
| bsd-3-clause | Python |
e222de58671a11c83e70c90f2bac10e576240214 | Adjust to new script parameter syntax | imagej/imagej-legacy,imagej/imagej-legacy,imagej/imagej-legacy,imagej/imagej-legacy | src/main/resources/script_templates/ImageJ_1.x/Examples/Process_Folder.py | src/main/resources/script_templates/ImageJ_1.x/Examples/Process_Folder.py | #@ File (label = "Input directory", style = "directory") srcFile
#@ File (label = "Output directory", style = "directory") dstFile
#@ String (label = "File extension", value=".tif") ext
#@ String (label = "File name contains", value = "") containString
#@ boolean (label = "Keep directory structure when saving", value = true) keepDirectories
# See also Process_Folder.ijm for a version of this code
# in the ImageJ 1.x macro language.
import os
from ij import IJ, ImagePlus
def run():
srcDir = srcFile.getAbsolutePath()
dstDir = dstFile.getAbsolutePath()
for root, directories, filenames in os.walk(srcDir):
filenames.sort();
for filename in filenames:
# Check for file extension
if not filename.endswith(ext):
continue
# Check for file name pattern
if containString not in filename:
continue
process(srcDir, dstDir, root, filename, keepDirectories)
def process(srcDir, dstDir, currentDir, fileName, keepDirectories):
print "Processing:"
# Opening the image
print "Open image file", fileName
imp = IJ.openImage(os.path.join(currentDir, fileName))
# Put your processing commands here!
# Saving the image
saveDir = currentDir.replace(srcDir, dstDir) if keepDirectories else dstDir
if not os.path.exists(saveDir):
os.makedirs(saveDir)
print "Saving to", saveDir
IJ.saveAs(imp, "Tiff", os.path.join(saveDir, fileName));
imp.close()
run()
| # @File(label = "Input directory", style = "directory") srcFile
# @File(label = "Output directory", style = "directory") dstFile
# @String(label = "File extension", value=".tif") ext
# @String(label = "File name contains", value = "") containString
# @boolean(label = "Keep directory structure when saving", value = true) keepDirectories
# See also Process_Folder.ijm for a version of this code
# in the ImageJ 1.x macro language.
import os
from ij import IJ, ImagePlus
def run():
srcDir = srcFile.getAbsolutePath()
dstDir = dstFile.getAbsolutePath()
for root, directories, filenames in os.walk(srcDir):
filenames.sort();
for filename in filenames:
# Check for file extension
if not filename.endswith(ext):
continue
# Check for file name pattern
if containString not in filename:
continue
process(srcDir, dstDir, root, filename, keepDirectories)
def process(srcDir, dstDir, currentDir, fileName, keepDirectories):
print "Processing:"
# Opening the image
print "Open image file", fileName
imp = IJ.openImage(os.path.join(currentDir, fileName))
# Put your processing commands here!
# Saving the image
saveDir = currentDir.replace(srcDir, dstDir) if keepDirectories else dstDir
if not os.path.exists(saveDir):
os.makedirs(saveDir)
print "Saving to", saveDir
IJ.saveAs(imp, "Tiff", os.path.join(saveDir, fileName));
imp.close()
run()
| bsd-2-clause | Python |
612521ba5c9fe9dace98f09d07e359bbbef29d48 | update __manifest__ | akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil | l10n_br_base/__manifest__.py | l10n_br_base/__manifest__.py | # -*- coding: utf-8 -*-
# Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Base',
'summary': 'Customization of base module for implementations in Brazil.',
'category': 'Localisation',
'license': 'AGPL-3',
'author': (
'Akretion',
'Odoo Community Association (OCA)'
),
'website': 'http://odoo-brasil.org',
'version': '12.0.1.0.0',
'depends': [
'base',
'base_setup',
'base_address_city',
'base_address_extended'
],
'data': [
'security/ir.model.access.csv',
'data/res.city.csv',
'data/base_data.xml',
'data/res.country.state.csv',
'data/res.bank.csv',
'views/res_config_settings_view.xml',
'views/res_city_view.xml',
'views/res_bank_view.xml',
'views/res_country_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml'
],
'demo': [
'demo/l10n_br_base_demo.xml',
'demo/res_partner_demo.xml',
],
'installable': True,
'external_dependencies': {
'python': ['num2words'],
}
}
| # -*- coding: utf-8 -*-
# Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Base',
'summary': 'Customization of base module for implementations in Brazil.',
'category': 'Localisation',
'license': 'AGPL-3',
'author': (
'Akretion',
'Odoo Community Association (OCA)'
),
'website': 'http://odoo-brasil.org',
'version': '12.0.1.0.0',
'depends': [
'base',
'base_setup',
'base_address_city',
'base_address_extends'
],
'data': [
'data/res.city.csv',
'data/l10n_br_base_data.xml',
'data/res.country.state.csv',
'data/res.bank.csv',
'views/l10n_br_base_view.xml',
'views/res_bank_view.xml',
'views/res_country_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/l10n_br_base_demo.xml',
'demo/res_partner_demo.xml',
],
'installable': True,
'external_dependencies': {
'python': ['num2words'],
}
}
| agpl-3.0 | Python |
174d9d0a131a65924a4f4ef9eb5ae2edf56202a6 | Update l10n_br_sale depedency | OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil | l10n_br_sale/__manifest__.py | l10n_br_sale/__manifest__.py | # Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
"name": "Brazilian Localization Sale",
"category": "Localisation",
"license": "AGPL-3",
"author":
'Akretion, '
'Odoo Community Association (OCA)',
"website": "http://odoo-brasil.org",
"version": "12.0.1.0.0",
"depends": ["sale_management", "l10n_br_account"],
"data": [
# Data
"data/company_data.xml",
# Security
"security/ir.model.access.csv",
"security/l10n_br_sale_security.xml",
# View
"views/res_config_settings_view.xml",
"views/res_company_view.xml",
"views/sale_view.xml",
# Report
"report/sale_report_view.xml",
],
"demo": [
# Demo
"demo/company_demo.xml",
"demo/l10n_br_sale_demo.xml",
"demo/l10n_br_sale_product_demo.xml",
],
"installable": True,
"auto_install": True,
"development_status": "Production/Stable",
"maintainers": ["renatonlima"],
"external_dependencies": {"python": ["erpbrasil.base"]},
}
| # Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
"name": "Brazilian Localization Sale",
"category": "Localisation",
"license": "AGPL-3",
"author":
'Akretion, '
'Odoo Community Association (OCA)',
"website": "http://odoo-brasil.org",
"version": "12.0.1.0.0",
"depends": ["sale", "l10n_br_account"],
"data": [
# Data
"data/company_data.xml",
# Security
"security/ir.model.access.csv",
"security/l10n_br_sale_security.xml",
# View
"views/res_config_settings_view.xml",
"views/res_company_view.xml",
"views/sale_view.xml",
# Report
"report/sale_report_view.xml",
],
"demo": [
# Demo
"demo/company_demo.xml",
"demo/l10n_br_sale_demo.xml",
"demo/l10n_br_sale_product_demo.xml",
],
"installable": True,
"auto_install": True,
"development_status": "Production/Stable",
"maintainers": ["renatonlima"],
"external_dependencies": {"python": ["erpbrasil.base"]},
}
| agpl-3.0 | Python |
ed968fabd811690e0a2f2011b5ed44cec31145c7 | Fix broken import in translate_nuc script. | cfe-lab/MiCall,cfe-lab/MiCall,cfe-lab/MiCall | micall/tests/microtest/translate_nuc.py | micall/tests/microtest/translate_nuc.py | from micall.core import aln2counts
""" Translate nucleotide sequence to amino acid sequence and compare the result
with an expected sequence.
You might also want to identify the amino acid sequence with BLASTp:
http://blast.ncbi.nlm.nih.gov/Blast.cgi?PROGRAM=blastp&PAGE_TYPE=BlastSearch
"""
nuc_seq = ''.join([
"CCTCAGGTCACTCTTTGGCAACGACCCCTCGTCACAATAAAGATAGGGGGGCAACTAAAGGAAGC",
"TCTATTAGATACAGGAGCAGATGATACAGTATTAGAAGAAATGAGTTTGCCAGGAAGATGGAAAC",
"CAAAAATGATAGGGGGAATTGGAGGTTTTATCAAAGTAAGACAGTATGATCAGATACTCATAGAA",
"ATCTGTGGACATAAAGCTATAGGTACAGTATTAGTAGGACCTACACCTGTCAACATAATTGGAAG",
"AAATCTGTTGACTCAGATTGGTTGCACTTTAAATTTT"
])
aa_compare = ''.join([
"PQVTLWQRPLVTIKIGGQLKEALLDTGADDTVLEEMSLPGRWKPKMIGGIGGFIKVRQYDQILIE",
"ICGHKAIGTVLVGPTPVNIIGRNLLTQIGCTLNF"
])
aa_seq = aln2counts.translate(nuc_seq)
pairs = zip(aa_seq, aa_compare)
diffs = [' ' if a == b else '*' for a, b in pairs]
print 'result ', aa_seq
print 'diffs ', ''.join(diffs) if aa_seq != aa_compare else 'no diffs'
print 'compare', aa_compare
| import aln2counts
""" Translate nucleotide sequence to amino acid sequence and compare the result
with an expected sequence.
You might also want to identify the amino acid sequence with BLASTp:
http://blast.ncbi.nlm.nih.gov/Blast.cgi?PROGRAM=blastp&PAGE_TYPE=BlastSearch
"""
nuc_seq = ''.join([
"CCTCAGGTCACTCTTTGGCAACGACCCCTCGTCACAATAAAGATAGGGGGGCAACTAAAGGAAGC",
"TCTATTAGATACAGGAGCAGATGATACAGTATTAGAAGAAATGAGTTTGCCAGGAAGATGGAAAC",
"CAAAAATGATAGGGGGAATTGGAGGTTTTATCAAAGTAAGACAGTATGATCAGATACTCATAGAA",
"ATCTGTGGACATAAAGCTATAGGTACAGTATTAGTAGGACCTACACCTGTCAACATAATTGGAAG",
"AAATCTGTTGACTCAGATTGGTTGCACTTTAAATTTT"
])
aa_compare = ''.join([
"PQVTLWQRPLVTIKIGGQLKEALLDTGADDTVLEEMSLPGRWKPKMIGGIGGFIKVRQYDQILIE",
"ICGHKAIGTVLVGPTPVNIIGRNLLTQIGCTLNF"
])
aa_seq = aln2counts.translate(nuc_seq)
pairs = zip(aa_seq, aa_compare)
diffs = [' ' if a == b else '*' for a, b in pairs]
print 'result ', aa_seq
print 'diffs ', ''.join(diffs) if aa_seq != aa_compare else 'no diffs'
print 'compare', aa_compare
| agpl-3.0 | Python |
65c52d9982cf3a87bd6f3efa9591a8781a799ffb | Change selector (site changed) | laurent-george/weboob,Konubinix/weboob,willprice/weboob,RouxRC/weboob,nojhan/weboob-devel,sputnick-dev/weboob,frankrousseau/weboob,Boussadia/weboob,nojhan/weboob-devel,franek/weboob,Konubinix/weboob,laurent-george/weboob,frankrousseau/weboob,Konubinix/weboob,sputnick-dev/weboob,sputnick-dev/weboob,RouxRC/weboob,eirmag/weboob,willprice/weboob,nojhan/weboob-devel,willprice/weboob,eirmag/weboob,franek/weboob,RouxRC/weboob,Boussadia/weboob,yannrouillard/weboob,laurent-george/weboob,yannrouillard/weboob,frankrousseau/weboob,eirmag/weboob,Boussadia/weboob,franek/weboob,Boussadia/weboob,yannrouillard/weboob | modules/leclercmobile/pages/homepage.py | modules/leclercmobile/pages/homepage.py | # -*- coding: utf-8 -*-
# Copyright(C) 2012 Florent Fourcot
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.capabilities.bill import Subscription
from weboob.tools.browser import BasePage
__all__ = ['HomePage']
class HomePage(BasePage):
def on_loaded(self):
pass
def get_list(self):
l = []
phone = unicode(self.document.xpath('//span[@id="ctl00_ctl00_cMain_cEspCli_lblMsIsdn"]')[0].text.replace(' ', ''))
self.browser.logger.debug('Found ' + phone + ' has phone number')
phoneplan = unicode(self.document.xpath('//span[@id="ctl00_ctl00_cMain_cEspCli_aoaOffreActuelle_aooOffreEtOptions"]/dl/dd/span')[0].text)
self.browser.logger.debug('Found ' + phoneplan + ' has subscription type')
subscription = Subscription(phone)
subscription.label = phone + ' - ' + phoneplan
l.append(subscription)
return l
| # -*- coding: utf-8 -*-
# Copyright(C) 2012 Florent Fourcot
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.capabilities.bill import Subscription
from weboob.tools.browser import BasePage
__all__ = ['HomePage']
class HomePage(BasePage):
def on_loaded(self):
pass
def get_list(self):
l = []
phone = unicode(self.document.xpath('//span[@id="ctl00_ctl00_cMain_cEspCli_lblMsIsdn"]')[0].text.replace(' ', ''))
self.browser.logger.debug('Found ' + phone + ' has phone number')
phoneplan = unicode(self.document.xpath('//span[@id="ctl00_ctl00_cMain_cEspCli_lblOffre"]')[0].text)
self.browser.logger.debug('Found ' + phoneplan + ' has subscription type')
subscription = Subscription(phone)
subscription.label = phone + ' - ' + phoneplan
l.append(subscription)
return l
| agpl-3.0 | Python |
7c2d5304b0f305d069a4265043c19cbae71b3e06 | Add several routes | DavidJFelix/hatchit,DavidJFelix/hatchit,DavidJFelix/hatchit | src/flock.py | src/flock.py | #!/usr/bin/env python
from flask import Flask, jsonify, redirect, render_template, request, session, url_for
app = Flask(__name__)
app.secret_key = 'development'
@app.route('/')
def hello_world():
return render_template('base.html')
@app.route('/u/<user>')
@app.route('/user/<user>')
def get_user(user):
pass
@app.route('/e/a', methods=['POST'])
@app.route('/e/add', methods=['POST'])
@app.route('/event/a', methods=['POST'])
@app.route('/event/add', methods=['POST'])
def add_event():
pass
@app.route('/s/a', methods=['POST'])
@app.route('/s/add', methods=['POST'])
@app.route('/suggestion/a', methods=['POST'])
@app.route('/suggestion/add', methods=['POST'])
def add_suggestion():
pass
if __name__ == '__main__':
app.run()
| #!/usr/bin/env python
from flask import Flask, jsonify, redirect, render_template, request, session, url_for
app = Flask(__name__)
app.secret_key = 'development'
@app.route('/')
def hello_world():
return render_template('base.html')
if __name__ == '__main__':
app.run()
| agpl-3.0 | Python |
c9ef00ff3225aa545cbb1a3da592c9af1bb0791e | Fix issue when GIT is not tagged. | weijia/django-git,weijia/django-git | django_git/management/commands/git_pull_utils/git_folder_enum.py | django_git/management/commands/git_pull_utils/git_folder_enum.py | from django_git.models import RepoInfo
from tagging.models import Tag, TaggedItem
def enum_git_repo(tag_name="git"):
tag_filter = Tag.objects.filter(name=tag_name)
if tag_filter.exists():
tag = tag_filter[0]
tagged_item_list = TaggedItem.objects.filter(tag__exact=tag.pk)
for tagged_item in tagged_item_list:
obj_tag = tagged_item.tag.name
obj = tagged_item.object
if obj is None:
continue
RepoInfo.objects.get_or_create(full_path=obj.full_path)
for repo in RepoInfo.objects.all().order_by("last_checked"):
yield repo
| from django_git.models import RepoInfo
from tagging.models import Tag, TaggedItem
def enum_git_repo(tag_name="git"):
tag_filter = Tag.objects.filter(name=tag_name)
if tag_filter.exists():
tag = tag_filter[0]
tagged_item_list = TaggedItem.objects.filter(tag__exact=tag.pk)
for tagged_item in tagged_item_list:
obj_tag = tagged_item.tag.name
obj = tagged_item.object
if obj is None:
continue
RepoInfo.objects.get_or_create(full_path=obj.full_path)
for repo in RepoInfo.objects.all().order_by("last_checked"):
yield repo
| bsd-3-clause | Python |
b40439e3b1e99027952308fa1ce5bcc8ebfbcabb | Remove a useless parameter | Kozea/sitenco | sitenco/config/bug_tracker.py | sitenco/config/bug_tracker.py | """
Bug tracker tools.
"""
import abc
from docutils import nodes
from .tool import Tool, Role
class BugTracker(Tool):
"""Abstract class for bug tracker tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, project_name):
self.project_name = project_name
super(BugTracker, self).__init__()
def update(self):
"""Nothing has to be done to update bug tracker tools."""
@abc.abstractproperty
def base_url(self):
"""Base URL of the bug tracker service."""
raise NotImplementedError
@property
def bug_link(self):
"""Link to the bug tracker interface."""
raise NotImplementedError
class Github(BugTracker):
"""Github bug tracker tool."""
base_url = 'https://github.com/'
@property
def bug_link(self):
return '%s%s/issues' % (self.base_url, self.project_name)
class Redmine(BugTracker):
"""Redmine bug tracker tool."""
def __init__(self, project_name, base_url):
super(Redmine, self).__init__(project_name)
self._base_url = base_url
@property
def base_url(self):
return self._base_url
@property
def bug_link(self):
return '%sprojects/%s/issues' % (self.base_url, self.project_name)
class BugLink(Role):
"""Link to the bug tracker."""
def run(self, name, rawtext, text, lineno, inliner, options=None,
content=None):
return [nodes.reference('', text, refuri=self.tool.bug_link)], []
| """
Bug tracker tools.
"""
import abc
from docutils import nodes
from .tool import Tool, Role
class BugTracker(Tool):
"""Abstract class for bug tracker tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, project_name):
self.project_name = project_name
super(BugTracker, self).__init__()
def update(self):
"""Nothing has to be done to update bug tracker tools."""
@abc.abstractproperty
def base_url(self):
"""Base URL of the bug tracker service."""
raise NotImplementedError
@property
def bug_link(self, number=10):
"""Link to the bug tracker interface."""
raise NotImplementedError
class Github(BugTracker):
"""Github bug tracker tool."""
base_url = 'https://github.com/'
@property
def bug_link(self):
return '%s%s/issues' % (self.base_url, self.project_name)
class Redmine(BugTracker):
"""Redmine bug tracker tool."""
def __init__(self, project_name, base_url):
super(Redmine, self).__init__(project_name)
self._base_url = base_url
@property
def base_url(self):
return self._base_url
@property
def bug_link(self):
return '%sprojects/%s/issues' % (self.base_url, self.project_name)
class BugLink(Role):
"""Link to the bug tracker."""
def run(self, name, rawtext, text, lineno, inliner, options=None,
content=None):
return [nodes.reference('', text, refuri=self.tool.bug_link)], []
| bsd-3-clause | Python |
3454f377c82c11e4ec1485ec96d7af4123cc78ed | Add linux logos in show symbols script | mkofinas/prompt-support,mkofinas/prompt-support | test/symbols/show_glyphs.py | test/symbols/show_glyphs.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
octicons_start = "f400"
octicons_end = "f4e5"
print "\nOcticons"
for ii in xrange(int(octicons_start, 16), int(octicons_end, 16) + 1):
print unichr(ii),
octicons_start = "f300"
octicons_end = "f313"
print "\nFont Linux"
for ii in xrange(int(octicons_start, 16), int(octicons_end, 16) + 1):
print unichr(ii),
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
octicons_start = "f400"
octicons_end = "f4e5"
print "\nOcticons"
for ii in xrange(int(octicons_start, 16), int(octicons_end, 16) + 1):
print unichr(ii),
| mit | Python |
7258923a3fc6467c2aac2c81f108c71e790a9e6b | Fix bug in RegEx parser mixin | elegion/djangodash2013,elegion/djangodash2013 | wtl/wtparser/parsers/regex.py | wtl/wtparser/parsers/regex.py | import re
from itertools import repeat
class RegexParserMixin(object):
quoted_re = r'''(?P<q>"|')(?P<x>.+)(?P=q)'''
version_re = r'''(?P<s>[<>=~]*)\s*(?P<n>.*)'''
def _get_value(self, lines, prefix, regex):
filtered = self._lines_startwith(lines, '{0} '.format(prefix))
return self._match(filtered[0], 'x', regex) if len(filtered) else None
def _lines_startwith(self, lines, init):
return [l.strip() for l in lines if l.strip().startswith(init)]
def _match(self, line, group, regex):
ms = re.compile(regex).match(line)
if ms is not None:
return ms.groupdict().get(group, None)
def _match_groups(self, line, regex):
ms = re.compile(regex).match(line)
return ms.groups() if ms is not None else repeat(None)
| import re
from itertools import repeat
class RegexParserMixin(object):
quoted_re = r'''(?P<q>"|')(?P<x>.+)(?P=q)'''
version_re = r'''(?P<s>[<>=~]*)\s*(?P<n>.*)'''
def _get_value(self, lines, prefix, regex):
filtered = self._lines_startwith(lines, '{0} '.format(prefix))
return self._match(filtered[0], 'x', regex) if len(lines) else None
def _lines_startwith(self, lines, init):
return [l.strip() for l in lines if l.strip().startswith(init)]
def _match(self, line, group, regex):
ms = re.compile(regex).match(line)
if ms is not None:
return ms.groupdict().get(group, None)
def _match_groups(self, line, regex):
ms = re.compile(regex).match(line)
return ms.groups() if ms is not None else repeat(None)
| mit | Python |
43ec7668045b04ca6b0d265113c763f22b40396d | Remove bin generation | matslindh/codingchallenges,matslindh/codingchallenges | knowit2016/knowit19.py | knowit2016/knowit19.py | import string
out = open("input/knowit19_output.pgm", "w")
#out_bin = open("input/knowit19_output.bin", "wb")
s = ''.join(open("input/knowit19").readlines()).replace("\n", '')
for i in range(0, len(s), 2):
pass
#out_bin.write(chr(int(s[i:i + 2])).encode("ascii"))
height = 21
width = int(len(s) / (height * 2))
out.write("P2\n" + str(width) + ' ' + str(height) + "\n99\n")
for i in range(0, len(s), 2):
letter = '99' if int(s[i:i+2]) % 2 == 0 else '0'
if len(letter) < 2:
letter = ' ' + letter
out.write(letter + ' ')
if (i + 2) % width == 0:
out.write("\n")
for line in open("input/knowit19").readlines():
line = line.strip()
# print(int(line)&0xff)
str = ''.join(open("input/knowit19").readlines()).replace("\n", '')
freq = {}
for i in range(2, len(str), 2):
v = int(str[i:i+2])
v_diff = v - int(str[i-2:i])
if v not in freq:
freq[v] = 0
freq[v] += 1
for k in freq:
print(k, freq[k])
"""
v = int(str)
while v:
x = v & 0xff
print(chr(x))
v >>= 8
print(v)""" | import string
out = open("input/knowit19_output.pgm", "w")
out_bin = open("input/knowit19_output.bin", "wb")
s = ''.join(open("input/knowit19").readlines()).replace("\n", '')
for i in range(0, len(s), 2):
out_bin.write(chr(int(s[i:i + 2])).encode("ascii"))
height = 21
width = int(len(s) / (height * 2))
out.write("P2\n" + str(width) + ' ' + str(height) + "\n99\n")
for i in range(0, len(s), 2):
letter = '99' if int(s[i:i+2]) % 2 == 0 else '0'
if len(letter) < 2:
letter = ' ' + letter
out.write(letter + ' ')
if (i + 2) % width == 0:
out.write("\n")
for line in open("input/knowit19").readlines():
line = line.strip()
# print(int(line)&0xff)
str = ''.join(open("input/knowit19").readlines()).replace("\n", '')
freq = {}
for i in range(2, len(str), 2):
v = int(str[i:i+2])
v_diff = v - int(str[i-2:i])
if v not in freq:
freq[v] = 0
freq[v] += 1
for k in freq:
print(k, freq[k])
"""
v = int(str)
while v:
x = v & 0xff
print(chr(x))
v >>= 8
print(v)""" | mit | Python |
9633f3ee1a3431cb373a4652afbfc2cd8b3b4c23 | Allow specifying modules to be mocked | Stvad/CrowdAnki,Stvad/CrowdAnki,Stvad/CrowdAnki | test_utils/anki/__init__.py | test_utils/anki/__init__.py | from typing import List
from typing import Optional
import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
module_names_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
def __init__(self, module_names_list: Optional[List[str]] = None):
if module_names_list is None:
module_names_list = self.module_names_list
self.shadowed_modules = {}
for module_name in module_names_list:
self.shadowed_modules[module_name] = sys.modules.get(module_name)
sys.modules[module_name] = MagicMock()
def unmock(self):
for module_name, module in self.shadowed_modules.items():
if module is not None:
sys.modules[module_name] = module
else:
if module_name in sys.modules:
del sys.modules[module_name]
| import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
modules_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
def __init__(self):
self.shadowed_modules = {}
for module in self.modules_list:
self.shadowed_modules[module] = sys.modules.get(module)
sys.modules[module] = MagicMock()
def unmock(self):
for module in self.modules_list:
shadowed_module = self.shadowed_modules[module]
if shadowed_module is not None:
sys.modules[module] = shadowed_module
else:
if module in sys.modules:
del sys.modules[module]
| mit | Python |
30ddea8aa577bc6bff64c9da543c559258a4e51f | fix plot_rereference_eeg example | bloyl/mne-python,olafhauk/mne-python,larsoner/mne-python,kambysese/mne-python,drammock/mne-python,teonlamont/mne-python,Teekuningas/mne-python,Teekuningas/mne-python,cjayb/mne-python,pravsripad/mne-python,olafhauk/mne-python,wmvanvliet/mne-python,olafhauk/mne-python,adykstra/mne-python,mne-tools/mne-python,Eric89GXL/mne-python,rkmaddox/mne-python,jaeilepp/mne-python,drammock/mne-python,mne-tools/mne-python,larsoner/mne-python,kingjr/mne-python,kingjr/mne-python,Teekuningas/mne-python,bloyl/mne-python,wmvanvliet/mne-python,kingjr/mne-python,drammock/mne-python,pravsripad/mne-python,teonlamont/mne-python,Eric89GXL/mne-python,jaeilepp/mne-python,larsoner/mne-python,pravsripad/mne-python,kambysese/mne-python,cjayb/mne-python,rkmaddox/mne-python,mne-tools/mne-python,adykstra/mne-python,wmvanvliet/mne-python | examples/preprocessing/plot_rereference_eeg.py | examples/preprocessing/plot_rereference_eeg.py | """
=============================
Re-referencing the EEG signal
=============================
Load raw data and apply some EEG referencing schemes.
"""
# Authors: Marijn van Vliet <w.m.vanvliet@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
#
# License: BSD (3-clause)
import mne
from mne.datasets import sample
from matplotlib import pyplot as plt
print(__doc__)
# Setup for reading the raw data
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
event_id, tmin, tmax = 1, -0.2, 0.5
# Read the raw data
raw = mne.io.read_raw_fif(raw_fname, preload=True)
events = mne.read_events(event_fname)
# The EEG channels will be plotted to visualize the difference in referencing
# schemes.
picks = mne.pick_types(raw.info, meg=False, eeg=True, eog=True, exclude='bads')
###############################################################################
# Apply different EEG referencing schemes and plot the resulting evokeds.
reject = dict(eog=150e-6)
epochs_params = dict(events=events, event_id=event_id, tmin=tmin, tmax=tmax,
picks=picks, reject=reject)
fig, (ax1, ax2, ax3) = plt.subplots(nrows=3, ncols=1, sharex=True)
# No reference. This assumes that the EEG has already been referenced properly.
# This explicitly prevents MNE from adding a default EEG reference.
raw.set_eeg_reference([])
evoked_no_ref = mne.Epochs(raw, **epochs_params).average()
evoked_no_ref.plot(axes=ax1, titles=dict(eeg='EEG Original reference'))
# Average reference. This is normally added by default, but can also be added
# explicitly.
raw.set_eeg_reference()
evoked_car = mne.Epochs(raw, **epochs_params).average()
evoked_car.plot(axes=ax2, titles=dict(eeg='EEG Average reference'))
# Re-reference from an average reference to the mean of channels EEG 001 and
# EEG 002.
raw.set_eeg_reference(['EEG 001', 'EEG 002'])
evoked_custom = mne.Epochs(raw, **epochs_params).average()
evoked_custom.plot(axes=ax3, titles=dict(eeg='EEG Custom reference'))
| """
=============================
Re-referencing the EEG signal
=============================
Load raw data and apply some EEG referencing schemes.
"""
# Authors: Marijn van Vliet <w.m.vanvliet@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
#
# License: BSD (3-clause)
import mne
from mne.datasets import sample
from matplotlib import pyplot as plt
print(__doc__)
# Setup for reading the raw data
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
event_id, tmin, tmax = 1, -0.2, 0.5
# Read the raw data
raw = mne.io.read_raw_fif(raw_fname, preload=True)
events = mne.read_events(event_fname)
# The EEG channels will be plotted to visualize the difference in referencing
# schemes.
picks = mne.pick_types(raw.info, meg=False, eeg=True, eog=True, exclude='bads')
###############################################################################
# Apply different EEG referencing schemes and plot the resulting evokeds.
reject = dict(eeg=180e-6, eog=150e-6)
epochs_params = dict(events=events, event_id=event_id, tmin=tmin, tmax=tmax,
picks=picks, reject=reject)
fig, (ax1, ax2, ax3) = plt.subplots(nrows=3, ncols=1, sharex=True)
# No reference. This assumes that the EEG has already been referenced properly.
# This explicitly prevents MNE from adding a default EEG reference.
raw.set_eeg_reference([])
evoked_no_ref = mne.Epochs(raw, **epochs_params).average()
evoked_no_ref.plot(axes=ax1, titles=dict(eeg='EEG Original reference'))
# Average reference. This is normally added by default, but can also be added
# explicitly.
raw.set_eeg_reference()
evoked_car = mne.Epochs(raw, **epochs_params).average()
evoked_car.plot(axes=ax2, titles=dict(eeg='EEG Average reference'))
# Re-reference from an average reference to the mean of channels EEG 001 and
# EEG 002.
raw.set_eeg_reference(['EEG 001', 'EEG 002'])
evoked_custom = mne.Epochs(raw, **epochs_params).average()
evoked_custom.plot(axes=ax3, titles=dict(eeg='EEG Custom reference'))
| bsd-3-clause | Python |
8d6905d35dbae5cc16769989bc666e01c4e289ef | fix setting xlabel and ylabel (#8454) | Teekuningas/mne-python,mne-tools/mne-python,pravsripad/mne-python,Eric89GXL/mne-python,Teekuningas/mne-python,pravsripad/mne-python,kingjr/mne-python,mne-tools/mne-python,kingjr/mne-python,bloyl/mne-python,rkmaddox/mne-python,pravsripad/mne-python,drammock/mne-python,mne-tools/mne-python,drammock/mne-python,bloyl/mne-python,wmvanvliet/mne-python,rkmaddox/mne-python,kambysese/mne-python,larsoner/mne-python,Eric89GXL/mne-python,larsoner/mne-python,wmvanvliet/mne-python,larsoner/mne-python,drammock/mne-python,olafhauk/mne-python,kingjr/mne-python,wmvanvliet/mne-python,olafhauk/mne-python,kambysese/mne-python,olafhauk/mne-python,Teekuningas/mne-python | examples/visualization/plot_topo_customized.py | examples/visualization/plot_topo_customized.py | """
========================================
Plot custom topographies for MEG sensors
========================================
This example exposes the :func:`~mne.viz.iter_topography` function that makes
it very easy to generate custom sensor topography plots.
Here we will plot the power spectrum of each channel on a topographic
layout.
"""
# Author: Denis A. Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
import mne
from mne.viz import iter_topography
from mne import io
from mne.time_frequency import psd_welch
from mne.datasets import sample
print(__doc__)
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
raw = io.read_raw_fif(raw_fname, preload=True)
raw.filter(1, 20, fir_design='firwin')
picks = mne.pick_types(raw.info, meg=True, exclude=[])
tmin, tmax = 0, 120 # use the first 120s of data
fmin, fmax = 2, 20 # look at frequencies between 2 and 20Hz
n_fft = 2048 # the FFT size (n_fft). Ideally a power of 2
psds, freqs = psd_welch(raw, picks=picks, tmin=tmin, tmax=tmax,
fmin=fmin, fmax=fmax)
psds = 20 * np.log10(psds) # scale to dB
def my_callback(ax, ch_idx):
"""
This block of code is executed once you click on one of the channel axes
in the plot. To work with the viz internals, this function should only take
two parameters, the axis and the channel or data index.
"""
ax.plot(freqs, psds[ch_idx], color='red')
ax.set_xlabel('Frequency (Hz)')
ax.set_ylabel('Power (dB)')
for ax, idx in iter_topography(raw.info,
fig_facecolor='white',
axis_facecolor='white',
axis_spinecolor='white',
on_pick=my_callback):
ax.plot(psds[idx], color='red')
plt.gcf().suptitle('Power spectral densities')
plt.show()
| """
========================================
Plot custom topographies for MEG sensors
========================================
This example exposes the :func:`~mne.viz.iter_topography` function that makes
it very easy to generate custom sensor topography plots.
Here we will plot the power spectrum of each channel on a topographic
layout.
"""
# Author: Denis A. Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
import mne
from mne.viz import iter_topography
from mne import io
from mne.time_frequency import psd_welch
from mne.datasets import sample
print(__doc__)
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
raw = io.read_raw_fif(raw_fname, preload=True)
raw.filter(1, 20, fir_design='firwin')
picks = mne.pick_types(raw.info, meg=True, exclude=[])
tmin, tmax = 0, 120 # use the first 120s of data
fmin, fmax = 2, 20 # look at frequencies between 2 and 20Hz
n_fft = 2048 # the FFT size (n_fft). Ideally a power of 2
psds, freqs = psd_welch(raw, picks=picks, tmin=tmin, tmax=tmax,
fmin=fmin, fmax=fmax)
psds = 20 * np.log10(psds) # scale to dB
def my_callback(ax, ch_idx):
"""
This block of code is executed once you click on one of the channel axes
in the plot. To work with the viz internals, this function should only take
two parameters, the axis and the channel or data index.
"""
ax.plot(freqs, psds[ch_idx], color='red')
ax.set_xlabel = 'Frequency (Hz)'
ax.set_ylabel = 'Power (dB)'
for ax, idx in iter_topography(raw.info,
fig_facecolor='white',
axis_facecolor='white',
axis_spinecolor='white',
on_pick=my_callback):
ax.plot(psds[idx], color='red')
plt.gcf().suptitle('Power spectral densities')
plt.show()
| bsd-3-clause | Python |
a1bbd3d68e4729ac232f03b8980edd1dec93006d | use relative import path in package | gilsho/kryptonite | kryptonite/__init__.py | kryptonite/__init__.py | from .cipher import Cipher, DecryptionError
from .password import conceal, verify
| from cipher import Cipher, DecryptionError
from password import conceal, verify
| mit | Python |
036a3b1d0037ea0d6888df4ab4b5f052040c95c8 | Remove errant space. | ameily/mongo-python-driver,ultrabug/mongo-python-driver,ShaneHarvey/mongo-python-driver,ramnes/mongo-python-driver,mongodb/mongo-python-driver,macdiesel/mongo-python-driver,gormanb/mongo-python-driver,pigate/mongo-python-driver,brianwrf/mongo-python-driver,ramnes/mongo-python-driver,jameslittle/mongo-python-driver,rychipman/mongo-python-driver,ramnes/mongo-python-driver,ameily/mongo-python-driver,llvtt/mongo-python-driver,llvtt/mongo-python-driver,inspectlabs/mongo-python-driver,jameslittle/mongo-python-driver,mongodb/mongo-python-driver,felixonmars/mongo-python-driver,bq-xiao/mongo-python-driver,inspectlabs/mongo-python-driver,ShaneHarvey/mongo-python-driver,bq-xiao/mongo-python-driver,WingGao/mongo-python-driver,aherlihy/mongo-python-driver,brianwrf/mongo-python-driver,gormanb/mongo-python-driver,develf/mongo-python-driver,macdiesel/mongo-python-driver,WingGao/mongo-python-driver,aherlihy/mongo-python-driver,pigate/mongo-python-driver,mongodb/mongo-python-driver,rychipman/mongo-python-driver,ShaneHarvey/mongo-python-driver,felixonmars/mongo-python-driver,ultrabug/mongo-python-driver,aherlihy/mongo-python-driver,develf/mongo-python-driver | bson/py3compat.py | bson/py3compat.py | # Copyright 2009-2012 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Utility functions and definitions for python3 compatibility."""
import sys
PY3 = sys.version_info[0] == 3
if PY3:
import codecs
from io import BytesIO as StringIO
def b(s):
# BSON and socket operations deal in binary data. In
# python 3 that means instances of `bytes`. In python
# 2.6 and 2.7 you can create an alias for `bytes` using
# the b prefix (e.g. b'foo'). Python 2.4 and 2.5 don't
# provide this marker so we provide this compat function.
# In python 3.x b('foo') results in b'foo'.
# See http://python3porting.com/problems.html#nicer-solutions
return codecs.latin_1_encode(s)[0]
def bytes_from_hex(h):
return bytes.fromhex(h)
binary_type = bytes
text_type = str
else:
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
def b(s):
# See comments above. In python 2.x b('foo') is just 'foo'.
return s
def bytes_from_hex(h):
return h.decode('hex')
binary_type = str
# 2to3 will convert this to "str". That's okay
# since we won't ever get here under python3.
text_type = unicode
string_types = (binary_type, text_type)
| # Copyright 2009-2012 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Utility functions and definitions for python3 compatibility."""
import sys
PY3 = sys.version_info[0] == 3
if PY3:
import codecs
from io import BytesIO as StringIO
def b(s):
# BSON and socket operations deal in binary data. In
# python 3 that means instances of `bytes`. In python
# 2.6 and 2.7 you can create an alias for `bytes` using
# the b prefix (e.g. b'foo'). Python 2.4 and 2.5 don't
# provide this marker so we provide this compat function.
# In python 3.x b('foo') results in b'foo'.
# See http://python3porting.com/problems.html#nicer-solutions
return codecs.latin_1_encode(s)[0]
def bytes_from_hex(h):
return bytes.fromhex(h)
binary_type = bytes
text_type = str
else:
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
def b(s):
# See comments above. In python 2.x b('foo') is just 'foo'.
return s
def bytes_from_hex(h):
return h.decode('hex')
binary_type = str
# 2to3 will convert this to "str". That's okay
# since we won't ever get here under python3.
text_type = unicode
string_types = (binary_type, text_type)
| apache-2.0 | Python |
f28209b1ba9d2fe84753a05cacd810e38f314a7e | Replace hvad with parler in settings | czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq | test_settings.py | test_settings.py | # -*- coding: utf-8 -*-
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://localhost:9001/solr/default',
'TIMEOUT': 60 * 5,
'INCLUDE_SPELLING': True,
'BATCH_SIZE': 100,
'EXCLUDED_INDEXES': ['thirdpartyapp.search_indexes.BarIndex'],
},
'en': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://my-solr-server/solr/my-site-en/',
'TIMEOUT': 60 * 5,
'INCLUDE_SPELLING': True,
'BATCH_SIZE': 100,
},
'de': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://my-solr-server/solr/my-site-de/',
'TIMEOUT': 60 * 5,
'INCLUDE_SPELLING': True,
'BATCH_SIZE': 100,
},
}
HELPER_SETTINGS = {
'ROOT_URLCONF': 'aldryn_faq.tests.urls',
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'adminsortable',
'aldryn_faq',
'djangocms_text_ckeditor',
'parler',
'sortedm2m',
],
"HAYSTACK_CONNECTIONS": HAYSTACK_CONNECTIONS
}
def run():
from djangocms_helper import runner
runner.cms('aldryn_faq')
if __name__ == "__main__":
run()
| # -*- coding: utf-8 -*-
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://localhost:9001/solr/default',
'TIMEOUT': 60 * 5,
'INCLUDE_SPELLING': True,
'BATCH_SIZE': 100,
'EXCLUDED_INDEXES': ['thirdpartyapp.search_indexes.BarIndex'],
},
'en': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://my-solr-server/solr/my-site-en/',
'TIMEOUT': 60 * 5,
'INCLUDE_SPELLING': True,
'BATCH_SIZE': 100,
},
'de': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://my-solr-server/solr/my-site-de/',
'TIMEOUT': 60 * 5,
'INCLUDE_SPELLING': True,
'BATCH_SIZE': 100,
},
}
HELPER_SETTINGS = {
'ROOT_URLCONF': 'aldryn_faq.tests.urls',
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'adminsortable',
'aldryn_faq',
'djangocms_text_ckeditor',
'hvad',
'sortedm2m',
],
"HAYSTACK_CONNECTIONS": HAYSTACK_CONNECTIONS
}
def run():
from djangocms_helper import runner
runner.cms('aldryn_faq')
if __name__ == "__main__":
run()
| bsd-3-clause | Python |
30956e5cff8b94b4c6998f34a3dfbfaa423dac9b | load up typography | tBaxter/Tango,tBaxter/Tango | test_settings.py | test_settings.py | SECRET_KEY = "lorem ipsum"
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'autotagger',
'tango_shared',
'tango_user',
'video',
'typogrify' # installed by shared, keeps templates happy
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SITE_ID = 1
AUTH_USER_MODEL = 'tango_user.Profile'
ROOT_URLCONF = 'test_urls'
#stripped down middleware
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ACTIVITY_MONITOR_MODELS = (
{
'model': 'auth.user', # Required: the model to watch.
'verb': " joined ",
},
)
| SECRET_KEY = "lorem ipsum"
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'autotagger',
'tango_shared',
'tango_user',
'video'
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SITE_ID = 1
AUTH_USER_MODEL = 'tango_user.Profile'
ROOT_URLCONF = 'test_urls'
#stripped down middleware
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ACTIVITY_MONITOR_MODELS = (
{
'model': 'auth.user', # Required: the model to watch.
'verb': " joined ",
},
)
| mit | Python |
ae98a9955545a8a895c4b66802e2762fdffe9272 | set allowed hosts | bhoggard/nurtureart,bhoggard/nurtureart,bhoggard/nurtureart | nurtureart/settings/prod.py | nurtureart/settings/prod.py | from .base import *
DEBUG = False
TEMPLATE_DEBUG = False
SECRET_KEY = os.environ.get('SECRET_KEY')
ALLOWED_HOSTS = ['nurtureart.artcat.com']
| from .base import *
DEBUG = False
TEMPLATE_DEBUG = False
SECRET_KEY = os.environ.get('SECRET_KEY')
| mit | Python |
0b5d8476c7656d6088fd4cf62bdcbce6bd8dfd4c | remove unused imports | DOAJ/doaj,DOAJ/doaj,DOAJ/doaj,DOAJ/doaj | portality/migrate/20191128_2056_keywords_to_lower/operations.py | portality/migrate/20191128_2056_keywords_to_lower/operations.py | def rewrite_keywords(journal_like):
bib = journal_like.bibjson()
kwords = [k.lower() for k in bib.keywords]
bib.set_keywords(kwords)
| from portality import models
from portality.core import app
import esprit
import re
def rewrite_keywords(journal_like):
bib = journal_like.bibjson()
kwords = [k.lower() for k in bib.keywords]
bib.set_keywords(kwords)
| apache-2.0 | Python |
7164a4c14d8b4abfbeac83475539c9c68a5d5807 | Remove VALID_USERNAME regex from utils.py | hasgeek/funnel,hasgeek/funnel,hasgeek/lastuser,hasgeek/funnel,hasgeek/funnel,hasgeek/lastuser,hasgeek/lastuser,hasgeek/lastuser,hasgeek/funnel,hasgeek/lastuser | lastuser_core/utils.py | lastuser_core/utils.py | # -*- coding: utf-8 -*-
# Id generation
import re
import urlparse
from urllib import urlencode as make_query_string
# --- Constants ---------------------------------------------------------------
PHONE_STRIP_RE = re.compile(r'[\t .()\[\]-]+')
PHONE_VALID_RE = re.compile(r'^\+[0-9]+$')
# --- Utilities ---------------------------------------------------------------
def make_redirect_url(url, use_fragment=False, **params):
urlparts = list(urlparse.urlsplit(url))
# URL parts:
# 0: scheme
# 1: netloc
# 2: path
# 3: query -- appended to
# 4: fragment
queryparts = urlparse.parse_qsl(urlparts[3], keep_blank_values=True)
queryparts.extend([(k, v) for k, v in params.items() if v is not None])
queryparts = [(key.encode('utf-8') if isinstance(key, unicode) else key,
value.encode('utf-8') if isinstance(value, unicode) else value) for key, value in queryparts]
if use_fragment:
urlparts[3] = None
urlparts[4] = make_query_string(queryparts)
else:
urlparts[3] = make_query_string(queryparts)
return urlparse.urlunsplit(urlparts)
def strip_phone(candidate):
return PHONE_STRIP_RE.sub('', candidate)
def valid_phone(candidate):
return not PHONE_VALID_RE.search(candidate) is None
def get_gravatar_md5sum(url):
"""
Retrieve the MD5 sum from a Gravatar URL. Returns None if the URL is invalid.
>>> get_gravatar_md5sum(
... 'https://secure.gravatar.com/avatar/31b0e7df40a7e327e7908f61a314fe47?d=https'
... '://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-140.png')
'31b0e7df40a7e327e7908f61a314fe47'
"""
parts = urlparse.urlparse(url)
if parts.netloc not in ['www.gravatar.com', 'secure.gravatar.com', 'gravatar.com']:
return None
if not parts.path.startswith('/avatar/'):
return None
md5sum = parts.path.split('/')[2]
if len(md5sum) != 32:
return None
return md5sum
| # -*- coding: utf-8 -*-
# Id generation
import re
import urlparse
from urllib import urlencode as make_query_string
# --- Constants ---------------------------------------------------------------
USERNAME_VALID_RE = re.compile('^[a-z0-9][a-z0-9-]*[a-z0-9]$')
PHONE_STRIP_RE = re.compile(r'[\t .()\[\]-]+')
PHONE_VALID_RE = re.compile(r'^\+[0-9]+$')
# --- Utilities ---------------------------------------------------------------
def make_redirect_url(url, use_fragment=False, **params):
urlparts = list(urlparse.urlsplit(url))
# URL parts:
# 0: scheme
# 1: netloc
# 2: path
# 3: query -- appended to
# 4: fragment
queryparts = urlparse.parse_qsl(urlparts[3], keep_blank_values=True)
queryparts.extend([(k, v) for k, v in params.items() if v is not None])
queryparts = [(key.encode('utf-8') if isinstance(key, unicode) else key,
value.encode('utf-8') if isinstance(value, unicode) else value) for key, value in queryparts]
if use_fragment:
urlparts[3] = None
urlparts[4] = make_query_string(queryparts)
else:
urlparts[3] = make_query_string(queryparts)
return urlparse.urlunsplit(urlparts)
def strip_phone(candidate):
return PHONE_STRIP_RE.sub('', candidate)
def valid_phone(candidate):
return not PHONE_VALID_RE.search(candidate) is None
def get_gravatar_md5sum(url):
"""
Retrieve the MD5 sum from a Gravatar URL. Returns None if the URL is invalid.
>>> get_gravatar_md5sum(
... 'https://secure.gravatar.com/avatar/31b0e7df40a7e327e7908f61a314fe47?d=https'
... '://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-140.png')
'31b0e7df40a7e327e7908f61a314fe47'
"""
parts = urlparse.urlparse(url)
if parts.netloc not in ['www.gravatar.com', 'secure.gravatar.com', 'gravatar.com']:
return None
if not parts.path.startswith('/avatar/'):
return None
md5sum = parts.path.split('/')[2]
if len(md5sum) != 32:
return None
return md5sum
| agpl-3.0 | Python |
67efd82370159628ac0c19ad89fcf186efa6a535 | Fix small issues | cboling/xos,zdw/xos,open-cloud/xos,zdw/xos,opencord/xos,zdw/xos,open-cloud/xos,cboling/xos,zdw/xos,cboling/xos,cboling/xos,open-cloud/xos,cboling/xos,opencord/xos,opencord/xos | xos/observers/helloworldservice_complete/steps/sync_helloworldtenant.py | xos/observers/helloworldservice_complete/steps/sync_helloworldtenant.py | import os
import sys
from django.db.models import Q, F
from helloworldservice_complete.models import HelloWorldServiceComplete, HelloWorldTenantComplete
from observers.base.SyncInstanceUsingAnsible import SyncInstanceUsingAnsible
parentdir = os.path.join(os.path.dirname(__file__), "..")
sys.path.insert(0, parentdir)
# Class to define how we sync a tenant. Using SyncInstanceUsingAnsible we
# indicate where the find the YAML for ansible, where to find the SSH key,
# and the logic for determining what tenant needs updating, what additional
# attributes are needed, and how to delete an instance.
class SyncHelloWorldTenantComplete(SyncInstanceUsingAnsible):
# Indicates the position in the data model, this will run when XOS needs to
# enact a HelloWorldTenantComplete
provides = [HelloWorldTenantComplete]
# The actual model being enacted, usually the same as provides.
observes = HelloWorldTenantComplete
# Number of milliseconds between interruptions of the observer
requested_interval = 0
# The ansible template to run
template_name = "sync_helloworldtenant.yaml"
# The location of the SSH private key to use when ansible connects to
# instances.
service_key_name = "/opt/xos/observers/helloworldservice_complete/helloworldservice_private_key"
def __init__(self, *args, **kwargs):
super(SyncHelloWorldTenantComplete, self).__init__(*args, **kwargs)
# Defines the logic for determining what HelloWorldTenantCompletes need to be
# enacted.
def fetch_pending(self, deleted):
# If the update is not a deletion, then we get all of the instnaces that
# have been updated or have not been enacted.
if (not deleted):
objs = HelloWorldTenantComplete.get_tenant_objects().filter(
Q(enacted__lt=F('updated')) | Q(enacted=None), Q(lazy_blocked=False))
else:
# If this is a deletion we get all of the deleted tenants..
objs = HelloWorldTenantComplete.get_deleted_tenant_objects()
return objs
# Gets the attributes that are used by the Ansible template but are not
# part of the set of default attributes.
def get_extra_attributes(self, o):
return {"display_message": o.display_message}
| import os
import sys
from django.db.models import Q, F
from helloworldservice_complete.models import HelloWorldServiceComplete, HelloWorldTenantComplete
from observers.base.SyncInstanceUsingAnsible import SyncInstanceUsingAnsible
parentdir = os.path.join(os.path.dirname(__file__), "..")
sys.path.insert(0, parentdir)
# Class to define how we sync a tenant. Using SyncInstanceUsingAnsible we
# indicate where the find the YAML for ansible, where to find the SSH key,
# and the logic for determining what tenant needs updating, what additional
# attributes are needed, and how to delete an instance.
class SyncHelloWorldTenantComplete(SyncInstanceUsingAnsible):
# Indicates the position in the data model, this will run when XOS needs to
# enact a HelloWorldTenantComplete
provides = [HelloWorldTenantComplete]
# The actual model being enacted, usually the same as provides.
observes = HelloWorldTenantComplete
# Number of milliseconds between interruptions of the observer
requested_interval = 0
# The ansible template to run
template_name = "sync_helloworldtenant.yaml"
# The location of the SSH private key to use when ansible connects to
# instances.
service_key_name = "/opt/xos/observers/helloworldservice_complete/helloworldservice_private_key"
def __init__(self, *args, **kwargs):
super(HelloWorldTenantComplete self).__init__(*args, **kwargs)
# Defines the logic for determining what HelloWorldTenantCompletes need to be
# enacted.
def fetch_pending(self, deleted):
# If the update is not a deletion, then we get all of the instnaces that
# have been updated or have not been enacted.
if (not deleted):
objs = HelloWorldTenantComplete.get_tenant_objects().filter(
Q(enacted__lt=F('updated')) | Q(enacted=None), Q(lazy_blocked=False))
else:
# If this is a deletion we get all of the deleted tenants..
objs = HelloWorldTenantComplete.get_deleted_tenant_objects()
return objs
# Gets the attributes that are used by the Ansible template but are not
# part of the set of default attributes.
def get_extra_attributes(self, o):
return {"display_message": o.display_message}
| apache-2.0 | Python |
f203d508fc83158794061a56cdd9f0a941716883 | Bump version | theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs | bulbs/__init__.py | bulbs/__init__.py | __version__ = "3.9.0"
| __version__ = "3.8.1"
| mit | Python |
24515e08362cdfc1ee6e4a8582ae4988055ca946 | Update admin | vinta/sublimall-server,socketubs/sublimall-server,vinta/sublimall-server | sublimall/accounts/admin.py | sublimall/accounts/admin.py | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Member
from .models import Registration
class MemberAdmin(admin.ModelAdmin):
list_display = ('email', 'api_key', 'is_active', )
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('get_email', 'key', )
def get_email(self, obj):
return obj.member.email
get_email.short_description = 'Email'
admin.site.register(Member, MemberAdmin)
admin.site.register(Registration, RegistrationAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from django.contrib.auth.models import User
from django.contrib.auth.admin import UserAdmin
from .models import Member
from .models import Registration
class MemberInline(admin.TabularInline):
model = Member
can_delete = False
class MemberAdmin(admin.ModelAdmin):
list_display = ('get_email', 'api_key', )
def get_email(self, obj):
return obj.user.email
get_email.short_description = 'Email'
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('get_email', 'key', )
def get_email(self, obj):
return obj.member.user.email
get_email.short_description = 'Email'
class UserAdmin(UserAdmin):
inlines = (MemberInline, )
# admin.site.unregister(User)
# admin.site.register(User, UserAdmin)
# admin.site.register(Member, MemberAdmin)
admin.site.register(Member)
admin.site.register(Registration, RegistrationAdmin)
| mit | Python |
05fe7895a672b3a221fb4a02ba1f37b772e30a9b | Update openacademy_course.py | JesusZapata/openacademy-project | openacademy/model/openacademy_course.py | openacademy/model/openacademy_course.py | # -*- coding: utf-8 -*-
from openerp import api, models, fields, _
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
new_field = fields.Char('My New Field', help="My new help")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
_("The title of the course should not be the description")),
('name_unique',
'UNIQUE(name)',
_("The course title must be unique")),
]
@api.multi
def copy(self, default=None):
default = dict(default or {})
copied_count = self.search_count(
[('name', '=like', _(u"Copy of {}%").format(self.name))])
if not copied_count:
new_name = _(u"Copy of {}").format(self.name)
else:
new_name = _(u"Copy of {} ({})").format(self.name, copied_count)
default['name'] = new_name
default['test_1'] = _('Test 1')
default['test_2'] = _('Test 2')
default['test_3'] = _('Test 3')
default['test_4'] = _('Test 4')
return super(Course, self).copy(default)
| # -*- coding: utf-8 -*-
from openerp import api, models, fields, _
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
new_field = fields.Char('My New Field', help="My new help")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
_("The title of the course should not be the description")),
('name_unique',
'UNIQUE(name)',
_("The course title must be unique")),
]
@api.multi
def copy(self, default=None):
default = dict(default or {})
copied_count = self.search_count(
[('name', '=like', _(u"Copy of {}%").format(self.name))])
if not copied_count:
new_name = _(u"Copy of {}").format(self.name)
else:
new_name = _(u"Copy of {} ({})").format(self.name, copied_count)
default['name'] = new_name
default['test_1'] = _('Test 1')
default['test_2'] = _('Test 2')
default['test_3'] = _('Test 3')
return super(Course, self).copy(default)
| apache-2.0 | Python |
6f5d25d6dec2455bf408cb2292ff4d33f248cdde | update get_identifier method | datamade/la-metro-councilmatic,datamade/la-metro-councilmatic,datamade/la-metro-councilmatic,datamade/la-metro-councilmatic | lametro/utils.py | lametro/utils.py | import re
import pytz
from datetime import datetime, timedelta
import requests
import lxml.html
from lxml.etree import tostring
from django.conf import settings
from django.utils import timezone
from councilmatic_core.models import Organization, Event
app_timezone = pytz.timezone(settings.TIME_ZONE)
def format_full_text(full_text):
'''
The search results and board report titles (on the BillDetail) should show the "SUBJECT:" header from the board report when present.
The ocr_full_text contains this information. Some example snippets:
# Subject header followed by two linebreaks.
..Subject\nSUBJECT:\tFOOD SERVICE OPERATOR\n\n..Action\nACTION:\tAWARD SERVICES CONTRACT\n\n..
# Subject header followed by a return carriage and linebreak.
..Subject/Action\r\nSUBJECT: MONTHLY REPORT ON CRENSHAW/LAX SAFETY\r\nACTION: RECEIVE AND FILE\r\n
# Subject header with a linebreak in the middle and without an ACTION header.
..Subject\nSUBJECT: REVISED MOTION BY DIRECTORS HAHN, SOLIS, \nGARCIA, AND DUPONT-WALKER\n..Title\n
'''
results = ''
if full_text:
clean_full_text = full_text.replace('\n\n', 'NEWLINE').replace('\r\n', 'NEWLINE').replace('\n..', 'NEWLINE').replace('\n', ' ')
match = re.search('(SUBJECT:)(.*?)(NEWLINE|ACTION:)', clean_full_text)
if match:
results = match.group(2)
return results
def parse_subject(text):
if ('[PROJECT OR SERVICE NAME]' not in text) and ('[DESCRIPTION]' not in text) and ('[CONTRACT NUMBER]' not in text):
return text.strip()
def get_identifier(obj_or_string):
if isinstance(obj_or_string, string):
return obj_or_string
return bill.id
# to test:
# run update_index locally
# delete a bill in a django shell
# run update_index --remove locally so that this method runs
| import re
import pytz
from datetime import datetime, timedelta
import requests
import lxml.html
from lxml.etree import tostring
from django.conf import settings
from django.utils import timezone
from councilmatic_core.models import Organization, Event
app_timezone = pytz.timezone(settings.TIME_ZONE)
def format_full_text(full_text):
'''
The search results and board report titles (on the BillDetail) should show the "SUBJECT:" header from the board report when present.
The ocr_full_text contains this information. Some example snippets:
# Subject header followed by two linebreaks.
..Subject\nSUBJECT:\tFOOD SERVICE OPERATOR\n\n..Action\nACTION:\tAWARD SERVICES CONTRACT\n\n..
# Subject header followed by a return carriage and linebreak.
..Subject/Action\r\nSUBJECT: MONTHLY REPORT ON CRENSHAW/LAX SAFETY\r\nACTION: RECEIVE AND FILE\r\n
# Subject header with a linebreak in the middle and without an ACTION header.
..Subject\nSUBJECT: REVISED MOTION BY DIRECTORS HAHN, SOLIS, \nGARCIA, AND DUPONT-WALKER\n..Title\n
'''
results = ''
if full_text:
clean_full_text = full_text.replace('\n\n', 'NEWLINE').replace('\r\n', 'NEWLINE').replace('\n..', 'NEWLINE').replace('\n', ' ')
match = re.search('(SUBJECT:)(.*?)(NEWLINE|ACTION:)', clean_full_text)
if match:
results = match.group(2)
return results
def parse_subject(text):
if ('[PROJECT OR SERVICE NAME]' not in text) and ('[DESCRIPTION]' not in text) and ('[CONTRACT NUMBER]' not in text):
return text.strip()
def get_identifier(obj_or_string):
# obj_or_string is the ocd-bill string or an instance of a bill
# return id always
# id == ocd-bill string
# to test:
# run update_index locally
# delete a bill in a django shell
# run update_index --remove locally so that this method runs
| mit | Python |
deb87fefcc7fa76de3ae29ae58e816e49184d100 | Add numpy.round to model api | openfisca/openfisca-core,openfisca/openfisca-core | openfisca_core/model_api.py | openfisca_core/model_api.py | # -*- coding: utf-8 -*-
from datetime import date # noqa analysis:ignore
from numpy import ( # noqa analysis:ignore
logical_not as not_,
maximum as max_,
minimum as min_,
round as round_,
select,
where,
)
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
| # -*- coding: utf-8 -*-
from datetime import date # noqa analysis:ignore
from numpy import maximum as max_, minimum as min_, logical_not as not_, where, select # noqa analysis:ignore
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
| agpl-3.0 | Python |
bf3729cfb2d4b98077e4936c8f184c20df99506d | fix reporting of "no instances" found. | GoogleCloudPlatform/gcpdiag,GoogleCloudPlatform/gcpdiag,GoogleCloudPlatform/gcpdiag | gcpdiag/lint/gce/err_2021_002_osconfig_perm.py | gcpdiag/lint/gce/err_2021_002_osconfig_perm.py | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""OS Config service account has the required permissions.
The OS Config service account (@gcp-sa-osconfig.iam.gserviceaccount.com) must
have the osconfig.serviceAgent role.
"""
import operator
from gcpdiag import lint, models
from gcpdiag.queries import crm, gce, iam
ROLE = 'roles/osconfig.serviceAgent'
#check metadata on project first if not per instance and skip get_metadata
def prefetch_rule(context: models.Context):
# Make sure that we have the IAM policy in cache.
project_ids = {i.project_id for i in gce.get_instances(context).values()}
for pid in project_ids:
iam.get_project_policy(pid)
def run_rule(context: models.Context, report: lint.LintReportRuleInterface):
instances = gce.get_instances(context)
instances_count = 0
for i in sorted(instances.values(),
key=operator.attrgetter('project_id', 'name')):
# GKE nodes never have OS Config enabled
if i.is_gke_node():
continue
if i.get_metadata('enable-osconfig'):
osconfig_service_account = 'service-{}@gcp-sa-osconfig.iam.gserviceaccount.com'.format(
crm.get_project(i.project_id).number)
instances_count += 1
iam_policy = iam.get_project_policy(i.project_id)
sa = i.service_account
if not sa:
# if an SA is not attched to the vm check if the service agent has the correct role
if not iam_policy.has_role_permissions(
f'serviceAccount:{osconfig_service_account}', ROLE):
report.add_failed(
i,
f'service account: {osconfig_service_account}\nmissing role: {ROLE}'
)
else:
report.add_ok(i)
else:
report.add_ok(i)
if not instances_count:
report.add_skipped(None, 'no instances found with OS Config enabled')
| # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""OS Config service account has the required permissions.
The OS Config service account (@gcp-sa-osconfig.iam.gserviceaccount.com) must
have the osconfig.serviceAgent role.
"""
import operator
from gcpdiag import lint, models
from gcpdiag.queries import crm, gce, iam
ROLE = 'roles/osconfig.serviceAgent'
#check metadata on project first if not per instance and skip get_metadata
def prefetch_rule(context: models.Context):
# Make sure that we have the IAM policy in cache.
project_ids = {i.project_id for i in gce.get_instances(context).values()}
for pid in project_ids:
iam.get_project_policy(pid)
def run_rule(context: models.Context, report: lint.LintReportRuleInterface):
instances = gce.get_instances(context)
instances_count = 0
for i in sorted(instances.values(),
key=operator.attrgetter('project_id', 'name')):
# GKE nodes never have OS Config enabled
if i.is_gke_node():
continue
if i.get_metadata('enable-osconfig'):
osconfig_service_account = 'service-{}@gcp-sa-osconfig.iam.gserviceaccount.com'.format(
crm.get_project(i.project_id).number)
instances_count += 1
iam_policy = iam.get_project_policy(i.project_id)
sa = i.service_account
if not sa:
# if an SA is not attched to the vm check if the service agent has the correct role
if not iam_policy.has_role_permissions(
f'serviceAccount:{osconfig_service_account}', ROLE):
report.add_failed(
i,
f'service account: {osconfig_service_account}\nmissing role: {ROLE}'
)
else:
report.add_ok(i)
else:
report.add_ok(i)
if not instances_count:
report.add_skipped(None, 'no instances found with OS Config enabled')
| apache-2.0 | Python |
b7b2b31f93fcf01b79d148d2296726de73d4b1e8 | Fix "Allow help options to ddev run command" (#5605) | DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core | datadog_checks_dev/datadog_checks/dev/tooling/commands/run.py | datadog_checks_dev/datadog_checks/dev/tooling/commands/run.py | # (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import click
from ...subprocess import run_command
from ...utils import chdir
from ..constants import get_root
from .console import UNKNOWN_OPTIONS
@click.command(context_settings=UNKNOWN_OPTIONS, short_help='Run commands in the proper repo')
@click.argument('args', nargs=-1)
@click.pass_context
def run(ctx, args):
"""Run commands in the proper repo."""
if not args or (len(args) == 1 and args[0] in ('-h', '--help')):
click.echo(ctx.get_help())
return
with chdir(get_root()):
result = run_command(args)
ctx.exit(result.code)
| # (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import click
from ...subprocess import run_command
from ...utils import chdir
from ..constants import get_root
from .console import UNKNOWN_OPTIONS
@click.command(context_settings=UNKNOWN_OPTIONS, short_help='Run commands in the proper repo')
@click.argument('args', nargs=-1)
@click.pass_context
def run(ctx, args):
"""Run commands in the proper repo."""
if not args or '-h' in args or '--help' in args:
click.echo(ctx.get_help())
return
with chdir(get_root()):
result = run_command(args)
ctx.exit(result.code)
| bsd-3-clause | Python |
b6c7338666c89843d734517e7efc8a0336bedd3b | Fix url pattern to stop requiring two trailing slashes. | RickMohr/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,recklessromeo/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core,recklessromeo/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core | opentreemap/treemap/urls.py | opentreemap/treemap/urls.py | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf.urls import patterns, include, url
from treemap.views import index, settings
urlpatterns = patterns(
'',
url(r'^$', index),
url(r'^config/settings.js$', settings)
)
| from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf.urls import patterns, include, url
from treemap.views import index, settings
urlpatterns = patterns(
'',
url(r'^/$', index),
url(r'^config/settings.js$', settings)
)
| agpl-3.0 | Python |
ccd2afdc687c3d6b7d01bed130e1b0097a4fdc2d | Implement experiment workflow execution with transpose method. | InScience/DAMIS-old,InScience/DAMIS-old | src/damis/run_experiment.py | src/damis/run_experiment.py | import sys
from damis.models import Experiment, Connection
from damis.settings import BUILDOUT_DIR
from os.path import splitext
from algorithms.preprocess import transpose
def transpose_data_callable(X, c, *args, **kwargs):
X_absolute = BUILDOUT_DIR + '/var/www' + X
Y = '%s_transposed%s' % splitext(X)
Y_absolute = BUILDOUT_DIR + '/var/www' + Y
transpose(X_absolute, Y_absolute, int(c))
return [('Y', Y)]
def do_nothing(*args, **kwargs):
return []
# Collables which get
SERVICES = {
"UPLOAD FILE": do_nothing,
"EXISTING FILE": do_nothing,
"MIDAS FILE": do_nothing,
"TECHNICAL DETAILS": do_nothing,
"CHART": do_nothing,
# "CLEAN DATA",
# "FILTER DATA",
# "SPLIT DATA",
"TRANSPOSE DATA": transpose_data_callable,
# "TRANSFORM DATA": transform_data_callable,
# "STAT PRIMITIVES",
# "MLP",
# "C45",
# "KMEANS",
# "PCA",
# "SMACOF",
# "DMA",
# "SDS",
# "SAMANN",
# "SOM",
# "SOMMDS",
# "SELECT FEATURES",
}
## Recursively walk through through tasks.
def execute_tasks(task):
# Get INPUT and COMMON parameter values.
kwargs = {}
for pv in task.parameter_values.all():
cons = Connection.objects.filter(target=pv)
if cons:
value = cons[0].source.value
else:
value = pv.value
kwargs[pv.parameter.name] = value
# Call executable
service = SERVICES[task.algorithm.title]
response = service(**kwargs) # Response dict: name -> value
# Set OUTPUT parameter values and save.
for name, value in response:
pv = task.parameter_values.get(parameter__name=name)
pv.value = value
pv.save()
task.status = 'SAVED'
task.save()
## Call its following tasks
for pv in task.parameter_values.all():
for con in Connection.objects.filter(source=pv):
next_task = con.target.task
if next_task.status == 'SAVED':
execute_tasks(next_task)
if __name__ == '__main__':
exp_pk = sys.argv[1]
exp = Experiment.objects.get(pk=exp_pk)
first_task = exp.tasks.filter(algorithm__category='DATA')[0]
execute_tasks(first_task)
exp.status = 'FINISHED'
exp.save()
| import sys
from damis.models import Experiment
exp_pk = sys.argv[1]
exp = Experiment.objects.get(pk=exp_pk)
exp.status = 'FINISHED'
exp.save()
| agpl-3.0 | Python |
b1f6db340516050b78e20315f90ba0ac9954f0a1 | add plots for mach 3 and 2 | cuspaceflight/firefish,cuspaceflight/firefish | examples/plot_fin_flutter.py | examples/plot_fin_flutter.py | """
An example script which generates a plot of flutter velocity versus altitude.
Run via: python plot_fin_flutter.py
The plot is written to: flutter-velocity-example.pdf
"""
# Configure matplotlib to generate PDF output rather than popping a window up
import matplotlib
matplotlib.use('PDF')
import numpy as np
from matplotlib import pyplot as plt
from firefish.finflutter import model_atmosphere, flutter_velocity_transonic, flutter_velocity_supersonic
def main(output='flutter-velocity-example.pdf'):
"""
>>> import io
>>> fobj = io.BytesIO()
>>> main(fobj)
>>> assert len(fobj.getvalue()) > 0
>>> assert fobj.getvalue()[:4] == b'%PDF'
"""
zs = np.linspace(0, 20000, 200)
ps, ts, ss = model_atmosphere(zs)
rhos = (ps/1000) / (0.2869 * (ts + 273.1))
vs_t = flutter_velocity_transonic(ps, ss, 20, 10, 10, 0.2)
vs_s1 = flutter_velocity_supersonic(rhos, 26796, 7025, 0.0527, 0.06, 0.0518, 0.0058, 4.3)
vs_s2 = flutter_velocity_supersonic(rhos, 26796, 7025, 0.0527, 0.06, 0.0518, 0.0058, 3)
vs_s3 = flutter_velocity_supersonic(rhos, 26796, 7025, 0.0527, 0.06, 0.0518, 0.0058, 2)
plt.figure()
#plt.plot(zs * 1e-3, vs_t, 'r', label="transonic flutter velocity")
plt.plot(zs*1e-3, vs_s1/343.2, 'g', label="Mach 4.3")
plt.plot(zs*1e-3, vs_s2/343.2, 'r', label="Mach 3")
plt.plot(zs*1e-3, vs_s3/343.2, 'b', label="Mach 2")
plt.title('Flutter velocity vs altitude')
plt.xlabel('Altitude [km]')
plt.ylabel('Flutter Velocity [Mach]')
plt.legend()
plt.savefig(output, format='PDF')
if __name__ == '__main__':
main()
| """
An example script which generates a plot of flutter velocity versus altitude.
Run via: python plot_fin_flutter.py
The plot is written to: flutter-velocity-example.pdf
"""
# Configure matplotlib to generate PDF output rather than popping a window up
import matplotlib
matplotlib.use('PDF')
import numpy as np
from matplotlib import pyplot as plt
from firefish.finflutter import model_atmosphere, flutter_velocity_transonic, flutter_velocity_supersonic
def main(output='flutter-velocity-example.pdf'):
"""
>>> import io
>>> fobj = io.BytesIO()
>>> main(fobj)
>>> assert len(fobj.getvalue()) > 0
>>> assert fobj.getvalue()[:4] == b'%PDF'
"""
zs = np.linspace(0, 20000, 200)
ps, ts, ss = model_atmosphere(zs)
rhos = (ps/1000) / (0.2869 * (ts + 273.1))
vs_t = flutter_velocity_transonic(ps, ss, 20, 10, 10, 0.2)
vs_s = flutter_velocity_supersonic(rhos, 26796, 7025, 0.0527, 0.06, 0.0518, 0.0058, 4.3)
plt.figure()
#plt.plot(zs * 1e-3, vs_t, 'r', label="transonic flutter velocity")
plt.plot(zs*1e-3, vs_s, 'g', label="supersonic flutter velocity, Mach 4.3")
plt.title('Flutter velocity vs altitude')
plt.xlabel('Altitude [km]')
plt.ylabel('Flutter velocity [m/s]')
plt.legend()
plt.savefig(output, format='PDF')
if __name__ == '__main__':
main()
| apache-2.0 | Python |
a4e85f6b9668fc09dd0443b7b1dcfa953206c64c | Update copyright | kcha/bio_utilities,kcha/bio_utilities | src/create_random_reads.py | src/create_random_reads.py | #!/usr/bin/python
# (c) 8/31/2009, Kevin Ha, McGill University
#
# Filename: Generate random reads for testing purposes
import sys
from Bio.Seq import Seq
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
from Bio.Alphabet import IUPAC
import random
from optparse import OptionParser
READ_LEN = 50
NUM_READS = 200
###############################################################################
usage = "usage: python %prog [options] reference.fa"
parser = OptionParser(usage=usage)
parser.add_option("-l", "--read-length", dest="read_len", default=READ_LEN,
metavar="INT", type="int",
help="Read length. [%default]")
parser.add_option("-n", "--num-reads", dest="num_reads", default=NUM_READS,
metavar="INT", type="int",
help="Number of reads to generate. [%default]")
(options, args) = parser.parse_args()
if len(args) == 0:
parser.print_help()
sys.exit()
###############################################################################
input_handle = open(args[0], 'r')
# go through each sequence record
for seq_record in SeqIO.parse(input_handle, "fasta"):
len = len(seq_record)
read_count = 0
seq_obj = seq_record.seq
for i in range(1,options.num_reads+1):
read_count = read_count + 1
# get start and end coordinates of new read
end = random.randint(options.read_len, len)
start = end - options.read_len
# get read sequence
new_read_string = seq_obj[start:end]
#print new_read_string + "\t" + str(start) + "\t" + str(end)
# create new read id
new_read_id = seq_record.id + "_" + str(read_count) + "_" + str(start) + "_" + str(end)
# create new SeqRecord
new_seq_record = SeqRecord(Seq(str(new_read_string), IUPAC.unambiguous_dna), \
id=new_read_id, description="")
SeqIO.write([new_seq_record], sys.stdout, "fasta")
| #!/usr/bin/python
# @created 8/31/2009
#
# Filename: Generate random reads for testing purposes
import sys
from Bio.Seq import Seq
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
from Bio.Alphabet import IUPAC
import random
from optparse import OptionParser
READ_LEN = 50
NUM_READS = 200
###############################################################################
usage = "usage: python %prog [options] reference.fa"
parser = OptionParser(usage=usage)
parser.add_option("-l", "--read-length", dest="read_len", default=READ_LEN,
metavar="INT", type="int",
help="Read length. [%default]")
parser.add_option("-n", "--num-reads", dest="num_reads", default=NUM_READS,
metavar="INT", type="int",
help="Number of reads to generate. [%default]")
(options, args) = parser.parse_args()
if len(args) == 0:
parser.print_help()
sys.exit()
###############################################################################
input_handle = open(args[0], 'r')
# go through each sequence record
for seq_record in SeqIO.parse(input_handle, "fasta"):
len = len(seq_record)
read_count = 0
seq_obj = seq_record.seq
for i in range(1,options.num_reads+1):
read_count = read_count + 1
# get start and end coordinates of new read
end = random.randint(options.read_len, len)
start = end - options.read_len
# get read sequence
new_read_string = seq_obj[start:end]
#print new_read_string + "\t" + str(start) + "\t" + str(end)
# create new read id
new_read_id = seq_record.id + "_" + str(read_count) + "_" + str(start) + "_" + str(end)
# create new SeqRecord
new_seq_record = SeqRecord(Seq(str(new_read_string), IUPAC.unambiguous_dna), \
id=new_read_id, description="")
SeqIO.write([new_seq_record], sys.stdout, "fasta")
| mit | Python |
4a37ea6a05c00cdebd72262156d1823e6579b478 | Return to normal | brotherlogic/pictureframe,brotherlogic/pictureframe,brotherlogic/pictureframe | runAndRestart.py | runAndRestart.py | import os
import sys
verbose = False
if verbose:
print "Running in verbose mode"
lines_before = len(os.popen('find ./codestore').readlines())
if verbose:
print "\n".join(os.popen('./syncer.sh ' + sys.argv[1]).readlines())
else:
os.popen('./syncer.sh ' + sys.argv[1]).readlines()
lines_after = len(os.popen('find ./codestore').readlines())
if lines_before != lines_after:
os.popen('sudo reboot').readlines()
| import os
import sys
verbose = True
if verbose:
print "Running in verbose mode"
lines_before = len(os.popen('find ./codestore').readlines())
if verbose:
print "\n".join(os.popen('./syncer.sh ' + sys.argv[1]).readlines())
else:
os.popen('./syncer.sh ' + sys.argv[1]).readlines()
lines_after = len(os.popen('find ./codestore').readlines())
if lines_before != lines_after:
os.popen('sudo reboot').readlines()
| apache-2.0 | Python |
5bca63680255a98288474a93a48590cddf16a2da | Remove legacy route | seekheart/show_watchdog,asishm/show_watchdog,seekheart/show_watchdog,seekheart/show_watchdog,asishm/show_watchdog,asishm/show_watchdog | run_flask_app.py | run_flask_app.py | #!/usr/bin/env python
#import stuff
from flask import Flask, render_template, request, redirect, url_for, abort
from flask_wtf import Form
from watchdog import watcher
import os
import urllib.parse
from data_model.models import db, imdbInfo
from setting import DevelopmentConfig
from fuzzywuzzy import fuzz
app = Flask(__name__)
app.config.from_object('setting.Config')
doggie = watcher.Watcher()
if len(imdbInfo.query.all()) == 0:
# populate empty table
for i in doggie.tracked_shows:
dummy = imdbInfo(i["id"], i["title"], i["poster"])
db.session.add(dummy)
db.session.commit()
@app.route('/')
def home():
return render_template('homepage.html')
@app.route('/movies', methods=['GET','POST'])
def movies():
if request.method == 'POST':
show_name = request.form['showName']
email = request.form['email']
return '{}, {}'.format(show_name, email)
return redirect(url_for('home'))
@app.route('/search', methods=["GET", "POST"])
def search():
search_string = request.values['q'].strip()
if search_string == '':
return redirect(url_for('shows', id='+'.join(k for k in doggie.get_show_titles())))
#show_object = imdbInfo.query.filter(imdbInfo.Title.like("%{}%".format(request.values['q']))).first_or_404()
fuzzes = ((k, fuzz.partial_ratio(search_string, k)) for k in doggie.get_show_titles())
fuzzes = sorted(fuzzes, key=lambda x: x[1], reverse=True)
#print(fuzzes[:3])
filter_fuzzes = (fuzz for fuzz in fuzzes if fuzz[1] >= 60)
param_str = '+'.join(name[0] for name in filter_fuzzes)
if param_str:
return redirect(url_for('shows',id=param_str))
else:
abort(404)
@app.route('/shows/')
def shows():
shows = request.args.get('id').split('+')
show_objects = (imdbInfo.query.filter_by(Title=k).first().TTid for k in shows)
#print(show_objects)
if show_objects:
return render_template('index.html',
images=["../static/images/{}.jpg".format(k) for k in show_objects])
if __name__ == '__main__':
app.run(port=DevelopmentConfig.PORT, debug=DevelopmentConfig.DEBUG)
| #!/usr/bin/env python
#import stuff
from flask import Flask, render_template, request, redirect, url_for, abort
from flask_wtf import Form
from watchdog import watcher
import os
import urllib.parse
from data_model.models import db, imdbInfo
from setting import DevelopmentConfig
from fuzzywuzzy import fuzz
app = Flask(__name__)
app.config.from_object('setting.Config')
doggie = watcher.Watcher()
if len(imdbInfo.query.all()) == 0:
# populate empty table
for i in doggie.tracked_shows:
dummy = imdbInfo(i["id"], i["title"], i["poster"])
db.session.add(dummy)
db.session.commit()
@app.route('/')
def home():
return render_template('homepage.html')
@app.route('/movies', methods=['GET','POST'])
def movies():
if request.method == 'POST':
show_name = request.form['showName']
email = request.form['email']
return '{}, {}'.format(show_name, email)
return redirect(url_for('home'))
@app.route('/search', methods=["GET", "POST"])
def search():
search_string = request.values['q'].strip()
if search_string == '':
return redirect(url_for('shows', id='+'.join(k for k in doggie.get_show_titles())))
#show_object = imdbInfo.query.filter(imdbInfo.Title.like("%{}%".format(request.values['q']))).first_or_404()
fuzzes = ((k, fuzz.partial_ratio(search_string, k)) for k in doggie.get_show_titles())
fuzzes = sorted(fuzzes, key=lambda x: x[1], reverse=True)
#print(fuzzes[:3])
filter_fuzzes = (fuzz for fuzz in fuzzes if fuzz[1] >= 60)
param_str = '+'.join(name[0] for name in filter_fuzzes)
if param_str:
return redirect(url_for('shows',id=param_str))
else:
abort(404)
@app.route('/shows/')
def shows():
shows = request.args.get('id').split('+')
show_objects = (imdbInfo.query.filter_by(Title=k).first().TTid for k in shows)
#print(show_objects)
if show_objects:
return render_template('index.html',
images=["../static/images/{}.jpg".format(k) for k in show_objects])
# @app.route('/movies')
# def home_page():
# images = os.path.join(os.path.dirname(__file__), 'static/images/')
# img_fi = os.listdir(images)
# img_fi = ['{}{}'.format(images, urllib.parse.quote(f)) for f in img_fi]
# #img_fi = ['{}{}'.format(images, f) for f in img_fi]
# return render_template('index.html', images=img_fi)
if __name__ == '__main__':
app.run(port=DevelopmentConfig.PORT, debug=DevelopmentConfig.DEBUG)
| mit | Python |
f4dd7dfe6294d02b22ed0fbd1ac29e1c401ac758 | fix image url | Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith | lazyblacksmith/models/user/user.py | lazyblacksmith/models/user/user.py | # -*- encoding: utf-8 -*-
from . import db
from lazyblacksmith.models.utcdatetime import UTCDateTime
from flask_login import UserMixin
from sqlalchemy import func
class User(db.Model, UserMixin):
character_id = db.Column(
db.BigInteger,
primary_key=True,
autoincrement=False
)
character_owner_hash = db.Column(db.String(255))
character_name = db.Column(db.String(200))
is_admin = db.Column(db.Boolean, default=False)
is_corp_director = db.Column(db.Boolean, default=False)
corporation_id = db.Column(db.BigInteger, nullable=True)
current_login_at = db.Column(
UTCDateTime(timezone=True),
server_default=func.now(),
)
created_at = db.Column(
UTCDateTime(timezone=True),
server_default=func.now()
)
updated_at = db.Column(
UTCDateTime(timezone=True),
server_default=func.now(),
onupdate=func.now()
)
# foreign keys
main_character_id = db.Column(
db.BigInteger,
db.ForeignKey('user.character_id'),
nullable=True
)
main_character = db.relationship(
'User',
remote_side=[character_id],
backref=db.backref('alts_characters', lazy='dynamic')
)
# methods
def get_portrait_url(self, datasource='tranquility', size=128):
"""returns URL to Character portrait from EVE Image Server"""
return "{0}/character/{1}/portrait/?size={3}&tenant={4}".format(
'https://images.evetech.net',
self.character_id,
size,
datasource
)
def get_id(self):
return self.character_id
| # -*- encoding: utf-8 -*-
from . import db
from lazyblacksmith.models.utcdatetime import UTCDateTime
from esipy import EsiClient
from flask_login import UserMixin
from sqlalchemy import func
class User(db.Model, UserMixin):
character_id = db.Column(
db.BigInteger,
primary_key=True,
autoincrement=False
)
character_owner_hash = db.Column(db.String(255))
character_name = db.Column(db.String(200))
is_admin = db.Column(db.Boolean, default=False)
is_corp_director = db.Column(db.Boolean, default=False)
corporation_id = db.Column(db.BigInteger, nullable=True)
current_login_at = db.Column(
UTCDateTime(timezone=True),
server_default=func.now(),
)
created_at = db.Column(
UTCDateTime(timezone=True),
server_default=func.now()
)
updated_at = db.Column(
UTCDateTime(timezone=True),
server_default=func.now(),
onupdate=func.now()
)
# foreign keys
main_character_id = db.Column(
db.BigInteger,
db.ForeignKey('user.character_id'),
nullable=True
)
main_character = db.relationship(
'User',
remote_side=[character_id],
backref=db.backref('alts_characters', lazy='dynamic')
)
# methods
def get_portrait_url(self, datasource='tranquility', size=128):
"""returns URL to Character portrait from EVE Image Server"""
return "{0}Character/{1}_{2}.jpg".format(
EsiClient.__image_server__[datasource],
self.character_id,
size
)
def get_id(self):
return self.character_id
| bsd-3-clause | Python |
a8c70e2f470714dc4365e551c4ba266ff14ec0bd | Revise problem docstring | bowen0701/algorithms_data_structures | lc0983_minimum_cost_for_tickets.py | lc0983_minimum_cost_for_tickets.py | """Leetcode 983. Minimum Cost For Tickets
Medium
URL: https://leetcode.com/problems/minimum-cost-for-tickets/
In a country popular for train travel, you have planned some train travelling
one year in advance. The days of the year that you will travel is given as
an array days. Each day is an integer from 1 to 365.
Train tickets are sold in 3 different ways:
- a 1-day pass is sold for costs[0] dollars;
- a 7-day pass is sold for costs[1] dollars;
- a 30-day pass is sold for costs[2] dollars.
The passes allow that many days of consecutive travel.
For example, if we get a 7-day pass on day 2, then we can travel for 7 days:
day 2, 3, 4, 5, 6, 7, and 8.
Return the minimum number of dollars you need to travel every day in the given
list of days.
Example 1:
Input: days = [1,4,6,7,8,20], costs = [2,7,15]
Output: 11
Explanation:
For example, here is one way to buy passes that lets you travel your travel plan:
On day 1, you bought a 1-day pass for costs[0] = $2, which covered day 1.
On day 3, you bought a 7-day pass for costs[1] = $7, which covered days 3, 4, ..., 9.
On day 20, you bought a 1-day pass for costs[0] = $2, which covered day 20.
In total you spent $11 and covered all the days of your travel.
Example 2:
Input: days = [1,2,3,4,5,6,7,8,9,10,30,31], costs = [2,7,15]
Output: 17
Explanation:
For example, here is one way to buy passes that lets you travel your travel plan:
On day 1, you bought a 30-day pass for costs[2] = $15 which covered days
1, 2, ..., 30.
On day 31, you bought a 1-day pass for costs[0] = $2 which covered day 31.
In total you spent $17 and covered all the days of your travel.
Note:
- 1 <= days.length <= 365
- 1 <= days[i] <= 365
- days is in strictly increasing order.
- costs.length == 3
- 1 <= costs[i] <= 1000
"""
class Solution(object):
def mincostTickets(self, days, costs):
"""
:type days: List[int]
:type costs: List[int]
:rtype: int
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| """Leetcode 983. Minimum Cost For Tickets
Medium
URL: https://leetcode.com/problems/minimum-cost-for-tickets/
In a country popular for train travel, you have planned some train travelling
one year in advance. The days of the year that you will travel is given as
an array days. Each day is an integer from 1 to 365.
Train tickets are sold in 3 different ways:
- a 1-day pass is sold for costs[0] dollars;
- a 7-day pass is sold for costs[1] dollars;
- a 30-day pass is sold for costs[2] dollars.
The passes allow that many days of consecutive travel.
For example, if we get a 7-day pass on day 2, then we can travel for 7 days:
day 2, 3, 4, 5, 6, 7, and 8.
Return the minimum number of dollars you need to travel every day in the given
list of days.
Example 1:
Input: days = [1,4,6,7,8,20], costs = [2,7,15]
Output: 11
Explanation:
For example, here is one way to buy passes that lets you travel your travel plan:
On day 1, you bought a 1-day pass for costs[0] = $2, which covered day 1.
On day 3, you bought a 7-day pass for costs[1] = $7, which covered days 3, 4, ..., 9.
On day 20, you bought a 1-day pass for costs[0] = $2, which covered day 20.
In total you spent $11 and covered all the days of your travel.
Example 2:
Input: days = [1,2,3,4,5,6,7,8,9,10,30,31], costs = [2,7,15]
Output: 17
Explanation:
For example, here is one way to buy passes that lets you travel your travel plan:
On day 1, you bought a 30-day pass for costs[2] = $15 which covered days 1, 2, ..., 30.
On day 31, you bought a 1-day pass for costs[0] = $2 which covered day 31.
In total you spent $17 and covered all the days of your travel.
Note:
- 1 <= days.length <= 365
- 1 <= days[i] <= 365
- days is in strictly increasing order.
- costs.length == 3
- 1 <= costs[i] <= 1000
"""
class Solution(object):
def mincostTickets(self, days, costs):
"""
:type days: List[int]
:type costs: List[int]
:rtype: int
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
beea4a514db73387fca80859a9cb8e7afbc21f27 | Update ex7.py | Kaggle/learntools,Kaggle/learntools | learntools/machine_learning/ex7.py | learntools/machine_learning/ex7.py | import numpy as np
from numpy import array
import pandas as pd
from learntools.core import *
class CheckSubmittablePreds(CodingProblem):
_var = 'test_preds'
_solution = CS("""
# In previous code cell
rf_model_on_full_data = RandomForestRegressor()
rf_model_on_full_data.fit(X, y)
# Then in last code cell
test_data_path = '../input/test.csv'
test_data = pd.read_csv(test_data_path)
test_X = test_data[features]
test_preds = rf_model_on_full_data.predict(test_X)
output = pd.DataFrame({'Id': test_data.Id,
'SalePrice': test_preds})
output.to_csv('submission.csv', index=False)
""")
def check(self, test_preds):
assert type(test_preds) == np.ndarray, "test_preds should be a numpy array but instead it is {}".format(type(test_preds))
assert test_preds.shape == (1459,), "Your predictions don't look right. It should be a numpy array of shape (1459,). But the actual shape is {}".format(test_preds.shape)
qvars = bind_exercises(globals(), [
CheckSubmittablePreds
],
var_format='step_{n}',
)
__all__ = list(qvars)
| import numpy as np
from numpy import array
import pandas as pd
from learntools.core import *
class CheckSubmittablePreds(CodingProblem):
_var = 'test_preds'
_solution = CS("""
# In previous code cell
rf_model_on_full_data = RandomForestRegressor()
rf_model_on_full_data.fit(X, y)
# Then in last code cell
test_data_path = '../input/home-data-for-ml-course/test.csv'
test_data = pd.read_csv(test_data_path)
test_X = test_data[features]
test_preds = rf_model_on_full_data.predict(test_X)
output = pd.DataFrame({'Id': test_data.Id,
'SalePrice': test_preds})
output.to_csv('submission.csv', index=False)
""")
def check(self, test_preds):
assert type(test_preds) == np.ndarray, "test_preds should be a numpy array but instead it is {}".format(type(test_preds))
assert test_preds.shape == (1459,), "Your predictions don't look right. It should be a numpy array of shape (1459,). But the actual shape is {}".format(test_preds.shape)
qvars = bind_exercises(globals(), [
CheckSubmittablePreds
],
var_format='step_{n}',
)
__all__ = list(qvars)
| apache-2.0 | Python |
d93d960319e22badccd68499df11f2a728dbbc04 | Fix test_utils_project under Windows | kmike/scrapy,ArturGaspar/scrapy,eLRuLL/scrapy,starrify/scrapy,elacuesta/scrapy,elacuesta/scrapy,ArturGaspar/scrapy,finfish/scrapy,scrapy/scrapy,dangra/scrapy,Ryezhang/scrapy,kmike/scrapy,ArturGaspar/scrapy,scrapy/scrapy,eLRuLL/scrapy,pablohoffman/scrapy,wujuguang/scrapy,finfish/scrapy,Ryezhang/scrapy,finfish/scrapy,pawelmhm/scrapy,wujuguang/scrapy,starrify/scrapy,elacuesta/scrapy,kmike/scrapy,scrapy/scrapy,starrify/scrapy,pawelmhm/scrapy,wujuguang/scrapy,eLRuLL/scrapy,dangra/scrapy,pablohoffman/scrapy,Ryezhang/scrapy,pablohoffman/scrapy,dangra/scrapy,pawelmhm/scrapy | tests/test_utils_project.py | tests/test_utils_project.py | import unittest
import os
import tempfile
import shutil
import contextlib
from scrapy.utils.project import data_path
@contextlib.contextmanager
def inside_a_project():
prev_dir = os.getcwd()
project_dir = tempfile.mkdtemp()
try:
os.chdir(project_dir)
with open('scrapy.cfg', 'w') as f:
# create an empty scrapy.cfg
f.close()
yield project_dir
finally:
os.chdir(prev_dir)
shutil.rmtree(project_dir)
class ProjectUtilsTest(unittest.TestCase):
def test_data_path_outside_project(self):
self.assertEqual(
os.path.join('.scrapy', 'somepath'),
data_path('somepath')
)
abspath = os.path.join(os.path.sep, 'absolute', 'path')
self.assertEqual(abspath, data_path(abspath))
def test_data_path_inside_project(self):
with inside_a_project() as proj_path:
expected = os.path.join(proj_path, '.scrapy', 'somepath')
self.assertEqual(
os.path.realpath(expected),
os.path.realpath(data_path('somepath'))
)
abspath = os.path.join(os.path.sep, 'absolute', 'path')
self.assertEqual(abspath, data_path(abspath))
| import unittest
import os
import tempfile
import shutil
import contextlib
from scrapy.utils.project import data_path
@contextlib.contextmanager
def inside_a_project():
prev_dir = os.getcwd()
project_dir = tempfile.mkdtemp()
try:
os.chdir(project_dir)
with open('scrapy.cfg', 'w') as f:
# create an empty scrapy.cfg
f.close()
yield project_dir
finally:
os.chdir(prev_dir)
shutil.rmtree(project_dir)
class ProjectUtilsTest(unittest.TestCase):
def test_data_path_outside_project(self):
self.assertEqual('.scrapy/somepath', data_path('somepath'))
self.assertEqual('/absolute/path', data_path('/absolute/path'))
def test_data_path_inside_project(self):
with inside_a_project() as proj_path:
expected = os.path.join(proj_path, '.scrapy', 'somepath')
self.assertEqual(
os.path.realpath(expected),
os.path.realpath(data_path('somepath'))
)
self.assertEqual('/absolute/path', data_path('/absolute/path'))
| bsd-3-clause | Python |
0c2112133146c19b4c2dc246d3927ee1b4f2d20c | Use 0.0.5 model. | izimobile/libshorttext,izimobile/libshorttext,izimobile/libshorttext,izimobile/libshorttext,izimobile/libshorttext | blvd_analyze.py | blvd_analyze.py | # coding=utf-8
import blvd_text
import json
from libshorttext.analyzer import *
from libshorttext.classifier import *
analyzer = Analyzer('outputs/0.0.5.model')
import zerorpc
import logging
logging.basicConfig()
class BlvdAnalyzer():
def __init__(self):
self.is_currently_useless = True
@staticmethod
def run(text):
tokens, indices, word_list = blvd_text.tokenize_with_indices(text)
text = ' '.join(tokens)
prediction_res = predict_single_text(str(text), analyzer.model)
decvals = prediction_res.decvals
features, weights, labels = analyzer.model.get_weight(str(text))
max_decval = max(decvals)
idx = decvals.index(max_decval)
label = labels[idx]
if max_decval <= 0:
decvals[idx] = 0.1e-10 # hacking the way out of divide by zero problem.
if label == 'skipped':
skipped_decval = decvals[idx]
nb_decval = max(decvals[:idx] + decvals[idx+1:]) # nb = 'next best'
nb_idx = decvals.index(nb_decval)
ratio = nb_decval / skipped_decval
if ratio > 0.2:
idx = nb_idx
label = labels[idx]
label_weights = []
# probably maps or something clever
for weight in weights:
label_weights.append(weight[idx])
if label_weights:
feature_idx = label_weights.index(max(label_weights))
feature = features[feature_idx]
token_idx = None
try:
token_idx = tokens.index(feature)
except ValueError:
for token in tokens:
if feature[0] in token:
token_idx = tokens.index(token)
word_idx = indices[token_idx]
word = word_list[word_idx]
return json.dumps({'relWord': word, 'tag': label})
else:
return json.dumps({'relWord': None, 'tag': 'skipped'})
s = zerorpc.Server(BlvdAnalyzer(), heartbeat=None)
s.bind('tcp://0.0.0.0:4241')
s.run()
| # coding=utf-8
import blvd_text
import json
from libshorttext.analyzer import *
from libshorttext.classifier import *
analyzer = Analyzer('outputs/0.0.3.model')
import zerorpc
import logging
logging.basicConfig()
class BlvdAnalyzer():
def __init__(self):
self.is_currently_useless = True
@staticmethod
def run(text):
tokens, indices, word_list = blvd_text.tokenize_with_indices(text)
text = ' '.join(tokens)
prediction_res = predict_single_text(str(text), analyzer.model)
decvals = prediction_res.decvals
features, weights, labels = analyzer.model.get_weight(str(text))
max_decval = max(decvals)
idx = decvals.index(max_decval)
label = labels[idx]
if max_decval <= 0:
decvals[idx] = 0.1e-10 # hacking the way out of divide by zero problem.
if label == 'skipped':
skipped_decval = decvals[idx]
nb_decval = max(decvals[:idx] + decvals[idx+1:]) # nb = 'next best'
nb_idx = decvals.index(nb_decval)
ratio = nb_decval / skipped_decval
if ratio > 0.2:
idx = nb_idx
label = labels[idx]
label_weights = []
# probably maps or something clever
for weight in weights:
label_weights.append(weight[idx])
if label_weights:
feature_idx = label_weights.index(max(label_weights))
feature = features[feature_idx]
token_idx = None
try:
token_idx = tokens.index(feature)
except ValueError:
for token in tokens:
if feature[0] in token:
token_idx = tokens.index(token)
word_idx = indices[token_idx]
word = word_list[word_idx]
return json.dumps({'relWord': word, 'tag': label})
else:
return json.dumps({'relWord': None, 'tag': 'skipped'})
s = zerorpc.Server(BlvdAnalyzer(), heartbeat=None)
s.bind('tcp://0.0.0.0:4241')
s.run()
| bsd-3-clause | Python |
f082b03421794509f2db736aca3d93f850e2c85e | Test new vim | cancro7/gem5,cancro7/gem5,cancro7/gem5,cancro7/gem5,cancro7/gem5,cancro7/gem5,cancro7/gem5 | configs/lapo/reg_fault.py | configs/lapo/reg_fault.py | """ This file creates a barebones system and executes 'hello', a simple Hello
World application.
This config file assumes that the x86 ISA was built.
See gem5/configs/learning_gem5/part1/simple.py for a general script.
"""
# import the m5 (gem5) library created when gem5 is built
import m5
# import all of the SimObjects
from m5.objects import *
import sys
# create the system we are going to simulate
system = System()
# Set the clock fequency of the system (and all of its children)
system.clk_domain = SrcClockDomain()
system.clk_domain.clock = '1GHz'
system.clk_domain.voltage_domain = VoltageDomain()
# Set up the system
system.mem_mode = 'timing' # Use timing accesses
system.mem_ranges = [AddrRange('512MB')] # Create an address range
# Create a simple CPU
system.cpu = TimingSimpleCPU()
system.cpu.monitor = CommMonitor()
#system.cpu.monitor.trace = MemTraceProbe(trace_file="my_trace.trc.gz")
# Create a memory bus, a coherent crossbar, in this case
system.membus = SystemXBar()
# Hook the CPU ports up to the membus
#system.cpu.icache_port = system.membus.slave
system.cpu.dcache_port = system.membus.slave
system.cpu.icache_port = system.cpu.monitor.slave
system.cpu.monitor.master = system.membus.slave
# create the interrupt controller for the CPU and connect to the membus
system.cpu.createInterruptController()
#system.cpu.interrupts[0].pio = system.membus.master
#system.cpu.interrupts[0].int_master = system.membus.slave
#system.cpu.interrupts[0].int_slave = system.membus.master
# Create a DDR3 memory controller and connect it to the membus
system.mem_ctrl = DDR3_1600_x64()
system.mem_ctrl.range = system.mem_ranges[0]
system.mem_ctrl.port = system.membus.master
# Connect the system up to the membus
system.system_port = system.membus.slave
# Create a process for a simple "Hello World" application
process = LiveProcess()
# Set the command
# cmd is a list which begins with the executable (like argv)
process.cmd = sys.argv[1]
# Set the cpu to use the process as its workload and create thread contexts
system.cpu.workload = process
system.cpu.createThreads()
# set up the root SimObject and start the simulation
root = Root(full_system = False, system = system)
root.registerFault = RegisterFault()
root.registerFault.startTick = 143930180
root.registerFault.system = system
root.registerFault.registerCategory = 0
root.registerFault.faultRegister = 13
root.registerFault.bitPosition = 4
# instantiate all of the objects we've created above
m5.instantiate()
print "Beginning simulation!"
exit_event = m5.simulate()
print 'Exiting @ tick %i because %s' % (m5.curTick(), exit_event.getCause())
| """ This file creates a barebones system and executes 'hello', a simple Hello
World application.
This config file assumes that the x86 ISA was built.
See gem5/configs/learning_gem5/part1/simple.py for a general script.
"""
# import the m5 (gem5) library created when gem5 is built
import m5
# import all of the SimObjects
from m5.objects import *
import sys
# create the system we are going to simulate
system = System()
# Set the clock fequency of the system (and all of its children)
system.clk_domain = SrcClockDomain()
system.clk_domain.clock = '1GHz'
system.clk_domain.voltage_domain = VoltageDomain()
# Set up the system
system.mem_mode = 'timing' # Use timing accesses
system.mem_ranges = [AddrRange('512MB')] # Create an address range
# Create a simple CPU
system.cpu = TimingSimpleCPU()
system.cpu.monitor = CommMonitor()
#system.cpu.monitor.trace = MemTraceProbe(trace_file="my_trace.trc.gz")
# Create a memory bus, a coherent crossbar, in this case
system.membus = SystemXBar()
# Hook the CPU ports up to the membus
#system.cpu.icache_port = system.membus.slave
system.cpu.dcache_port = system.membus.slave
system.cpu.icache_port = system.cpu.monitor.slave
system.cpu.monitor.master = system.membus.slave
# create the interrupt controller for the CPU and connect to the membus
system.cpu.createInterruptController()
#system.cpu.interrupts[0].pio = system.membus.master
#system.cpu.interrupts[0].int_master = system.membus.slave
#system.cpu.interrupts[0].int_slave = system.membus.master
# Create a DDR3 memory controller and connect it to the membus
system.mem_ctrl = DDR3_1600_x64()
system.mem_ctrl.range = system.mem_ranges[0]
system.mem_ctrl.port = system.membus.master
# Connect the system up to the membus
system.system_port = system.membus.slave
# Create a process for a simple "Hello World" application
process = LiveProcess()
# Set the command
# cmd is a list which begins with the executable (like argv)
process.cmd = sys.argv[1]
# Set the cpu to use the process as its workload and create thread contexts
system.cpu.workload = process
system.cpu.createThreads()
# set up the root SimObject and start the simulation
root = Root(full_system = False, system = system)
root.registerFault = RegisterFault()
root.registerFault.startTick = 143930180
root.registerFault.system = system
root.registerFault.registerCategory = 0
root.registerFault.faultRegister = 13
root.registerFault.bitPosition = 3
# instantiate all of the objects we've created above
m5.instantiate()
print "Beginning simulation!"
exit_event = m5.simulate()
print 'Exiting @ tick %i because %s' % (m5.curTick(), exit_event.getCause())
| bsd-3-clause | Python |
a9da2a04cb05af1cc65d1e4535a514a710d1f24b | Fix #389: Spelling of deprecated. | RaD/django-south,philipn/django-south,nimnull/django-south,philipn/django-south,RaD/django-south,nimnull/django-south,RaD/django-south | south/management/commands/startmigration.py | south/management/commands/startmigration.py | """
Now-obsolete startmigration command.
"""
from optparse import make_option
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--model', action='append', dest='added_model_list', type='string',
help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --model parameters.'),
make_option('--add-field', action='append', dest='added_field_list', type='string',
help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--add-index', action='append', dest='added_index_list', type='string',
help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--initial', action='store_true', dest='initial', default=False,
help='Generate the initial schema for the app.'),
make_option('--auto', action='store_true', dest='auto', default=False,
help='Attempt to automatically detect differences from the last migration.'),
make_option('--freeze', action='append', dest='freeze_list', type='string',
help='Freeze the specified model(s). Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'),
make_option('--stdout', action='store_true', dest='stdout', default=False,
help='Print the migration to stdout instead of writing it to a file.'),
)
help = "Depereciated command"
def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, added_index_list=None, **options):
print "The 'startmigration' command is now deprecated; please use the new 'schemamigration' and 'datamigration' commands." | """
Now-obsolete startmigration command.
"""
from optparse import make_option
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--model', action='append', dest='added_model_list', type='string',
help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --model parameters.'),
make_option('--add-field', action='append', dest='added_field_list', type='string',
help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--add-index', action='append', dest='added_index_list', type='string',
help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--initial', action='store_true', dest='initial', default=False,
help='Generate the initial schema for the app.'),
make_option('--auto', action='store_true', dest='auto', default=False,
help='Attempt to automatically detect differences from the last migration.'),
make_option('--freeze', action='append', dest='freeze_list', type='string',
help='Freeze the specified model(s). Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'),
make_option('--stdout', action='store_true', dest='stdout', default=False,
help='Print the migration to stdout instead of writing it to a file.'),
)
help = "Depereciated command"
def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, added_index_list=None, **options):
print "The 'startmigration' command is now depreciated; please use the new 'schemamigration' and 'datamigration' commands." | apache-2.0 | Python |
1da262affaa5bf2a9ca50936c327ec63090dd275 | remove assert | benagricola/exabgp,PowerDNS/exabgp,blablacar/exabgp,dneiter/exabgp,earies/exabgp,lochiiconnectivity/exabgp,PowerDNS/exabgp,fugitifduck/exabgp,dneiter/exabgp,earies/exabgp,benagricola/exabgp,lochiiconnectivity/exabgp,dneiter/exabgp,blablacar/exabgp,earies/exabgp,fugitifduck/exabgp,benagricola/exabgp,lochiiconnectivity/exabgp,fugitifduck/exabgp,blablacar/exabgp,PowerDNS/exabgp | lib/exabgp/bgp/message/update/attribute/community/extended/rt.py | lib/exabgp/bgp/message/update/attribute/community/extended/rt.py | # encoding: utf-8
"""
rt.py
Created by Thomas Mangin on 2014-06-20.
Copyright (c) 2014-2014 Orange. All rights reserved.
"""
import socket
from struct import pack,unpack
from exabgp.bgp.message.open.asn import ASN
from exabgp.bgp.message.update.attribute.community.extended import ExtendedCommunity
# ================================================================== RouteTarget
class RouteTarget (ExtendedCommunity):
COMMUNITY_TYPE = 0x00
COMMUNITY_SUBTYPE = 0x02
def __init__ (self,asn,ip,number):
# assert(asn is None or ip is None)
# assert(asn is not None or ip is not None)
if not asn is None:
self.asn = asn
self.number = number
self.ip = ""
else:
self.ip = ip
self.number = number
self.asn = 0
self.community = self.pack()
def pack (self):
if self.asn is not None:
# type could also be 0x02 -> FIXME check RFC
#return pack( 'BB!H!L', 0x00,0x02, self.asn, self.number)
return pack('!BBHL',0x00,0x02,self.asn,self.number)
else:
encoded_ip = socket.inet_pton(socket.AF_INET,self.ip)
return pack('!BB4sH',0x01,0x02,encoded_ip,self.number)
def __str__ (self):
if self.asn is not None:
return "target:%s:%d" % (str(self.asn), self.number)
else:
return "target:%s:%d" % (self.ip, self.number)
def __cmp__ (self,other):
if not isinstance(other,self.__class__):
return -1
if self.asn != other.asn:
return -1
if self.ip != other.ip:
return -1
if self.number != other.number:
return -1
return 0
def __hash__ (self):
return hash(self.community)
@staticmethod
def unpack(data):
type_ = ord(data[0]) & 0x0F
stype = ord(data[1])
data = data[2:]
if stype == 0x02: # XXX: FIXME: unclean
if type_ in (0x00,0x02):
asn,number = unpack('!HL',data[:6])
return RouteTarget(ASN(asn),None,number)
if type_ == 0x01:
ip = socket.inet_ntop(data[0:4])
number = unpack('!H',data[4:6])[0]
return RouteTarget(None,ip,number)
| # encoding: utf-8
"""
rt.py
Created by Thomas Mangin on 2014-06-20.
Copyright (c) 2014-2014 Orange. All rights reserved.
"""
import socket
from struct import pack,unpack
from exabgp.bgp.message.open.asn import ASN
from exabgp.bgp.message.update.attribute.community.extended import ExtendedCommunity
# ================================================================== RouteTarget
class RouteTarget (ExtendedCommunity):
COMMUNITY_TYPE = 0x00
COMMUNITY_SUBTYPE = 0x02
def __init__ (self,asn,ip,number):
assert (asn is None or ip is None)
assert (asn is not None or ip is not None)
if not asn is None:
self.asn = asn
self.number = number
self.ip = ""
else:
self.ip = ip
self.number = number
self.asn = 0
self.community = self.pack()
def pack (self):
if self.asn is not None:
# type could also be 0x02 -> FIXME check RFC
#return pack( 'BB!H!L', 0x00,0x02, self.asn, self.number)
return pack('!BBHL',0x00,0x02,self.asn,self.number)
else:
encoded_ip = socket.inet_pton(socket.AF_INET,self.ip)
return pack('!BB4sH',0x01,0x02,encoded_ip,self.number)
def __str__ (self):
if self.asn is not None:
return "target:%s:%d" % (str(self.asn), self.number)
else:
return "target:%s:%d" % (self.ip, self.number)
def __cmp__ (self,other):
if not isinstance(other,self.__class__):
return -1
if self.asn != other.asn:
return -1
if self.ip != other.ip:
return -1
if self.number != other.number:
return -1
return 0
def __hash__ (self):
return hash(self.community)
@staticmethod
def unpack(data):
type_ = ord(data[0]) & 0x0F
stype = ord(data[1])
data = data[2:]
if stype == 0x02: # XXX: FIXME: unclean
if type_ in (0x00,0x02):
asn,number = unpack('!HL',data[:6])
return RouteTarget(ASN(asn),None,number)
if type_ == 0x01:
ip = socket.inet_ntop(data[0:4])
number = unpack('!H',data[4:6])[0]
return RouteTarget(None,ip,number)
| bsd-3-clause | Python |
f0083b4053ceb43f9cf6a386f01f377736783f9a | Add unicode for NodeRelation | binoculars/osf.io,pattisdr/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,hmoco/osf.io,mattclark/osf.io,Nesiehr/osf.io,sloria/osf.io,Nesiehr/osf.io,pattisdr/osf.io,erinspace/osf.io,caseyrollins/osf.io,mfraezz/osf.io,leb2dg/osf.io,hmoco/osf.io,adlius/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,aaxelb/osf.io,baylee-d/osf.io,chennan47/osf.io,mfraezz/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,mattclark/osf.io,caseyrollins/osf.io,leb2dg/osf.io,binoculars/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,baylee-d/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,crcresearch/osf.io,chrisseto/osf.io,crcresearch/osf.io,cwisecarver/osf.io,laurenrevere/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,hmoco/osf.io,mfraezz/osf.io,caneruguz/osf.io,sloria/osf.io,aaxelb/osf.io,cwisecarver/osf.io,chrisseto/osf.io,icereval/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,sloria/osf.io,chennan47/osf.io,icereval/osf.io,leb2dg/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,chrisseto/osf.io,adlius/osf.io,felliott/osf.io,cwisecarver/osf.io,erinspace/osf.io,icereval/osf.io,caneruguz/osf.io,mattclark/osf.io,TomBaxter/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,felliott/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,mfraezz/osf.io,felliott/osf.io,felliott/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,hmoco/osf.io,leb2dg/osf.io,pattisdr/osf.io,caneruguz/osf.io,caneruguz/osf.io,cwisecarver/osf.io,crcresearch/osf.io | osf/models/node_relation.py | osf/models/node_relation.py | from django.db import models
from .base import BaseModel, ObjectIDMixin
class NodeRelation(ObjectIDMixin, BaseModel):
parent = models.ForeignKey('AbstractNode', related_name='node_relations')
child = models.ForeignKey('AbstractNode')
is_node_link = models.BooleanField(default=False, db_index=True)
def __unicode__(self):
return '{}, parent={}, child={}'.format(
'Node Link' if self.is_node_link else 'Component',
self.parent.__unicode__(),
self.child.__unicode__())
@property
def node(self):
"""For v1 compat."""
return self.child
class Meta:
order_with_respect_to = 'parent'
unique_together = ('parent', 'child')
index_together = (
('is_node_link', 'child', 'parent'),
)
| from django.db import models
from .base import BaseModel, ObjectIDMixin
class NodeRelation(ObjectIDMixin, BaseModel):
parent = models.ForeignKey('AbstractNode', related_name='node_relations')
child = models.ForeignKey('AbstractNode')
is_node_link = models.BooleanField(default=False, db_index=True)
@property
def node(self):
"""For v1 compat."""
return self.child
class Meta:
order_with_respect_to = 'parent'
unique_together = ('parent', 'child')
index_together = (
('is_node_link', 'child', 'parent'),
)
| apache-2.0 | Python |
6d11692f17f1c23ad0267d684c569c171b0f06e4 | Print MiB/s stats for pickling. | tabish121/pyActiveMQ,tabish121/pyActiveMQ,tabish121/pyActiveMQ | src/examples/numpypickle.py | src/examples/numpypickle.py | #!/usr/bin/env python
# Copyright 2007 Albert Strasheim <fullung@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyactivemq import ActiveMQConnectionFactory
from pyactivemq import AcknowledgeMode
from pyactivemq import DeliveryMode
from numpy.testing import assert_array_equal
import numpy as N
npickles = 1000
# generate random array containing 100*100 doubles
x = N.random.randn(100, 100)
f = ActiveMQConnectionFactory('tcp://localhost:61613?wireFormat=stomp')
conn = f.createConnection()
session = conn.createSession(AcknowledgeMode.DUPS_OK_ACKNOWLEDGE)
queue = session.createQueue('arrays')
consumer = session.createConsumer(queue)
producer = session.createProducer(queue)
producer.deliveryMode = DeliveryMode.NON_PERSISTENT
conn.start()
def test():
for i in xrange(npickles):
m = session.createBytesMessage()
# pickle array into BytesMessage's body
m.bodyBytes = x.dumps()
producer.send(m)
m2 = consumer.receive(1000)
assert m2 is not None
# unpickle array from BytesMessage's body
y = N.loads(m2.bodyBytes)
assert_array_equal(x, y)
from timeit import Timer
t = Timer('test()', 'from __main__ import test')
delta = t.timeit(1)
conn.close()
mibps = npickles * x.nbytes / (1024.0 * 1024.0) / delta
print 'pickled %d arrays, each %d bytes, in %f seconds (%.4f MiB/s)' % \
(npickles, x.nbytes, delta, mibps)
| #!/usr/bin/env python
# Copyright 2007 Albert Strasheim <fullung@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyactivemq import ActiveMQConnectionFactory
from pyactivemq import AcknowledgeMode
from pyactivemq import DeliveryMode
from numpy.testing import assert_array_equal
import numpy as N
npickles = 1000
# generate random array containing 100*100 doubles
x = N.random.randn(100, 100)
f = ActiveMQConnectionFactory('tcp://localhost:61613?wireFormat=stomp')
conn = f.createConnection()
session = conn.createSession(AcknowledgeMode.DUPS_OK_ACKNOWLEDGE)
queue = session.createQueue('arrays')
consumer = session.createConsumer(queue)
producer = session.createProducer(queue)
producer.deliveryMode = DeliveryMode.NON_PERSISTENT
conn.start()
def test():
for i in xrange(npickles):
m = session.createBytesMessage()
# pickle array into BytesMessage's body
m.bodyBytes = x.dumps()
producer.send(m)
m2 = consumer.receive(1000)
assert m2 is not None
# unpickle array from BytesMessage's body
y = N.loads(m2.bodyBytes)
assert_array_equal(x, y)
from timeit import Timer
t = Timer('test()', 'from __main__ import test')
delta = t.timeit(1)
conn.close()
print 'pickled %d arrays, each %d bytes, in %f seconds' % \
(npickles, x.nbytes, delta)
| apache-2.0 | Python |
9c4ae70661cf3a4e4d2f3876b3e81e95fae3f619 | revise __openerp__.py | amdeb/odoo-connector | connector8/__openerp__.py | connector8/__openerp__.py | # -*- coding: utf-8 -*-
{'name': 'Connector8',
'version': '0.1',
'author': 'Amdeb',
'license': 'AGPL-3',
'category': 'Generic Modules',
'description': """
This is a port of OCA connector to Odoo 8.0
""",
'depends': ['mail'
],
'data': ['security/connector_security.xml',
'security/ir.model.access.csv',
'queue/model_view.xml',
'queue/queue_data.xml',
'checkpoint/checkpoint_view.xml',
'connector_menu.xml',
'setting_view.xml',
'res_partner_view.xml',
],
'installable': True,
'application': True,
}
| # -*- coding: utf-8 -*-
{'name': 'Connector8',
'version': '0.1',
'author': 'Amdeb',
'license': 'AGPL-3',
'category': 'Generic Modules',
'description': """
This is a port of OCA connector to Odoo 8.0
""",
'depends': ['base',
'base_setup',
],
'data': ['security/connector_security.xml',
'security/ir.model.access.csv',
'queue/model_view.xml',
'queue/queue_data.xml',
'checkpoint/checkpoint_view.xml',
'connector_menu.xml',
'setting_view.xml',
],
'installable': True
}
| agpl-3.0 | Python |
c1075f9d696bae82474c4010731eb6392425e939 | Disable VEP cluster test until mismatch is resolved (#6597) | danking/hail,cseed/hail,danking/hail,cseed/hail,hail-is/hail,cseed/hail,hail-is/hail,hail-is/hail,cseed/hail,danking/hail,danking/hail,danking/hail,danking/hail,hail-is/hail,cseed/hail,cseed/hail,danking/hail,hail-is/hail,hail-is/hail,hail-is/hail,danking/hail,hail-is/hail,cseed/hail,cseed/hail | hail/python/cluster-tests/cluster-vep-check.py | hail/python/cluster-tests/cluster-vep-check.py | import hail as hl
GOLD_STD = 'gs://hail-common/vep/vep/vep_examplars/vep_no_csq_4dc19bc1b.mt/'
GOLD_STD_CSQ = 'gs://hail-common/vep/vep/vep_examplars/vep_csq_4dc19bc1b.mt/'
for path, csq in [(GOLD_STD, False), (GOLD_STD_CSQ, True)]:
print(f"Checking 'hl.vep' replicates on '{path}'")
expected = hl.read_matrix_table(path)
actual = hl.vep(expected.rows().select(), 'gs://hail-common/vep/vep/vep85-loftee-gcloud-testing.json', csq=csq)
actual._force_count()
# vep_result_agrees = actual._same(expected)
# if vep_result_agrees:
# print('TEST PASSED')
# else:
# print('TEST FAILED')
# assert vep_result_agrees
| import hail as hl
GOLD_STD = 'gs://hail-common/vep/vep/vep_examplars/vep_no_csq_4dc19bc1b.mt/'
GOLD_STD_CSQ = 'gs://hail-common/vep/vep/vep_examplars/vep_csq_4dc19bc1b.mt/'
for path, csq in [(GOLD_STD, False), (GOLD_STD_CSQ, True)]:
print(f"Checking 'hl.vep' replicates on '{path}'")
expected = hl.read_matrix_table(path)
actual = hl.vep(expected.select_rows(), 'gs://hail-common/vep/vep/vep85-loftee-gcloud-testing.json', csq=csq)
vep_result_agrees = actual._same(expected)
if vep_result_agrees:
print('TEST PASSED')
else:
print('TEST FAILED')
assert vep_result_agrees
| mit | Python |
95520e1b5020ff805d4bd3f51ac5c64d0f1a3215 | add computing reversed records | it-projects-llc/misc-addons,it-projects-llc/misc-addons,it-projects-llc/misc-addons | base_details/models/base_details.py | base_details/models/base_details.py | # -*- coding: utf-8 -*-
from odoo import fields, models, api
class BaseDetails(models.AbstractModel):
"""Model to be inherited by Model where details field has to be added"""
_name = 'base_details'
def _model_selection(self):
return []
@property
def details(self):
if self.details_model and self.details_model in self.env and self.details_res_id:
details_record = self.env[self.details_model].browse(self.details_res_id)
else:
details_record = None
return details_record
details_model = fields.Selection('_model_selection', string='Detail Model')
details_res_id = fields.Integer(string='Details')
class BaseDetailsRecord(models.AbstractModel):
"""Model to be inherited by Model with details"""
_name = 'base_details_record'
_base_details_model = 'UPDATE_THIS'
@api.multi
def _base_details_reversed(self):
reversed_records = self.env[self._base_details_model].search_read([
('details_model', '=', self._name),
('details_res_id', 'in', self.ids),
], fields=['id', 'details_res_id'])
return ((r['details_res_id'], r['id']) for r in reversed_records)
| # -*- coding: utf-8 -*-
from odoo import fields, models
class BaseDetails(models.AbstractModel):
_name = 'base_details'
def _model_selection(self):
return []
@property
def details(self):
if self.details_model and self.details_model in self.env and self.details_res_id:
details_record = self.env[self.details_model].browse(self.details_res_id)
else:
details_record = None
return details_record
details_model = fields.Selection('_model_selection', string='Detail Model')
details_res_id = fields.Integer(string='Details')
| mit | Python |
85a5a3b716de02b2f091577c8d84d3d5286849e8 | Update frames_rendering.py | duboviy/study_languages | image_translate/frames_rendering.py | image_translate/frames_rendering.py | # need to install python-opencv, pygame, numpy, scipy, PIL
import sys
import pygame
from pygame.locals import *
import opencv
#this is important for capturing/displaying images
from opencv import highgui
def get_image(camera):
img = highgui.cvQueryFrame(camera)
# Add the line below if you need it (Ubuntu 8.04+)
# im = opencv.cvGetMat(im)
# convert Ipl image to PIL image
return opencv.adaptors.Ipl2PIL(img)
def render_flipped_camera():
camera = highgui.cvCreateCameraCapture(0)
fps = 30.0
pygame.init()
pygame.display.set_mode((640, 480))
pygame.display.set_caption("WebCam Demo")
screen = pygame.display.get_surface()
while True:
events = pygame.event.get()
for event in events:
if event.type == QUIT or event.type == KEYDOWN:
sys.exit(0)
im = get_image(camera)
pg_img = pygame.image.frombuffer(im.tostring(), im.size, im.mode)
screen.blit(pg_img, (0, 0))
pygame.display.flip()
pygame.time.delay(int(1000 * 1.0/fps))
if __name__ == "__main__":
render_flipped_camera()
| # need to install python-opencv, pygame, numpy, scipy, PIL
import sys
import pygame
import Image
from pygame.locals import *
import opencv
#this is important for capturing/displaying images
from opencv import highgui
def get_image(camera):
img = highgui.cvQueryFrame(camera)
# Add the line below if you need it (Ubuntu 8.04+)
# im = opencv.cvGetMat(im)
# convert Ipl image to PIL image
return opencv.adaptors.Ipl2PIL(img)
def render_flipped_camera():
camera = highgui.cvCreateCameraCapture(0)
fps = 30.0
pygame.init()
window = pygame.display.set_mode((640, 480))
pygame.display.set_caption("WebCam Demo")
screen = pygame.display.get_surface()
while True:
events = pygame.event.get()
for event in events:
if event.type == QUIT or event.type == KEYDOWN:
sys.exit(0)
im = get_image(camera)
pg_img = pygame.image.frombuffer(im.tostring(), im.size, im.mode)
screen.blit(pg_img, (0, 0))
pygame.display.flip()
pygame.time.delay(int(1000 * 1.0/fps))
if __name__ == "__main__":
render_flipped_camera()
| mit | Python |
7d43f58fbcefae5885c3fd364e26c7f27d1e239a | migrate command fix | condograde/sqlibrist | sqlibrist/commands/migrate.py | sqlibrist/commands/migrate.py | # -*- coding: utf8 -*-
import glob
import os
from sys import stdout
from sqlibrist.helpers import get_engine, get_config, ApplyMigrationFailed
def unapplied_migrations(migration_list, last_migration):
on = False
for migration in migration_list:
if migration.split('/')[-1] == last_migration:
on = True
elif on:
yield migration
def migrate(config, fake, till_migration_name):
engine = get_engine(config)
last_applied_migration = engine.get_last_applied_migration()
if last_applied_migration:
migration_list = unapplied_migrations(
sorted(glob.glob('migrations/*')),
last_applied_migration)
else:
# no migrations at all
migration_list = sorted(glob.glob('migrations/*'))
for migration in migration_list:
with open(os.path.join(migration, 'up.sql')) as f:
up = f.read()
migration_name = migration.split('/')[-1]
stdout.write(u'Applying migration %s... ' % migration_name)
if fake:
stdout.write(u'(fake run) ')
try:
engine.apply_migration(migration_name, up, fake)
except ApplyMigrationFailed:
stdout.write(u'Error, rolled back\n')
else:
stdout.write(u'done\n')
if migration_name.startswith(till_migration_name):
break
def migrate_command(args):
return migrate(config=get_config(args),
fake=args.fake,
till_migration_name=args.migration)
| # -*- coding: utf8 -*-
import glob
import os
from sys import stdout
from sqlibrist.helpers import get_engine, get_config, ApplyMigrationFailed
def unapplied_migrations(migration_list, last_migration):
on = False
for migration in migration_list:
if migration.split('/')[-1] == last_migration:
on = True
elif on:
yield migration
def migrate(config, fake):
engine = get_engine(config)
last_applied_migration = engine.get_last_applied_migration()
if last_applied_migration:
migration_list = unapplied_migrations(
sorted(glob.glob('migrations/*')),
last_applied_migration)
else:
# no migrations at all
migration_list = sorted(glob.glob('migrations/*'))
for migration in migration_list:
with open(os.path.join(migration, 'up.sql')) as f:
up = f.read()
migration_name = migration.split('/')[-1]
stdout.write(u'Applying migration %s... ' % migration_name)
if fake:
stdout.write(u'(fake run) ')
try:
engine.apply_migration(migration_name, up, fake)
except ApplyMigrationFailed:
stdout.write(u'Error, rolled back\n')
else:
stdout.write(u'done\n')
def migrate_command(args):
return migrate(config=get_config(args), fake=args.fake)
| mit | Python |
99227d137c5257e5a850d65114d1d9c30072e738 | Make buildversion.py accept the argument - to print to stdout instead of a file | code-google-com/srcdemo2,AGSPhoenix/srcdemo2,EtiennePerot/srcdemo2,n1889/srcdemo2,AGSPhoenix/srcdemo2,EtiennePerot/srcdemo2,n1889/srcdemo2,EtiennePerot/srcdemo2,code-google-com/srcdemo2,n1889/srcdemo2,EtiennePerot/srcdemo2,xxtbg/srcdemo2,slav9nin/srcdemo2,xxtbg/srcdemo2,code-google-com/srcdemo2,Nofe92/srcdemo2,n1889/srcdemo2,slav9nin/srcdemo2,Nofe92/srcdemo2,AGSPhoenix/srcdemo2,AGSPhoenix/srcdemo2,xxtbg/srcdemo2,n1889/srcdemo2,Nofe92/srcdemo2,xxtbg/srcdemo2,Nofe92/srcdemo2,slav9nin/srcdemo2,AGSPhoenix/srcdemo2,slav9nin/srcdemo2,xxtbg/srcdemo2,slav9nin/srcdemo2,Nofe92/srcdemo2,code-google-com/srcdemo2,AGSPhoenix/srcdemo2,xxtbg/srcdemo2,AGSPhoenix/srcdemo2,n1889/srcdemo2,EtiennePerot/srcdemo2,slav9nin/srcdemo2,EtiennePerot/srcdemo2,code-google-com/srcdemo2,EtiennePerot/srcdemo2,Nofe92/srcdemo2,slav9nin/srcdemo2,n1889/srcdemo2,code-google-com/srcdemo2,Nofe92/srcdemo2,xxtbg/srcdemo2,code-google-com/srcdemo2 | package/any/buildversion.py | package/any/buildversion.py | #!/usr/bin/env python
import sys, time
if '-' in sys.argv:
print(time.strftime('%Y-%m-%d'))
else:
f = open('version.txt', 'w')
f.write(time.strftime('%Y-%m-%d'))
f.close()
| import time
f = open('version.txt', 'w')
f.write(time.strftime('%Y-%m-%d'))
f.close()
| bsd-2-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.