commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
a619d5b35eb88ab71126e53f195190536d71fdb4 | Throw exceptions error responses from server | solarwinds/orionsdk-python | orionsdk/swisclient.py | orionsdk/swisclient.py | import requests
import json
from datetime import datetime
def _json_serial(obj):
"""JSON serializer for objects not serializable by default json code"""
if isinstance(obj, datetime):
serial = obj.isoformat()
return serial
class SwisClient:
def __init__(self, hostname, username, password, verify=False):
self.url = "https://{}:17778/SolarWinds/InformationService/v3/Json/".\
format(hostname)
self.credentials = (username, password)
self.verify = verify
def query(self, query, **params):
return self._req(
"POST",
"Query",
{'query': query, 'parameters': params}).json()
def invoke(self, entity, verb, *args):
return self._req(
"POST",
"Invoke/{}/{}".format(entity, verb), args).json()
def create(self, entity, **properties):
return self._req(
"POST",
"Create/" + entity, properties).json()
def read(self, uri):
return self._req("GET", uri).json()
def update(self, uri, **properties):
self._req("POST", uri, properties)
def delete(self, uri):
self._req("DELETE", uri)
def _req(self, method, frag, data=None):
resp = requests.request(method, self.url + frag,
data=json.dumps(data, default=_json_serial),
verify=self.verify,
auth=self.credentials,
headers={'Content-Type': 'application/json'})
resp.raise_for_status()
return resp
| import requests
import json
from datetime import datetime
def _json_serial(obj):
"""JSON serializer for objects not serializable by default json code"""
if isinstance(obj, datetime):
serial = obj.isoformat()
return serial
class SwisClient:
def __init__(self, hostname, username, password, verify=False):
self.url = "https://{}:17778/SolarWinds/InformationService/v3/Json/".\
format(hostname)
self.credentials = (username, password)
self.verify = verify
def query(self, query, **params):
return self._req(
"POST",
"Query",
{'query': query, 'parameters': params}).json()
def invoke(self, entity, verb, *args):
return self._req(
"POST",
"Invoke/{}/{}".format(entity, verb), args).json()
def create(self, entity, **properties):
return self._req(
"POST",
"Create/" + entity, properties).json()
def read(self, uri):
return self._req("GET", uri).json()
def update(self, uri, **properties):
self._req("POST", uri, properties)
def delete(self, uri):
self._req("DELETE", uri)
def _req(self, method, frag, data=None):
return requests.request(method, self.url + frag,
data=json.dumps(data, default=_json_serial),
verify=self.verify,
auth=self.credentials,
headers={'Content-Type': 'application/json'})
| apache-2.0 | Python |
0da6b77ec037005caf5f0b06949cbf4981d82616 | Fix sensitivity factories | makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin | geotrek/sensitivity/factories.py | geotrek/sensitivity/factories.py | # -*- coding: utf-8 -*-
import factory
from geotrek.authent.factories import StructureRelatedDefaultFactory
from geotrek.common.utils.testdata import dummy_filefield_as_sequence
from . import models
class SportPracticeFactory(factory.DjangoModelFactory):
class Meta:
model = models.SportPractice
name = factory.Sequence(lambda n: u"Practice %s" % n)
class SpeciesFactory(factory.DjangoModelFactory):
class Meta:
model = models.Species
name = factory.Sequence(lambda n: u"Species %s" % n)
pictogram = dummy_filefield_as_sequence('thumbnail %s')
url = factory.Sequence(lambda n: u"http://url%s.com" % n)
period06 = True
period07 = True
category = models.Species.SPECIES
@classmethod
def _prepare(cls, create, **kwargs):
practices = kwargs.pop('practices', None)
species = super(SpeciesFactory, cls)._prepare(create, **kwargs)
if create:
if practices is None:
practices = [SportPracticeFactory.create(), SportPracticeFactory.create()]
for practice in practices:
species.practices.add(practice)
return species
class SensitiveAreaFactory(StructureRelatedDefaultFactory):
class Meta:
model = models.SensitiveArea
species = factory.SubFactory(SpeciesFactory)
geom = 'POLYGON((700000 6600000, 700000 6600003, 700003 6600003, 700003 6600000, 700000 6600000))'
published = True
description = "Blabla"
email = "toto@tata.com"
| # -*- coding: utf-8 -*-
import factory
from geotrek.authent.factories import StructureRelatedDefaultFactory
from geotrek.common.utils.testdata import dummy_filefield_as_sequence
from . import models
class SportPracticeFactory(factory.DjangoModelFactory):
class Meta:
model = models.SportPractice
name = factory.Sequence(lambda n: u"Practice %s" % n)
class SpeciesFactory(factory.DjangoModelFactory):
class Meta:
model = models.Species
name = factory.Sequence(lambda n: u"Species %s" % n)
pictogram = dummy_filefield_as_sequence('thumbnail %s')
url = factory.Sequence(lambda n: u"http://url%s.com" % n)
period06 = True
period07 = True
@classmethod
def _prepare(cls, create, **kwargs):
practices = kwargs.pop('practices', None)
species = super(SpeciesFactory, cls)._prepare(create, **kwargs)
if create:
if practices is None:
practices = [SportPracticeFactory.create(), SportPracticeFactory.create()]
for practice in practices:
species.practices.add(practice)
return species
class SensitiveAreaFactory(StructureRelatedDefaultFactory):
class Meta:
model = models.SensitiveArea
category = models.SensitiveArea.SPECIES
species = factory.SubFactory(SpeciesFactory)
geom = 'POLYGON((700000 6600000, 700000 6600003, 700003 6600003, 700003 6600000, 700000 6600000))'
published = True
description = "Blabla"
email = "toto@tata.com"
| bsd-2-clause | Python |
bce007eb1e89ed66d911827e764d1062dc220d4f | add another potential with steeper slope | adrn/StreamMorphology,adrn/StreamMorphology,adrn/StreamMorphology | streammorphology/potential.py | streammorphology/potential.py | # coding: utf-8
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Third-party
import astropy.units as u
import numpy as np
# Project
import gary.potential as gp
from gary.units import galactic
__all__ = ['potential_registry']
# built-in potentials
potential_registry = dict()
# --------------------------------------------------------------
p1 = gp.LeeSutoTriaxialNFWPotential(v_c=(150*u.km/u.s).to(u.kpc/u.Myr).value,
r_s=40., a=1., b=0.77, c=0.55,
units=galactic)
potential_registry['triaxial-NFW'] = p1
# --------------------------------------------------------------
p1 = gp.LeeSutoTriaxialNFWPotential(v_c=(175*u.km/u.s).to(u.kpc/u.Myr).value,
r_s=20., a=1., b=0.77, c=0.55,
units=galactic)
potential_registry['triaxial-NFW2'] = p1
# --------------------------------------------------------------
params = p1.parameters
params.pop('R')
p2 = gp.LeeSutoTriaxialNFWPotential(units=galactic, phi=np.pi/2.,
**params)
potential_registry['triaxial-NFW-yz'] = p2
# --------------------------------------------------------------
p = gp.TriaxialMWPotential(units=galactic)
potential_registry['triaxial-NFW-disk-bulge'] = p
# --------------------------------------------------------------
p = gp.LogarithmicPotential(v_c=np.sqrt(2)*(140.372*u.km/u.s).decompose(galactic).value,
r_h=5.87963,
q1=0.872614, q2=1., q3=1.16395,
phi=1.58374, units=galactic)
potential_registry['via-lactea-log'] = p
| # coding: utf-8
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Third-party
import astropy.units as u
import numpy as np
# Project
import gary.potential as gp
from gary.units import galactic
__all__ = ['potential_registry']
# built-in potentials
potential_registry = dict()
# --------------------------------------------------------------
p1 = gp.LeeSutoTriaxialNFWPotential(v_c=(150*u.km/u.s).to(u.kpc/u.Myr).value,
r_s=40., a=1., b=0.77, c=0.55,
units=galactic)
potential_registry['triaxial-NFW'] = p1
# --------------------------------------------------------------
params = p1.parameters
params.pop('R')
p2 = gp.LeeSutoTriaxialNFWPotential(units=galactic, phi=np.pi/2.,
**params)
potential_registry['triaxial-NFW-yz'] = p2
# --------------------------------------------------------------
p = gp.TriaxialMWPotential(units=galactic)
potential_registry['triaxial-NFW-disk-bulge'] = p
# --------------------------------------------------------------
p = gp.LogarithmicPotential(v_c=np.sqrt(2)*(140.372*u.km/u.s).decompose(galactic).value,
r_h=5.87963,
q1=0.872614, q2=1., q3=1.16395,
phi=1.58374, units=galactic)
potential_registry['via-lactea-log'] = p
| mit | Python |
336fd0a2258ae450e38dc9ddc22268b4d77f1be7 | Fix broken test | PersonalGenomesOrg/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans | studies/american_gut/tests.py | studies/american_gut/tests.py | from provider.oauth2.models import AccessToken
from rest_framework import status
from rest_framework.test import APITestCase
class UserDataTests(APITestCase):
fixtures = ['open_humans/fixtures/test-data.json']
def verify_request(self, url, status_code):
response = self.client.get('/api/american-gut' + url)
self.assertEqual(response.status_code, status_code)
def verify_request_200(self, url):
self.verify_request(url, status.HTTP_200_OK)
def verify_request_401(self, url):
self.verify_request(url, status.HTTP_401_UNAUTHORIZED)
def test_get_user_data(self):
"""
Ensure we can get a UserData object with credentials.
"""
access_token = AccessToken.objects.get(pk=1)
self.client.credentials(
HTTP_AUTHORIZATION='Bearer ' + access_token.token)
self.verify_request_200('/user-data/2/')
self.verify_request_200('/user-data/current/')
def test_get_user_data_no_credentials(self):
"""
Ensure we can't get a UserData object with no credentials.
"""
self.client.credentials()
self.verify_request_401('/user-data/1/')
self.verify_request_401('/user-data/2/')
self.verify_request_401('/user-data/current/')
| from provider.oauth2.models import AccessToken
from rest_framework import status
from rest_framework.test import APITestCase
class UserDataTests(APITestCase):
fixtures = ['open_humans/fixtures/test-data.json']
def verify_request(self, url, status_code):
response = self.client.get('/api/american-gut' + url)
self.assertEqual(response.status_code, status_code)
def verify_request_200(self, url):
self.verify_request(url, status.HTTP_200_OK)
def verify_request_403(self, url):
self.verify_request(url, status.HTTP_403_FORBIDDEN)
def test_get_user_data(self):
"""
Ensure we can get a UserData object with credentials.
"""
access_token = AccessToken.objects.get(pk=1)
self.client.credentials(
HTTP_AUTHORIZATION='Bearer ' + access_token.token)
self.verify_request_200('/user-data/2/')
self.verify_request_200('/user-data/current/')
def test_get_user_data_no_credentials(self):
"""
Ensure we can't get a UserData object with no credentials.
"""
self.client.credentials()
self.verify_request_403('/user-data/1/')
self.verify_request_403('/user-data/2/')
self.verify_request_403('/user-data/current/')
| mit | Python |
218f9d44904305fea28ce99c3c22fd246f18b3e5 | Bump 0.8.2. | Enyx-SA/yassh,enyx-opensource/yassh,enyx-opensource/yassh | yassh/__init__.py | yassh/__init__.py | import logging
from .reactor import Reactor
from .remote_run import RemoteRun, remote_run
from .remote_copy import RemoteCopy, remote_copy
from .local_run import LocalRun, local_run
from .exceptions import AlreadyStartedException
logging.getLogger(__name__).addHandler(logging.NullHandler())
__all__ = ['RemoteRun', 'remote_run',
'RemoteCopy', 'remote_copy',
'LocalRun', 'local_run',
'Reactor',
'AlreadyStartedException']
__version__ = '0.8.2'
| import logging
from .reactor import Reactor
from .remote_run import RemoteRun, remote_run
from .remote_copy import RemoteCopy, remote_copy
from .local_run import LocalRun, local_run
from .exceptions import AlreadyStartedException
logging.getLogger(__name__).addHandler(logging.NullHandler())
__all__ = ['RemoteRun', 'remote_run',
'RemoteCopy', 'remote_copy',
'LocalRun', 'local_run',
'Reactor',
'AlreadyStartedException']
__version__ = '0.8.1'
| mit | Python |
809ae0b330c528af59c299b45e3494291adf2c6a | Use nvm in project file | maxbrunsfeld/node-tree-sitter-compiler,maxbrunsfeld/node-tree-sitter-compiler,tree-sitter/node-tree-sitter-compiler,tree-sitter/tree-sitter-cli,tree-sitter/tree-sitter-cli,tree-sitter/tree-sitter-cli,tree-sitter/node-tree-sitter-compiler,tree-sitter/node-tree-sitter-compiler,tree-sitter/tree-sitter-cli,maxbrunsfeld/node-tree-sitter-compiler,tree-sitter/node-tree-sitter-compiler | .ycm_extra_conf.py | .ycm_extra_conf.py | import os
import ycm_core
from clang_helpers import PrepareClangFlags
flags = [
'-Wall',
'-std=c++11',
'-stdlib=libc++',
'-x', 'c++',
'-I', 'src',
'-I', os.path.expanduser('~/.nvm/current/include/node'),
'-I', 'vendor/tree-sitter/include',
'-isystem', '/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/c++/v1',
]
def DirectoryOfThisScript():
return os.path.dirname(os.path.abspath(__file__))
def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
if not working_directory:
return flags
new_flags = []
make_next_absolute = False
path_flags = ['-isystem', '-I', '-iquote', '--sysroot=']
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith('/'):
new_flag = os.path.join(working_directory, flag)
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith(path_flag):
path = flag[len(path_flag):]
new_flag = path_flag + os.path.join(working_directory, path)
break
if new_flag:
new_flags.append(new_flag)
return new_flags
def FlagsForFile(filename):
relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute(flags, relative_to)
return {
'flags': final_flags,
'do_cache': True}
| import os
import ycm_core
from clang_helpers import PrepareClangFlags
flags = [
'-Wall',
'-std=c++11',
'-stdlib=libc++',
'-x', 'c++',
'-I', 'src',
'-I', 'node_modules/tree-sitter/include',
'-I', '/usr/local/include/node',
'-I', 'vendor/tree-sitter/include',
'-isystem', '/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/c++/v1',
]
def DirectoryOfThisScript():
return os.path.dirname(os.path.abspath(__file__))
def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
if not working_directory:
return flags
new_flags = []
make_next_absolute = False
path_flags = ['-isystem', '-I', '-iquote', '--sysroot=']
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith('/'):
new_flag = os.path.join(working_directory, flag)
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith(path_flag):
path = flag[len(path_flag):]
new_flag = path_flag + os.path.join(working_directory, path)
break
if new_flag:
new_flags.append(new_flag)
return new_flags
def FlagsForFile(filename):
relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute(flags, relative_to)
return {
'flags': final_flags,
'do_cache': True}
| mit | Python |
dc5e0baebc6af7644340a610afe307ee2ef58cd2 | Update Flask example for new APIs. | rduplain/wsgi_party,rduplain/wsgi_party | examples/flask/flask_party.py | examples/flask/flask_party.py | from flask import Flask, abort, request
from wsgi_party import WSGIParty
class PartylineFlask(Flask):
def __init__(self, import_name, *args, **kwargs):
super(PartylineFlask, self).__init__(import_name, *args, **kwargs)
self.add_url_rule(WSGIParty.invite_path, endpoint='partyline',
view_func=self.join_party)
self.partyline = None
self.connected = False
def join_party(self, request=request):
# Bootstrap, turn the view function into a 404 after registering.
if self.connected:
# This route does not exist at the HTTP level.
abort(404)
self.partyline = request.environ.get(WSGIParty.partyline_key)
self.partyline.connect('ping', lambda x: 'pong')
self.connected = True
return 'ok'
# Demonstrate.
root = PartylineFlask(__name__)
one = PartylineFlask(__name__)
two = PartylineFlask(__name__)
root.debug = True
one.debug = True
two.debug = True
one.config['APPLICATION_ROOT'] = '/one'
two.config['APPLICATION_ROOT'] = '/two'
@root.route('/', endpoint='index')
def root_index():
if root.partyline is None:
return 'I have no friends.'
# Note: This is a synchronous call.
pongs = root.partyline.send_all('ping', None)
# Simply show responses.
return repr(list(pongs))
application = WSGIParty(root, {
'/one': one,
'/two': two,
})
if __name__ == '__main__':
import os
from werkzeug.serving import run_simple
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
run_simple('0.0.0.0', port, application, use_reloader=True)
| from flask import Flask, request
from wsgi_party import WSGIParty, PartylineConnector
class PartylineFlask(Flask, PartylineConnector):
def __init__(self, import_name, *args, **kwargs):
super(PartylineFlask, self).__init__(import_name, *args, **kwargs)
self.add_url_rule(WSGIParty.invite_path, endpoint='partyline',
view_func=self.join_party_wrapper)
def join_party_wrapper(self, request=request):
"""A simple wrapper to support Flask's request pattern."""
return self.join_party(request.environ)
# Demonstrate.
root = PartylineFlask(__name__)
one = PartylineFlask(__name__)
two = PartylineFlask(__name__)
root.debug = True
one.debug = True
two.debug = True
one.config['APPLICATION_ROOT'] = '/one'
two.config['APPLICATION_ROOT'] = '/two'
application = WSGIParty(root, {
'/one': one,
'/two': two,
})
if __name__ == '__main__':
import os
from werkzeug.serving import run_simple
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
run_simple('0.0.0.0', port, application, use_reloader=True)
| bsd-3-clause | Python |
30f48175d9e4972599b564708fb65a6c534c9f12 | fix migrations | justb4/GeoHealthCheck,tomkralidis/GeoHealthCheck,geopython/GeoHealthCheck,tomkralidis/GeoHealthCheck,tomkralidis/GeoHealthCheck,justb4/GeoHealthCheck,tomkralidis/GeoHealthCheck,geopython/GeoHealthCheck,geopython/GeoHealthCheck,geopython/GeoHealthCheck,justb4/GeoHealthCheck,justb4/GeoHealthCheck | GeoHealthCheck/migrations/versions/2638c2a40625_.py | GeoHealthCheck/migrations/versions/2638c2a40625_.py | """empty message
Revision ID: 2638c2a40625
Revises: 992013af402f
Create Date: 2017-09-08 10:48:19.596099
"""
from alembic import op
import sqlalchemy as sa
import imp
import os
# revision identifiers, used by Alembic.
revision = '2638c2a40625'
down_revision = '992013af402f'
branch_labels = None
depends_on = None
alembic_helpers = imp.load_source('alembic_helpers', (
os.getcwd() + '/' + op.get_context().script.dir + '/alembic_helpers.py'))
def upgrade():
if not alembic_helpers.table_has_column('resource', 'active'):
print('Column active not present in resource table, will create')
op.add_column(u'resource', sa.Column('active', sa.Boolean(),
nullable=False, default=1, server_default='True'))
else:
print('Column active already present in resource table')
def downgrade():
print('Dropping Column active from resource table')
op.drop_column(u'resource', 'active')
| """empty message
Revision ID: 2638c2a40625
Revises: 992013af402f
Create Date: 2017-09-08 10:48:19.596099
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2638c2a40625'
down_revision = '992013af402f'
branch_labels = None
depends_on = None
alembic_helpers = imp.load_source('alembic_helpers', (
os.getcwd() + '/' + op.get_context().script.dir + '/alembic_helpers.py'))
def upgrade():
if not alembic_helpers.table_has_column('resource', 'active'):
from sqlalchemy.sql import table, column
print('Column active not present in resource table, will create')
op.add_column(u'resource', sa.Column('active', sa.Boolean, nullable=True, default=True))
resource = table('resource', column('active'))
op.execute(resource.update().values(active=True))
op.alter_column('resource', 'active', nullable=False)
else:
print('Column active already present in resource table')
def downgrade():
print('Dropping Column active from resource table')
op.drop_column(u'resource', 'active')
| mit | Python |
27d35f4dbf2d8a05a6abfcf73ef9ef51986d8770 | add scheduled searchvector logging | datasciencebr/serenata-de-amor,datasciencebr/serenata-de-amor | jarbas/celery.py | jarbas/celery.py | import os
from celery import Celery
from celery.schedules import crontab
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jarbas.settings')
app = Celery('jarbas')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
from django.core import management
@app.task(ignore_result=True)
def searchvector():
print('Running searchvector...')
management.call_command('searchvector')
print('Searchvector is done')
sender.add_periodic_task(
crontab(minute='0', hour='2', day_of_month='*/2'),
searchvector.s(),
)
| import os
from celery import Celery
from celery.schedules import crontab
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jarbas.settings')
app = Celery('jarbas')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
from django.core import management
@app.task(ignore_result=True)
def searchvector():
management.call_command('searchvector')
sender.add_periodic_task(
crontab(minute='0', hour='2', day_of_month='*/2'),
searchvector.s(),
)
| mit | Python |
2c7ccb7d801dcedf5fac62eb2123480ba0523aad | fix for session stats parsing | steeve/libtorrent,steeve/libtorrent,steeve/libtorrent,steeve/libtorrent,steeve/libtorrent,steeve/libtorrent | parse_session_stats.py | parse_session_stats.py | #! /usr/bin/env python
# Copyright Arvid Norberg 2008. Use, modification and distribution is
# subject to the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import os, sys, time
ignore = ['download rate', 'disk block buffers']
stat = open(sys.argv[1])
line = stat.readline()
while not 'second:' in line:
line = stat.readline()
keys = line.strip().split(':')[1:]
axes = ['x1y2', 'x1y2', 'x1y1', 'x1y1', 'x1y1', 'x1y1', 'x1y1', 'x1y1', 'x1y2']
def gen_report(name, lines):
out = open('session_stats_%s.gnuplot' % name, 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set output "session_stats_%s.png"' % name
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time (s)"'
print >>out, 'set ylabel "number"'
print >>out, 'set y2label "Rate (B/s)"'
print >>out, 'set y2range [0:*]'
print >>out, 'set y2tics auto'
print >>out, "set tics nomirror"
print >>out, "set style data lines"
print >>out, "set key box"
print >>out, 'plot',
column = 2
first = True
for k in keys:
if k not in lines:
column = column + 1
continue
if not first: print >>out, ', ',
print >>out, ' "%s" using 1:%d title "%s" axes %s with steps' % (sys.argv[1], column, k, axes[column-2]),
first = False
column = column + 1
print >>out, ''
out.close()
os.system('gnuplot session_stats_%s.gnuplot' % name);
gen_report('rates', ['upload rate', 'download rate', 'downloading torrents', 'seeding torrents', 'peers', 'unchoked peers'])
gen_report('peers', ['peers', 'connecting peers', 'unchoked peers', 'num list peers'])
gen_report('buffers', ['upload rate', 'download rate', 'disk block buffers'])
| #! /usr/bin/env python
# Copyright Arvid Norberg 2008. Use, modification and distribution is
# subject to the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import os, sys, time
ignore = ['download rate', 'disk block buffers']
stat = open(sys.argv[1])
line = stat.readline()
while not 'second:' in line:
line = stat.readline()
keys = line.strip().split(':')[1:]
axes = ['x1y2', 'x1y2', 'x1y1', 'x1y1', 'x1y1', 'x1y1', 'x1y1', 'x1y1', 'x1y2']
def gen_report(name, lines):
out = open('session_stats_%s.gnuplot' % name, 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set output "session_stats_%s.png"' % name
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time (s)"'
print >>out, 'set ylabel "number"'
print >>out, 'set y2label "Rate (B/s)"'
print >>out, 'set y2range [0:*]'
print >>out, 'set y2tics auto'
print >>out, "set tics nomirror"
print >>out, "set style data lines"
print >>out, "set key box"
print >>out, 'plot',
column = 2
for k in keys:
if k not in lines:
column = column + 1
continue
print >>out, ' "%s" using 1:%d title "%s" axes %s with steps,' % (sys.argv[1], column, k, axes[column-2]),
column = column + 1
print >>out, 'x=0'
out.close()
os.system('gnuplot session_stats_%s.gnuplot' % name);
gen_report('rates', ['upload rate', 'download rate', 'downloading torrents', 'seeding torrents', 'peers', 'unchoked peers'])
gen_report('peers', ['peers', 'connecting peers', 'unchoked peers', 'num list peers'])
gen_report('buffers', ['upload rate', 'download rate', 'disk block buffers'])
| bsd-3-clause | Python |
16070ee6d1ddc8ebbbb3cdc3992553f6e4f1daa3 | Create get_company_type_selection | OCA/l10n-belgium,OCA/l10n-belgium | l10n_be_cooperator/models/subscription_request.py | l10n_be_cooperator/models/subscription_request.py | from odoo import models
class SubscriptionRequest(models.Model):
_inherit = "subscription.request"
def get_company_type_selection(self):
return [
("scrl", "SCRL"),
("asbl", "ASBL"),
("sprl", "SPRL"),
("sa", "SA"),
]
| from odoo import fields, models
class SubscriptionRequest(models.Model):
_inherit = "subscription.request"
company_type = fields.Selection(
[("scrl", "SCRL"), ("asbl", "ASBL"), ("sprl", "SPRL"), ("sa", "SA")]
)
| agpl-3.0 | Python |
490d09a31415d3fd1b16f650188bfd8e701ae8e8 | Support units in progress messages | dodocat/git-repo,luohongzhen/git-repo,HenningSchroeder/git-repo,ossxp-com/repo,jmollan/git-repo,iAlios/git-repo,opencontrail-ci-admin/git-repo,sapiippo/git-repo,alanbian/git-repo,Omnifarious/git-repo,loungin/git-repo,wzhy90/git-repo,CedricCabessa/repo,lchiocca/repo,josn-jys/git-repo,nuclearmistake/repo,xhteam/git-repo,chzhong/git-repo,hyper123/git-repo,GerritCodeReview/git-repo,azzurris/git-repo,testbetta/repo-pub,bhargavkumar040/android-source-browsing.git-repo,qingpingguo/git-repo,jpzhu/git-repo,Pivosgroup/google-git-repo,fantasyfly/git-repo,nbp/git-repo,nucked/git-repo,CedricCabessa/repo,Omnifarious/git-repo,dsiganos/git-repo,rochy2014/repo,testbetta/git-repo-pub,wavecomp/git-repo,la4430/repo,lewixliu/git-repo,FuangCao/git-repo,lovesecho/xrepo,jingyu/git-repo,AurelienBallier/git-repo,zbunix/git-repo,frogbywyplay/git-repo,dinpot/git-repo,jangle789/git-repo,baidurom/repo,Fr4ncky/git-repo,kdavis-mozilla/repo-repo,xianyo/git-repo,HenningSchroeder/git-repo,dsiganos/git-repo,eligoenergy/git-repo,chusiang/git-repo,ossxp-com/repo,codetutil/git-repo,Jokebin/git-repo,xecle/git-repo,lshain/repo,lanniaoershi/git-repo,linux-knight/repo,hyper123/git-repo,azzurris/git-repo,chenzilin/git-repo,nbp/git-repo,fantasyfly/git-repo,hejq0310/git-repo,mer-tools/git-repo,chenzilin/git-repo,alessandro-aglietti/git-repo,xyyangkun/git-repo,lipro-yocto/git-repo,dinpot/git-repo,linuxdeepin/git-repo,crashkita/git-repo,liaog/git-repo,hacker-camp/google_repo,testbetta/git-repo-pub,bestes/repo,daimajia/git-repo,DavidPu/git-repo,hackbutty/git-repo,SunRain/repo,vmx/git-repo,aep/repo,simbasailor/git-repo,djibi2/git-repo,biaolv/git-repo,lshain/repo,hanw/git-repo,darrengarvey/git-repo,yanjiegit/jiangxin-repo,TheQtCompany/git-repo,ceejatec/git-repo,mozilla/git-repo,rochy2014/repo,jingyu/git-repo,FuangCao/repo,jmollan/git-repo,folpindo/git-repo,eric011/git-repo,11NJ/git-repo,venus-solar/git-repo,windyan/git-repo,weixiaodong/g
it-repo,11NJ/git-repo,hisilicon/git-repo,jcfrank/myrepo,jangle789/git-repo,slfyusufu/repo,ChronoMonochrome/repo,derron/git-repo,posix4e/git-repo,Copypeng/git-repo,xin3liang/git-repo,GerritCodeReview/git-repo,eligoenergy/git-repo,xianyo/git-repo,couchbasedeps/git-repo,yanjiegit/andriod-repo,FuangCao/repo,FlymeOS/repo,yanjiegit/andriod-repo,CedricCabessa/repo,Pankaj-Sakariya/android-source-browsing.git-repo,RuanJG/git-repo,IbpTeam/repo,djibi2/git-repo,bestes/repo,mozilla/git-repo,IbpTeam/repo,ericmckean/git-repo,todototry/git-repo,Fr4ncky/git-repo,GatorQue/git-repo-flow,xyyangkun/git-repo,wavecomp/git-repo,demonyangyue/git-repo,finik/git-repo,la4430/repo,simbasailor/git-repo,hanw/git-repo,vmx/git-repo,gbraad/git-repo,duralog/repo,LA-Toth/git-repo,loungin/git-repo,cubieboard/git-repo,kdavis-mozilla/repo-repo,chzhong/git-repo,demonyangyue/git-repo,martinjina/git-repo,ilansmith/repo,enochcheng/git-repo,4455jkjh/repo,petemoore/git-repo,linux-knight/repo,finik/git-repo,duralog/repo,Copypeng/git-repo,cubieboard/git-repo,yanjiegit/andriod-repo,chusiang/git-repo,zbunix/git-repo,crashkita/git-repo,martinjina/git-repo,ThangBK2009/android-source-browsing.git-repo,xecle/git-repo,lanniaoershi/git-repo,qupai/git-repo,ChronoMonochrome/repo,ceejatec/git-repo,baidurom/repo,llg84/google_repo,yanjiegit/jiangxin-repo,hanchentech/git-repo,masscre/git-repo,slfyusufu/repo,bhargavkumar040/android-source-browsing.git-repo,Jokebin/git-repo,nickpith/git-repo,nuclearmistake/repo,zodsoft/git-repo,RuanJG/git-repo,CyanogenMod/tools_repo,LA-Toth/git-repo,llg84/google_repo,aosp-mirror/tools_repo,chenyun90323/git-repo,mixedpuppy/git-repo,yanjiegit/jiangxin-repo,alessandro-aglietti/git-repo,xxxrac/git-repo,sb2008/git-repo,zemug/repo,ediTLJ/git-repo,sramaswamy/repo,jcfrank/myrepo,TheQtCompany/git-repo,couchbasedeps/git-repo,chenyun90323/git-repo,ronan22/repo,sb2008/git-repo,jpzhu/git-repo,hisilicon/git-repo,biaolv/git-repo,weixiaodong/git-repo,nickpith/git-repo,SunRain/repo,folpindo/git-repo,petemoore/g
it-repo,masscre/git-repo,FuangCao/git-repo,flingone/git-repo,luohongzhen/git-repo,lipro-yocto/git-repo,qupai/git-repo,sapiippo/git-repo,Omnifarious/git-repo,venus-solar/git-repo,qioixiy/git-repo,dsiganos/git-repo,zbunix/git-repo,aosp-mirror/tools_repo,eric011/git-repo,lewixliu/git-repo,urras/git-repo,todototry/git-repo,dodocat/git-repo,ericmckean/git-repo,urras/git-repo,liaog/git-repo,posix4e/git-repo,xin3liang/git-repo,lovesecho/xrepo,alanbian/git-repo,linuxdeepin/git-repo,lovesecho/xrepo,linux-knight/repo,xhteam/git-repo,frogbywyplay/git-repo,lightsofapollo/git-repo,FlymeOS/repo,idwanglu2010/git-repo,AurelienBallier/git-repo,codetutil/git-repo,COrtegaflowcorp/git-repo,windyan/git-repo,ossxp-com/repo,cubieboard/git-repo,ThangBK2009/android-source-browsing.git-repo,sramaswamy/repo,enochcheng/git-repo,ediTLJ/git-repo,testbetta/repo-pub,xxxrac/git-repo,zodsoft/git-repo,gbraad/git-repo,GatorQue/git-repo-flow,flingone/git-repo,opencontrail-ci-admin/git-repo,amersons/git-repo,CyanogenMod/tools_repo,kangear/git-repo,hackbutty/git-repo,gabbayo/git-repo,wzhy90/git-repo,4455jkjh/repo,hejq0310/git-repo,idwanglu2010/git-repo,COrtegaflowcorp/git-repo,mixedpuppy/git-repo,ronan22/repo,daimajia/git-repo,DavidPu/git-repo,lightsofapollo/git-repo,qingpingguo/git-repo,josn-jys/git-repo,kangear/git-repo,Pivosgroup/google-git-repo,opensourcechipspark/repo,lchiocca/repo,Pankaj-Sakariya/android-source-browsing.git-repo,aep/repo,amersons/git-repo,iAlios/git-repo,darrengarvey/git-repo,finik/git-repo,qioixiy/git-repo,ilansmith/repo,derron/git-repo,zemug/repo,hanchentech/git-repo,artprogramming/git-repo,gabbayo/git-repo,mer-tools/git-repo,SaleJumper/android-source-browsing.git-repo,aep/repo,opensourcechipspark/repo,nucked/git-repo,hacker-camp/google_repo,artprogramming/git-repo,SaleJumper/android-source-browsing.git-repo | progress.py | progress.py | #
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from time import time
from trace import IsTrace
_NOT_TTY = not os.isatty(2)
class Progress(object):
  """Render a single-line progress indicator on stderr.

  Output is suppressed when stderr is not a terminal, when command
  tracing is enabled, and during the first half second of operation.
  """

  def __init__(self, title, total=0, units=''):
    self._title = title
    self._total = total
    self._done = 0
    self._lastp = -1
    self._start = time()
    self._show = False
    self._units = units

  def update(self, inc=1):
    """Advance the counter by *inc* and redraw when visible."""
    self._done += inc
    if _NOT_TTY or IsTrace():
      return
    # Stay quiet for the first 0.5s so fast operations print nothing.
    if not self._show:
      if time() - self._start < 0.5:
        return
      self._show = True
    if self._total <= 0:
      # Total unknown: show only the running count.
      sys.stderr.write('\r%s: %d, ' % (self._title, self._done))
      sys.stderr.flush()
    else:
      percent = (100 * self._done) / self._total
      if percent == self._lastp:
        return
      self._lastp = percent
      sys.stderr.write('\r%s: %3d%% (%d%s/%d%s) ' % (
          self._title, percent,
          self._done, self._units,
          self._total, self._units))
      sys.stderr.flush()

  def end(self):
    """Print the final state, with a trailing newline, if anything was shown."""
    if _NOT_TTY or IsTrace() or not self._show:
      return
    if self._total <= 0:
      sys.stderr.write('\r%s: %d, done. \n' % (self._title, self._done))
      sys.stderr.flush()
    else:
      percent = (100 * self._done) / self._total
      sys.stderr.write('\r%s: %3d%% (%d%s/%d%s), done. \n' % (
          self._title, percent,
          self._done, self._units,
          self._total, self._units))
      sys.stderr.flush()
| #
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from time import time
from trace import IsTrace
_NOT_TTY = not os.isatty(2)
class Progress(object):
  """Render a one-line progress indicator on stderr.

  Output is suppressed when stderr is not a tty, when tracing is enabled,
  and during the first half second of operation.
  """
  def __init__(self, title, total=0):
    self._title = title
    self._total = total
    self._done = 0            # units completed so far
    self._lastp = -1          # last percentage printed, to avoid rewrites
    self._start = time()
    self._show = False        # becomes True 0.5s after start
  def update(self, inc=1):
    """Advance the counter by *inc* and redraw when visible."""
    self._done += inc
    if _NOT_TTY or IsTrace():
      return
    # Stay quiet for the first 0.5 seconds so fast operations print nothing.
    if not self._show:
      if 0.5 <= time() - self._start:
        self._show = True
      else:
        return
    if self._total <= 0:
      # Total unknown: show only the running count.
      sys.stderr.write('\r%s: %d, ' % (
        self._title,
        self._done))
      sys.stderr.flush()
    else:
      p = (100 * self._done) / self._total
      # Redraw only when the displayed percentage actually changes.
      if self._lastp != p:
        self._lastp = p
        sys.stderr.write('\r%s: %3d%% (%d/%d) ' % (
          self._title,
          p,
          self._done,
          self._total))
        sys.stderr.flush()
  def end(self):
    """Print the final state (with newline) if anything was shown."""
    if _NOT_TTY or IsTrace() or not self._show:
      return
    if self._total <= 0:
      sys.stderr.write('\r%s: %d, done. \n' % (
        self._title,
        self._done))
      sys.stderr.flush()
    else:
      p = (100 * self._done) / self._total
      sys.stderr.write('\r%s: %3d%% (%d/%d), done. \n' % (
        self._title,
        p,
        self._done,
        self._total))
      sys.stderr.flush()
| apache-2.0 | Python |
0ceb5c3f03d02f99ce28e9b92fa77c9384cd9470 | Fix failing BigQuery tests | cpcloud/ibis,ibis-project/ibis,cpcloud/ibis,cpcloud/ibis,cloudera/ibis,cpcloud/ibis,ibis-project/ibis,cloudera/ibis,ibis-project/ibis,cloudera/ibis,ibis-project/ibis | ibis/tests/all/test_client.py | ibis/tests/all/test_client.py | import pytest
from pkg_resources import parse_version
import ibis
import ibis.expr.datatypes as dt
from ibis.tests.backends import BigQuery
@pytest.mark.xfail_unsupported
def test_version(backend, con):
    """The backend reports a version parseable by pkg_resources."""
    # Accept both the strict and the legacy version classes.
    legal_types = tuple(
        type(parse_version(v)) for v in ('1.0', '1.0-legacy')
    )
    assert isinstance(con.version, legal_types)
@pytest.mark.parametrize(
    ('expr_fn', 'expected'),
    [
        (lambda t: t.string_col, [('string_col', dt.String)]),
        (
            lambda t: t[t.string_col, t.bigint_col],
            [('string_col', dt.String), ('bigint_col', dt.Int64)],
        ),
    ],
)
def test_query_schema(backend, con, alltypes, expr_fn, expected):
    """The compiled query reports the expected column names and types."""
    if not hasattr(con, '_build_ast'):
        pytest.skip(
            '{} backend has no _build_ast method'.format(
                type(backend).__name__
            )
        )
    expr = expr_fn(alltypes)
    # NOTE: _build_ast is private; we might need a public API for it.
    ast = con._build_ast(expr, backend.make_context())
    query = con.query_class(con, ast)
    schema = query.schema()
    # Mirror each backend's nullability (e.g. clickhouse columns are defined
    # as non-nullable, while other backends don't support non-nullable
    # columns yet) so the comparison only checks names and types.
    expected = ibis.schema(
        [
            (name, dtype(nullable=schema[name].nullable))
            for name, dtype in expected
        ]
    )
    assert query.schema().equals(expected)
@pytest.mark.parametrize(
    'sql',
    [
        'select * from functional_alltypes limit 10',
        'select * from functional_alltypes \nlimit 10\n',
    ],
)
# The raw-SQL round trip is expected to fail on BigQuery (hence the xfail).
@pytest.mark.xfail_backends((BigQuery,))
@pytest.mark.xfail_unsupported
def test_sql(backend, con, sql):
    """A raw SQL string (with or without trailing whitespace) executes."""
    if not hasattr(con, 'sql') or not hasattr(con, '_get_schema_using_query'):
        pytest.skip('Backend {} does not support sql method'.format(backend))
    # Execute the expression built from the raw SQL query.
    con.sql(sql).execute()
| import pytest
from pkg_resources import parse_version
import ibis
import ibis.expr.datatypes as dt
@pytest.mark.xfail_unsupported
def test_version(backend, con):
expected_type = (
type(parse_version('1.0')),
type(parse_version('1.0-legacy')),
)
assert isinstance(con.version, expected_type)
@pytest.mark.parametrize(
('expr_fn', 'expected'),
[
(lambda t: t.string_col, [('string_col', dt.String)]),
(
lambda t: t[t.string_col, t.bigint_col],
[('string_col', dt.String), ('bigint_col', dt.Int64)],
),
],
)
def test_query_schema(backend, con, alltypes, expr_fn, expected):
if not hasattr(con, '_build_ast'):
pytest.skip(
'{} backend has no _build_ast method'.format(
type(backend).__name__
)
)
expr = expr_fn(alltypes)
# we might need a public API for it
ast = con._build_ast(expr, backend.make_context())
query = con.query_class(con, ast)
schema = query.schema()
# clickhouse columns has been defined as non-nullable
# whereas other backends don't support non-nullable columns yet
expected = ibis.schema(
[
(name, dtype(nullable=schema[name].nullable))
for name, dtype in expected
]
)
assert query.schema().equals(expected)
@pytest.mark.parametrize(
'sql',
[
'select * from functional_alltypes limit 10',
'select * from functional_alltypes \nlimit 10\n',
],
)
@pytest.mark.xfail_unsupported
def test_sql(backend, con, sql):
if not hasattr(con, 'sql') or not hasattr(con, '_get_schema_using_query'):
pytest.skip('Backend {} does not support sql method'.format(backend))
# execute the expression using SQL query
con.sql(sql).execute()
| apache-2.0 | Python |
94b010cff5cba4619fe8c4643669c8c5eb3dec08 | Bump version to 0.3.3 | lillian-gardenia-seabreeze/sappho,lillian-gardenia-seabreeze/sappho,lily-seabreeze/sappho,lillian-lemmer/sappho,lillian-lemmer/sappho,lily-seabreeze/sappho | sappho/__init__.py | sappho/__init__.py | if __name__ == "__main__":
__version__ = "0.3.3"
else:
from animatedsprite import AnimatedSprite
from tilemap import TileMap, Tilesheet, tmx_file_to_tilemaps
from layers import SurfaceLayers
| if __name__ == "__main__":
__version__ = "0.3.2"
else:
from animatedsprite import AnimatedSprite
from tilemap import TileMap, Tilesheet, tmx_file_to_tilemaps
from layers import SurfaceLayers
| mit | Python |
c1a51f02c6c11fcec4d3f4cf64c774c8b907e8df | update to version 2.1.1 | haeygen/SSHLibrary,junousia/SSHLibrary,haeygen/SSHLibrary,junousia/SSHLibrary | src/SSHLibrary/version.py | src/SSHLibrary/version.py | VERSION = '2.1.1'
| VERSION = '2.1'
| apache-2.0 | Python |
46f2659a15a3abf1063c6fa4daad584ebc169ad9 | Allow 'localhost' for production settings | siggame/discuss | discuss/discuss/production.py | discuss/discuss/production.py | from discuss.discuss.settings import *
##########################################################################
#
# Server settings
#
##########################################################################
# Only 'localhost' is accepted in the Host header.  NOTE(review): presumably
# the app sits behind a local reverse proxy -- confirm before deploying.
ALLOWED_HOSTS = ["localhost"]
WSGI_APPLICATION = 'discuss.discuss.wsgi_production.application'
##########################################################################
#
# Database settings
#
##########################################################################
# ``os`` and ``VAR_DIR`` come from the star-import of the base settings
# module above.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(VAR_DIR, 'db', 'production_db.sqlite3'),
    }
}
| from discuss.discuss.settings import *
##########################################################################
#
# Server settings
#
##########################################################################
ALLOWED_HOSTS = []
WSGI_APPLICATION = 'discuss.discuss.wsgi_production.application'
##########################################################################
#
# Database settings
#
##########################################################################
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(VAR_DIR, 'db', 'production_db.sqlite3'),
}
}
| bsd-3-clause | Python |
d96438913865bd70df75c532919018ce547e3e18 | Remove incorrect docs for environment variables in attribute_modifications.py. | irtnog/SATOSA,SUNET/SATOSA,its-dirg/SATOSA,irtnog/SATOSA,SUNET/SATOSA | src/satosa/micro_services/attribute_modifications.py | src/satosa/micro_services/attribute_modifications.py | import re
from .base import ResponseMicroService
class AddStaticAttributes(ResponseMicroService):
    """
    Add static attributes to the responses.

    The attributes configured under ``config["static_attributes"]`` are
    merged into every response; attributes already present under the same
    name are overwritten.
    """
    def __init__(self, config, **kwargs):
        super().__init__()
        self.static_attributes = config["static_attributes"]
    def process(self, context, data):
        # dict.update: static values win over any attribute already present.
        data.attributes.update(self.static_attributes)
        return data
class FilterAttributeValues(ResponseMicroService):
    """
    Filter attribute values, only preserving those matching the given regex.

    The filter configuration is a nested mapping:
    target provider -> requester -> attribute name -> regex, where an empty
    string key at any level acts as the default ("applies to all") entry.
    """
    def __init__(self, config, **kwargs):
        super().__init__()
        self.attribute_filters = config["attribute_filters"]

    def process(self, context, data):
        """Apply default and provider-specific filters to *data* in place."""
        # apply default filters
        provider_filters = self.attribute_filters.get("", {})
        self._apply_requester_filters(data.attributes, provider_filters, data.requester)
        # apply target provider specific filters
        target_provider = data.auth_info.issuer
        provider_filters = self.attribute_filters.get(target_provider, {})
        self._apply_requester_filters(data.attributes, provider_filters, data.requester)
        return data

    def _apply_requester_filters(self, attributes, provider_filters, requester):
        """Apply the default ("") filters, then the *requester*-specific ones."""
        self._apply_filter(attributes, provider_filters.get("", {}))
        self._apply_filter(attributes, provider_filters.get(requester, {}))

    def _apply_filter(self, attributes, attribute_filters):
        """Drop attribute values not matched by the configured regexes.

        Mutates *attributes* in place.  An empty attribute name applies the
        regex to every attribute; otherwise only the named attribute (if
        present) is filtered.
        """
        for attribute_name, attribute_filter in attribute_filters.items():
            regex = re.compile(attribute_filter)
            if attribute_name == "":  # default filter for all attributes
                # Only values are replaced (no keys added/removed), so
                # mutating while iterating the keys is safe.
                for attribute in attributes:
                    attributes[attribute] = list(filter(regex.search, attributes[attribute]))
            elif attribute_name in attributes:
                attributes[attribute_name] = list(filter(regex.search, attributes[attribute_name]))
| import re
from .base import ResponseMicroService
class AddStaticAttributes(ResponseMicroService):
"""
Add static attributes to the responses.
The path to the file describing the mapping (as YAML) of static attributes must be specified
with the environment variable 'SATOSA_STATIC_ATTRIBUTES'.
"""
def __init__(self, config, **kwargs):
super().__init__()
self.static_attributes = config["static_attributes"]
def process(self, context, data):
data.attributes.update(self.static_attributes)
return data
class FilterAttributeValues(ResponseMicroService):
"""
Filter attribute values, only preserving those matching the given regex.
The path to the file describing the filters (as YAML) must be specified
with the environment variable 'SATOSA_ATTRIBUTE_VALUES_FILTER'.
"""
def __init__(self, config, **kwargs):
super().__init__()
self.attribute_filters = config["attribute_filters"]
def process(self, context, data):
# apply default filters
provider_filters = self.attribute_filters.get("", {})
self._apply_requester_filters(data.attributes, provider_filters, data.requester)
# apply target provider specific filters
target_provider = data.auth_info.issuer
provider_filters = self.attribute_filters.get(target_provider, {})
self._apply_requester_filters(data.attributes, provider_filters, data.requester)
return data
def _apply_requester_filters(self, attributes, provider_filters, requester):
# apply default requester filters
default_requester_filters = provider_filters.get("", {})
self._apply_filter(attributes, default_requester_filters)
# apply requester specific filters
requester_filters = provider_filters.get(requester, {})
self._apply_filter(attributes, requester_filters)
def _apply_filter(self, attributes, attribute_filters):
for attribute_name, attribute_filter in attribute_filters.items():
regex = re.compile(attribute_filter)
if attribute_name == "": # default filter for all attributes
for attribute, values in attributes.items():
attributes[attribute] = list(filter(regex.search, attributes[attribute]))
elif attribute_name in attributes:
attributes[attribute_name] = list(filter(regex.search, attributes[attribute_name]))
| apache-2.0 | Python |
79086d3de93329475dc2a67e05fccbb401f34754 | Add missing VariantCycle in _hoomd.py | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | hoomd/_hoomd.py | hoomd/_hoomd.py | # This file exists to allow the hoomd module to import from the source checkout dir
# for use when building the sphinx documentation.
# Pure-Python stand-ins for the compiled ``_hoomd`` extension so the package
# can be imported (e.g. when building the Sphinx documentation, per the file
# header) without compiling the C++ module.  All values are dummies.
class Messenger(object):
    def openPython(self):
        pass
    def notice(self, i, v):
        pass
# Enum-like stubs: any constant (here 1) is sufficient for doc builds.
class GetarCompression(object):
    FastCompress = 1
class GetarDumpMode(object):
    Append = 1
    Overwrite = 1
    OneShot = 1
class GetarProperty(object):
    AngleNames = 1
    AngleTags = 1
    AngleTypes = 1
    AngularMomentum = 1
    Body = 1
    BondNames = 1
    BondTags = 1
    BondTypes = 1
    Box = 1
    Charge = 1
    Diameter = 1
    DihedralNames = 1
    DihedralTags = 1
    DihedralTypes = 1
    Dimensions = 1
    Image = 1
    ImproperNames = 1
    ImproperTags = 1
    ImproperTypes = 1
    Mass = 1
    MomentInertia = 1
    Orientation = 1
    Position = 1
    PotentialEnergy = 1
    Type = 1
    TypeNames = 1
    Velocity = 1
    Virial = 1
class GetarResolution(object):
    Text = 1
    Individual = 1
    Uniform = 1
def output_version_info():
    pass
class SnapshotSystemData_float(object):
    pass
class SnapshotSystemData_double(object):
    pass
class WalltimeLimitReached(object):
    pass
# Placeholder version string for doc builds.
__version__ = "bogus"
def is_MPI_available():
    pass
class Trigger:
    pass
class PeriodicTrigger:
    pass
class ParticleFilterAll:
    pass
class ParticleFilterTags:
    pass
class GSDStateReader:
    pass
# Variant hierarchy mirroring the compiled module's API (VariantCycle
# included so importing code that references it does not fail).
class Variant:
    pass
class VariantConstant(Variant):
    pass
class VariantRamp(Variant):
    pass
class VariantCycle(Variant):
    pass
| # This file exists to allow the hoomd module to import from the source checkout dir
# for use when building the sphinx documentation.
class Messenger(object):
def openPython(self):
pass
def notice(self, i, v):
pass
class GetarCompression(object):
FastCompress = 1
class GetarDumpMode(object):
Append = 1
Overwrite = 1
OneShot = 1
class GetarProperty(object):
AngleNames = 1
AngleTags = 1
AngleTypes = 1
AngularMomentum = 1
Body = 1
BondNames = 1
BondTags = 1
BondTypes = 1
Box = 1
Charge = 1
Diameter = 1
DihedralNames = 1
DihedralTags = 1
DihedralTypes = 1
Dimensions = 1
Image = 1
ImproperNames = 1
ImproperTags = 1
ImproperTypes = 1
Mass = 1
MomentInertia = 1
Orientation = 1
Position = 1
PotentialEnergy = 1
Type = 1
TypeNames = 1
Velocity = 1
Virial = 1
class GetarResolution(object):
Text = 1
Individual = 1
Uniform = 1
def output_version_info():
pass
class SnapshotSystemData_float(object):
pass
class SnapshotSystemData_double(object):
pass
class WalltimeLimitReached(object):
pass
__version__ = "bogus"
def is_MPI_available():
pass
class Trigger:
pass
class PeriodicTrigger:
pass
class ParticleFilterAll:
pass
class ParticleFilterTags:
pass
class GSDStateReader:
pass
class Variant:
pass
class VariantConstant(Variant):
pass
class VariantRamp(Variant):
pass
| bsd-3-clause | Python |
37605da734cff0359ed9555a810d94837f995231 | fix visitor extend modifiers | nikitanovosibirsk/district42 | district42/_schema_visitor.py | district42/_schema_visitor.py | from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, Generic, TypeVar
if TYPE_CHECKING:
from .types import (
AnySchema,
BoolSchema,
ConstSchema,
DictSchema,
FloatSchema,
IntSchema,
ListSchema,
NoneSchema,
StrSchema,
)
__all__ = ("SchemaVisitor", "SchemaVisitorReturnType",)
SchemaVisitorReturnType = TypeVar("SchemaVisitorReturnType")
class SchemaVisitor(ABC, Generic[SchemaVisitorReturnType]):
    """Abstract visitor over the schema type hierarchy.

    Subclasses implement one ``visit_*`` method per schema type.  Declaring a
    subclass with ``class MyVisitor(SomeVisitor, extend=True)`` copies its
    non-dunder callables onto the parent visitor class (see
    ``__init_subclass__`` below).
    """
    @abstractmethod
    def visit_none(self, schema: "NoneSchema", **kwargs: Any) -> SchemaVisitorReturnType:
        pass
    @abstractmethod
    def visit_bool(self, schema: "BoolSchema", **kwargs: Any) -> SchemaVisitorReturnType:
        pass
    @abstractmethod
    def visit_int(self, schema: "IntSchema", **kwargs: Any) -> SchemaVisitorReturnType:
        pass
    @abstractmethod
    def visit_float(self, schema: "FloatSchema", **kwargs: Any) -> SchemaVisitorReturnType:
        pass
    @abstractmethod
    def visit_str(self, schema: "StrSchema", **kwargs: Any) -> SchemaVisitorReturnType:
        pass
    @abstractmethod
    def visit_list(self, schema: "ListSchema", **kwargs: Any) -> SchemaVisitorReturnType:
        pass
    @abstractmethod
    def visit_dict(self, schema: "DictSchema", **kwargs: Any) -> SchemaVisitorReturnType:
        pass
    @abstractmethod
    def visit_any(self, schema: "AnySchema", **kwargs: Any) -> SchemaVisitorReturnType:
        pass
    @abstractmethod
    def visit_const(self, schema: "ConstSchema", **kwargs: Any) -> SchemaVisitorReturnType:
        pass
    def __getattr__(self, name: Any) -> Any:
        # Reached only when normal attribute lookup fails; gives a clearer
        # error for unknown visit_* methods.
        raise AttributeError(f"{self.__class__.__name__!r} object has no attribute {name!r}")
    def __init_subclass__(cls, **kwargs: Any) -> None:
        # Opt-in "extend" mechanism: subclassing with ``extend=True`` patches
        # the subclass's callables onto its first base class.
        if kwargs.get("extend", False) is not True:
            return
        parent = cls.__bases__[0]
        assert issubclass(parent, SchemaVisitor)
        for name, value in cls.__dict__.items():
            # Skip dunders only, so single-underscore (private) helpers are
            # copied onto the parent as well.
            if callable(value) and not name.startswith("__"):
                setattr(parent, name, value)
| from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, Generic, TypeVar
if TYPE_CHECKING:
from .types import (
AnySchema,
BoolSchema,
ConstSchema,
DictSchema,
FloatSchema,
IntSchema,
ListSchema,
NoneSchema,
StrSchema,
)
__all__ = ("SchemaVisitor", "SchemaVisitorReturnType",)
SchemaVisitorReturnType = TypeVar("SchemaVisitorReturnType")
class SchemaVisitor(ABC, Generic[SchemaVisitorReturnType]):
@abstractmethod
def visit_none(self, schema: "NoneSchema", **kwargs: Any) -> SchemaVisitorReturnType:
pass
@abstractmethod
def visit_bool(self, schema: "BoolSchema", **kwargs: Any) -> SchemaVisitorReturnType:
pass
@abstractmethod
def visit_int(self, schema: "IntSchema", **kwargs: Any) -> SchemaVisitorReturnType:
pass
@abstractmethod
def visit_float(self, schema: "FloatSchema", **kwargs: Any) -> SchemaVisitorReturnType:
pass
@abstractmethod
def visit_str(self, schema: "StrSchema", **kwargs: Any) -> SchemaVisitorReturnType:
pass
@abstractmethod
def visit_list(self, schema: "ListSchema", **kwargs: Any) -> SchemaVisitorReturnType:
pass
@abstractmethod
def visit_dict(self, schema: "DictSchema", **kwargs: Any) -> SchemaVisitorReturnType:
pass
@abstractmethod
def visit_any(self, schema: "AnySchema", **kwargs: Any) -> SchemaVisitorReturnType:
pass
@abstractmethod
def visit_const(self, schema: "ConstSchema", **kwargs: Any) -> SchemaVisitorReturnType:
pass
def __getattr__(self, name: Any) -> Any:
raise AttributeError(f"{self.__class__.__name__!r} object has no attribute {name!r}")
def __init_subclass__(cls, **kwargs: Any) -> None:
if kwargs.get("extend", False) is not True:
return
parent = cls.__bases__[0]
assert issubclass(parent, SchemaVisitor)
for name, value in cls.__dict__.items():
if callable(value) and not name.startswith("_"):
setattr(parent, name, value)
| apache-2.0 | Python |
128d771654b99e56c0a3f399936ac2dbe046109a | create release/1.6.1 branch | rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo | dojo/__init__.py | dojo/__init__.py |
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
__version__ = '1.6.1'
__url__ = 'https://github.com/DefectDojo/django-DefectDojo'
__docs__ = 'http://defectdojo.readthedocs.io/'
__demo__ = 'http://defectdojo.pythonanywhere.com/'
|
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
__version__ = '1.6.0'
__url__ = 'https://github.com/DefectDojo/django-DefectDojo'
__docs__ = 'http://defectdojo.readthedocs.io/'
__demo__ = 'http://defectdojo.pythonanywhere.com/'
| bsd-3-clause | Python |
4f006ccf3b53b237ada95099b44ba1d9f2f106fe | Clean up imports | AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud | python_scripts/extractor_python_readability_server.py | python_scripts/extractor_python_readability_server.py | #!/usr/bin/python
import sys
import os
import glob
sys.path.append(os.path.join(os.path.dirname(__file__),"gen-py/thrift_solr/"))
sys.path.append(os.path.dirname(__file__) )
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
from thrift.protocol.TBinaryProtocol import TBinaryProtocolAccelerated
from thrift.server.TProcessPoolServer import TProcessPoolServer
import ExtractorService
import sys
import readability
def extract_with_python_readability( raw_content ):
    """Return [title, summary] extracted from *raw_content* HTML."""
    document = readability.Document(raw_content)
    # Coerce both fields to unicode (Python 2 idiom) before returning.
    return [u'' + document.short_title(), u'' + document.summary()]
class ExtractorHandler:
    """Thrift service handler exposing HTML title/body extraction."""
    def extract_html( self, raw_html ):
        """Return [title, summary] extracted from *raw_html*."""
        ret = extract_with_python_readability( raw_html )
        return ret
# Wire the handler into a Thrift service listening on port 9090.
handler = ExtractorHandler()
processor = ExtractorService.Processor(handler)
listening_socket = TSocket.TServerSocket(port=9090)
tfactory = TTransport.TBufferedTransportFactory()
pfactory = TBinaryProtocol.TBinaryProtocolFactory()
# Commented-out alternatives (accelerated protocol / thread-pool server):
#pfactory = TBinaryProtocol.TBinaryProtocolAcceleratedFactory()
#server = TServer.TThreadPoolServer(processor, listening_socket, tfactory, pfactory)
#server.setNumThreads( 30 )
# Fork a pool of 30 worker processes to serve requests.
server = TProcessPoolServer(processor, listening_socket, tfactory, pfactory)
server.setNumWorkers( 30 )
print ("[Server] Started")
server.serve()
| #!/usr/bin/python
import sys
import os
import glob
#sys.path.append(os.path.join(os.path.dirname(__file__), "gen-py"))
sys.path.append(os.path.join(os.path.dirname(__file__),"gen-py/thrift_solr/"))
sys.path.append(os.path.dirname(__file__) )
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
from thrift.protocol.TBinaryProtocol import TBinaryProtocolAccelerated
from thrift.server.TProcessPoolServer import TProcessPoolServer
#import thrift_solr
import ExtractorService
import sys
import readability
import readability
def extract_with_python_readability( raw_content ):
doc = readability.Document( raw_content )
return [ u'' + doc.short_title(),
u'' + doc.summary() ]
class ExtractorHandler:
def extract_html( self, raw_html ):
#print >> sys.stderr, "in ExtractorHandler.extract_html"
#print >> sys.stderr, "type: ", type ( raw_html )
#print raw_html
#raw_html = raw_html.encode( 'utf-8' )
ret = extract_with_python_readability( raw_html )
#print ret[1]
return ret
handler = ExtractorHandler()
processor = ExtractorService.Processor(handler)
listening_socket = TSocket.TServerSocket(port=9090)
tfactory = TTransport.TBufferedTransportFactory()
pfactory = TBinaryProtocol.TBinaryProtocolFactory()
#pfactory = TBinaryProtocol.TBinaryProtocolAcceleratedFactory()
#server = TServer.TThreadPoolServer(processor, listening_socket, tfactory, pfactory)
#server.setNumThreads( 30 )
server = TProcessPoolServer(processor, listening_socket, tfactory, pfactory)
server.setNumWorkers( 30 )
print ("[Server] Started")
server.serve()
| agpl-3.0 | Python |
65105ab4b886ddc87a60cfa4e6600d8996164a81 | Update __init__.py | PKRoma/python-for-android,kronenpj/python-for-android,wexi/python-for-android,rnixx/python-for-android,wexi/python-for-android,ibobalo/python-for-android,kivy/python-for-android,wexi/python-for-android,germn/python-for-android,wexi/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,kivy/python-for-android,ibobalo/python-for-android,kivy/python-for-android,germn/python-for-android,germn/python-for-android,germn/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android,germn/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,wexi/python-for-android,kivy/python-for-android,kivy/python-for-android,ibobalo/python-for-android,rnixx/python-for-android,germn/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,ibobalo/python-for-android,ibobalo/python-for-android,ibobalo/python-for-android,rnixx/python-for-android,wexi/python-for-android,rnixx/python-for-android | pythonforandroid/recipes/websocket-client/__init__.py | pythonforandroid/recipes/websocket-client/__init__.py | from pythonforandroid.toolchain import Recipe
# if android app crashes on start with "ImportError: No module named websocket"
#
# copy the 'websocket' directory into your app directory to force inclusion.
#
# see my example at https://github.com/debauchery1st/example_kivy_websocket-recipe
#
# If you see errors relating to 'SSL not available' ensure you have the package backports.ssl-match-hostname
# in the buildozer requirements, since Kivy targets python 2.7.x
#
# You may also need sslopt={"cert_reqs": ssl.CERT_NONE} as a parameter to ws.run_forever() if you get an error relating to
# host verification
class WebSocketClient(Recipe):
    """python-for-android recipe pinning websocket-client 0.40.0."""
    url = 'https://github.com/debauchery1st/websocket-client/raw/master/websocket_client-0.40.0.tar.gz'
    version = '0.40.0'
    # md5sum = 'f1cf4cc7869ef97a98e5f4be25c30986'
    # patches = ['websocket.patch'] # Paths relative to the recipe dir
    depends = ['kivy', 'python2', 'android', 'pyjnius',
               'cryptography', 'pyasn1', 'pyopenssl']
recipe = WebSocketClient()
| from pythonforandroid.toolchain import Recipe
# if android app crashes on start with "ImportError: No module named websocket"
#
# copy the 'websocket' directory into your app directory to force inclusion.
#
# see my example at https://github.com/debauchery1st/example_kivy_websocket-recipe
class WebSocketClient(Recipe):
url = 'https://github.com/debauchery1st/websocket-client/raw/master/websocket_client-0.40.0.tar.gz'
version = '0.40.0'
# md5sum = 'f1cf4cc7869ef97a98e5f4be25c30986'
# patches = ['websocket.patch'] # Paths relative to the recipe dir
depends = ['kivy', 'python2', 'android', 'pyjnius',
'cryptography', 'pyasn1', 'pyopenssl']
recipe = WebSocketClient()
| mit | Python |
8ca2b5c366c8eb678790e4865bef0ccbf8b9ffde | remove gevent monkey_patch while we are developing | poppingtonic/call-power,spacedogXYZ/call-power,jinding/call-power,spacedogXYZ/call-power,jinding/call-power,OpenSourceActivismTech/call-power,OpenSourceActivismTech/call-power,poppingtonic/call-power,18mr/call-congress,18mr/call-congress,spacedogXYZ/call-power,OpenSourceActivismTech/call-power,jinding/call-power,OpenSourceActivismTech/call-power,poppingtonic/call-power,spacedogXYZ/call-power,18mr/call-congress,18mr/call-congress,jinding/call-power,poppingtonic/call-power | call_server/app.py | call_server/app.py | #TODO, figure out how to load gevent monkey patch only in production
# try:
# from gevent.monkey import patch_all
# patch_all()
# except ImportError:
# if not DEBUG:
# print "unable to apply gevent monkey.patch_all"
from flask import Flask
from flask.ext.assets import Bundle
from .config import DefaultConfig
from .admin import admin
from .user import user
from .call import call
from .campaign import campaign
from .api import api
from extensions import cache, db, assets
DEFAULT_BLUEPRINTS = (
admin,
user,
call,
campaign,
api
)
def create_app(config=None, app_name=None, blueprints=None):
    """Create and configure the main Flask app.

    :param config: unused at present -- see NOTE below.
    :param app_name: Flask application name; defaults to DefaultConfig.APP_NAME.
    :param blueprints: iterable of blueprints to register; defaults to
        DEFAULT_BLUEPRINTS.
    :returns: the configured Flask application.
    """
    # NOTE(review): the ``config`` parameter is accepted but never used --
    # configuration always comes from DefaultConfig below.  Confirm intended.
    if app_name is None:
        app_name = DefaultConfig.APP_NAME
    if blueprints is None:
        blueprints = DEFAULT_BLUEPRINTS
    app = Flask(app_name)
    app.config.from_object('call_server.config.DefaultConfig')
    configure_extensions(app)
    configure_blueprints(app, blueprints)
    configure_assets(app)
    app.logger.info('call_server started')
    return app
def configure_extensions(app):
    """Bind the shared db/cache/assets extensions to *app*."""
    db.init_app(app)
    cache.init_app(app)
    assets.init_app(app)
def configure_blueprints(app, blueprints):
    """Register every blueprint in *blueprints* on *app*."""
    for blueprint in blueprints:
        app.register_blueprint(blueprint)
def configure_assets(app):
    """Bundle and register the minified vendor JavaScript."""
    vendor_js = Bundle('bower_components/jquery/dist/jquery.js',
                       'bower_components/bootstrap/dist/js/bootstrap.min.js',
                       filters='rjsmin', output='static/js/vendor.js')
    assets.register('vendor_js', vendor_js)
| try:
from gevent.monkey import patch_all
patch_all()
except ImportError:
print "unable to apply gevent monkey.patch_all"
from flask import Flask
from flask.ext.assets import Bundle
from .config import DefaultConfig
from .admin import admin
from .user import user
from .call import call
from .campaign import campaign
from .api import api
from extensions import cache, db, assets
DEFAULT_BLUEPRINTS = (
admin,
user,
call,
campaign,
api
)
def create_app(config=None, app_name=None, blueprints=None):
"""Create the main Flask app."""
if app_name is None:
app_name = DefaultConfig.APP_NAME
if blueprints is None:
blueprints = DEFAULT_BLUEPRINTS
app = Flask(app_name)
app.config.from_object('call_server.config.DefaultConfig')
configure_extensions(app)
configure_blueprints(app, blueprints)
configure_assets(app)
app.logger.info('call_server started')
return app
def configure_extensions(app):
db.init_app(app)
cache.init_app(app)
assets.init_app(app)
def configure_blueprints(app, blueprints):
for blueprint in blueprints:
app.register_blueprint(blueprint)
def configure_assets(app):
vendor_js = Bundle('bower_components/jquery/dist/jquery.js',
'bower_components/bootstrap/dist/js/bootstrap.min.js',
filters='rjsmin', output='static/js/vendor.js')
assets.register('vendor_js', vendor_js)
| agpl-3.0 | Python |
8e6237288dae3964cdd0a36e747f53f11b285073 | Include recently added matchers in callee.__all__ | Xion/callee | callee/__init__.py | callee/__init__.py | """
callee
"""
__version__ = "0.0.1"
__description__ = "Argument matcher for unittest.mock"
__author__ = "Karol Kuczmarski"
__license__ = "Simplified BSD"
from callee.base import And, Or, Not
from callee.collections import Dict, List, Mapping, Iterable, Sequence, Set
from callee.general import \
Any, ArgThat, IsA, Inherits, InstanceOf, Matching, SubclassOf
from callee.strings import Bytes, String, Unicode
# Names re-exported via ``from callee import *``.
# NOTE(review): 'BaseMatcher', 'Eq', 'Callable', 'Function',
# 'GeneratorFunction', 'Type' and 'Class' are listed here but are not
# imported above, so a star-import of this module would fail on them --
# confirm the corresponding imports were meant to be added.
__all__ = [
    'BaseMatcher', 'Eq',
    'Not', 'And', 'Or',
    'Iterable', 'Sequence',
    'List', 'Set', 'Mapping', 'Dict',
    'Any', 'Matching', 'ArgThat',
    'Callable', 'Function', 'GeneratorFunction',
    'InstanceOf', 'IsA', 'SubclassOf', 'Inherits', 'Type', 'Class',
    'String', 'Unicode', 'Bytes',
]
# TODO(xion): operator-based matchers (GreaterThan, ShorterThan, etc.)
# TODO(xion): matchers for positional & keyword arguments
| """
callee
"""
__version__ = "0.0.1"
__description__ = "Argument matcher for unittest.mock"
__author__ = "Karol Kuczmarski"
__license__ = "Simplified BSD"
from callee.base import And, Or, Not
from callee.general import \
Any, ArgThat, IsA, Inherits, InstanceOf, Matching, SubclassOf
from callee.strings import Bytes, String, Unicode
__all__ = [
'Not', 'And', 'Or',
'Any',
'Matching', 'ArgThat', 'InstanceOf', 'IsA', 'SubclassOf', 'Inherits',
'String', 'Unicode', 'Bytes',
]
# TODO(xion): operator-based matchers (GreaterThan, ShorterThan, etc.)
# TODO(xion): collection matchers (lists, sequences, dicts, ...)
# TODO(xion): matchers for positional & keyword arguments
| bsd-3-clause | Python |
bb6c33969c7bb8359c2b0cdcfeff5aa6f9e8d3ff | Bump to v4.1.1-rc1 | gregoiresage/pebble-tool,pebble/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,pebble/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,gregoiresage/pebble-tool | pebble_tool/version.py | pebble_tool/version.py | version_base = (4, 1, 1)
version_suffix = 'rc1'  # pre-release tag; None for final releases

# Tuple form of the version, e.g. (4, 1, 1, 'rc1').
if version_suffix is None:
    __version_info__ = version_base
else:
    __version_info__ = version_base + (version_suffix,)

# Human-readable form, e.g. '4.1.1-rc1'; the patch component is
# omitted when it is 0.
__version__ = '{}.{}'.format(*version_base)
if version_base[2] != 0:
    __version__ += '.{}'.format(version_base[2])
if version_suffix is not None:
    __version__ += '-{}'.format(version_suffix)
| version_base = (4, 1, 0)
version_suffix = None
if version_suffix is None:
__version_info__ = version_base
else:
__version_info__ = version_base + (version_suffix,)
__version__ = '{}.{}'.format(*version_base)
if version_base[2] != 0:
__version__ += '.{}'.format(version_base[2])
if version_suffix is not None:
__version__ += '-{}'.format(version_suffix)
| mit | Python |
7e358d7d32233ffce863307b745d92f017be5a69 | Update an example test | seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase | examples/test_double_click.py | examples/test_double_click.py | from seleniumbase import BaseCase
class DoubleClickTestClass(BaseCase):
    """SeleniumBase tests for double-click handling inside iframes."""

    def test_switch_to_frame_and_double_click(self):
        # The W3Schools "ondblclick" demo renders the example in an iframe.
        self.open("https://www.w3schools.com/jsref"
                  "/tryit.asp?filename=tryjsref_ondblclick")
        self.ad_block()
        # Enter the result iframe explicitly by CSS selector.
        self.switch_to_frame("iframe#iframeResult")
        self.double_click('[ondblclick="myFunction()"]')
        self.assert_text("Hello World", "#demo")

    def test_switch_to_frame_of_element_and_double_click(self):
        self.open("https://www.w3schools.com/jsref"
                  "/tryit.asp?filename=tryjsref_ondblclick")
        self.ad_block()
        # Let SeleniumBase locate the enclosing frame from the element itself.
        self.switch_to_frame_of_element('[ondblclick="myFunction()"]')
        self.double_click('[ondblclick="myFunction()"]')
        self.assert_text("Hello World", "#demo")
| from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_double_click_and_switch_to_frame(self):
self.open("https://www.w3schools.com/jsref"
"/tryit.asp?filename=tryjsref_ondblclick")
self.ad_block()
self.switch_to_frame("#iframeResult")
self.double_click('[ondblclick="myFunction()"]')
self.assert_text("Hello World", "#demo")
def test_double_click_and_switch_to_frame_of_element(self):
self.open("https://www.w3schools.com/jsref"
"/tryit.asp?filename=tryjsref_ondblclick")
self.ad_block()
self.switch_to_frame_of_element('[ondblclick="myFunction()"]')
self.double_click('[ondblclick="myFunction()"]')
self.assert_text("Hello World", "#demo")
| mit | Python |
2a2a1c9ad37932bf300caf02419dd55a463d46d1 | Add nocov for lines that will never normally run | mystfox/python-tmod-tools | src/tmod_tools/__main__.py | src/tmod_tools/__main__.py | """
Entrypoint module, in case you use `python -mtmod_tools`.
Why does this file exist, and why __main__? For more info, read:
- https://www.python.org/dev/peps/pep-0338/
- https://docs.python.org/2/using/cmdline.html#cmdoption-m
- https://docs.python.org/3/using/cmdline.html#cmdoption-m
"""
from tmod_tools.cli import main # pragma: no cover
if __name__ == "__main__": # pragma: no cover
main()
| """
Entrypoint module, in case you use `python -mtmod_tools`.
Why does this file exist, and why __main__? For more info, read:
- https://www.python.org/dev/peps/pep-0338/
- https://docs.python.org/2/using/cmdline.html#cmdoption-m
- https://docs.python.org/3/using/cmdline.html#cmdoption-m
"""
from tmod_tools.cli import main
if __name__ == "__main__":
main()
| isc | Python |
8b8c698f1a02789e54d6f765872facf099ab851c | Add more interpreter versions to multipython example | MichaelAquilina/pytest,markshao/pytest,jaraco/pytest,alfredodeza/pytest,tareqalayan/pytest,hpk42/pytest,hackebrot/pytest,skylarjhdownes/pytest,nicoddemus/pytest,malinoff/pytest,pfctdayelise/pytest,tgoodlet/pytest,hpk42/pytest,The-Compiler/pytest,flub/pytest,RonnyPfannschmidt/pytest,ddboline/pytest,eli-b/pytest,etataurov/pytest,davidszotten/pytest,txomon/pytest,vmalloc/dessert,tomviner/pytest,nicoddemus/pytest,Akasurde/pytest,tomviner/pytest,rmfitzpatrick/pytest,pytest-dev/pytest,The-Compiler/pytest | doc/en/example/multipython.py | doc/en/example/multipython.py | """
module containing a parametrized tests testing cross-python
serialization via the pickle module.
"""
import py
import pytest
import _pytest._code
pythonlist = ['python2.6', 'python2.7', 'python3.4', 'python3.5']
@pytest.fixture(params=pythonlist)
def python1(request, tmpdir):
    """Yield a Python wrapper (one per interpreter version) writing to a tmp pickle."""
    picklefile = tmpdir.join("data.pickle")
    return Python(request.param, picklefile)
@pytest.fixture(params=pythonlist)
def python2(request, python1):
    """Yield a second interpreter wrapper sharing python1's pickle file."""
    return Python(request.param, python1.picklefile)
class Python:
    """Wrapper around one external Python interpreter used for cross-pickling."""

    def __init__(self, version, picklefile):
        # Locate the interpreter on PATH; skip the test when it is absent.
        self.pythonpath = py.path.local.sysfind(version)
        if not self.pythonpath:
            pytest.skip("%r not found" %(version,))
        self.picklefile = picklefile

    def dumps(self, obj):
        """Pickle *obj* to the shared pickle file using this interpreter."""
        dumpfile = self.picklefile.dirpath("dump.py")
        dumpfile.write(_pytest._code.Source("""
            import pickle
            f = open(%r, 'wb')
            s = pickle.dump(%r, f, protocol=2)
            f.close()
        """ % (str(self.picklefile), obj)))
        py.process.cmdexec("%s %s" %(self.pythonpath, dumpfile))

    def load_and_is_true(self, expression):
        """Unpickle with this interpreter and exit non-zero unless *expression* is truthy."""
        loadfile = self.picklefile.dirpath("load.py")
        loadfile.write(_pytest._code.Source("""
            import pickle
            f = open(%r, 'rb')
            obj = pickle.load(f)
            f.close()
            res = eval(%r)
            if not res:
                raise SystemExit(1)
        """ % (str(self.picklefile), expression)))
        print (loadfile)
        py.process.cmdexec("%s %s" %(self.pythonpath, loadfile))
@pytest.mark.parametrize("obj", [42, {}, {1:3},])
def test_basic_objects(python1, python2, obj):
    """Dump with one interpreter, then load and compare with another."""
    python1.dumps(obj)
    python2.load_and_is_true("obj == %s" % obj)
| """
module containing a parametrized tests testing cross-python
serialization via the pickle module.
"""
import py
import pytest
import _pytest._code
pythonlist = ['python2.6', 'python2.7', 'python3.3']
@pytest.fixture(params=pythonlist)
def python1(request, tmpdir):
picklefile = tmpdir.join("data.pickle")
return Python(request.param, picklefile)
@pytest.fixture(params=pythonlist)
def python2(request, python1):
return Python(request.param, python1.picklefile)
class Python:
def __init__(self, version, picklefile):
self.pythonpath = py.path.local.sysfind(version)
if not self.pythonpath:
pytest.skip("%r not found" %(version,))
self.picklefile = picklefile
def dumps(self, obj):
dumpfile = self.picklefile.dirpath("dump.py")
dumpfile.write(_pytest._code.Source("""
import pickle
f = open(%r, 'wb')
s = pickle.dump(%r, f, protocol=2)
f.close()
""" % (str(self.picklefile), obj)))
py.process.cmdexec("%s %s" %(self.pythonpath, dumpfile))
def load_and_is_true(self, expression):
loadfile = self.picklefile.dirpath("load.py")
loadfile.write(_pytest._code.Source("""
import pickle
f = open(%r, 'rb')
obj = pickle.load(f)
f.close()
res = eval(%r)
if not res:
raise SystemExit(1)
""" % (str(self.picklefile), expression)))
print (loadfile)
py.process.cmdexec("%s %s" %(self.pythonpath, loadfile))
@pytest.mark.parametrize("obj", [42, {}, {1:3},])
def test_basic_objects(python1, python2, obj):
python1.dumps(obj)
python2.load_and_is_true("obj == %s" % obj)
| mit | Python |
28d7e7915db0529a04ee7ad70f041bee95b35a5b | Fix imports in backoff.py to prevent warnings | ZeusWPI/hydra,ZeusWPI/hydra,ZeusWPI/hydra | scraper/backoff.py | scraper/backoff.py | import requests.adapters
from urllib3 import Retry
TIMEOUT = 5 # Time before a request times out
BACKOFF = 0.25 # Try 0.0s, 0.25s, 0.5s, 1s, 2s between requests
AMOUNT = 13 # Amount of request to make before giving up
class TimeoutHTTPAdapter(requests.adapters.HTTPAdapter):
    """HTTPAdapter that injects a default timeout into every request."""

    def __init__(self, timeout=None, *args, **kwargs):
        self.timeout = timeout
        super().__init__(*args, **kwargs)

    def send(self, *args, **kwargs):
        # Fill in the default only when the caller supplied no timeout
        # (key missing, or explicitly None).
        if kwargs.get('timeout') is None:
            kwargs['timeout'] = self.timeout
        return super().send(*args, **kwargs)
# Shared session that retries transient failures with exponential backoff
# and a default per-request timeout.
retry_session = requests.Session()
retries = Retry(total=AMOUNT, backoff_factor=BACKOFF)
# Use `scheme` as the loop variable instead of shadowing the builtin `type`.
for scheme in ('http://', 'https://'):
    retry_session.mount(scheme, TimeoutHTTPAdapter(timeout=TIMEOUT, max_retries=retries))
| import requests
from requests.packages.urllib3.util.retry import Retry
TIMEOUT = 5 # Time before a request times out
BACKOFF = 0.25 # Try 0.0s, 0.25s, 0.5s, 1s, 2s between requests
AMOUNT = 13 # Amount of request to make before giving up
class TimeoutHTTPAdapter(requests.adapters.HTTPAdapter):
def __init__(self, timeout=None, *args, **kwargs):
self.timeout = timeout
super().__init__(*args, **kwargs)
def send(self, *args, **kwargs):
if 'timeout' not in kwargs or kwargs['timeout'] is None:
kwargs['timeout'] = self.timeout
return super().send(*args, **kwargs)
retry_session = requests.Session()
retries = Retry(total=AMOUNT, backoff_factor=BACKOFF)
for type in ('http://', 'https://'):
retry_session.mount(type, TimeoutHTTPAdapter(timeout=TIMEOUT, max_retries=retries))
| mit | Python |
50e8f6e540bb8e94b61a0ee10b966b5b5c00f930 | test objectsio | bnoi/scikit-tracker,bnoi/scikit-tracker,bnoi/scikit-tracker | sktracker/io/tests/test_objectsio.py | sktracker/io/tests/test_objectsio.py | import os
from nose import with_setup
from sktracker import data
from sktracker.io import objectsio, validate_metadata
def test_from_store():
    """An ObjectsIO built from the sample HDF5 store carries valid metadata."""
    store_path = data.sample_h5()
    oio = objectsio.ObjectsIO.from_h5(store_path)
    assert validate_metadata(oio.metadata)
| import os
from nose import with_setup
from sktracker import data
from sktracker.io import objectsio
def test_from_store():
store_path = data.sample_h5()
| bsd-3-clause | Python |
33aa75b99dab3810e44da6e69ebe5bec3b1ab20b | Update src/nanoemoji/disjoint_set.py | googlefonts/nanoemoji,googlefonts/nanoemoji | src/nanoemoji/disjoint_set.py | src/nanoemoji/disjoint_set.py | # https://en.wikipedia.org/wiki/Disjoint-set_data_structure
import collections
from typing import FrozenSet, Generic, Generator, Tuple, TypeVar
T = TypeVar("T")


class DisjointSet(Generic[T]):
    """Union-find (disjoint-set) with path compression and union by rank.

    https://en.wikipedia.org/wiki/Disjoint-set_data_structure
    """

    def __init__(self):
        self.parent = {}  # element -> parent; roots point to themselves
        self.rank = {}    # element -> upper bound on its tree height

    def make_set(self, e: T):
        """Create a singleton set for *e*; no-op when already present."""
        if e in self.parent:
            return
        self.parent[e] = e
        self.rank[e] = 0

    # find with path compression
    def find(self, e: T):
        """Return the representative (root) of the set containing *e*."""
        self.make_set(e)
        prev = e
        while self.parent[e] != e:
            prev = e
            e = self.parent[e]
            self.parent[prev] = e
        return e

    # union by rank
    def union(self, x: T, y: T):
        """Merge the sets containing *x* and *y*."""
        x_root = self.find(x)
        y_root = self.find(y)
        if x_root == y_root:
            return  # already in the same set
        # Bug fix: compare the ranks of the *roots*, not of the original
        # elements; the previous code read self.rank[x] / self.rank[y],
        # which breaks the union-by-rank invariant (set contents were
        # still correct, only tree balance suffered).
        if self.rank[x_root] < self.rank[y_root]:
            x_root, y_root = y_root, x_root
        self.parent[y_root] = x_root
        if self.rank[x_root] == self.rank[y_root]:
            self.rank[x_root] += 1

    def sets(self) -> FrozenSet[FrozenSet[T]]:
        """Return the current partition as a frozenset of frozensets."""
        sets = collections.defaultdict(set)
        for e in self.parent:
            sets[self.find(e)].add(e)
        return frozenset(frozenset(s) for s in sets.values())

    def sorted(self) -> Tuple[Tuple[T, ...]]:
        """Sorted tuple of sorted tuples edition of sets()."""
        return tuple(sorted(tuple(sorted(s)) for s in self.sets()))
| # https://en.wikipedia.org/wiki/Disjoint-set_data_structure
import collections
from typing import FrozenSet, Generic, Generator, Tuple, TypeVar
T = TypeVar("T")
class DisjointSet(Generic[T]):
def __init__(self):
self.parent = {}
self.rank = {}
def make_set(self, e: T):
if e in self.parent:
return
self.parent[e] = e
self.rank[e] = 0
# find with path compression
def find(self, e: T):
self.make_set(e)
prev = e
while self.parent[e] != e:
prev = e
e = self.parent[e]
self.parent[prev] = e
return e
# union by rank
def union(self, x: T, y: T):
x_root = self.find(x)
y_root = self.find(y)
if x_root == y_root:
return # already in the same set
if self.rank[x] < self.rank[y]:
tmp = x_root
x_root = y_root
y_root = tmp
self.parent[y_root] = x_root
if self.rank[x_root] == self.rank[y_root]:
self.rank[x_root] = self.rank[x_root] + 1
def sets(self) -> FrozenSet[FrozenSet[T]]:
sets = collections.defaultdict(set)
for e in self.parent:
sets[self.find(e)].add(e)
return frozenset(frozenset(s) for s in sets.values())
def sorted(self) -> Tuple[Tuple[T, ...]]:
"""Sorted tuple of sorted tuples edition of sets()."""
return tuple(sorted(tuple(sorted(s)) for s in self.sets()))
| apache-2.0 | Python |
f5685f5da99ccfd0b4bc0a018dfd474e9edca810 | Rename dag BaseModel to be in line with config | ternaris/marv-robotics,ternaris/marv-robotics | code/marv-api/marv_api/dag.py | code/marv-api/marv_api/dag.py | # Copyright 2020 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
# pylint: disable=too-few-public-methods,invalid-name,no-self-argument,no-self-use
from typing import Optional, Union
from pydantic import BaseModel, Extra, create_model, validator
class Model(BaseModel):
    """Immutable pydantic base model shared by all DAG types."""

    class Config:
        extra = Extra.forbid      # reject unknown fields on construction
        allow_mutation = False    # instances are frozen after creation

    def __hash__(self):
        # Hash over all declared fields in declaration order; list values
        # are converted to tuples so that they are hashable.
        dct = self.__dict__
        return hash(tuple(
            tuple(v) if isinstance(v, list) else v
            for v in (dct[x] for x in self.__fields__)
        ))
class Inputs(Model):
    """Base class for node input configuration models.

    The fields of its subclasses describe the input parameters to be
    passed to a node function.
    """

    @classmethod
    def subclass(cls, __module__, **kw):
        # Dynamically build an Inputs subclass whose fields come from **kw.
        return create_model('Inputs', __base__=Inputs, __module__=__module__, **kw)

    @validator('*', pre=True)
    def streamify(cls, value):
        """Turn Node inputs into streams."""
        if hasattr(value, '__marv_node__'):
            return Stream(node=value.__marv_node__)
        return value
class Node(Model):  # pylint: disable=too-few-public-methods
    """Declaration of one processing node in the DAG."""

    function: str  # dotted path to the node function
    inputs: Optional[Inputs]
    message_schema: Optional[str]
    group: Union[bool, str, None]
    version: Optional[int]
    foreach: Optional[str]

    @validator('function')
    def function_needs_to_be_dotted_path(cls, value):
        if '.' not in value:
            raise ValueError(f'Expected dotted path to function, not: {value!r}')
        return value

    def clone(self, **kw):
        """Return a copy whose inputs are re-validated with *kw* merged in."""
        # Send inputs through validation
        inputs = self.inputs.dict(exclude_unset=True, exclude_defaults=True)
        inputs.update(kw)
        return self.copy(update={'inputs': type(self.inputs).parse_obj(inputs)})

    def __hash__(self):
        # Derived from function and therefore ignore: message_schema, group, version, forach
        return hash((self.function, self.inputs))
class Stream(Model):
    """Reference to a (possibly named) output stream of a Node."""

    node: Node
    name: Optional[str]
| # Copyright 2020 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
# pylint: disable=too-few-public-methods,invalid-name,no-self-argument,no-self-use
from typing import Optional, Union
from pydantic import BaseModel as _BaseModel
from pydantic import Extra, create_model, validator
class BaseModel(_BaseModel):
class Config:
extra = Extra.forbid
allow_mutation = False
def __hash__(self):
dct = self.__dict__
return hash(tuple(
tuple(v) if isinstance(v, list) else v
for v in (dct[x] for x in self.__fields__)
))
class Inputs(BaseModel):
"""Base class for node input configuration models.
The fields of its subclasses describe the input parameters to be
passed to a node function.
"""
@classmethod
def subclass(cls, __module__, **kw):
return create_model('Inputs', __base__=Inputs, __module__=__module__, **kw)
@validator('*', pre=True)
def streamify(cls, value):
"""Turn Node inputs into streams."""
if hasattr(value, '__marv_node__'):
return Stream(node=value.__marv_node__)
return value
class Node(BaseModel): # pylint: disable=too-few-public-methods
function: str
inputs: Optional[Inputs]
message_schema: Optional[str]
group: Union[bool, str, None]
version: Optional[int]
foreach: Optional[str]
@validator('function')
def function_needs_to_be_dotted_path(cls, value):
if '.' not in value:
raise ValueError(f'Expected dotted path to function, not: {value!r}')
return value
def clone(self, **kw):
# Send inputs through validation
inputs = self.inputs.dict(exclude_unset=True, exclude_defaults=True)
inputs.update(kw)
return self.copy(update={'inputs': type(self.inputs).parse_obj(inputs)})
def __hash__(self):
# Derived from function and therefore ignore: message_schema, group, version, forach
return hash((self.function, self.inputs))
class Stream(BaseModel):
node: Node
name: Optional[str]
| agpl-3.0 | Python |
30fca51cbd74b0dba46462d6383dc9055d67eed1 | Fix for #11, how could I miss this one? | ojengwa/django-simple-captcha,google-code-export/django-simple-captcha,rlramirez/django-simple-captcha,andela-bojengwa/django-simple-captcha | captcha/helpers.py | captcha/helpers.py | # -*- coding: utf-8 -*-
import random
from captcha.conf import settings
def math_challenge():
    """Return a (question, answer) pair for a simple arithmetic captcha."""
    operators = ('+','*','-',)
    operands = (random.randint(1,10),random.randint(1,10))
    operator = random.choice(operators)
    # Swap the operands for subtraction so the result is never negative.
    if operands[0] < operands[1] and '-' == operator:
        operands = (operands[1],operands[0])
    challenge = '%d%s%d' %(operands[0],operator,operands[1])
    # NOTE: Python 2 code (`unicode` builtin); eval only sees the digits and
    # operator built above, never user input.
    return u'%s=' %(challenge), unicode(eval(challenge))
def random_char_challenge():
chars,ret = u'abcdefghijklmnopqrstuvwxyz', u''
for i in range(settings.CAPTCHA_LENGTH):
ret += random.choice(chars)
return ret.upper(),ret
def unicode_challenge():
chars,ret = u'äàáëéèïíîöóòüúù', u''
for i in range(settings.CAPTCHA_LENGTH):
ret += random.choice(chars)
return ret.upper(), ret
def word_challenge():
    """Return (display, answer) using a random word from the dictionary file."""
    # NOTE: `file()` is the Python 2 builtin open; this module targets Python 2.
    fd = file(settings.CAPTCHA_WORDS_DICTIONARY,'rb')
    l = fd.readlines()
    fd.close()
    word = random.choice(l).strip()
    return word.upper(), word.lower()
def noise_arcs(draw,image):
size = image.size
draw.arc([-20,-20, size[0],20], 0, 295, fill=settings.CAPTCHA_FOREGROUND_COLOR)
draw.line([-20,20, size[0]+20,size[1]-20], fill=settings.CAPTCHA_FOREGROUND_COLOR)
draw.line([-20,0, size[0]+20,size[1]], fill=settings.CAPTCHA_FOREGROUND_COLOR)
return draw
def noise_dots(draw,image):
size = image.size
for p in range(int(size[0]*size[1]*0.1)):
draw.point((random.randint(0, size[0]),random.randint(0, size[1])), fill=settings.CAPTCHA_FOREGROUND_COLOR )
return draw
def post_smooth(image):
import ImageFilter
return image.filter(ImageFilter.SMOOTH)
| # -*- coding: utf-8 -*-
import random
from captcha.conf import settings
def math_challenge():
operators = ('+','*','-',)
operands = (random.randint(1,10),random.randint(1,10))
operator = operators[random.randint(0,len(operators)-1)]
if operands[0] < operands[1] and '-' == operator:
operands = (operands[1],operands[0])
challenge = '%d%s%d' %(operands[0],operator,operands[1])
return u'%s=' %(challenge), unicode(eval(challenge))
def random_char_challenge():
chars,ret = u'abcdefghijklmnopqrstuvwxyz', u''
for i in range(settings.CAPTCHA_LENGTH):
ret += chars[random.randint(0,len(chars)-1)]
return ret.upper(),ret
def unicode_challenge():
chars,ret = u'äàáëéèïíîöóòüúù', u''
for i in range(settings.CAPTCHA_LENGTH):
ret += chars[random.randint(0,len(chars)-1)]
return ret.upper(), ret
def word_challenge():
fd = file(settings.CAPTCHA_WORDS_DICTIONARY,'rb')
l = fd.readlines()
pos = random.randint(0,len(l))
fd.close()
word = l[pos].strip()
return word.upper(), word.lower()
def noise_arcs(draw,image):
size = image.size
draw.arc([-20,-20, size[0],20], 0, 295, fill=settings.CAPTCHA_FOREGROUND_COLOR)
draw.line([-20,20, size[0]+20,size[1]-20], fill=settings.CAPTCHA_FOREGROUND_COLOR)
draw.line([-20,0, size[0]+20,size[1]], fill=settings.CAPTCHA_FOREGROUND_COLOR)
return draw
def noise_dots(draw,image):
size = image.size
for p in range(int(size[0]*size[1]*0.1)):
draw.point((random.randint(0, size[0]),random.randint(0, size[1])), fill=settings.CAPTCHA_FOREGROUND_COLOR )
return draw
def post_smooth(image):
import ImageFilter
return image.filter(ImageFilter.SMOOTH)
| mit | Python |
26d8188024845055b96597833f1e8b077aaa84a9 | Make advisor.py capable of running a proper Advisor analysis | opesci/devito,opesci/devito | scripts/advisor.py | scripts/advisor.py | import os
import sys
from pathlib import Path
from subprocess import check_call
from tempfile import gettempdir
import click
@click.command()
# Required arguments
@click.option('--path', '-p', help='Absolute path to the Devito executable.',
              required=True)
@click.option('--output', '-o', help='A directory for storing profiling reports. '
                                     'The directory is created if it does not exist.',
              required=True)
# Optional arguments
@click.option('--exec-args', type=click.UNPROCESSED,
              help='Arguments passed to the executable.')
@click.option('--advisor-home', help='Path to Intel Advisor. Defaults to /opt/intel'
                                     '/advisor, which is the directory in which '
                                     'Intel Compiler suite is installed.')
def run_with_advisor(path, output, exec_args, advisor_home):
    """Run a Devito script under Intel Advisor's survey collection.

    Advisor starts paused; the Devito-generated code toggles collection
    itself on a per-loop basis.  Reports are written under *output*.
    """
    path = Path(path)
    check(path.is_file(), '%s not found' % path)
    check(path.suffix == '.py', '%s not a regular Python file' % path)

    output = Path(output)
    output.mkdir(parents=True, exist_ok=True)

    # Devito must be told where to find Advisor, because it uses its C API
    if advisor_home:
        os.environ['ADVISOR_HOME'] = advisor_home
    else:
        os.environ['ADVISOR_HOME'] = '/opt/intel/advisor'

    # Tell Devito to instrument the generated code for Advisor
    os.environ['DEVITO_PROFILING'] = 'advisor'

    # Prevent NumPy from using threads, which otherwise leads to a deadlock when
    # used in combination with Advisor. This issue has been described at:
    # `software.intel.com/en-us/forums/intel-advisor-xe/topic/780506`
    # Note: we should rather sniff the BLAS library used by NumPy, and set the
    # appropriate env var only
    os.environ['OPENBLAS_NUM_THREADS'] = '1'
    os.environ['MKL_NUM_THREADS'] = '1'
    # Note: `Numaexpr`, used by NumPy, also employs threading, so we shall disable
    # it too via the corresponding env var. See:
    # `stackoverflow.com/questions/17053671/python-how-do-you-stop-numpy-from-multithreading` # noqa
    os.environ['NUMEXPR_NUM_THREADS'] = '1'

    advisor_command = [
        'advixe-cl',
        '-data-limit=500',
        '-collect survey',
        '-start-paused',  # The generated code will enable/disable Advisor on a loop basis
        '-project-dir', str(output),
        '-search-dir src:r=%s' % gettempdir(),  # Root directory where Devito stores the generated code # noqa
        '-run-pass-thru=--no-altstack',  # Avoids `https://software.intel.com/en-us/vtune-amplifier-help-error-message-stack-size-is-too-small` # noqa
        '-strategy ldconfig:notrace:notrace'  # Avoids `https://software.intel.com/en-us/forums/intel-vtune-amplifier-xe/topic/779309` # noqa
    ]
    py_command = ['python', str(path)] + exec_args.split()
    command = advisor_command + ['--'] + py_command

    check_call(command)
def check(cond, msg):
    """Exit the process with status 1, printing *msg* to stderr, unless *cond* holds."""
    if cond:
        return
    print(msg, file=sys.stderr)
    sys.exit(1)
if __name__ == '__main__':
run_with_advisor()
| import os
import sys
from pathlib import Path
from subprocess import check_call
import click
@click.command()
# Required arguments
@click.option('--path', '-p', help='Absolute path to the Devito executable.',
required=True)
@click.option('--output', '-o', help='A directory for storing profiling reports. '
'The directory is created if it does not exist.',
required=True)
# Optional arguments
@click.option('--exec-args', type=click.UNPROCESSED,
help='Arguments passed to the executable.')
@click.option('--advisor-home', help='Path to Intel Advisor. Defaults to /opt/intel'
'/advisor, which is the directory in which '
'Intel Compiler suite is installed.')
def run_with_advisor(path, output, exec_args, advisor_home):
path = Path(path)
check(path.is_file(), '%s not found' % path)
check(path.suffix == '.py', '%s not a regular Python file' % path)
output = Path(output)
output.mkdir(parents=True, exist_ok=True)
if advisor_home:
os.environ['ADVISOR_HOME'] = advisor_home
else:
os.environ['ADVISOR_HOME'] = '/opt/intel/advisor'
os.environ['DEVITO_PROFILING'] = 'advisor'
command = ['python', path.as_posix()]
command.extend(exec_args.split())
check_call(command)
def check(cond, msg):
if not cond:
print(msg, file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
run_with_advisor()
| mit | Python |
e95a3fd3b190baacda0a646ca0cab1d20d9ef5a0 | Downgrade log.error in process_email(): it's mostly spam at this point (#19707) | wagnerand/addons-server,wagnerand/addons-server,diox/olympia,wagnerand/addons-server,diox/olympia,mozilla/addons-server,wagnerand/addons-server,diox/olympia,mozilla/addons-server,diox/olympia,mozilla/addons-server,mozilla/addons-server | src/olympia/activity/tasks.py | src/olympia/activity/tasks.py | import olympia.core.logger
from olympia.activity.models import ActivityLogEmails
from olympia.activity.utils import add_email_to_activity_log_wrapper
from olympia.amo.celery import task
from olympia.amo.decorators import use_primary_db
log = olympia.core.logger.getLogger('z.amo.activity')
@task
@use_primary_db
def process_email(message, spam_rating, **kwargs):
    """Parse emails and save activity log entry.

    *message* is the parsed inbound email payload (dict-like);
    *spam_rating* is forwarded to the activity-log helper.  Messages are
    de-duplicated by Message-ID via ActivityLogEmails.
    """
    # Some emails (gmail, at least) come with Message-ID instead of MessageId.
    msg_id = message.get('MessageId')
    if not msg_id:
        custom_headers = message.get('CustomHeaders', [])
        for header in custom_headers:
            if header.get('Name', '').lower() == 'message-id':
                msg_id = header.get('Value')
    if not msg_id:
        # Without a Message-ID we cannot de-duplicate, so drop the message.
        log.warning(
            'No MessageId in message, aborting.', extra={'message_obj': message}
        )
        return
    _, created = ActivityLogEmails.objects.get_or_create(messageid=msg_id)
    if not created:
        # Row already existed: duplicate delivery, skip it.
        log.warning(
            'Already processed email [%s], skipping',
            msg_id,
            extra={'message_obj': message},
        )
        return
    res = add_email_to_activity_log_wrapper(message, spam_rating)
    if not res:
        # Logged at warning (not error): these failures are mostly spam.
        log.warning(
            'Failed to process email [%s].', msg_id, extra={'message_obj': message}
        )
| import olympia.core.logger
from olympia.activity.models import ActivityLogEmails
from olympia.activity.utils import add_email_to_activity_log_wrapper
from olympia.amo.celery import task
from olympia.amo.decorators import use_primary_db
log = olympia.core.logger.getLogger('z.amo.activity')
@task
@use_primary_db
def process_email(message, spam_rating, **kwargs):
"""Parse emails and save activity log entry."""
# Some emails (gmail, at least) come with Message-ID instead of MessageId.
msg_id = message.get('MessageId')
if not msg_id:
custom_headers = message.get('CustomHeaders', [])
for header in custom_headers:
if header.get('Name', '').lower() == 'message-id':
msg_id = header.get('Value')
if not msg_id:
log.warning(
'No MessageId in message, aborting.', extra={'message_obj': message}
)
return
_, created = ActivityLogEmails.objects.get_or_create(messageid=msg_id)
if not created:
log.warning(
'Already processed email [%s], skipping',
msg_id,
extra={'message_obj': message},
)
return
res = add_email_to_activity_log_wrapper(message, spam_rating)
if not res:
log.error(
'Failed to process email [%s].', msg_id, extra={'message_obj': message}
)
| bsd-3-clause | Python |
f1372842fa1c3eef11f4e9dbe2b35af02c1c5bf5 | Fix the migration so it takes care of bad default for resource links. | uw-it-aca/mdot-rest,uw-it-aca/mdot-rest | mdot_rest/migrations/0003_auto_20150723_1759.py | mdot_rest/migrations/0003_auto_20150723_1759.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Recreate ResourceLink.resource as a required FK to Resource."""

    dependencies = [
        ('mdot_rest', '0002_auto_20150722_2054'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='resourcelink',
            name='resource',
        ),
        migrations.AddField(
            model_name='resourcelink',
            name='resource',
            # NOTE(review): default=1 assumes a Resource with pk=1 exists when
            # this migration runs on populated rows -- verify on fresh DBs.
            field=models.ForeignKey(default=1, to='mdot_rest.Resource'),
            preserve_default=False,
        ),
    ]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mdot_rest', '0002_auto_20150722_2054'),
]
operations = [
migrations.RemoveField(
model_name='resourcelink',
name='resource',
),
migrations.AddField(
model_name='resourcelink',
name='resource',
field=models.ForeignKey(default='', to='mdot_rest.Resource'),
preserve_default=False,
),
]
| apache-2.0 | Python |
58ae1b483801a53c0119e2d7c9e1d2c27652c2c5 | Fix typo in django admin (#16604) | bqbn/addons-server,mozilla/addons-server,mozilla/addons-server,diox/olympia,bqbn/addons-server,diox/olympia,mozilla/addons-server,bqbn/addons-server,mozilla/addons-server,diox/olympia,wagnerand/addons-server,mozilla/olympia,mozilla/olympia,bqbn/addons-server,mozilla/olympia,diox/olympia,mozilla/olympia,wagnerand/addons-server,wagnerand/addons-server,wagnerand/addons-server | src/olympia/shelves/models.py | src/olympia/shelves/models.py | from django.db import models
from olympia.amo.models import ModelBase
ENDPOINTS = ('collections', 'search', 'search-themes')
ENDPOINT_CHOICES = tuple((ty, ty) for ty in ENDPOINTS)
class Shelf(ModelBase):
    """A homepage shelf: a titled, criteria-driven list of add-ons."""

    title = models.CharField(max_length=200)
    # API endpoint the criteria below is interpreted against.
    endpoint = models.CharField(
        max_length=200, choices=ENDPOINT_CHOICES, db_column='shelf_type'
    )
    criteria = models.CharField(
        max_length=200,
        help_text='e.g., "?promoted=recommended&sort=random&type=extension" '
        'or the collection slug',
    )
    footer_text = models.CharField(
        max_length=200, blank=True, help_text='e.g., See more recommended extensions'
    )
    footer_pathname = models.CharField(
        max_length=255,
        blank=True,
        help_text='e.g., collections/4757633/privacy-matters',
    )
    addon_count = models.PositiveSmallIntegerField(
        default=0,
        help_text='0 means the default number (4, or 3 for search-themes) of add-ons '
        'will be included in shelf responses. Set to override.',
    )

    class Meta:
        verbose_name_plural = 'shelves'

    def __str__(self):
        return self.title

    def get_count(self):
        """Number of add-ons to return; addon_count == 0 means the default."""
        return self.addon_count or (3 if self.endpoint in ('search-themes',) else 4)
class ShelfManagement(ModelBase):
    """Ordering and visibility of a Shelf on the homepage."""

    shelf = models.OneToOneField(Shelf, on_delete=models.CASCADE)
    enabled = models.BooleanField(default=False)
    position = models.PositiveIntegerField(default=0)

    def __str__(self):
        return str(self.shelf)

    class Meta:
        verbose_name_plural = 'homepage shelves'
        # At most one shelf per (enabled, position) pair.
        constraints = [
            models.UniqueConstraint(fields=('enabled', 'position'), name='position_id')
        ]
| from django.db import models
from olympia.amo.models import ModelBase
ENDPOINTS = ('collections', 'search', 'search-themes')
ENDPOINT_CHOICES = tuple((ty, ty) for ty in ENDPOINTS)
class Shelf(ModelBase):
title = models.CharField(max_length=200)
endpoint = models.CharField(
max_length=200, choices=ENDPOINT_CHOICES, db_column='shelf_type'
)
criteria = models.CharField(
max_length=200,
help_text='e.g., "?promoted=recommended&sort=random&type=extension" '
'or the collection slug',
)
footer_text = models.CharField(
max_length=200, blank=True, help_text='e.g., See more recommended extensions'
)
footer_pathname = models.CharField(
max_length=255,
blank=True,
help_text='e.g., collections/4757633/privacy-matters',
)
addon_count = models.PositiveSmallIntegerField(
default=0,
help_text='0 means the default number (4, or 3 for search-themes) of add-ons '
'will included be in shelf responses. Set to override.',
)
class Meta:
verbose_name_plural = 'shelves'
def __str__(self):
return self.title
def get_count(self):
return self.addon_count or (3 if self.endpoint in ('search-themes',) else 4)
class ShelfManagement(ModelBase):
shelf = models.OneToOneField(Shelf, on_delete=models.CASCADE)
enabled = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __str__(self):
return str(self.shelf)
class Meta:
verbose_name_plural = 'homepage shelves'
constraints = [
models.UniqueConstraint(fields=('enabled', 'position'), name='position_id')
]
| bsd-3-clause | Python |
bacd05f50574a3149d5e765be8821f66bd14191b | Update for rpm based systems | Vicente-Cheng/ceph-deploy,trhoden/ceph-deploy,trhoden/ceph-deploy,osynge/ceph-deploy,dachary/ceph-deploy,branto1/ceph-deploy,zhouyuan/ceph-deploy,dachary/ceph-deploy,osynge/ceph-deploy,branto1/ceph-deploy,ghxandsky/ceph-deploy,ddiss/ceph-deploy,shenhequnying/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,imzhulei/ceph-deploy,ceph/ceph-deploy,alfredodeza/ceph-deploy,rtulke/ceph-deploy,Vicente-Cheng/ceph-deploy,ceph/ceph-deploy,isyippee/ceph-deploy,rtulke/ceph-deploy,jumpstarter-io/ceph-deploy,SUSE/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,codenrhoden/ceph-deploy,codenrhoden/ceph-deploy,ktdreyer/ceph-deploy,SUSE/ceph-deploy,shenhequnying/ceph-deploy,imzhulei/ceph-deploy,ghxandsky/ceph-deploy,zhouyuan/ceph-deploy,ktdreyer/ceph-deploy,jumpstarter-io/ceph-deploy,isyippee/ceph-deploy,ddiss/ceph-deploy,alfredodeza/ceph-deploy | ceph_deploy/lsb.py | ceph_deploy/lsb.py | def lsb_release():
import subprocess
args = [ 'which', 'lsb_release', ]
p = subprocess.Popen(
args=args,
stdout=subprocess.PIPE,
)
distro = p.stdout.read()
ret = p.wait()
if ret != 0:
raise RuntimeError('lsb_release not found on host')
args = [ 'lsb_release', '-s', '-i' ]
p = subprocess.Popen(
args=args,
stdout=subprocess.PIPE,
)
distro = p.stdout.read()
ret = p.wait()
if ret != 0:
raise subprocess.CalledProcessError(ret, args, output=out)
if distro == '':
raise RuntimeError('lsb_release gave invalid output for distro')
args = [ 'lsb_release', '-s', '-r', ]
p = subprocess.Popen(
args=args,
stdout=subprocess.PIPE,
)
release = p.stdout.read()
ret = p.wait()
if ret != 0:
raise subprocess.CalledProcessError(ret, args, output=out)
if release == '':
raise RuntimeError('lsb_release gave invalid output for release')
args = [ 'lsb_release', '-s', '-c', ]
p = subprocess.Popen(
args=args,
stdout=subprocess.PIPE,
)
codename = p.stdout.read()
ret = p.wait()
if ret != 0:
raise subprocess.CalledProcessError(ret, args, output=out)
if codename == '':
raise RuntimeError('lsb_release gave invalid output for codename')
return (distro.rstrip(), release.rstrip(), codename.rstrip())
def choose_init(distro, codename):
if distro == 'Ubuntu':
return 'upstart'
return 'sysvinit'
|
def lsb_release():
import subprocess
args = [
'lsb_release',
'-s',
'-i',
'-r',
'-c',
]
p = subprocess.Popen(
args=args,
stdout=subprocess.PIPE,
)
out = p.stdout.read()
ret = p.wait()
if ret != 0:
raise subprocess.CalledProcessError(ret, args, output=out)
try:
(distro, release, codename, empty) = out.split('\n', 3)
except ValueError:
raise RuntimeError('lsb_release gave invalid output')
if empty != '':
raise RuntimeError('lsb_release gave invalid output')
return (distro, release, codename)
def choose_init(distro, codename):
if distro == 'Ubuntu':
return 'upstart'
return 'sysvinit'
| mit | Python |
f86578c88e61b81cd5bdbf5e1cd6841aeabaf0e6 | Use module locator | zaibacu/wutu,zaibacu/wutu,zaibacu/wutu | wutu/tests/modules/test_module/test_module.py | wutu/tests/modules/test_module/test_module.py | from wutu.module import Module, module_locator
from wutu.util import load_js
class TestModule(Module):
def __init__(self):
super(TestModule, self).__init__()
def ping(self):
return "pong"
def get(self, id):
return {"result": id}
def get_identifier(self):
return ["id"]
def get_controller(self):
return load_js("controller.js", module_locator)
| from wutu.module import Module
from wutu.util import load_js
from test_util import test_locator
class TestModule(Module):
def __init__(self):
super(TestModule, self).__init__()
def ping(self):
return "pong"
def get(self, id):
return {"result": id}
def get_identifier(self):
return ["id"]
def get_controller(self):
return load_js("test_module/controller.js", test_locator)
| mit | Python |
d4a8720d9f8ce9e7b73dfd804e45f3cdb9bd6ca1 | Update database.py | sylarcp/anita,sylarcp/anita,mcollins12321/anita,sylarcp/anita,mcollins12321/anita,mcollins12321/anita,mcollins12321/anita,sylarcp/anita,sylarcp/anita,mcollins12321/anita | app/database.py | app/database.py | from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
engine0 = create_engine('postgresql://gui:AniTa08@67.239.76.218/anita_0710a', convert_unicode=True)
Base = declarative_base()
# engine0 seems can be any database, does not affect the Base.query we use.
Base.metadata.bind=engine0
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import app.models
# Base.metadata.create_all(bind=engine)
#autoload the database
# Base.metadata.reflect(bind=engine)
| from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
engine0 = create_engine('postgresql://gui:AniTa08@128.175.112.125/anita_0116d', convert_unicode=True)
Base = declarative_base()
# engine0 seems can be any database, does not affect the Base.query we use.
Base.metadata.bind=engine0
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import app.models
# Base.metadata.create_all(bind=engine)
#autoload the database
# Base.metadata.reflect(bind=engine)
| mit | Python |
a4dd311658e555403b0587983be6f0e9765e46e6 | Update pullover.py | Heads-and-Hands/pullover | pullover.py | pullover.py | #!/usr/bin/env python
import sys, getopt
from flask import Flask
from flask import request
import subprocess
from config import repos
def main(argv):
try:
opts, args = getopt.getopt(argv,"hr")
except getopt.GetoptError:
print_help()
sys.exit(2)
for opt, arg in opts:
if opt == '-r':
run_service()
elif opt == '-h':
print_help()
sys.exit(2)
def print_help():
print '-r - Run service'
def run_service():
print 'Runnin service...'
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
repo_id = request.args.get('key')
current_repo = repos.get(repo_id)
remote_name = current_repo.get('remote_name')
remote_branch = current_repo.get('remote_branch')
local_dir = current_repo.get('local_dir')
cmd = ["cd %s && git reset --hard && git pull %s %s" % (local_dir, remote_name, remote_branch),""]
p = subprocess.Popen(cmd, shell=True, close_fds=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out,err = p.communicate()
return out
app.run(host='0.0.0.0')
if __name__ == "__main__":
main(sys.argv[1:])
# app.run(host='0.0.0.0')
| #!/usr/bin/env python
import sys, getopt
from flask import Flask
from flask import request
import subprocess
from config import repos
def main(argv):
try:
opts, args = getopt.getopt(argv,"hr")
except getopt.GetoptError:
print_help()
sys.exit(2)
for opt, arg in opts:
if opt == '-r':
run_service()
elif opt == '-h':
print_help()
sys.exit(2)
def print_help():
print '-r - Run service'
def run_service():
print 'Runnin service...'
app = Flask(__name__)
@app.route("/", methods=['GET'])
def hello():
repo_id = request.args.get('key')
current_repo = repos.get(repo_id)
remote_name = current_repo.get('remote_name')
remote_branch = current_repo.get('remote_branch')
local_dir = current_repo.get('local_dir')
cmd = ["cd %s && git reset --hard && git pull %s %s" % (local_dir, remote_name, remote_branch),""]
p = subprocess.Popen(cmd, shell=True, close_fds=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out,err = p.communicate()
return out
app.run(host='0.0.0.0')
if __name__ == "__main__":
main(sys.argv[1:])
# app.run(host='0.0.0.0')
| mit | Python |
e3464414886ec28fe8c989ea0af0b310ef176a0a | Remove commented code. | andela-akiura/bucketlist | app/database.py | app/database.py | """This module initialises database transactions."""
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from config.config import Config
engine = create_engine(Config.DATABASE_URI, convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
"""Initialize database."""
import app.models
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
| """This module initialises database transactions."""
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from config.config import Config
engine = create_engine(Config.DATABASE_URI, convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
"""initialises database."""
import app.models
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
| mit | Python |
869597b8f874f8a6bea0a47115e839e8681532f1 | Use pillow, not PIL | akgrant43/storagemgr,akgrant43/storagemgr | storagemgr/storage/smhash.py | storagemgr/storage/smhash.py | import hashlib
from PIL import Image
from logger import init_logging
logger = init_logging(__name__)
def smhash(fn):
"""Try and return the hash of just the image data.
If not, the entire file."""
try:
img = Image.open(fn)
img_data = img.tostring()
hasher = hashlib.sha256()
hasher.update(img_data)
digest = hasher.hexdigest()
logger.debug("Successfully used image digest: {0} -> {1}".format(
fn, digest))
except IOError:
digest = None
if digest is None:
hasher = hashlib.sha256()
read_size = hasher.block_size * 1024
with open(fn, 'rb') as fp:
while True:
buf = fp.read(read_size)
if len(buf) == 0:
break
hasher.update(buf)
digest = hasher.hexdigest()
logger.debug("Fallback file digest: {0} -> {1}".format(fn, digest))
return digest
| import hashlib
import Image
from logger import init_logging
logger = init_logging(__name__)
def smhash(fn):
"""Try and return the hash of just the image data.
If not, the entire file."""
try:
img = Image.open(fn)
img_data = img.tostring()
hasher = hashlib.sha256()
hasher.update(img_data)
digest = hasher.hexdigest()
logger.debug("Successfully used image digest: {0} -> {1}".format(
fn, digest))
except IOError:
digest = None
if digest is None:
hasher = hashlib.sha256()
read_size = hasher.block_size * 1024
with open(fn, 'rb') as fp:
while True:
buf = fp.read(read_size)
if len(buf) == 0:
break
hasher.update(buf)
digest = hasher.hexdigest()
logger.debug("Fallback file digest: {0} -> {1}".format(fn, digest))
return digest
| apache-2.0 | Python |
9c69e19225d98afaca6ca129ac1c9a4013e39e35 | fix collide.hit file location. | joetsoi/moonstone,joetsoi/moonstone | assets/files.py | assets/files.py | from pathlib import Path, PureWindowsPath
from pygame.mixer import Sound
from resources.cmp import CmpFile
from resources.font import FontFile
from resources.piv import PivFile
from settings import MOONSTONE_DIR
from resources.terrain import TerrainFile
from .collide import parse_collision_file
def load_file(file_type, filename):
file_path = Path(MOONSTONE_DIR) / PureWindowsPath(filename)
with open(file_path, 'rb') as f:
data = f.read()
return file_type(data)
def load_collision_file(filename):
file_path = Path(MOONSTONE_DIR) / PureWindowsPath(filename)
with open(file_path, 'r') as f:
return parse_collision_file(f)
def load_sound(filename):
file_path = Path(MOONSTONE_DIR) / PureWindowsPath(filename)
return Sound(str(file_path))
backgrounds = {
'ch': load_file(PivFile, 'DISKB\CH.PIV'),
'mindscape': load_file(PivFile, 'DISKA\MINDSCAP'),
'wab1': load_file(PivFile, 'DISKB\WAB1.CMP'),
}
scenery_files = {
'fo1': load_file(CmpFile, 'DISKB\FO1.CMP'),
'fo2': load_file(CmpFile, 'DISKB\FO2.CMP'),
'sw1': load_file(CmpFile, 'DISKB\SW1.CMP'),
'wa1': load_file(CmpFile, 'DISKB\WA1.CMP'),
}
fonts = {
'bold': load_file(FontFile, 'DISKA\BOLD.F'),
'small': load_file(FontFile, 'DISKB\SMALL.FON'),
}
objects = {
'sel': load_file(FontFile, 'DISKB\SEL.CEL'),
'kn1': load_file(FontFile, 'DISKB\KN1.OB'),
'kn2': load_file(FontFile, 'DISKB\KN2.OB'),
'kn3': load_file(FontFile, 'DISKB\KN3.OB'),
'kn4': load_file(FontFile, 'DISKB\KN4.OB'),
}
terrain = {
'wa1': load_file(TerrainFile, 'DISKB\WA1.T'),
'wa2': load_file(TerrainFile, 'DISKB\WA2.T'),
'wa3': load_file(TerrainFile, 'DISKB\WA3.T'),
}
collide_hit = load_collision_file("DISKB\COLLIDE.HIT")
sounds = {
'hit3': 'SAMPLES\\HIT3',
'grnt1': 'SAMPLES\\GRNT1',
'grnt3': 'SAMPLES\\GRNT3',
'grnt3b': 'SAMPLES\\GRNT3B',
'kstep': 'SAMPLES\\KSTEP',
'rjgrunt4': 'SAMPLES\\RJGRUNT4',
'swish': 'SAMPLES\\SWISH',
'swordcl': 'SAMPLES\\SWORDCL',
}
| from pathlib import Path, PureWindowsPath
from pygame.mixer import Sound
from resources.cmp import CmpFile
from resources.font import FontFile
from resources.piv import PivFile
from settings import MOONSTONE_DIR
from resources.terrain import TerrainFile
from .collide import parse_collision_file
def load_file(file_type, filename):
file_path = Path(MOONSTONE_DIR) / PureWindowsPath(filename)
with open(file_path, 'rb') as f:
data = f.read()
return file_type(data)
def load_collision_file(filename):
file_path = Path(MOONSTONE_DIR) / PureWindowsPath(filename)
with open(file_path, 'r') as f:
return parse_collision_file(f)
def load_sound(filename):
file_path = Path(MOONSTONE_DIR) / PureWindowsPath(filename)
return Sound(str(file_path))
backgrounds = {
'ch': load_file(PivFile, 'DISKB\CH.PIV'),
'mindscape': load_file(PivFile, 'DISKA\MINDSCAP'),
'wab1': load_file(PivFile, 'DISKB\WAB1.CMP'),
}
scenery_files = {
'fo1': load_file(CmpFile, 'DISKB\FO1.CMP'),
'fo2': load_file(CmpFile, 'DISKB\FO2.CMP'),
'sw1': load_file(CmpFile, 'DISKB\SW1.CMP'),
'wa1': load_file(CmpFile, 'DISKB\WA1.CMP'),
}
fonts = {
'bold': load_file(FontFile, 'DISKA\BOLD.F'),
'small': load_file(FontFile, 'DISKB\SMALL.FON'),
}
objects = {
'sel': load_file(FontFile, 'DISKB\SEL.CEL'),
'kn1': load_file(FontFile, 'DISKB\KN1.OB'),
'kn2': load_file(FontFile, 'DISKB\KN2.OB'),
'kn3': load_file(FontFile, 'DISKB\KN3.OB'),
'kn4': load_file(FontFile, 'DISKB\KN4.OB'),
}
terrain = {
'wa1': load_file(TerrainFile, 'DISKB\WA1.T'),
'wa2': load_file(TerrainFile, 'DISKB\WA2.T'),
'wa3': load_file(TerrainFile, 'DISKB\WA3.T'),
}
collide_hit = load_collision_file("COLLIDE.HIT")
sounds = {
'hit3': 'SAMPLES\\HIT3',
'grnt1': 'SAMPLES\\GRNT1',
'grnt3': 'SAMPLES\\GRNT3',
'grnt3b': 'SAMPLES\\GRNT3B',
'kstep': 'SAMPLES\\KSTEP',
'rjgrunt4': 'SAMPLES\\RJGRUNT4',
'swish': 'SAMPLES\\SWISH',
'swordcl': 'SAMPLES\\SWORDCL',
}
| agpl-3.0 | Python |
2cfa2ef22dae4d0dc434bd79aa849033b8ed53d3 | update version | jvrana/Pillowtalk | pillowtalk/__init__.py | pillowtalk/__init__.py | __version__ = "1.0.7dev"
from .base import *
from .schemas import *
from .relationship import *
from .session import SessionManager
from .exceptions import *
from marshmallow import pprint
| __version__ = "1.0.6dev"
from .base import *
from .schemas import *
from .relationship import *
from .session import SessionManager
from .exceptions import *
from marshmallow import pprint
| mit | Python |
b0e7168ce5182272691e22122acb7be3800cfa13 | remove unused code | zdw/xos,wathsalav/xos,opencord/xos,open-cloud/xos,opencord/xos,xmaruto/mcord,opencord/xos,cboling/xos,xmaruto/mcord,cboling/xos,cboling/xos,xmaruto/mcord,jermowery/xos,jermowery/xos,xmaruto/mcord,wathsalav/xos,cboling/xos,wathsalav/xos,zdw/xos,jermowery/xos,open-cloud/xos,open-cloud/xos,cboling/xos,wathsalav/xos,zdw/xos,jermowery/xos,zdw/xos | planetstack/observer/steps/sync_site_privileges.py | planetstack/observer/steps/sync_site_privileges.py | import os
import base64
from django.db.models import F, Q
from planetstack.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models import User, UserDeployments, SitePrivilege, SiteDeployments
class SyncSitePrivileges(OpenStackSyncStep):
requested_interval=0
provides=[SitePrivilege]
def fetch_pending(self):
return SitePrivilege.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, site_priv):
# sync site privileges at all site deployments
site_deployments = SiteDeployments.objects.filter(site=site_priv.site)
for site_deployment in site_deployments:
user_deployments = UserDeployments.objects.filter(deployment=site_deployment.deployment)
if user_deployments:
kuser_id = user_deployments[0].kuser_id
driver = self.driver.admin_driver(deployment=site_deployment.deployment.name)
driver.add_user_role(kuser_id,
site_deployment.tenant_id,
site_priv.role.role)
| import os
import base64
from django.db.models import F, Q
from planetstack.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models import User, UserDeployments, SitePrivilege, SiteDeployments
class SyncSitePrivileges(OpenStackSyncStep):
requested_interval=0
provides=[SitePrivilege]
def fetch_pending(self):
return SitePrivilege.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, site_priv):
if site_priv.user.kuser_id and site_priv.site.tenant_id:
self.driver.add_user_role(site_priv.user.kuser_id,
site_priv.site.tenant_id,
site_priv.role.role)
# sync site privileges at all site deployments
site_deployments = SiteDeployments.objects.filter(site=site_priv.site)
for site_deployment in site_deployments:
user_deployments = UserDeployments.objects.filter(deployment=site_deployment.deployment)
if user_deployments:
kuser_id = user_deployments[0].kuser_id
driver = self.driver.admin_driver(deployment=site_deployment.deployment.name)
driver.add_user_role(kuser_id,
site_deployment.tenant_id,
site_priv.role.role)
| apache-2.0 | Python |
4355006067f781d502486210655428d665287c3a | change base_profile test from none to empty string | masschallenge/django-accelerator,masschallenge/django-accelerator | accelerator/tests/test_base_profile.py | accelerator/tests/test_base_profile.py | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
from django.test import TestCase
from accelerator.tests.factories import MemberFactory
class TestBaseProfile(TestCase):
def test_str_is_first_and_last_name(self):
member = MemberFactory()
display = member.baseprofile.__str__()
self.assertNotIn(member.email, display)
self.assertIn(member.first_name, display)
self.assertIn(member.last_name, display)
def test_str_defaults_to_email_if_first_or_last_names_are_missing(
self):
member = MemberFactory()
member.first_name = ''
member.save()
display = member.baseprofile.__str__()
self.assertIn(member.email, display)
self.assertNotIn(member.last_name, display)
| # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
from django.test import TestCase
from accelerator.tests.factories import MemberFactory
class TestBaseProfile(TestCase):
def test_str_is_first_and_last_name(self):
member = MemberFactory()
display = member.baseprofile.__str__()
self.assertNotIn(member.email, display)
self.assertIn(member.first_name, display)
self.assertIn(member.last_name, display)
def test_str_defaults_to_username_if_first_or_last_names_are_missing(
self):
member = MemberFactory()
member.first_name = None
member.save()
display = member.baseprofile.__str__()
self.assertIn(member.email, display)
self.assertNotIn(member.last_name, display)
| mit | Python |
19e5b75ea80e2623d96ab5620224f9cd17a066ea | Order testimonials by date | ACLARKNET/aclarknet-django,ACLARKNET/aclarknet-django | aclarknet/aclarknet/aclarknet/views.py | aclarknet/aclarknet/aclarknet/views.py | from django.shortcuts import render
from aclarknet.aclarknet.models import Client
from aclarknet.aclarknet.models import Service
from aclarknet.aclarknet.models import TeamMember
from aclarknet.aclarknet.models import Testimonial
def clients(request):
clients = Client.objects.all()
context = {'clients': clients}
return render(request, 'clients.html', context)
def home(request):
testimonials = Testimonial.objects.order_by('?')
testimonial = None
if len(testimonials) > 0:
testimonial = testimonials[0]
context = {'testimonial': testimonial}
return render(request, 'home.html', context)
def projects(request):
return render(request, 'projects.html')
def services(request):
services = Service.objects.all()
context = {'services': services}
return render(request, 'services.html', context)
def team(request):
members = TeamMember.objects.all()
context = {'members': members}
return render(request, 'team.html', context)
def testimonials(request):
testimonials = Testimonial.objects.order_by('date')
context = {'testimonials': testimonials}
return render(request, 'testimonials.html', context)
def about(request):
return render(request, 'about.html')
def contact(request):
return render(request, 'contact.html')
| from django.shortcuts import render
from aclarknet.aclarknet.models import Client
from aclarknet.aclarknet.models import Service
from aclarknet.aclarknet.models import TeamMember
from aclarknet.aclarknet.models import Testimonial
def clients(request):
clients = Client.objects.all()
context = {'clients': clients}
return render(request, 'clients.html', context)
def home(request):
testimonials = Testimonial.objects.order_by('?')
testimonial = None
if len(testimonials) > 0:
testimonial = testimonials[0]
context = {'testimonial': testimonial}
return render(request, 'home.html', context)
def projects(request):
return render(request, 'projects.html')
def services(request):
services = Service.objects.all()
context = {'services': services}
return render(request, 'services.html', context)
def team(request):
members = TeamMember.objects.all()
context = {'members': members}
return render(request, 'team.html', context)
def testimonials(request):
testimonials = Testimonial.objects.all()
context = {'testimonials': testimonials}
return render(request, 'testimonials.html', context)
def about(request):
return render(request, 'about.html')
def contact(request):
return render(request, 'contact.html')
| mit | Python |
3898ce548aff92ac23617d39b928448f6a6ef9b6 | fix heading | felipeacsi/python-acoustics,python-acoustics/python-acoustics | acoustics/standards/iso_1996_1_2003.py | acoustics/standards/iso_1996_1_2003.py | """
ISO 1996-1:2003
===============
ISO 1996-1:2003 defines the basic quantities to be used for the description of
noise in community environments and describes basic assessment procedures. It
also specifies methods to assess environmental noise and gives guidance on
predicting the potential annoyance response of a community to long-term exposure
from various types of environmental noises. The sound sources can be separate or
in various combinations. Application of the method to predict annoyance response
is limited to areas where people reside and to related long-term land uses.
"""
import numpy as np
def composite_rating_level(levels, hours, adjustment):
"""Composite rating level.
:params levels: Level per period.
:params hours: Amount of hours per period.
:params adjustment: Adjustment per period.
Composite whole-day rating levels are calculated as
.. math:: L_R = 10 \\log{\\left[ \\sum_i \\frac{d_i}{24} 10^{(L_i+K_i)/10} \\right]}
where :math:`i` is a period. See equation 6 and 7 of the standard.
.. note:: Summation is done over the last axis.
"""
levels = np.asarray(levels)
hours = np.asarray(hours)
adjustment = np.asarray(adjustment)
return 10.0 * np.log10( (hours/24.0 * 10.0**((levels+adjustment)/10.0)).sum(axis=-1))
| """
ISO 1996-1-2003
===============
ISO 1996-1:2003 defines the basic quantities to be used for the description of
noise in community environments and describes basic assessment procedures. It
also specifies methods to assess environmental noise and gives guidance on
predicting the potential annoyance response of a community to long-term exposure
from various types of environmental noises. The sound sources can be separate or
in various combinations. Application of the method to predict annoyance response
is limited to areas where people reside and to related long-term land uses.
"""
import numpy as np
def composite_rating_level(levels, hours, adjustment):
"""Composite rating level.
:params levels: Level per period.
:params hours: Amount of hours per period.
:params adjustment: Adjustment per period.
Composite whole-day rating levels are calculated as
.. math:: L_R = 10 \\log{\\left[ \\sum_i \\frac{d_i}{24} 10^{(L_i+K_i)/10} \\right]}
where :math:`i` is a period. See equation 6 and 7 of the standard.
.. note:: Summation is done over the last axis.
"""
levels = np.asarray(levels)
hours = np.asarray(hours)
adjustment = np.asarray(adjustment)
return 10.0 * np.log10( (hours/24.0 * 10.0**((levels+adjustment)/10.0)).sum(axis=-1))
| bsd-3-clause | Python |
462c384b3dc56865a145d2fb922b86975299edbc | Bump version to 0.0.0 | clchiou/iga,clchiou/iga,clchiou/iga,clchiou/iga | iga/__init__.py | iga/__init__.py | __version__ = '0.0.0'
__author__ = 'Che-Liang Chiou'
__author_email__ = 'clchiou@gmail.com'
__copyright__ = 'Copyright 2015, Che-Liang Chiou'
__license__ = 'MIT'
__all__ = [
'workspace',
]
def workspace(**kwargs):
import iga.context
iga.context.set_global_context(**kwargs)
| __version__ = '0.0.0-dev'
__author__ = 'Che-Liang Chiou'
__author_email__ = 'clchiou@gmail.com'
__copyright__ = 'Copyright 2015, Che-Liang Chiou'
__license__ = 'MIT'
__all__ = [
'workspace',
]
def workspace(**kwargs):
import iga.context
iga.context.set_global_context(**kwargs)
| mit | Python |
c6338c72869a73b9baa0f74d16206e64437eef41 | Update reverse_with_singledispatch.py | vladshults/python_modules,vladshults/python_modules | job_interview_algs/reverse_with_singledispatch.py | job_interview_algs/reverse_with_singledispatch.py | import functools
def swap_it(arr):
i = 0
j = -1
for _ in range(len(arr)//2):
arr[i], arr[j] = arr[j], arr[i]
i += 1
j -= 1
return arr
@functools.singledispatch
def reverse_it(seq):
return swap_it(seq)
@reverse_it.register(list)
def _(arr):
return swap_it(arr)
@reverse_it.register(str)
def _(text):
arr = list(text)
return ''.join(swap_it(arr))
@reverse_it.register(tuple)
def _(t):
arr = list(t)
return tuple(swap_it(arr))
if __name__ == "__main__":
l1 = [1, 2, 3, 4, 5, 6, 7, 8]
l2 = ('раз', 'два', 'три', 'четыре', 'пять', )
l3 = 'abracadabra'
print(reverse_it(l1))
print(reverse_it(l2))
print(reverse_it(l3))
| import functools
def swap_it(arr):
i = 0
j = -1
for _ in range(len(arr)//2):
arr[i], arr[j] = arr[j], arr[i]
i += 1
j -= 1
return arr
@functools.singledispatch
def reverse_it(seq):
return swap_it(seq)
@reverse_it.register(list)
def _(arr):
return swap_it(arr)
@reverse_it.register(str)
def _(text):
s = list(text)
return ''.join(swap_it(s))
@reverse_it.register(tuple)
def _(t):
s = list(t)
return tuple(swap_it(s))
if __name__ == "__main__":
l1 = [1, 2, 3, 4, 5, 6, 7, 8]
l2 = ('раз', 'два', 'три', 'четыре', 'пять', )
l3 = 'abracadabra'
print(reverse_it(l1))
print(reverse_it(l2))
print(reverse_it(l3))
| mit | Python |
3fd2d1cade716f264b2febc3627b1443a1d3e604 | Fix a problem with a migration between master and stable branch | taigaio/taiga-back,dayatz/taiga-back,xdevelsistemas/taiga-back-community,taigaio/taiga-back,taigaio/taiga-back,dayatz/taiga-back,dayatz/taiga-back,xdevelsistemas/taiga-back-community,xdevelsistemas/taiga-back-community | taiga/projects/migrations/0043_auto_20160530_1004.py | taiga/projects/migrations/0043_auto_20160530_1004.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-05-30 10:04
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('projects', '0040_remove_memberships_of_cancelled_users_acounts'),
]
operations = [
migrations.AlterField(
model_name='project',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-05-30 10:04
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('projects', '0042_auto_20160525_0911'),
]
operations = [
migrations.AlterField(
model_name='project',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'),
),
]
| agpl-3.0 | Python |
6916a3fb24a12ce3c0261034c1dcaae57a8cd0ee | Add stdout flushing statements to example. | ipython/ipython,ipython/ipython | docs/examples/kernel/task2.py | docs/examples/kernel/task2.py | #!/usr/bin/env python
# encoding: utf-8
from IPython.kernel import client
import time
import sys
flush = sys.stdout.flush
tc = client.TaskClient()
mec = client.MultiEngineClient()
mec.execute('import time')
for i in range(24):
tc.run(client.StringTask('time.sleep(1)'))
for i in range(6):
time.sleep(1.0)
print "Queue status (vebose=False)"
print tc.queue_status()
flush()
for i in range(24):
tc.run(client.StringTask('time.sleep(1)'))
for i in range(6):
time.sleep(1.0)
print "Queue status (vebose=True)"
print tc.queue_status(True)
flush()
for i in range(12):
tc.run(client.StringTask('time.sleep(2)'))
print "Queue status (vebose=True)"
print tc.queue_status(True)
flush()
qs = tc.queue_status(True)
sched = qs['scheduled']
for tid in sched[-4:]:
tc.abort(tid)
for i in range(6):
time.sleep(1.0)
print "Queue status (vebose=True)"
print tc.queue_status(True)
flush()
| #!/usr/bin/env python
# encoding: utf-8
from IPython.kernel import client
import time
tc = client.TaskClient()
mec = client.MultiEngineClient()
mec.execute('import time')
for i in range(24):
tc.run(client.StringTask('time.sleep(1)'))
for i in range(6):
time.sleep(1.0)
print "Queue status (vebose=False)"
print tc.queue_status()
for i in range(24):
tc.run(client.StringTask('time.sleep(1)'))
for i in range(6):
time.sleep(1.0)
print "Queue status (vebose=True)"
print tc.queue_status(True)
for i in range(12):
tc.run(client.StringTask('time.sleep(2)'))
print "Queue status (vebose=True)"
print tc.queue_status(True)
qs = tc.queue_status(True)
sched = qs['scheduled']
for tid in sched[-4:]:
tc.abort(tid)
for i in range(6):
time.sleep(1.0)
print "Queue status (vebose=True)"
print tc.queue_status(True)
| bsd-3-clause | Python |
491c51005ca42dcdb4410b5e7b3afe0822bd509a | Use integer cast instead of case expression | shadowoneau/skylines,shadowoneau/skylines,skylines-project/skylines,skylines-project/skylines,Turbo87/skylines,RBE-Avionik/skylines,Harry-R/skylines,shadowoneau/skylines,RBE-Avionik/skylines,RBE-Avionik/skylines,RBE-Avionik/skylines,skylines-project/skylines,snip/skylines,Harry-R/skylines,Harry-R/skylines,snip/skylines,Turbo87/skylines,skylines-project/skylines,kerel-fs/skylines,snip/skylines,TobiasLohner/SkyLines,Turbo87/skylines,kerel-fs/skylines,TobiasLohner/SkyLines,Harry-R/skylines,TobiasLohner/SkyLines,shadowoneau/skylines,kerel-fs/skylines,Turbo87/skylines | scripts/search.py | scripts/search.py | #!/usr/bin/python
import sys
from sqlalchemy import desc, literal_column, cast, Integer
from skylines.config import environment
from skylines import model
NULL = literal_column(str(0))
def ilike_as_int(column, value, relevance):
# Make sure relevance is numeric and we can safely
# pass it to the literal_column()
assert isinstance(relevance, (int, float))
# Convert relevance to a literal_column()
relevance = literal_column(str(relevance))
# Return ilike expression
return cast(column.ilike(value), Integer) * relevance
def ilikes_as_int(col_vals):
return sum([ilike_as_int(col, val, rel) for col, val, rel in col_vals], NULL)
environment.load_from_file()
tokens = sys.argv[1:]
session = model.DBSession
def get_query(type, model, query_attr, tokens):
query_attr = getattr(model, query_attr)
col_vals = []
# Matches token exactly
col_vals.extend([(query_attr, '{}'.format(token), len(token) * 5) for token in tokens])
# Begins with token
col_vals.extend([(query_attr, '{}%'.format(token), len(token) * 3) for token in tokens])
# Has token at word start
col_vals.extend([(query_attr, '% {}%'.format(token), len(token) * 2) for token in tokens])
# Has token
col_vals.extend([(query_attr, '%{}%'.format(token), len(token)) for token in tokens])
relevance = ilikes_as_int(col_vals)
# The search result type
type = literal_column('\'{}\''.format(type))
return session.query(type.label('type'),
model.id.label('id'),
query_attr.label('name'),
relevance.label('relevance')).filter(relevance > NULL)
def search_query(tokens):
if len(tokens) > 1:
tokens.append(' '.join(tokens))
q1 = get_query('user', model.User, 'name', tokens)
q2 = get_query('club', model.Club, 'name', tokens)
q3 = get_query('airport', model.Airport, 'name', tokens)
return q1.union(q2, q3).order_by(desc('relevance'))
for u in search_query(tokens).limit(20):
print u
| #!/usr/bin/python
import sys
from sqlalchemy import case, desc, literal_column
from skylines.config import environment
from skylines import model
NULL = literal_column(str(0))
def ilike_as_int(column, value, relevance):
# Make sure relevance is numeric and we can safely
# pass it to the literal_column()
assert isinstance(relevance, (int, float))
# Convert relevance to a literal_column()
relevance = literal_column(str(relevance))
# Return case expression
return case([(column.ilike(value), relevance)], else_=NULL)
def ilikes_as_int(col_vals):
return sum([ilike_as_int(col, val, rel) for col, val, rel in col_vals], NULL)
environment.load_from_file()
tokens = sys.argv[1:]
session = model.DBSession
def get_query(type, model, query_attr, tokens):
query_attr = getattr(model, query_attr)
col_vals = []
# Matches token exactly
col_vals.extend([(query_attr, '{}'.format(token), len(token) * 5) for token in tokens])
# Begins with token
col_vals.extend([(query_attr, '{}%'.format(token), len(token) * 3) for token in tokens])
# Has token at word start
col_vals.extend([(query_attr, '% {}%'.format(token), len(token) * 2) for token in tokens])
# Has token
col_vals.extend([(query_attr, '%{}%'.format(token), len(token)) for token in tokens])
relevance = ilikes_as_int(col_vals)
# The search result type
type = literal_column('\'{}\''.format(type))
return session.query(type.label('type'),
model.id.label('id'),
query_attr.label('name'),
relevance.label('relevance')).filter(relevance > NULL)
def search_query(tokens):
if len(tokens) > 1:
tokens.append(' '.join(tokens))
q1 = get_query('user', model.User, 'name', tokens)
q2 = get_query('club', model.Club, 'name', tokens)
q3 = get_query('airport', model.Airport, 'name', tokens)
return q1.union(q2, q3).order_by(desc('relevance'))
for u in search_query(tokens).limit(20):
print u
| agpl-3.0 | Python |
a96f02ac57de414d94b73378bc40348616f1926d | Update B_ETA.py | Herpinemmanuel/Oceanography | Cas_4/ETA/B_ETA.py | Cas_4/ETA/B_ETA.py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import cartopy.crs as ccrs
from xmitgcm import open_mdsdataset
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
plt.ion()
dir0 = '/homedata/bderembl/runmit/test_southatlgyre3'
ds0 = open_mdsdataset(dir0,prefix=['Eta'])
nt = 0
while (nt < 1000) :
nt = nt+1
print(nt)
plt.figure(1)
ax = plt.subplot(projection=ccrs.PlateCarree());
ds0['Eta'][nt,:,:].plot.pcolormesh('XC', 'YC',ax=ax,vmin=-5,vmax=5,cmap='ocean')
plt.title('Case 4 : Surface Height Anomaly ')
plt.text(5,5,nt,ha='center',wrap=True)
ax.coastlines()
gl = ax.gridlines(draw_labels=True, alpha = 0.5, linestyle='--');
gl.xlabels_top = False
gl.ylabels_right = False
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
if (nt < 10):
plt.savefig('ETA_cas4-'+'00'+str(nt)+'.png')
plt.clf()
elif (nt > 9) and (nt < 100):
plt.savefig('ETA_cas4-'+'0'+str(nt)+'.png')
plt.clf()
else:
plt.savefig('ETA_cas4-'+str(nt)+'.png')
plt.clf()
| import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import cartopy.crs as ccrs
from xmitgcm import open_mdsdataset
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
plt.ion()
dir0 = '/homedata/bderembl/runmit/test_southatlgyre3'
ds0 = open_mdsdataset(dir0,prefix=['Eta'])
nt = 0
nz = 0
while (nt < 1000) :
nt = nt+1
print(nt)
plt.figure(1)
ax = plt.subplot(projection=ccrs.PlateCarree());
ds0['Eta'][nt,nz,:,:].plot.pcolormesh('XC', 'YC',ax=ax,vmin=-5,vmax=5,cmap='ocean')
plt.title('Case 4 : Surface Height Anomaly ')
plt.text(5,5,nt,ha='center',wrap=True)
ax.coastlines()
gl = ax.gridlines(draw_labels=True, alpha = 0.5, linestyle='--');
gl.xlabels_top = False
gl.ylabels_right = False
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
if (nt < 10):
plt.savefig('ETA_cas4-'+'00'+str(nt)+'.png')
plt.clf()
elif (nt > 9) and (nt < 100):
plt.savefig('ETA_cas4-'+'0'+str(nt)+'.png')
plt.clf()
else:
plt.savefig('ETA_cas4-'+str(nt)+'.png')
plt.clf()
| mit | Python |
a0978d09c5460657fef187ac90ad0eb01b3754bc | update useragent | boklm/tbb-testsuite,boklm/tbb-testsuite,boklm/tbb-testsuite,boklm/tbb-testsuite,boklm/tbb-testsuite | marionette/tor_browser_tests/test_fp_useragent.py | marionette/tor_browser_tests/test_fp_useragent.py | from marionette_driver import By
from marionette_driver.errors import MarionetteException
from marionette import MarionetteTestCase
class Test(MarionetteTestCase):
def test_useragent(self):
with self.marionette.using_context('content'):
self.marionette.navigate('about:robots')
js = self.marionette.execute_script
# Check that useragent string is as expected
# We better know the ESR version we're testing
self.assertEqual("Mozilla/5.0 (Windows NT 6.1; rv:45.0) Gecko/20100101 Firefox/45.0",
js("return navigator.userAgent"))
| from marionette_driver import By
from marionette_driver.errors import MarionetteException
from marionette import MarionetteTestCase
class Test(MarionetteTestCase):
def test_useragent(self):
with self.marionette.using_context('content'):
self.marionette.navigate('about:robots')
js = self.marionette.execute_script
# Check that useragent string is as expected
# We better know the ESR version we're testing
self.assertEqual("Mozilla/5.0 (Windows NT 6.1; rv:38.0) Gecko/20100101 Firefox/38.0",
js("return navigator.userAgent"))
| bsd-3-clause | Python |
afdf5d3ccd3556a0f1204ad9a4b92632ed836898 | fix for querying api | tristantao/py-bing-search,tristantao/html-contact | bingsearch.py | bingsearch.py | import urllib2
import requests
URL = 'https://api.datamarket.azure.com/Bing/Search/v1/Composite' \
+ '?Sources=%(source)s&Query=%(query)s&$top=50&$format=json'
API_KEY = 'SECRET_API_KEY'
def request(query, **params):
url = URL % {'source': urllib2.quote("'web'"),
'query': urllib2.quote("'"+query+"'")}
r = requests.get(url, auth=('', API_KEY))
return r.json()['d']['results']
| import requests
URL = 'https://api.datamarket.azure.com/Data.ashx/Bing/SearchWeb/Web?Query=%(query)s&$top=50&$format=json'
API_KEY = 'SECRET_API_KEY'
def request(query, **params):
r = requests.get(URL % {'query': query}, auth=('', API_KEY))
return r.json['d']['results']
| mit | Python |
b06952bd9ed1fa298c06bcb3199bac648752b73a | Update ECP example | gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf | examples/gto/05-input_ecp.py | examples/gto/05-input_ecp.py | #!/usr/bin/env python
from pyscf import gto, scf
'''
Use gto.basis.parse_ecp and gto.basis.load_ecp functions to input
user-specified ecp functions.
'''
mol = gto.M(atom='''
Na 0. 0. 0.
H 0. 0. 1.''',
basis={'Na':'lanl2dz', 'H':'sto3g'},
ecp = {'Na':'lanl2dz'})
mol = gto.M(atom='''
Na 0. 0. 0.
H 0. 0. 1.''',
basis={'Na':'lanl2dz', 'H':'sto3g'},
ecp = {'Na': gto.basis.parse_ecp('''
Na nelec 10
Na ul
0 2.0000000 6.0000000
1 175.5502590 -10.0000000
2 2.3365719 -6.0637782
2 0.7799867 -0.7299393
Na S
0 243.3605846 3.0000000
1 41.5764759 36.2847626
2 13.2649167 72.9304880
2 0.9764209 6.0123861
Na P
0 1257.2650682 5.0000000
1 189.6248810 117.4495683
2 54.5247759 423.3986704
2 0.9461106 7.1241813
''')})
#
# Burkatzki-Filippi-Dolg pseudo potential and basis are prefixed with "bfd"
#
mol = gto.M(atom='Na 0. 0. 0.; H 0 0 2.',
basis={'Na':'bfd-vtz', 'H':'ccpvdz'},
ecp = {'Na':'bfd-pp'},
verbose=0)
| #!/usr/bin/env python
from pyscf import gto, scf
'''
Use gto.basis.parse_ecp and gto.basis.load_ecp functions to input
user-specified ecp functions.
'''
mol = gto.M(atom='''
Na 0. 0. 0.
H 0. 0. 1.''',
basis={'Na':'lanl2dz', 'H':'sto3g'},
ecp = {'Na':'lanl2dz'})
mol = gto.M(atom='''
Na 0. 0. 0.
H 0. 0. 1.''',
basis={'Na':'lanl2dz', 'H':'sto3g'},
ecp = {'Na': gto.basis.parse_ecp('''
Na nelec 10
Na ul
0 2.0000000 6.0000000
1 175.5502590 -10.0000000
2 2.3365719 -6.0637782
2 0.7799867 -0.7299393
Na S
0 243.3605846 3.0000000
1 41.5764759 36.2847626
2 13.2649167 72.9304880
2 0.9764209 6.0123861
Na P
0 1257.2650682 5.0000000
1 189.6248810 117.4495683
2 54.5247759 423.3986704
2 0.9461106 7.1241813
''')})
| apache-2.0 | Python |
3fabae09c849b6fecf1b1d63eb177cea5c0ca7b3 | clean does it already but refresh still need to | claudio-walser/gitcd,claudio-walser/gitcd | gitcd/interface/cli/refresh.py | gitcd/interface/cli/refresh.py | from gitcd.interface.cli.abstract import BaseCommand
from gitcd.git.branch import Branch
class Refresh(BaseCommand):
updateRemote = True
def run(self, branch: Branch):
remote = self.getRemote()
master = Branch(self.config.getMaster())
if branch.getName() == master.getName():
# maybe i should use recursion here
# if anyone passes master again, i wouldnt notice
branch = Branch('%s%s' % (
branch.getName(),
self.interface.askFor(
"You passed your master branch name as feature branch,\
please give a different branch."
)
))
remote.merge(branch, master)
| from gitcd.interface.cli.abstract import BaseCommand
from gitcd.git.branch import Branch
class Refresh(BaseCommand):
def run(self, branch: Branch):
remote = self.getRemote()
master = Branch(self.config.getMaster())
if branch.getName() == master.getName():
# maybe i should use recursion here
# if anyone passes master again, i wouldnt notice
branch = Branch('%s%s' % (
branch.getName(),
self.interface.askFor(
"You passed your master branch name as feature branch,\
please give a different branch."
)
))
remote.merge(branch, master)
| apache-2.0 | Python |
490872b8296ec8c93b9631fd61bef7070b732db8 | Fix some bugs in the Celery code | oshepherd/eforge,oshepherd/eforge,oshepherd/eforge | eforge/queue/celery.py | eforge/queue/celery.py | # -*- coding: utf-8 -*-
# EForge project management system, Copyright © 2010, Element43
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# Celery task queue backend for EForge deployments. This is what we recommend.
# Fast, efficient, powerful.
from celery import task as ctask
class Future(object):
__slots__ = ('fut', 'res')
def __init__(self, fut):
self.fut = fut
self.res = None
@property
def ready(self):
return self.fut.ready()
def value(self):
if self.res is None:
self.res = self.fut.wait()
return self.res
class BoundTask(object):
__slots__ = ('task', 'conn')
def __init__(self, task):
self.task = task
self.conn = task.get_publisher()
def __call__(self, *args, **kwargs):
return Future(self.task.apply_async(args=args, kwargs=kwargs,
publisher=self.conn))
class Task(ctask.Task):
def __init__(self, fn, **kwargs):
super(Task, self).__init__(self, **kwargs)
self.fn = fn
def run(self, *args, **kwargs):
return self.fn(*args, **kwargs)
def bind(self, **kwargs):
return BoundTask(self)
def __call__(self, *args, **kwargs):
return Future(self.apply_async(args=args, kwargs=kwargs))
def task(**kwargs):
def builder(fn):
return Task(fn, **kwargs)
return builder | # -*- coding: utf-8 -*-
# EForge project management system, Copyright © 2010, Element43
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# Celery task queue backend for EForge deployments. This is what we recommend.
# Fast, efficient, powerful.
from celery import task as ctask
class Future(object):
__slots__ = ('fut', 'res')
def __init__(self, fut):
self.fut = fut
self.res = None
@property
def ready(self):
return self.fut.ready()
def value(self):
if self.res is None:
self.res = self.fut.wait()
return self.res
class BoundTask(object):
__slots__ = ('task', 'conn')
def __init__(self, task):
self.task = task
self.conn = task.get_publisher()
def __call__(self, *args, **kwargs):
return Future(self.task.apply_async(args=*args, kwargs=**kwargs,
publisher=self.conn))
class Task(ctask.Task):
def __init__(self, fn, **kwargs):
super(Task, self).__init__(self, **kwargs)
self.fn = fn
def run(self, *args, **kwargs):
return self.fn(*args, **kwargs))
def bind(self, **kwargs)
return BoundTask(self)
def __call__(self, *args, **kwargs):
return Future(self.apply_async(args=args, kwargs=kwargs))
def task(**kwargs):
def builder(fn):
return Task(fn, **kwargs)
return builder | isc | Python |
5e0016f4a3a54eab6cd71f6e121b9e21d448f923 | Bump patch | egtaonline/quiesce | egta/__init__.py | egta/__init__.py | __version__ = '0.0.22'
| __version__ = '0.0.21'
| apache-2.0 | Python |
c0f6668d64b0d8f098b0962b23ab86e80e01f1d9 | Remove options from command line | pythonindia/junction,nava45/junction,ChillarAnand/junction,ChillarAnand/junction,pythonindia/junction,nava45/junction,nava45/junction,ChillarAnand/junction,pythonindia/junction,ChillarAnand/junction,nava45/junction,pythonindia/junction | junction/tickets/management/commands/fill_data.py | junction/tickets/management/commands/fill_data.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
# Third Party Stuff
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
# Junction Stuff
from junction.tickets.models import Ticket
class Command(BaseCommand):
"""
Read a csv file containing ticket numbers and
fill all the details for it.
"""
@transaction.atomic
def handle(self, *args, **options):
if len(args) != 2:
raise CommandError('Usage: python manage.py fill_data <in_file> <out_file>')
in_file, out_file = args
ticket_nums = [line.rstrip('\n') for line in open(in_file).readlines()]
fh = open(out_file, 'w')
header = ','.join(('ticket_num', 'name', 'email', 'address', '\n'))
fh.write(header)
for ticket_num in ticket_nums:
ticket = Ticket.objects.get(ticket_no=ticket_num)
details = ticket.others
for attendee in details['attendee']:
if attendee['ticketNo'] == ticket_num:
attendee = attendee
break
else:
attendee = {}
if not ticket.address:
ticket.address = ''
data = data = ','.join((ticket_num, ticket.name, attendee['email'], ticket.address, '\n'))
fh.write(data)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import os
import optparse
# Third Party Stuff
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
# Junction Stuff
from junction.tickets.models import Ticket
class Command(BaseCommand):
"""
Read a csv file containing ticket numbers and
fill all the details for it.
"""
option_list = BaseCommand.option_list + (
optparse.make_option(
"-i",
"--infile",
dest = "filename",
help = "specify import file",
metavar = "FILE"
),
)
option_list = option_list + (
optparse.make_option(
"-o",
"--outfile",
dest = "category",
help = "specify output file name",
metavar = "FILE"
),
)
@transaction.atomic
def handle(self, *args, **options):
if len(args) != 2:
raise CommandError('Usage: python manage.py fill_data <in_file> <out_file>')
in_file, out_file = args
ticket_nums = [line.rstrip('\n') for line in open(in_file).readlines()]
fh = open(out_file, 'w')
header = ','.join(('ticket_num', 'name', 'email', 'address', '\n'))
fh.write(header)
for ticket_num in ticket_nums:
ticket = Ticket.objects.get(ticket_no=ticket_num)
details = ticket.others
for attendee in details['attendee']:
if attendee['ticketNo'] == ticket_num:
attendee = attendee
break
else:
attendee = {}
if not ticket.address:
ticket.address = ''
data = data = ','.join((ticket_num, ticket.name, attendee['email'], ticket.address, '\n'))
fh.write(data)
| mit | Python |
73b6db43b80b2c86e9bd7b342c8dcd24aee122fa | update yaml_mako to use standard template renderer | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/renderers/yaml_mako.py | salt/renderers/yaml_mako.py | '''
Process yaml with the Mako templating engine
This renderer will take a yaml file within a mako template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import Third Party libs
from mako.template import Template
from salt.utils.yaml import CustomLoader, load
def render(template, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template):
return {}
tmp_data = salt.utils.templates.mako(
template_file,
True,
salt=__salt__,
grains=__grains__,
opts=__opts__,
pillar=__pillar__,
env=env,
sls=sls)
if not tmp_data.get('result', False):
raise SaltRenderError(tmp_data.get('data',
'Unknown render error in yaml_mako renderer'))
yaml_data = tmp_data['data']
with warnings.catch_warnings(record=True) as warn_list:
data = load(yaml_data, Loader=CustomLoader)
if len(warn_list) > 0:
for item in warn_list:
log.warn("{warn} found in {file_}".format(
warn=item.message, file_=template_file))
return data
| '''
Process yaml with the Mako templating engine
This renderer will take a yaml file within a mako template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import Third Party libs
from mako.template import Template
from salt.utils.yaml import CustomLoader, load
def render(template, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['pillar'] = __pillar__
passthrough['env'] = env
passthrough['sls'] = sls
template = Template(open(template, 'r').read())
yaml_data = template.render(**passthrough)
yaml_data = template.render(**passthrough)
with warnings.catch_warnings(record=True) as warn_list:
data = load(yaml_data, Loader=CustomLoader)
if len(warn_list) > 0:
for item in warn_list:
log.warn("{warn} found in {file_}".format(
warn=item.message, file_=template_file))
return data
| apache-2.0 | Python |
1067e8fbcda84d6c656ccee8e996dc32450c4123 | Fix as_seconds on Windows where tz is present | yaybu/touchdown | touchdown/core/datetime.py | touchdown/core/datetime.py | # Copyright 2011-2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import re
import time
from datetime import datetime, timedelta, tzinfo
from touchdown.core import errors
try:
import pytz
except ImportError:
pytz = None
try:
from dateutil import parser
except ImportError:
parser = None
REGEX_DELTA = re.compile(
r'(\d+)\s?(m|minute|minutes|h|hour|hours|d|day|days|w|weeks|weeks)(?: ago)?'
)
UNITS = {
'm': 60,
'h': 60 * 60,
'd': 60 * 60 * 24,
'w': 60 * 60 * 24 * 7,
}
if not pytz:
class UTC(tzinfo):
def __repr__(self):
return '<UTC>'
def utcoffset(self, value):
return timedelta(0)
def tzname(self, value):
return 'UTC'
def dst(self, value):
return timedelta(0)
def localize(self, value):
value.replace(tzinfo=self)
utc = UTC()
else:
utc = pytz.utc
_EPOCH = datetime(1970, 1, 1, tzinfo=utc)
def now():
return datetime.utcnow().replace(tzinfo=utc)
def parse_datetime(value):
match = REGEX_DELTA.match(value)
if match:
amount, unit = match.groups()
return now() - timedelta(
seconds=int(amount) * UNITS[unit[0]],
)
if parser:
try:
return parser.parse(value)
except Exception:
raise errors.Error(
'Unable to parse {} as a date or time'.format(value)
)
raise errors.Error(
'Unable to parse {} as a date or time'.format(value)
)
def as_seconds(value):
if value.tzinfo is None:
return int(time.mktime((
value.year, value.month, value.day,
value.hour, value.minute, value.second,
-1, -1, -1)))
else:
return int((value - _EPOCH).total_seconds())
| # Copyright 2011-2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import re
import time
from datetime import datetime, timedelta, tzinfo
from touchdown.core import errors
try:
import pytz
except ImportError:
pytz = None
try:
from dateutil import parser
except ImportError:
parser = None
REGEX_DELTA = re.compile(
r'(\d+)\s?(m|minute|minutes|h|hour|hours|d|day|days|w|weeks|weeks)(?: ago)?'
)
UNITS = {
'm': 60,
'h': 60 * 60,
'd': 60 * 60 * 24,
'w': 60 * 60 * 24 * 7,
}
if not pytz:
class UTC(tzinfo):
def __repr__(self):
return '<UTC>'
def utcoffset(self, value):
return timedelta(0)
def tzname(self, value):
return 'UTC'
def dst(self, value):
return timedelta(0)
def localize(self, value):
value.replace(tzinfo=self)
utc = UTC()
else:
utc = pytz.utc
_EPOCH = datetime(1970, 1, 1, tzinfo=utc)
def now():
return datetime.utcnow().replace(tzinfo=utc)
def parse_datetime(value):
match = REGEX_DELTA.match(value)
if match:
amount, unit = match.groups()
return now() - timedelta(
seconds=int(amount) * UNITS[unit[0]],
)
if parser:
try:
return parser.parse(value)
except Exception:
raise errors.Error(
'Unable to parse {} as a date or time'.format(value)
)
raise errors.Error(
'Unable to parse {} as a date or time'.format(value)
)
def as_seconds(value):
if value.tzinfo is None:
return int(time.mktime((
value.year, value.month, value.day,
value.hour, value.minute, value.second,
-1, -1, -1)))
else:
return int(value - _EPOCH).total_seconds()
| apache-2.0 | Python |
32d4339cf27306792fc5776193f82ac9bd6ef39d | Add output result box for remove_padding script | vladimirgamalian/pictools | remove_padding.py | remove_padding.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import click
from PIL import Image
from utils import get_file_list
def box_union(a, b):
left = min(a[0], b[0])
top = min(a[1], b[1])
right = max(a[2], b[2])
bottom = max(a[3], b[3])
return left, top, right, bottom
@click.command()
@click.argument('path', type=click.Path(exists=True))
def crop(path):
files = get_file_list(path)
sizes = [Image.open(f).size for f in files]
assert all(s == sizes[0] for s in sizes)
boxes = [Image.open(f).convert("RGBa").getbbox() for f in files]
box = reduce(box_union, boxes)
print box
for f in files:
img = Image.open(f)
img.crop(box).save(f)
if __name__ == '__main__':
crop()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import click
from PIL import Image
from utils import get_file_list
def box_union(a, b):
left = min(a[0], b[0])
top = min(a[1], b[1])
right = max(a[2], b[2])
bottom = max(a[3], b[3])
return left, top, right, bottom
@click.command()
@click.argument('path', type=click.Path(exists=True))
def crop(path):
files = get_file_list(path)
sizes = [Image.open(f).size for f in files]
assert all(s == sizes[0] for s in sizes)
boxes = [Image.open(f).convert("RGBa").getbbox() for f in files]
box = reduce(box_union, boxes)
for f in files:
img = Image.open(f)
img.crop(box).save(f)
if __name__ == '__main__':
crop()
| mit | Python |
f57f8013d7d6f18bb1bbe8c04116062ae33608f7 | Add a message at the end of concatenating | e-koch/VLA_Lband,e-koch/VLA_Lband | 17B-162/HI/imaging/concat_channels.py | 17B-162/HI/imaging/concat_channels.py |
'''
Combine individually-imaged channels into a cube.
Run in CASA.
'''
import sys
from glob import glob
from taskinit import iatool
ia = iatool()
path_to_data = sys.argv[-4]
filename = sys.argv[-3]
# Check for the expected number of images
num_imgs = int(sys.argv[-2])
suffix = sys.argv[-1]
suffixes = ['mask', 'model', 'pb', 'psf', 'residual', 'residual_init', 'image',
'image.pbcor', 'sumwt', 'weight']
if suffix not in suffixes:
raise NameError("suffix {0} is not a valid output file type from tclean.")
casalog.post("Assembling {} cube".format(suffix))
images = []
for num in range(num_imgs):
chan_img = glob("{0}/channel_{1}/{2}_channel*.{3}"
.format(path_to_data, num, filename, suffix))
if len(chan_img) == 1:
images.append(chan_img[0])
if len(images) == 0:
casalog.post("No images found for {}".format(suffix))
sys.exit(1)
if len(images) != num_imgs:
casalog.post("Number of images found ({0}) does not match"
" expected number ({1}) for {2}. Skipping cube creation."
.format(len(images), num_imgs, suffix))
sys.exit(1)
cubename = "{0}/{1}.{2}".format(path_to_data, filename, suffix)
ia.imageconcat(outfile=cubename, infiles=images, reorder=False,
overwrite=True)
ia.close()
casalog.post("Look! I made a {} cube!".format(suffix))
|
'''
Combine individually-imaged channels into a cube.
Run in CASA.
'''
import sys
from glob import glob
from taskinit import iatool
ia = iatool()
path_to_data = sys.argv[-4]
filename = sys.argv[-3]
# Check for the expected number of images
num_imgs = int(sys.argv[-2])
suffix = sys.argv[-1]
suffixes = ['mask', 'model', 'pb', 'psf', 'residual', 'residual_init', 'image',
'image.pbcor', 'sumwt', 'weight']
if suffix not in suffixes:
raise NameError("suffix {0} is not a valid output file type from tclean.")
casalog.post("Assembling {} cube".format(suffix))
images = []
for num in range(num_imgs):
chan_img = glob("{0}/channel_{1}/{2}_channel*.{3}"
.format(path_to_data, num, filename, suffix))
if len(chan_img) == 1:
images.append(chan_img[0])
if len(images) == 0:
casalog.post("No images found for {}".format(suffix))
sys.exit(1)
if len(images) != num_imgs:
casalog.post("Number of images found ({0}) does not match"
" expected number ({1}) for {2}. Skipping cube creation."
.format(len(images), num_imgs, suffix))
sys.exit(1)
cubename = "{0}/{1}.{2}".format(path_to_data, filename, suffix)
ia.imageconcat(outfile=cubename, infiles=images, reorder=False,
overwrite=True)
ia.close()
| mit | Python |
249c3a0c9368faaf63efe02df43838f4e775c411 | Change conanfile.py to install with CMake | Morwenn/cpp-sort,Morwenn/cpp-sort,Morwenn/cpp-sort,Morwenn/cpp-sort | conanfile.py | conanfile.py | from conans import CMake, ConanFile
class CppSortConan(ConanFile):
name = "cpp-sort"
version = "1.4.0"
license = "https://github.com/Morwenn/cpp-sort/blob/master/license.txt"
url = "https://github.com/Morwenn/cpp-sort"
author = "Morwenn <morwenn29@hotmail.fr>"
description = "Additional sorting algorithms & related tools"
exports_sources = ("include/*", "CMakeLists.txt", "cmake/*")
exports = "license.txt"
no_copy_source = True
def package(self):
cmake = CMake(self)
cmake.definitions["BUILD_TESTING"] = "OFF"
cmake.configure()
cmake.install()
self.copy("license*", dst="licenses", ignore_case=True, keep_path=False)
def package_id(self):
self.info.header_only()
| from conans import ConanFile
class CppSortConan(ConanFile):
name = "cpp-sort"
version = "1.4.0"
settings = "compiler"
license = "https://github.com/Morwenn/cpp-sort/blob/master/license.txt"
url = "https://github.com/Morwenn/cpp-sort"
author = "Morwenn <morwenn29@hotmail.fr>"
description = "Additional sorting algorithms & related tools"
exports_sources = "include/*"
exports = "license.txt"
no_copy_source = True
def configure(self):
if self.settings.compiler == "Visual Studio":
raise Exception("Visual Studio is not supported.")
def package(self):
self.copy("license*", dst="licenses", ignore_case=True, keep_path=False)
self.copy(pattern="*", src="include", dst="include")
def package_id(self):
self.info.header_only()
| mit | Python |
026565a0fcee402f35a35d5aeeff6d8aea79ac4f | Make the PASS command a required step on connect when there is a server password | ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd | txircd/modules/cmd_pass.py | txircd/modules/cmd_pass.py | from twisted.words.protocols import irc
from txircd.modbase import Command, Module
class PassCommand(Command, Module):
def onUse(self, user, data):
if self.ircd.server_password and not user.password:
user.registered -= 1
user.password = data["password"]
if user.registered == 0:
user.register()
def processParams(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return {}
if not params:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "PASS", ":Not enough parameters")
return {}
return {
"user": user,
"password": params[0]
}
def onConnect(self, user):
if self.ircd.server_password:
user.registered += 1 # Make password a required step in registration
def onRegister(self, user):
if self.ircd.server_password and self.ircd.server_password != user.password:
user.sendMessage("ERROR", ":Closing link: ({}@{}) [Access denied]".format(user.username, user.hostname), to=None, prefix=None)
return False
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.passcmd = PassCommand()
def spawn():
return {
"actions": {
"connect": [self.passcmd.onConnect],
"register": [self.passcmd.onRegister]
},
"commands": {
"PASS": self.passcmd
}
}
def cleanup():
self.ircd.actions.remove(self.passcmd)
del self.ircd.commands["PASS"]
del self.passcmd | from twisted.words.protocols import irc
from txircd.modbase import Command, Module
class PassCommand(Command, Module):
def onUse(self, user, data):
user.password = data["password"]
def processParams(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return {}
if not params:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "PASS", ":Not enough parameters")
return {}
return {
"user": user,
"password": params[0]
}
def onRegister(self, user):
if self.ircd.server_password and self.ircd.server_password != user.password:
user.sendMessage("ERROR", ":Closing link: ({}@{}) [Access denied]".format(user.username, user.hostname), to=None, prefix=None)
return False
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.passcmd = PassCommand()
def spawn():
return {
"actions": {
"register": [self.passcmd.onRegister]
},
"commands": {
"PASS": self.passcmd
}
}
def cleanup():
self.ircd.actions.remove(self.passcmd)
del self.ircd.commands["PASS"]
del self.passcmd | bsd-3-clause | Python |
82aba46a74721aafa637889af98d62885ada711d | add timeout to http example | ubolonton/twisted-csp | example/http.py | example/http.py | import csp
from twisted.web.client import getPage
def request(url):
return csp.channelify(getPage(url))
def main():
def timeout_channel(seconds):
c = csp.Channel()
def _t():
yield csp.wait(seconds)
yield c.put(None)
csp.go(_t())
return c
c = request("http://www.google.com/search?q=csp")
t = timeout_channel(10)
chan = yield csp.select(c, t)
if chan is c:
result, error = yield c.take()
if error:
print "Uhm, not good"
print error
else:
print "Here"
print result
elif chan is t:
print "Timeout"
| import csp
from twisted.web.client import getPage
def excerpt(text, cutoff=100):
l = len(text)
if l > cutoff:
return text[0:cutoff] + "..."
else:
return text
def request(url):
return csp.channelify(getPage(url))
def main():
c = request("http://google.com")
result, error = yield c.take()
if error:
print "Uhm, not good"
print error
else:
print "Here"
print excerpt(result)
| epl-1.0 | Python |
85903eff6a23592e2ad10f3d226b7712eaebc64d | define scikits.umfpack.__version__ | scikit-umfpack/scikit-umfpack,rc/scikit-umfpack-rc,scikit-umfpack/scikit-umfpack,rc/scikit-umfpack,rc/scikit-umfpack,rc/scikit-umfpack-rc | scikits/umfpack/__init__.py | scikits/umfpack/__init__.py | """
===============
scikits.umfpack
===============
Interface to UMFPACK linear solver.
"""
from __future__ import division, print_function, absolute_import
from .umfpack import *
from .interface import *
from .version import version as __version__
if __doc__ is not None:
from .umfpack import __doc__ as _umfpack_doc
from .interface import __doc__ as _interface_doc
__doc__ += _interface_doc
__doc__ += _umfpack_doc
del _umfpack_doc, _interface_doc
__all__ = [s for s in dir() if not s.startswith('_')]
from numpy.testing import Tester
test = Tester().test
| """
===============
scikits.umfpack
===============
Interface to UMFPACK linear solver.
"""
from __future__ import division, print_function, absolute_import
from .umfpack import *
from .interface import *
if __doc__ is not None:
from .umfpack import __doc__ as _umfpack_doc
from .interface import __doc__ as _interface_doc
__doc__ += _interface_doc
__doc__ += _umfpack_doc
del _umfpack_doc, _interface_doc
__all__ = [s for s in dir() if not s.startswith('_')]
from numpy.testing import Tester
test = Tester().test
| bsd-3-clause | Python |
ab24db61340ccfd1160e6d39066f9541a411a179 | Remove unused imports | cernops/python-neutronclient,cryptickp/python-neutronclient,rackerlabs/rackspace-python-neutronclient,Stavitsky/python-neutronclient,venusource/python-neutronclient,yamahata/python-tackerclient,cryptickp/python-neutronclient,huntxu/python-neutronclient,cernops/python-neutronclient,roaet/python-neutronclient,roaet/python-neutronclient,Stavitsky/python-neutronclient,openstack/python-neutronclient,NeCTAR-RC/python-neutronclient,Juniper/python-neutronclient,varunarya10/python-neutronclient,eayunstack/python-neutronclient,mangelajo/python-neutronclient,JioCloud/python-neutronclient,johnpbatty/python-neutronclient,sajuptpm/python-neutronclient-ipam,NeCTAR-RC/python-neutronclient,JioCloud/python-neutronclient,eayunstack/python-neutronclient,johnpbatty/python-neutronclient,venusource/python-neutronclient,mangelajo/python-neutronclient,noironetworks/python-neutronclient,yamahata/python-tackerclient,sajuptpm/python-neutronclient-ipam,varunarya10/python-neutronclient,rackerlabs/rackspace-python-neutronclient,openstack/python-neutronclient,noironetworks/python-neutronclient,huntxu/python-neutronclient,Juniper/python-neutronclient | doc/source/conf.py | doc/source/conf.py | # -*- coding: utf-8 -*-
#
project = 'python-neutronclient'
# -- General configuration ---------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output ---------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
('index',
'%s.tex' % project,
u'%s Documentation' % project,
u'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| # -*- coding: utf-8 -*-
#
import sys
import os
project = 'python-neutronclient'
# -- General configuration ---------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output ---------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
('index',
'%s.tex' % project,
u'%s Documentation' % project,
u'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| apache-2.0 | Python |
e94f2cb6dd144cec7b10aecb29669e7ba2ef98b1 | Update release version to 0.6.1 | gunthercox/ChatterBot,vkosuri/ChatterBot | chatterbot/__init__.py | chatterbot/__init__.py | """
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '0.6.1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
| """
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '0.6.0'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
| bsd-3-clause | Python |
666d9c999ebf0cc388d8f045a04756424c2d9b62 | Make it work for Python 2 | cdent/gabbi-demo,cdent/gabbi-demo | gdemo/util.py | gdemo/util.py | """Share utility functions."""
try:
from urllib import parse
except ImportError:
import urllib as parse
def get_route_value(environ, name):
value = environ['wsgiorg.routing_args'][1][name]
value = parse.unquote(value)
return value.replace('%2F', '/')
| """Share utility functions."""
from urllib import parse
def get_route_value(environ, name):
value = environ['wsgiorg.routing_args'][1][name]
value = parse.unquote(value)
return value.replace('%2F', '/')
| apache-2.0 | Python |
5d9bb47f0d0015533cbf547d613600f8b4b7d2d7 | Store the inner exception when creating an OSCException | stackforge/python-openstacksdk,openstack-infra/shade,openstack/python-openstacksdk,stackforge/python-openstacksdk,dtroyer/python-openstacksdk,dtroyer/python-openstacksdk,openstack/python-openstacksdk,openstack-infra/shade | shade/exc.py | shade/exc.py | # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
class OpenStackCloudException(Exception):
def __init__(self, message, extra_data=None):
args = [message]
if extra_data:
args.append(extra_data)
super(OpenStackCloudException, self).__init__(*args)
self.extra_data = extra_data
self.inner_exception = sys.exc_info()
def __str__(self):
if self.extra_data is not None:
return "%s (Extra: %s)" % (
Exception.__str__(self), self.extra_data)
return Exception.__str__(self)
class OpenStackCloudTimeout(OpenStackCloudException):
pass
class OpenStackCloudUnavailableService(OpenStackCloudException):
pass
class OpenStackCloudUnavailableExtension(OpenStackCloudException):
pass
class OpenStackCloudUnavailableFeature(OpenStackCloudException):
pass
class OpenStackCloudResourceNotFound(OpenStackCloudException):
pass
class OpenStackCloudURINotFound(OpenStackCloudException):
pass
| # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class OpenStackCloudException(Exception):
def __init__(self, message, extra_data=None):
args = [message]
if extra_data:
args.append(extra_data)
super(OpenStackCloudException, self).__init__(*args)
self.extra_data = extra_data
def __str__(self):
if self.extra_data is not None:
return "%s (Extra: %s)" % (
Exception.__str__(self), self.extra_data)
return Exception.__str__(self)
class OpenStackCloudTimeout(OpenStackCloudException):
pass
class OpenStackCloudUnavailableService(OpenStackCloudException):
pass
class OpenStackCloudUnavailableExtension(OpenStackCloudException):
pass
class OpenStackCloudUnavailableFeature(OpenStackCloudException):
pass
class OpenStackCloudResourceNotFound(OpenStackCloudException):
pass
class OpenStackCloudURINotFound(OpenStackCloudException):
pass
| apache-2.0 | Python |
4477ef7b7fd9d9e3f7518486cfce4f049009b092 | make reprocess.py a bit smarter, add command line args for sourcedir, builddir and buildarch | kishoredbn/barrelfish,kishoredbn/barrelfish,BarrelfishOS/barrelfish,BarrelfishOS/barrelfish,BarrelfishOS/barrelfish,kishoredbn/barrelfish,BarrelfishOS/barrelfish,BarrelfishOS/barrelfish,BarrelfishOS/barrelfish,kishoredbn/barrelfish,kishoredbn/barrelfish,BarrelfishOS/barrelfish,BarrelfishOS/barrelfish,kishoredbn/barrelfish,BarrelfishOS/barrelfish | tools/harness/reprocess.py | tools/harness/reprocess.py | #!/usr/bin/env python
##########################################################################
# Copyright (c) 2009, ETH Zurich.
# All rights reserved.
#
# This file is distributed under the terms in the attached LICENSE file.
# If you do not find this file, copies can be found by writing to:
# ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
##########################################################################
import os, optparse, re
import harness, debug, tests
def parse_args():
p = optparse.OptionParser(usage='Usage: %prog [options] RESULTDIR...',
description='Reprocess raw results from scalebench/harness runs')
debug.addopts(p, 'debuglevel')
p.add_option('-e', '--existingbuild', dest='existingbuild', metavar='DIR',
help='existing build directory (may not be used with -b)')
p.add_option('-S', '--sourcedir', dest='sourcedir', metavar='DIR',
help='source directory')
p.add_option('-A', '--arch', dest='arch', metavar='ARCH',
help='architecture to use')
options, dirs = p.parse_args()
print "options.existingbuild:",options.existingbuild
options.buildbase = options.existingbuild
print "options.buildbase:",options.buildbase
options.machines = None
if len(dirs) == 0:
p.error('no result directories specified')
# check validity of result dirs
for d in dirs:
if not (os.path.isdir(d) and os.access(d, os.W_OK)
and os.access(os.path.join(d, 'description.txt'), os.R_OK)):
p.error('invalid results directory %s' % d)
debug.current_level = options.debuglevel
return dirs,options
def main(dirs, options=None):
for dirname in dirs:
debug.log('reprocessing %s' % dirname)
debug.verbose('parse %s/description.txt for test' % dirname)
testname = test = None
f = open(os.path.join(dirname, 'description.txt'), 'r')
for line in f:
m = re.match(r'test:\s+(.*)', line)
if m:
testname = m.group(1)
break
f.close()
if not testname:
debug.error('unable to parse description for %s, skipped' % dirname)
continue
debug.verbose('locate test "%s"' % testname)
for t in tests.all_tests:
if t.name.lower() == testname.lower():
test = t(options)
if not test:
debug.error('unknown test "%s" in %s, skipped' % (testname, dirname))
continue
debug.verbose('reprocess results')
h = harness.Harness()
h.process_results(test, dirname)
if __name__ == "__main__":
dirs, options = parse_args()
main(dirs, options=options)
| #!/usr/bin/env python
##########################################################################
# Copyright (c) 2009, ETH Zurich.
# All rights reserved.
#
# This file is distributed under the terms in the attached LICENSE file.
# If you do not find this file, copies can be found by writing to:
# ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
##########################################################################
import os, optparse, re
import harness, debug, tests
def parse_args():
p = optparse.OptionParser(usage='Usage: %prog [options] RESULTDIR...',
description='Reprocess raw results from scalebench/harness runs')
debug.addopts(p, 'debuglevel')
options, dirs = p.parse_args()
if len(dirs) == 0:
p.error('no result directories specified')
# check validity of result dirs
for d in dirs:
if not (os.path.isdir(d) and os.access(d, os.W_OK)
and os.access(os.path.join(d, 'description.txt'), os.R_OK)):
p.error('invalid results directory %s' % d)
debug.current_level = options.debuglevel
return dirs
def main(dirs):
for dirname in dirs:
debug.log('reprocessing %s' % dirname)
debug.verbose('parse %s/description.txt for test' % dirname)
testname = test = None
f = open(os.path.join(dirname, 'description.txt'), 'r')
for line in f:
m = re.match(r'test:\s+(.*)', line)
if m:
testname = m.group(1)
break
f.close()
if not testname:
debug.error('unable to parse description for %s, skipped' % dirname)
continue
debug.verbose('locate test "%s"' % testname)
for t in tests.all_tests:
if t.name.lower() == testname.lower():
test = t(None) # XXX: dummy options
if not test:
debug.error('unknown test "%s" in %s, skipped' % (testname, dirname))
continue
debug.verbose('reprocess results')
h = harness.Harness()
h.process_results(test, dirname)
if __name__ == "__main__":
main(parse_args())
| mit | Python |
be3cfd7033097bbc073e05c232f702bbcbfbd4db | Fix the imports for Python 3 | rabernat/xmitgcm,xgcm/xmitgcm,sambarluc/xmitgcm,xgcm/xgcm,rabernat/xgcm | xgcm/__init__.py | xgcm/__init__.py | from .mdsxray import open_mdsdataset
from .gridops import GCMDataset
from .regridding import regrid_vertical
| from mdsxray import open_mdsdataset
from gridops import GCMDataset
from regridding import regrid_vertical
| mit | Python |
39b0621fc1d7e240ed141e55cb81c7f249d92a7c | fix p4a revamp | johnbolia/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer,kivy/plyer,KeyWeeUsr/plyer,johnbolia/plyer,kivy/plyer,kived/plyer,kivy/plyer,kived/plyer | plyer/platforms/android/__init__.py | plyer/platforms/android/__init__.py | from os import environ
from jnius import autoclass
ANDROID_VERSION = autoclass('android.os.Build$VERSION')
SDK_INT = ANDROID_VERSION.SDK_INT
if 'PYTHON_SERVICE_ARGUMENT' in environ:
try:
PythonService = autoclass('org.kivy.android.PythonService')
except Exception:
PythonService = autoclass('org.renpy.android.PythonService')
activity = PythonService.mService
else:
try:
PythonActivity = autoclass('org.kivy.android.PythonActivity')
except Exception:
PythonActivity = autoclass('org.renpy.android.PythonActivity')
activity = PythonActivity.mActivity
| from os import environ
from jnius import autoclass
ANDROID_VERSION = autoclass('android.os.Build$VERSION')
SDK_INT = ANDROID_VERSION.SDK_INT
if 'PYTHON_SERVICE_ARGUMENT' in environ:
PythonService = autoclass('org.renpy.android.PythonService')
activity = PythonService.mService
else:
PythonActivity = autoclass('org.renpy.android.PythonActivity')
activity = PythonActivity.mActivity
| mit | Python |
021ccdd7abd3133ddde3893dd5719ba6a7303327 | Change variable name | rafalmierzwiak/yearn,rafalmierzwiak/yearn | code/hash_me_poorly.py | code/hash_me_poorly.py | #!/usr/bin/env python3
from collections import namedtuple
PoorItem = namedtuple('Item', 'key value')
class PoorHash:
"""Poor man's hash.
Hash is compartmentalized into slots and in these slots items are kept.
No room left for probing, number of slots fixed at initialisation time,
hence name of the class.
"""
def __delitem__(self, key):
si, ii = self.__slotindex__(key), self.__itemindex__(key)
del self.items[si][ii]
def __getitem__(self, key):
si, ii = self.__slotindex__(key), self.__itemindex__(key)
return self.items[si][ii].value
def __getslot__(self, key):
return self.items[self.__slotindex__(key)]
def __init__(self, slots=16):
self.items = [ [] for _ in range(slots) ]
def __itemindex__(self, key):
try:
index, = (i
for i, item in enumerate(self.__getslot__(key))
if item.key == key)
return index
except ValueError:
raise KeyError(key) from None
def __slotindex__(self, key):
return hash(key) % len(self.items)
def __len__(self):
return sum((len(items) for items in self.items if items))
def __setitem__(self, key, value):
self.items[self.__slotindex__(key)].append(PoorItem(key, value))
h = PoorHash()
for i in range(1024):
h[i] = i
print("No of items: {}".format(len(h)))
for i in range(len(h)):
_ = h[i]
for i in [0, 1, len(h)-2, len(h)-1]:
print("Item {} is {}".format(i, h[i]))
| #!/usr/bin/env python3
from collections import namedtuple
PoorItem = namedtuple('Item', 'key value')
class PoorHash:
"""Poor man's hash.
Hash is compartmentalized into slots and in these slots items are kept.
No room left for probing, number of slots fixed at initialisation time,
hence name of the class.
"""
def __delitem__(self, key):
bi, ii = self.__slotindex__(key), self.__itemindex__(key)
del self.items[bi][ii]
def __getitem__(self, key):
bi, ii = self.__slotindex__(key), self.__itemindex__(key)
return self.items[bi][ii].value
def __getslot__(self, key):
return self.items[self.__slotindex__(key)]
def __init__(self, slots=16):
self.items = [ [] for _ in range(slots) ]
def __itemindex__(self, key):
try:
index, = (i
for i, item in enumerate(self.__getslot__(key))
if item.key == key)
return index
except ValueError:
raise KeyError(key) from None
def __slotindex__(self, key):
return hash(key) % len(self.items)
def __len__(self):
return sum((len(items) for items in self.items if items))
def __setitem__(self, key, value):
self.items[self.__slotindex__(key)].append(PoorItem(key, value))
h = PoorHash()
for i in range(1024):
h[i] = i
print("No of items: {}".format(len(h)))
for i in range(len(h)):
_ = h[i]
for i in [0, 1, len(h)-2, len(h)-1]:
print("Item {} is {}".format(i, h[i]))
| unlicense | Python |
9d6265efb57c350d866c926d18263a64535b238c | fix concept tagger | darenr/MOMA-Art,darenr/art-dataset-nlp-experiments,darenr/MOMA-Art,darenr/MOMA-Art,darenr/MOMA-Art,darenr/art-dataset-nlp-experiments,darenr/art-dataset-nlp-experiments,darenr/art-dataset-nlp-experiments | concept_tag_alchemy.py | concept_tag_alchemy.py | from alchemyapi.alchemyapi import AlchemyAPI
import json
import unicodecsv
alchemyapi = AlchemyAPI()
results = []
with open('MOMA3k.csv', 'rb') as in_csv:
stop = False
for i, m in enumerate(unicodecsv.DictReader(in_csv, encoding='utf-8')):
print i
fieldnames = m.keys()
fieldnames.extend(['AlchemyKeywords', 'AlchemyConcepts'])
if not stop and not 'AlchemyConcepts' in m:
txt = m['ExtraText'].encode('ascii', 'ignore')
keywords = alchemyapi.keywords('text', txt)
concepts = alchemyapi.concepts('text', txt)
if keywords['status'] == 'OK' and concepts['status'] == 'OK':
m['AlchemyKeywords'] = ', '.join(["{0} ({1})".format(k['text'].encode('ascii', 'ignore'), k['relevance'])
for k in keywords['keywords']])
m['AlchemyConcepts'] = ', '.join(["{0} ({1})".format(k['text'].encode('ascii', 'ignore'), k['relevance'])
for k in concepts['concepts']])
else:
print('Error in concept tagging call: ', keywords['status'])
stop = True
results.append(m)
with open('MOMA3k-tagged.csv', 'wb') as out_csv:
output = unicodecsv.DictWriter(out_csv, fieldnames=fieldnames, quoting=unicodecsv.QUOTE_ALL)
output.writerow(dict((fn,fn) for fn in fieldnames))
for i, row in enumerate(results):
output.writerow(row)
if i % 10 == 0:
out_csv.flush()
| from alchemyapi.alchemyapi import AlchemyAPI
import json
import unicodecsv
alchemyapi = AlchemyAPI()
results = []
with open('MOMA3k.csv', 'rb') as in_csv:
for i, m in enumerate(unicodecsv.DictReader(in_csv, encoding='utf-8')):
print i
fieldnames = m.keys()
fieldnames.extend(['AlchemyKeywords', 'AlchemyConcepts'])
if not 'AlchemyConcepts' in m:
txt = m['ExtraText'].encode('ascii', 'ignore')
keywords = alchemyapi.keywords('text', txt)
concepts = alchemyapi.concepts('text', txt)
if keywords['status'] == 'OK' and concepts['status'] == 'OK':
m['AlchemyKeywords'] = ', '.join(["{0} ({1})".format(k['text'].encode('ascii', 'ignore'), k['relevance'])
for k in keywords['keywords']])
m['AlchemyConcepts'] = ', '.join(["{0} ({1})".format(k['text'].encode('ascii', 'ignore'), k['relevance'])
for k in concepts['concepts']])
else:
print('Error in concept tagging call: ', keywords['statusInfo'])
break
results.append(m)
with open('MOMA3k-tagged.csv', 'wb') as out_csv:
output = unicodecsv.DictWriter(out_csv, fieldnames=fieldnames, quoting=unicodecsv.QUOTE_ALL)
output.writerow(dict((fn,fn) for fn in fieldnames))
for i, row in enumerate(results):
output.writerow(row)
if i % 100 == 0:
out_csv.flush()
| mit | Python |
7e754f348327a8fa07adb0850f22b4d2628cecd9 | Use env-python | miyakogi/wdom,miyakogi/wdom,miyakogi/wdom | wdom/tests/test_imports.py | wdom/tests/test_imports.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from os import path
import subprocess
from nose_parameterized import parameterized
from wdom.testing import TestCase
root = path.dirname(path.dirname(path.dirname(path.abspath(__file__))))
cases = [
('wdom', 'css'),
('wdom', 'document'),
('wdom', 'element'),
('wdom', 'event'),
('wdom', 'interface'),
('wdom', 'misc'),
('wdom', 'node'),
('wdom', 'options'),
('wdom', 'parser'),
('wdom', 'tag'),
('wdom', 'testing'),
('wdom', 'web_node'),
('wdom', 'webif'),
('wdom', 'window'),
('wdom', 'server'),
('wdom.server', 'base'),
('wdom.server', 'handler'),
('wdom.server', '_tornado'),
('wdom', 'themes'),
('wdom.themes', 'default'),
('wdom.themes', 'kube'),
('wdom.examples', 'data_binding'),
('wdom.examples', 'rev_text'),
('wdom.examples', 'theming'),
]
try:
import aiohttp
cases.append(('wdom.server', '_aiohttp'))
del aiohttp
except ImportError:
pass
class TestImportModules(TestCase):
@parameterized.expand(cases)
def test_import(self, from_, import_):
cmd = 'from {0} import {1}\nlist(vars({1}).items())'
proc = subprocess.Popen(
[sys.executable, '-c', cmd.format(from_, import_)],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=root,
)
proc.wait()
if proc.returncode != 0:
print(proc.stdout.read())
self.assertEqual(proc.returncode, 0)
def test_wdom_import(self):
cmd = 'import wdom\nlist(vars(wdom).items())'
proc = subprocess.Popen(
[sys.executable, '-c', cmd],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=root,
)
proc.wait()
if proc.returncode != 0:
print(proc.stdout.read())
self.assertEqual(proc.returncode, 0)
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import path
import subprocess
from nose_parameterized import parameterized
from wdom.testing import TestCase
root = path.dirname(path.dirname(path.dirname(path.abspath(__file__))))
cases = [
('wdom', 'css'),
('wdom', 'document'),
('wdom', 'element'),
('wdom', 'event'),
('wdom', 'interface'),
('wdom', 'misc'),
('wdom', 'node'),
('wdom', 'options'),
('wdom', 'parser'),
('wdom', 'tag'),
('wdom', 'testing'),
('wdom', 'web_node'),
('wdom', 'webif'),
('wdom', 'window'),
('wdom', 'server'),
('wdom.server', 'base'),
('wdom.server', 'handler'),
('wdom.server', '_tornado'),
('wdom', 'themes'),
('wdom.themes', 'default'),
('wdom.themes', 'kube'),
('wdom.examples', 'data_binding'),
('wdom.examples', 'rev_text'),
('wdom.examples', 'theming'),
]
try:
import aiohttp
cases.append(('wdom.server', '_aiohttp'))
del aiohttp
except ImportError:
pass
class TestImportModules(TestCase):
@parameterized.expand(cases)
def test_import(self, from_, import_):
cmd = 'from {0} import {1}\nlist(vars({1}).items())'
proc = subprocess.Popen(
['python', '-c', cmd.format(from_, import_)],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=root,
)
proc.wait()
if proc.returncode != 0:
print(proc.stdout.read())
self.assertEqual(proc.returncode, 0)
def test_wdom_import(self):
cmd = 'import wdom\nlist(vars(wdom).items())'
proc = subprocess.Popen(
['python', '-c', cmd],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=root,
)
proc.wait()
if proc.returncode != 0:
print(proc.stdout.read())
self.assertEqual(proc.returncode, 0)
| mit | Python |
5a1f3a37120b9dbcfc3fbd144c4c21772df089e7 | Make explicit some db features | maxirobaina/django-firebird,maxirobaina/django-firebird | firebird/features.py | firebird/features.py | from django.utils.functional import cached_property
from django.db.backends.base.features import BaseDatabaseFeatures
class DatabaseFeatures(BaseDatabaseFeatures):
allows_group_by_pk = False # if the backend can group by just by PK
supports_forward_references = False
has_bulk_insert = False
can_return_id_from_insert = True
has_select_for_update = True
has_select_for_update_nowait = False
supports_forward_references = False
supports_tablespaces = False
supports_long_model_names = False
supports_timezones = False
has_zoneinfo_database = False
uses_savepoints = True
supports_paramstyle_pyformat = False
# connection_persists_old_columns = True
can_rollback_ddl = True
requires_literal_defaults = True
has_case_insensitive_like = False
# Is there a true datatype for uuid?
has_native_uuid_field = False
# Is there a true datatype for timedeltas?
has_native_duration_field = False
# In firebird, check constraint are table based, no column based
supports_column_check_constraints = False
can_introspect_foreign_keys = True
can_introspect_boolean_field = False
can_introspect_small_integer_field = True
# If NULL is implied on columns without needing to be explicitly specified
implied_column_null = True
uppercases_column_names = True
@cached_property
def supports_transactions(self):
return True
def introspected_boolean_field_type(self, field=None, created_separately=False):
"""
What is the type returned when the backend introspects a BooleanField?
The optional arguments may be used to give further details of the field to be
introspected; in particular, they are provided by Django's test suite:
field -- the field definition
created_separately -- True if the field was added via a SchemaEditor's AddField,
False if the field was created with the model
Note that return value from this function is compared by tests against actual
introspection results; it should provide expectations, not run an introspection
itself.
"""
return 'SmallIntegerField'
| from django.utils.functional import cached_property
from django.db.backends.base.features import BaseDatabaseFeatures
class DatabaseFeatures(BaseDatabaseFeatures):
allows_group_by_pk = False # if the backend can group by just by PK
supports_forward_references = False
has_bulk_insert = False
can_return_id_from_insert = True
has_select_for_update = True
has_select_for_update_nowait = False
supports_forward_references = False
supports_tablespaces = False
supports_long_model_names = False
supports_timezones = False
has_zoneinfo_database = False
uses_savepoints = True
supports_paramstyle_pyformat = False
# connection_persists_old_columns = True
can_rollback_ddl = True
requires_literal_defaults = True
has_case_insensitive_like = False
# In firebird, check constraint are table based, no column based
supports_column_check_constraints = False
can_introspect_foreign_keys = True
can_introspect_boolean_field = False
can_introspect_small_integer_field = True
# If NULL is implied on columns without needing to be explicitly specified
implied_column_null = True
uppercases_column_names = True
@cached_property
def supports_transactions(self):
return True
def introspected_boolean_field_type(self, field=None, created_separately=False):
"""
What is the type returned when the backend introspects a BooleanField?
The optional arguments may be used to give further details of the field to be
introspected; in particular, they are provided by Django's test suite:
field -- the field definition
created_separately -- True if the field was added via a SchemaEditor's AddField,
False if the field was created with the model
Note that return value from this function is compared by tests against actual
introspection results; it should provide expectations, not run an introspection
itself.
"""
return 'SmallIntegerField'
| bsd-3-clause | Python |
f60363b3d24d2f4af5ddb894cc1f6494b371b18e | FIX opt_out prevention for mailchimp export | CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland | mass_mailing_switzerland/wizards/mailchimp_export_update_wizard.py | mass_mailing_switzerland/wizards/mailchimp_export_update_wizard.py | ##############################################################################
#
# Copyright (C) 2020 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields, _
from odoo.exceptions import UserError
class ExportMailchimpWizard(models.TransientModel):
_inherit = "partner.export.mailchimp"
@api.multi
def get_mailing_contact_id(self, partner_id, force_create=False):
# Avoid exporting opt_out partner
if force_create and partner_id.opt_out:
return False
# Push the partner_id in mailing_contact creation
return super(
ExportMailchimpWizard, self.with_context(default_partner_id=partner_id)
).get_mailing_contact_id(partner_id, force_create)
| ##############################################################################
#
# Copyright (C) 2020 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields, _
from odoo.exceptions import UserError
class ExportMailchimpWizard(models.TransientModel):
_inherit = "partner.export.mailchimp"
@api.multi
def get_mailing_contact_id(self, partner_id, force_create=False):
# Avoid exporting opt_out partner
if force_create:
partner = self.env["res.partner"].browse(partner_id)
if partner.opt_out:
return False
# Push the partner_id in mailing_contact creation
return super(
ExportMailchimpWizard, self.with_context(default_partner_id=partner_id)
).get_mailing_contact_id(partner_id, force_create)
| agpl-3.0 | Python |
dd6c758f364e84ab9fb5fbc04021c1df460b7622 | Fix get_bump_version output. | matbra/bokeh,almarklein/bokeh,philippjfr/bokeh,percyfal/bokeh,schoolie/bokeh,bokeh/bokeh,azjps/bokeh,Karel-van-de-Plassche/bokeh,ptitjano/bokeh,birdsarah/bokeh,draperjames/bokeh,philippjfr/bokeh,muku42/bokeh,ericmjl/bokeh,caseyclements/bokeh,aavanian/bokeh,phobson/bokeh,deeplook/bokeh,ericmjl/bokeh,azjps/bokeh,msarahan/bokeh,dennisobrien/bokeh,stuart-knock/bokeh,eteq/bokeh,PythonCharmers/bokeh,caseyclements/bokeh,xguse/bokeh,aiguofer/bokeh,matbra/bokeh,KasperPRasmussen/bokeh,timsnyder/bokeh,eteq/bokeh,stuart-knock/bokeh,ericdill/bokeh,tacaswell/bokeh,paultcochrane/bokeh,alan-unravel/bokeh,paultcochrane/bokeh,xguse/bokeh,htygithub/bokeh,carlvlewis/bokeh,daodaoliang/bokeh,jakirkham/bokeh,akloster/bokeh,azjps/bokeh,satishgoda/bokeh,dennisobrien/bokeh,DuCorey/bokeh,srinathv/bokeh,ChinaQuants/bokeh,CrazyGuo/bokeh,maxalbert/bokeh,PythonCharmers/bokeh,draperjames/bokeh,clairetang6/bokeh,CrazyGuo/bokeh,lukebarnard1/bokeh,percyfal/bokeh,birdsarah/bokeh,jakirkham/bokeh,ahmadia/bokeh,abele/bokeh,draperjames/bokeh,jplourenco/bokeh,bsipocz/bokeh,ptitjano/bokeh,timothydmorton/bokeh,timothydmorton/bokeh,schoolie/bokeh,CrazyGuo/bokeh,rothnic/bokeh,saifrahmed/bokeh,jplourenco/bokeh,evidation-health/bokeh,msarahan/bokeh,saifrahmed/bokeh,mutirri/bokeh,htygithub/bokeh,awanke/bokeh,caseyclements/bokeh,gpfreitas/bokeh,timsnyder/bokeh,jakirkham/bokeh,htygithub/bokeh,stonebig/bokeh,awanke/bokeh,canavandl/bokeh,eteq/bokeh,KasperPRasmussen/bokeh,justacec/bokeh,timsnyder/bokeh,mutirri/bokeh,ahmadia/bokeh,evidation-health/bokeh,josherick/bokeh,aiguofer/bokeh,mutirri/bokeh,bsipocz/bokeh,birdsarah/bokeh,abele/bokeh,DuCorey/bokeh,dennisobrien/bokeh,josherick/bokeh,PythonCharmers/bokeh,bokeh/bokeh,paultcochrane/bokeh,xguse/bokeh,bokeh/bokeh,satishgoda/bokeh,maxalbert/bokeh,aavanian/bokeh,ChristosChristofidis/bokeh,ahmadia/bokeh,muku42/bokeh,roxyboy/bokeh,ChinaQuants/bokeh,maxalbert/bokeh,rs2/bokeh,rothnic/bokeh,rs2/bokeh,sri
nathv/bokeh,ptitjano/bokeh,dennisobrien/bokeh,aiguofer/bokeh,bokeh/bokeh,rhiever/bokeh,justacec/bokeh,josherick/bokeh,matbra/bokeh,satishgoda/bokeh,deeplook/bokeh,ChristosChristofidis/bokeh,lukebarnard1/bokeh,rothnic/bokeh,ericmjl/bokeh,azjps/bokeh,rhiever/bokeh,KasperPRasmussen/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,xguse/bokeh,stonebig/bokeh,jplourenco/bokeh,ericdill/bokeh,laurent-george/bokeh,birdsarah/bokeh,mindriot101/bokeh,ChristosChristofidis/bokeh,evidation-health/bokeh,laurent-george/bokeh,clairetang6/bokeh,philippjfr/bokeh,paultcochrane/bokeh,phobson/bokeh,ericdill/bokeh,matbra/bokeh,draperjames/bokeh,schoolie/bokeh,bsipocz/bokeh,khkaminska/bokeh,alan-unravel/bokeh,caseyclements/bokeh,carlvlewis/bokeh,ericmjl/bokeh,tacaswell/bokeh,lukebarnard1/bokeh,PythonCharmers/bokeh,rs2/bokeh,alan-unravel/bokeh,timsnyder/bokeh,azjps/bokeh,clairetang6/bokeh,muku42/bokeh,stonebig/bokeh,roxyboy/bokeh,phobson/bokeh,aavanian/bokeh,maxalbert/bokeh,akloster/bokeh,daodaoliang/bokeh,canavandl/bokeh,tacaswell/bokeh,aiguofer/bokeh,jplourenco/bokeh,DuCorey/bokeh,Karel-van-de-Plassche/bokeh,KasperPRasmussen/bokeh,tacaswell/bokeh,carlvlewis/bokeh,philippjfr/bokeh,timothydmorton/bokeh,abele/bokeh,draperjames/bokeh,philippjfr/bokeh,saifrahmed/bokeh,CrazyGuo/bokeh,rs2/bokeh,msarahan/bokeh,eteq/bokeh,satishgoda/bokeh,srinathv/bokeh,justacec/bokeh,bokeh/bokeh,alan-unravel/bokeh,awanke/bokeh,laurent-george/bokeh,almarklein/bokeh,timothydmorton/bokeh,clairetang6/bokeh,rothnic/bokeh,evidation-health/bokeh,daodaoliang/bokeh,ahmadia/bokeh,stonebig/bokeh,Karel-van-de-Plassche/bokeh,srinathv/bokeh,rhiever/bokeh,percyfal/bokeh,schoolie/bokeh,canavandl/bokeh,rhiever/bokeh,lukebarnard1/bokeh,aavanian/bokeh,quasiben/bokeh,akloster/bokeh,DuCorey/bokeh,ChinaQuants/bokeh,laurent-george/bokeh,roxyboy/bokeh,saifrahmed/bokeh,Karel-van-de-Plassche/bokeh,mindriot101/bokeh,gpfreitas/bokeh,htygithub/bokeh,jakirkham/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh,ChinaQuants/bokeh,mindriot101/bokeh,canav
andl/bokeh,carlvlewis/bokeh,khkaminska/bokeh,rs2/bokeh,deeplook/bokeh,josherick/bokeh,gpfreitas/bokeh,dennisobrien/bokeh,gpfreitas/bokeh,mutirri/bokeh,akloster/bokeh,bsipocz/bokeh,awanke/bokeh,roxyboy/bokeh,schoolie/bokeh,percyfal/bokeh,aiguofer/bokeh,jakirkham/bokeh,khkaminska/bokeh,almarklein/bokeh,ptitjano/bokeh,justacec/bokeh,DuCorey/bokeh,ericdill/bokeh,abele/bokeh,phobson/bokeh,quasiben/bokeh,daodaoliang/bokeh,khkaminska/bokeh,ptitjano/bokeh,stuart-knock/bokeh,mindriot101/bokeh,percyfal/bokeh,muku42/bokeh,ChristosChristofidis/bokeh,phobson/bokeh,quasiben/bokeh,stuart-knock/bokeh,timsnyder/bokeh,deeplook/bokeh,msarahan/bokeh | scripts/get_bump_version.py | scripts/get_bump_version.py | from __future__ import print_function
import subprocess
def get_version_from_git():
    """Return (version, status, commits_since_tag, short_sha) from `git describe`.

    Handles both "X.X.X-<since>-<gsha>" output (release tag) and
    "X.X.X-<status>-<since>-<gsha>" output (devel/rc style tags).
    """
    # Fix: the module never imports sys, so the failure path below raised
    # NameError instead of exiting cleanly.
    import sys

    cmd = ["git", "describe", "--tags", "--long", "--always"]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    code = proc.wait()
    if code != 0:
        print("Failed to run: %s" % " ".join(cmd))
        sys.exit(1)
    version = proc.stdout.read().decode('utf-8').strip()
    try:
        vers, since, gsha = version.split("-")
        status = ""
    except ValueError:
        # Four components means the tag itself carries a status suffix.
        vers, status, since, gsha = version.split("-")
    return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "." + status + "."+ gsha[1:])
| from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel tag.")
else:
print(vers + "." + status + "."+ gsha[1:])
| bsd-3-clause | Python |
c16cf1d321115f303615087bc3fcb5ab287b6102 | Support building LS8 netcdf VRTs, and do it faster | omad/damootils,omad/damootils | scripts/netcdf_overviews.py | scripts/netcdf_overviews.py | #!/usr/bin/env python3
"""
Generate gdal VRTs of netcdfs that can be viewed as RGB/etc in QGIS.
"""
import sys
from glob import glob
import subprocess
from tqdm import tqdm
from pathlib import Path
import xarray as xr
import concurrent.futures
COLOURS = 'blue green red nir swir1 swir2'.split()
LS8_COLOURS = ['coastal_aerosol'] + COLOURS
UNWANTED_VAR_NAMES = {'crs', 'dataset'}
MAX_WORKERS = 8
def choose_colours(filename):
    """Return the data-variable names found in *filename*, minus bookkeeping vars."""
    with xr.open_dataset(filename) as dataset:
        names = list(dataset.data_vars.keys())
    return [name for name in names if name not in UNWANTED_VAR_NAMES]
def build_netcdf_vrts(pattern):
    """Build a viewable VRT beside every netCDF file matched by *pattern*."""
    print("Building viewable VRTs")
    matched = glob(pattern, recursive=True)
    with concurrent.futures.ProcessPoolExecutor(max_workers=MAX_WORKERS) as executor:
        # tqdm drains the map iterator, showing progress as workers finish.
        produced = executor.map(build_netcdf_vrt, matched)
        return [vrt for vrt in tqdm(produced, total=len(matched))]
def build_netcdf_vrt(filename):
    """Create (or refresh) a band-separated VRT for one netCDF file."""
    vrt_name = filename.replace('.nc', '.vrt')
    vrt_path = Path(vrt_name)
    colours = choose_colours(filename)
    needs_rebuild = (not vrt_path.exists()
                     or Path(filename).stat().st_mtime > vrt_path.stat().st_mtime)
    if needs_rebuild:
        layers = ['NETCDF:{}:{}'.format(filename, colour) for colour in colours]
        subprocess.run(['gdalbuildvrt', '-separate', vrt_name] + layers,
                       check=True, stdout=subprocess.DEVNULL)
    return vrt_name
def build_overview(filename, levels=None):
    """Add read-only overviews to *filename* at the given (or default) levels."""
    overview_levels = levels if levels else ['2', '4', '8', '16']
    command = ['gdaladdo', '-ro', filename] + overview_levels
    return subprocess.run(command, check=True, stdout=subprocess.DEVNULL)
def build_overviews(tile_files):
    """Build overviews for every file in *tile_files* in parallel workers."""
    print("Building Tiles overviews")
    with concurrent.futures.ProcessPoolExecutor(max_workers=MAX_WORKERS) as executor:
        results = executor.map(build_overview, tile_files)
        # Drain the iterator so every task completes (and any worker exception
        # propagates).  The per-task results themselves are not needed, so we
        # no longer build a throwaway list bound to an unused local.
        for _ in tqdm(results, total=len(tile_files)):
            pass
def mosaic_vrt(output_name, filenames):
    """Mosaic *filenames* into one VRT and add coarse, compressed overviews."""
    overview_levels = ['32', '64', '128']
    print('Building VRT Mosaic')
    subprocess.run(['gdalbuildvrt', output_name] + filenames, check=True)
    print('Building Mosaic Overviews')
    subprocess.run(['gdaladdo', '--config', 'COMPRESS_OVERVIEW', 'DEFLATE',
                    output_name] + overview_levels, check=True)
def main():
    """CLI entry point: <pattern> <output_vrt>.

    If the glob pattern targets netCDF files, first wrap each one in a
    band-separated VRT; otherwise use the matched files directly.  Then
    build per-tile overviews and mosaic everything into the output VRT.
    """
    pattern = sys.argv[1]
    if pattern[-2:] == 'nc':
        tile_files = build_netcdf_vrts(pattern)
    else:
        tile_files = list(glob(pattern, recursive=True))
    build_overviews(tile_files)
    output_name = sys.argv[2]
    mosaic_vrt(output_name, tile_files)
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
"""
Generate gdal VRTs of netcdfs that can be viewed as RGB/etc in QGIS.
"""
import sys
from glob import glob
import subprocess
from tqdm import tqdm
from pathlib import Path
import concurrent.futures
COLOURS = 'blue green red nir swir1 swir2'.split()
MAX_WORKERS = 8
def build_netcdf_vrts(pattern):
print("Building viewable VRTs")
vrts = []
for filename in tqdm(glob(pattern, recursive=True)):
vrt_name = filename.replace('.nc', '.vrt')
vrt_path = Path(vrt_name)
vrts.append(vrt_name)
if not vrt_path.exists() or Path(filename).stat().st_mtime > vrt_path.stat().st_mtime:
input_layers = ['NETCDF:{}:{}'.format(filename, colour) for colour in COLOURS]
subprocess.run(['gdalbuildvrt', '-separate', vrt_name] + input_layers, check=True, stdout=subprocess.DEVNULL)
return vrts
def build_overview(filename, levels=None):
if not levels:
levels = ['2', '4', '8', '16']
return subprocess.run(['gdaladdo', '-ro', filename] + levels, check=True, stdout=subprocess.DEVNULL)
def build_overviews(tile_files):
print("Building Tiles overviews")
with concurrent.futures.ProcessPoolExecutor(max_workers=MAX_WORKERS) as executor:
num_files = len(tile_files)
results = executor.map(build_overview, tile_files)
completed = [done for done in tqdm(results, total=num_files)]
def mosaic_vrt(output_name, filenames):
levels = ['32', '64', '128']
print('Building VRT Mosaic')
subprocess.run(['gdalbuildvrt', output_name] + filenames, check=True)
print('Building Mosaic Overviews')
subprocess.run(['gdaladdo', '--config', 'COMPRESS_OVERVIEW', 'DEFLATE', output_name] + levels, check=True)
def main():
pattern = sys.argv[1]
if pattern[-2:] == 'nc':
tile_files = build_netcdf_vrts(pattern)
else:
tile_files = list(glob(pattern, recursive=True))
build_overviews(tile_files)
output_name = sys.argv[2]
mosaic_vrt(output_name, tile_files)
if __name__ == '__main__':
main()
| apache-2.0 | Python |
a4e72dc07f029798cbe765c75736412a5a52a2ff | Rollback testing change in euler31 | wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy | graal/edu.uci.python.benchmark/src/benchmarks/euler31-timed.py | graal/edu.uci.python.benchmark/src/benchmarks/euler31-timed.py | #runas solve()
#unittest.skip recursive generator
#pythran export solve()
# 01/08/14 modified for benchmarking by Wei Zhang
import sys, time
COINS = [1, 2, 5, 10, 20, 50, 100, 200]
# test
def _sum(iterable):
sum = None
for i in iterable:
sum += i
return sum
def balance(pattern):
    """Total value (in pence) represented by the coin-count tuple *pattern*."""
    return sum(COINS[index] * count for index, count in enumerate(pattern))
def gen(pattern, coinnum, num):
    """Recursively yield coin-count tuples whose value is exactly *num* pence.

    *pattern* holds the counts already chosen for COINS[0:coinnum]; this
    level tries every feasible count of COINS[coinnum].
    """
    coin = COINS[coinnum]
    for p in range(0, num//coin + 1):
        # Extend the pattern with p coins of the current denomination.
        newpat = pattern[:coinnum] + (p,)
        bal = balance(newpat)
        if bal > num:
            # bal only grows with p, so no later p can fit either.
            return
        elif bal == num:
            yield newpat
        elif coinnum < len(COINS)-1:
            # Still short of num: recurse on the next denomination.
            for pat in gen(newpat, coinnum+1, num):
                yield pat
def solve(total):
    """Count the distinct ways to form *total* pence from the COINS set.

    Project Euler 31: e.g. P2 (200p) can be built from any combination of
    1p, 2p, 5p, 10p, 20p, 50p, 100p and 200p coins.
    """
    ways = 0
    for _ in gen((), 0, total):
        ways += 1
    return ways
def measure():
print("Start timing...")
start = time.time()
result = solve(int(sys.argv[1])) # 200
print('total number of different ways: ', result)
duration = "%.3f\n" % (time.time() - start)
print("euler31: " + duration)
# warm up
for i in range(10):
solve(100)
measure()
| #runas solve()
#unittest.skip recursive generator
#pythran export solve()
# 01/08/14 modified for benchmarking by Wei Zhang
import sys, time
COINS = [1, 2, 5, 10, 20, 50, 100, 200]
# test
def _sum(iterable):
sum = None
for i in iterable:
sum += i
return sum
def balance(pattern):
return _sum(COINS[x]*pattern[x] for x in range(0, len(pattern)))
def gen(pattern, coinnum, num):
coin = COINS[coinnum]
for p in range(0, num//coin + 1):
newpat = pattern[:coinnum] + (p,)
bal = balance(newpat)
if bal > num:
return
elif bal == num:
yield newpat
elif coinnum < len(COINS)-1:
for pat in gen(newpat, coinnum+1, num):
yield pat
def solve(total):
'''
In England the currency is made up of pound, P, and pence, p, and there are eight coins in general circulation:
1p, 2p, 5p, 10p, 20p, 50p, P1 (100p) and P2 (200p).
It is possible to make P2 in the following way:
1 P1 + 1 50p + 2 20p + 1 5p + 1 2p + 3 1p
How many different ways can P2 be made using any number of coins?
'''
return _sum(1 for pat in gen((), 0, total))
def measure():
print("Start timing...")
start = time.time()
result = solve(int(sys.argv[1])) # 200
print('total number of different ways: ', result)
duration = "%.3f\n" % (time.time() - start)
print("euler31: " + duration)
# warm up
for i in range(10):
solve(100)
measure()
| bsd-3-clause | Python |
1dc47560dc64325a919b22b0a0a52557a4dc02af | Modify script to use lookup instead of show; #1989 | gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,studio666/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com | scripts/update_user_info.py | scripts/update_user_info.py | #!/usr/bin/env python
"""This is a one-off script to update user_info for #1936.
This could be generalized for #900.
"""
import os
import sys
import time
import requests
from gittip import wireup
from requests_oauthlib import OAuth1
db = wireup.db()
oauth = OAuth1( os.environ['TWITTER_CONSUMER_KEY']
, os.environ['TWITTER_CONSUMER_SECRET']
, os.environ['TWITTER_ACCESS_TOKEN']
, os.environ['TWITTER_ACCESS_TOKEN_SECRET']
)
elsewhere = db.all("SELECT user_id FROM ELSEWHERE WHERE platform='twitter' ORDER BY id LIMIT 120;")
url = "https://api.twitter.com/1.1/users/lookup.json"
while elsewhere:
batch = elsewhere[:100]
elsewhere = elsewhere[100:]
user_ids = ','.join([str(user_id) for user_id in batch])
response = requests.post(url, data={'user_id': user_ids}, auth=oauth)
# Log the rate-limit.
# ===================
nremaining = int(response.headers['X-RATE-LIMIT-REMAINING'])
reset = int(response.headers['X-RATE-LIMIT-RESET'])
print nremaining, reset, time.time()
if response.status_code != 200:
# Who knows what happened?
# ========================
# Supposedly we shouldn't hit 429, at least.
print response.status_code, response.text
else:
# Update!
# =======
users = response.json()
for user_info in users:
# flatten per upsert method in gittip/elsewhere/__init__.py
for k, v in user_info.items():
user_info[k] = unicode(v)
user_id = user_info['id']
db.run("UPDATE elsewhere SET user_info=%s WHERE user_id=%s", (user_info, user_id))
print "updated {} ({})".format(user_info['screen_name'], user_id)
# Stay under our rate limit.
# =========================
# We get 180 per 15 minutes for the users/lookup endpoint, per:
#
# https://dev.twitter.com/docs/rate-limiting/1.1/limits
sleep_for = 5
if nremaining == 0:
sleep_for = reset - time.time()
sleep_for += 10 # Account for potential clock skew between us and Twitter.
time.sleep(sleep_for)
| #!/usr/bin/env python
"""This is a one-off script to update user_info for #1936.
This could be generalized for #900.
"""
import os
import time
import requests
from gittip import wireup
from requests_oauthlib import OAuth1
db = wireup.db()
oauth = OAuth1( os.environ['TWITTER_CONSUMER_KEY']
, os.environ['TWITTER_CONSUMER_SECRET']
, os.environ['TWITTER_ACCESS_TOKEN']
, os.environ['TWITTER_ACCESS_TOKEN_SECRET']
)
elsewhere = db.all("SELECT user_id FROM ELSEWHERE WHERE platform='twitter' ORDER BY id;")
url = "https://api.twitter.com/1.1/users/show.json?user_id=%s"
for user_id in elsewhere:
response = requests.get(url % user_id, auth=oauth)
if response.status_code != 200:
# Who knows what happened?
# ========================
# Supposedly we shouldn't hit 429, at least.
msg = "{} {}".format(response.status_code, response.text)
else:
# Update!
# =======
user_info = response.json()
# flatten per upsert method in gittip/elsewhere/__init__.py
for k, v in user_info.items():
user_info[k] = unicode(v)
db.run("UPDATE elsewhere SET user_info=%s WHERE user_id=%s", (user_info, user_id))
msg = user_info['screen_name']
# Emit a log line.
# ================
nremaining = int(response.headers['X-RATE-LIMIT-REMAINING'])
reset = int(response.headers['X-RATE-LIMIT-RESET'])
print nremaining, reset, time.time(), user_id, msg
# Stay under our rate limit.
# =========================
# We get 180 per 15 minutes for the users/show endpoint, per:
#
# https://dev.twitter.com/docs/rate-limiting/1.1/limits
sleep_for = 5
if nremaining == 0:
sleep_for = reset - time.time()
sleep_for += 10 # Account for potential clock skew between us and Twitter.
time.sleep(sleep_for)
| mit | Python |
b8166742ec60b5a79cde2d9d708cf54bc5736f1e | modify stdout_encoding to return str instead of bytes object for Python3 | goldsmith/Wikipedia,Timidger/Wikia,TobyRoseman/Wikipedia,jeffbuttars/Wikipedia,Timidger/Wikia,alexsalo/Wikipedia,TobyRoseman/Wikipedia,jeffbuttars/Wikipedia,barrust/Wikipedia,AhmedAMohamed/Wikipedia,seansay/Wikipedia,seansay/Wikipedia,suesai/Wikipedia,alexsalo/Wikipedia,kaushik94/Wikipedia,AhmedAMohamed/Wikipedia,kaushik94/Wikipedia,suesai/Wikipedia,goldsmith/Wikipedia | wikipedia/util.py | wikipedia/util.py | import sys
import functools
def debug(fn):
    """No-op decorator kept as a hook for debug instrumentation.

    Simply forwards the call; functools.wraps preserves the wrapped
    function's name/docstring so introspection keeps working (the previous
    wrapper silently replaced them with "wrapper").
    """
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        return fn(*args, **kwargs)
    return wrapper
class cache(object):
    """Memoizing decorator: caches results keyed on the repr of the call args."""

    def __init__(self, fn):
        self.fn = fn
        self._cache = {}
        functools.update_wrapper(self, fn)

    def __call__(self, *args, **kwargs):
        # Key is the stringified positional + keyword arguments.
        key = str(args) + str(kwargs)
        try:
            return self._cache[key]
        except KeyError:
            result = self.fn(*args, **kwargs)
            self._cache[key] = result
            return result

    def clear_cache(self):
        """Drop all memoized results."""
        self._cache = {}
# from http://stackoverflow.com/questions/3627793/best-output-type-and-encoding-practices-for-repr-functions
def stdout_encode(u, default='UTF8'):
    """Encode *u* for stdout; on Python 3 round-trip back to str.

    Uses sys.stdout's encoding when it is set, otherwise *default*.
    """
    codec = sys.stdout.encoding or default
    encoded = u.encode(codec)
    if sys.version_info > (3, 0):
        return encoded.decode(codec)
    return encoded
| import sys
import functools
def debug(fn):
def wrapper(*args, **kwargs):
res = fn(*args, **kwargs)
return res
return wrapper
class cache(object):
def __init__(self, fn):
self.fn = fn
self._cache = {}
functools.update_wrapper(self, fn)
def __call__(self, *args, **kwargs):
key = str(args) + str(kwargs)
if key in self._cache:
ret = self._cache[key]
else:
ret = self._cache[key] = self.fn(*args, **kwargs)
return ret
def clear_cache(self):
self._cache = {}
# from http://stackoverflow.com/questions/3627793/best-output-type-and-encoding-practices-for-repr-functions
def stdout_encode(u, default='UTF8'):
if sys.stdout.encoding:
return u.encode(sys.stdout.encoding)
return u.encode(default) | mit | Python |
bf24a7aacf0e25151307cfb2d8ed6bc8f743f70d | Comment out log modifying code in CGC receive so it doesn't crash concrete tracing | angr/angr,chubbymaggie/angr,chubbymaggie/simuvex,schieb/angr,chubbymaggie/simuvex,angr/angr,schieb/angr,f-prettyland/angr,f-prettyland/angr,tyb0807/angr,axt/angr,iamahuman/angr,chubbymaggie/angr,zhuyue1314/simuvex,schieb/angr,axt/angr,chubbymaggie/angr,tyb0807/angr,angr/angr,chubbymaggie/simuvex,axt/angr,iamahuman/angr,f-prettyland/angr,tyb0807/angr,angr/simuvex,iamahuman/angr | simuvex/procedures/cgc/receive.py | simuvex/procedures/cgc/receive.py | import simuvex
from itertools import count
fastpath_data_counter = count()
class receive(simuvex.SimProcedure):
    """CGC receive() model: read up to `count` bytes from `fd` into `buf`,
    storing the number of bytes received at `rx_bytes`."""
    #pylint:disable=arguments-differ
    def run(self, fd, buf, count, rx_bytes):
        if self.state.mode == 'fastpath':
            # Special case for CFG generation
            if not self.state.se.symbolic(count):
                actual_size = count
                data = self.state.se.Unconstrained(
                    'receive_data_%d' % fastpath_data_counter.next(),
                    self.state.se.exactly_int(actual_size) * 8
                )
                self.state.memory.store(buf, data)
            else:
                actual_size = self.state.se.Unconstrained('receive_length', self.state.arch.bits)
            self.state.memory.store(rx_bytes, actual_size, endness='Iend_LE')
            return self.state.se.BVV(0, self.state.arch.bits)

        if ABSTRACT_MEMORY in self.state.options:
            # Abstract memory cannot cope with a symbolic length; use count as-is.
            actual_size = count
        else:
            actual_size = self.state.se.Unconstrained('receive_length', self.state.arch.bits)
            if CGC_NO_SYMBOLIC_RECEIVE_LENGTH in self.state.options:
                # Pin the length to a concrete maximum instead of leaving it symbolic.
                self.state.add_constraints(actual_size == min(self.state.se.max_int(count), self.state.memory._maximum_symbolic_size), action=True)
            else:
                self.state.add_constraints(self.state.se.ULE(actual_size, count), action=True)

        if self.state.satisfiable(extra_constraints=[count != 0]):
            data = self.state.posix.read(fd, count, dst_addr=buf)
            # XXX: this breaks wanderer
            #list(self.state.log.actions)[-1].size.ast = actual_size
            #list(self.state.log.actions)[-2].data.ast = list(self.state.log.actions)[-1].actual_value.ast
            self.data = data
        else:
            self.data = None

        self.size = actual_size
        # Only write the received-length out-param when rx_bytes is non-NULL.
        self.state.memory.store(rx_bytes, actual_size, condition=rx_bytes != 0, endness='Iend_LE')

        # TODO: receive failure
        return self.state.se.BVV(0, self.state.arch.bits)
from simuvex.s_options import ABSTRACT_MEMORY, CGC_NO_SYMBOLIC_RECEIVE_LENGTH
| import simuvex
from itertools import count
fastpath_data_counter = count()
class receive(simuvex.SimProcedure):
#pylint:disable=arguments-differ
def run(self, fd, buf, count, rx_bytes):
if self.state.mode == 'fastpath':
# Special case for CFG generation
if not self.state.se.symbolic(count):
actual_size = count
data = self.state.se.Unconstrained(
'receive_data_%d' % fastpath_data_counter.next(),
self.state.se.exactly_int(actual_size) * 8
)
self.state.memory.store(buf, data)
else:
actual_size = self.state.se.Unconstrained('receive_length', self.state.arch.bits)
self.state.memory.store(rx_bytes, actual_size, endness='Iend_LE')
return self.state.se.BVV(0, self.state.arch.bits)
if ABSTRACT_MEMORY in self.state.options:
actual_size = count
else:
actual_size = self.state.se.Unconstrained('receive_length', self.state.arch.bits)
if CGC_NO_SYMBOLIC_RECEIVE_LENGTH in self.state.options:
self.state.add_constraints(actual_size == min(self.state.se.max_int(count), self.state.memory._maximum_symbolic_size), action=True)
else:
self.state.add_constraints(self.state.se.ULE(actual_size, count), action=True)
if self.state.satisfiable(extra_constraints=[count != 0]):
data = self.state.posix.read(fd, count, dst_addr=buf)
list(self.state.log.actions)[-1].size.ast = actual_size
list(self.state.log.actions)[-2].data.ast = list(self.state.log.actions)[-1].actual_value.ast
self.data = data
else:
self.data = None
self.size = actual_size
self.state.memory.store(rx_bytes, actual_size, condition=rx_bytes != 0, endness='Iend_LE')
# TODO: receive failure
return self.state.se.BVV(0, self.state.arch.bits)
from simuvex.s_options import ABSTRACT_MEMORY, CGC_NO_SYMBOLIC_RECEIVE_LENGTH
| bsd-2-clause | Python |
cae1b8461d4ccafe098d01a5d4c9aca51e675e97 | bump version number | mattias-lundell/pytest-xdist | xdist/__init__.py | xdist/__init__.py | #
__version__ = "1.3"
| #
__version__ = "1.2"
| mit | Python |
7e610622a3ed9bffff155234c7b71c9aacf89e31 | add exit() | Nocturnana/jpush-docs,raoxudong/jpush-docs,Aoyunyun/jpush-docs,war22moon/jpush-docs,Aoyunyun/jpush-docs,Nocturnana/jpush-docs,xiepiaa/jpush-docs,Aoyunyun/jpush-docs,xiongtiancheng/jpush-docs,dengyhgit/jpush-docs,jpush/jpush-docs,dengyhgit/jpush-docs,xiongtiancheng/jpush-docs,war22moon/jpush-docs,xiepiaa/jpush-docs,jpush/jpush-docs,xiepiaa/jpush-docs,raoxudong/jpush-docs,dengyhgit/jpush-docs,jpush/jpush-docs,jpush/jpush-docs,Nocturnana/jpush-docs,war22moon/jpush-docs,raoxudong/jpush-docs,Aoyunyun/jpush-docs,xiepiaa/jpush-docs,war22moon/jpush-docs,xiongtiancheng/jpush-docs,xiongtiancheng/jpush-docs,Nocturnana/jpush-docs,raoxudong/jpush-docs | autobuild.py | autobuild.py | #!/usr/bin/env python
import logging
import commands
import os
import time
def git_pull():
    """Change into the repo checkout and pull the latest master branch."""
    # os.chdir returns None; the print of its result is preserved as-is.
    print (os.chdir("/opt/push/jpush-docs/jpush-docs/"))
    logging.info(commands.getstatusoutput("git pull origin master"))
    print ("git pull origin master")
def build():
    """Run `mkdocs build` for each documentation sub-project in turn.

    For every project: chdir into it, print its name, log the mkdocs output,
    pause briefly, and print a timestamp so the log shows when each build
    finished.  Replaces four copy-pasted stanzas (which also printed/logged
    the always-None return value of os.chdir) with a single loop.
    """
    base = "/opt/push/jpush-docs/jpush-docs/zh/"
    for project in ("JPush/", "JMessage/", "JSMS/", "Index/"):
        os.chdir(base + project)
        print(project)
        logging.info(commands.getstatusoutput(
            "/opt/push/jpush-docs/venv/bin/mkdocs build"))
        time.sleep(1)
        print(time.asctime(time.localtime(time.time())))
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
datefmt='%a, %d %b %Y %H:%M:%S',
filename='/opt/push/jpush-docs/autobuild.log',
filemode='a+')
git_pull()
build()
exit()
| #!/usr/bin/env python
import logging
import commands
import os
import time
def git_pull():
print (os.chdir("/opt/push/jpush-docs/jpush-docs/"))
logging.info(commands.getstatusoutput("git pull origin master"))
print ("git pull origin master")
def build():
print (os.chdir("/opt/push/jpush-docs/jpush-docs/zh/JPush/"))
print ("JPush/")
logging.info (commands.getstatusoutput("/opt/push/jpush-docs/venv/bin/mkdocs build"))
time.sleep(1)
print time.asctime(time.localtime(time.time()))
logging.info (os.chdir("/opt/push/jpush-docs/jpush-docs/zh/JMessage/"))
print ("JMessage/")
logging.info (commands.getstatusoutput("/opt/push/jpush-docs/venv/bin/mkdocs build"))
time.sleep(1)
print time.asctime(time.localtime(time.time()))
logging.info (os.chdir("/opt/push/jpush-docs/jpush-docs/zh/JSMS/"))
print ("JSMS/")
logging.info (commands.getstatusoutput("/opt/push/jpush-docs/venv/bin/mkdocs build"))
time.sleep(1)
print time.asctime(time.localtime(time.time()))
logging.info (os.chdir("/opt/push/jpush-docs/jpush-docs/zh/Index/"))
print ("Index/")
logging.info (commands.getstatusoutput("/opt/push/jpush-docs/venv/bin/mkdocs build"))
time.sleep(1)
print time.asctime(time.localtime(time.time()))
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
datefmt='%a, %d %b %Y %H:%M:%S',
filename='/opt/push/jpush-docs/autobuild.log',
filemode='a+')
git_pull()
build()
print time.asctime(time.localtime(time.time()))
| mit | Python |
3ef1531f6934055a416cdddc694f6ca75694d649 | Make use of expanduser() more sane | snare/voltron,snare/voltron,snare/voltron,snare/voltron | voltron/common.py | voltron/common.py | import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
    """Apply the module-level LOG_CONFIG and return the 'voltron' logger."""
    logging.config.dictConfig(LOG_CONFIG)
    return logging.getLogger('voltron')
| import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = '~/.voltron/'
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
| mit | Python |
bf78c387338e2a2bbc556fb0a1c2ed1a693557de | Add port to available arguments. | nvdv/vprof,nvdv/vprof,nvdv/vprof | vprof/__main__.py | vprof/__main__.py | """Main module for visual profiler."""
import argparse
import os
import sys
from vprof import profile_wrappers
from vprof import stats_server
_MODULE_DESC = 'Python visual profiler.'
_HOST = 'localhost'
_PROFILE_MAP = {
'c': profile_wrappers.RuntimeProfile,
'm': profile_wrappers.MemoryProfile,
}
def main():
    """Visual profiler main function.

    Parses the profiler configuration string and target program from the
    command line, runs each requested profiler over the program, then serves
    the collected stats over HTTP on the requested port.
    """
    parser = argparse.ArgumentParser(description=_MODULE_DESC)
    parser.add_argument('profilers', metavar='opts',
                        help='Profilers configuration')
    parser.add_argument('source', metavar='src', nargs=1,
                        help='Python program to profile.')
    parser.add_argument('--port', dest='port', default=8000, type=int,
                        help='Internal webserver port.')
    args = parser.parse_args()
    # Each profiler letter may appear at most once in the configuration.
    if len(args.profilers) > len(set(args.profilers)):
        print('Profiler configuration is ambiguous. Remove duplicates.')
        sys.exit(1)
    for option in args.profilers:
        if option not in _PROFILE_MAP:
            print('Unrecognized option: %s' % option)
            sys.exit(2)
    # Make the profiled program see only its own argv.
    sys.argv[:] = args.source
    program_name, program_stats = args.source[0], {}
    for option in args.profilers:
        curr_profiler = _PROFILE_MAP[option](program_name)
        print('Running %s...' % curr_profiler.__class__.__name__)
        program_stats[option] = curr_profiler.run()
    # Silence stderr noise from the embedded HTTP server.
    sys.stderr = open(os.devnull, "w")
    print('Starting HTTP server...')
    stats_server.start(_HOST, args.port, program_stats)
if __name__ == "__main__":
main()
| """Main module for visual profiler."""
import argparse
import os
import sys
from vprof import profile_wrappers
from vprof import stats_server
_MODULE_DESC = 'Python visual profiler.'
_HOST = 'localhost'
_PORT = 8000
_PROFILE_MAP = {
'c': profile_wrappers.RuntimeProfile,
'm': profile_wrappers.MemoryProfile,
}
def main():
"""Visual profiler main function."""
parser = argparse.ArgumentParser(description=_MODULE_DESC)
parser.add_argument('profilers', metavar='opts',
help='Profilers configuration')
parser.add_argument('source', metavar='src', nargs=1,
help='Python program to profile.')
args = parser.parse_args()
if len(args.profilers) > len(set(args.profilers)):
print('Profiler configuration is ambiguous. Remove duplicates.')
sys.exit(1)
for option in args.profilers:
if option not in _PROFILE_MAP:
print('Unrecognized option: %s' % option)
sys.exit(2)
sys.argv[:] = args.source
program_name, program_stats = args.source[0], {}
for option in args.profilers:
curr_profiler = _PROFILE_MAP[option](program_name)
print('Running %s...' % curr_profiler.__class__.__name__)
program_stats[option] = curr_profiler.run()
sys.stderr = open(os.devnull, "w")
print('Starting HTTP server...')
stats_server.start(_HOST, _PORT, program_stats)
if __name__ == "__main__":
main()
| bsd-2-clause | Python |
f945c054023220f865032817d04309005e50015e | update __init__.py for SRAMs | cornell-brg/pymtl,cornell-brg/pymtl,cornell-brg/pymtl | pclib/rtl/__init__.py | pclib/rtl/__init__.py |
from regs import Reg, RegEn, RegRst, RegEnRst
from arith import Adder, Subtractor, Incrementer
from arith import ZeroExtender, SignExtender
from arith import ZeroComparator, EqComparator, LtComparator, GtComparator
from arith import SignUnit, UnsignUnit
from arith import LeftLogicalShifter, RightLogicalShifter
from Mux import Mux
from Decoder import Decoder
from RegisterFile import RegisterFile
from Crossbar import Crossbar
from PipeCtrl import PipeCtrl
from arbiters import RoundRobinArbiter, RoundRobinArbiterEn
from SRAMs import SRAMBitsComb_rst_1rw, SRAMBytesComb_rst_1rw
from queues import (
SingleElementNormalQueue,
SingleElementBypassQueue,
NormalQueue,
SingleElementPipelinedQueue,
SingleElementSkidQueue,
TwoElementBypassQueue,
)
|
from regs import Reg, RegEn, RegRst, RegEnRst
from arith import Adder, Subtractor, Incrementer
from arith import ZeroExtender, SignExtender
from arith import ZeroComparator, EqComparator, LtComparator, GtComparator
from arith import SignUnit, UnsignUnit
from arith import LeftLogicalShifter, RightLogicalShifter
from Mux import Mux
from Decoder import Decoder
from RegisterFile import RegisterFile
from Crossbar import Crossbar
from PipeCtrl import PipeCtrl
from arbiters import RoundRobinArbiter, RoundRobinArbiterEn
from queues import (
SingleElementNormalQueue,
SingleElementBypassQueue,
NormalQueue,
SingleElementPipelinedQueue,
SingleElementSkidQueue,
TwoElementBypassQueue,
)
| bsd-3-clause | Python |
22207247c286ad3c656c3f6b550d869cf92f6e92 | Add fs Opener based on the builtin FTPFS opener | althonos/fs.sshfs | fs/sshfs/__init__.py | fs/sshfs/__init__.py | from __future__ import absolute_import
from __future__ import unicode_literals
from .sshfs import SSHFS
from ..opener import Opener, registry
@registry.install
class SSHOpener(Opener):
protocols = ['ssh']
def open_fs(self, fs_url, parse_result, writeable, create, cwd):
#from .sshfs import SSHFS
ssh_host, _, dir_path = parse_result.resource.partition('/')
ssh_host, _, ftp_port = ssh_host.partition(':')
ssh_port = int(ftp_port) if ftp_port.isdigit() else 22
ssh_fs = SSHFS(
ssh_host,
port=ssh_port,
user=parse_result.username,
passwd=parse_result.password,
)
return ssh_fs.opendir(dir_path) if dir_path else ssh_fs
| from __future__ import absolute_import
from __future__ import unicode_literals
from .sshfs import SSHFS
| lgpl-2.1 | Python |
714b517c18c1bdedafd6cd5451df2f64ce9c6ef9 | add GetSliceTicket | dreibh/planetlab-lxc-plcapi,dreibh/planetlab-lxc-plcapi,dreibh/planetlab-lxc-plcapi,dreibh/planetlab-lxc-plcapi | PLC/Methods/__init__.py | PLC/Methods/__init__.py | methods = 'AddAddressType AddAddressTypeToAddress AddBootState AddConfFile AddConfFileToNodeGroup AddConfFileToNode AddKeyType AddMessage AddNetworkMethod AddNetworkType AddNodeGroup AddNodeNetwork AddNode AddNodeToNodeGroup AddNodeToPCU AddPCU AddPeer AddPersonKey AddPerson AddPersonToSite AddPersonToSlice AddRole AddRoleToPerson AddSiteAddress AddSite AddSliceAttribute AddSliceAttributeType AddSliceInstantiation AddSlice AddSliceToNodes AdmAddAddressType AdmAddNodeGroup AdmAddNodeNetwork AdmAddNode AdmAddNodeToNodeGroup AdmAddPersonKey AdmAddPerson AdmAddPersonToSite AdmAddSitePowerControlUnit AdmAddSite AdmAssociateNodeToPowerControlUnitPort AdmAuthCheck AdmDeleteAddressType AdmDeleteAllPersonKeys AdmDeleteNodeGroup AdmDeleteNodeNetwork AdmDeleteNode AdmDeletePersonKeys AdmDeletePerson AdmDeleteSitePowerControlUnit AdmDeleteSite AdmDisassociatePowerControlUnitPort AdmGenerateNodeConfFile AdmGetAllAddressTypes AdmGetAllKeyTypes AdmGetAllNodeNetworks AdmGetAllRoles AdmGetNodeGroupNodes AdmGetNodeGroups AdmGetNodes AdmGetPersonKeys AdmGetPersonRoles AdmGetPersonSites AdmGetPersons AdmGetPowerControlUnitNodes AdmGetPowerControlUnits AdmGetSiteNodes AdmGetSitePersons AdmGetSitePIs AdmGetSitePowerControlUnits AdmGetSites AdmGetSiteTechContacts AdmGrantRoleToPerson AdmIsPersonInRole AdmQueryConfFile AdmQueryNode AdmQueryPerson AdmQueryPowerControlUnit AdmQuerySite AdmRebootNode AdmRemoveNodeFromNodeGroup AdmRemovePersonFromSite AdmRevokeRoleFromPerson AdmSetPersonEnabled AdmSetPersonPrimarySite AdmUpdateNodeGroup AdmUpdateNodeNetwork AdmUpdateNode AdmUpdatePerson AdmUpdateSitePowerControlUnit AdmUpdateSite AuthCheck BlacklistKey BootCheckAuthentication BootGetNodeDetails BootNotifyOwners BootUpdateNode DeleteAddress 
DeleteAddressTypeFromAddress DeleteAddressType DeleteBootState DeleteConfFileFromNodeGroup DeleteConfFileFromNode DeleteConfFile DeleteKey DeleteKeyType DeleteMessage DeleteNetworkMethod DeleteNetworkType DeleteNodeFromNodeGroup DeleteNodeFromPCU DeleteNodeGroup DeleteNodeNetwork DeleteNode DeletePCU DeletePeer DeletePersonFromSite DeletePersonFromSlice DeletePerson DeleteRoleFromPerson DeleteRole DeleteSession DeleteSite DeleteSliceAttribute DeleteSliceAttributeType DeleteSliceFromNodes DeleteSliceInstantiation DeleteSlice GetAddresses GetAddressTypes GetBootStates GetConfFiles GetEvents GetKeys GetKeyTypes GetMessages GetNetworkMethods GetNetworkTypes GetNodeGroups GetNodeNetworks GetNodes GetPCUs GetPeerData GetPeerName GetPeers GetPersons GetRoles GetSession GetSites GetSliceAttributes GetSliceAttributeTypes GetSliceInstantiations GetSlices GetSliceTicket GetSlivers NotifyPersons RebootNode RefreshPeer ResetPassword SetPersonPrimarySite SliceCreate SliceDelete SliceExtendedInfo SliceInfo SliceListNames SliceListUserSlices SliceNodesAdd SliceNodesDel SliceNodesList SliceRenew SliceUpdate SliceUserAdd SliceUserDel SliceUsersList UpdateAddress UpdateAddressType UpdateConfFile UpdateKey UpdateMessage UpdateNodeGroup UpdateNodeNetwork UpdateNode UpdatePCU UpdatePeer UpdatePerson UpdateSite UpdateSliceAttribute UpdateSliceAttributeType UpdateSlice VerifyPerson system.listMethods system.methodHelp system.methodSignature system.multicall'.split()
| methods = 'AddAddressType AddAddressTypeToAddress AddBootState AddConfFile AddConfFileToNodeGroup AddConfFileToNode AddKeyType AddMessage AddNetworkMethod AddNetworkType AddNodeGroup AddNodeNetwork AddNode AddNodeToNodeGroup AddNodeToPCU AddPCU AddPeer AddPersonKey AddPerson AddPersonToSite AddPersonToSlice AddRole AddRoleToPerson AddSiteAddress AddSite AddSliceAttribute AddSliceAttributeType AddSliceInstantiation AddSlice AddSliceToNodes AdmAddAddressType AdmAddNodeGroup AdmAddNodeNetwork AdmAddNode AdmAddNodeToNodeGroup AdmAddPersonKey AdmAddPerson AdmAddPersonToSite AdmAddSitePowerControlUnit AdmAddSite AdmAssociateNodeToPowerControlUnitPort AdmAuthCheck AdmDeleteAddressType AdmDeleteAllPersonKeys AdmDeleteNodeGroup AdmDeleteNodeNetwork AdmDeleteNode AdmDeletePersonKeys AdmDeletePerson AdmDeleteSitePowerControlUnit AdmDeleteSite AdmDisassociatePowerControlUnitPort AdmGenerateNodeConfFile AdmGetAllAddressTypes AdmGetAllKeyTypes AdmGetAllNodeNetworks AdmGetAllRoles AdmGetNodeGroupNodes AdmGetNodeGroups AdmGetNodes AdmGetPersonKeys AdmGetPersonRoles AdmGetPersonSites AdmGetPersons AdmGetPowerControlUnitNodes AdmGetPowerControlUnits AdmGetSiteNodes AdmGetSitePersons AdmGetSitePIs AdmGetSitePowerControlUnits AdmGetSites AdmGetSiteTechContacts AdmGrantRoleToPerson AdmIsPersonInRole AdmQueryConfFile AdmQueryNode AdmQueryPerson AdmQueryPowerControlUnit AdmQuerySite AdmRebootNode AdmRemoveNodeFromNodeGroup AdmRemovePersonFromSite AdmRevokeRoleFromPerson AdmSetPersonEnabled AdmSetPersonPrimarySite AdmUpdateNodeGroup AdmUpdateNodeNetwork AdmUpdateNode AdmUpdatePerson AdmUpdateSitePowerControlUnit AdmUpdateSite AuthCheck BlacklistKey BootCheckAuthentication BootGetNodeDetails BootNotifyOwners BootUpdateNode DeleteAddress DeleteAddressTypeFromAddress DeleteAddressType DeleteBootState DeleteConfFileFromNodeGroup DeleteConfFileFromNode DeleteConfFile DeleteKey DeleteKeyType DeleteMessage DeleteNetworkMethod DeleteNetworkType DeleteNodeFromNodeGroup DeleteNodeFromPCU 
DeleteNodeGroup DeleteNodeNetwork DeleteNode DeletePCU DeletePeer DeletePersonFromSite DeletePersonFromSlice DeletePerson DeleteRoleFromPerson DeleteRole DeleteSession DeleteSite DeleteSliceAttribute DeleteSliceAttributeType DeleteSliceFromNodes DeleteSliceInstantiation DeleteSlice GetAddresses GetAddressTypes GetBootStates GetConfFiles GetEvents GetKeys GetKeyTypes GetMessages GetNetworkMethods GetNetworkTypes GetNodeGroups GetNodeNetworks GetNodes GetPCUs GetPeerData GetPeerName GetPeers GetPersons GetRoles GetSession GetSites GetSliceAttributes GetSliceAttributeTypes GetSliceInstantiations GetSlices GetSlivers NotifyPersons RebootNode RefreshPeer ResetPassword SetPersonPrimarySite SliceCreate SliceDelete SliceExtendedInfo SliceInfo SliceListNames SliceListUserSlices SliceNodesAdd SliceNodesDel SliceNodesList SliceRenew SliceUpdate SliceUserAdd SliceUserDel SliceUsersList UpdateAddress UpdateAddressType UpdateConfFile UpdateKey UpdateMessage UpdateNodeGroup UpdateNodeNetwork UpdateNode UpdatePCU UpdatePeer UpdatePerson UpdateSite UpdateSliceAttribute UpdateSliceAttributeType UpdateSlice VerifyPerson system.listMethods system.methodHelp system.methodSignature system.multicall'.split()
| bsd-3-clause | Python |
b54f2549fd6d9a97492a652aae29a4e47f46920d | add todo for trash factory | PTank/trashtalk,PTank/trashtalk | trashtalk/trash_factory.py | trashtalk/trash_factory.py | from __future__ import print_function, absolute_import
from pwd import getpwnam
from os import getlogin
from pathlib import Path
from trashtalk.trash import Trash
import sys
"""
Module who generate trash
:Todo
rename file: generate_trash.py
remove class and make function
change this in core.py and autocomplete_bash.py
add better way of searching media or trash
"""
MEDIA_DIR = ['/media']
TRASHS_PATH = []
class TrashFactory():
"""
"""
def create_trash(self, users=[], medias=[], home=True, all_media=False, error=True):
trashs = []
if not users:
users = [getlogin()]
for user in users:
if home:
path = Path('/home/' + user + "/.local/share/Trash")
if path.exists():
trashs.append(Trash(str(path), user))
elif error:
print("can't find: " + path.name, file=sys.stderr)
if all_media:
medias = Path("/media/" + user).iterdir()
elif medias:
medias = map(lambda x: Path("/media/%s/%s" % (user, x)),
medias)
for m in medias:
if m.exists():
t = m / (".Trash-" + str(getpwnam(user)[2]))
if t.exists():
trashs.append((Trash(str(t), m.name)))
elif error:
print("media " + m.name + " have no trash", file=sys.stderr)
elif error:
print("no media name: " + m.name, file=sys.stderr)
return trashs
def get_all_media(self, user):
for m in Path('/media/' + user).iterdir():
yield m.name
| from __future__ import print_function, absolute_import
from pwd import getpwnam
from os import getlogin
from pathlib import Path
from trashtalk.trash import Trash
import sys
class TrashFactory():
"""
"""
def create_trash(self, users=[], medias=[], home=True, all_media=False, error=True):
trashs = []
if not users:
users = [getlogin()]
for user in users:
if home:
path = Path('/home/' + user + "/.local/share/Trash")
if path.exists():
trashs.append(Trash(str(path), user))
elif error:
print("can't find: " + path.name, file=sys.stderr)
if all_media:
medias = Path("/media/" + user).iterdir()
elif medias:
medias = map(lambda x: Path("/media/%s/%s" % (user, x)),
medias)
for m in medias:
if m.exists():
t = m / (".Trash-" + str(getpwnam(user)[2]))
if t.exists():
trashs.append((Trash(str(t), m.name)))
elif error:
print("media " + m.name + " have no trash", file=sys.stderr)
elif error:
print("no media name: " + m.name, file=sys.stderr)
return trashs
def get_all_media(self, user):
for m in Path('/media/' + user).iterdir():
yield m.name
| mit | Python |
bc3a795902a84ae49fc12753c3f391b98b9a924d | Add the "user is away" line to WHOIS output | Heufneutje/txircd,DesertBus/txircd,ElementalAlchemist/txircd | txircd/modules/cmd_away.py | txircd/modules/cmd_away.py | from twisted.words.protocols import irc
from txircd.modbase import Command
class AwayCommand(Command):
def onUse(self, user, data):
if "reason" in data:
user.metadata["away"] = data["reason"]
user.sendMessage(irc.RPL_NOWAWAY, ":You have been marked as being away")
else:
if "away" in user.metadata:
del user.metadata["away"]
user.sendMessage(irc.RPL_UNAWAY, ":You are no longer marked as being away")
def processParams(self, user, params):
if not params:
return {
"user": user
}
return {
"user": user,
"reason": params[0]
}
def privmsgReply(self, command, data):
if command != "PRIVMSG":
return
if "targetuser" not in data:
return
sourceUser = data["user"]
for user in data["targetuser"]:
udata = self.ircd.users[user]
if "away" in udata.metadata:
sourceUser.sendMessage(irc.RPL_AWAY, udata.nickname, ":{}".format(udata.metadata["away"]))
def whoisLine(self, command, data):
if command != "WHOIS":
return
user = data["user"]
target = data["targetuser"]
if "away" in target.metadata:
user.sendMessage(irc.RPL_AWAY, target.username, ":{}".format(target.metadata["away"]))
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.awayCmd = None
def spawn(self):
self.awayCmd = AwayCommand()
return {
"commands": {
"AWAY": self.awayCmd
},
"actions": {
"commandextra": [self.awayCmd.privmsgReply, self.awayCmd.whoisLine]
}
def cleanup(self):
self.ircd.actions["commandextra"].remove(self.awayCmd.privmsgReply)
del self.ircd.commands["AWAY"] | from twisted.words.protocols import irc
from txircd.modbase import Command
class AwayCommand(Command):
def onUse(self, user, data):
if "reason" in data:
user.metadata["away"] = data["reason"]
user.sendMessage(irc.RPL_NOWAWAY, ":You have been marked as being away")
else:
if "away" in user.metadata:
del user.metadata["away"]
user.sendMessage(irc.RPL_UNAWAY, ":You are no longer marked as being away")
def processParams(self, user, params):
if not params:
return {
"user": user
}
return {
"user": user,
"reason": params[0]
}
def privmsgReply(self, command, data):
if command != "PRIVMSG":
return
if "targetuser" not in data:
return
sourceUser = data["user"]
for user in data["targetuser"]:
udata = self.ircd.users[user]
if "away" in udata.metadata:
sourceUser.sendMessage(irc.RPL_AWAY, udata.nickname, ":{}".format(udata.metadata["away"]))
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.awayCmd = None
def spawn(self):
self.awayCmd = AwayCommand()
return {
"commands": {
"AWAY": self.awayCmd
},
"actions": {
"commandextra": [self.awayCmd.privmsgReply]
}
def cleanup(self):
self.ircd.actions["commandextra"].remove(self.awayCmd.privmsgReply)
del self.ircd.commands["AWAY"] | bsd-3-clause | Python |
c2400d5c5349e70bf375d4fe3cab9a4e9f851d31 | improve test to use Twisted listener, not nc | sammyshj/txtorcon,meejah/txtorcon,sammyshj/txtorcon,ghtdak/txtorcon,isislovecruft/txtorcon,isislovecruft/txtorcon,meejah/txtorcon,david415/txtorcon,ghtdak/txtorcon,david415/txtorcon | txtorcon/test/test_util.py | txtorcon/test/test_util.py | from twisted.trial import unittest
from twisted.test import proto_helpers
from twisted.internet import defer
from twisted.internet.endpoints import TCP4ServerEndpoint
from twisted.internet.interfaces import IProtocolFactory
from zope.interface import implements
from txtorcon.util import process_from_address, delete_file_or_tree
import os
import tempfile
import subprocess
class FakeState:
tor_pid = -1
class FakeProtocolFactory:
implements(IProtocolFactory)
def doStart(self):
"IProtocolFactory API"
def doStop(self):
"IProtocolFactory API"
def buildProtocol(self, addr):
"IProtocolFactory API"
return None
class TestProcessFromUtil(unittest.TestCase):
def setUp(self):
self.fakestate = FakeState()
def test_none(self):
self.assertTrue(process_from_address(None, 80, self.fakestate) == None)
def test_internal(self):
self.assertTrue(process_from_address('(Tor_internal)', 80, self.fakestate) == self.fakestate.tor_pid)
@defer.inlineCallbacks
def test_real_addr(self):
## FIXME should choose a port which definitely isn't used.
## it's apparently frowned upon to use the "real" reactor in
## tests, but I was using "nc" before, and I think this is
## preferable.
from twisted.internet import reactor
listener = yield TCP4ServerEndpoint(reactor, 9887).listen(FakeProtocolFactory())
try:
pid = process_from_address('0.0.0.0', 9887, self.fakestate)
finally:
listener.stopListening()
self.assertTrue(pid == os.getpid())
class TestDelete(unittest.TestCase):
def test_delete_file(self):
(fd, f) = tempfile.mkstemp()
os.write(fd, 'some\ndata\n')
os.close(fd)
self.assertTrue(os.path.exists(f))
delete_file_or_tree(f)
self.assertTrue(not os.path.exists(f))
def test_delete_tree(self):
d = tempfile.mkdtemp()
f = open(os.path.join(d, 'foo'), 'w')
f.write('foo\n')
f.close()
self.assertTrue(os.path.exists(d))
self.assertTrue(os.path.isdir(d))
self.assertTrue(os.path.exists(os.path.join(d,'foo')))
delete_file_or_tree(d)
self.assertTrue(not os.path.exists(d))
self.assertTrue(not os.path.exists(os.path.join(d,'foo')))
| from twisted.trial import unittest
from twisted.test import proto_helpers
from txtorcon.util import process_from_address, delete_file_or_tree
import os
import tempfile
import subprocess
class FakeState:
tor_pid = -1
class TestProcessFromUtil(unittest.TestCase):
def setUp(self):
self.fakestate = FakeState()
def test_none(self):
self.assertTrue(process_from_address(None, 80, self.fakestate) == None)
def test_internal(self):
self.assertTrue(process_from_address('(Tor_internal)', 80, self.fakestate) == self.fakestate.tor_pid)
def test_real_addr(self):
## FIXME should choose a port which definitely isn't used.
try:
proc = subprocess.Popen(['nc', '-l', '0.0.0.0', '9887'], env={})
procpid = proc.pid
pid = process_from_address('0.0.0.0', 9887, self.fakestate)
finally:
proc.terminate()
self.assertTrue(pid == proc.pid)
class TestDelete(unittest.TestCase):
def test_delete_file(self):
(fd, f) = tempfile.mkstemp()
os.write(fd, 'some\ndata\n')
os.close(fd)
self.assertTrue(os.path.exists(f))
delete_file_or_tree(f)
self.assertTrue(not os.path.exists(f))
def test_delete_tree(self):
d = tempfile.mkdtemp()
f = open(os.path.join(d, 'foo'), 'w')
f.write('foo\n')
f.close()
self.assertTrue(os.path.exists(d))
self.assertTrue(os.path.isdir(d))
self.assertTrue(os.path.exists(os.path.join(d,'foo')))
delete_file_or_tree(d)
self.assertTrue(not os.path.exists(d))
self.assertTrue(not os.path.exists(os.path.join(d,'foo')))
| mit | Python |
3c776b0dd950a7444dfc58f2bf8d1f15ab3e6a21 | make a dependency-error'd version of nwis.hdf5 | ocefpaf/ulmo,cameronbracken/ulmo,nathanhilbert/ulmo,nathanhilbert/ulmo,ocefpaf/ulmo,cameronbracken/ulmo | ulmo/usgs/nwis/__init__.py | ulmo/usgs/nwis/__init__.py | """
`USGS National Water Information System`_ web services
.. _USGS National Water Information System: http://waterdata.usgs.gov/nwis
"""
from __future__ import absolute_import
from . import core
from .core import (get_sites, get_site_data)
from ulmo import util
try:
from . import hdf5
except ImportError:
hdf5 = util.module_with_dependency_errors([
'get_site',
'get_sites',
'get_site_data',
'update_site_list',
'update_site_data',
])
pytables = util.module_with_deprecation_warnings([
hdf5.get_site,
hdf5.get_sites,
hdf5.get_site_data,
hdf5.update_site_list,
hdf5.update_site_data,
],
"the nwis.pytables module has moved to nwis.hdf5 - nwis.pytables "
"is deprecated and will be removed in a future ulmo release."
)
| """
`USGS National Water Information System`_ web services
.. _USGS National Water Information System: http://waterdata.usgs.gov/nwis
"""
from __future__ import absolute_import
from . import core
from .core import (get_sites, get_site_data)
from ulmo import util
try:
from . import hdf5
pytables = util.module_with_deprecation_warnings([
hdf5.get_site,
hdf5.get_sites,
hdf5.get_site_data,
hdf5.update_site_list,
hdf5.update_site_data,
],
"the nwis.pytables module has moved to nwis.hdf5 - nwis.pytables "
"is deprecated and will be removed in a future ulmo release."
)
except ImportError:
hdf5 = util.module_with_dependency_errors([
'get_site',
'get_sites',
'get_site_data',
'update_site_list',
'update_site_data',
])
| bsd-3-clause | Python |
416ad8b4ed3f81119ab4a47fea43da4bf246afa2 | reduce BaseDNSService interface. add method docstrings | bkonkle/update-ip | update_ip/services/base.py | update_ip/services/base.py | class BaseDNSService(object):
name = 'Service Name' # Replace this with the name of the DNS service
def update(self, domain, ip):
'''updates the domain with the new ip'''
raise NotImplementedError
def find_domains(self, ip):
'''get all domains with the given ip'''
raise NotImplementedError
| class BaseDNSService(object):
name = 'Service Name' # Replace this with the name of the DNS service
def create(self, domain, ip):
raise NotImplementedError
def read(self, domain):
raise NotImplementedError
def update(self, domain, ip):
raise NotImplementedError
def delete(self, domain):
raise NotImplementedError
def find_domains(self, ip):
raise NotImplementedError
| bsd-3-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.