commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
a83de7bb6bb0836b754e0670c9bbe3fa4f76f4c0 | fix docstring in log method | shopkeep/shpkpr,shopkeep/shpkpr | shpkpr/cli.py | shpkpr/cli.py | # stdlib imports
import os
import sys
# third-party imports
import click
# local imports
from shpkpr.marathon import MarathonClient
from shpkpr.mesos import MesosClient
CONTEXT_SETTINGS = dict(auto_envvar_prefix='SHPKPR')
class Context(object):
    """Shared state object passed between shpkpr CLI commands."""

    def __init__(self):
        # Clients are populated later by the top-level CLI entry point.
        self.marathon_client = None
        self.mesos_client = None

    def log(self, msg, *args):
        """Logs a message to stdout, applying printf-style args if given."""
        formatted = msg % args if args else msg
        click.echo(formatted, file=sys.stdout)
pass_context = click.make_pass_decorator(Context, ensure=True)
cmd_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), 'commands'))
class ShpkprCLI(click.MultiCommand):
    """Click multi-command that discovers subcommands from the commands dir."""

    def list_commands(self, ctx):
        """Return the sorted names of all cmd_*.py modules in cmd_folder."""
        commands = []
        for entry in os.listdir(cmd_folder):
            if entry.startswith('cmd_') and entry.endswith('.py'):
                # Strip the "cmd_" prefix and ".py" suffix.
                commands.append(entry[4:-3])
        return sorted(commands)

    def get_command(self, ctx, name):
        """Import and return the `cli` object for the named subcommand."""
        try:
            if sys.version_info[0] == 2:
                # __import__ requires byte strings on Python 2.
                name = name.encode('ascii', 'replace')
            module = __import__('shpkpr.commands.cmd_' + name,
                                None, None, ['cli'])
        except ImportError:
            return
        return module.cli
@click.command(cls=ShpkprCLI, context_settings=CONTEXT_SETTINGS)
@click.option('--marathon_url', required=True, help="URL of the Marathon API to use.")
@click.option('--mesos_master_url', required=True, help="URL of the Mesos master to use.")
@pass_context
def cli(ctx, mesos_master_url, marathon_url):
    """A tool to manage applications running on Marathon."""
    # Attach API clients to the shared context so subcommands can use them.
    ctx.mesos_client = MesosClient(mesos_master_url)
    ctx.marathon_client = MarathonClient(marathon_url)
| # stdlib imports
import os
import sys
# third-party imports
import click
# local imports
from shpkpr.marathon import MarathonClient
from shpkpr.mesos import MesosClient
CONTEXT_SETTINGS = dict(auto_envvar_prefix='SHPKPR')
class Context(object):
def __init__(self):
self.marathon_client = None
def log(self, msg, *args):
"""Logs a message to stderr."""
if args:
msg %= args
click.echo(msg, file=sys.stdout)
pass_context = click.make_pass_decorator(Context, ensure=True)
cmd_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), 'commands'))
class ShpkprCLI(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(cmd_folder):
if filename.endswith('.py') and \
filename.startswith('cmd_'):
rv.append(filename[4:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
try:
if sys.version_info[0] == 2:
name = name.encode('ascii', 'replace')
mod = __import__('shpkpr.commands.cmd_' + name, None, None, ['cli'])
except ImportError:
return
return mod.cli
@click.command(cls=ShpkprCLI, context_settings=CONTEXT_SETTINGS)
@click.option('--marathon_url', required=True, help="URL of the Marathon API to use.")
@click.option('--mesos_master_url', required=True, help="URL of the Mesos master to use.")
@pass_context
def cli(ctx, mesos_master_url, marathon_url):
"""A tool to manage applications running on Marathon."""
ctx.mesos_client = MesosClient(mesos_master_url)
ctx.marathon_client = MarathonClient(marathon_url)
| mit | Python |
a4263c3d4156d8b419103e23cdd176b50b737e02 | Change "--no-patch" to "-s", which has the same behavior | google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot | slave/skia_slave_scripts/run_bench.py | slave/skia_slave_scripts/run_bench.py | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia benchmarking executable. """
from build_step import BuildStep
from utils import shell_utils
import os
import re
import sys
GIT = 'git.bat' if os.name == 'nt' else 'git'
GIT_SVN_ID_MATCH_STR = 'git-svn-id: http://skia.googlecode.com/svn/trunk@(\d+)'
def BenchArgs(data_file):
    """ Builds a list containing arguments to pass to bench.

    data_file: filepath to store the log output
    """
    arguments = ['--timers', 'wcg']
    arguments += ['--logFile', data_file]
    return arguments
def GetSvnRevision(commit_hash):
    """Return the SVN revision embedded in a commit's git-svn-id line.

    Raises an Exception when the commit message carries no git-svn-id.
    """
    # "-s" suppresses the patch output, leaving only the commit message.
    output = shell_utils.Bash([GIT, 'show', '-s', commit_hash],
                              echo=False, log_in_real_time=False)
    matches = re.findall(GIT_SVN_ID_MATCH_STR, output)
    if not matches:
        raise Exception('No git-svn-id found for %s\nOutput:\n%s' % (commit_hash,
                                                                     output))
    return matches[0]
class RunBench(BuildStep):
    """Build step that runs the Skia benchmarking executable."""

    def __init__(self, timeout=9600, no_output_timeout=9600, **kwargs):
        super(RunBench, self).__init__(timeout=timeout,
                                       no_output_timeout=no_output_timeout,
                                       **kwargs)

    def _BuildDataFile(self):
        """Return the device-side path of the bench data file for this rev."""
        revision = GetSvnRevision(self._got_revision)
        return os.path.join(self._device_dirs.PerfDir(),
                            'bench_r%s_data' % revision)

    def _Run(self):
        """Run bench with logging and per-builder configuration arguments."""
        args = []
        if self._perf_data_dir:
            args.extend(BenchArgs(self._BuildDataFile()))
        if 'Nexus4' in self._builder_name:
            # Nexus 4 builders need explicit bench configs.
            args.extend(['--config', 'defaults', 'MSAA4'])
        self._flavor_utils.RunFlavoredCmd('bench', args + self._bench_args)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RunBench))
| #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia benchmarking executable. """
from build_step import BuildStep
from utils import shell_utils
import os
import re
import sys
GIT = 'git.bat' if os.name == 'nt' else 'git'
GIT_SVN_ID_MATCH_STR = 'git-svn-id: http://skia.googlecode.com/svn/trunk@(\d+)'
def BenchArgs(data_file):
""" Builds a list containing arguments to pass to bench.
data_file: filepath to store the log output
"""
return ['--timers', 'wcg', '--logFile', data_file]
def GetSvnRevision(commit_hash):
output = shell_utils.Bash([GIT, 'show', '--no-patch', commit_hash],
echo=False, log_in_real_time=False)
results = re.findall(GIT_SVN_ID_MATCH_STR, output)
if results:
return results[0]
else:
raise Exception('No git-svn-id found for %s\nOutput:\n%s' % (commit_hash,
output))
class RunBench(BuildStep):
def __init__(self, timeout=9600, no_output_timeout=9600, **kwargs):
super(RunBench, self).__init__(timeout=timeout,
no_output_timeout=no_output_timeout,
**kwargs)
def _BuildDataFile(self):
return os.path.join(self._device_dirs.PerfDir(),
'bench_r%s_data' % GetSvnRevision(self._got_revision))
def _Run(self):
args = []
if self._perf_data_dir:
args.extend(BenchArgs(self._BuildDataFile()))
if 'Nexus4' in self._builder_name:
args.extend(['--config', 'defaults', 'MSAA4'])
self._flavor_utils.RunFlavoredCmd('bench', args + self._bench_args)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RunBench))
| bsd-3-clause | Python |
23964bb3bb9b4d850bc60708eab82d1636cde9c8 | make pretty | RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software | soccer/gameplay/positions/position.py | soccer/gameplay/positions/position.py | import robocup
import single_robot_composite_behavior
import enum
import typing
## Parent class of any position
# Deals with relative positions as well
# as the generic pass options
class Position(single_robot_composite_behavior.SingleRobotCompositeBehavior):
    """Parent class of any position.

    Deals with relative positions as well as the generic pass options.
    """

    class Type(enum.Enum):
        Striker = 0
        Midfielder = 1
        Defender = 2
        Goalie = 3

    def __init__(self, position_class: enum.Enum, name: str) -> None:
        super().__init__(continuous=True)
        self._position_class = position_class
        self._str_name = name
        self._relative_pos = None
        self._pass_options = []  # type: ignore
        # Actual location the controller wants in field XY terms.
        # None follows the same rules as `relative_pos`.
        self.target_pos = None

    @property
    def position_class(self) -> enum.Enum:
        """What type of position this is (Striker/Midfielder/Defender/Goalie etc)."""
        return self._position_class

    @property
    def str_name(self) -> str:
        """String name of the position (Left/Right/Center etc)."""
        return self._str_name

    @property
    def relative_pos(self) -> robocup.Point:
        """Where the controller wants this position to be.

        Set by the controller in init and should not be touched. None when
        the position is goalie and a relative position doesn't make sense.
        """
        return self._relative_pos

    @relative_pos.setter
    def relative_pos(self, pos):
        self._relative_pos = pos

    @property
    def pass_options(self):  # -> typing.List[Position]:
        """List of other positions that we can pass to.

        These are the "triangles" that are naturally formed on the field.
        In general, those in this list are the only ones who need to get
        open when I have the ball. Sorted from most "forward" option to
        furthest "back" option, from left to right in formation.
        """
        return self._pass_options

    @pass_options.setter
    def pass_options(self, options):  #: typing.List[Position]
        self._pass_options = options

    def __str__(self):
        desc = super().__str__()
        desc += "\n " + self._str_name
        return desc
| import robocup
import single_robot_composite_behavior
import enum
import typing
## Parent class of any position
# Deals with relative positions as well
# as the generic pass options
class Position(single_robot_composite_behavior.SingleRobotCompositeBehavior):
class Type(enum.Enum):
Striker = 0
Midfielder = 1
Defender = 2
Goalie = 3
def __init__(self, position_class: enum.Enum, name: str) -> None:
super().__init__(continuous=True)
self._position_class = position_class
self._str_name = name
self._relative_pos = None
self._pass_options = [] # type: ignore
# Actual location the controller wants in field XY terms
# None follows the same rules as the `relative_pos`
self.target_pos = None
## What type of position this is
# (Striker/Midfielder/Defender/Goalie etc)
@property
def position_class(self) -> enum.Enum:
return self._position_class
## String name of the position (Left/Right/Center etc)
@property
def str_name(self) -> str:
return self._str_name
## Where the controller wants this position to be
# Set by controller in init and should not be touched
# None when the position is goalie and a relative position
# doesn't make sense
@property
def relative_pos(self) -> robocup.Point:
return self._relative_pos
@relative_pos.setter
def relative_pos(self, pos):
self._relative_pos = pos
## List of other positions that we can pass to
# These are the "triangles" that are naturally formed on the field
# In general, those in this list are the only ones who need to get
# open when I have the ball
# This is sorted from most "forward" option to furthest "back" option
# from left to right in formation
@property
def pass_options(self):# -> typing.List[Position]:
return self._pass_options
@pass_options.setter
def pass_options(self, options): #: typing.List[Position]
self._pass_options = options
def __str__(self):
desc = super().__str__()
desc += "\n " + self._str_name
return desc
| apache-2.0 | Python |
d5d359c5ec0f1735e97355839f1a12c6ea45c460 | Add add_remote to pygit2 implementation | solarnz/polygamy,solarnz/polygamy | polygamy/pygit2_git.py | polygamy/pygit2_git.py | from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
    """Git backend implemented on top of the pygit2 bindings."""

    @staticmethod
    def is_on_branch(path):
        """Return True when HEAD points at a real, born branch."""
        repo = pygit2.Repository(path)
        detached_or_unborn = repo.head_is_detached or repo.head_is_unborn
        return not detached_or_unborn

    @staticmethod
    def get_remote_url(path, remote_name):
        """Return the URL of the named remote; raise NoSuchRemote if absent."""
        repo = pygit2.Repository(path)
        for remote in repo.remotes:
            if remote.name == remote_name:
                return remote.url
        raise NoSuchRemote()

    @staticmethod
    def add_remote(path, remote_name, remote_url):
        """Register a new remote on the repository at `path`."""
        repo = pygit2.Repository(path)
        repo.create_remote(remote_name, remote_url)
| from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
| bsd-3-clause | Python |
c8c29109b4f44bf1b251c67b208c3cf7a6269692 | Improve error handling/detection. | zooniverse/docker-status | docker_status.py | docker_status.py | from flask import Flask
from multiprocessing import Process, Value
from time import sleep
import datetime
import os
import re
import urllib2
import sys
import traceback
OK_STATUSES = [ 200, 301, 302 ]
HTTP_TIMEOUT = os.environ.get('HTTP_TIMEOUT', 30)
TEST_INTERVAL = os.environ.get('TEST_INTERVAL', 30)
DEBUG = os.environ.get('DEBUG', False) == "true"
LISTEN_HOST = os.environ.get('LISTEN_HOST', '0.0.0.0')
LISTEN_PORT = os.environ.get('LISTEN_PORT', '80')
app = Flask(__name__)
checks = {}
class NoRedirectHTTPErrorProcessor(urllib2.HTTPErrorProcessor):
    """Error processor that returns every response unmodified.

    This prevents urllib2 from following redirects, so 301/302 codes are
    observed directly by the caller.
    """

    def http_response(self, request, response):
        return response

    https_response = http_response
url_opener = urllib2.build_opener(NoRedirectHTTPErrorProcessor)
@app.route("/")
def status():
if len([ host for (host, check) in checks.items()
if check[0].value not in OK_STATUSES ]) == 0:
status = 200
message = "OK"
else:
status = 500
message = "Fail"
return message, status
def checker(host, status):
get_path = os.environ.get("%s_GET_PATH" % host, "/")
while True:
try:
result = url_opener.open("http://%s%s" % (host, get_path),
timeout=HTTP_TIMEOUT)
status.value = result.getcode()
except Exception as e:
status.value = getattr(e, 'code', -1)
traceback.print_exc()
print datetime.datetime.now(), host, status.value
sys.stdout.flush()
sleep(TEST_INTERVAL)
if __name__ == "__main__":
hosts = [ var.split('_')[0] for var in os.environ
if re.match('[A-Z0-9]+_PORT_80_TCP$', var) ]
for host in hosts:
status = Value('i', -1)
process = Process(target=checker, args=(host, status))
process.start()
checks[host] = (status, process)
app.run(host=LISTEN_HOST, port=LISTEN_PORT, debug=DEBUG)
| from flask import Flask
from multiprocessing import Process, Value
from time import sleep
import datetime
import os
import re
import urllib2
import sys
OK_STATUSES = [ 200, 301, 302 ]
HTTP_TIMEOUT = os.environ.get('HTTP_TIMEOUT', 30)
TEST_INTERVAL = os.environ.get('TEST_INTERVAL', 30)
DEBUG = os.environ.get('DEBUG', False) == "true"
LISTEN_HOST = os.environ.get('LISTEN_HOST', '0.0.0.0')
LISTEN_PORT = os.environ.get('LISTEN_PORT', '80')
app = Flask(__name__)
checks = {}
@app.route("/")
def status():
if len([ host for (host, check) in checks.items()
if check[0].value not in OK_STATUSES ]) == 0:
status = 200
message = "OK"
else:
status = 500
message = "Fail"
return message, status
def checker(host, status):
while True:
try:
result = urllib2.urlopen("http://%s/" % host, timeout=HTTP_TIMEOUT)
status.value = result.getcode()
except (urllib2.URLError) as e:
status.value = getattr(e, 'code', -1)
except:
status.value = -1
print datetime.datetime.now(), host, status.value
sys.stdout.flush()
sleep(TEST_INTERVAL)
if __name__ == "__main__":
hosts = [ var.split('_')[0] for var in os.environ
if re.match('[A-Z0-9]+_PORT_80_TCP$', var) ]
for host in hosts:
status = Value('i', -1)
process = Process(target=checker, args=(host, status))
process.start()
checks[host] = (status, process)
app.run(host=LISTEN_HOST, port=LISTEN_PORT, debug=DEBUG)
| apache-2.0 | Python |
0826b8fa1157eccef406b108928f7be516d054e2 | add some more generic service tests. | spaam/svtplay-dl,olof/svtplay-dl,olof/svtplay-dl,spaam/svtplay-dl | lib/svtplay_dl/tests/test_service.py | lib/svtplay_dl/tests/test_service.py | from __future__ import absolute_import
import unittest
from svtplay_dl.service import Generic
from svtplay_dl.service import opengraph_get
from svtplay_dl.service import Service
from svtplay_dl.service import service_handler
from svtplay_dl.service.services import sites
from svtplay_dl.utils.parser import setup_defaults
class MockService(Service):
    """Minimal Service subclass used to exercise domain matching."""

    supported_domains = ["example.com", "example.net"]
class ServiceTest(unittest.TestCase):
    """MockService.handles accepts all of its supported domains."""

    def test_supports(self):
        urls = [
            "http://example.com/video.swf?id=1",
            "http://example.net/video.swf?id=1",
            "http://www.example.com/video.swf?id=1",
            "http://www.example.net/video.swf?id=1",
        ]
        for url in urls:
            self.assertTrue(MockService.handles(url))
class service_handlerTest(unittest.TestCase):
    """service_handler yields nothing for an unsupported URL."""

    def test_service_handler(self):
        config = setup_defaults()
        self.assertFalse(service_handler(sites, config, "localhost"))
class service_handlerTest2(unittest.TestCase):
    """service_handler resolves a known URL to a Service instance."""

    def test_service_handler(self):
        config = setup_defaults()
        handler = service_handler(sites, config, "https://www.svtplay.se")
        self.assertIsInstance(handler, Service)
class service_opengraphGet(unittest.TestCase):
    """opengraph_get extracts an og property from a partial HTML snippet."""

    text = '<html><head><meta name="og:image" property="og:image" content="http://example.com/img3.jpg"><meta'

    def test_og_get(self):
        self.assertEqual(opengraph_get(self.text, "image"),
                         "http://example.com/img3.jpg")
class service_opengraphGet_none(unittest.TestCase):
    """opengraph_get returns a falsy value for a missing property."""

    text = '<html><head><meta name="og:image" property="og:image" content="http://example.com/img3.jpg"><meta'

    def test_og_get(self):
        self.assertFalse(opengraph_get(self.text, "kalle"))
class service_opengraphGet2(unittest.TestCase):
    """opengraph_get also works when the meta tag ends the snippet."""

    text = '<html><head><meta name="og:image" property="og:image" content="http://example.com/img3.jpg">'

    def test_og_get(self):
        self.assertEqual(opengraph_get(self.text, "image"),
                         "http://example.com/img3.jpg")
class test_generic(unittest.TestCase):
    """Exercises Generic._match against several kinds of page content."""

    def _make_generic(self):
        """Build a Generic service pointed at a dummy URL."""
        config = setup_defaults()
        return Generic(config, "http://example.com")

    def test_nothing(self):
        # Unrecognized content: the URL comes back unchanged with no service.
        generic = self._make_generic()
        self.assertEqual(generic._match("hejsan", sites),
                         ("http://example.com", None))

    def test_hls(self):
        generic = self._make_generic()
        data = 'source src="http://example.com/hls.m3u8" type="application/x-mpegURL"'
        self.assertIsInstance(generic._match(data, sites)[1], Service)

    def test_tv4(self):
        generic = self._make_generic()
        data = "rc=https://www.tv4play.se/iframe/video/12499319 "
        self.assertIsInstance(generic._match(data, sites)[1], Service)

    def test_vimeo(self):
        generic = self._make_generic()
        data = 'src="https://player.vimeo.com/video/359281775" '
        self.assertIsInstance(generic._match(data, sites)[1], Service)
| from __future__ import absolute_import
import unittest
from svtplay_dl.service import opengraph_get
from svtplay_dl.service import Service
from svtplay_dl.service import service_handler
from svtplay_dl.service.services import sites
from svtplay_dl.utils.parser import setup_defaults
class MockService(Service):
supported_domains = ["example.com", "example.net"]
class ServiceTest(unittest.TestCase):
def test_supports(self):
assert MockService.handles("http://example.com/video.swf?id=1")
assert MockService.handles("http://example.net/video.swf?id=1")
assert MockService.handles("http://www.example.com/video.swf?id=1")
assert MockService.handles("http://www.example.net/video.swf?id=1")
class service_handlerTest(unittest.TestCase):
def test_service_handler(self):
config = setup_defaults()
assert not service_handler(sites, config, "localhost")
class service_handlerTest2(unittest.TestCase):
def test_service_handler(self):
config = setup_defaults()
assert isinstance(service_handler(sites, config, "https://www.svtplay.se"), Service)
class service_opengraphGet(unittest.TestCase):
text = '<html><head><meta name="og:image" property="og:image" content="http://example.com/img3.jpg"><meta'
def test_og_get(self):
assert opengraph_get(self.text, "image") == "http://example.com/img3.jpg"
class service_opengraphGet_none(unittest.TestCase):
text = '<html><head><meta name="og:image" property="og:image" content="http://example.com/img3.jpg"><meta'
def test_og_get(self):
assert not opengraph_get(self.text, "kalle")
class service_opengraphGet2(unittest.TestCase):
text = '<html><head><meta name="og:image" property="og:image" content="http://example.com/img3.jpg">'
def test_og_get(self):
assert opengraph_get(self.text, "image") == "http://example.com/img3.jpg"
| mit | Python |
301924877984658716d69425f18bb5bd6e75cae8 | update server | mzmttks/miteteyo | server/app.py | server/app.py | from flask import Flask, request
from pymongo import MongoClient
import os
import json
import pprint
app = Flask(__name__)
client = MongoClient(os.environ["MONGOLAB_URI"])
db = client["heroku_gw4w78g9"]
col = db["locations"]
print col
@app.route('/location', methods=["POST"])
def addLocation():
try:
col.insert_one(request.json)
except Exception as e:
import traceback
print traceback.format_exc()
return "ok"
@app.route('/')
def hello_world():
locs = [d for d in col.find({})]
print pprint.pformat(locs)
return "<pre>" + pprint.pformat(locs) + "</pre>"
if __name__ == '__main__':
app.run(debug=True)
| from flask import Flask, request
from pymongo import MongoClient
import os
import json
import pprint
app = Flask(__name__)
client = MongoClient(os.environ["MONGOLAB_URI"])
db = client["heroku_gw4w78g9"]
col = db["locations"]
print col
@app.route('/location', methods=["POST"])
def addLocation():
try:
print "REQUEST.JSON", "---", request.json, "---"
col.insert_one(request.json)
except Exception as e:
import traceback
print traceback.format_exc()
return "ok"
@app.route('/')
def hello_world():
locs = [d for d in col.find({})]
print pprint.pformat(locs)
return pprint.pformat(locs)
if __name__ == '__main__':
app.run(debug=True)
| mit | Python |
0aafba5834f4448d798452d00dfc53efcaa39898 | update shuffling | dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy | disaggregator/utils.py | disaggregator/utils.py | import appliance
import pandas as pd
import numpy as np
import os
import pickle
def aggregate_instances(instances, metadata, how="strict"):
    '''
    Given a list of temporally aligned instances, aggregate them into a single
    signal.

    Raises NotImplementedError for any `how` other than "strict".
    '''
    if how == "strict":
        # Transpose so traces covering the same period are grouped together,
        # then sum each group into a single trace.
        traces = [instance.traces for instance in instances]
        traces = [list(t) for t in zip(*traces)]  # transpose
        traces = [aggregate_traces(t, {}) for t in traces]
        return appliance.ApplianceInstance(traces, metadata)
    else:
        # BUG FIX: previously *returned* the exception class instead of
        # raising it, so unsupported modes failed silently.
        raise NotImplementedError
def aggregate_traces(traces, metadata, how="strict"):
    '''
    Given a list of temporally aligned traces, aggregate them into a single
    signal.

    Raises NotImplementedError for any `how` other than "strict".
    '''
    if how == "strict":
        # Strict mode requires that traces are exactly aligned.
        summed_series = traces[0].series
        for trace in traces[1:]:
            summed_series += trace.series
        return appliance.ApplianceTrace(summed_series, metadata)
    else:
        # BUG FIX: previously *returned* the exception class instead of
        # raising it, so unsupported modes failed silently.
        raise NotImplementedError
def concatenate_traces(traces, metadata=None, how="strict"):
    '''
    Given a list of appliance traces, returns a single concatenated
    trace. With how="strict" option, must be sampled at the same rate and
    consecutive, without overlapping datapoints.
    '''
    if not metadata:
        # Default to the first trace's metadata.
        metadata = traces[0].metadata
    if how != "strict":
        raise NotImplementedError
    # Strict mode requires an ordered list of consecutive, similarly
    # sampled traces with no missing data.
    combined = pd.concat([trace.series for trace in traces])
    return appliance.ApplianceTrace(combined, metadata)
def concatenate_traces_lists(traces, metadata=None, how="strict"):
    '''
    Takes a list of lists of n traces and concatenates them into a single
    list of n traces.
    '''
    if not metadata:
        metadata = [trace.metadata for trace in traces[0]]
    if how != "strict":
        raise NotImplementedError
    # Transpose so traces occupying the same slot are grouped together,
    # then concatenate each group into one trace.
    grouped = [list(group) for group in zip(*traces)]
    return [concatenate_traces(group, meta)
            for group, meta in zip(grouped, metadata)]
def order_traces(traces):
    '''
    Given a set of traces, orders them chronologically and catches
    overlapping traces.
    '''
    # Sort by each trace's first series entry.
    sort_order = np.argsort([trace.series[0] for trace in traces])
    return [traces[i] for i in sort_order]
def pickle_object(obj, title):
    '''
    Given an object and a filename saves the object in pickled format to the
    data directory.
    '''
    target = os.path.join(os.pardir, 'data/{}.p'.format(title))
    with open(target, 'wb') as f:
        pickle.dump(obj, f)
def shuffle_appliance_sets(sets, other_params):
    # TODO: not yet implemented — currently a stub that does nothing.
    pass
| import appliance
import pandas as pd
import numpy as np
import os
import pickle
def concatenate_traces(traces, metadata=None, how="strict"):
'''
Given a list of appliance traces, returns a single concatenated
trace. With how="strict" option, must be sampled at the same rate and
consecutive, without overlapping datapoints.
'''
if not metadata:
metadata = traces[0].metadata
if how == "strict":
# require ordered list of consecutive, similarly sampled traces with no
# missing data.
return appliance.ApplianceTrace(pd.concat([t.series for t in traces]),metadata)
else:
raise NotImplementedError
def concatenate_traces_lists(traces, metadata=None, how="strict"):
'''
Takes a list of lists of n traces and concatenates them into a single
list of n traces.
'''
if not metadata:
metadata = [trace.metadata for trace in traces[0]]
if how == "strict":
traces = [list(t) for t in zip(*traces)]
traces = [concatenate_traces(t,m) for t,m in zip(traces,metadata)]
return traces
else:
raise NotImplementedError
def aggregate_traces(traces, metadata, how="strict"):
'''
Given a list of temporally aligned traces, aggregate them into a single
signal.
'''
if how == "strict":
# require that traces are exactly aligned
summed_series = traces[0].series
for trace in traces[1:]:
summed_series += trace.series
return appliance.ApplianceTrace(summed_series, metadata)
else:
return NotImplementedError
def aggregate_instances(instances, metadata, how="strict"):
'''
Given a list of temporally aligned instances, aggregate them into a single
signal.
'''
if how == "strict":
traces = [instance.traces for instance in instances]
traces = [list(t) for t in zip(*traces)] # transpose
traces = [ aggregate_traces(t,{}) for t in traces]
return appliance.ApplianceInstance(traces, metadata)
else:
return NotImplementedError
def order_traces(traces):
'''
Given a set of traces, orders them chronologically and catches
overlapping traces.
'''
order = np.argsort([t.series[0] for t in traces])
new_traces = [traces[i] for i in order]
return new_traces
def pickle_object(obj,title):
'''
Given an object and a filename saves the object in pickled format to the data directory.
'''
with open(os.path.join(os.pardir,'data/{}.p'.format(title)),'wb') as f:
pickle.dump(obj, f)
| mit | Python |
88bfcad3e764bab14a057f092ba9562301db89eb | fix to `_standardize` | wkentaro/chainer,chainer/chainer,okuta/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,okuta/chainer,niboshi/chainer,chainer/chainer,okuta/chainer,wkentaro/chainer,hvy/chainer,niboshi/chainer,hvy/chainer,niboshi/chainer,pfnet/chainer,hvy/chainer,chainer/chainer,wkentaro/chainer,wkentaro/chainer,okuta/chainer | tests/chainer_tests/functions_tests/normalization_tests/test_standardize.py | tests/chainer_tests/functions_tests/normalization_tests/test_standardize.py | import numpy
from chainer.functions.normalization._standardize import _standardize
from chainer import testing
@testing.parameterize(*(testing.product({
    'ch_out': [1, 5],
    'size': [10, 20],
    'dtype': [numpy.float32, numpy.float16],
    'eps': [1e-5, 1e-1],
})))
@testing.backend.inject_backend_tests(
    None,
    # CPU tests
    testing.product({
        'use_cuda': [False],
        'use_ideep': ['never', 'always'],
    })
    # GPU tests
    + testing.product({
        'use_cuda': [True],
        'use_cudnn': ['never', 'always'],
    })
    # ChainerX tests
    + testing.product({
        'use_chainerx': [True],
        'chainerx_device': ['native:0', 'cuda:0'],
    })
)
class TestStandardize(testing.FunctionTestCase):
    """Function test for _standardize across dtypes, sizes and backends."""

    def setUp(self):
        # Propagate eps into every check; fp16 needs looser tolerances.
        all_options = (self.check_forward_options,
                       self.check_backward_options,
                       self.check_double_backward_options)
        for options in all_options:
            options.update({'eps': self.eps})
            if self.dtype == numpy.float16:
                options.update({'atol': 5e-3, 'rtol': 1e-2})

    def generate_inputs(self):
        x = numpy.random.uniform(
            -1, 1, (self.ch_out, self.size)).astype(self.dtype)
        return x,

    def forward(self, inputs, device):
        x, = inputs
        return _standardize(x, self.eps),

    def forward_expected(self, inputs):
        # Reference implementation: per-row zero mean, unit (eps-padded) std.
        x, = inputs
        mean = numpy.mean(x, axis=1, keepdims=True)
        centered = x - mean
        var = numpy.mean(numpy.square(centered), axis=1, keepdims=True)
        std = numpy.sqrt(var, dtype=x.dtype) + self.eps
        inv_std = 1. / std
        return centered * inv_std,
testing.run_module(__name__, __file__)
| import numpy
from chainer import functions
from chainer import testing
@testing.parameterize(*(testing.product({
'ch_out': [1, 5],
'size': [10, 20],
'dtype': [numpy.float32, numpy.float16],
'eps': [1e-5, 1e-1],
})))
@testing.backend.inject_backend_tests(
None,
# CPU tests
testing.product({
'use_cuda': [False],
'use_ideep': ['never', 'always'],
})
# GPU tests
+ testing.product({
'use_cuda': [True],
'use_cudnn': ['never', 'always'],
})
# ChainerX tests
+ testing.product({
'use_chainerx': [True],
'chainerx_device': ['native:0', 'cuda:0'],
})
)
class TestStandardize(testing.FunctionTestCase):
def setUp(self):
self.check_forward_options.update({'eps': self.eps})
self.check_backward_options.update({'eps': self.eps})
self.check_double_backward_options.update({'eps': self.eps})
if self.dtype == numpy.float16:
self.check_forward_options.update({'atol': 5e-3, 'rtol': 1e-2})
self.check_backward_options.update({'atol': 5e-3, 'rtol': 1e-2})
self.check_double_backward_options.update(
{'atol': 5e-3, 'rtol': 1e-2})
def generate_inputs(self):
shape = self.ch_out, self.size
x = numpy.random.uniform(-1, 1, shape).astype(self.dtype)
return x,
def forward(self, inputs, device):
x, = inputs
return functions.standardize(x, self.eps),
def forward_expected(self, inputs):
x, = inputs
mu = numpy.mean(x, axis=1, keepdims=True)
x_mu = x - mu
var = numpy.mean(numpy.square(x_mu), axis=1, keepdims=True)
std = numpy.sqrt(var, dtype=x.dtype) + self.eps
inv_std = 1. / std
return x_mu * inv_std,
testing.run_module(__name__, __file__)
| mit | Python |
a11a5deb0f87e9abe939f864c7f929552ea07d98 | Fix likelihood (#83) | johnveitch/cpnest | examples/gaussianmixture.py | examples/gaussianmixture.py | import unittest
import numpy as np
import cpnest.model
class GaussianMixtureModel(cpnest.model.Model):
    """
    A simple gaussian model with parameters mean and sigma
    Shows example of using your own data
    """

    def __init__(self):
        self.names = ['mean1', 'sigma1', 'mean2', 'sigma2', 'weight']
        self.bounds = [[-3, 3], [0.01, 1], [-3, 3], [0.01, 1], [0.0, 1.0]]
        # Synthetic dataset: ~10% drawn from N(0.5, 0.5), the rest from
        # N(-1.5, 0.03).
        samples = []
        for _ in range(10000):
            if np.random.uniform(0.0, 1.0) < 0.1:
                samples.append(np.random.normal(0.5, 0.5))
            else:
                samples.append(np.random.normal(-1.5, 0.03))
        self.data = np.array(samples)

    def log_likelihood(self, x):
        """Two-component mixture log-likelihood summed over the data."""
        w = x['weight']
        logL1 = (np.log(w) - np.log(x['sigma1'])
                 - 0.5 * ((self.data - x['mean1']) / x['sigma1'])**2)
        logL2 = (np.log(1.0 - w) - np.log(x['sigma2'])
                 - 0.5 * ((self.data - x['mean2']) / x['sigma2'])**2)
        # Combine per-sample component likelihoods, then sum over samples.
        return np.logaddexp(logL1, logL2).sum()

    def log_prior(self, p):
        """Log-prior proportional to -log(sigma1) - log(sigma2) in bounds."""
        if not self.in_bounds(p):
            return -np.inf
        return -np.log(p['sigma1']) - np.log(p['sigma2'])

    def force(self, x):
        f = np.zeros(1, dtype={'names': x.names,
                               'formats': ['f8' for _ in x.names]})
        f['sigma1'] = 1.0
        f['sigma2'] = 1.0
        return f
class GaussianMixtureTestCase(unittest.TestCase):
    """
    Test the gaussian model
    """

    def setUp(self):
        self.work = cpnest.CPNest(GaussianMixtureModel(),
                                  verbose=2,
                                  nthreads=6,
                                  nlive=1024,
                                  maxmcmc=1000,
                                  poolsize=1000,
                                  nslice=6)

    def test_run(self):
        self.work.run()
def test_all():
    """Run the full unittest suite verbosely."""
    unittest.main(verbosity=2)
if __name__=='__main__':
unittest.main(verbosity=2)
| import unittest
import numpy as np
import cpnest.model
class GaussianMixtureModel(cpnest.model.Model):
"""
A simple gaussian model with parameters mean and sigma
Shows example of using your own data
"""
def __init__(self):
self.names=['mean1','sigma1','mean2','sigma2','weight']
self.bounds=[[-3,3],[0.01,1],[-3,3],[0.01,1],[0.0,1.0]]
data = []
for _ in range(10000):
if np.random.uniform(0.0,1.0) < 0.1:
data.append(np.random.normal(0.5,0.5))
else:
data.append(np.random.normal(-1.5,0.03))
self.data = np.array(data)
def log_likelihood(self,x):
w = x['weight']
logL1 = np.sum(np.log(w)-np.log(x['sigma1'])-0.5*((self.data-x['mean1'])/x['sigma1'])**2)
logL2 = np.sum(np.log(1.0-w)-np.log(x['sigma2'])-0.5*((self.data-x['mean2'])/x['sigma2'])**2)
logL = np.logaddexp(logL1,logL2)
return logL
def log_prior(self,p):
if not self.in_bounds(p): return -np.inf
return -np.log(p['sigma1'])-np.log(p['sigma2'])
def force(self,x):
f = np.zeros(1, dtype = {'names':x.names, 'formats':['f8' for _ in x.names]})
f['sigma1'] = 1.0
f['sigma2'] = 1.0
return f
class GaussianMixtureTestCase(unittest.TestCase):
"""
Test the gaussian model
"""
def setUp(self):
self.work=cpnest.CPNest(GaussianMixtureModel(),verbose=2,nthreads=6,nlive=1024,maxmcmc=1000,poolsize=1000,nslice=6)
def test_run(self):
self.work.run()
def test_all():
unittest.main(verbosity=2)
if __name__=='__main__':
unittest.main(verbosity=2)
| mit | Python |
60f24436f10465c81facd0705c184aa906bd05e6 | Use the updated psxml module. | chingc/DJRivals,chingc/DJRivals | psxml.py | psxml.py | class PrettySimpleXML():
"""A simple little pretty print XML generator."""
def __init__(self, width=4):
self._output = []
self._open_tags = []
self._width = " " * width
self._depth = 0
def _untrim(self, string, newline):
"""Inserts indentation and newline to a string."""
if not isinstance(string, str):
raise ValueError("first argument must be of type: string")
if not isinstance(newline, bool):
raise ValueError("second argument must be of type: boolean")
lead = "" if not self._output or self._output[-1][-1] != "\n" else self._depth * self._width
trail = "" if not newline else "\n"
return lead + string + trail
def _raw(self, string, newline):
"""Add a string exactly as given."""
self._output.append(self._untrim(string, newline))
return self
def _empty(self, tag, attr, newline):
"""Add an empty element."""
self._output.append(self._untrim("<{} />".format(" ".join([tag] + attr)), newline))
return self
def _begin(self, tag, attr, value, newline):
"""Begin and add a new element."""
self._open_tags.append(tag)
self._output.append(self._untrim("<{}>{}".format(" ".join([tag] + attr), value), newline))
self._depth += 1
return self
def _end(self, newline):
"""End an element."""
self._depth -= 1
self._output.append(self._untrim("</{}>".format(self._open_tags.pop()), newline))
return self
def output(self):
"""Returns the output in pretty print."""
return "".join(self._output)
# convenience functions
def raw(self, string):
"""Add a string exactly as given."""
return self._raw(string, False)
def rawln(self, string):
"""Add a string exactly as given and start a newline."""
return self._raw(string, True)
def empty(self, tag, attr=[]):
"""Add an empty element."""
return self._empty(tag, attr, False)
def emptyln(self, tag, attr=[]):
"""Add an empty element and start a newline."""
return self._empty(tag, attr, True)
def begin(self, tag, attr=[], value=""):
"""Begin and add a new element."""
return self._begin(tag, attr, value, False)
def beginln(self, tag, attr=[], value=""):
"""Begin, add a new element, and start a newline."""
return self._begin(tag, attr, value, True)
def end(self):
"""End an element."""
return self._end(False)
def endln(self):
"""End an element and start a newline."""
return self._end(True)
| class PrettySimpleXML():
"""A simple little pretty print XML generator."""
def __init__(self, width=4):
self._output = []
self._open_tags = []
self._width = " " * width
self._depth = 0
def _untrim(self, value, newline):
lead = "" if not self._output or self._output[-1][-1] != "\n" else self._depth * self._width
trail = "" if not newline else "\n"
return "{}{}{}".format(lead, value, trail)
def raw(self, value, newline=True):
self._output.append(self._untrim(value, newline))
return self
def empty(self, tag, attr=[], newline=True):
self._output.append(self._untrim("<{} />".format(" ".join([tag] + attr)), newline))
return self
def start(self, tag, attr=[], value="", newline=True):
self._open_tags.append(tag)
self._output.append(self._untrim("<{}>{}".format(" ".join([tag] + attr), value), newline))
self._depth += 1
return self
def end(self, newline=True):
self._depth -= 1
self._output.append(self._untrim("</{}>".format(self._open_tags.pop()), newline))
return self
def end_all(self):
while len(self._open_tags):
self.end()
return self
def get(self):
return "".join(self._output)
def clear(self):
self._output = []
self._open_tags = []
self._depth = 0
| bsd-2-clause | Python |
4f2a77093ba4305ec673d73281033f91eb70e511 | prepare for release 1.2.129-dev | kvick/aminator,coryb/aminator,bmoyles/aminator,Netflix/aminator | aminator/__init__.py | aminator/__init__.py | # -*- coding: utf-8 -*-
#
#
# Copyright 2013 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
"""
aminator
========
Create images from packages for deployment in various cloud formations
"""
import logging
try:
from logging import NullHandler
except ImportError:
# py26
try:
from logutils import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
__version__ = '1.2.129-dev'
__versioninfo__ = __version__.split('.')
__all__ = ()
logging.getLogger(__name__).addHandler(NullHandler())
| # -*- coding: utf-8 -*-
#
#
# Copyright 2013 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
"""
aminator
========
Create images from packages for deployment in various cloud formations
"""
import logging
try:
from logging import NullHandler
except ImportError:
# py26
try:
from logutils import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
__version__ = '1.2.125-dev'
__versioninfo__ = __version__.split('.')
__all__ = ()
logging.getLogger(__name__).addHandler(NullHandler())
| apache-2.0 | Python |
579b11b16d6e6b7f53c6ea8debd1163f3eb50bb9 | Update example for symmetry detection | sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf | examples/gto/13-symmetry.py | examples/gto/13-symmetry.py | #!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
from pyscf import gto
'''
Specify symmetry.
Mole.symmetry can be True/False to turn on/off the symmetry (default is off),
or a string to specify the symmetry of molecule. symmetry_subgroup keyword
can be set to generate a subgroup of the detected symmetry.
symmetry_subgroup has no effect when an explicit label is assigned to
Mole.symmetry.
Symmetry adapted basis are stored in Mole attribute symm_orb.
'''
mol = gto.M(
atom = 'C 0 .2 0; O 0 0 1.1',
symmetry = True,
)
print('Symmetry %-4s, subgroup %s.' % (mol.topgroup, mol.groupname))
print('--\n')
mol = gto.M(
atom = 'C 0 .2 0; O 0 0 1.1',
symmetry = True,
symmetry_subgroup = 'C2v',
)
print('Symmetry %-4s, subgroup %s.' % (mol.topgroup, mol.groupname))
print('--\n')
mol = gto.M(
atom = 'C 0 0 0; O 0 0 1.5',
symmetry = 'C2v',
)
print('Symmetry %-4s, subgroup %s.' % (mol.topgroup, mol.groupname))
print('If "symmetry=xxx" is specified, the symmetry for the molecule will be set to xxx')
print('--\n')
print('Symmetry adapted orbitals')
for k, ir in enumerate(mol.irrep_name):
print('Irrep name %s (ID %d), symm-adapted-basis shape %s' %
(ir, mol.irrep_id[k], mol.symm_orb[k].shape))
| #!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
from pyscf import gto
'''
Specify symmetry.
Mole.symmetry can be True/False to turn on/off the symmetry (default is off),
or a string to specify the symmetry of molecule. symmetry_subgroup keyword
can be set to generate a subgroup of the dectected symmetry.
symmetry_subgroup has no effect when an explicit label is assigned to
Mole.symmetry.
Symmetry adapted basis are stored in Mole attribute symm_orb.
'''
mol = gto.M(
atom = 'C 0 .2 0; O 0 0 1.1',
symmetry = True,
)
print('Symmetry %-4s, using subgroup %s. The molecule geometry is changed' %
(mol.topgroup, mol.groupname))
for x in mol._atom:
print(x)
print('--\n')
mol = gto.M(
atom = 'C 0 .2 0; O 0 0 1.1',
symmetry = True,
symmetry_subgroup = 'C2v',
)
print('Symmetry %-4s, using subgroup %s. The molecule geometry is changed' %
(mol.topgroup, mol.groupname))
for x in mol._atom:
print(x)
print('--\n')
try:
mol = gto.M(
atom = 'C 0 .2 0; O 0 0 1.1',
symmetry = 'C2v',
)
except RuntimeWarning as e:
print('Unable to identify the symmetry with the input geometry. Error msg:')
print(e)
print('--\n')
mol = gto.M(
atom = 'C 0 0 0; O 0 0 1.5',
symmetry = 'C2v',
)
print('Symmetry %-4s, using subgroup %s.' % (mol.topgroup, mol.groupname))
print('If "symmetry=string" was specified, the string is taken as the '
'group name and the geometry is kept')
for x in mol._atom:
print(x)
for k, ir in enumerate(mol.irrep_name):
print('Irrep name %s (ID %d), symm-adapted-basis shape %s' %
(ir, mol.irrep_id[k], mol.symm_orb[k].shape))
| apache-2.0 | Python |
b0cd7f7d3296028bcc28dff9e222a2a5f4537afa | Fix division by zero | timvandermeij/sentiment-analysis,timvandermeij/sentiment-analysis | analyze.py | analyze.py | import sys
import re
def main(argv):
# Message to perform sentiment analysis on
message = argv[0] if len(argv) > 0 else ""
if message == "":
print("Usage: python analyze.py [message]")
sys.exit(1)
# Load the positive and negative words
words = {}
with open("words/positive.txt") as file:
for line in file:
line = line.replace("\r\n", "").lower()
words[line] = 1
with open("words/negative.txt") as file:
for line in file:
line = line.replace("\r\n", "").lower()
words[line] = -1
# Perform the sentiment analysis
score = 0
found = 0
for w in message.split():
w = re.sub(r'\W+', '', w).lower() # Only keep alphanumeric characters
if w in words:
score += words[w]
found += 1
if found == 0:
print(0)
else:
print(round(score / float(found), 2))
if __name__ == "__main__":
main(sys.argv[1:])
| import sys
import re
def main(argv):
# Message to perform sentiment analysis on
message = argv[0] if len(argv) > 0 else ""
if message == "":
print("Usage: python analyze.py [message]")
sys.exit(1)
# Load the positive and negative words
words = {}
with open("words/positive.txt") as file:
for line in file:
line = line.replace("\r\n", "").lower()
words[line] = 1
with open("words/negative.txt") as file:
for line in file:
line = line.replace("\r\n", "").lower()
words[line] = -1
# Perform the sentiment analysis
score = 0
found = 0
for w in message.split():
w = re.sub(r'\W+', '', w).lower() # Only keep alphanumeric characters
if w in words:
score += words[w]
found += 1
print(round(score / float(found), 2))
if __name__ == "__main__":
main(sys.argv[1:])
| mit | Python |
e5a6957fc7bcdf7568065df796d3048ffe20ca2e | remove typo | lmzintgraf/MultiMAuS | authenticators/simple_authenticators.py | authenticators/simple_authenticators.py | from authenticators.abstract_authenticator import AbstractAuthenticator
class OracleAuthenticator(AbstractAuthenticator):
def authorise_transaction(self, customer):
if customer.fraudster:
return False
else:
return True
class NeverSecondAuthenticator(AbstractAuthenticator):
def authorise_transaction(self, customer):
return True
class AlwaysSecondAuthenticator(AbstractAuthenticator):
def authorise_transaction(self, customer):
if customer.get_authentication() is not None:
return True
else:
return False
class HeuristicAuthenticator(AbstractAuthenticator):
def __init__(self, thresh):
self.thresh = thresh
def authorise_transaction(self, customer):
authorise = True
if customer.curr_amount > self.thresh:
auth_quality = customer.get_authentication()
if auth_quality is None:
authorise = False
return authorise
class RandomAuthenticator(AbstractAuthenticator):
def authorise_transaction(self, customer):
# ask for second authentication in 50% of the cases
if customer.model.random_state.uniform(0, 1, 1)[0] < 0.5:
auth_quality = customer.give_authentication()
if auth_quality is None:
authorise = False
else:
authorise = True
else:
authorise = True
return authorise
| from authenticators.abstract_authenticator import AbstractAuthenticator
class OracleAuthenticator(AbstractAuthenticator):
def authorise_transaction(self, customer):
if customer.fraudster:
return False
else:
return True
class NeverSecondAuthenticator(AbstractAuthenticator):
def authorise_transaction(self, customer):
return True
class AlwaysSecondAuthenticator(AbstractAuthenticator):
def authorise_transaction(self, customer):
if customer.get_authentication() is not None:
return True
else:
return False
class HeuristicAuthenticator(AbstractAuthenticator):
def __init__(self, thresh):g
self.thresh = thresh
def authorise_transaction(self, customer):
authorise = True
if customer.curr_amount > self.thresh:
auth_quality = customer.get_authentication()
if auth_quality is None:
authorise = False
return authorise
class RandomAuthenticator(AbstractAuthenticator):
def authorise_transaction(self, customer):
# ask for second authentication in 50% of the cases
if customer.model.random_state.uniform(0, 1, 1)[0] < 0.5:
auth_quality = customer.give_authentication()
if auth_quality is None:
authorise = False
else:
authorise = True
else:
authorise = True
return authorise
| mit | Python |
a0e024b273bcdad825b84e2527fc93d24ed9d098 | Make the functional testing annotated steps script call the eggplant starting script | dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk | tools/bots/functional_testing.py | tools/bots/functional_testing.py | #!/usr/bin/python
# Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""
Buildbot steps for functional testing master and slaves
"""
import os
import re
import shutil
import sys
import bot
import bot_utils
utils = bot_utils.GetUtils()
FT_BUILDER = r'ft-slave-(linux|mac)'
FT_MASTER = r'ft-master'
HOST_OS = utils.GuessOS()
EDITOR_LOCATION='/home/chrome-bot/Desktop'
def SrcConfig(name, is_buildbot):
"""Returns info for the current buildbot based on the name of the builder.
- mode: always "release"
- system: always "linux" or "mac"
"""
pattern = re.match(FT_BUILDER, name)
master_pattern = re.match(FT_MASTER, name)
if not pattern and not master_pattern:
return None
if master_pattern:
tag = 'master'
system = 'linux'
else:
tag = 'slave'
system = pattern.group(1)
return bot.BuildInfo('none', 'none', 'release', system,
builder_tag=tag)
def Run(args):
print "Running: %s" % ' '.join(args)
sys.stdout.flush()
bot.RunProcess(args)
def FTSlave(config):
with bot.BuildStep('Fetching editor'):
revision = int(os.environ['BUILDBOT_GOT_REVISION'])
bot_name, _ = bot.GetBotName()
print bot_name
channel = bot_utils.GetChannelFromName(bot_name)
namer = bot_utils.GCSNamer(channel=channel)
system = config.system
if system == 'mac':
system = 'macos'
editor_path = namer.editor_zipfilepath(revision, system, 'x64')
gsutils = bot_utils.GSUtil()
local_path = os.path.join(EDITOR_LOCATION, 'editor.zip')
if os.path.exists(local_path):
os.remove(local_path)
local_extracted = os.path.join(EDITOR_LOCATION, 'dart')
shutil.rmtree(local_extracted, ignore_errors=True)
gsutils.execute(['cp', editor_path, local_path])
Run(['unzip', local_path, '-d', EDITOR_LOCATION])
def FTMaster(config):
run = int(os.environ['BUILDBOT_ANNOTATED_STEPS_RUN'])
with bot.BuildStep('Master run %s' % run):
if run == 1:
print 'Not doing anything on master before the triggers'
return
else:
builddir = os.path.join(bot_utils.DART_DIR,
utils.GetBuildDir(HOST_OS, HOST_OS),
'functional_testing')
shutil.rmtree(builddir, ignore_errors=True)
os.makedirs(builddir)
script_locations = os.path.join(bot_utils.DART_DIR, 'editor', 'ft')
Run(['/home/chrome-bot/func-test/bot-run', builddir, script_locations])
def FTSteps(config):
if config.builder_tag == 'master':
FTMaster(config)
else:
FTSlave(config)
if __name__ == '__main__':
bot.RunBot(SrcConfig, FTSteps, build_step=None)
| #!/usr/bin/python
# Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""
Buildbot steps for functional testing master and slaves
"""
import os
import re
import shutil
import sys
import bot
import bot_utils
FT_BUILDER = r'ft-slave-(linux|mac)'
FT_MASTER = r'ft-master'
EDITOR_LOCATION='/home/chrome-bot/Desktop'
def SrcConfig(name, is_buildbot):
"""Returns info for the current buildbot based on the name of the builder.
- mode: always "release"
- system: always "linux" or "mac"
"""
pattern = re.match(FT_BUILDER, name)
master_pattern = re.match(FT_MASTER, name)
if not pattern and not master_pattern:
return None
if master_pattern:
tag = 'master'
system = 'linux'
else:
tag = 'slave'
system = pattern.group(1)
return bot.BuildInfo('none', 'none', 'release', system,
builder_tag=tag)
def Run(args):
print "Running: %s" % ' '.join(args)
sys.stdout.flush()
bot.RunProcess(args)
def FTSteps(config):
if config.builder_tag == 'master':
print 'Not doing anything on master for now'
return
revision = int(os.environ['BUILDBOT_GOT_REVISION'])
bot_name, _ = bot.GetBotName()
print bot_name
channel = bot_utils.GetChannelFromName(bot_name)
namer = bot_utils.GCSNamer(channel=channel)
system = config.system
if system == 'mac':
system = 'macos'
editor_path = namer.editor_zipfilepath(revision, system, 'x64')
gsutils = bot_utils.GSUtil()
local_path = os.path.join(EDITOR_LOCATION, 'editor.zip')
if os.path.exists(local_path):
os.remove(local_path)
local_extracted = os.path.join(EDITOR_LOCATION, 'dart')
shutil.rmtree(local_extracted, ignore_errors=True)
gsutils.execute(['cp', editor_path, local_path])
Run(['unzip', local_path, '-d', EDITOR_LOCATION])
if __name__ == '__main__':
bot.RunBot(SrcConfig, FTSteps)
| bsd-3-clause | Python |
6f77910ac036209a852bc3b08989de7b84b5ee29 | Print error if job is missing config | Foxboron/Frank,Foxboron/Frank,martinp/jarvis2,martinp/jarvis2,mpolden/jarvis2,Foxboron/Frank,mpolden/jarvis2,mpolden/jarvis2,martinp/jarvis2 | app/run.py | app/run.py | #!/usr/bin/env python
"""JARVIS 2 helper script
Usage:
run.py -j [-s] [NAME]
run.py [-d]
Options:
-h --help Show usage
-d --debug Run app in debug mode
-j --job Run a job, will prompt if NAME is not given
-s --json Print job output as JSON
"""
from __future__ import print_function
import os
import signal
from docopt import docopt
from main import app, queues, sched
def _teardown(signal, frame):
sched.shutdown(wait=False)
for queue in queues.values():
queue.put(None)
queues.clear()
# Let the interrupt bubble up so that Flask/Werkzeug see it
raise KeyboardInterrupt
def _run_job(name=None, print_json=False):
import json
import sys
from flask import Flask
from jobs import load_jobs
from pprint import pprint
_app = Flask(__name__)
_app.config.from_envvar('JARVIS_SETTINGS')
conf = _app.config['JOBS']
jobs = load_jobs()
if name is None or len(name) == 0:
names = ' '.join(jobs.keys())
name = raw_input('Name of the job to run [%s]: ' % (names,)).lower()
cls = jobs.get(name)
if cls is None:
print('No such job: %s' % (name,))
sys.exit(1)
job_conf = conf.get(name)
if job_conf is None:
print('No config found for job: %s' % (name,))
sys.exit(1)
job = cls(job_conf)
data = job.get()
if print_json:
print(json.dumps(data, indent=2))
else:
pprint(data)
def _run_app(debug=False):
app.debug = debug
signal.signal(signal.SIGINT, _teardown)
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port, use_reloader=False, threaded=True)
def main():
args = docopt(__doc__)
if args['--job']:
_run_job(args['NAME'], args['--json'])
else:
_run_app(args['--debug'])
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""JARVIS 2 helper script
Usage:
run.py -j [-s] [NAME]
run.py [-d]
Options:
-h --help Show usage
-d --debug Run app in debug mode
-j --job Run a job, will prompt if NAME is not given
-s --json Print job output as JSON
"""
from __future__ import print_function
import os
import signal
from docopt import docopt
from main import app, queues, sched
def _teardown(signal, frame):
sched.shutdown(wait=False)
for queue in queues.values():
queue.put(None)
queues.clear()
# Let the interrupt bubble up so that Flask/Werkzeug see it
raise KeyboardInterrupt
def _run_job(name=None, print_json=False):
import json
import sys
from flask import Flask
from jobs import load_jobs
from pprint import pprint
_app = Flask(__name__)
_app.config.from_envvar('JARVIS_SETTINGS')
conf = _app.config['JOBS']
jobs = load_jobs()
if name is None or len(name) == 0:
names = ' '.join(jobs.keys())
name = raw_input('Name of the job to run [%s]: ' % (names,)).lower()
cls = jobs.get(name)
if cls is None:
print('No such job: %s' % (name,))
sys.exit(1)
job = cls(conf[name])
data = job.get()
if print_json:
print(json.dumps(data, indent=2))
else:
pprint(data)
def _run_app(debug=False):
app.debug = debug
signal.signal(signal.SIGINT, _teardown)
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port, use_reloader=False, threaded=True)
def main():
args = docopt(__doc__)
if args['--job']:
_run_job(args['NAME'], args['--json'])
else:
_run_app(args['--debug'])
if __name__ == '__main__':
main()
| mit | Python |
5fc65183e40dd1d06bd6ae3e4e7ba0f0a0e2bdd6 | Add DAG & non-DAG adjacency dicts | bowen0701/algorithms_data_structures | alg_check_dag.py | alg_check_dag.py | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _previsit():
pass
def _postvisit():
pass
def _dfs_explore():
pass
def check_dag():
"""Check Directed Acyclic Graph (DAG)."""
pass
def main():
# Graph adjacency dictionary for DAG.
dag_adj_d = {
'A': ['D'],
'B': ['D'],
'C': ['D'],
'D': ['E', 'G'],
'E': ['J'],
'F': ['G'],
'G': ['I'],
'I': ['J'],
'J': []
}
# Graph adjacency dictionary for non-DAG.
nondag_adj_d = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _previsit():
pass
def _postvisit():
pass
def _dfs_explore():
pass
def check_dag():
"""Check Directed Acyclic Graph (DAG)."""
pass
def main():
# DAG.
dag_adj_d = {
'A': ['D'],
'B': ['D'],
'C': ['D'],
'D': ['E', 'G'],
'E': ['J'],
'F': ['G'],
'G': ['I'],
'I': ['J'],
'J': []
}
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
17688f5ea2261345cbc0df6b4ffe99a076deaec2 | Tidy up and optimize Client creation | hobarrera/django-afip,hobarrera/django-afip | django_afip/clients.py | django_afip/clients.py | import pytz
from zeep import Client
from zeep.cache import SqliteCache
from zeep.transports import Transport
TZ_AR = pytz.timezone(pytz.country_timezones['ar'][0])
transport = Transport(cache=SqliteCache(timeout=86400))
wsdls = {
('wsaa', False): 'https://wsaa.afip.gov.ar/ws/services/LoginCms?wsdl',
('wsfe', False): 'https://servicios1.afip.gov.ar/wsfev1/service.asmx?WSDL',
('wsaa', True): 'https://wsaahomo.afip.gov.ar/ws/services/LoginCms?wsdl',
('wsfe', True): 'https://wswhomo.afip.gov.ar/wsfev1/service.asmx?WSDL',
}
cached_clients = {}
def get_client(service_name, sandbox=False):
"""
Returns a client for a given service.
The `sandbox` argument should only be necessary if a the client will be
used to make a request. If it will only be used to serialize objects, it is
irrelevant. Avoid the overhead of determining the sandbox mode in the
calling context if only serialization operations will take place.
:param string service_name: The name of the web services.
:param bool sandbox: Whether the sandbox (or production) environment should
be used by the returned client.
:returns: A zeep client to communicate with an AFIP webservice.
:rtype: zeep.Client
"""
key = (service_name.lower(), sandbox,)
try:
if key not in cached_clients:
cached_clients[key] = Client(wsdls[key], transport=transport)
return cached_clients[key]
except KeyError:
ValueError('Unknown service name, {}'.format(service_name))
| import pytz
from django.utils.functional import LazyObject
from zeep import Client
TZ_AR = pytz.timezone(pytz.country_timezones['ar'][0])
# XXX: Below are a set of clients for each WS. Each one is
# lazy-initialized ONCE, and only once.
#
# The code layout is somewhat ugly, so, if you have better code-pattern,
# patches are welcome.
class WsaaProductionClient(LazyObject):
def _setup(self):
self._wrapped = Client(
'https://wsaa.afip.gov.ar/ws/services/LoginCms?wsdl'
)
class WsaaSandboxClient(LazyObject):
def _setup(self):
self._wrapped = Client(
'https://wsaahomo.afip.gov.ar/ws/services/LoginCms?wsdl'
)
class WsfeProductionClient(LazyObject):
def _setup(self):
self._wrapped = Client(
'https://servicios1.afip.gov.ar/wsfev1/service.asmx?WSDL'
)
class WsfeSandboxClient(LazyObject):
def _setup(self):
self._wrapped = Client(
'https://wswhomo.afip.gov.ar/wsfev1/service.asmx?WSDL'
)
production_clients = dict(
wsaa=WsaaProductionClient(),
wsfe=WsfeProductionClient(),
)
sandbox_clients = dict(
wsaa=WsaaSandboxClient(),
wsfe=WsfeSandboxClient(),
)
def get_client(service_name, sandbox=False):
"""
Returns a client for a given service.
The `sandbox` argument should only be necessary if a the client will be
used to make a request. If it will only be used to serialize objects, it is
irrelevant. Avoid the overhead of determining the sandbox mode in the
calling context if only serialization operations will take place.
"""
if sandbox:
return sandbox_clients[service_name]
else:
return production_clients[service_name]
| isc | Python |
e77480eaef553567073af2a7e0cdf7d42628a0c4 | fix #5, use run('sudo ..') instead of sudo('..') in uwsgi restart | mocco/django-fabric | django_fabric/uwsgi.py | django_fabric/uwsgi.py | # -*- coding: utf-8 -*-
from fabric.operations import run
from django_fabric.base import App
class UwsgiApp(App):
ini_files = {}
def __init__(self, ini_files, *args, **kwargs):
super(UwsgiApp, self).__init__(*args, **kwargs)
self.ini_files = ini_files
def restart_app(self, instance):
run("sudo touch %s" % self.ini_files[instance])
| # -*- coding: utf-8 -*-
from fabric.operations import sudo
from django_fabric.base import App
class UwsgiApp(App):
ini_files = {}
def __init__(self, ini_files, *args, **kwargs):
super(UwsgiApp, self).__init__(*args, **kwargs)
self.ini_files = ini_files
def restart_app(self, instance):
sudo("touch %s" % self.ini_files[instance])
| mit | Python |
06ef8037e8439d70dd229ff465887e8bfd0373a3 | Allow archive generation even when some JSON files are missing | speth/planetary-annihilation-db,speth/planetary-annihilation-db,KillerKiwiJuice/planetary-annihilation-db,KillerKiwiJuice/planetary-annihilation-db | archive.py | archive.py | import units
import tarfile
import os
def save_db_info(pa_root=None, version=None):
if pa_root:
units.CONFIG['pa_root'] = pa_root
else:
pa_root = units.CONFIG['pa_root']
db = units.VersionDb()
db.load_units()
if version is None:
version = open(pa_root +'/../version.txt').read().strip()
archive_root = 'units-' + version
archive_name = 'units-{}.tar.bz2'.format(version)
print('Creating archive: "{}"'.format(archive_name))
with tarfile.open(archive_name, 'w:bz2') as archive:
archive.add(pa_root + '/pa/units/unit_list.json',
arcname=archive_root + '/pa/units/unit_list.json')
for filename in db._things:
if os.path.exists(pa_root + filename):
archive.add(pa_root + filename,
arcname=archive_root + filename)
else:
print('WARNING: missing file {!r}'.format(filename))
path_tmpl = '/ui/{}/live_game/img/build_bar/units/{}.png'
for unit in db.units.values():
for directory in ('main/game', 'alpha'):
iconpath = path_tmpl.format(directory, unit.safename)
if os.path.exists(pa_root + iconpath):
archive.add(pa_root + iconpath,
arcname=archive_root + iconpath)
if __name__ == '__main__':
import sys
if len(sys.argv) == 1:
save_db_info()
elif len(sys.argv) <= 3:
save_db_info(*sys.argv[1:])
else:
print('Unrecognized command line arguments:', repr(sys.argv[1:]))
sys.exit(1)
| import units
import tarfile
import os
def save_db_info(pa_root=None, version=None):
if pa_root:
units.CONFIG['pa_root'] = pa_root
else:
pa_root = units.CONFIG['pa_root']
db = units.VersionDb()
db.load_units()
if version is None:
version = open(pa_root +'/../version.txt').read().strip()
archive_root = 'units-' + version
archive_name = 'units-{}.tar.bz2'.format(version)
print('Creating archive: "{}"'.format(archive_name))
with tarfile.open(archive_name, 'w:bz2') as archive:
archive.add(pa_root + '/pa/units/unit_list.json',
arcname=archive_root + '/pa/units/unit_list.json')
for filename in db._things:
archive.add(pa_root + filename,
arcname=archive_root + filename)
path_tmpl = '/ui/{}/live_game/img/build_bar/units/{}.png'
for unit in db.units.values():
for directory in ('main/game', 'alpha'):
iconpath = path_tmpl.format(directory, unit.safename)
if os.path.exists(pa_root + iconpath):
archive.add(pa_root + iconpath,
arcname=archive_root + iconpath)
if __name__ == '__main__':
import sys
if len(sys.argv) == 1:
save_db_info()
elif len(sys.argv) <= 3:
save_db_info(*sys.argv[1:])
else:
print('Unrecognized command line arguments:', repr(sys.argv[1:]))
sys.exit(1)
| mit | Python |
61b55fa9bf2aa8a8f15803033ee34347d009c1e7 | Use CompositionProgress refactoring in console script | ivanovwaltz/wavelet_sound_microscope | analyse_sound.py | analyse_sound.py | #!/usr/bin/env python3
import logging
import os
from contextlib import contextmanager
from functools import partial
import click
from composition import CompositionProgress
logging.basicConfig()
log = logging.getLogger(__name__)
@contextmanager
def statusbar(val):
log.debug('Status before %s', val)
yield
log.debug('Status after %s', val)
@click.command()
@click.argument('source_sound_file', type=click.Path(exists=True))
@click.argument('destination_image_file', type=click.Path(), required=False)
@click.option('--norma_window_len', type=int, default=301)
@click.option('--verbose/--silent', default=False)
def main(source_sound_file, destination_image_file, norma_window_len, verbose):
if verbose:
logging.getLogger('').setLevel(logging.DEBUG)
progress = partial(
click.progressbar,
label='Calculating wavelet transformation',
fill_char=click.style('#', fg='magenta'),
)
composition = CompositionProgress(source_sound_file, progress)
with statusbar('Prepare Wavelet Box'):
composition.prepare_wbox()
img = composition.get_image(norma_window_len=norma_window_len)
file_dir, file_name = os.path.split(source_sound_file)
sound_name, ext = os.path.splitext(file_name)
if not destination_image_file:
name = '{}.jpg'.format(sound_name)
destination_image_file = os.path.join('.', name)
img.save(destination_image_file)
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
import logging
import os
from contextlib import contextmanager
import click
from composition import Composition
logging.basicConfig()
log = logging.getLogger(__name__)
class CompositionWithProgressbar(Composition):
def get_whole_image(self, chunks, decimate):
with click.progressbar(chunks,
label='Calculating wavelet transformation',
fill_char=click.style('#', fg='magenta'),
) as chunks_:
return super().get_whole_image(chunks_, decimate)
@contextmanager
def statusbar(val):
log.debug('Status before %s', val)
yield
log.debug('Status after %s', val)
@click.command()
@click.argument('source_sound_file', type=click.Path(exists=True))
@click.argument('destination_image_file', type=click.Path(), required=False)
@click.option('--norma_window_len', type=int, default=301)
@click.option('--verbose/--silent', default=False)
def main(source_sound_file, destination_image_file, norma_window_len, verbose):
if verbose:
logging.getLogger('').setLevel(logging.DEBUG)
composition = CompositionWithProgressbar(source_sound_file)
with statusbar('Prepare Wavelet Box'):
composition.prepare_wbox()
img = composition.get_image(norma_window_len=norma_window_len)
file_dir, file_name = os.path.split(source_sound_file)
sound_name, ext = os.path.splitext(file_name)
if not destination_image_file:
name = '{}.jpg'.format(sound_name)
destination_image_file = os.path.join('.', name)
img.save(destination_image_file)
if __name__ == '__main__':
main()
| mit | Python |
343ad64efe98f950284ca4ccb8de363d3b535c08 | print tests | forrestv/myhdl,forrestv/myhdl,forrestv/myhdl,forrestv/myhdl | myhdl/test/toVHDL/test_print.py | myhdl/test/toVHDL/test_print.py | from myhdl import *
def PrintBench():
si1 = Signal(intbv(0)[8:])
si2 = Signal(intbv(0, min=-10, max=12))
sb = Signal(bool(0))
@instance
def logic():
i1 = intbv(0)[8:]
i2 = intbv(0, min=-10, max=12)
b = bool(1)
i1[:] = 10
si1.next = 11
i2[:] = -7
si2.next = -5
yield delay(10)
print i1
print i2
print si1
print si2
yield delay(10)
print "This is a test"
yield delay(10)
print int(b)
print int(sb)
yield delay(10)
return logic
def testPrint():
assert conversion.verify(PrintBench) == 0
| from myhdl import *
def PrintBench():
@instance
def logic():
i1 = intbv(0)[8:]
i2 = intbv(0, min=-10, max=12)
b = bool(1)
i1[:] = 10
print int(i1)
yield delay(10)
print "Test"
yield delay(10)
print i1
yield delay(10)
i2[:] = -7
print i2
yield delay(10)
print int(b)
yield delay(10)
return logic
def testPrint():
assert conversion.verify(PrintBench) == 0
| lgpl-2.1 | Python |
503dfd707275789af1d143db16b177ca48a86468 | Fix typo in docs | jpbottaro/anna | anna/model/bridge.py | anna/model/bridge.py | """Bridges transform the state from the encoders so they fit the decoder
Inspired by OpenNMT & google/seq2seq
## Available bridges
@@NoBridge
@@ZeroBridge
@@DenseBridge
"""
import tensorflow as tf
class Bridge:
"""Transforms the state from the encoders so they fit the decoder"""
def __call__(self, zero_state, init_state):
"""
Creates a state for a cell that accepts `zero_state` type of states. Uses
`init` as the input.
Args:
zero_state (tf.Tensor): the result of cell#zero_state().
init_state (tf.Tensor): initialization for the state.
[batch_size, size]
Returns:
init_state (tf.Tensor): same size as `zero_state`, initialized with
`init_state`.
"""
raise NotImplementedError
class NoBridge(Bridge):
def __call__(self, zero_state, init_state):
return init_state
class ZeroBridge(Bridge):
def __call__(self, zero_state, init_state):
return zero_state
class DenseBridge(Bridge):
def __call__(self, zero_state, init_state):
# See states as a flat list of tensors
zero_state_flat = tf.contrib.framework.nest.flatten(zero_state)
# Find sizes of all states
dims = [t.get_shape()[-1].value for t in zero_state_flat]
# Project `init` to cover all needed states
states = tf.layers.dense(init_state, sum(dims))
# Match dimensions of expected states
states = tf.split(states, dims, axis=1)
# Pack the result to conform with the requested states
return tf.contrib.framework.nest.pack_sequence_as(zero_state, states)
| """Bridges transform the state from the encoders so they fit the decoder
Inspired by OpenNMT & google/seq2seq
## Available bridges
@@Bridge
@@NoBridge
@@ZeroBridge
@@DenseBridge
"""
import tensorflow as tf
class Bridge:
"""Transforms the state from the encoders so they fit the decoder"""
def __call__(self, zero_state, init_state):
"""
Creates a state for a cell that accepts `zero_state` type of states. Uses
`init` as the input.
Args:
zero_state (tf.Tensor): the result of cell#zero_state().
init_state (tf.Tensor): initialization for the state.
[batch_size, size]
Returns:
init_state (tf.Tensor): same size as `zero_state`, initialized with
`init_state`.
"""
raise NotImplementedError
class NoBridge(Bridge):
def __call__(self, zero_state, init_state):
return init_state
class ZeroBridge(Bridge):
def __call__(self, zero_state, init_state):
return zero_state
class DenseBridge(Bridge):
def __call__(self, zero_state, init_state):
# See states as a flat list of tensors
zero_state_flat = tf.contrib.framework.nest.flatten(zero_state)
# Find sizes of all states
dims = [t.get_shape()[-1].value for t in zero_state_flat]
# Project `init` to cover all needed states
states = tf.layers.dense(init_state, sum(dims))
# Match dimensions of expected states
states = tf.split(states, dims, axis=1)
# Pack the result to conform with the requested states
return tf.contrib.framework.nest.pack_sequence_as(zero_state, states)
| mit | Python |
cc77c6affaa34333e3eebc6638a53f56f1323348 | Remove extraneous import. | closeio/nylas,ErinCall/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,closeio/nylas,jobscore/sync-engine,closeio/nylas,nylas/sync-engine,gale320/sync-engine,nylas/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,gale320/sync-engine,nylas/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,gale320/sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,jobscore/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,gale320/sync-engine,gale320/sync-engine,closeio/nylas,jobscore/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine | inbox/models/util.py | inbox/models/util.py | from inbox.models import (Calendar, Contact, Message, Event, Block, Tag,
Thread)
def reconcile_message(new_message, session):
"""
Check to see if the (synced) Message instance new_message was originally
created/sent via the Inbox API (based on the X-Inbox-Uid header. If so,
update the existing message with new attributes from the synced message
and return it.
"""
if new_message.inbox_uid is None:
return None
if '-' not in new_message.inbox_uid:
# Old X-Inbox-Id format; use the old reconciliation strategy.
existing_message = session.query(Message).filter(
Message.namespace_id == new_message.namespace_id,
Message.inbox_uid == new_message.inbox_uid,
Message.is_created == True).first()
version = None
else:
# new_message has the new X-Inbox-Id format <public_id>-<version>
# If this is an old version of a current draft, we want to:
# * not commit a new, separate Message object for it
# * not update the current draft with the old header values in the code
# below.
expected_public_id, version = new_message.inbox_uid.split('-')
existing_message = session.query(Message).filter(
Message.namespace_id == new_message.namespace_id,
Message.public_id == expected_public_id,
Message.is_created == True).first()
if existing_message is None:
return None
if version is None or int(version) == existing_message.version:
existing_message.message_id_header = new_message.message_id_header
existing_message.full_body = new_message.full_body
existing_message.references = new_message.references
return existing_message
def transaction_objects():
"""
Return the mapping from API object name - which becomes the
Transaction.object_type - for models that generate Transactions (i.e.
models that implement the HasRevisions mixin).
"""
return {
'calendar': Calendar,
'contact': Contact,
'draft': Message,
'event': Event,
'file': Block,
'message': Message,
'tag': Tag,
'thread': Thread
}
| from inbox.models import (Calendar, Contact, Message, Event, Block, Message,
Tag, Thread)
def reconcile_message(new_message, session):
"""
Check to see if the (synced) Message instance new_message was originally
created/sent via the Inbox API (based on the X-Inbox-Uid header. If so,
update the existing message with new attributes from the synced message
and return it.
"""
if new_message.inbox_uid is None:
return None
if '-' not in new_message.inbox_uid:
# Old X-Inbox-Id format; use the old reconciliation strategy.
existing_message = session.query(Message).filter(
Message.namespace_id == new_message.namespace_id,
Message.inbox_uid == new_message.inbox_uid,
Message.is_created == True).first()
version = None
else:
# new_message has the new X-Inbox-Id format <public_id>-<version>
# If this is an old version of a current draft, we want to:
# * not commit a new, separate Message object for it
# * not update the current draft with the old header values in the code
# below.
expected_public_id, version = new_message.inbox_uid.split('-')
existing_message = session.query(Message).filter(
Message.namespace_id == new_message.namespace_id,
Message.public_id == expected_public_id,
Message.is_created == True).first()
if existing_message is None:
return None
if version is None or int(version) == existing_message.version:
existing_message.message_id_header = new_message.message_id_header
existing_message.full_body = new_message.full_body
existing_message.references = new_message.references
return existing_message
def transaction_objects():
"""
Return the mapping from API object name - which becomes the
Transaction.object_type - for models that generate Transactions (i.e.
models that implement the HasRevisions mixin).
"""
return {
'calendar': Calendar,
'contact': Contact,
'draft': Message,
'event': Event,
'file': Block,
'message': Message,
'tag': Tag,
'thread': Thread
}
| agpl-3.0 | Python |
9cce9ac6f183e035cb5fbead1b83830058cacd8d | Set default tips app | MeirKriheli/debian.org.il,MeirKriheli/debian.org.il | apps/tips/__init__.py | apps/tips/__init__.py | default_app_config = 'tips.apps.TipsConfig'
| mit | Python | |
46738818354d42fa93a6358aba7763de91d14bb0 | Comment example Selenium script | srguiwiz/nrvr-commander | dev/examples/qa/selenium/selenium-tests.py | dev/examples/qa/selenium/selenium-tests.py | #!/usr/bin/python
from selenium import webdriver
browser = webdriver.Firefox()
browser.implicitly_wait(180)
browser.get("http://www.bbc.co.uk/")
# this is just a website we had picked as an example,
# it may have different links by the time you are trying it out
#
# why this doesn't go all of below links all the time apparently is a Selenium issue,
# possibly specific to versions of Selenium,
# maybe to a browser specific driver
#
# you should figure that out for your own site you are testing, and for your own script
#
# a universally correctly running example will gladly be included here as a replacement
browser.find_element_by_link_text("News").click()
browser.find_element_by_link_text("US & Canada").click()
browser.find_element_by_link_text("Europe").click()
browser.find_element_by_link_text("Weather").click()
browser.find_element_by_link_text("Capital").click()
print "DONE running %s" % (__file__)
| #!/usr/bin/python
from selenium import webdriver
browser = webdriver.Firefox()
browser.implicitly_wait(60)
browser.get("http://www.bbc.co.uk/")
browser.find_element_by_link_text("News").click()
browser.find_element_by_link_text("US & Canada").click()
browser.find_element_by_link_text("Europe").click()
browser.find_element_by_link_text("Weather").click()
browser.find_element_by_link_text("Capital").click()
| bsd-2-clause | Python |
e9c9076090540f093a368be8b8e6076774ba93fd | fix response | msmexplorer/msmexplorer,msmexplorer/msmexplorer | devtools/travis-ci/update_versions_json.py | devtools/travis-ci/update_versions_json.py | import json
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
from msmexplorer import version
if not version.release:
print("This is not a release.")
exit(0)
URL = 'http://www.msmbuilder.org/msmexplorer'
res = urlopen(URL + '/versions.json').read().decode('utf-8')
versions = json.loads(res)
# new release so all the others are now old
for i in range(len(versions)):
versions[i]['latest'] = False
versions.append({
'version': version.short_version,
'url': "{base}/{version}".format(base=URL, version=version.short_version),
'latest': True})
with open("docs/_deploy/versions.json", 'w') as versionf:
json.dump(versions, versionf)
| import json
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
from msmexplorer import version
if not version.release:
print("This is not a release.")
exit(0)
URL = 'http://www.msmbuilder.org/msmexplorer'
versions = json.load(urlopen(URL + '/versions.json'))
# new release so all the others are now old
for i in range(len(versions)):
versions[i]['latest'] = False
versions.append({
'version': version.short_version,
'url': "{base}/{version}".format(base=URL, version=version.short_version),
'latest': True})
with open("docs/_deploy/versions.json", 'w') as versionf:
json.dump(versions, versionf)
| mit | Python |
4b665bb2e85994e3df0324afacb2453b8f4998a1 | Handle dask TimeoutError exception in tests | dwhswenson/contact_map,dwhswenson/contact_map | contact_map/tests/test_dask_runner.py | contact_map/tests/test_dask_runner.py |
# pylint: disable=wildcard-import, missing-docstring, protected-access
# pylint: disable=attribute-defined-outside-init, invalid-name, no-self-use
# pylint: disable=wrong-import-order, unused-wildcard-import
from .utils import *
from contact_map.dask_runner import *
def dask_setup_test_cluster(distributed, n_workers=4, n_attempts=3):
"""Set up a test cluster using dask.distributed. Try up to n_attempts
times, and skip the test if all attempts fail.
"""
cluster = None
for _ in range(n_attempts):
try:
cluster = distributed.LocalCluster(n_workers=n_workers)
except distributed.TimeoutError:
continue
else:
return cluster
# only get here if all retries fail
pytest.skip("Failed to set up distributed LocalCluster")
class TestDaskContactFrequency(object):
def test_dask_integration(self):
# this is an integration test to check that dask works
dask = pytest.importorskip('dask') # pylint: disable=W0612
distributed = pytest.importorskip('dask.distributed')
# Explicitly set only 4 workers on Travis instead of 31
# Fix copied from https://github.com/spencerahill/aospy/pull/220/files
cluster = dask_setup_test_cluster(distributed)
client = distributed.Client(cluster)
filename = find_testfile("trajectory.pdb")
dask_freq = DaskContactFrequency(client, filename, cutoff=0.075,
n_neighbors_ignored=0)
client.close()
assert dask_freq.n_frames == 5
|
# pylint: disable=wildcard-import, missing-docstring, protected-access
# pylint: disable=attribute-defined-outside-init, invalid-name, no-self-use
# pylint: disable=wrong-import-order, unused-wildcard-import
from .utils import *
from contact_map.dask_runner import *
class TestDaskContactFrequency(object):
def test_dask_integration(self):
# this is an integration test to check that dask works
dask = pytest.importorskip('dask') # pylint: disable=W0612
distributed = pytest.importorskip('dask.distributed')
# Explicitly set only 4 workers on Travis instead of 31
# Fix copied from https://github.com/spencerahill/aospy/pull/220/files
cluster = distributed.LocalCluster(n_workers=4)
client = distributed.Client(cluster)
filename = find_testfile("trajectory.pdb")
dask_freq = DaskContactFrequency(client, filename, cutoff=0.075,
n_neighbors_ignored=0)
client.close()
assert dask_freq.n_frames == 5
| lgpl-2.1 | Python |
bbda0a77d564631379616b416e830c2f9432ffb5 | Fix log. | Labbiness/Pancake,Labbiness/Pancake | Pancake/pancake.py | Pancake/pancake.py | #
# Copyright (c) 2017 Shota Shimazu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import Console
import Installer.make as cake
if __name__ == "__main__":
Console.log("Now test")
| #
# Copyright (c) 2017 Shota Shimazu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import Core.debug as debug
import Installer.make as cake
if __name__ == "__main__":
debug.Log("Now test")
| apache-2.0 | Python |
3a3b1ac34119c6b6bcb9add10589ca13b1ed5f27 | Add fallback to use local rhyme db if not installed | fathat/pyrhyme | rhyme.py | rhyme.py | #!/usr/bin/env python
import sqlite3 as sql
import sys, os, os.path
_rhymedb_path = os.path.join(sys.prefix, 'data/rhyme.db')
try:
_conn = sql.connect(_rhymedb_path)
except sql.OperationalError:
_rhymedb_path = os.path.join(os.path.dirname(sys.argv[0]), 'data/rhyme.db')
_conn = sql.connect(_rhymedb_path) # if it fails again, let it crash
def rhymes_with(word):
"""Returns a list of words that rhyme, or [] if no words rhyme."""
global _conn
cursor = _conn.execute("select * from words where word=?", (word.lower(),))
row = cursor.fetchone()
if not row: return []
word, sound, key = row
cursor = _conn.execute("select * from rhymes where sound=?", (sound,))
sound, words = cursor.fetchone()
#return all the matching words. If a word has a (n) on it, clip it off,
#and also don't return the original word
return [x.split('(')[0] for x in words.split() if x.lower() != word.lower()]
def main():
for word in sys.argv[1:]:
print '%s: %s' % (word, ', '.join(rhymes_with(word)))
main() if __name__=='__main__' else None
| #!/usr/bin/env python
import sqlite3 as sql
import sys, os, os.path
_conn = sql.connect(os.path.join(sys.prefix, 'data/rhyme.db'))
def rhymes_with(word):
"""Returns a list of words that rhyme, or [] if no words rhyme."""
global _conn
cursor = _conn.execute("select * from words where word=?", (word.lower(),))
row = cursor.fetchone()
if not row: return []
word, sound, key = row
cursor = _conn.execute("select * from rhymes where sound=?", (sound,))
sound, words = cursor.fetchone()
#return all the matching words. If a word has a (n) on it, clip it off,
#and also don't return the original word
return [x.split('(')[0] for x in words.split() if x.lower() != word.lower()]
def main():
for word in sys.argv[1:]:
print '%s: %s' % (word, ', '.join(rhymes_with(word)))
main() if __name__=='__main__' else None
| mit | Python |
fdc139414ef0c0415448088d55f3c34aa56ef349 | Update RebuildMetadata.py | Bookworm-project/BookwormDB,Bookworm-project/BookwormDB | RebuildMetadata.py | RebuildMetadata.py | import MySQLdb
import re
import sys
import json
import os
from subprocess import call
from ParseDate import *
from CreateDatabase import *
from ImportNewLibrary import *
from WordsTableCreate import WordsTableCreate
# Pull a dbname from command line input.
dbname = sys.argv[1]
dbuser = sys.argv[2]
dbpassword = sys.argv[3]
Bookworm = BookwormSQLDatabase(dbname,dbuser,dbpassword)
print "Parsing field_descriptions.json"
ParseFieldDescs()
print "Parsing jsoncatalog.json"
ParseJSONCatalog()
"Writing metadata to new catalog file..."
write_metadata(Bookworm.variables)
Bookworm.load_book_list()
Bookworm.create_memory_table_script()
Bookworm.jsonify_data()
Bookworm.create_API_settings()
| import MySQLdb
import re
import sys
import json
import os
from subprocess import call
import ParseDate
#These three libraries define the Bookworm-specific methods.
from CreateDatabase import *
from ImportNewLibrary import *
from WordsTableCreate import WordsTableCreate
# Pull a dbname from command line input.
dbname = sys.argv[1]
dbuser = sys.argv[2]
dbpassword = sys.argv[3]
Bookworm = BookwormSQLDatabase(dbname,dbuser,dbpassword)
print "Parsing the dates to a native format"
ParseDate.DateParser()
"Writing metadata to new catalog file..."
write_metadata(Bookworm.variables)
Bookworm.load_book_list()
Bookworm.create_memory_table_script()
Bookworm.jsonify_data()
Bookworm.create_API_settings()
| mit | Python |
61dc22ffd1fc2eed8625e7bc3d1431f8b5d92fbd | remove gen_image | kozistr/Awesome-GANs | image_utils.py | image_utils.py | import tensorflow as tf
import numpy as np
import imageio
import cv2
def down_sampling(img):
shape = img.get_shape() # [batch, height, width, channels]
h2 = int(shape[1] // 2)
w2 = int(shape[2] // 2)
return tf.image.resize_images(img, [h2, w2], tf.image.ResizeMethod.BILINEAR)
def up_sampling(img):
shape = img.get_shape() # [batch, height, width, channels]
h2 = int(shape[1] * 2)
w2 = int(shape[2] * 2)
return tf.image.resize_images(img, [h2, w2], tf.image.ResizeMethod.BILINEAR)
def inverse_transform(images):
images *= 255.
images[images > 255.] = 255.
images[images < 0.] = 0.
return images
def merge(images, size):
h, w = images.shape[1], images.shape[2]
img = np.zeros((h * size[0], w * size[1], 3))
for idx, image in enumerate(images):
i = idx % size[1]
j = idx // size[1]
img[j * h:j * h + h, i * w:i * w + w, :] = image
return img
def img_save(images, size, path):
image = np.squeeze(merge(images, size))
return imageio.imwrite(path, image)
def save_images(images, size, image_path):
return img_save(inverse_transform(images), size, image_path)
def save_image(img, path):
img = inverse_transform(img).astype(np.uint8)
return cv2.imwrite(path, img)
| import tensorflow as tf
import numpy as np
import imageio
import cv2
def down_sampling(img):
shape = img.get_shape() # [batch, height, width, channels]
h2 = int(shape[1] // 2)
w2 = int(shape[2] // 2)
return tf.image.resize_images(img, [h2, w2], tf.image.ResizeMethod.BILINEAR)
def up_sampling(img):
shape = img.get_shape() # [batch, height, width, channels]
h2 = int(shape[1] * 2)
w2 = int(shape[2] * 2)
return tf.image.resize_images(img, [h2, w2], tf.image.ResizeMethod.BILINEAR)
def inverse_transform(images):
images *= 255.
images[images > 255.] = 255.
images[images < 0.] = 0.
return images
def merge(images, size):
h, w = images.shape[1], images.shape[2]
img = np.zeros((h * size[0], w * size[1], 3))
for idx, image in enumerate(images):
i = idx % size[1]
j = idx // size[1]
img[j * h:j * h + h, i * w:i * w + w, :] = image
return img
def img_save(images, size, path):
image = np.squeeze(merge(images, size))
return imageio.imwrite(path, image)
def save_images(images, size, image_path):
return img_save(inverse_transform(images), size, image_path)
def save_image(img, path):
img = inverse_transform(img).astype(np.uint8)
return cv2.imwrite(path, img)
def get_image(path, w, h):
img = cv2.imread(path)
img = cv2.resize(img, (w, h))
# bgr to rgb
# b, g, r = cv2.split(img)
# img = cv2.merge([r, g, b])
return img
| mit | Python |
e6a1c5fbeb3f0bb370dd459092ae52e69e45bbc6 | Tidy up imports | incuna/incuna-mail,incuna/incuna-mail | incuna_mail.py | incuna_mail.py | from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.mail import EmailMessage, EmailMultiAlternatives
from django.template.loader import render_to_string
def get_manager_emails():
"""
Get a list of the managers email addresses.
"""
addresses = User.objects.filter(is_staff=True).exclude(email='').distinct().values_list('email')
manager_emails = [m[0] for m in addresses]
if not manager_emails:
manager_emails = [m[1] for m in settings.MANAGERS]
return manager_emails
def send(sender=None, to=(), cc=(), bcc=(), subject='mail',
attachments=(), template_name=(), text_template_name=(),
extra_context=None, **kwargs):
"""
Render and send a (mail) template.
if text_template_name is not None then a multipart email will be sent using
template for the html part and text_template_name for the plain part.
The context will include the current site (and any extra_context specified).
If no sender is specified then the DEFAULT_FROM_EMAIL or SERVER_EMAIL setting will be used.
Any extra items passed in with kwargs will be added to the email headers.
"""
current_site = Site.objects.get_current()
if isinstance(to, basestring):
to = [to]
if isinstance(cc, basestring):
cc = [cc]
if isinstance(bcc, basestring):
bcc = [bcc]
if sender is None:
sender = hasattr(settings, 'DEFAULT_FROM_EMAIL') and settings.DEFAULT_FROM_EMAIL or settings.SERVER_EMAIL
subject = unicode(subject)
context = {'site': current_site}
if extra_context is not None:
context.update(extra_context)
attachment_list = [[a.name, a.read(), a.content_type] for a in attachments if attachments]
email_kwargs = {
'from_email': sender,
'to': to,
'cc': cc,
'bcc': bcc,
'subject': subject,
'attachments': attachment_list,
'headers': kwargs,
}
if not text_template_name:
email_kwargs['body'] = render_to_string(template_name, context)
msg = EmailMessage(**email_kwargs)
else:
email_kwargs['body'] = render_to_string(text_template_name, context)
msg = EmailMultiAlternatives(**email_kwargs)
html_content = render_to_string(template_name, context)
msg.attach_alternative(html_content, 'text/html')
msg.send()
| from django.conf import settings
from django.contrib.sites.models import Site
from django.template.loader import render_to_string
from django.core.mail import EmailMultiAlternatives, EmailMessage
from django.contrib.auth.models import User
def get_manager_emails():
"""
Get a list of the managers email addresses.
"""
addresses = User.objects.filter(is_staff=True).exclude(email='').distinct().values_list('email')
manager_emails = [m[0] for m in addresses]
if not manager_emails:
manager_emails = [m[1] for m in settings.MANAGERS]
return manager_emails
def send(sender=None, to=(), cc=(), bcc=(), subject='mail',
attachments=(), template_name=(), text_template_name=(),
extra_context=None, **kwargs):
"""
Render and send a (mail) template.
if text_template_name is not None then a multipart email will be sent using
template for the html part and text_template_name for the plain part.
The context will include the current site (and any extra_context specified).
If no sender is specified then the DEFAULT_FROM_EMAIL or SERVER_EMAIL setting will be used.
Any extra items passed in with kwargs will be added to the email headers.
"""
current_site = Site.objects.get_current()
if isinstance(to, basestring):
to = [to]
if isinstance(cc, basestring):
cc = [cc]
if isinstance(bcc, basestring):
bcc = [bcc]
if sender is None:
sender = hasattr(settings, 'DEFAULT_FROM_EMAIL') and settings.DEFAULT_FROM_EMAIL or settings.SERVER_EMAIL
subject = unicode(subject)
context = {'site': current_site}
if extra_context is not None:
context.update(extra_context)
attachment_list = [[a.name, a.read(), a.content_type] for a in attachments if attachments]
email_kwargs = {
'from_email': sender,
'to': to,
'cc': cc,
'bcc': bcc,
'subject': subject,
'attachments': attachment_list,
'headers': kwargs,
}
if not text_template_name:
email_kwargs['body'] = render_to_string(template_name, context)
msg = EmailMessage(**email_kwargs)
else:
email_kwargs['body'] = render_to_string(text_template_name, context)
msg = EmailMultiAlternatives(**email_kwargs)
html_content = render_to_string(template_name, context)
msg.attach_alternative(html_content, 'text/html')
msg.send()
| bsd-2-clause | Python |
58e3a0a09600f2b99df4eee74b39c462e56a2e0a | Use py.test for running tests in invoke tasks file | FreeMusicNinja/freemusic.ninja,FreeMusicNinja/freemusic.ninja | django/tasks.py | django/tasks.py | import os
from invoke import run, task
@task
def test(speed='fast'):
if speed == 'fast':
os.environ['DATABASE_URL'] = "sqlite://"
run("py.test")
| import os
from invoke import run, task
@task
def test(speed='fast'):
if speed == 'fast':
os.environ['DATABASE_URL'] = "sqlite://"
run("coverage run manage.py test")
run("coverage html")
| bsd-3-clause | Python |
791ee4a19313242fd16e4460b0d4b06d96fdc226 | fix documentation version | PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild | doc-src/conf.py | doc-src/conf.py | #!/usr/bin/env python3
import recommonmark
from pkg_resources import DistributionNotFound, get_distribution
from recommonmark.parser import CommonMarkParser
from recommonmark.transform import AutoStructify
try:
__version__ = get_distribution("benchbuild").version
except DistributionNotFound:
pass
project = 'benchbuild'
copyright = '2018, Andreas Simbürger'
author = 'Andreas Simbürger'
version = '.'.join(__version__.split('.')[:2])
release = __version__
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
language = 'en'
pygments_style = 'sphinx'
todo_include_todos = True
html_theme = 'nature'
# html_theme_options = {}
# html_static_path = ['_static']
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'benchbuild', 'benchbuild Documentation',
[author], 1)
]
def setup(app):
app.add_config_value('recommonmark_config', {
'auto_toc_tree_section': 'Contents',
'enable_eval_rst': True,
'enable_auto_doc_ref': True,
}, True)
app.add_transform(AutoStructify)
| #!/usr/bin/env python3
import recommonmark
from recommonmark.parser import CommonMarkParser
from recommonmark.transform import AutoStructify
from pkg_resources import DistributionNotFound, get_distribution
try:
__version__ = get_distribution("benchbuild").version
except DistributionNotFound:
LOG.error("could not find version information.")
project = 'benchbuild'
copyright = '2018, Andreas Simbürger'
author = 'Andreas Simbürger'
version = '2.0'
release = VERSION
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
language = 'en'
pygments_style = 'sphinx'
todo_include_todos = True
html_theme = 'nature'
# html_theme_options = {}
# html_static_path = ['_static']
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'benchbuild', 'benchbuild Documentation',
[author], 1)
]
def setup(app):
app.add_config_value('recommonmark_config', {
'auto_toc_tree_section': 'Contents',
'enable_eval_rst': True,
'enable_auto_doc_ref': True,
}, True)
app.add_transform(AutoStructify)
| mit | Python |
1bb4d511723682568c0683450cdedf6b4878643e | Add detailed Maintenance admin | mfcovington/django-system-maintenance,mfcovington/django-system-maintenance,mfcovington/django-system-maintenance | system_maintenance/admin.py | system_maintenance/admin.py | from django.contrib import admin
from .models import Maintenance, MaintenanceType, Software, SysAdmin, System
@admin.register(Maintenance)
class MaintenanceAdmin(admin.ModelAdmin):
fieldset_basic = ('Basic', {
'fields': [
'system',
'sys_admin',
'maintenance_type',
'software',
'datetime',
'success',
],
})
fieldset_description = ('Description', {
'fields': [
'description',
'description_markup_type',
],
})
fieldset_procedure = ('Procedure', {
'fields': [
'procedure',
'procedure_markup_type',
],
})
fieldset_problems = ('Problems', {
'fields': [
'problems',
'problems_markup_type',
],
})
fieldsets = [
fieldset_basic,
fieldset_description,
fieldset_procedure,
fieldset_problems,
]
filter_horizontal = ['software']
list_display = [
'id',
'system',
'datetime',
'maintenance_type',
'sys_admin',
'success',
]
list_filter = [
'system',
'maintenance_type',
'software',
'success',
'sys_admin',
]
search_fields = [
'description',
'procedure',
'problems',
]
admin.site.register(MaintenanceType)
admin.site.register(Software)
admin.site.register(SysAdmin)
admin.site.register(System)
| from django.contrib import admin
from .models import Maintenance, MaintenanceType, Software, SysAdmin, System
admin.site.register(Maintenance)
admin.site.register(MaintenanceType)
admin.site.register(Software)
admin.site.register(SysAdmin)
admin.site.register(System)
| bsd-3-clause | Python |
75c5a6c5562938da67c8c0930fbaa6faaf972d32 | update version | icoxfog417/mlimages | setup.py | setup.py | from distutils.core import setup
setup(
name="mlimages",
packages=[
"mlimages",
"mlimages.gather",
"mlimages.scripts",
"mlimages.util",
],
install_requires=[
"requests",
"aiohttp"
],
version="0.2",
description="gather image data and create training data for machine learning",
author="icoxfog417",
author_email="icoxfog417@yahoo.co.jp",
url="https://github.com/icoxfog417/mlimages",
download_url="https://github.com/icoxfog417/mlimages/tarball/0.2",
keywords = ["imagenet", "machine learning"],
classifiers=[],
)
| from distutils.core import setup
setup(
name="mlimages",
packages=[
"mlimages",
"mlimages.imagenet"
],
install_requires=[
"requests",
"aiohttp"
],
version="0.1",
description="gather and create image dataset for machine learning",
author="icoxfog417",
author_email="icoxfog417@yahoo.co.jp",
url="https://github.com/icoxfog417/mlimages",
download_url="https://github.com/icoxfog417/mlimages/tarball/0.1",
keywords = ["imagenet", "machine learning"],
classifiers=[],
)
| mit | Python |
d3545dabf741528e0b95fae995861299dbba5e52 | Change version to 0.1.7 | keitaoouchi/seleniumwrapper | setup.py | setup.py | from setuptools import setup
from sys import version
if version < '2.6.0':
raise Exception("This module doesn't support any version less than 2.6")
import sys
sys.path.append("./test")
with open('README.rst', 'r') as f:
long_description = f.read()
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
"Programming Language :: Python",
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Topic :: Software Development :: Libraries :: Python Modules'
]
setup(
author='Keita Oouchi',
author_email='keita.oouchi@gmail.com',
url = 'https://github.com/keitaoouchi/seleniumwrapper',
name = 'seleniumwrapper',
version = '0.1.7',
package_dir={"":"src"},
packages = ['seleniumwrapper'],
test_suite = "test_seleniumwrapper.suite",
license='BSD License',
classifiers=classifiers,
description = 'selenium webdriver wrapper to make manipulation easier.',
long_description=long_description,
)
| from setuptools import setup
from sys import version
if version < '2.6.0':
raise Exception("This module doesn't support any version less than 2.6")
import sys
sys.path.append("./test")
with open('README.rst', 'r') as f:
long_description = f.read()
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
"Programming Language :: Python",
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Topic :: Software Development :: Libraries :: Python Modules'
]
setup(
author='Keita Oouchi',
author_email='keita.oouchi@gmail.com',
url = 'https://github.com/keitaoouchi/seleniumwrapper',
name = 'seleniumwrapper',
version = '0.1.6',
package_dir={"":"src"},
packages = ['seleniumwrapper'],
test_suite = "test_seleniumwrapper.suite",
license='BSD License',
classifiers=classifiers,
description = 'selenium webdriver wrapper to make manipulation easier.',
long_description=long_description,
)
| bsd-3-clause | Python |
42b5ce9a1f2c320597e80b49b58b8f0fc9e80661 | bump 0.1.2 | arubertoson/mayatest | setup.py | setup.py | #! /usr/bin/env python
import os
import sys
from setuptools import setup
if sys.version_info[:2] < (2, 7):
sys.exit('mayalauncher requires Python 2.7 or higher.')
here = os.path.abspath(os.path.dirname(__file__))
# Get long description
try:
import pypandoc
pypandoc.convert_file('README.md', 'rst', outputfile='README.rst')
with open('README.rst') as rst:
description = rst.read()
os.remove('README.rst')
except (IOError, ImportError):
with open(os.path.join(here, 'README.md'), 'r') as f:
description = f.read()
setup(
name='mayatest',
version='0.1.2',
description='Test Autodesk Maya scripts and modules with pytest',
long_description=description,
author='Marcus Albertsson',
author_email='marcus.arubertoson@gmail.com',
url='https://github.com/arubertoson/mayatest',
license='MIT',
packages=['mayatest'],
install_requires=['pytest', 'mock'],
include_package_data=True,
zip_safe=False,
entry_points={'console_scripts': ['mayatest = mayatest.cli:main']},
classifiers=[
'Development Status :: 3 - Alpha',
'Programming Language :: Python :: 2.7',
'Intended Audience :: End Users/Desktop',
'Environment :: Console',
])
| #! /usr/bin/env python
import os
import sys
from setuptools import setup
if sys.version_info[:2] < (2, 7):
sys.exit('mayalauncher requires Python 2.7 or higher.')
here = os.path.abspath(os.path.dirname(__file__))
# Get long description
try:
import pypandoc
pypandoc.convert_file('README.md', 'rst', outputfile='README.rst')
with open('README.rst') as rst:
description = rst.read()
os.remove('README.rst')
except (IOError, ImportError):
with open(os.path.join(here, 'README.md'), 'r') as f:
description = f.read()
setup(
name='mayatest',
version='0.1.1',
description='Test Autodesk Maya scripts and modules with pytest',
long_description=description,
author='Marcus Albertsson',
author_email='marcus.arubertoson@gmail.com',
url='https://github.com/arubertoson/mayatest',
license='MIT',
packages=['mayatest'],
install_requires=['pytest', 'mock'],
include_package_data=True,
zip_safe=False,
entry_points={'console_scripts': ['mayatest = mayatest.cli:main']},
classifiers=[
'Development Status :: 3 - Alpha',
'Programming Language :: Python :: 2.7',
'Intended Audience :: End Users/Desktop',
'Environment :: Console',
])
| mit | Python |
9937071cfb85ab39c1c22a862f826c226b0e71ba | change long description to markdown | mhajiloo/persiantools | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from io import open
from setuptools import setup
version = '1.3.0'
def readme():
with open('README.md', encoding='utf-8') as f:
return f.read()
setup(name='persiantools',
version=version,
description='Jalali date and datetime with other tools',
long_description=readme(),
long_description_content_type="text/markdown",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Localization',
'Topic :: Utilities',
],
keywords='jalali shamsi persian digits characters converter jalalidate '
'jalalidatetime date datetime jdate jdatetime',
url='https://github.com/mhajiloo/persiantools',
author='Majid Hajiloo',
author_email='majid.hajiloo@gmail.com',
license='MIT',
packages=['persiantools'],
tests_require=['pytest', 'pytest-cov'],
install_requires=[],
include_package_data=True,
zip_safe=False)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from io import open
from setuptools import setup
version = '1.3.0'
def readme():
with open('README.md', encoding='utf-8') as f:
return f.read()
setup(name='persiantools',
version=version,
description='Jalali date and datetime with other tools',
long_description=readme(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Localization',
'Topic :: Utilities',
],
keywords='jalali shamsi persian digits characters converter jalalidate '
'jalalidatetime date datetime jdate jdatetime',
url='https://github.com/mhajiloo/persiantools',
author='Majid Hajiloo',
author_email='majid.hajiloo@gmail.com',
license='MIT',
packages=['persiantools'],
tests_require=['pytest', 'pytest-cov'],
install_requires=[],
include_package_data=True,
zip_safe=False)
| mit | Python |
c9abb51e496e19137dd596a55dbd98f613659c07 | Change author in setup.py | pegasus-isi/pegasus-metrics,pegasus-isi/pegasus-metrics,pegasus-isi/pegasus-metrics | setup.py | setup.py | import os
import sys
from setuptools import setup
# Utility function to read the README file.
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# The packages we depend on
dependencies = [
"Flask==0.9",
"MySQL-python==1.2.4c1",
"repoze.lru==0.5",
"WTForms==1.0.3",
"requests==1.1.0"
]
# If old Python, then we need simplejson
if sys.version_info < (2,6):
dependencies += ["simplejson>=2.6.2"]
setup(
name = "pegasus-metrics",
version = "0.1",
author = "Pegasus Team",
author_email = "pegasus@isi.edu",
description = "Anonymous usage metrics collection and reporting for Pegasus",
long_description = read("README.md"),
license = "Apache2",
url = "https://github.com/pegasus-isi/pegasus-metrics",
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
packages = ["pegasus","pegasus.metrics"],
package_data = {"pegasus.metrics" : ["templates/*", "static/*"] },
include_package_data = True,
zip_safe = False,
scripts = ["bin/pegasus-metrics-server", "bin/pegasus-metrics-loader"],
install_requires = dependencies
)
| import os
import sys
from setuptools import setup
# Utility function to read the README file.
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# The packages we depend on
dependencies = [
"Flask==0.9",
"MySQL-python==1.2.4c1",
"repoze.lru==0.5",
"WTForms==1.0.3",
"requests==1.1.0"
]
# If old Python, then we need simplejson
if sys.version_info < (2,6):
dependencies += ["simplejson>=2.6.2"]
setup(
name = "pegasus-metrics",
version = "0.1",
author = "Gideon Juve",
author_email = "gideon@isi.edu",
description = "Anonymous usage metrics collection and reporting for Pegasus",
long_description = read("README.md"),
license = "Apache2",
url = "https://github.com/pegasus-isi/pegasus-metrics",
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
packages = ["pegasus","pegasus.metrics"],
package_data = {"pegasus.metrics" : ["templates/*", "static/*"] },
include_package_data = True,
zip_safe = False,
scripts = ["bin/pegasus-metrics-server", "bin/pegasus-metrics-loader"],
install_requires = dependencies
)
| apache-2.0 | Python |
cfe09669a329df0c92795e0864ab089e7a8c8224 | Set zip_safe=True | sfischer13/python-arpa,sfischer13/python-arpa | setup.py | setup.py | #!/usr/bin/env python3
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if not ((3, 4) <= sys.version_info < (4, 0)):
print('ERROR: Python 3.4+ is required!')
sys.exit(1)
with open('README.md') as readme_file:
readme = readme_file.read()
with open('HISTORY.md') as history_file:
history = history_file.read()
setup(
author='Stefan Fischer',
author_email='sfischer13@ymail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing',
'Topic :: Text Processing :: Linguistic',
],
description='Library for reading ARPA n-gram models.',
include_package_data=True,
install_requires=[],
keywords='ARPA,n-gram,ngram,language model,LM,language technology,LT,'
'computational linguistics,CL,natural language processing,NLP,unigram,bigram,trigram',
license='MIT',
long_description=readme + '\n\n' + history,
long_description_content_type='text/markdown',
name='arpa',
package_dir={'arpa': 'arpa'},
packages=['arpa'],
project_urls={
'bug tracker': 'https://github.com/sfischer13/python-arpa/issues/',
'documentation': 'https://arpa.readthedocs.io/',
'source code': 'https://github.com/sfischer13/python-arpa/',
},
python_requires='~=3.4',
setup_requires=['pytest-runner'],
tests_require=['pytest'],
url='https://github.com/sfischer13/python-arpa',
version='0.1.0b3',
zip_safe=True,
)
| #!/usr/bin/env python3
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if not ((3, 4) <= sys.version_info < (4, 0)):
print('ERROR: Python 3.4+ is required!')
sys.exit(1)
with open('README.md') as readme_file:
readme = readme_file.read()
with open('HISTORY.md') as history_file:
history = history_file.read()
setup(
author='Stefan Fischer',
author_email='sfischer13@ymail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing',
'Topic :: Text Processing :: Linguistic',
],
description='Library for reading ARPA n-gram models.',
include_package_data=True,
install_requires=[],
keywords='ARPA,n-gram,ngram,language model,LM,language technology,LT,'
'computational linguistics,CL,natural language processing,NLP,unigram,bigram,trigram',
license='MIT',
long_description=readme + '\n\n' + history,
long_description_content_type='text/markdown',
name='arpa',
package_dir={'arpa': 'arpa'},
packages=['arpa'],
project_urls={
'bug tracker': 'https://github.com/sfischer13/python-arpa/issues/',
'documentation': 'https://arpa.readthedocs.io/',
'source code': 'https://github.com/sfischer13/python-arpa/',
},
python_requires='~=3.4',
setup_requires=['pytest-runner'],
tests_require=['pytest'],
url='https://github.com/sfischer13/python-arpa',
version='0.1.0b3',
zip_safe=False,
)
| mit | Python |
dffcb768f233b1a3875342d467f1323be3e2364c | add required geo packages to setup.py | openego/eDisGo,openego/eDisGo | setup.py | setup.py | from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'ding0 >=0.1.2, <=0.1.3',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.20.3, <=0.20.3',
'pypsa >=0.10.0, <=0.10.0',
'pyproj >= 1.9.5.1, <= 1.9.5.1',
'geopy >= 1.11.0, <= 1.11.0'
],
package_data={
'config': [
os.path.join('config',
'config_system'),
os.path.join('config',
'*.cfg')
]
},
cmdclass={
'install': InstallSetup}
)
| from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'ding0 >=0.1.2, <=0.1.3',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.20.3, <=0.20.3',
'pypsa >=0.10.0, <=0.10.0'
],
package_data={
'config': [
os.path.join('config',
'config_system'),
os.path.join('config',
'*.cfg')
]
},
cmdclass={
'install': InstallSetup}
)
| agpl-3.0 | Python |
b981a87e1945dc5ff6850f02a8081c0d7534c709 | Bump version | sdelements/django-multi-import | setup.py | setup.py | import os
from setuptools import setup, find_packages
def read_file(filename):
"""Read a file into a string"""
path = os.path.abspath(os.path.dirname(__file__))
filepath = os.path.join(path, filename)
try:
return open(filepath).read()
except IOError:
return ""
install_requires = ["chardet", "tablib", "six"]
setup(
name="django-multi_import",
version="1.3.3",
author="Simon Bartlett",
author_email="simon@securitycompass.com",
packages=find_packages(),
include_package_data=True,
url="https://github.com/sdelements/django-multi-importer",
license="MIT",
description="Import/export multi Django resources together atomically",
classifiers=[
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Framework :: Django",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
],
long_description=read_file("README.rst"),
test_suite="runtests.runtests",
zip_safe=False,
requires=["django (>=1.11)", "djangorestframework (>=3.0)"],
install_requires=install_requires,
)
| import os
from setuptools import setup, find_packages
def read_file(filename):
"""Read a file into a string"""
path = os.path.abspath(os.path.dirname(__file__))
filepath = os.path.join(path, filename)
try:
return open(filepath).read()
except IOError:
return ""
install_requires = ["chardet", "tablib", "six"]
setup(
name="django-multi_import",
version="1.3.1",
author="Simon Bartlett",
author_email="simon@securitycompass.com",
packages=find_packages(),
include_package_data=True,
url="https://github.com/sdelements/django-multi-importer",
license="MIT",
description="Import/export multi Django resources together atomically",
classifiers=[
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Framework :: Django",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
],
long_description=read_file("README.rst"),
test_suite="runtests.runtests",
zip_safe=False,
requires=["django (>=1.11)", "djangorestframework (>=3.0)"],
install_requires=install_requires,
)
| mit | Python |
9e6988d2eed1478f0944da1da177e88b2dfe557d | fix bad req spec | galaxy-iuc/parsec | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import glob
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
requirements = [
'Click>=6.7',
'git+https://github.com/erasche/bioblend.git@2e92cbcc22d2e1d9dec6554f3ad21b12112925d5',
'wrapt',
'pyyaml',
]
test_requirements = [
# TODO: put package test requirements here
]
version = '1.0.0-rc5'
subpackages = [x.replace('/', '.') for x in glob.glob('parsec/commands/*') if not x.endswith('.py')]
setup(
name='galaxy-parsec',
version=version,
description='Command-line utilities to assist in interacting with Galaxy servers (http://galaxyproject.org/).',
long_description=readme + '\n\n' + history,
author='Galaxy Project and Community',
author_email='rasche.eric@gmail.com',
url='https://github.com/galaxy-iuc/parsec',
packages=[
'parsec',
'parsec.commands',
] + subpackages,
entry_points='''
[console_scripts]
parsec=parsec.cli:parsec
''',
package_dir={'parsec': 'parsec'},
install_requires=requirements,
license="AFL",
keywords='parsec',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import glob
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
requirements = [
'Click>=6.7',
'git+https://github.com/erasche/bioblend.git@2e92cbcc22d2e1d9dec6554f3ad21b12112925d5',
'wrapt',
'pyyaml',
]
test_requirements = [
# TODO: put package test requirements here
]
version = '1.0.0-rc4'
subpackages = [x.replace('/', '.') for x in glob.glob('parsec/commands/*') if not x.endswith('.py')]
setup(
name='galaxy-parsec',
version=version,
description='Command-line utilities to assist in interacting with Galaxy servers (http://galaxyproject.org/).',
long_description=readme + '\n\n' + history,
author='Galaxy Project and Community',
author_email='rasche.eric@gmail.com',
url='https://github.com/galaxy-iuc/parsec',
packages=[
'parsec',
'parsec.commands',
] + subpackages,
entry_points='''
[console_scripts]
parsec=parsec.cli:parsec
''',
package_dir={'parsec': 'parsec'},
install_requires=requirements,
license="AFL",
keywords='parsec',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
| apache-2.0 | Python |
c997c6518aa675828ced24a49345bcc6f251cf36 | update dev status | ariebovenberg/omgorm,ariebovenberg/snug | setup.py | setup.py | import os.path
from setuptools import setup, find_packages
def read_local_file(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, 'r') as rfile:
return rfile.read()
metadata = {}
exec(read_local_file('snug/__about__.py'), metadata)
readme = read_local_file('README.rst')
history = read_local_file('HISTORY.rst')
setup(
name='snug',
version=metadata['__version__'],
description=metadata['__description__'],
license='MIT',
long_description=readme + '\n\n' + history,
url='https://github.com/ariebovenberg/snug',
author=metadata['__author__'],
author_email='a.c.bovenberg@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
install_requires=[
'typing>=3.6.2;python_version<"3.5"'
],
keywords=['api', 'wrapper', 'rest', 'http'],
python_requires='>=3.4',
packages=find_packages(exclude=('tests', 'docs', 'examples'))
)
| import os.path
from setuptools import setup, find_packages
def read_local_file(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, 'r') as rfile:
return rfile.read()
metadata = {}
exec(read_local_file('snug/__about__.py'), metadata)
readme = read_local_file('README.rst')
history = read_local_file('HISTORY.rst')
setup(
name='snug',
version=metadata['__version__'],
description=metadata['__description__'],
license='MIT',
long_description=readme + '\n\n' + history,
url='https://github.com/ariebovenberg/snug',
author=metadata['__author__'],
author_email='a.c.bovenberg@gmail.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
install_requires=[
'typing>=3.6.2;python_version<"3.5"'
],
keywords=['api', 'wrapper', 'rest', 'http'],
python_requires='>=3.4',
packages=find_packages(exclude=('tests', 'docs', 'examples'))
)
| mit | Python |
8f2fc6bbcbc7e6e9fb78021e6e0636783b98f263 | add extras to setup.py | openannotation/annotator-store,nobita-isc/annotator-store,nobita-isc/annotator-store,happybelly/annotator-store,ningyifan/annotator-store,nobita-isc/annotator-store,nobita-isc/annotator-store | setup.py | setup.py | from setuptools import setup, find_packages
import sys
requires = [
'Flask==0.9',
'pyes==0.19.1',
'PyJWT==0.1.4',
'iso8601==0.1.4',
]
if sys.version_info < (2, 7):
requires.append('ordereddict==1.1')
setup(
name = 'annotator',
version = '0.10.0',
packages = find_packages(),
install_requires = requires,
extras_require = {
'docs': ['Sphinx'],
'testing': ['nose', 'coverage'],
},
# metadata for upload to PyPI
author = 'Rufus Pollock and Nick Stenning (Open Knowledge Foundation)',
author_email = 'annotator@okfn.org',
description = 'Database backend for the Annotator (http://annotateit.org)',
license = 'MIT',
keywords = 'annotation web javascript',
url = 'http://okfnlabs.org/annotator/',
download_url = 'https://github.com/okfn/annotator-store',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python'
],
)
| from setuptools import setup, find_packages
import sys
requires = [
'Flask==0.9',
'pyes==0.19.1',
'PyJWT==0.1.4',
'iso8601==0.1.4',
]
if sys.version_info < (2, 7):
requires.append('ordereddict==1.1')
setup(
name = 'annotator',
version = '0.10.0',
packages = find_packages(),
install_requires = requires,
# metadata for upload to PyPI
author = 'Rufus Pollock and Nick Stenning (Open Knowledge Foundation)',
author_email = 'annotator@okfn.org',
description = 'Database backend for the Annotator (http://annotateit.org)',
license = 'MIT',
keywords = 'annotation web javascript',
url = 'http://okfnlabs.org/annotator/',
download_url = 'https://github.com/okfn/annotator-store',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python'
],
)
| mit | Python |
dd64e237e0f8e1dc4c00a371e1849ab6accb60a5 | add cython dependency to try to built pylbfgs | datamade/rlr | setup.py | setup.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
try:
from setuptools import setup, Extension
except ImportError :
raise ImportError("setuptools module required, please go to https://pypi.python.org/pypi/setuptools and follow the instructions for installing setuptools")
setup(
name='rlr',
url='https://github.com/datamade/rlr',
version='1.1',
description='Case weighted L2 regularized logistic regression',
packages=['rlr'],
dependency_links = ['https://github.com/larsmans/pylbfgs/tarball/master#egg=pylbfgs'],
install_requires=['numpy', 'pylbfgs', 'cython'],
license='Apache 2.0 License: https://www.apache.org/licenses/LICENSE-2.0',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache 2.0 License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Information Analysis']
)
| #!/usr/bin/python
# -*- coding: utf-8 -*-
try:
from setuptools import setup, Extension
except ImportError :
raise ImportError("setuptools module required, please go to https://pypi.python.org/pypi/setuptools and follow the instructions for installing setuptools")
setup(
name='rlr',
url='https://github.com/datamade/rlr',
version='1.1',
description='Case weighted L2 regularized logistic regression',
packages=['rlr'],
dependency_links = ['https://github.com/larsmans/pylbfgs/tarball/master#egg=pylbfgs'],
install_requires=['numpy', 'pylbfgs'],
license='Apache 2.0 License: https://www.apache.org/licenses/LICENSE-2.0',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache 2.0 License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Information Analysis']
)
| bsd-3-clause | Python |
7fee938e21701c09fe129fe01843e33352c5678c | Update setup script | MahjongRepository/mahjong | setup.py | setup.py | import io
from distutils.core import setup
def get_long_description():
"""Generate a long description from the README file."""
descr = []
for fname in ('README.rst',):
with io.open(fname, encoding='utf-8') as f:
descr.append(f.read())
return '\n\n'.join(descr)
setup(
name='mahjong',
packages=[
'mahjong',
'mahjong.hand_calculating',
'mahjong.hand_calculating.yaku_list',
'mahjong.hand_calculating.yaku_list.yakuman',
],
version='1.0.5',
description='Mahjong hands calculation',
long_description=get_long_description(),
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
data_files=[('', ['README.rst'])],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| import io
from distutils.core import setup
def get_long_description():
"""Generate a long description from the README file."""
descr = []
for fname in ('README.rst',):
with io.open(fname, encoding='utf-8') as f:
descr.append(f.read())
return '\n\n'.join(descr)
setup(
name='mahjong',
packages=[
'mahjong',
'mahjong.hand_calculating',
'mahjong.hand_calculating.yaku_list',
'mahjong.hand_calculating.yaku_list.yakuman',
],
version='1.0.4',
description='Mahjong hands calculation',
long_description=get_long_description(),
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| mit | Python |
a28fbfd15ec86c6b9178e5c63c14dea4d12226b8 | Update setup.py | materialsproject/custodian,specter119/custodian,xhqu1981/custodian,specter119/custodian,materialsproject/custodian,specter119/custodian,davidwaroquiers/custodian,materialsproject/custodian | setup.py | setup.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import os
from io import open
from setuptools import setup, find_packages
with open("README.rst") as f:
long_desc = f.read()
ind = long_desc.find("\n")
long_desc = long_desc[ind + 1:]
setup(
name="custodian",
packages=find_packages(),
version="1.0.1",
install_requires=["monty>=0.9.0", "six"],
extras_require={"vasp, nwchem, qchem": ["pymatgen>=3.3.1"]},
package_data={},
author="Shyue Ping Ong, William Davidson Richards, Stephen Dacek, "
"Xiaohui Qu",
author_email="ongsp@ucsd.edu",
maintainer="Shyue Ping Ong",
url="https://github.com/materialsproject/custodian",
license="MIT",
description="A simple JIT job management framework in Python.",
long_description=long_desc,
keywords=["jit", "just-in-time", "job", "management", "vasp"],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Physics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Software Development :: Libraries :: Python Modules"
],
scripts=[os.path.join("scripts", f) for f in os.listdir("scripts")]
)
| # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import os
from io import open
from setuptools import setup, find_packages
with open("README.rst") as f:
long_desc = f.read()
ind = long_desc.find("\n")
long_desc = long_desc[ind + 1:]
setup(
name="custodian",
packages=find_packages(),
version="1.0.1",
install_requires=["monty>=0.7.0", "six"],
extras_require={"vasp, nwchem, qchem": ["pymatgen>=3.3.1"]},
package_data={},
author="Shyue Ping Ong, William Davidson Richards, Stephen Dacek, "
"Xiaohui Qu",
author_email="ongsp@ucsd.edu",
maintainer="Shyue Ping Ong",
url="https://github.com/materialsproject/custodian",
license="MIT",
description="A simple JIT job management framework in Python.",
long_description=long_desc,
keywords=["jit", "just-in-time", "job", "management", "vasp"],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Physics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Software Development :: Libraries :: Python Modules"
],
scripts=[os.path.join("scripts", f) for f in os.listdir("scripts")]
)
| mit | Python |
c9e6e9c11a56bfc04649d393bd56339e0ecce5ff | make "python setup.py test" install and run tox | davidcaste/fabtools,sociateru/fabtools,ahnjungho/fabtools,AMOSoft/fabtools,bitmonk/fabtools,fabtools/fabtools,ronnix/fabtools,badele/fabtools,hagai26/fabtools,pombredanne/fabtools,n0n0x/fabtools-python,wagigi/fabtools-python,prologic/fabtools | setup.py | setup.py | import os
import re
import sys
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
def read(filename):
path = os.path.join(os.path.dirname(__file__), filename)
contents = open(path).read()
return re.sub(r'.*travis-ci\.org/.*', '', contents)
class Tox(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import tox
errno = tox.cmdline(self.test_args)
sys.exit(errno)
setup(
name='fabtools',
version='0.19.0',
description='Tools for writing awesome Fabric files',
long_description=read('README.rst') + '\n' + read('docs/CHANGELOG.rst'),
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://fabtools.readthedocs.org/',
license='BSD',
install_requires=[
"fabric>=1.7.0",
],
setup_requires=[],
tests_require=[
'tox',
],
cmdclass = {
'test': Tox,
},
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
| import os
import re
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
def read(filename):
path = os.path.join(os.path.dirname(__file__), filename)
contents = open(path).read()
return re.sub(r'.*travis-ci\.org/.*', '', contents)
setup(
name='fabtools',
version='0.19.0',
description='Tools for writing awesome Fabric files',
long_description=read('README.rst') + '\n' + read('docs/CHANGELOG.rst'),
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://fabtools.readthedocs.org/',
license='BSD',
install_requires=[
"fabric>=1.7.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
| bsd-2-clause | Python |
89c4863322bcfcb31242ee5123f4ed42bd7ce7f4 | Add test command to setup.py again | mpkato/interleaving | setup.py | setup.py | # -*- coding:utf-8 -*-
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
pytest.main(self.test_args)
setup(
name = "interleaving",
packages = ["interleaving", "interleaving.simulation"],
version = "0.0.1",
description = "Interleaving library for ranking evaluation",
author = "Makoto P. Kato, Tomohiro Manabe",
author_email = "kato@dl.kuis.kyoto-u.ac.jp",
license = "MIT License",
url = "https://github.com/mpkato/interleaving",
setup_requires = [
'numpy'
],
install_requires = [
'numpy',
'scipy'
],
tests_require=['pytest'],
cmdclass = {'test': PyTest}
)
| # -*- coding:utf-8 -*-
from setuptools import setup
setup(
name = "interleaving",
packages = ["interleaving", "interleaving.simulation"],
version = "0.0.1",
description = "Interleaving library for ranking evaluation",
author = "Makoto P. Kato, Tomohiro Manabe",
author_email = "kato@dl.kuis.kyoto-u.ac.jp",
license = "MIT License",
url = "https://github.com/mpkato/interleaving",
setup_requires = [
'numpy'
],
install_requires = [
'numpy',
'scipy'
],
tests_require=['pytest'],
)
| mit | Python |
4472aa5b2e852d4fbb3b1287384f39f459368bad | add temporary version in setup.py (#84) | deepcharles/ruptures,deepcharles/ruptures | setup.py | setup.py | from setuptools import setup, find_packages, Extension
import numpy as np
from Cython.Build import cythonize
ext_modules = [
Extension(
"ruptures.detection._detection.ekcpd",
sources=[
"ruptures/detection/_detection/ekcpd.pyx",
"ruptures/detection/_detection/ekcpd_computation.c",
"ruptures/detection/_detection/ekcpd_pelt_computation.c",
"ruptures/detection/_detection/kernels.c",
],
),
Extension(
"ruptures.utils._utils.convert_path_matrix",
sources=[
"ruptures/utils/_utils/convert_path_matrix.pyx",
"ruptures/utils/_utils/convert_path_matrix_c.c",
],
),
]
setup(
name="ruptures",
version="1.1.0-rc.1",
packages=find_packages(exclude=["docs", "tests*", "images"]),
install_requires=["numpy", "scipy"],
extras_require={"display": ["matplotlib"]},
python_requires=">=3",
url="https://centre-borelli.github.io/ruptures-docs/",
license="BSD License",
author="Charles Truong, Laurent Oudre, Nicolas Vayatis",
author_email="charles@doffy.net",
maintainer="Charles Truong, Olivier Boulant",
description="Change point detection for signals, in Python",
download_url="https://github.com/deepcharles/ruptures/archive/master.zip",
keywords=[
"change point detection",
"signal segmentation",
"computer science",
"machine learning",
"kernel methods",
"time series",
],
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Mathematics",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
long_description="""\
Offline change point detection for Python.
------------------------------------------
**ruptures** is a Python library for offline change point detection.
This package provides methods for the analysis and segmentation of
non-stationary signals.
Implemented algorithms include exact and approximate detection for various
parametric and non-parametric models.
**ruptures** focuses on ease of use by providing a well-documented and
consistent interface. In addition, thanks to its modular structure, different
algorithms and models can be connected and extended within this package.
An extensive documentation is available
`github.com/deepcharles/ruptures <https://github.com/deepcharles/ruptures>`_.
This version requires Python 3 or later.
""",
ext_modules=cythonize(
ext_modules,
language_level="3",
),
)
| from setuptools import setup, find_packages, Extension
import numpy as np
from Cython.Build import cythonize
ext_modules = [
Extension(
"ruptures.detection._detection.ekcpd",
sources=[
"ruptures/detection/_detection/ekcpd.pyx",
"ruptures/detection/_detection/ekcpd_computation.c",
"ruptures/detection/_detection/ekcpd_pelt_computation.c",
"ruptures/detection/_detection/kernels.c",
],
),
Extension(
"ruptures.utils._utils.convert_path_matrix",
sources=[
"ruptures/utils/_utils/convert_path_matrix.pyx",
"ruptures/utils/_utils/convert_path_matrix_c.c",
],
),
]
setup(
name="ruptures",
version="1.1.0",
packages=find_packages(exclude=["docs", "tests*", "images"]),
install_requires=["numpy", "scipy"],
extras_require={"display": ["matplotlib"]},
python_requires=">=3",
url="https://centre-borelli.github.io/ruptures-docs/",
license="BSD License",
author="Charles Truong, Laurent Oudre, Nicolas Vayatis",
author_email="charles@doffy.net",
maintainer="Charles Truong, Olivier Boulant",
description="Change point detection for signals, in Python",
download_url="https://github.com/deepcharles/ruptures/archive/master.zip",
keywords=[
"change point detection",
"signal segmentation",
"computer science",
"machine learning",
"kernel methods",
"time series",
],
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Mathematics",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
long_description="""\
Offline change point detection for Python.
------------------------------------------
**ruptures** is a Python library for offline change point detection.
This package provides methods for the analysis and segmentation of
non-stationary signals.
Implemented algorithms include exact and approximate detection for various
parametric and non-parametric models.
**ruptures** focuses on ease of use by providing a well-documented and
consistent interface. In addition, thanks to its modular structure, different
algorithms and models can be connected and extended within this package.
An extensive documentation is available
`github.com/deepcharles/ruptures <https://github.com/deepcharles/ruptures>`_.
This version requires Python 3 or later.
""",
ext_modules=cythonize(
ext_modules,
language_level="3",
),
)
| bsd-2-clause | Python |
ef23394f9c0e06fb6705bc37fab28a3077fe15c8 | Bump version | Sberned/djaio | setup.py | setup.py | #!/usr/bin/env python
import os
from setuptools import setup, find_packages
base = os.path.dirname(os.path.abspath(__file__))
install_requires = [
'aiohttp==1.0.5',
'aiohttp_jinja2',
'asyncio',
'aiohttp_debugtoolbar',
'aiohttp_autoreload',
'schematics>=2.0.0.dev2',
'aiopg==0.11.0',
]
tests_require = [
'pytest',
'pytest-aiohttp'
]
setup(name='djaio',
version='0.0.12',
description='Djaio - Django-inspired AsyncIO web framework',
author='Vadim Tregubov',
author_email='vatregubov@sberned.ru',
url='https://github.com/Sberned/djaio',
packages=find_packages(),
install_requires=install_requires,
classifiers=[
'Development Status :: 3 - Alpha',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Topic :: System :: Software Distribution',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
tests_require=tests_require
)
| #!/usr/bin/env python
import os
from setuptools import setup, find_packages
base = os.path.dirname(os.path.abspath(__file__))
install_requires = [
'aiohttp==1.0.5',
'aiohttp_jinja2',
'asyncio',
'aiohttp_debugtoolbar',
'aiohttp_autoreload',
'schematics>=2.0.0.dev2',
'aiopg==0.11.0',
]
tests_require = [
'pytest',
'pytest-aiohttp'
]
setup(name='djaio',
version='0.0.11',
description='Djaio - Django-inspired AsyncIO web framework',
author='Vadim Tregubov',
author_email='vatregubov@sberned.ru',
url='https://github.com/Sberned/djaio',
packages=find_packages(),
install_requires=install_requires,
classifiers=[
'Development Status :: 3 - Alpha',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Topic :: System :: Software Distribution',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
tests_require=tests_require
)
| apache-2.0 | Python |
b951bb360f14da66ebb30d31c863ec811955f3cb | Set upper limits on some dependency versions. (#568) | transientskp/tkp,transientskp/tkp | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
install_requires = """
astropy<3.0.0
colorlog
numpy>=1.3.0,<1.17.0
psycopg2
python-casacore
python-dateutil>=1.4.1
pytz
scipy>=0.7.0,<1.3.0
sqlalchemy>=1.0.0
alembic
monotonic
""".split()
extras_require = {
'monetdb': ['sqlalchemy_monetdb>=0.9.1'],
}
tkp_scripts = [
"tkp/bin/pyse.py",
"tkp/bin/trap-manage.py",
"tkp/bin/tkp-inject.py",
]
package_data = {'tkp': [
'config/*/*',
'db/sql/statements/batch',
'db/sql/statements/*/*.sql'
]}
package_list = find_packages(where='.', exclude=['tests'])
setup(
name="tkp",
version="4.0",
packages=package_list,
scripts=tkp_scripts,
package_data=package_data,
description="LOFAR Transients Key Project (TKP)",
author="TKP Discovery WG",
author_email="discovery@transientskp.org",
url="http://docs.transientskp.org/",
install_requires=install_requires,
extras_require=extras_require
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
install_requires = """
astropy
colorlog
numpy>=1.3.0
psycopg2
python-casacore
python-dateutil>=1.4.1
pytz
scipy>=0.7.0
sqlalchemy>=1.0.0
alembic
monotonic
""".split()
extras_require = {
'monetdb': ['sqlalchemy_monetdb>=0.9.1'],
}
tkp_scripts = [
"tkp/bin/pyse.py",
"tkp/bin/trap-manage.py",
"tkp/bin/tkp-inject.py",
]
package_data = {'tkp': [
'config/*/*',
'db/sql/statements/batch',
'db/sql/statements/*/*.sql'
]}
package_list = find_packages(where='.', exclude=['tests'])
setup(
name="tkp",
version="4.0",
packages=package_list,
scripts=tkp_scripts,
package_data=package_data,
description="LOFAR Transients Key Project (TKP)",
author="TKP Discovery WG",
author_email="discovery@transientskp.org",
url="http://docs.transientskp.org/",
install_requires=install_requires,
extras_require=extras_require
)
| bsd-2-clause | Python |
8a645a839d40f1c95b5007d8128a636f738649a4 | Bump version | Organice/django-organice-theme-rssk,Organice/django-organice-theme-rssk,Organice/django-organice-theme-rssk,Organice/django-organice-theme-rssk | setup.py | setup.py | #!/usr/bin/env python
#
# Copyright 2014 Peter Bittner <django@bittner.it>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Office/Business :: Groupware',
]
DEPENDENCIES = [
'django-organice-theme',
]
ROOT_PATH = os.path.dirname(__file__)
setup(
name='django-organice-theme-rssk',
version='0.2',
author='Peter Bitter',
author_email='django@bittner.it',
url='http://organice.io/themes/',
license='Apache License, Version 2.0',
description='A responsive theme with a classic look for django-organice.',
long_description=open(os.path.join(ROOT_PATH, 'README.rst')).read(),
keywords='organice, theme, django, python',
classifiers=CLASSIFIERS,
install_requires=DEPENDENCIES,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
| #!/usr/bin/env python
#
# Copyright 2014 Peter Bittner <django@bittner.it>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Office/Business :: Groupware',
]
DEPENDENCIES = [
'django-organice-theme',
]
ROOT_PATH = os.path.dirname(__file__)
setup(
name='django-organice-theme-rssk',
version='0.1',
author='Peter Bitter',
author_email='django@bittner.it',
url='http://organice.io/themes/',
license='Apache License, Version 2.0',
description='A responsive theme with a classic look for django-organice.',
long_description=open(os.path.join(ROOT_PATH, 'README.rst')).read(),
keywords='organice, theme, django, python',
classifiers=CLASSIFIERS,
install_requires=DEPENDENCIES,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
| apache-2.0 | Python |
026caa995dee9115956d1a25c49b2c26647d876f | Update setup.py | duverse/django-cdnjs | setup.py | setup.py | from setuptools import setup, find_packages
import os
import codecs
def read(*parts):
filename = os.path.join(os.path.dirname(__file__), *parts)
with codecs.open(filename, encoding='utf-8') as fp:
return fp.read()
setup(
name='django-cdnjs',
packages=find_packages(),
version='2017.10.19-0',
license='MIT',
description=(
'Django template plugin to be used to simplify static CDN resources '
'connecting.'
),
long_description=read('README.md'),
author='Maxim Papezhuk',
author_email='maxp.job@gmail.com',
url='https://github.com/duverse/django-cdnjs',
download_url='https://github.com/duverse/django-cdnjs/tarball/v2017.10.19-0',
keywords=[
'django',
'cdn',
'cdnjs',
'templatetag'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
)
| from setuptools import setup, find_packages
import os
import codecs
def read(*parts):
filename = os.path.join(os.path.dirname(__file__), *parts)
with codecs.open(filename, encoding='utf-8') as fp:
return fp.read()
setup(
name='django-cdnjs',
packages=find_packages(),
version='2017.10.19-0',
license='MIT',
description=(
'Django template plugin to be used to simplify static CDN resources '
'connecting.'
),
long_description=read('README.rst'),
author='Maxim Papezhuk',
author_email='maxp.job@gmail.com',
url='https://github.com/duverse/django-cdnjs',
download_url='https://github.com/duverse/django-cdnjs/tarball/v2017.10.19-0',
keywords=[
'django',
'cdn',
'cdnjs',
'templatetag'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
)
| mit | Python |
530aeadbc686f436e907ca8d111b5a72788906ee | Bump version to 1.0.4.dev | deepmind/graph_nets | setup.py | setup.py | # Copyright 2018 The GraphNets Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Setuptools installation script."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from setuptools import find_packages
from setuptools import setup
description = """Graph Nets is DeepMind's library for building graph networks in
Tensorflow and Sonnet.
"""
setup(
name="graph_nets",
version="1.0.4.dev",
description="Library for building graph networks in Tensorflow and Sonnet.",
long_description=description,
author="DeepMind",
license="Apache License, Version 2.0",
keywords=["graph networks", "tensorflow", "sonnet", "machine learning"],
url="https://github.com/deepmind/graph-nets",
packages=find_packages(),
install_requires=[
"absl-py",
"dm-sonnet==1.23",
"future",
"networkx",
"numpy",
"setuptools",
"six",
],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
)
| # Copyright 2018 The GraphNets Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Setuptools installation script."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from setuptools import find_packages
from setuptools import setup
description = """Graph Nets is DeepMind's library for building graph networks in
Tensorflow and Sonnet.
"""
setup(
name="graph_nets",
version="1.0.3",
description="Library for building graph networks in Tensorflow and Sonnet.",
long_description=description,
author="DeepMind",
license="Apache License, Version 2.0",
keywords=["graph networks", "tensorflow", "sonnet", "machine learning"],
url="https://github.com/deepmind/graph-nets",
packages=find_packages(),
install_requires=[
"absl-py",
"dm-sonnet==1.23",
"future",
"networkx",
"numpy",
"setuptools",
"six",
],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
)
| apache-2.0 | Python |
3621089d5ed6374e89138be01f5796ddf1727f69 | Update version to 4.0.8 | pmaigutyak/mp-shop,pmaigutyak/mp-shop,pmaigutyak/mp-shop | setup.py | setup.py |
from setuptools import setup, find_packages
__version__ = '4.0.8'
with open('requirements.txt') as f:
requires = f.read().splitlines()
url = 'https://github.com/pmaigutyak/mp-shop'
setup(
name='django-mp-shop',
version=__version__,
description='Django shop apps',
long_description=open('README.md').read(),
author='Paul Maigutyak',
author_email='pmaigutyak@gmail.com',
url=url,
download_url='%s/archive/%s.tar.gz' % (url, __version__),
packages=find_packages(),
include_package_data=True,
license='MIT',
install_requires=requires
)
|
from setuptools import setup, find_packages
__version__ = '4.0.7'
with open('requirements.txt') as f:
requires = f.read().splitlines()
url = 'https://github.com/pmaigutyak/mp-shop'
setup(
name='django-mp-shop',
version=__version__,
description='Django shop apps',
long_description=open('README.md').read(),
author='Paul Maigutyak',
author_email='pmaigutyak@gmail.com',
url=url,
download_url='%s/archive/%s.tar.gz' % (url, __version__),
packages=find_packages(),
include_package_data=True,
license='MIT',
install_requires=requires
)
| isc | Python |
4affa5053a3268d0becc9d2af849ecefa1da7c75 | Fix for setup.py TypeError | areski/django-nvd3,chiu/django-nvd3,lgp171188/django-nvd3,chiu/django-nvd3,marcogiusti/django-nvd3,lgp171188/django-nvd3,areski/django-nvd3,chiu/django-nvd3,areski/django-nvd3,lgp171188/django-nvd3 | setup.py | setup.py | from setuptools import setup, find_packages
import django_nvd3
import os
import re
def read(*parts):
return open(os.path.join(os.path.dirname(__file__), *parts)).read()
def parse_requirements(file_name):
requirements = []
for line in open(file_name, 'r').read().split('\n'):
if re.match(r'(\s*#)|(\s*$)', line):
continue
if re.match(r'\s*-e\s+', line):
requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1', line))
elif re.match(r'(\s*git)|(\s*hg)', line):
pass
else:
requirements.append(line)
return requirements
def parse_dependency_links(file_name):
dependency_links = []
for line in open(file_name, 'r').read().split('\n'):
if re.match(r'\s*-[ef]\s+', line):
dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
return dependency_links
setup(
name='django-nvd3',
version=django_nvd3.__version__,
description="Django NVD3",
long_description=read('README.rst'),
keywords='django, nvd3, chart, graph, d3',
url='http://github.com/areski/django-nvd3',
author='Belaid Arezqui',
author_email='areski@gmail.com',
license='MIT License',
zip_safe=False,
packages=find_packages(exclude=["tests", "demoproject", "docs"]),
include_package_data=True,
package_data={},
install_requires=parse_requirements('requirements.txt'),
dependency_links=parse_dependency_links('requirements.txt'),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| from setuptools import setup, find_packages
import django_nvd3
import os
import codecs
import re
def read(*parts):
return codecs.open(os.path.join(os.path.dirname(__file__), *parts)).read()
def parse_requirements(file_name):
requirements = []
for line in open(file_name, 'r').read().split('\n'):
if re.match(r'(\s*#)|(\s*$)', line):
continue
if re.match(r'\s*-e\s+', line):
requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1', line))
elif re.match(r'(\s*git)|(\s*hg)', line):
pass
else:
requirements.append(line)
return requirements
def parse_dependency_links(file_name):
dependency_links = []
for line in open(file_name, 'r').read().split('\n'):
if re.match(r'\s*-[ef]\s+', line):
dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
return dependency_links
setup(
name='django-nvd3',
version=django_nvd3.__version__,
description="Django NVD3",
long_description=read('README.rst'),
keywords='django, nvd3, chart, graph, d3',
url='http://github.com/areski/django-nvd3',
author='Belaid Arezqui',
author_email='areski@gmail.com',
license='MIT License',
zip_safe=False,
packages=find_packages(exclude=["tests", "demoproject", "docs"]),
include_package_data=True,
package_data={},
install_requires=parse_requirements('requirements.txt'),
dependency_links=parse_dependency_links('requirements.txt'),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| mit | Python |
baf1ef102ad367b1997c17dc2ca815ba94e13b38 | Bump to next dev version | tswicegood/django-timelinejs2 | setup.py | setup.py | from distutils.core import setup
import os
# Stolen from django-registration
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir:
os.chdir(root_dir)
for dirpath, dirnames, filenames in os.walk('timelinejs'):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'):
del dirnames[i]
if '__init__.py' in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
elif filenames:
prefix = dirpath[len('timelinejs/'):]
for f in filenames:
data_files.append(os.path.join(prefix, f))
setup(
name='django-timelinejs2',
version='2.18.2alpha',
description='Connecting Timeline.js v2.18 to Django',
author='Travis Swicegood',
author_email='development@domain51.com',
url='http://github.com/tswicegood/django-timelinejs/',
install_requires=[
'django-staticfiles-timelinejs_static==2.18',
'python-dateutil>=1.5',
],
packages=packages,
package_data={'timelinejs': data_files},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
| from distutils.core import setup
import os
# Stolen from django-registration
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir:
os.chdir(root_dir)
for dirpath, dirnames, filenames in os.walk('timelinejs'):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'):
del dirnames[i]
if '__init__.py' in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
elif filenames:
prefix = dirpath[len('timelinejs/'):]
for f in filenames:
data_files.append(os.path.join(prefix, f))
setup(
name='django-timelinejs2',
version='2.18.1',
description='Connecting Timeline.js v2.18 to Django',
author='Travis Swicegood',
author_email='development@domain51.com',
url='http://github.com/tswicegood/django-timelinejs/',
install_requires=[
'django-staticfiles-timelinejs_static==2.18',
'python-dateutil>=1.5',
],
packages=packages,
package_data={'timelinejs': data_files},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
| apache-2.0 | Python |
b64d17589b321451237314fd8f5615590d66a93d | Make classifiers a list | dirn/When.py | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.4.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
py_modules=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
tests_require=['coverage', 'mock', 'nose'],
license=open('LICENSE').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
setup(**settings)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.4.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
py_modules=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
tests_require=['coverage', 'mock', 'nose'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
| bsd-3-clause | Python |
0595acfc90fd432007603ec6c318380d3ef174a0 | Add 2 applicable Trove classifiers | moreati/python-niceware | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
]
test_requirements = [
'flake8',
'pytest',
]
setup(
name='niceware',
version='0.1.1',
description='Convert cryptographic keys to human-readable phrases, or generate random-yet-memorable passphrases',
long_description=readme + '\n\n' + history,
author='Alex Willmer',
author_email='alex@moreati.org.uk',
url='https://github.com/moreati/python-niceware',
packages=[
'niceware',
],
entry_points={
'console_scripts': [
'niceware = niceware.__main__:main',
]
},
package_dir={'niceware': 'niceware'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=True,
keywords='niceware diceware passphrase password encryption',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Security',
],
test_suite='tests',
tests_require=test_requirements,
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
]
test_requirements = [
'flake8',
'pytest',
]
setup(
name='niceware',
version='0.1.1',
description='Convert cryptographic keys to human-readable phrases, or generate random-yet-memorable passphrases',
long_description=readme + '\n\n' + history,
author='Alex Willmer',
author_email='alex@moreati.org.uk',
url='https://github.com/moreati/python-niceware',
packages=[
'niceware',
],
entry_points={
'console_scripts': [
'niceware = niceware.__main__:main',
]
},
package_dir={'niceware': 'niceware'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=True,
keywords='niceware diceware passphrase password encryption',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
test_suite='tests',
tests_require=test_requirements,
)
| mit | Python |
e8069c8ff771b766757a1a96bc741a4073fcf9c1 | add wheel support | cyberdelia/atomic | setup.py | setup.py | # -*- coding: utf-8 -*-
import io
import os
from setuptools.dist import Distribution
from setuptools import setup, find_packages
try:
from atomic import ffi
except ImportError:
ext_modules=[]
else:
ext_modules=[ffi.verifier.get_extension()]
class BinaryDistribution(Distribution):
def is_pure(self):
return False
with io.open('README.rst', encoding='utf-8') as f:
readme = f.read()
setup(
name='atomic',
version='0.7.0',
description='An atomic class that guarantees atomic updates to its contained value.',
long_description=readme,
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/atomic',
license='MIT',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
setup_requires=['cffi'],
install_requires=['cffi'],
test_suite="tests",
ext_modules=ext_modules,
distclass=BinaryDistribution,
)
| # -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
try:
from atomic import ffi
except ImportError:
ext_modules=[]
else:
ext_modules=[ffi.verifier.get_extension()]
with io.open('README.rst', encoding='utf-8') as f:
readme = f.read()
setup(
name='atomic',
version='0.7.0',
description='An atomic class that guarantees atomic updates to its contained value.',
long_description=readme,
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/atomic',
license='MIT',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
setup_requires=['cffi'],
install_requires=['cffi'],
test_suite="tests",
ext_modules=ext_modules,
)
| mit | Python |
16a9a9ad4554c2f727be5a833c388e13a77a8cfe | add lark file to data files | amplify-education/python-hcl2 | setup.py | setup.py | """setup.py controls the build, testing, and distribution of the egg"""
import re
import os.path
from setuptools import setup, find_packages
VERSION_REGEX = re.compile(r"""
^__version__\s=\s
['"](?P<version>.*?)['"]
""", re.MULTILINE | re.VERBOSE)
VERSION_FILE = os.path.join("hcl2", "version.py")
def get_long_description():
"""Reads the long description from the README"""
this_directory = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_directory, 'README.md'), encoding='utf-8') as file:
return file.read()
def get_version():
"""Reads the version from the package"""
with open(VERSION_FILE) as handle:
lines = handle.read()
result = VERSION_REGEX.search(lines)
if result:
return result.groupdict()["version"]
raise ValueError("Unable to determine __version__")
def get_requirements():
"""Reads the installation requirements from requirements.pip"""
with open("requirements.pip") as reqfile:
return [line for line in reqfile.read().split("\n") if not line.startswith(('#', '-'))]
setup(
name='python_hcl2',
python_requires='>=3.6.0',
version=get_version(),
description="A project for being awesome.",
long_description=get_long_description(),
long_description_content_type='text/markdown',
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 4 - Beta',
'Topic :: Software Development :: Libraries :: Python Modules',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
keywords='',
author='Amplify Education',
author_email='github@amplify.com',
url='https://github.com/amplify-education/python_hcl2',
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=get_requirements(),
test_suite='nose.collector',
scripts=[
'bin/hcl2tojson',
],
data_files=[('', 'hcl2/hcl2.lark')]
)
| """setup.py controls the build, testing, and distribution of the egg"""
import re
import os.path
from setuptools import setup, find_packages
VERSION_REGEX = re.compile(r"""
^__version__\s=\s
['"](?P<version>.*?)['"]
""", re.MULTILINE | re.VERBOSE)
VERSION_FILE = os.path.join("hcl2", "version.py")
def get_long_description():
"""Reads the long description from the README"""
this_directory = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_directory, 'README.md'), encoding='utf-8') as file:
return file.read()
def get_version():
"""Reads the version from the package"""
with open(VERSION_FILE) as handle:
lines = handle.read()
result = VERSION_REGEX.search(lines)
if result:
return result.groupdict()["version"]
raise ValueError("Unable to determine __version__")
def get_requirements():
"""Reads the installation requirements from requirements.pip"""
with open("requirements.pip") as reqfile:
return [line for line in reqfile.read().split("\n") if not line.startswith(('#', '-'))]
setup(
name='python_hcl2',
python_requires='>=3.6.0',
version=get_version(),
description="A project for being awesome.",
long_description=get_long_description(),
long_description_content_type='text/markdown',
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 4 - Beta',
'Topic :: Software Development :: Libraries :: Python Modules',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
keywords='',
author='Amplify Education',
author_email='github@amplify.com',
url='https://github.com/amplify-education/python_hcl2',
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=get_requirements(),
test_suite='nose.collector',
scripts=[
'bin/hcl2tojson',
],
)
| mit | Python |
2e6187224724780d5a026b6bd228c0fda155e338 | Add test dependency towards extension-template | openfisca/openfisca-core,openfisca/openfisca-core | setup.py | setup.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name = 'OpenFisca-Core',
version = '12.1.4',
author = 'OpenFisca Team',
author_email = 'contact@openfisca.fr',
classifiers = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Information Analysis",
],
description = u'A versatile microsimulation free software',
keywords = 'benefit microsimulation social tax',
license = 'https://www.fsf.org/licensing/licenses/agpl-3.0.html',
url = 'https://github.com/openfisca/openfisca-core',
data_files = [
('share/openfisca/openfisca-core', ['CHANGELOG.md', 'LICENSE.AGPL.txt', 'README.md']),
],
entry_points = {
'console_scripts': ['openfisca-run-test=openfisca_core.scripts.run_test:main'],
},
extras_require = {
'parsers': [
'OpenFisca-Parsers >= 1.0.2, < 2.0',
],
'test': [
'nose',
'flake8',
'openfisca-country-template == 1.0.0',
'openfisca-extension-template == 1.0.0',
],
},
include_package_data = True, # Will read MANIFEST.in
install_requires = [
'Biryani[datetimeconv] >= 0.10.4',
'numpy >= 1.11',
'PyYAML >= 3.10',
'flask == 0.12',
'flask-cors == 3.0.2',
'gunicorn >= 19.7.1',
'lxml >= 3.7',
],
message_extractors = {
'openfisca_core': [
('**.py', 'python', None),
],
},
packages = find_packages(exclude=['tests*']),
test_suite = 'nose.collector',
)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name = 'OpenFisca-Core',
version = '12.1.4',
author = 'OpenFisca Team',
author_email = 'contact@openfisca.fr',
classifiers = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Information Analysis",
],
description = u'A versatile microsimulation free software',
keywords = 'benefit microsimulation social tax',
license = 'https://www.fsf.org/licensing/licenses/agpl-3.0.html',
url = 'https://github.com/openfisca/openfisca-core',
data_files = [
('share/openfisca/openfisca-core', ['CHANGELOG.md', 'LICENSE.AGPL.txt', 'README.md']),
],
entry_points = {
'console_scripts': ['openfisca-run-test=openfisca_core.scripts.run_test:main'],
},
extras_require = {
'parsers': [
'OpenFisca-Parsers >= 1.0.2, < 2.0',
],
'test': [
'nose',
'flake8',
'openfisca-country-template == 1.0.0',
],
},
include_package_data = True, # Will read MANIFEST.in
install_requires = [
'Biryani[datetimeconv] >= 0.10.4',
'numpy >= 1.11',
'PyYAML >= 3.10',
'flask == 0.12',
'flask-cors == 3.0.2',
'gunicorn >= 19.7.1',
'lxml >= 3.7',
],
message_extractors = {
'openfisca_core': [
('**.py', 'python', None),
],
},
packages = find_packages(exclude=['tests*']),
test_suite = 'nose.collector',
)
| agpl-3.0 | Python |
e4dd1da8f2fdfa2f4071ab1796b31147f12d00a0 | Add Python 3.2 trove classifier | kisielk/covenant,kisielk/covenant | setup.py | setup.py | from setuptools import setup
setup(name='covenant',
version='0.1.0',
description='Code contracts for Python 3',
author='Kamil Kisiel',
author_email='kamil@kamilkisiel.net',
url='http://pypi.python.org/pypi/covenant',
license="BSD License",
packages=["covenant"],
keywords="contract",
platforms=["All"],
install_requires=["decorator"],
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities'],
)
| from setuptools import setup
setup(name='covenant',
version='0.1.0',
description='Code contracts for Python 3',
author='Kamil Kisiel',
author_email='kamil@kamilkisiel.net',
url='http://pypi.python.org/pypi/covenant',
license="BSD License",
packages=["covenant"],
keywords="contract",
platforms=["All"],
install_requires=["decorator"],
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities'],
)
| mit | Python |
8fd223aac8bb3255efeeac47502484fe3801c0ca | Fix travis | openfisca/openfisca-tunisia,openfisca/openfisca-tunisia | setup.py | setup.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Tunisia specific model for OpenFisca -- a versatile microsimulation free software"""
from setuptools import setup, find_packages
classifiers = """\
Development Status :: 2 - Pre-Alpha
License :: OSI Approved :: GNU Affero General Public License v3
Operating System :: POSIX
Programming Language :: Python
Topic :: Scientific/Engineering :: Information Analysis
"""
doc_lines = __doc__.split('\n')
setup(
name = 'OpenFisca-Tunisia',
version = '0.16.0',
author = 'OpenFisca Team',
author_email = 'contact@openfisca.fr',
classifiers = [classifier for classifier in classifiers.split('\n') if classifier],
description = doc_lines[0],
keywords = 'benefit microsimulation social tax tunisia',
license = 'http://www.fsf.org/licensing/licenses/agpl-3.0.html',
long_description = '\n'.join(doc_lines[2:]),
url = 'https://github.com/openfisca/openfisca-tunisia',
data_files = [
('share/openfisca/openfisca-tunisia', ['CHANGELOG.md', 'LICENSE.AGPL.txt', 'README.md']),
],
extras_require = dict(
tests = [
'ipykernel',
'jupyter-client',
'nbconvert >= 5.3.1',
'nbformat',
'nose',
'matplotlib',
],
notebook = [
'matplotlib',
],
survey = [
'OpenFisca-Survey-Manager >= 0.9.5',
]
),
include_package_data = True, # Will read MANIFEST.in
install_requires = [
'OpenFisca-Core >= 21.1.2, < 22.0',
'PyYAML >= 3.10',
'scipy >= 0.12',
],
message_extractors = {'openfisca_tunisia': [
('**.py', 'python', None),
]},
packages = find_packages(exclude=['openfisca_tunisia.tests*', 'openfisca_tunisia.old_tests*']),
)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Tunisia specific model for OpenFisca -- a versatile microsimulation free software"""
from setuptools import setup, find_packages
classifiers = """\
Development Status :: 2 - Pre-Alpha
License :: OSI Approved :: GNU Affero General Public License v3
Operating System :: POSIX
Programming Language :: Python
Topic :: Scientific/Engineering :: Information Analysis
"""
doc_lines = __doc__.split('\n')
setup(
name = 'OpenFisca-Tunisia',
version = '0.16.0',
author = 'OpenFisca Team',
author_email = 'contact@openfisca.fr',
classifiers = [classifier for classifier in classifiers.split('\n') if classifier],
description = doc_lines[0],
keywords = 'benefit microsimulation social tax tunisia',
license = 'http://www.fsf.org/licensing/licenses/agpl-3.0.html',
long_description = '\n'.join(doc_lines[2:]),
url = 'https://github.com/openfisca/openfisca-tunisia',
data_files = [
('share/openfisca/openfisca-tunisia', ['CHANGELOG.md', 'LICENSE.AGPL.txt', 'README.md']),
],
extras_require = dict(
tests = [
'ipykernel',
'jupyter-client',
'nbconvert >= 5.3.1',
'nbformat',
'nose',
],
notebook = [
'matplotlib',
],
survey = [
'OpenFisca-Survey-Manager >= 0.9.5',
]
),
include_package_data = True, # Will read MANIFEST.in
install_requires = [
'OpenFisca-Core >= 21.1.2, < 22.0',
'PyYAML >= 3.10',
'scipy >= 0.12',
],
message_extractors = {'openfisca_tunisia': [
('**.py', 'python', None),
]},
packages = find_packages(exclude=['openfisca_tunisia.tests*', 'openfisca_tunisia.old_tests*']),
)
| agpl-3.0 | Python |
9f7a6e0a55291aa0d89348b401569179163b6eb6 | Undo a 2to3 change | ligonier/django-athumb | setup.py | setup.py | from distutils.core import setup
import athumb
long_description = open('README.rst').read()
setup(
name='django-athumb',
version=athumb.VERSION,
packages=['athumb', 'athumb.backends', 'athumb.management',
'athumb.management.commands', 'athumb.templatetags',
'athumb.upload_handlers'],
description='A simple, S3-backed thumbnailer field.',
long_description=long_description,
author='Gregory Taylor',
author_email='gtaylor@duointeractive.com',
license='BSD License',
url='http://github.com/duointeractive/django-athumb',
platforms=["any"],
requires=['django', 'boto', 'pil'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Environment :: Web Environment',
],
)
| from distutils.core import setup
from . import athumb
long_description = open('README.rst').read()
setup(
name='django-athumb',
version=athumb.VERSION,
packages=['athumb', 'athumb.backends', 'athumb.management',
'athumb.management.commands', 'athumb.templatetags',
'athumb.upload_handlers'],
description='A simple, S3-backed thumbnailer field.',
long_description=long_description,
author='Gregory Taylor',
author_email='gtaylor@duointeractive.com',
license='BSD License',
url='http://github.com/duointeractive/django-athumb',
platforms=["any"],
requires=['django', 'boto', 'pil'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Environment :: Web Environment',
],
)
| bsd-3-clause | Python |
44b9649df3418713b8ef5ae8e1f2990a92a48907 | Patch 0.2.1 to remove print | gilesbrown/python-icapservice,gilesbrown/python-icapservice | setup.py | setup.py | from setuptools import setup
setup(
name='icapservice',
version='0.2.1',
description='ICAP service library for Python',
author='Giles Brown',
author_email='giles_brown@hotmail.com',
url='https://github.com/gilesbrown/icapservice',
license='MIT',
packages=['icapservice'],
zip_safe=False,
install_requires=['six', 'brotlipy'],
include_package_data=True,
package_data={'': ['LICENSE']},
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
#'Programming Language :: Python :: 3',
#'Programming Language :: Python :: 3.4',
#'Programming Language :: Python :: 3.5',
),
)
| from setuptools import setup
setup(
name='icapservice',
version='0.2.0',
description='ICAP service library for Python',
author='Giles Brown',
author_email='giles_brown@hotmail.com',
url='https://github.com/gilesbrown/icapservice',
license='MIT',
packages=['icapservice'],
zip_safe=False,
install_requires=['six', 'brotlipy'],
include_package_data=True,
package_data={'': ['LICENSE']},
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
#'Programming Language :: Python :: 3',
#'Programming Language :: Python :: 3.4',
#'Programming Language :: Python :: 3.5',
),
)
| mit | Python |
d79145f60abbecda2b681525d40d972e93d18109 | Bump to 0.3.0 | managedbyq/mbq.metrics | setup.py | setup.py | import codecs
import setuptools
__version__ = '0.3.0'
with codecs.open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setuptools.setup(
name='mbq.metrics',
long_description=readme,
version=__version__,
license='Apache 2.0',
url='https://github.com/managedbyq/mbq.metrics',
author='Managed by Q, Inc.',
author_email='open-source@managedbyq.com',
maintainer='Managed by Q, Inc.',
maintainer_email='open-source@managedbyq.com',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries',
],
keywords='metrics monitoring statsd',
packages=setuptools.find_packages(),
install_requires=[
'datadog==0.16.0', # pinned until 1.0.0
],
zip_safe=True,
)
| import codecs
import setuptools
__version__ = '0.2.1'
with codecs.open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setuptools.setup(
name='mbq.metrics',
long_description=readme,
version=__version__,
license='Apache 2.0',
url='https://github.com/managedbyq/mbq.metrics',
author='Managed by Q, Inc.',
author_email='open-source@managedbyq.com',
maintainer='Managed by Q, Inc.',
maintainer_email='open-source@managedbyq.com',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries',
],
keywords='metrics monitoring statsd',
packages=setuptools.find_packages(),
install_requires=[
'datadog==0.16.0', # pinned until 1.0.0
],
zip_safe=True,
)
| apache-2.0 | Python |
e1c76a8b4b052d15d68cd282eee86364a348667c | bump version to 0.1.1. we're really coming along. | maw/python-kestrel | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="kestrel",
version="0.1.1",
description="Oversimple library to talk to Kestrel",
author="Michael Wolf",
author_email="maw+python-kestrel@pobox.com",
keywords="queues kestrel memcache",
py_modules=["__init__", "kestrel"])
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="kestrel",
version="0.1",
description="Oversimple library to talk to Kestrel",
author="Michael Wolf",
author_email="maw+python-kestrel@pobox.com",
keywords="queues kestrel memcache",
py_modules=["__init__", "kestrel"])
| mit | Python |
5b1ae941f00420d04b18d61b0e5f8523086e07c0 | Bump version | netbek/chrys,netbek/chrys,netbek/chrys | setup.py | setup.py | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="chrys",
version="3.0.1",
author="Hein Bekker",
author_email="hein@netbek.co.za",
description="A collection of color palettes for mapping and visualisation",
long_description=long_description,
long_description_content_type="text/markdown",
license='BSD-3-Clause',
url="https://github.com/netbek/chrys",
install_requires=[
'matplotlib >= 2.2.4',
'numpy >= 1.7.1',
],
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
],
)
| import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="chrys",
version="3.0.0",
author="Hein Bekker",
author_email="hein@netbek.co.za",
description="A collection of color palettes for mapping and visualisation",
long_description=long_description,
long_description_content_type="text/markdown",
license='BSD-3-Clause',
url="https://github.com/netbek/chrys",
install_requires=[
'matplotlib >= 2.2.4',
'numpy >= 1.7.1',
],
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
],
)
| bsd-3-clause | Python |
55bc397d6c2884fa68e3c6caf9d79ea27af79a6e | add requirements to setup.py | cwacek/cobertura-clover-transform | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1.1',
packages=find_packages(),
include_package_data=True,
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
install_requires=['lxml'],
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
| from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1.1',
packages=find_packages(),
include_package_data=True,
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
| mit | Python |
2c655c86d16b427d596cda64ef95d5ea48dc9470 | Update version for next release 0.2 | elliotpeele/prism_rest_client | setup.py | setup.py | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.txt')).read()
requires = [
'requests',
]
setup(name='prism_rest_client',
version='0.2',
description='prism_rest_client',
long_description=README,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
],
author='Elliot Peele',
author_email='elliot@bentlogic.net',
url='https://github.com/elliotpeele/prism_rest_client',
keywords='web rest client',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='prism_rest_client',
install_requires=requires,
)
| import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.txt')).read()
requires = [
'requests',
]
setup(name='prism_rest_client',
version='0.1',
description='prism_rest_client',
long_description=README,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
],
author='Elliot Peele',
author_email='elliot@bentlogic.net',
url='https://github.com/elliotpeele/prism_rest_client',
keywords='web rest client',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='prism_rest_client',
install_requires=requires,
)
| mit | Python |
375888254dbb27cdf460e1d31141c4edfb3d314f | 添加pyshere到setup.py | lloy/api-agent | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
__author__ = 'Hardy.zheng'
__version = '0.1'
setup(
name='cdsagent',
version=__version,
description='cdsagent ',
author='hardy.Zheng',
author_email='wei.zheng@yun-idc.com',
install_requires=[
'eventlet>=0.13.0',
'PasteDeploy>=1.5.0',
'six>=1.7.0',
'pysphere>=0.5',
'stevedore>=0.14',
'jsonschema>=2.0.0,<3.0.0',
'jsonpath-rw>=1.2.0,<2.0',
'anyjson>=0.3.3'],
packages=find_packages(),
entry_points={
'instance_create': [
'create = cdsagent.instance:InstanceCreate'],
'instance_delete': [
'delete = cdsagent.instance:InstanceDelete'],
'watch_dog': [
'watch = cdsagent.instance:InstanceWatchDog'],
},
scripts=['cds-agent'],
namespace_packages=['cdsagent'],
include_package_data=True,
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
__author__ = 'Hardy.zheng'
__version = '0.1'
setup(
name='cdsagent',
version=__version,
description='cdsagent ',
author='hardy.Zheng',
author_email='wei.zheng@yun-idc.com',
install_requires=[
'eventlet>=0.13.0',
'PasteDeploy>=1.5.0',
'six>=1.7.0',
'stevedore>=0.14',
'jsonschema>=2.0.0,<3.0.0',
'jsonpath-rw>=1.2.0,<2.0',
'anyjson>=0.3.3'],
packages=find_packages(),
entry_points={
'instance_create': [
'create = cdsagent.instance:InstanceCreate'],
'instance_delete': [
'delete = cdsagent.instance:InstanceDelete'],
'watch_dog': [
'watch = cdsagent.instance:InstanceWatchDog'],
},
scripts=['cds-agent'],
namespace_packages=['cdsagent'],
include_package_data=True,
)
| apache-2.0 | Python |
6edfa553e32dfefb0494c453b60798d754558fee | Remove matplotlib requirement | SamStudio8/gretel | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
requirements = [
"numpy",
"hanselx",
"pysam",
"PyVCF",
]
test_requirements = [
]
setuptools.setup(
name="gretel",
version="0.0.1a",
url="https://github.com/samstudio8/gretel",
description="An algorithm for recovering potential haplotypes from metagenomes",
long_description="",
author="Sam Nicholls",
author_email="sam@samnicholls.net",
maintainer="Sam Nicholls",
maintainer_email="sam@samnicholls.net",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=requirements,
entry_points = {
"console_scripts": [
"gretel=gretel.cmd:main",
]
},
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'License :: OSI Approved :: MIT License',
],
test_suite="tests",
tests_require=test_requirements
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
requirements = [
"numpy",
"hanselx",
"pysam",
"PyVCF",
"matplotlib",
]
test_requirements = [
]
setuptools.setup(
name="gretel",
version="0.0.1a",
url="https://github.com/samstudio8/gretel",
description="An algorithm for recovering potential haplotypes from metagenomes",
long_description="",
author="Sam Nicholls",
author_email="sam@samnicholls.net",
maintainer="Sam Nicholls",
maintainer_email="sam@samnicholls.net",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=requirements,
entry_points = {
"console_scripts": [
"gretel=gretel.cmd:main",
]
},
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'License :: OSI Approved :: MIT License',
],
test_suite="tests",
tests_require=test_requirements
)
| mit | Python |
356f4caabf4b6c455c9045102594b0f72cea7016 | Bump version. | mwchase/class-namespaces,mwchase/class-namespaces | setup.py | setup.py | """A setuptools based setup module."""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open as c_open
from os import path
HERE = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with c_open(path.join(HERE, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='class_namespaces',
version='0.5.3',
description='Class Namespaces',
long_description=LONG_DESCRIPTION,
url='https://github.com/mwchase/class-namespaces',
author='Max Woerner Chase',
author_email='max.chase@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='class namespaces',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
extras_require={
'test': ['coverage', 'pytest'],
},
)
| """A setuptools based setup module."""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open as c_open
from os import path
HERE = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with c_open(path.join(HERE, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='class_namespaces',
version='0.5.2',
description='Class Namespaces',
long_description=LONG_DESCRIPTION,
url='https://github.com/mwchase/class-namespaces',
author='Max Woerner Chase',
author_email='max.chase@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='class namespaces',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
extras_require={
'test': ['coverage', 'pytest'],
},
)
| mit | Python |
42b45b7f5629c235cd6d3931221d241727842e06 | Update version to 1.0.7 | Xuanwo/qingcloud-sdk-python,yunify/qingcloud-sdk-python,markduan/qingcloud-sdk-python | setup.py | setup.py | # coding:utf-8
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.version_info < (2, 6):
error = 'ERROR: qingcloud-sdk requires Python Version 2.6 or above.'
print >> sys.stderr, error
sys.exit(1)
setup(
name = 'qingcloud-sdk',
version = '1.0.7',
description = 'Software Development Kit for QingCloud.',
long_description = open('README.rst', 'rb').read().decode('utf-8'),
keywords = 'qingcloud iaas qingstor sdk',
author = 'Yunify Team',
author_email = 'simon@yunify.com',
url = 'https://docs.qingcloud.com/sdk/',
packages = ['qingcloud', 'qingcloud.conn', 'qingcloud.iaas', 'qingcloud.misc', 'qingcloud.qingstor'],
package_dir = {'qingcloud-sdk': 'qingcloud'},
namespace_packages = ['qingcloud'],
include_package_data = True,
install_requires = ['future']
)
| # coding:utf-8
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.version_info < (2, 6):
error = 'ERROR: qingcloud-sdk requires Python Version 2.6 or above.'
print >> sys.stderr, error
sys.exit(1)
setup(
name = 'qingcloud-sdk',
version = '1.0.6',
description = 'Software Development Kit for QingCloud.',
long_description = open('README.rst', 'rb').read().decode('utf-8'),
keywords = 'qingcloud iaas qingstor sdk',
author = 'Yunify Team',
author_email = 'simon@yunify.com',
url = 'https://docs.qingcloud.com/sdk/',
packages = ['qingcloud', 'qingcloud.conn', 'qingcloud.iaas', 'qingcloud.misc', 'qingcloud.qingstor'],
package_dir = {'qingcloud-sdk': 'qingcloud'},
namespace_packages = ['qingcloud'],
include_package_data = True,
install_requires = ['future']
)
| apache-2.0 | Python |
308d456a39f07b150d75c4a957aaf6f0765ac83f | fix error in setup.py | chrisb2/pi_ina219 | setup.py | setup.py | try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
import sys
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
setup(name = 'pi-ina219',
version = '1.0.0',
author = 'Chris Borrill',
author_email = 'chris.borrill@gmail.com',
description = 'Library that supports the INA219 current and power monitor from Texas Instruments.',
license = 'MIT',
url = 'https://github.com/chrisb/pi_ina219/',
install_requires = requires,
test_suite = 'tests',
py_modules = ['ina219'])
| try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
import sys
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
setup(name = 'pi-ina219',
version = '1.0.0',
author = 'Chris Borrill',
author_email = 'chris.borrill@gmail.com',
description = 'Library that supports the INA219 current and power monitor from Texas Instruments.',
license = 'MIT',
url = 'https://github.com/chrisb/pi_ina219/',
install_requires = requires,
test_suite = 'tests',
pymodules = ['ina219'])
| mit | Python |
c8e8dd74bc8801b6c97932a81180960319714046 | Increment version to 0.0.2 | marekjm/diaspy | setup.py | setup.py | from setuptools import setup, find_packages
setup(name='diaspy',
version='0.0.2',
author='Moritz Kiefer',
author_email='moritz.kiefer@gmail.com',
url='https://github.com/Javafant/diaspora-api',
description='A python api to the social network diaspora',
packages=find_packages(),
install_requires=['requests']
)
| from setuptools import setup, find_packages
setup(name='diaspy',
version='0.0.1',
author='Moritz Kiefer',
author_email='moritz.kiefer@gmail.com',
packages=find_packages(),
install_requires=['requests']
)
| mit | Python |
75318d6c8cc843112edb875359afb1492858765c | Fix version conflict | arcticfoxnv/slackminion,arcticfoxnv/slackminion | setup.py | setup.py | from setuptools import setup, find_packages
from slackminion.plugins.core import version
setup(
name='slackminion',
version=version,
packages=find_packages(exclude=['test_plugins']),
url='https://github.com/arcticfoxnv/slackminion',
license='MIT',
author='Nick King',
author_email='',
description='A python bot framework for slack',
package_data={'slackminion': ['templates/*']},
install_requires=[
'Flask',
'PyYAML',
'requests',
'six',
'slackclient',
'websocket-client',
],
setup_requires=[
'pytest-runner'
],
tests_require=[
'pytest==2.7.2',
'pytest-cov==2.2.1',
'codeclimate-test-reporter==0.1.2',
'coverage==4.1'
],
entry_points={
'console_scripts': [
'slackminion = slackminion.__main__:main',
]
},
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 2 - Pre-Alpha",
"Topic :: Communications :: Chat",
]
)
| from setuptools import setup, find_packages
from slackminion.plugins.core import version
setup(
name='slackminion',
version=version,
packages=find_packages(exclude=['test_plugins']),
url='https://github.com/arcticfoxnv/slackminion',
license='MIT',
author='Nick King',
author_email='',
description='A python bot framework for slack',
package_data={'slackminion': ['templates/*']},
install_requires=[
'Flask',
'PyYAML',
'requests',
'six',
'slackclient',
'websocket-client',
],
setup_requires=[
'pytest-runner'
],
tests_require=[
'pytest==2.7.2',
'pytest-cov==2.2.1',
'codeclimate-test-reporter==0.1.1',
'coverage==4.1'
],
entry_points={
'console_scripts': [
'slackminion = slackminion.__main__:main',
]
},
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 2 - Pre-Alpha",
"Topic :: Communications :: Chat",
]
)
| mit | Python |
a85494c6e9db0458ca50a59232fca508ba9619f5 | Increment version. | consbio/parserutils | setup.py | setup.py | import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.8.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
| import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.8.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
| bsd-3-clause | Python |
d657f8416dddedcd300fa451e6f510b0a639f61b | Bump minimum version for google-api-core to 1.14.0. (#8709) | googleapis/python-dataproc,googleapis/python-dataproc | setup.py | setup.py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
import setuptools
# Package metadata.
name = "google-cloud-dataproc"
description = "Google Cloud Dataproc API client library"
version = "0.4.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 3 - Alpha"
dependencies = ["google-api-core[grpc] >= 1.14.0, < 2.0.0dev"]
extras = {}
# Setup boilerplate below this line.
package_root = os.path.abspath(os.path.dirname(__file__))
readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
readme = readme_file.read()
# Only include packages under the 'google' namespace. Do not include tests,
# benchmarks, etc.
packages = [
package for package in setuptools.find_packages() if package.startswith("google")
]
# Determine which namespaces are needed.
namespaces = ["google"]
if "google.cloud" in packages:
namespaces.append("google.cloud")
setuptools.setup(
name=name,
version=version,
description=description,
long_description=readme,
author="Google LLC",
author_email="googleapis-packages@google.com",
license="Apache 2.0",
url="https://github.com/GoogleCloudPlatform/google-cloud-python",
classifiers=[
release_status,
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Operating System :: OS Independent",
"Topic :: Internet",
],
platforms="Posix; MacOS X; Windows",
packages=packages,
namespace_packages=namespaces,
install_requires=dependencies,
extras_require=extras,
python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
include_package_data=True,
zip_safe=False,
)
| # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
import setuptools
# Package metadata.
name = "google-cloud-dataproc"
description = "Google Cloud Dataproc API client library"
version = "0.4.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 3 - Alpha"
dependencies = ["google-api-core[grpc] >= 1.6.0, < 2.0.0dev"]
extras = {}
# Setup boilerplate below this line.
package_root = os.path.abspath(os.path.dirname(__file__))
readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
readme = readme_file.read()
# Only include packages under the 'google' namespace. Do not include tests,
# benchmarks, etc.
packages = [
package for package in setuptools.find_packages() if package.startswith("google")
]
# Determine which namespaces are needed.
namespaces = ["google"]
if "google.cloud" in packages:
namespaces.append("google.cloud")
setuptools.setup(
name=name,
version=version,
description=description,
long_description=readme,
author="Google LLC",
author_email="googleapis-packages@google.com",
license="Apache 2.0",
url="https://github.com/GoogleCloudPlatform/google-cloud-python",
classifiers=[
release_status,
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Operating System :: OS Independent",
"Topic :: Internet",
],
platforms="Posix; MacOS X; Windows",
packages=packages,
namespace_packages=namespaces,
install_requires=dependencies,
extras_require=extras,
python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
include_package_data=True,
zip_safe=False,
)
| apache-2.0 | Python |
c2a863d2b6da40a9cd23d31390b7510cf6786cd6 | Refactor setup.py | eiginn/passpie,marcwebbie/passpie,scorphus/passpie,scorphus/passpie,eiginn/passpie,marcwebbie/passpie | setup.py | setup.py | #!/usr/bin/env python
import os
import sys
try:
from setuptools import setup, Command, find_packages
except ImportError:
from distutils.core import setup, Command, find_packages
__version__ = "1.0"
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
os.system('python setup.py bdist_wheel upload')
sys.exit()
if sys.argv[-1] == 'tag':
os.system("git tag -a v%s -m 'version v%s'" % (__version__, __version__))
os.system("git push --tags")
sys.exit()
requirements = [
'click==5.1',
'PyYAML==3.11',
'tabulate==0.7.5',
'tinydb==2.4',
]
long_description = open('README.md').read() + "\n\n"
class PyTest(Command):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
self.pytest_args = []
def finalize_options(self):
pass
def run(self):
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
class PyTestCoverage(PyTest):
def initialize_options(self):
self.pytest_args = ['--cov', 'passpie']
setup(
name='passpie',
version=__version__,
license='License :: OSI Approved :: MIT License',
description="Manage your login credentials from the terminal painlessly.",
long_description=long_description,
author='Marcwebbie',
author_email='marcwebbie@gmail.com',
url='https://marcwebbie.github.io/passpie',
download_url='https://github.com/marcwebbie/passpie',
packages=find_packages(),
entry_points={
'console_scripts': [
'passpie=passpie.cli:cli',
]
},
install_requires=requirements,
cmdclass={'test': PyTest, 'coverage': PyTestCoverage},
test_suite='tests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python',
'Topic :: Security :: Cryptography',
],
)
| #!/usr/bin/env python
import os
import sys
try:
from setuptools import setup, Command, find_packages
except ImportError:
from distutils.core import setup, Command, find_packages
__version__ = "1.0"
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
os.system('python setup.py bdist_wheel upload')
sys.exit()
if sys.argv[-1] == 'tag':
os.system("git tag -a v%s -m 'version v%s'" % (__version__, __version__))
os.system("git push --tags")
sys.exit()
requirements = [
'click==5.1',
'PyYAML==3.11',
'tabulate==0.7.5',
'tinydb==2.4',
]
long_description = open('README.md').read() + "\n\n"
class PyTest(Command):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
self.pytest_args = []
def finalize_options(self):
pass
def run(self):
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
class PyTestCoverage(PyTest):
def initialize_options(self):
self.pytest_args = ['--cov', 'passpie']
setup(
name='passpie',
version=__version__,
license='License :: OSI Approved :: MIT License',
description="Manage your login credentials from the terminal painlessly.",
long_description=long_description,
author='Marcwebbie',
author_email='marcwebbie@gmail.com',
url='https://marcwebbie.github.io/passpie',
download_url='https://github.com/marcwebbie/passpie',
packages=find_packages(),
entry_points={
'console_scripts': [
'passpie=passpie.cli:cli',
]
},
install_requires=requirements,
cmdclass={'test': PyTest, 'coverage': PyTestCoverage},
test_suite='tests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python',
'Topic :: Security :: Cryptography',
],
)
| mit | Python |
cf42122aa4e82c22bb26140921bb7468df159636 | Update classifiers | yprez/django-logentry-admin,yprez/django-logentry-admin | setup.py | setup.py | from setuptools import setup
with open('README.rst') as f:
readme = f.read()
with open('CHANGELOG.rst') as f:
changelog = f.read()
setup(
name='django-logentry-admin',
author='Yuri Prezument',
author_email='y@yprez.com',
version='1.0.6',
packages=['logentry_admin'],
package_data={
'logentry_admin': [
'templates/admin/admin/logentry/change_form.html',
'*.po',
],
},
include_package_data=True,
license='ISC',
url='https://github.com/yprez/django-logentry-admin',
description='Show all LogEntry objects in the Django admin site.',
long_description='\n\n'.join([readme, changelog]),
install_requires=[
'Django>=1.7',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.0',
'Framework :: Django :: 2.1',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.0',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
)
| from setuptools import setup
with open('README.rst') as f:
readme = f.read()
with open('CHANGELOG.rst') as f:
changelog = f.read()
setup(
name='django-logentry-admin',
author='Yuri Prezument',
author_email='y@yprez.com',
version='1.0.6',
packages=['logentry_admin'],
package_data={
'logentry_admin': [
'templates/admin/admin/logentry/change_form.html',
'*.po',
],
},
include_package_data=True,
license='ISC',
url='https://github.com/yprez/django-logentry-admin',
description='Show all LogEntry objects in the Django admin site.',
long_description='\n\n'.join([readme, changelog]),
install_requires=[
'Django>=1.7',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.0',
'Framework :: Django :: 2.1',
'Framework :: Django :: 2.2',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
)
| isc | Python |
ed8dd36a07876a711a62eae98d6af79322801b03 | Make sure the shared library doesn't appear in source builds, using various hacks | sklam/llvmlite,markdewing/llvmlite,numba/llvmlite,pitrou/llvmlite,ssarangi/llvmlite,numba/llvmlite,m-labs/llvmlite,m-labs/llvmlite,ssarangi/llvmlite,ssarangi/llvmlite,m-labs/llvmlite,sklam/llvmlite,markdewing/llvmlite,sklam/llvmlite,numba/llvmlite,markdewing/llvmlite,squisher/llvmlite,numba/llvmlite,m-labs/llvmlite,markdewing/llvmlite,pitrou/llvmlite,pitrou/llvmlite,ssarangi/llvmlite,squisher/llvmlite,sklam/llvmlite,squisher/llvmlite,pitrou/llvmlite,squisher/llvmlite | setup.py | setup.py | try:
from setuptools import setup, Extension
except ImportError:
from distutils.core import setup, Extension
from distutils.spawn import spawn
from distutils.command.build import build
from distutils.command.build_ext import build_ext
import os
import sys
from llvmlite.utils import get_library_name
here_dir = os.path.dirname(__file__)
class LlvmliteBuild(build):
def finalize_options(self):
build.finalize_options(self)
# The build isn't platform-independent
if self.build_lib == self.build_purelib:
self.build_lib = self.build_platlib
def get_sub_commands(self):
# Force "build_ext" invocation.
commands = build.get_sub_commands(self)
for c in commands:
if c == 'build_ext':
return commands
return ['build_ext'] + commands
class LlvmliteBuildExt(build_ext):
def run(self):
build_ext.run(self)
cmd = [sys.executable, os.path.join(here_dir, 'ffi', 'build.py')]
spawn(cmd, dry_run=self.dry_run)
# HACK: this makes sure the library file (which is large) is only
# included in binary builds, not source builds.
library_name = get_library_name()
self.distribution.package_data = {
"llvmlite.binding": [get_library_name()],
}
packages = ['llvmlite',
'llvmlite.binding',
'llvmlite.llvmpy',
'llvmlite.tests',
]
setup(name='llvmlite',
description="lightweight wrapper around basic LLVM functionality",
version="0.1",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Software Development :: Code Generators",
"Topic :: Software Development :: Compilers",
],
# Include the separately-compiled shared library
author="Continuum Analytics, Inc.",
author_email="numba-users@continuum.io",
url="https://github.com/numba/llvmlite",
packages=packages,
license="BSD",
cmdclass={'build': LlvmliteBuild,
'build_ext': LlvmliteBuildExt,
},
)
| try:
from setuptools import setup, Extension
except ImportError:
from distutils.core import setup, Extension
from distutils.spawn import spawn
from distutils.command.build import build
import os
import sys
from llvmlite.utils import get_library_name
here_dir = os.path.dirname(__file__)
class LlvmliteBuild(build):
def run(self):
cmd = [sys.executable, os.path.join(here_dir, 'ffi', 'build.py')]
spawn(cmd, dry_run=self.dry_run)
packages = ['llvmlite',
'llvmlite.binding',
'llvmlite.llvmpy',
'llvmlite.tests',
]
setup(name='llvmlite',
description="lightweight wrapper around basic LLVM functionality",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Software Development :: Code Generators",
"Topic :: Software Development :: Compilers",
],
# Include the separately-compiled shared library
package_data={
"llvmlite.binding": [get_library_name()],
},
author="Continuum Analytics, Inc.",
author_email="numba-users@continuum.io",
url="https://github.com/numba/llvmlite",
packages=packages,
license="BSD",
cmdclass={'build': LlvmliteBuild},
)
| bsd-2-clause | Python |
5a3df5efb35f55d136d2364402f1c9009b85e6a4 | Bump version | hammerlab/mhctools,hammerlab/mhctools | setup.py | setup.py | # Copyright (c) 2014. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from setuptools import setup
readme_dir = os.path.dirname(__file__)
readme_filename = os.path.join(readme_dir, 'README.md')
try:
with open(readme_filename, 'r') as f:
readme = f.read()
except:
logging.warn("Failed to load %s" % readme_filename)
readme = ""
try:
import pypandoc
readme = pypandoc.convert(readme, to='rst', format='md')
except:
logging.warn("Conversion of long_description from MD to RST failed")
pass
if __name__ == '__main__':
setup(
name='mhctools',
version="0.1.7",
description="Python interface to running command-line and web-based MHC binding predictors",
author="Alex Rubinsteyn",
author_email="alex {dot} rubinsteyn {at} mssm {dot} edu",
url="https://github.com/hammerlab/mhctools",
license="http://www.apache.org/licenses/LICENSE-2.0.html",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Bio-Informatics',
],
install_requires=[
'numpy>=1.7',
'pandas>=0.13.1',
'varcode>=0.3.17',
'six>=1.9.0'
],
long_description=readme,
packages=['mhctools'],
)
| # Copyright (c) 2014. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from setuptools import setup
readme_dir = os.path.dirname(__file__)
readme_filename = os.path.join(readme_dir, 'README.md')
try:
with open(readme_filename, 'r') as f:
readme = f.read()
except:
logging.warn("Failed to load %s" % readme_filename)
readme = ""
try:
import pypandoc
readme = pypandoc.convert(readme, to='rst', format='md')
except:
logging.warn("Conversion of long_description from MD to RST failed")
pass
if __name__ == '__main__':
setup(
name='mhctools',
version="0.1.6",
description="Python interface to running command-line and web-based MHC binding predictors",
author="Alex Rubinsteyn",
author_email="alex {dot} rubinsteyn {at} mssm {dot} edu",
url="https://github.com/hammerlab/mhctools",
license="http://www.apache.org/licenses/LICENSE-2.0.html",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Bio-Informatics',
],
install_requires=[
'numpy>=1.7',
'pandas>=0.13.1',
'varcode>=0.3.17',
'six>=1.9.0'
],
long_description=readme,
packages=['mhctools'],
)
| apache-2.0 | Python |
a4b2c2f46744bc10b138612adc18f8f32d682b94 | Fix "ImportError: cannot import name wraps" on trusty | Rayman/ros-get,Rayman/ros-get | setup.py | setup.py | #!/usr/bin/env python
import os
import re
from setuptools import setup, find_packages
def get_ini_variable(name):
with open(os.path.join(os.path.dirname(__file__), 'src', 'ros_get', '__init__.py')) as f:
return re.compile(r".*%s = '(.*?)'" % name, re.S).match(f.read()).group(1)
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as r_file:
readme = r_file.read()
setup(
name='ros_get',
license='MIT',
version=get_ini_variable('__version__'),
url=get_ini_variable('__url__'),
author=get_ini_variable('__author__'),
author_email=get_ini_variable('__email__'),
description='Simple tools for working with ROS source packages',
long_description=readme,
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'six>=1.7', # https://github.com/testing-cabal/mock/issues/257
'rosdep',
'rosdistro >= 0.7.3',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
| #!/usr/bin/env python
import os
import re
from setuptools import setup, find_packages
def get_ini_variable(name):
with open(os.path.join(os.path.dirname(__file__), 'src', 'ros_get', '__init__.py')) as f:
return re.compile(r".*%s = '(.*?)'" % name, re.S).match(f.read()).group(1)
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as r_file:
readme = r_file.read()
setup(
name='ros_get',
license='MIT',
version=get_ini_variable('__version__'),
url=get_ini_variable('__url__'),
author=get_ini_variable('__author__'),
author_email=get_ini_variable('__email__'),
description='Simple tools for working with ROS source packages',
long_description=readme,
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro >= 0.7.3',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
| mit | Python |
15f0275b7b8b75bcb100c26dc48392efc90a392e | Bump version. | sunscrapers/djet | setup.py | setup.py | import codecs
from setuptools import setup
requirements = [i.strip() for i in open('requirements.txt').readlines()]
setup(
name='djet',
version='0.1.0',
description='Set of helpers for easy testing of Django apps.',
long_description=codecs.open('README.rst', encoding='utf-8').read(),
license='MIT',
author='SUNSCRAPERS',
author_email='info@sunscrapers.com',
packages=['djet'],
url='https://github.com/sunscrapers/djet',
install_requires=requirements,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing'
]
)
| import codecs
from setuptools import setup
requirements = [i.strip() for i in open('requirements.txt').readlines()]
setup(
name='djet',
version='0.0.10',
description='Set of helpers for easy testing of Django apps.',
long_description=codecs.open('README.rst', encoding='utf-8').read(),
license='MIT',
author='SUNSCRAPERS',
author_email='info@sunscrapers.com',
packages=['djet'],
url='https://github.com/sunscrapers/djet',
install_requires=requirements,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing'
]
)
| mit | Python |
c62bcab18e38712ce2346cc5fe11f7c01759a431 | bump version number | facciocose/django-italian-utils | setup.py | setup.py | import os
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='django-italian-utils',
version='0.3.3',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Libreria di utility per semplificare la creazione di applicazioni italiane.',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/facciocose/django-italian-utils',
author='Luca Marra',
author_email='luca@facciocose.it',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| import os
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='django-italian-utils',
version='0.3.2',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Libreria di utility per semplificare la creazione di applicazioni italiane.',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/facciocose/django-italian-utils',
author='Luca Marra',
author_email='luca@facciocose.it',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| mit | Python |
a98511d4028b678333a56edd293c5905d976ac88 | Fix typo in setup.py | robolivable/prettystring | setup.py | setup.py | from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='prettystring',
version='0.1.0',
description='Build ANSI color encoded strings with ease.',
long_description=readme(),
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals',
'Topic :: Text Processing :: General',
'Topic :: Utilities'
],
keywords='color colorful pretty string strings',
url='https://github.com/robolivable/prettystring',
author='Robert Oliveira',
author_email='oliveira.rlde@gmail.com',
license='MIT',
packages=['prettystring'],
install_requires=['enum34==1.1.6'])
| from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='prettystring',
version='0.1.0',
description='Build ANSI color encoded strings with ease.',
long_description=readme(),
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals',
'Topic :: Text Processing :: General',
'Topic :: Utilities'
],
keywords='color colorful pretty string strings',
url='https://github.com/robolivable/prettystring',
author='Robert Oliveira',
author_email='oliveira.rlde@gmail.com',
license='MIT',
packages=['prettystring'],
install_requires['enum34==1.1.6'])
| mit | Python |
602ec25dcb8934da3d8f3eea3f21d4fdaf93ac37 | add 'mock' | hpk42/p4p | setup.py | setup.py | #! /usr/bin/env python
import os, sys
from setuptools import setup, find_packages
if __name__ == "__main__":
here = os.path.abspath(".")
README = open(os.path.join(here, 'README.rst')).read()
install_requires = ["webob", "simplejson", "pycrypto", "requests", "twisted", "mock"]
if sys.version_info < (2,7):
install_requires.append("argparse>=1.2.1")
setup(
name="friendsecure",
description="lookup service for p2p messenger",
long_description=README,
version='0.1',
maintainer="Holger Krekel",
maintainer_email="holger@merlinux.eu",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.3".split()],
install_requires=install_requires,
entry_points = {
'console_scripts': [
"friend_lookup_server = friendsecure.lookup_server:main",
"friend_client = friendsecure.peer:main",
]
})
| #! /usr/bin/env python
import os, sys
from setuptools import setup, find_packages
if __name__ == "__main__":
here = os.path.abspath(".")
README = open(os.path.join(here, 'README.rst')).read()
install_requires = ["webob", "simplejson", "pycrypto", "requests", "twisted"]
if sys.version_info < (2,7):
install_requires.append("argparse>=1.2.1")
setup(
name="friendsecure",
description="lookup service for p2p messenger",
long_description=README,
version='0.1',
maintainer="Holger Krekel",
maintainer_email="holger@merlinux.eu",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.3".split()],
install_requires=install_requires,
entry_points = {
'console_scripts': [
"friend_lookup_server = friendsecure.lookup_server:main",
"friend_client = friendsecure.peer:main",
]
})
| mit | Python |
4130eb4652f76b90bcfd929fd2fcb15c463d3634 | Improve setup.py, support for extras_require, replace ' with " | farzadghanei/statsd-metrics | setup.py | setup.py | #!/usr/bin/env python
"""
statsdmetrics
--------------
Data metrics for Statsd.
"""
from __future__ import print_function
import os
from os.path import dirname
try:
import setuptools
from setuptools import setup
except ImportError:
setuptools = None
from distutils.core import setup
try:
import distutilazy.test
import distutilazy.clean
except ImportError:
distutilazy = None
from statsdmetrics import __version__
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Networking :: Monitoring",
"Topic :: Internet :: Log Analysis"
]
long_description = __doc__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as fh:
long_description = fh.read()
setup_params = dict(
name = "statsdmetrics",
packages = ["statsdmetrics", "statsdmetrics.app"],
version = __version__,
description = "Metric classes for Statsd",
long_description = long_description,
author = "Farzad Ghanei",
author_email = "farzad.ghanei@gmail.com",
license = "MIT",
classifiers = classifiers,
)
if setuptools:
dev_dependencies_filename = os.path.join(
dirname(__file__),
"requirements-dev.txt"
)
if os.path.exists(dev_dependencies_filename):
with open(dev_dependencies_filename) as fh:
setup_params["extras_require"] = {
"dev": [item.strip() for item in fh.readlines()]
}
setup_params["keywords"] = "statsd metrics"
setup_params["test_suite"] = "tests"
setup_params["zip_safe"] = True
setup_params["entry_points"] = {
"console_scripts": [
"statsd-client = statsdmetrics.app.statsdclient:main",
]
}
if distutilazy:
setup_params["cmdclass"] = dict(
test=distutilazy.test.run_tests,
clean_pyc=distutilazy.clean.clean_pyc,
clean=distutilazy.clean.clean_all
)
if __name__ == "__main__":
setup(**setup_params)
__all__ = (setup_params, classifiers, long_description)
| #!/usr/bin/env python
"""
statsdmetrics
--------------
Data metrics for Statsd.
"""
from __future__ import print_function
import os
try:
import setuptools
from setuptools import setup
except ImportError:
setuptools = None
from distutils.core import setup
try:
import distutilazy.test
import distutilazy.clean
except ImportError:
distutilazy = None
from statsdmetrics import __version__
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Networking :: Monitoring",
"Topic :: Internet :: Log Analysis"
]
long_description = __doc__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as fh:
long_description = fh.read()
setup_params = dict(
name = 'statsdmetrics',
packages = ['statsdmetrics', 'statsdmetrics.app'],
version = __version__,
description = 'Metric classes for Statsd',
long_description = long_description,
author = 'Farzad Ghanei',
author_email = 'farzad.ghanei@gmail.com',
license = 'MIT',
classifiers = classifiers,
)
if setuptools:
setup_params['test_suite'] = 'tests'
setup_params['zip_safe'] = True
setup_params['entry_points'] = {
'console_scripts': [
'statsd-client = statsdmetrics.app.statsdclient:main',
]
}
if distutilazy:
setup_params['cmdclass'] = dict(
test=distutilazy.test.run_tests,
clean_pyc=distutilazy.clean.clean_pyc,
clean=distutilazy.clean.clean_all
)
if __name__ == '__main__':
setup(**setup_params)
__all__ = (setup_params, classifiers, long_description) | mit | Python |
d614dbe8e1d96c9029fc64f72da5acb85a53a5ac | Update setup.py | yunojuno/django-onfido | setup.py | setup.py | # -*- coding: utf-8 -*-
from os import path, pardir, chdir
from setuptools import setup, find_packages
README = open(path.join(path.dirname(__file__), 'README.rst')).read()
# requirements.txt must be included in MANIFEST.in and include_package_data must be True
# in order for this to work; ensures that tox can use the setup to enforce requirements
REQUIREMENTS = '\n'.join(open(path.join(path.dirname(__file__), 'requirements.txt')).readlines())
# allow setup.py to be run from any path
chdir(path.normpath(path.join(path.abspath(__file__), pardir)))
setup(
name="django-onfido",
version="0.9.5-dev",
packages=find_packages(),
install_requires=REQUIREMENTS,
include_package_data=True,
description='Django app for integration with Onfido.',
license='MIT',
long_description=README,
url='https://github.com/yunojuno/django-onfido',
author='Hugo Rodger-Brown',
author_email='code@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| # -*- coding: utf-8 -*-
from os import path, pardir, chdir
from setuptools import setup, find_packages
README = open(path.join(path.dirname(__file__), 'README.rst')).read()
# requirements.txt must be included in MANIFEST.in and include_package_data must be True
# in order for this to work; ensures that tox can use the setup to enforce requirements
REQUIREMENTS = '\n'.join(open(path.join(path.dirname(__file__), 'requirements.txt')).readlines())
# allow setup.py to be run from any path
chdir(path.normpath(path.join(path.abspath(__file__), pardir)))
setup(
name="django-onfido",
version="0.9.4",
packages=find_packages(),
install_requires=REQUIREMENTS,
include_package_data=True,
description='Django app for integration with Onfido.',
long_description=README,
url='https://github.com/yunojuno/django-onfido',
author='Hugo Rodger-Brown',
author_email='code@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| mit | Python |
19f4fcf2acbedd78b4510b8260e95f7d389f812b | Fix the version of pycodestyle to 2.5.0 | hhatto/autopep8,hhatto/autopep8 | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup for autopep8."""
import ast
import io
from setuptools import setup
INSTALL_REQUIRES = (
['pycodestyle == 2.5.0']
)
def version():
"""Return version string."""
with io.open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
return ast.parse(line).body[0].value.s
with io.open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='hhatto.jp@gmail.com',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Quality Assurance',
],
keywords='automation, pep8, format, pycodestyle',
install_requires=INSTALL_REQUIRES,
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup for autopep8."""
import ast
import io
from setuptools import setup
INSTALL_REQUIRES = (
['pycodestyle >= 2.5.0']
)
def version():
"""Return version string."""
with io.open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
return ast.parse(line).body[0].value.s
with io.open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='hhatto.jp@gmail.com',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Quality Assurance',
],
keywords='automation, pep8, format, pycodestyle',
install_requires=INSTALL_REQUIRES,
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
| mit | Python |
e1f874902ee338ef779cbbf10d5d68d39630ac97 | add a newline for pep8 pass | BrianHicks/emit,BrianHicks/emit,BrianHicks/emit | setup.py | setup.py | import os
from setuptools import setup, find_packages
import emit
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError: # for tox
return ''
setup(
# System information
name='emit',
version=emit.__version__,
packages=find_packages(exclude=('test',)),
scripts=['emit/bin/emit_digraph'],
zip_safe=True,
extras_require = {
'celery-routing': ['celery>=3.0.13'],
'rq-routing': ['rq>=0.3.4', 'redis>=2.7.2'],
},
# Human information
author='Brian Hicks',
author_email='brian@brianthicks.com',
url='https://github.com/brianhicks/emit',
description='Build a graph to process streams',
keywords='stream processing',
long_description=read('README.rst'),
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| import os
from setuptools import setup, find_packages
import emit
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError: # for tox
return ''
setup(
# System information
name='emit',
version=emit.__version__,
packages=find_packages(exclude=('test',)),
scripts=['emit/bin/emit_digraph'],
zip_safe=True,
extras_require = {
'celery-routing': ['celery>=3.0.13'],
'rq-routing': ['rq>=0.3.4', 'redis>=2.7.2'],
},
# Human information
author='Brian Hicks',
author_email='brian@brianthicks.com',
url='https://github.com/brianhicks/emit',
description='Build a graph to process streams',
keywords='stream processing',
long_description=read('README.rst'),
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| mit | Python |
7f688edf040a8bbb4bb03f408bd1c4cfab6b19ef | Update Version | pirsquare/BigQuery-GCS | setup.py | setup.py | from setuptools import find_packages
from setuptools import setup
VERSION = '0.0.2'
setup_args = dict(
name='BigQuery-GCS',
description='Export Large Results from BigQuery to Google Cloud Storage',
url='https://github.com/pirsquare/BigQuery-GCS',
version=VERSION,
license='MIT',
packages=find_packages(),
include_package_data=True,
install_requires=['bigquery-python', 'boto'],
author='Ryan Liao',
author_email='pirsquare.ryan@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
if __name__ == '__main__':
setup(**setup_args)
| from setuptools import find_packages
from setuptools import setup
VERSION = '0.0.1'
setup_args = dict(
name='BigQuery-GCS',
description='Export Large Results from BigQuery to Google Cloud Storage',
url='https://github.com/pirsquare/BigQuery-GCS',
version=VERSION,
license='MIT',
packages=find_packages(),
include_package_data=True,
install_requires=['bigquery-python', 'boto'],
author='Ryan Liao',
author_email='pirsquare.ryan@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
if __name__ == '__main__':
setup(**setup_args)
| mit | Python |
dd949fdcd9834400f0534c054b56cb4d825a2be2 | Add package_data to setup.py | rizumu/django-paste-organizer | setup.py | setup.py | from setuptools import setup, find_packages
version = '0.1.0'
setup(
name='django-writeboards',
version=version,
description="Manage 123.writeboard.com hosted writeboards in one place. Allows easier team collaboration on multiple writeboardsself.",
long_description=read('README'),
author='Thomas Schreiber',
author_email='tom@insatsu.us',
url='http://github.com/rizumu/django-writeboards/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
keywords='writeboards,django'
packages=find_packages(),
include_package_data=True,
package_data = {
'friends': [
'templates/writeboards/*/*.html',
'templates/writeboards/*/*.txt',
]
},
zip_safe=False,
) | from setuptools import setup, find_packages
version = '0.1.0'
setup(
name='django-writeboards',
version=version,
description="Manage 123.writeboard.com hosted writeboards in one place. Allows easier team collaboration on multiple writeboardsself.",
long_description=read('README'),
classifiers=[
'Framework :: Django',
'Programming Language :: Python',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
],
keywords='writeboards,django'
author='Thomas Schreiber',
author_email='tom@insatsu.us',
url='http://github.com/rizumu/django-writeboards/',
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
) | mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.