commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
68860fcc9d5fc201017ca910577620e9d833c2c3 | modify SO | ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo | addons/stock_dropshipping/models/sale.py | addons/stock_dropshipping/models/sale.py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models, fields
class SaleOrderLine(models.Model):
_inherit = "sale.order.line"
purchase_line_ids = fields.One2many('purchase.order.line', 'sale_line_id')
@api.multi
def _get_qty_procurement(self):
# People without purchase rights should be able to do this operation
purchase_lines_sudo = self.sudo().purchase_line_ids
if purchase_lines_sudo.filtered(lambda r: r.state != 'cancel'):
qty = 0.0
for po_line in purchase_lines_sudo.filtered(lambda r: r.state != 'cancel'):
qty += po_line.product_uom._compute_quantity(po_line.product_qty, self.product_uom, rounding_method='HALF-UP')
return qty
else:
return super(SaleOrderLine, self)._get_qty_procurement()
| # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models, fields
class SaleOrderLine(models.Model):
_inherit = "sale.order.line"
purchase_line_ids = fields.One2many('purchase.order.line', 'sale_line_id')
@api.multi
def _get_qty_procurement(self):
# People without purchase rights should be able to do this operation
purchase_lines_sudo = self.sudo().purchase_line_ids
if not self.move_ids.filtered(lambda r: r.state != 'cancel') and purchase_lines_sudo.filtered(lambda r: r.state != 'cancel'):
qty = 0.0
for po_line in purchase_lines_sudo.filtered(lambda r: r.state != 'cancel'):
qty += po_line.product_uom._compute_quantity(po_line.product_qty, self.product_uom, rounding_method='HALF-UP')
return qty
else:
return super(SaleOrderLine, self)._get_qty_procurement()
| agpl-3.0 | Python |
3c77617141542fe6bb6aac897aff554cd3c1ec08 | fix missing pbx frameworks | kivy/kivy-ios,kivy/kivy-ios,kivy/kivy-ios | recipes/sdl2/__init__.py | recipes/sdl2/__init__.py | from toolchain import Recipe, shprint
import sh
class LibSDL2Recipe(Recipe):
version = "2.0.8"
url = "https://www.libsdl.org/release/SDL2-{version}.tar.gz"
#version = "iOS-improvements"
#url = "https://bitbucket.org/slime73/sdl-experiments/get/{version}.tar.gz"
library = "Xcode-iOS/SDL/build/Release-{arch.sdk}/libSDL2.a"
include_dir = "include"
pbx_frameworks = [
"OpenGLES", "AudioToolbox", "QuartzCore", "CoreGraphics",
"CoreMotion", "GameController", "AVFoundation", "Metal",
"UIKit"]
def prebuild_arch(self, arch):
if self.has_marker("patched"):
return
self.apply_patch("uikit-transparent.patch")
self.set_marker("patched")
def build_arch(self, arch):
env = arch.get_env()
shprint(sh.xcodebuild, self.ctx.concurrent_xcodebuild,
"ONLY_ACTIVE_ARCH=NO",
"ARCHS={}".format(arch.arch),
"CC={}".format(env['CC']),
"-sdk", arch.sdk,
"-project", "Xcode-iOS/SDL/SDL.xcodeproj",
"-target", "libSDL-iOS",
"-configuration", "Release")
recipe = LibSDL2Recipe()
| from toolchain import Recipe, shprint
import sh
class LibSDL2Recipe(Recipe):
version = "2.0.8"
url = "https://www.libsdl.org/release/SDL2-{version}.tar.gz"
#version = "iOS-improvements"
#url = "https://bitbucket.org/slime73/sdl-experiments/get/{version}.tar.gz"
library = "Xcode-iOS/SDL/build/Release-{arch.sdk}/libSDL2.a"
include_dir = "include"
pbx_frameworks = ["OpenGLES", "AudioToolbox", "QuartzCore", "CoreGraphics",
"CoreMotion", "GameController", "AVFoundation", "Metal"]
def prebuild_arch(self, arch):
if self.has_marker("patched"):
return
self.apply_patch("uikit-transparent.patch")
self.set_marker("patched")
def build_arch(self, arch):
env = arch.get_env()
shprint(sh.xcodebuild, self.ctx.concurrent_xcodebuild,
"ONLY_ACTIVE_ARCH=NO",
"ARCHS={}".format(arch.arch),
"CC={}".format(env['CC']),
"-sdk", arch.sdk,
"-project", "Xcode-iOS/SDL/SDL.xcodeproj",
"-target", "libSDL-iOS",
"-configuration", "Release")
recipe = LibSDL2Recipe()
| mit | Python |
595218c33892facf0cf26e5e6b3e16b2c02e737e | Add a stub for fts_updates | pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner | spring/settings.py | spring/settings.py | from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.fts_updates = 0
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
| from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
| apache-2.0 | Python |
10b5182bd5ee2a2dc8fa1d13c8db1237077b0209 | Fix linting error | PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure | bin/prxtransfer-dns-station-migration.py | bin/prxtransfer-dns-station-migration.py | import boto3
client = boto3.client("route53")
# List the FTP subdomains that should have explicit DNS records added
subdomains = [
"wxyz",
]
changes = []
name = "infra-FtpSe-1W1OF5U4X8M3Z-284373e0ff42a3aa.elb.us-east-1.amazonaws.com"
for subdomain in subdomains:
changes.append(
{
"Action": "UPSERT",
"ResourceRecordSet": {
"Name": f"{subdomain}.prxtransfer.org",
"Type": "A",
"AliasTarget": {
"HostedZoneId": "Z26RNL4JYFTOTI",
"DNSName": name,
"EvaluateTargetHealth": False,
},
},
}
)
client.change_resource_record_sets(
HostedZoneId="Z2DOBCW7CSO5EP",
ChangeBatch={"Changes": changes},
)
| import boto3
client = boto3.client("route53")
# List the FTP subdomains that should have explicit DNS records added
subdomains = [
"wxyz",
]
changes = []
for subdomain in subdomains:
changes.append(
{
"Action": "UPSERT",
"ResourceRecordSet": {
"Name": f"{subdomain}.prxtransfer.org",
"Type": "A",
"AliasTarget": {
"HostedZoneId": "Z26RNL4JYFTOTI",
"DNSName": "infra-FtpSe-1W1OF5U4X8M3Z-284373e0ff42a3aa.elb.us-east-1.amazonaws.com",
"EvaluateTargetHealth": False,
},
},
}
)
client.change_resource_record_sets(
HostedZoneId="Z2DOBCW7CSO5EP",
ChangeBatch={"Changes": changes},
)
| mit | Python |
4f9dd639566515e956812d38f2932237a487a4e6 | Fix typo | bit-bots/bitbots_misc,bit-bots/bitbots_misc,bit-bots/bitbots_misc | bitbots_common/scripts/launch_warning.py | bitbots_common/scripts/launch_warning.py | #!/usr/bin/env python3
import rospy
rospy.logerr("###\n###\n###\n###\n###\nYou didn't specifiy which robot you want to start!\nPlease add minibot:=true or wolfgang:=true or davros:=true behind you roslaunch.\n###\n###\n###\n###\n###")
| #!/usr/bin/env python3
import rospy
rospy.logerr("###\n###\n###\n###\n###\nYou didn't specifiy which robot you want to start!\nPlease add minibot:=true or woflgang:=true or davros:=true behind you roslaunch.\n###\n###\n###\n###\n###") | mit | Python |
ad19f15ed4609c67c1325a02005062b15d503705 | customize plot style | abonaca/gary,abonaca/gary,adrn/gala,adrn/gala,abonaca/gary,adrn/gary,adrn/gary,adrn/gary,adrn/gala | streamteam/integrate/tests/helpers.py | streamteam/integrate/tests/helpers.py | # coding: utf-8
""" Test helpers """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os, sys
# Third-party
import matplotlib.pyplot as plt
import numpy as np
def plot(ts, q, p, marker='.', alpha=0.75, linestyle='-'):
""" Make some helpful plots for testing the integrators. """
qp = np.squeeze(np.vstack((q.T,p.T)))
ndim = qp.shape[0]
fig,axes = plt.subplots(ndim, ndim, figsize=(4*ndim,4*ndim))
kwargs = dict(marker=marker, linestyle=linestyle, alpha=alpha)
for ii in range(ndim):
for jj in range(ndim):
if ii == jj:
axes[jj,ii].plot(qp[ii], **kwargs)
else:
axes[jj,ii].plot(qp[ii], qp[jj], **kwargs)
fig.tight_layout()
for ii in range(ndim):
for jj in range(ndim):
if ii > jj:
axes[jj,ii].set_visible(False)
continue
return fig | # coding: utf-8
""" Test helpers """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os, sys
# Third-party
import matplotlib.pyplot as plt
import numpy as np
def plot(ts, q, p):
""" Make some helpful plots for testing the integrators. """
qp = np.squeeze(np.vstack((q.T,p.T)))
ndim = qp.shape[0]
fig,axes = plt.subplots(ndim, ndim, figsize=(4*ndim,4*ndim))
kwargs = dict(marker='.', linestyle='-')
for ii in range(ndim):
for jj in range(ndim):
if ii == jj:
axes[jj,ii].plot(qp[ii], linestyle='-',
marker='.', alpha=0.75)
else:
axes[jj,ii].plot(qp[ii], qp[jj], linestyle='-',
marker='.', alpha=0.75)
fig.tight_layout()
for ii in range(ndim):
for jj in range(ndim):
if ii > jj:
axes[jj,ii].set_visible(False)
continue
return fig | mit | Python |
cfad3e375dc57e6b4b59c3892d31773221255686 | use `eden prefetch` on windows | ReactiveSocket/reactivesocket-cpp,ReactiveSocket/reactivesocket-cpp,ReactiveSocket/reactivesocket-cpp,rsocket/rsocket-cpp,rsocket/rsocket-cpp,rsocket/rsocket-cpp,rsocket/rsocket-cpp | build/fbcode_builder/getdeps/copytree.py | build/fbcode_builder/getdeps/copytree.py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shutil
import subprocess
from .platform import is_windows
PREFETCHED_DIRS = set()
def containing_repo_type(path):
while True:
if os.path.exists(os.path.join(path, ".git")):
return ("git", path)
if os.path.exists(os.path.join(path, ".hg")):
return ("hg", path)
parent = os.path.dirname(path)
if parent == path:
return None, None
path = parent
def find_eden_root(dirpath):
"""If the specified directory is inside an EdenFS checkout, returns
the canonical absolute path to the root of that checkout.
Returns None if the specified directory is not in an EdenFS checkout.
"""
if is_windows():
repo_type, repo_root = containing_repo_type(dirpath)
if repo_root is not None:
if os.path.exists(os.path.join(repo_root, ".eden", "config")):
return os.path.realpath(repo_root)
return None
try:
return os.readlink(os.path.join(dirpath, ".eden", "root"))
except OSError:
return None
def prefetch_dir_if_eden(dirpath):
""" After an amend/rebase, Eden may need to fetch a large number
of trees from the servers. The simplistic single threaded walk
performed by copytree makes this more expensive than is desirable
so we help accelerate things by performing a prefetch on the
source directory """
global PREFETCHED_DIRS
if dirpath in PREFETCHED_DIRS:
return
root = find_eden_root(dirpath)
if root is None:
return
rel = os.path.relpath(dirpath, root)
print("Prefetching %s..." % rel)
subprocess.call(
["edenfsctl", "prefetch", "--repo", root, "--silent", "%s/**" % rel]
)
PREFETCHED_DIRS.add(dirpath)
def copytree(src_dir, dest_dir, ignore=None):
""" Recursively copy the src_dir to the dest_dir, filtering
out entries using the ignore lambda. The behavior of the
ignore lambda must match that described by `shutil.copytree`.
This `copytree` function knows how to prefetch data when
running in an eden repo.
TODO: I'd like to either extend this or add a variant that
uses watchman to mirror src_dir into dest_dir.
"""
prefetch_dir_if_eden(src_dir)
return shutil.copytree(src_dir, dest_dir, ignore=ignore)
| # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shutil
import subprocess
from .platform import is_windows
PREFETCHED_DIRS = set()
def containing_repo_type(path):
while True:
if os.path.exists(os.path.join(path, ".git")):
return ("git", path)
if os.path.exists(os.path.join(path, ".hg")):
return ("hg", path)
parent = os.path.dirname(path)
if parent == path:
return None, None
path = parent
def find_eden_root(dirpath):
"""If the specified directory is inside an EdenFS checkout, returns
the canonical absolute path to the root of that checkout.
Returns None if the specified directory is not in an EdenFS checkout.
"""
if is_windows():
repo_type, repo_root = containing_repo_type(dirpath)
if repo_root is not None:
if os.path.exists(os.path.join(repo_root, ".eden", "config")):
return os.path.realpath(repo_root)
return None
try:
return os.readlink(os.path.join(dirpath, ".eden", "root"))
except OSError:
return None
def prefetch_dir_if_eden(dirpath):
""" After an amend/rebase, Eden may need to fetch a large number
of trees from the servers. The simplistic single threaded walk
performed by copytree makes this more expensive than is desirable
so we help accelerate things by performing a prefetch on the
source directory """
global PREFETCHED_DIRS
if is_windows():
# prefetch takes longer than not prefetching our opensource
# projects until we cut over to the new globfiles implementation
return
if dirpath in PREFETCHED_DIRS:
return
root = find_eden_root(dirpath)
if root is None:
return
rel = os.path.relpath(dirpath, root)
print("Prefetching %s..." % rel)
subprocess.call(
["edenfsctl", "prefetch", "--repo", root, "--silent", "%s/**" % rel]
)
PREFETCHED_DIRS.add(dirpath)
def copytree(src_dir, dest_dir, ignore=None):
""" Recursively copy the src_dir to the dest_dir, filtering
out entries using the ignore lambda. The behavior of the
ignore lambda must match that described by `shutil.copytree`.
This `copytree` function knows how to prefetch data when
running in an eden repo.
TODO: I'd like to either extend this or add a variant that
uses watchman to mirror src_dir into dest_dir.
"""
prefetch_dir_if_eden(src_dir)
return shutil.copytree(src_dir, dest_dir, ignore=ignore)
| unknown | Python |
bf69e6d248f58245258c350a6f427d9dc2f05343 | Correct transit test failure | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource | test/469-transit-features.py | test/469-transit-features.py | # way 91806504
assert_has_feature(
16, 10470, 25316, 'transit',
{ 'kind': 'bus_stop' })
# node 1241518350
assert_has_feature(
16, 10480, 25332, 'pois',
{ 'kind': 'bus_stop' })
# way 196670577
assert_has_feature(
16, 10486, 25326, 'transit',
{ 'kind': 'platform' })
| # way 91806504
assert_has_feature(
16, 10470, 25316, 'transit',
{ 'kind': 'bus_stop' })
# node 1241518350
assert_has_feature(
16, 10480, 25332, 'transit',
{ 'kind': 'bus_stop' })
# way 196670577
assert_has_feature(
16, 10486, 25326, 'transit',
{ 'kind': 'platform' })
| mit | Python |
3cca42d17c7bdcd250df3723240c760434a08a60 | update create cloud account command example | cloudsidekick/catoclient,cloudsidekick/catoclient | catoclient/commands/createcloudaccount.py | catoclient/commands/createcloudaccount.py | #########################################################################
# Copyright 2011 Cloud Sidekick
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#########################################################################
import catoclient.catocommand
from catoclient.param import Param
class CreateCloudAccount(catoclient.catocommand.CatoCommand):
Description = 'Creates new Cloud Account credentials in Cato used to access a cloud endpoint.'
API = 'create_account'
Examples = '''
cato-create-cloud-account -name "vcloudtest" -v "vCloud" -l "tom.thumb@example.com" -p "passw0rd" -d "vcloud-test"
'''
Options = [Param(name='provider', short_name='v', long_name='provider',
optional=False, ptype='string',
doc='The name of a Cato supported Cloud Provider. One of: Eucalyptus, vCloud, VMware, AWS, OpenStackAws'),
Param(name='name', short_name='n', long_name='name',
optional=False, ptype='string',
doc='A name for the new Cloud Account.'),
Param(name='login', short_name='l', long_name='login',
optional=False, ptype='string',
doc='Login name (Access Key) for the new Cloud Account.'),
Param(name='password', short_name='p', long_name='password',
optional=False, ptype='string',
doc='Password for the new Cloud Account.'),
Param(name='default_cloud', short_name='d', long_name='default_cloud',
optional=False, ptype='string',
doc='A default Cloud to be associated with this Account.'),
Param(name='account_number', short_name='a', long_name='account_number',
optional=True, ptype='string',
doc='An account number for the New Cloud Account.')
]
def main(self):
results = self.call_api(self.API, ['provider', 'name', 'login', 'password', 'default_cloud', 'account_number'])
print(results)
| #########################################################################
# Copyright 2011 Cloud Sidekick
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#########################################################################
import catoclient.catocommand
from catoclient.param import Param
class CreateCloudAccount(catoclient.catocommand.CatoCommand):
Description = 'Creates a new Cloud Account.'
API = 'create_account'
Examples = ''''''
Options = [Param(name='provider', short_name='v', long_name='provider',
optional=False, ptype='string',
doc='The name of a Cato supported Cloud Provider.'),
Param(name='name', short_name='n', long_name='name',
optional=False, ptype='string',
doc='A name for the new Cloud Account.'),
Param(name='login', short_name='l', long_name='login',
optional=False, ptype='string',
doc='Login name (Access Key) for the new Cloud Account.'),
Param(name='password', short_name='p', long_name='password',
optional=False, ptype='string',
doc='Password for the new Cloud Account.'),
Param(name='default_cloud', short_name='d', long_name='default_cloud',
optional=False, ptype='string',
doc='A default Cloud to be associated with this Account.'),
Param(name='account_number', short_name='a', long_name='account_number',
optional=True, ptype='string',
doc='An account number for the New Cloud Account.')
]
def main(self):
results = self.call_api(self.API, ['provider', 'name', 'login', 'password', 'default_cloud', 'account_number'])
print(results)
| apache-2.0 | Python |
a2adce3bb374acf7e2eef1fd6dfc6648027ab06d | Modify the views according to the modification of models | ZhibinCH/my3MW,ZhibinCH/my3MW | sites/views.py | sites/views.py | from django.http import HttpResponse
from django.template import loader
from django.shortcuts import render,render_to_response
from django.http import Http404
from sites.models import *
def sites_overview(request):
# if not Sites.objects.all():
#
# site = Sites(site_name='Demo Site', site_id='1',date='2015-02-01',a_value = '12',b_value='16')
# site.save()
# site = Sites(site_name='Demo Site', site_id='1',date='2015-02-03',a_value = '20',b_value='100')
# site.save()
# site = Sites(site_name='Demo Site', site_id='1',date='2015-02-10',a_value = '20',b_value='80')
# site.save()
# site = Sites(site_name='ABC Site', site_id='2',date='2015-02-03',a_value = '5',b_value='15')
# site.save()
# site = Sites(site_name='XYZ Site', site_id='3',date='2015-02-15',a_value = '5',b_value='15')
# site.save()
# site = Sites(site_name='XYZ Site', site_id='3',date='2015-02-28',a_value = '5',b_value='15')
# site.save()
template = loader.get_template('sites/sites_list.html')
context = {}
context['sites'] = Sites.objects.all()
return HttpResponse(template.render(context, request))
# return render_to_response('sites/sites_list.html')
def site_detail(request,site_id):
site = Site.objects.filter(site_id = site_id)
return render_to_response('sites/site_detail.html',{'site':site})
| from django.http import HttpResponse
from django.template import loader
from django.shortcuts import render,render_to_response
from django.http import Http404
from sites.models import *
def sites_overview(request):
# if not Sites.objects.all():
#
# site = Sites(site_name='Demo Site', site_id='1',date='2015-02-01',a_value = '12',b_value='16')
# site.save()
# site = Sites(site_name='Demo Site', site_id='1',date='2015-02-03',a_value = '20',b_value='100')
# site.save()
# site = Sites(site_name='Demo Site', site_id='1',date='2015-02-10',a_value = '20',b_value='80')
# site.save()
# site = Sites(site_name='ABC Site', site_id='2',date='2015-02-03',a_value = '5',b_value='15')
# site.save()
# site = Sites(site_name='XYZ Site', site_id='3',date='2015-02-15',a_value = '5',b_value='15')
# site.save()
# site = Sites(site_name='XYZ Site', site_id='3',date='2015-02-28',a_value = '5',b_value='15')
# site.save()
template = loader.get_template('sites/sites_list.html')
context = {}
context['sites'] = Sites.objects.all()
return HttpResponse(template.render(context, request))
# return render_to_response('sites/sites_list.html')
def site_detail(request,site_id):
site = Sites.objects.filter(site_id = site_id)
return render_to_response('sites/site_detail.html',{'site':site})
| bsd-2-clause | Python |
4fb65741f91224c22ad504313f513a76f12f08a5 | update errors.py doc | waliens/sldc | sldc/errors.py | sldc/errors.py | # -*- coding: utf-8 -*-
__author__ = "Romain Mormont <romainmormont@hotmail.com>"
__version__ = "0.1"
class TileExtractionException(Exception):
"""Thrown when a tile is requested but cannot be fetched"""
pass
class ImageExtractionException(Exception):
"""Thrown when an image is requested cannot be extracted"""
pass
| # -*- coding: utf-8 -*-
class TileExtractionException(Exception):
"""
Thrown when a tile is requested but cannot be fetched
"""
pass
class ImageExtractionException(Exception):
"""
Thrown when an image is requested cannot be extracted
"""
pass
| mit | Python |
6dcbaa509546678666cfe74c7b58993bc0bfacb9 | update tests to use the cluster arg | SUSE/ceph-deploy-to-be-deleted,osynge/ceph-deploy,SUSE/ceph-deploy,codenrhoden/ceph-deploy,imzhulei/ceph-deploy,ddiss/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,isyippee/ceph-deploy,branto1/ceph-deploy,shenhequnying/ceph-deploy,ceph/ceph-deploy,shenhequnying/ceph-deploy,trhoden/ceph-deploy,alfredodeza/ceph-deploy,SUSE/ceph-deploy,ghxandsky/ceph-deploy,isyippee/ceph-deploy,branto1/ceph-deploy,Vicente-Cheng/ceph-deploy,ktdreyer/ceph-deploy,ceph/ceph-deploy,ghxandsky/ceph-deploy,rtulke/ceph-deploy,rtulke/ceph-deploy,jumpstarter-io/ceph-deploy,jumpstarter-io/ceph-deploy,ktdreyer/ceph-deploy,codenrhoden/ceph-deploy,zhouyuan/ceph-deploy,alfredodeza/ceph-deploy,ddiss/ceph-deploy,trhoden/ceph-deploy,osynge/ceph-deploy,imzhulei/ceph-deploy,zhouyuan/ceph-deploy,Vicente-Cheng/ceph-deploy | ceph_deploy/tests/unit/util/test_paths.py | ceph_deploy/tests/unit/util/test_paths.py | from ceph_deploy.util import paths
class TestMonPaths(object):
def test_base_path(self):
assert paths.mon._base.endswith('/ceph-')
def test_path(self):
result = paths.mon.path('ceph', 'myhostname')
assert result.startswith('/')
assert result.endswith('/ceph-myhostname')
def test_done(self):
result = paths.mon.done('ceph', 'myhostname')
assert result.startswith('/')
assert result.endswith('ceph-myhostname/done')
def test_init(self):
result = paths.mon.init('ceph', 'myhostname', 'init')
assert result.startswith('/')
assert result.endswith('ceph-myhostname/init')
def test_keyring(self):
result = paths.mon.keyring('mycluster', 'myhostname')
assert result.startswith('/')
assert result.endswith('tmp/mycluster-myhostname.mon.keyring')
| from ceph_deploy.util import paths
class TestMonPaths(object):
def test_base_path(self):
assert paths.mon._base.endswith('/ceph-')
def test_path(self):
result = paths.mon.path('myhostname')
assert result.startswith('/')
assert result.endswith('/ceph-myhostname')
def test_done(self):
result = paths.mon.done('myhostname')
assert result.startswith('/')
assert result.endswith('ceph-myhostname/done')
def test_init(self):
result = paths.mon.init('myhostname', 'init')
assert result.startswith('/')
assert result.endswith('ceph-myhostname/init')
def test_keyring(self):
result = paths.mon.keyring('mycluster', 'myhostname')
assert result.startswith('/')
assert result.endswith('tmp/mycluster-myhostname.mon.keyring')
| mit | Python |
0647264a7c9ac410a0d342250d4933b1ba5c7fb1 | print in main | SebastianCallh/kartoffel-tsea29,SebastianCallh/kartoffel-tsea29 | pi/main.py | pi/main.py | import signal
import sys
import traceback
import datetime
from datetime import timedelta
from bus import Bus
from navigator import Navigator
from driver import Driver
from messages import read_messages, subscribe_to_cmd
from outbound import request_sensor_data, CMD_RETURN_SENSOR_DATA, \
set_motor_speed, set_right_motor_speed, set_left_motor_speed
bus = Bus()
driver = Driver(bus)
navigator = Navigator(driver)
# Update frequency
last_request = datetime.datetime.now()
request_period = timedelta(milliseconds=1)
busy = False
def sensor_data_received(ir_left_mm, ir_right_mm):
global busy, navigator
busy = False
print('ir right': ir_right_mm)
navigator.sensor_data_received(ir_left_mm, ir_right_mm)
def handle_abort(signum, frame):
# Stop motors to avoid robot running amok
set_motor_speed(bus, 0)
sys.exit(0)
def handle_bus(bus):
global busy, last_request, request_period
if not busy and datetime.datetime.now() - last_request > request_period:
busy = True
last_request = datetime.datetime.now()
request_sensor_data(bus)
# Setup
signal.signal(signal.SIGINT, handle_abort)
subscribe_to_cmd(CMD_RETURN_SENSOR_DATA, sensor_data_received)
try:
while True:
read_messages(bus)
handle_bus(bus)
navigator.navigate()
except:
traceback.print_exc()
set_motor_speed(bus, 0) | import signal
import sys
import traceback
import datetime
from datetime import timedelta
from bus import Bus
from navigator import Navigator
from driver import Driver
from messages import read_messages, subscribe_to_cmd
from outbound import request_sensor_data, CMD_RETURN_SENSOR_DATA, \
set_motor_speed, set_right_motor_speed, set_left_motor_speed
bus = Bus()
driver = Driver(bus)
navigator = Navigator(driver)
# Update frequency
last_request = datetime.datetime.now()
request_period = timedelta(milliseconds=1)
busy = False
def sensor_data_received(ir_left_mm, ir_right_mm):
global busy, navigator
busy = False
navigator.sensor_data_received(ir_left_mm, ir_right_mm)
def handle_abort(signum, frame):
# Stop motors to avoid robot running amok
set_motor_speed(bus, 0)
sys.exit(0)
def handle_bus(bus):
global busy, last_request, request_period
if not busy and datetime.datetime.now() - last_request > request_period:
busy = True
last_request = datetime.datetime.now()
request_sensor_data(bus)
# Setup
signal.signal(signal.SIGINT, handle_abort)
subscribe_to_cmd(CMD_RETURN_SENSOR_DATA, sensor_data_received)
try:
while True:
read_messages(bus)
handle_bus(bus)
navigator.navigate()
except:
traceback.print_exc()
set_motor_speed(bus, 0) | mit | Python |
a578e2e738a77f8ca0073b2b337a8fa79794500b | Change default SERVER_NAME to localhost:5000 | peterhil/skeleton,peterhil/ninhursag,peterhil/skeleton,peterhil/ninhursag,peterhil/ninhursag,peterhil/ninhursag,peterhil/skeleton | skeleton/settings.py | skeleton/settings.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'skeleton.dev'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'skeleton.dev'
API_TOKEN = 'some-api-token'
| mit | Python |
9c53de5034d1f7184eba5f1d5414884386901cd9 | add empty return type test | dwillmer/jedi,tjwei/jedi,jonashaag/jedi,WoLpH/jedi,dwillmer/jedi,WoLpH/jedi,flurischt/jedi,mfussenegger/jedi,tjwei/jedi,jonashaag/jedi,flurischt/jedi,mfussenegger/jedi | test/completion/docstring.py | test/completion/docstring.py | """ Test docstrings in functions and classes, which are used to infer types """
# -----------------
# sphinx style
# -----------------
def f(a, b, c, d):
""" asdfasdf
:param a: blablabla
:type a: str
:type b: (str, int)
:type c: threading.Thread
:type d: :class:`threading.Thread`
:rtype: dict
"""
#? str()
a
#? str()
b[0]
#? int()
b[1]
#? ['join']
c.join
#? ['join']
d.join
#? dict()
f()
# wrong declarations
def f(a, b):
"""
:param a: Forgot type declaration
:type a:
:param b: Just something
:type b: ``
:rtype:
"""
#?
a
#?
b
#?
f()
# -----------------
# epydoc style
# -----------------
def e(a, b):
""" asdfasdf
@type a: str
@param a: blablabla
@type b: (str, int)
@param b: blablah
@rtype: list
"""
#? str()
a
#? str()
b[0]
#? int()
b[1]
#? list()
e()
# Returns with param type only
def rparam(a,b):
"""
@type a: str
"""
return a
#? str()
rparam()
# Composite types
def composite():
"""
@rtype: (str, int, dict)
"""
x, y, z = composite()
#? str()
x
#? int()
y
#? dict()
z
# Both docstring and calculated return type
def both():
"""
@rtype: str
"""
return 23
#? str(), int()
both()
class Test(object):
def __init__(self):
self.teststr = ""
"""
# jedi issue #210
"""
def test(self):
#? ['teststr']
self.teststr
# -----------------
# statement docstrings
# -----------------
d = ''
""" bsdf """
#? str()
d.upper()
| """ Test docstrings in functions and classes, which are used to infer types """
# -----------------
# sphinx style
# -----------------
def f(a, b, c, d):
""" asdfasdf
:param a: blablabla
:type a: str
:type b: (str, int)
:type c: threading.Thread
:type d: :class:`threading.Thread`
:rtype: dict
"""
#? str()
a
#? str()
b[0]
#? int()
b[1]
#? ['join']
c.join
#? ['join']
d.join
#? dict()
f()
# wrong declarations
def f(a, b):
"""
:param a: Forgot type declaration
:type a:
:param b: Just something
:type b: ``
"""
#?
a
#?
b
# -----------------
# epydoc style
# -----------------
def e(a, b):
""" asdfasdf
@type a: str
@param a: blablabla
@type b: (str, int)
@param b: blablah
@rtype: list
"""
#? str()
a
#? str()
b[0]
#? int()
b[1]
#? list()
e()
# Returns with param type only
def rparam(a,b):
"""
@type a: str
"""
return a
#? str()
rparam()
# Composite types
def composite():
"""
@rtype: (str, int, dict)
"""
x, y, z = composite()
#? str()
x
#? int()
y
#? dict()
z
# Both docstring and calculated return type
def both():
"""
@rtype: str
"""
return 23
#? str(), int()
both()
class Test(object):
def __init__(self):
self.teststr = ""
"""
# jedi issue #210
"""
def test(self):
#? ['teststr']
self.teststr
# -----------------
# statement docstrings
# -----------------
d = ''
""" bsdf """
#? str()
d.upper()
| mit | Python |
d574cd0f50a5f1771ebb7f0cbc0295746108c937 | Add nitpicky | ongr-io/docs.ongr.io | source/conf.py | source/conf.py | # -*- coding: utf-8 -*-
execfile("conf-travis.py")
# Check that all links are linking to existing documents
nitpicky = True
# Pulling all repository components.
execfile("pull.py")
| # -*- coding: utf-8 -*-
execfile("conf-travis.py")
# Pulling all repository components.
execfile("pull.py")
| mit | Python |
52338a07bba66da549a55391747d74a4a7e6aba3 | Set version to v3.0.0.dev13 | spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy | spacy/about.py | spacy/about.py | # fmt: off
__title__ = "spacy"
__version__ = "3.0.0.dev13"
__release__ = True
__download_url__ = "https://github.com/explosion/spacy-models/releases/download"
__compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json"
__shortcuts__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json"
__projects__ = "https://github.com/explosion/spacy-boilerplates"
| # fmt: off
__title__ = "spacy"
__version__ = "3.0.0.dev12"
__release__ = True
__download_url__ = "https://github.com/explosion/spacy-models/releases/download"
__compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json"
__shortcuts__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json"
__projects__ = "https://github.com/explosion/spacy-boilerplates"
| mit | Python |
8af4b9e4dfd6e8e273be5613c7dde017ae2a3354 | Fix compatibility.json link | explosion/spaCy,oroszgy/spaCy.hu,recognai/spaCy,oroszgy/spaCy.hu,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,raphael0202/spaCy,recognai/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,raphael0202/spaCy,aikramer2/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,recognai/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,raphael0202/spaCy,aikramer2/spaCy,recognai/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,honnibal/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,explosion/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy | spacy/about.py | spacy/about.py | # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '1.6.0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__docs__ = 'https://spacy.io/docs/usage'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = {'en': 'en_core_web_sm', 'de': 'de_core_web_md', 'vectors': 'en_vectors_glove_md'}
| # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '1.6.0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__docs__ = 'https://spacy.io/docs/usage'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json?token=ANAt54fi5zcUtnwGhMLw2klWwcAyHkZGks5Y0nw1wA%3D%3D'
__shortcuts__ = {'en': 'en_core_web_sm', 'de': 'de_core_web_md', 'vectors': 'en_vectors_glove_md'}
| mit | Python |
dad97a33179d6b7cb2c7024ea46983618162dc24 | Switch to verified https requests | DavidHHShao/slack | slack/http_client.py | slack/http_client.py | # Copyright (c) 2014 Katsuya Noguchi
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import requests
import slack
from slack.exception import SlackError, \
InvalidAuthError, \
NotAuthedError, \
AccountInactiveError, \
ChannelNotFoundError, \
ChannelArchivedError, \
NotInChannelError, \
RateLimitedError
def get(method, params):
url = _build_url(method)
response = requests.get(url, params=params, verify=True).json()
_raise_error_if_not_ok(response)
return response
def post(method, data):
url = _build_url(method)
response = requests.post(url, data=data, verify=True).json()
_raise_error_if_not_ok(response)
return response
def _build_url(method):
return '%s/%s' % (slack.api_base_url, method)
def _raise_error_if_not_ok(response):
if response['ok']:
return
if response['error'] == 'invalid_auth':
raise InvalidAuthError()
if response['error'] == 'not_authed':
raise NotAuthedError()
if response['error'] == 'account_inactive':
raise AccountInactiveError()
if response['error'] == 'channel_not_found':
raise ChannelNotFoundError()
if response['error'] == 'is_archived':
raise ChannelArchivedError()
if response['error'] == 'not_in_channel':
raise NotInChannelError()
if response['error'] == 'rate_limited':
raise RateLimitedError()
raise SlackError(response['error'])
| # Copyright (c) 2014 Katsuya Noguchi
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import requests
import slack
from slack.exception import SlackError, \
InvalidAuthError, \
NotAuthedError, \
AccountInactiveError, \
ChannelNotFoundError, \
ChannelArchivedError, \
NotInChannelError, \
RateLimitedError
def get(method, params):
url = _build_url(method)
response = requests.get(url, params=params, verify=False).json()
_raise_error_if_not_ok(response)
return response
def post(method, data):
url = _build_url(method)
response = requests.post(url, data=data, verify=False).json()
_raise_error_if_not_ok(response)
return response
def _build_url(method):
return '%s/%s' % (slack.api_base_url, method)
def _raise_error_if_not_ok(response):
if response['ok']:
return
if response['error'] == 'invalid_auth':
raise InvalidAuthError()
if response['error'] == 'not_authed':
raise NotAuthedError()
if response['error'] == 'account_inactive':
raise AccountInactiveError()
if response['error'] == 'channel_not_found':
raise ChannelNotFoundError()
if response['error'] == 'is_archived':
raise ChannelArchivedError()
if response['error'] == 'not_in_channel':
raise NotInChannelError()
if response['error'] == 'rate_limited':
raise RateLimitedError()
raise SlackError(response['error'])
| mit | Python |
cf4436b69922b55bd612ad83cce0ff7ebf34a1d0 | Bump version | smarkets/smk_python_sdk | smarkets/__init__.py | smarkets/__init__.py | # Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '1.0.9'
def private(something):
something.__private__ = True
return something
__all__ = ()
| # Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '1.0.8'
def private(something):
something.__private__ = True
return something
__all__ = ()
| mit | Python |
e71bb55c2db45a8743c7e3b47f823d454319b317 | add poster to video | arkarkark/feedapp,arkarkark/feedapp,arkarkark/feedapp,arkarkark/feedapp | src/server/instagram.py | src/server/instagram.py | # Copyright 2017 Alex K (wtwf.com)
__author__ = 'wtwf.com (Alex K)'
import cgi
import datetime
import json
import logging
import re
import PyRSS2Gen as rss
from google.appengine.api import urlfetch
from google.appengine.ext import webapp
class RssFeed(webapp.RequestHandler):
"""Make RSS Feed for a (public) instagram user."""
def get(self, user):
url = "https://www.instagram.com/%s/media/" % user
logging.info("fetching: %r", url)
result = urlfetch.fetch(url)
if result.status_code != 200:
return self.error(result.status_code)
media = json.loads(result.content)
if "items" not in media or len(media["items"]) == 0:
return self.error(404)
f = None
for item in media["items"]:
if f is None:
user = item["user"]
title = "%s (@%s)" % (user["full_name"], user["username"])
f = rss.RSS2(
title=title,
link="https://instagram.com/%s" % user["username"],
description="",
lastBuildDate=datetime.datetime.now(),
)
img_src = re.sub(
r'c[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/',
'',
item["images"]["standard_resolution"]["url"]
)
if item["type"] == "video":
media = """<video width="320" height="320" controls="controls">
<source src="%s" type="video/mp4" poster="%s" />
</video>""" % (item["alt_media_url"], img_src)
else:
media = """<a href="%s"><img src="%s"></a>""" % (item["link"], img_src)
body = """%s<br>%s""" % (media, cgi.escape(item["caption"]["text"]))
rss_item = {
"title": title,
"link": item["link"],
"description": body,
"guid": rss.Guid(item["id"], False),
"pubDate": datetime.datetime.fromtimestamp(int(item["created_time"])),
}
if item["type"] == "video":
rss_item["enclosure"] = rss.Enclosure(item["alt_media_url"], 10, "video/mp4")
f.items.append(rss.RSSItem(**rss_item))
self.response.headers['Content-Type'] = 'text/xml'
f.write_xml(self.response.out)
| # Copyright 2017 Alex K (wtwf.com)
__author__ = 'wtwf.com (Alex K)'
import cgi
import datetime
import json
import logging
import re
import PyRSS2Gen as rss
from google.appengine.api import urlfetch
from google.appengine.ext import webapp
class RssFeed(webapp.RequestHandler):
"""Make RSS Feed for a (public) instagram user."""
def get(self, user):
url = "https://www.instagram.com/%s/media/" % user
logging.info("fetching: %r", url)
result = urlfetch.fetch(url)
if result.status_code != 200:
return self.error(result.status_code)
media = json.loads(result.content)
if "items" not in media or len(media["items"]) == 0:
return self.error(404)
f = None
for item in media["items"]:
if f is None:
user = item["user"]
title = "%s (@%s)" % (user["full_name"], user["username"])
f = rss.RSS2(
title=title,
link="https://instagram.com/%s" % user["username"],
description="",
lastBuildDate=datetime.datetime.now(),
)
img_src = re.sub(
r'c[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/',
'',
item["images"]["standard_resolution"]["url"]
)
if item["type"] == "video":
media = """<video width="320" height="320" controls="controls">
<source src="%s" type="video/mp4" />
</video>""" % item["alt_media_url"]
else:
media = """<a href="%s"><img src="%s"></a>""" % (item["link"], img_src)
body = """%s<br>%s""" % (media, cgi.escape(item["caption"]["text"]))
rss_item = {
"title": title,
"link": item["link"],
"description": body,
"guid": rss.Guid(item["id"], False),
"pubDate": datetime.datetime.fromtimestamp(int(item["created_time"])),
}
if item["type"] == "video":
rss_item["enclosure"] = rss.Enclosure(item["alt_media_url"], 10, "video/mp4")
f.items.append(rss.RSSItem(**rss_item))
self.response.headers['Content-Type'] = 'text/xml'
f.write_xml(self.response.out)
| mit | Python |
025a2d28fffcb178d35127b5ed76306a9f0f9818 | Include Django migrations in Python package (fixes #9) | muccg/django-iprestrict,whyflyru/django-iprestrict,whyflyru/django-iprestrict,whyflyru/django-iprestrict,muccg/django-iprestrict,whyflyru/django-iprestrict,muccg/django-iprestrict | setup.py | setup.py | import os
import re
from setuptools import setup
def get_package_version(package):
version = re.compile(r"(?:__)?version(?:__)?\s*=\s\"(.*)\"", re.I)
initfile = os.path.join(os.path.dirname(__file__), package, "__init__.py")
for line in open(initfile):
m = version.match(line)
if m:
return m.group(1)
return "UNKNOWN"
setup(
name='django-iprestrict',
version=get_package_version("iprestrict"),
description='Django app + middleware to restrict access to all or sections of a Django project by client IP ranges',
long_description='Django app + middleware to restrict access to all or sections of a Django project by client IP ranges',
author='Tamas Szabo, CCG, Murdoch University',
author_email='devops@ccg.murdoch.edu.au',
url='https://github.com/muccg/django-iprestrict',
download_url='https://github.com/muccg/django-iprestrict/releases',
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development"
],
packages=[
'iprestrict',
'iprestrict.management',
'iprestrict.management.commands',
'iprestrict.migrations',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'Django>=1.7',
'django-templatetag-handlebars',
],
test_suite='tests.runtests.main',
)
| import os
import re
from setuptools import setup
def get_package_version(package):
version = re.compile(r"(?:__)?version(?:__)?\s*=\s\"(.*)\"", re.I)
initfile = os.path.join(os.path.dirname(__file__), package, "__init__.py")
for line in open(initfile):
m = version.match(line)
if m:
return m.group(1)
return "UNKNOWN"
setup(
name='django-iprestrict',
version=get_package_version("iprestrict"),
description='Django app + middleware to restrict access to all or sections of a Django project by client IP ranges',
long_description='Django app + middleware to restrict access to all or sections of a Django project by client IP ranges',
author='Tamas Szabo, CCG, Murdoch University',
author_email='devops@ccg.murdoch.edu.au',
url='https://github.com/muccg/django-iprestrict',
download_url='https://github.com/muccg/django-iprestrict/releases',
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development"
],
packages=[
'iprestrict',
'iprestrict.management',
'iprestrict.management.commands',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'Django>=1.7',
'django-templatetag-handlebars',
],
test_suite='tests.runtests.main',
)
| bsd-3-clause | Python |
1e710cde77cab75946ac290578230ece09a1e0d8 | fix name and package the same | Lab-317/NewsParser | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: bustta
# @Date: 2014-11-04 00:47:53
# @Last Modified by: kenny.tsai
# @Last Modified time: 2014-11-05 10:10:37
from distutils.core import setup
setup(
name = 'newsParser',
packages = ['newsParser'],
version = '0.0.4',
description = 'Taiwan News Parser',
author = 'lab317',
author_email = 'balicanta@gmail.com',
url = 'https://github.com/Lab-317/NewsParser',
download_url = 'https://github.com/Lab-317/NewsParser/archive/v0.0.4.tar.gz',
keywords = ['News', 'Parser', 'Taiwan'],
classifiers = [
"Development Status :: 2 - Pre-Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
"Natural Language :: Chinese (Traditional)"
],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: bustta
# @Date: 2014-11-04 00:47:53
# @Last Modified by: bustta
# @Last Modified time: 2014-11-05 01:13:27
from distutils.core import setup
setup(
name = 'NewsParser',
packages = ['newsParser'],
scripts = [''],
version = '0.0.3',
description = 'Taiwan News Parser',
author = 'lab317',
author_email = 'balicanta@gmail.com',
url = 'https://github.com/Lab-317/NewsParser',
download_url = 'https://github.com/Lab-317/NewsParser/archive/v0.0.3.tar.gz',
keywords = ['News', 'Parser', 'Taiwan'],
classifiers = [],
)
| mit | Python |
23b15ebead9c5818dc94471d5b56feaa1fcf24a7 | Bump version to 0.3.3 | fuller-inc/bqx | setup.py | setup.py | import os
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
from bqx._func_generator import generate_funcpy
here = os.path.abspath(os.path.dirname(__file__))
if 'build' in sys.argv:
import pypandoc
with open(os.path.join(here, 'README.md')) as f:
readme = pypandoc.convert(f.read(), 'rst', format='md')
with open(os.path.join(here, 'README.rst'), 'w') as f:
f.write(readme)
else:
readme = ''
if 'install' in sys.argv or 'test' in sys.argv:
funcpy_in = os.path.join(here, 'bqx/_func.py')
funcpy = os.path.join(here, 'bqx/func.py')
generate_funcpy(funcpy_in, funcpy)
__version__ = '0.3.3'
__author__ = 'Takumi Sueda'
__author_email__ = 'takumi.sueda@fuller.co.jp'
__license__ = 'BSD License'
__classifiers__ = (
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: SQL',
'Topic :: Database',
'Topic :: Software Development :: Code Generators',
)
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
pytest.main(self.test_args)
setup(
name='bqx',
version=__version__,
author=__author__,
author_email=__author_email__,
url='https://github.com/fuller-inc/bqx',
description='Query generator for Google BigQuery and other SQL environments',
long_description=readme,
classifiers=__classifiers__,
packages=find_packages(exclude=['test*']),
license=__license__,
include_package_data=True,
test_suite='tests',
tests_require=['pytest'],
cmdclass={'test': PyTest},
)
| import os
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
from bqx._func_generator import generate_funcpy
here = os.path.abspath(os.path.dirname(__file__))
if 'build' in sys.argv:
import pypandoc
with open(os.path.join(here, 'README.md')) as f:
readme = pypandoc.convert(f.read(), 'rst', format='md')
with open(os.path.join(here, 'README.rst'), 'w') as f:
f.write(readme)
else:
readme = ''
if 'install' in sys.argv or 'test' in sys.argv:
funcpy_in = os.path.join(here, 'bqx/_func.py')
funcpy = os.path.join(here, 'bqx/func.py')
generate_funcpy(funcpy_in, funcpy)
__version__ = '0.3.2'
__author__ = 'Takumi Sueda'
__author_email__ = 'takumi.sueda@fuller.co.jp'
__license__ = 'BSD License'
__classifiers__ = (
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: SQL',
'Topic :: Database',
'Topic :: Software Development :: Code Generators',
)
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
pytest.main(self.test_args)
setup(
name='bqx',
version=__version__,
author=__author__,
author_email=__author_email__,
url='https://github.com/fuller-inc/bqx',
description='Query generator for Google BigQuery and other SQL environments',
long_description=readme,
classifiers=__classifiers__,
packages=find_packages(exclude=['test*']),
license=__license__,
include_package_data=True,
test_suite='tests',
tests_require=['pytest'],
cmdclass={'test': PyTest},
)
| bsd-3-clause | Python |
fb8ded55a0e09c2f953770e6bc1c7968c57f0c47 | Bump to version 1.12. | dimagi/django-digest,trey0/django-digest | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
setup(
name='django-digest',
version='1.12',
description=('An implementation of HTTP Digest Authentication for Django.'),
long_description=(
"""
django-digest supplies a middleware (HttpDigestMiddleware) that may installed to protect access
to all URLs, a decorator (@httpdigest) that may be applied to selected view functions, and a
simple class (HttpDigestAuthenticator) that can be used to implement custom authentication
scenarios.
django-digest also supplies a subclass of django.test.Client that is able to perform Digest and
Basic authentication.
"""
),
author='Akoha Inc.',
author_email='adminmail@akoha.com',
url='http://bitbucket.org/akoha/django-digest/',
packages=['django_digest',
'django_digest.backend',
'django_digest.migrations',
'django_digest.test',
'django_digest.test.methods'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
zip_safe=False,
)
| #!/usr/bin/env python
from setuptools import setup
setup(
name='django-digest',
version='1.11',
description=('An implementation of HTTP Digest Authentication for Django.'),
long_description=(
"""
django-digest supplies a middleware (HttpDigestMiddleware) that may installed to protect access
to all URLs, a decorator (@httpdigest) that may be applied to selected view functions, and a
simple class (HttpDigestAuthenticator) that can be used to implement custom authentication
scenarios.
django-digest also supplies a subclass of django.test.Client that is able to perform Digest and
Basic authentication.
"""
),
author='Akoha Inc.',
author_email='adminmail@akoha.com',
url='http://bitbucket.org/akoha/django-digest/',
packages=['django_digest',
'django_digest.backend',
'django_digest.migrations',
'django_digest.test',
'django_digest.test.methods'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
zip_safe=False,
)
| bsd-3-clause | Python |
1b8a9cabd5f329f59f19572c34e1df4f5bfa0302 | Make Pygame a dependency. | zeth/ledgrid | setup.py | setup.py | from __future__ import with_statement
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ledgrid_classifiers = [
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
]
with open("README.rst", "r") as fp:
ledgrid_long_description = fp.read()
setup(name="ledgrid",
description = 'An 8x8 grid of virtual LEDs implemented in Pygame.',
version=0.4,
author="Zeth",
author_email="theology@gmail.com",
py_modules=["ledgrid"],
install_requires=[
'pygame'
],
long_description=ledgrid_long_description,
license="BSD",
classifiers=ledgrid_classifiers,
url = 'https://github.com/zeth/ledgrid', # use the URL to the github repo
#download_url = 'https://github.com/zeth/ledgrid/tarball/0.1',
)
| from __future__ import with_statement
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ledgrid_classifiers = [
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
]
with open("README.rst", "r") as fp:
ledgrid_long_description = fp.read()
setup(name="ledgrid",
description = 'An 8x8 grid of virtual LEDs implemented in Pygame.',
version=0.3,
author="Zeth",
author_email="theology@gmail.com",
py_modules=["ledgrid"],
long_description=ledgrid_long_description,
license="BSD",
classifiers=ledgrid_classifiers,
url = 'https://github.com/zeth/ledgrid', # use the URL to the github repo
#download_url = 'https://github.com/zeth/ledgrid/tarball/0.1',
)
| bsd-3-clause | Python |
d4c064f150ac506c7b8f0dc9ea2103b1cb8cab0e | Fix license description | liuyug/django-alipay,liuyug/django-alipay | setup.py | setup.py | #!/usr/bin/env python
import os
from distutils.core import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-alipay',
version='0.1',
platforms=['noarch'],
packages=[
'alipay',
'alipay.create_direct_pay_by_user',
'alipay.create_direct_pay_by_user.dpn',
'alipay.create_partner_trade_by_buyer',
'alipay.create_partner_trade_by_buyer.ptn',
'alipay.send_goods_confirm_by_platform',
],
include_package_data=True,
license='GPLv3 License',
description='alipay api for django',
long_description=README,
url='https://github.com/liuyug/django-alipay',
author='Yugang LIU',
author_email='liuyug@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| #!/usr/bin/env python
import os
from distutils.core import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-alipay',
version='0.1',
platforms=['noarch'],
packages=[
'alipay',
'alipay.create_direct_pay_by_user',
'alipay.create_direct_pay_by_user.dpn',
'alipay.create_partner_trade_by_buyer',
'alipay.create_partner_trade_by_buyer.ptn',
'alipay.send_goods_confirm_by_platform',
],
include_package_data=True,
license='GPLv3 License',
description='alipay api for django',
long_description=README,
url='https://github.com/liuyug/django-alipay',
author='Yugang LIU',
author_email='liuyug@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GPLv3 License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| mit | Python |
ba14198b7ed847a26cb548710ef46ff83563abe8 | Bump version to 2.1.1 | bcb/jsonrpcclient | setup.py | setup.py | """setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.1.1',
description='Send JSON-RPC requests',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.io/',
license='MIT',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['future', 'jsonschema'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
| """setup.py"""
from codecs import open as codecs_open
from setuptools import setup
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.1.0',
description='Send JSON-RPC requests',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.io/',
license='MIT',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['future', 'jsonschema'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
| mit | Python |
2e6d862b26848ce182ab3ed82cf56b5e56a1b90f | Add the back buffer blit in the update function | fjacob21/pycon2015 | elpiwear/watchout_screen.py | elpiwear/watchout_screen.py | import Image
import ImageDraw
import ImageFont
import screen
class watchout_screen(screen.screen):
def __init__(self):
screen.screen.__init__(self)
self.image = Image.open('maximus.jpg')
self.image = self.image.resize((320,240))
self.draw_image(self.image , (0,0))
self.bfont = ImageFont.truetype('Montserrat-Regular.ttf', 66)
self.font = ImageFont.truetype('Montserrat-Regular.ttf', 65)
self.count = 0
self.draw_text('Watchout!!', (0, 100), self.bfont, fill=(255,0,0))
self.draw_text('Watchout!!', (0, 100), self.font, fill=(255,255,255))
def update(self):
self.draw_image(self.image , (0,0))
if self.count == 0 :
self.draw_text('Watchout!!', (0, 100), self.bfont, fill=(255,255,255))
self.draw_text('Watchout!!', (0, 100), self.font, fill=(255,0,0))
else:
self.draw_text('Watchout!!', (0, 100), self.bfont, fill=(255,0,0))
self.draw_text('Watchout!!', (0, 100), self.font, fill=(255,255,255))
self.count = 1 - self.count
screen.screen.update(self)
| import Image
import ImageDraw
import ImageFont
import screen
class watchout_screen(screen.screen):
def __init__(self):
screen.screen.__init__(self)
self.image = Image.open('maximus.jpg')
self.image = self.image.resize((320,240))
self.draw_image(self.image , (0,0))
self.bfont = ImageFont.truetype('Montserrat-Regular.ttf', 66)
self.font = ImageFont.truetype('Montserrat-Regular.ttf', 65)
self.count = 0
self.draw_text('Watchout!!', (0, 100), self.bfont, fill=(255,0,0))
self.draw_text('Watchout!!', (0, 100), self.font, fill=(255,255,255))
def update(self):
self.draw_image(self.image , (0,0))
if self.count == 0 :
self.draw_text('Watchout!!', (0, 100), self.bfont, fill=(255,255,255))
self.draw_text('Watchout!!', (0, 100), self.font, fill=(255,0,0))
else:
self.draw_text('Watchout!!', (0, 100), self.bfont, fill=(255,0,0))
self.draw_text('Watchout!!', (0, 100), self.font, fill=(255,255,255))
self.count = 1 - self.count
| mit | Python |
2026f1fd5b46ac98b9a26206beb036147a787e64 | update author email | CI-WATER/tethys,CI-WATER/tethys,CI-WATER/tethys | setup.py | setup.py | """
********************************************************************************
* Name: setup.py
* Author: Nathan Swain
* Created On: 2014
* Copyright: (c) Brigham Young University 2014
* License: BSD 2-Clause
********************************************************************************
"""
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
requires = []
version = '1.3.0'
setup(
name='tethys_platform',
version=version,
packages=find_packages(),
include_package_data=True,
license='BSD 2-Clause License',
description='Primary Tethys Platform Django Site Project',
long_description=README,
url='http://tethysplatform.org/',
author='Nathan Swain',
author_email='nswain@aquaveo.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
entry_points = {
'console_scripts': ['tethys=tethys_apps.cli:tethys_command',],
'paste.paster_create_template': ['tethys_app_scaffold=tethys_apps.pastetemplates:TethysAppTemplate',],
},
install_requires=requires,
)
| """
********************************************************************************
* Name: setup.py
* Author: Nathan Swain
* Created On: 2014
* Copyright: (c) Brigham Young University 2014
* License: BSD 2-Clause
********************************************************************************
"""
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
requires = []
version = '1.3.0'
setup(
name='tethys_platform',
version=version,
packages=find_packages(),
include_package_data=True,
license='BSD 2-Clause License',
description='Primary Tethys Platform Django Site Project',
long_description=README,
url='http://tethysplatform.org/',
author='Nathan Swain',
author_email='nathan.swain@byu.net',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
entry_points = {
'console_scripts': ['tethys=tethys_apps.cli:tethys_command',],
'paste.paster_create_template': ['tethys_app_scaffold=tethys_apps.pastetemplates:TethysAppTemplate',],
},
install_requires=requires,
)
| bsd-2-clause | Python |
3a6e9b2a0c5806f0e39bbadd768e9e9cbe503479 | Add RST `long_description` to setup.py for nice layout on Pypi. | dockermeetupsinbordeaux/docker-zabbix-sender,dockermeetupsinbordeaux/docker-zabbix-sender | setup.py | setup.py | #!/usr/bin/env python
import os
import sys
from setuptools import setup
from textwrap import dedent
NAME = "docker-zabbix-sender"
GITHUB_ORG_URL = "https://github.com/dockermeetupsinbordeaux"
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
exec(open('docker_zabbix_sender/version.py').read())
setup(
name=NAME,
version=version,
author="Tristan Carel",
author_email="tristan.carel@gmail.com",
url= GITHUB_ORG_URL + '/' + NAME,
download_url="{0}/{1}/tarball/v{2}".format(GITHUB_ORG_URL, NAME, version),
description="Push Docker containers statistics to Zabbix efficiently",
long_description=dedent("""
Rationale
---------
Docker Zabbix Sender delivers a daemon script that push to Zabbix statistics about Docker containers.
It leverages 2 interesting components:
- Zabbix maintains a tool titled ``zabbix-sender``.
It is meant to push `Zabbix trapper items`_ efficiently.
- Docker 1.5.0 comes with Docker Remote API version 17, providing a new `stats endpoint`_.
It allows the client to subscribe to a live feed delivering a container statistics.
The daemon script stands in the middle of those 2 components.
It collects Docker containers statistics and transforms them in Zabbix trapper events.
Published metrics
-----------------
The daemon script already emits a collection of metrics: CPU, memory, and network usage.
The module's API provide mechanism to easily extend the list of published events.
Documentation
-------------
The stable documentation is available on ReadTheDocs_
.. _Zabbix trapper items: https://www.zabbix.com/documentation/2.4/manual/config/items/itemtypes/trapper
.. _stats endpoint: https://docs.docker.com/reference/api/docker_remote_api_v1.17/#get-container-stats-based-on-resource-usage
.. _ReadTheDocs: http://docker-zabbix-sender.readthedocs.org/en/stable/
"""),
packages=['docker_zabbix_sender'],
install_requires=[
'docker-py >= 1.0.0',
],
zip_safe=False,
license="Apache license version 2.0",
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License',
],
entry_points = """
[console_scripts]
docker-zabbix-sender = docker_zabbix_sender.zabbix_sender:run
[docker_zabbix_sender.metrics]
containers-count = docker_zabbix_sender.stats:containers_count
"""
)
| #!/usr/bin/env python
import os
import sys
from setuptools import setup
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
requirements = [
'docker-py >= 1.0.0',
]
exec(open('docker_zabbix_sender/version.py').read())
setup(
name="docker-zabbix-sender",
version=version,
author="Tristan Carel",
author_email="tristan.carel@gmail.com",
url="https://github.com/dockermeetupsinbordeaux/docker-zabbix-sender",
download_url="https://github.com/dockermeetupsinbordeaux/docker-zabbix-sender/tarball/v" + version,
description="Push Docker containers information to Zabbix",
packages=['docker_zabbix_sender'],
install_requires=requirements,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License',
],
entry_points = """
[console_scripts]
docker-zabbix-sender = docker_zabbix_sender.zabbix_sender:run
[docker_zabbix_sender.metrics]
containers-count = docker_zabbix_sender.stats:containers_count
"""
)
| apache-2.0 | Python |
3aae6bc4eabd934c2c5ee60f43b649715f63b3b3 | Declare ourselves zip-sage | walles/px,walles/px | setup.py | setup.py | #!/usr/bin/env python
import subprocess
from setuptools import setup
requirements = None
with open('requirements.txt') as reqsfile:
requirements = reqsfile.readlines()
setup(
name='px',
version=subprocess.check_output(['git', 'describe', '--dirty']).decode('utf-8').strip(),
description='Cross Functional Process Explorer',
author='Johan Walles',
author_email='walles@gmail.com',
url='https://github.com/walles/px',
license='MIT',
packages=['px'],
install_requires=requirements,
# See: http://setuptools.readthedocs.io/en/latest/setuptools.html#setting-the-zip-safe-flag
zip_safe=True,
setup_requires=[
'pytest-runner',
'pytest-cov',
],
tests_require=[
'pytest',
],
entry_points={
'console_scripts': [
'px = px.px:main',
],
}
)
| #!/usr/bin/env python
import subprocess
from setuptools import setup
requirements = None
with open('requirements.txt') as reqsfile:
requirements = reqsfile.readlines()
setup(
name='px',
version=subprocess.check_output(['git', 'describe', '--dirty']).decode('utf-8').strip(),
description='Cross Functional Process Explorer',
author='Johan Walles',
author_email='walles@gmail.com',
url='https://github.com/walles/px',
license='MIT',
packages=['px'],
install_requires=requirements,
setup_requires=[
'pytest-runner',
'pytest-cov',
],
tests_require=[
'pytest',
],
entry_points={
'console_scripts': [
'px = px.px:main',
],
}
)
| mit | Python |
89a871c76207b5354fe00e7e0cdb401020e0461a | add python 3.5 support | thefab/tornadis,thefab/tornadis | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of tornadis library released under the MIT license.
# See the LICENSE file for more information.
from setuptools import setup, find_packages
DESCRIPTION = "tornadis is an async minimal redis client for tornado " \
"ioloop designed for performances (use C hiredis parser)"
try:
with open('PIP.rst') as f:
LONG_DESCRIPTION = f.read()
except IOError:
LONG_DESCRIPTION = DESCRIPTION
with open('pip-requirements.txt') as reqs:
install_requires = [
line for line in reqs.read().split('\n') if (line and not
line.startswith('--'))]
setup(
name='tornadis',
version="0.7.0",
author="Fabien MARTY",
author_email="fabien.marty@gmail.com",
url="https://github.com/thefab/tornadis",
packages=find_packages(),
license='MIT',
download_url='https://github.com/thefab/tornadis',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
'Topic :: System :: Distributed Computing',
'Topic :: Software Development',
],
entry_points="""
[console_scripts]
tornadis-benchmark = tornadis.benchmark:main
""",
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of tornadis library released under the MIT license.
# See the LICENSE file for more information.
from setuptools import setup, find_packages
DESCRIPTION = "tornadis is an async minimal redis client for tornado " \
"ioloop designed for performances (use C hiredis parser)"
try:
with open('PIP.rst') as f:
LONG_DESCRIPTION = f.read()
except IOError:
LONG_DESCRIPTION = DESCRIPTION
with open('pip-requirements.txt') as reqs:
install_requires = [
line for line in reqs.read().split('\n') if (line and not
line.startswith('--'))]
setup(
name='tornadis',
version="0.7.0",
author="Fabien MARTY",
author_email="fabien.marty@gmail.com",
url="https://github.com/thefab/tornadis",
packages=find_packages(),
license='MIT',
download_url='https://github.com/thefab/tornadis',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Utilities',
'Topic :: System :: Distributed Computing',
'Topic :: Software Development',
],
entry_points="""
[console_scripts]
tornadis-benchmark = tornadis.benchmark:main
""",
)
| mit | Python |
572fef2fb200369bcd2090f1e7d000928ba3926e | Replace `17.5.31` => `2017.5.31` | sgaynetdinov/py-vkontakte | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
setup(
name='py-vkontakte',
version='2017.5.31',
packages=['vk'],
url='https://github.com/sgaynetdinov/py-vkontakte',
license='MIT License',
author='Sergey Gaynetdinov',
author_email='s.gaynetdinov@gmail.com',
description='Python API wrapper around vk.com API',
long_description=open('README.md').read(),
keywords='vk.com, vk, vkontakte, vk api',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'requests',
],
)
| #!/usr/bin/env python
from setuptools import setup
setup(
name='py-vkontakte',
version='17.5.31',
packages=['vk'],
url='https://github.com/sgaynetdinov/py-vkontakte',
license='MIT License',
author='Sergey Gaynetdinov',
author_email='s.gaynetdinov@gmail.com',
description='Python API wrapper around vk.com API',
long_description=open('README.md').read(),
keywords='vk.com, vk, vkontakte, vk api',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'requests',
],
)
| mit | Python |
f57cf6b7f29904196e4d7c4bd290c4a6b11ed0fa | Update setup.py | michalbachowski/pylogging_utils,michalbachowski/pylogging_utils,michalbachowski/pylogging_utils | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import codecs
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.0.1'
def read(fname):
return codecs.open(
os.path.join(os.path.dirname(__file__), fname), 'r', 'utf-8').read()
readme = read('README.md')
setup(
name='logging_utils',
version=__version__,
description='',
long_description=readme,
author='Michał Bachowski',
author_email='michalbachowski@gmail.com',
url='https://github.com/michalbachowski/pylogging_utils',
py_modules=['logging_utils'],
include_package_data=True,
license="BSD",
zip_safe=False,
keywords='logging_utils',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Topic :: System :: Logging',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import codecs
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.0.1'
def read(fname):
return codecs.open(
os.path.join(os.path.dirname(__file__), fname), 'r', 'utf-8').read()
readme = read('README.rst')
setup(
name='logging_utils',
version=__version__,
description='',
long_description=readme,
author='Michał Bachowski',
author_email='michalbachowski@gmail.com',
url='https://github.com/michalbachowski/pylogging_utils',
py_modules=['logging_utils'],
include_package_data=True,
license="BSD",
zip_safe=False,
keywords='logging_utils',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Topic :: System :: Logging',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| mit | Python |
dc88f21b61ee55e737c827d098ce36d9e1cb9fa9 | Bump version as instructed by bamboo. Switch to increase the minor instead of patch | pbs/django-cms-smartsnippets,pbs/django-cms-smartsnippets,pbs/django-cms-smartsnippets,pbs/django-cms-smartsnippets | setup.py | setup.py | import os
from setuptools import setup, find_packages
README_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'README.rst')
dependencies = [
'django>=1.3,<1.5',
'django-cms>=2.3,<2.3.6',
'django-admin-extend'
]
dependency_links = [
'http://github.com/pbs/django-admin-extend/tarball/master#egg=django-admin-extend-0.0.1',
]
setup(
name='django-cms-smartsnippets',
version='0.2.0',
description='Parametrizable Django CMS snippets.',
long_description = open(README_PATH, 'r').read(),
author='Sever Banesiu',
author_email='banesiu.sever@gmail.com',
packages=find_packages(),
include_package_data=True,
license='BSD License',
install_requires=dependencies,
dependency_links=dependency_links,
setup_requires = ['s3sourceuploader', ],
)
| import os
from setuptools import setup, find_packages
README_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'README.rst')
dependencies = [
'django>=1.3,<1.5',
'django-cms>=2.3,<2.3.6',
'django-admin-extend'
]
dependency_links = [
'http://github.com/pbs/django-admin-extend/tarball/master#egg=django-admin-extend-0.0.1',
]
setup(
name='django-cms-smartsnippets',
version='0.1.23',
description='Parametrizable Django CMS snippets.',
long_description = open(README_PATH, 'r').read(),
author='Sever Banesiu',
author_email='banesiu.sever@gmail.com',
packages=find_packages(),
include_package_data=True,
license='BSD License',
install_requires=dependencies,
dependency_links=dependency_links,
setup_requires = ['s3sourceuploader', ],
)
| bsd-3-clause | Python |
0a4acdff376f3798b9d53023c51a54f7ffee7dc7 | Update profile.py, allow process_exception middleware to fire | omarish/django-cprofile-middleware,alexsanduk/django-cprofile-middleware | profile.py | profile.py | try:
import cProfile as profile
except ImportError:
import profile
import pstats
from cStringIO import StringIO
from django.conf import settings
class ProfilerMiddleware(object):
"""
Simple profile middleware to profile django views. To run it, add ?prof to
the URL like this:
http://localhost:8000/view/?prof
Optionally pass the following to modify the output:
?sort => Sort the output by a given metric. Default is time.
See http://docs.python.org/2/library/profile.html#pstats.Stats.sort_stats
for all sort options.
?count => The number of rows to display. Default is 100.
This is adapted from an example found here:
http://www.slideshare.net/zeeg/django-con-high-performance-django-presentation.
"""
def can(self, request):
return settings.DEBUG and 'prof' in request.GET and \
request.user is not None and request.user.is_staff
def process_view(self, request, callback, callback_args, callback_kwargs):
if self.can(request):
self.profiler = profile.Profile()
args = (request,) + callback_args
try:
return self.profiler.runcall(callback, *args, **callback_kwargs)
except:
# we want the process_exception middleware to fire
# https://code.djangoproject.com/ticket/12250
return
def process_response(self, request, response):
if self.can(request):
self.profiler.create_stats()
io = StringIO()
stats = pstats.Stats(self.profiler, stream=io)
stats.strip_dirs().sort_stats(request.GET.get('sort', 'time'))
stats.print_stats(int(request.GET.get('count', 100)))
response.content = '<pre>%s</pre>' % io.getvalue()
return response
| try:
import cProfile as profile
except ImportError:
import profile
import pstats
from cStringIO import StringIO
from django.conf import settings
class ProfilerMiddleware(object):
"""
Simple profile middleware to profile django views. To run it, add ?prof to
the URL like this:
http://localhost:8000/view/?prof
Optionally pass the following to modify the output:
?sort => Sort the output by a given metric. Default is time.
See http://docs.python.org/2/library/profile.html#pstats.Stats.sort_stats
for all sort options.
?count => The number of rows to display. Default is 100.
This is adapted from an example found here:
http://www.slideshare.net/zeeg/django-con-high-performance-django-presentation.
"""
def can(self, request):
return settings.DEBUG and 'prof' in request.GET and \
request.user is not None and request.user.is_staff
def process_view(self, request, callback, callback_args, callback_kwargs):
if self.can(request):
self.profiler = profile.Profile()
args = (request,) + callback_args
return self.profiler.runcall(callback, *args, **callback_kwargs)
def process_response(self, request, response):
if self.can(request):
self.profiler.create_stats()
io = StringIO()
stats = pstats.Stats(self.profiler, stream=io)
stats.strip_dirs().sort_stats(request.GET.get('sort', 'time'))
stats.print_stats(int(request.GET.get('count', 100)))
response.content = '<pre>%s</pre>' % io.getvalue()
return response
| mit | Python |
e13b8bfc0871414c6547d795cea2a5224161a4c9 | Update __init__.py | genkosta/django-editor-ymaps,genkosta/django-editor-ymaps,genkosta/django-editor-ymaps | djeym/__init__.py | djeym/__init__.py | # coding: utf-8
"""
$$$$$___$$$$$$__$$$$$__$$__$$__$$___$$
$$__$$______$$__$$______$$$$___$$$_$$$
$$__$$______$$__$$$$_____$$____$$_$_$$
$$__$$__$$__$$__$$_______$$____$$___$$
$$$$$____$$$$___$$$$$____$$____$$___$$
"""
__title__ = 'DjEYM (django-editor-ymaps)'
__version__ = '1.0.10'
__author__ = 'genkosta'
__license__ = 'MIT'
__copyright__ = 'Copyright (c) 2014 genkosta'
default_app_config = 'djeym.apps.DjeymConfig'
| # coding: utf-8
"""
$$$$$___$$$$$$__$$$$$__$$__$$__$$___$$
$$__$$______$$__$$______$$$$___$$$_$$$
$$__$$______$$__$$$$_____$$____$$_$_$$
$$__$$__$$__$$__$$_______$$____$$___$$
$$$$$____$$$$___$$$$$____$$____$$___$$
"""
__title__ = 'DjEYM (django-editor-ymaps)'
__version__ = '1.0.9'
__author__ = 'genkosta'
__license__ = 'MIT'
__copyright__ = 'Copyright (c) 2014 genkosta'
default_app_config = 'djeym.apps.DjeymConfig'
| mit | Python |
2aed3674c6bfa352d5bda02307bed837843ad1cf | update file for description | paolodragone/PyMzn | setup.py | setup.py | import re
import os
import codecs
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
def find_version(*parts):
# Open in Latin-1 so that we avoid encoding errors.
# Use codecs.open for Python 2 compatibility
with codecs.open(os.path.join(here, *parts), 'r', 'latin1') as f:
version_file = f.read()
# The version line must have the form
# __version__ = 'ver'
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
def read(*parts):
with codecs.open(os.path.join(here, *parts), encoding='utf-8') as f:
return f.read()
setup(
name = 'pymzn',
version = find_version('pymzn', '__init__.py'),
url = 'https://github.com/paolodragone/pymzn',
license = 'MIT',
author = 'Paolo Dragone',
author_email = 'dragone.paolo@gmail.com',
description = 'A Python wrapper for the MiniZinc tool pipeline.',
long_description = read('README.md'),
packages = find_packages(exclude=['*tests*']),
test_suite = "pymzn.tests",
install_requires = [
],
extra_require = {
'file-config': ['appdirs', 'pyyaml'],
'templates': ['jinja2']
},
platforms = 'any',
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Development Status :: 4 - Beta',
'Natural Language :: English',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'
],
entry_points = {
'console_scripts': [
'pymzn=pymzn:main'
]
}
)
| import re
import os
import codecs
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
def find_version(*parts):
# Open in Latin-1 so that we avoid encoding errors.
# Use codecs.open for Python 2 compatibility
with codecs.open(os.path.join(here, *parts), 'r', 'latin1') as f:
version_file = f.read()
# The version line must have the form
# __version__ = 'ver'
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
def read(*parts):
with codecs.open(os.path.join(here, *parts), encoding='utf-8') as f:
return f.read()
setup(
name = 'pymzn',
version = find_version('pymzn', '__init__.py'),
url = 'https://github.com/paolodragone/pymzn',
license = 'MIT',
author = 'Paolo Dragone',
author_email = 'dragone.paolo@gmail.com',
description = 'A Python wrapper for the MiniZinc tool pipeline.',
long_description = read('README.rst'),
packages = find_packages(exclude=['*tests*']),
test_suite = "pymzn.tests",
install_requires = [
],
extra_require = {
'file-config': ['appdirs', 'pyyaml'],
'templates': ['jinja2']
},
platforms = 'any',
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Development Status :: 4 - Beta',
'Natural Language :: English',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'
],
entry_points = {
'console_scripts': [
'pymzn=pymzn:main'
]
}
)
| mit | Python |
d1bacfc2331e7c06e5fefccdc5b5f527e7eb9d02 | add gffutils requirement | kipoi/kipoiseq,kipoi/kipoiseq | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
requirements = [
"kipoi>=0.4.2",
# "genomelake",
"pybedtools",
"pyfaidx",
"numpy",
"pandas",
"tqdm",
"colorlog",
"related>=0.6.0",
# sometimes required
"h5py",
"gffutils"
]
test_requirements = [
"bumpversion",
"wheel",
"epc",
"jedi",
"pytest>=3.3.1",
"pytest-xdist", # running tests in parallel
"pytest-pep8", # see https://github.com/kipoi/kipoi/issues/91
"pytest-cov",
"coveralls",
"scikit-learn",
"cython",
# "genomelake",
"keras",
"tensorflow",
"pybedtools"
]
setup(
name='kipoiseq',
version='0.1.1',
description="kipoiseq: sequence-based data-laoders for Kipoi",
author="Kipoi team",
author_email='avsec@in.tum.de',
url='https://github.com/kipoi/kipoiseq',
long_description="kipoiseq: sequence-based data-laoders for Kipoi",
packages=find_packages(),
install_requires=requirements,
extras_require={
"develop": test_requirements,
},
license="MIT license",
zip_safe=False,
keywords=["model zoo", "deep learning",
"computational biology", "bioinformatics", "genomics"],
test_suite='tests',
include_package_data=False,
tests_require=test_requirements
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
requirements = [
"kipoi>=0.4.2",
# "genomelake",
"pybedtools",
"pyfaidx",
"numpy",
"pandas",
"tqdm",
"colorlog",
"related>=0.6.0",
# sometimes required
"h5py",
]
test_requirements = [
"bumpversion",
"wheel",
"epc",
"jedi",
"pytest>=3.3.1",
"pytest-xdist", # running tests in parallel
"pytest-pep8", # see https://github.com/kipoi/kipoi/issues/91
"pytest-cov",
"coveralls",
"scikit-learn",
"cython",
# "genomelake",
"keras",
"tensorflow",
"pybedtools"
]
setup(
name='kipoiseq',
version='0.1.1',
description="kipoiseq: sequence-based data-laoders for Kipoi",
author="Kipoi team",
author_email='avsec@in.tum.de',
url='https://github.com/kipoi/kipoiseq',
long_description="kipoiseq: sequence-based data-laoders for Kipoi",
packages=find_packages(),
install_requires=requirements,
extras_require={
"develop": test_requirements,
},
license="MIT license",
zip_safe=False,
keywords=["model zoo", "deep learning",
"computational biology", "bioinformatics", "genomics"],
test_suite='tests',
include_package_data=False,
tests_require=test_requirements
)
| mit | Python |
eca68b722ce52f987eb07f03e60daf9a1f64bd96 | bump version | vipints/hgvs,counsyl/hgvs,vipints/hgvs,DasAllFolks/hgvs,DasAllFolks/hgvs | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
from pip.req import parse_requirements
import sys
description = ("This library provides a simple to use Python API for parsing, "
"formatting, and normalizing variant names specified in the "
"standard recommended by the Human Genome Variation Society "
"(HGVS).")
def main():
python_version = sys.version_info
if python_version < (2, 6):
print ("This library requires Python version >=2.6, "
"You have version %d.%d" % python_version[:2])
sys.exit(1)
setup(
name='pyhgvs',
version='0.9.3',
description='HGVS name parsing and formatting',
long_description=description,
author='Matt Rasmussen',
author_email='rasmus@counsyl.com',
packages=['pyhgvs', 'pyhgvs.tests'],
include_package_data=True,
package_data={
'': ['requirements-dev.txt'],
},
scripts=[],
install_requires=['pip>=1.2'],
tests_require=[str(line.req) for line in
parse_requirements('requirements-dev.txt')],
)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
from setuptools import setup
from pip.req import parse_requirements
import sys
description = ("This library provides a simple to use Python API for parsing, "
"formatting, and normalizing variant names specified in the "
"standard recommended by the Human Genome Variation Society "
"(HGVS).")
def main():
python_version = sys.version_info
if python_version < (2, 6):
print ("This library requires Python version >=2.6, "
"You have version %d.%d" % python_version[:2])
sys.exit(1)
setup(
name='pyhgvs',
version='0.9.2',
description='HGVS name parsing and formatting',
long_description=description,
author='Matt Rasmussen',
author_email='rasmus@counsyl.com',
packages=['pyhgvs', 'pyhgvs.tests'],
include_package_data=True,
package_data={
'': ['requirements-dev.txt'],
},
scripts=[],
install_requires=['pip>=1.2'],
tests_require=[str(line.req) for line in
parse_requirements('requirements-dev.txt')],
)
if __name__ == '__main__':
main()
| mit | Python |
fef30e00b2123ee9acbbee916fc87f11936b964d | update setup.py | mrGeen/metaseq,mrGeen/metaseq,agrimaldi/metaseq,agrimaldi/metaseq,mrGeen/metaseq,daler/metaseq,agrimaldi/metaseq,daler/metaseq,daler/metaseq | setup.py | setup.py | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup
import sys
import os
import numpy
version_py = os.path.join(os.path.dirname(__file__), 'metaseq', 'version.py')
version = open(version_py).read().split('=')[-1].strip().replace('"','')
long_description = """
metaseq is a Python framework for genomic data analysis (primarily
high-throughput sequencing, but can be used for much more). It ties
together other frameworks like BEDTools/pybedtools, samtools/pysam, bx-python,
HTSeq, gffutils, and matplotlib.
"""
setup(
name='metaseq',
version=version,
long_description=long_description,
install_requires=['bx-python', 'numpy', 'HTSeq', 'matplotlib', 'scipy',
'scikits.learn', 'pybedtools', 'gffutils',
'argparse'],
packages=['metaseq', 'metaseq.test', 'metaseq.test.data',],
package_data={'metaseq':['test/data/*']},
scripts=[
'metaseq/scripts/download_metaseq_example_data.py',
'metaseq/scripts/metaseq-cli',
],
author='Ryan Dale',
author_email='dalerr@niddk.nih.gov',
url='http://github.com/daler/metaseq',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Topic :: Scientific/Engineering :: Bio-Informatics']
)
| import ez_setup
ez_setup.use_setuptools()
from setuptools import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import sys
import os
import numpy
if 'setuptools.extension' in sys.modules:
m = sys.modules['setuptools.extension']
m.Extension.__dict__ = m._Extension.__dict__
version_py = os.path.join(os.path.dirname(__file__), 'metaseq', 'version.py')
version = open(version_py).read().split('=')[-1].strip().replace('"','')
exts = []
[Extension(
'metaseq.rebin',
['metaseq/rebin.pyx'],
include_dirs=[numpy.get_include()])]
long_description = """
metaseq is a Python framework for genomic data analysis (primarily
high-throughput sequencing, but can be used for much more). It ties
together other frameworks like BEDTools/pybedtools, samtools/pysam, bx-python,
HTSeq, gffutils, and matplotlib.
"""
setup(
name='metaseq',
version=version,
cmdclass = {'build_ext': build_ext},
long_description=long_description,
ext_modules=exts,
install_requires=['bx-python', 'numpy', 'HTSeq', 'matplotlib', 'scipy', 'scikits.learn', 'pybedtools', 'gffutils'],
packages=['metaseq', 'metaseq.test', 'metaseq.test.data'],
package_data={'metaseq':['test/data/*']},
scripts=['metaseq/scripts/download_metaseq_example_data.py'],
author='Ryan Dale',
author_email='dalerr@niddk.nih.gov',
url='http://github.com/daler/metaseq',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Topic :: Scientific/Engineering :: Bio-Informatics']
)
| mit | Python |
9c3bc4f2a141502b4121109d814fc18e90974744 | Bump the Development Status to Stable | dmtucker/keysmith | setup.py | setup.py | #!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Intended Audience :: End Users/Desktop',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
| #!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Intended Audience :: End Users/Desktop',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 4 - Beta',
],
)
| bsd-3-clause | Python |
c2a7bf92a0e7b0c1d02e238b50c701aabad9162e | bump version: 0.4.0 | petergardfjall/garminexport | setup.py | setup.py | """Setup information for the Garmin Connect activity exporter."""
from setuptools import setup, Extension
from os import path
# needed for Python 2.7 (ensures open() defaults to text mode with universal
# newlines, and accepts an argument to specify the text encoding.
from io import open
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
requires = [
'requests>=2.0,<3',
'python-dateutil~=2.4',
]
test_requires = [
'nose~=1.3',
'coverage~=4.2',
'mock~=2.0',
]
setup(name='garminexport',
version='0.4.0',
description='Garmin Connect activity exporter and backup tool',
long_description=long_description,
long_description_content_type='text/markdown',
author='Peter Gardfjäll',
author_email='peter.gardfjall.work@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
keywords='garmin export backup',
url='https://github.com/petergardfjall/garminexport',
license='Apache License 2.0',
project_urls={
'Source': 'https://github.com/petergardfjall/garminexport.git',
'Tracker': 'https://github.com/petergardfjall/garminexport/issues',
},
packages=[
'garminexport',
'garminexport.cli',
],
python_requires='>=3.5.*, <4',
install_requires=requires,
test_requires=test_requires,
entry_points={
'console_scripts': [
'garmin-backup = garminexport.cli.backup:main',
'garmin-get-activity = garminexport.cli.get_activity:main',
'garmin-upload-activity = garminexport.cli.upload_activity:main',
],
},
)
| """Setup information for the Garmin Connect activity exporter."""
from setuptools import setup, Extension
from os import path
# needed for Python 2.7 (ensures open() defaults to text mode with universal
# newlines, and accepts an argument to specify the text encoding.
from io import open
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
requires = [
'requests>=2.0,<3',
'python-dateutil~=2.4',
]
test_requires = [
'nose~=1.3',
'coverage~=4.2',
'mock~=2.0',
]
setup(name='garminexport',
version='0.3.0',
description='Garmin Connect activity exporter and backup tool',
long_description=long_description,
long_description_content_type='text/markdown',
author='Peter Gardfjäll',
author_email='peter.gardfjall.work@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
keywords='garmin export backup',
url='https://github.com/petergardfjall/garminexport',
license='Apache License 2.0',
project_urls={
'Source': 'https://github.com/petergardfjall/garminexport.git',
'Tracker': 'https://github.com/petergardfjall/garminexport/issues',
},
packages=[
'garminexport',
'garminexport.cli',
],
python_requires='>=3.5.*, <4',
install_requires=requires,
test_requires=test_requires,
entry_points={
'console_scripts': [
'garmin-backup = garminexport.cli.backup:main',
'garmin-get-activity = garminexport.cli.get_activity:main',
'garmin-upload-activity = garminexport.cli.upload_activity:main',
],
},
)
| apache-2.0 | Python |
963857463cd706260667995bd8817bd2facea5f0 | Include the official nydus release | numan/sunspear | setup.py | setup.py | #!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/disqus/nydus/tarball/master#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
| #!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/numan/nydus/tarball/0.10.4#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
| apache-2.0 | Python |
42f963ba0f015e8aa08586bdd42f8e39cfb5b8dd | bump up version | hermantai/sorno-py-scripts,hermantai/sorno-py-scripts | setup.py | setup.py | #!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import os
dependency_libs = [
"beautifulsoup4",
"dropbox",
"feedparser",
"google_api_python_client",
"humanfriendly",
"humanize",
"ipython",
"lxml",
# https://github.com/ahupp/python-magic
"python_magic",
"python_dateutil",
"python_twitter",
"requests",
"six",
"tzlocal",
]
script_dir = os.path.dirname(os.path.realpath(__file__))
with open("README.rst", "r") as f:
readme_text = f.read()
setup(
name="sorno-py-scripts",
version="0.43.6",
description="Herman Tai's python scripts all prefixed with \"sorno_\"",
long_description=readme_text,
author="Herman Tai",
author_email="htaihm@gmail.com",
license="APLv2",
url="https://github.com/hermantai/sorno-py-scripts",
download_url="https://github.com/hermantai/sorno-py-scripts/archive/master.zip",
packages=[
"sorno",
],
scripts=[
os.path.join("scripts", f)
for f in os.listdir(os.path.join(script_dir, "scripts"))
if f.endswith(".py")
], # include all py scripts under the scripts directory
requires=dependency_libs,
install_requires=dependency_libs,
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"License :: OSI Approved :: Apache Software License",
],
)
| #!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import os
dependency_libs = [
"beautifulsoup4",
"dropbox",
"feedparser",
"google_api_python_client",
"humanfriendly",
"humanize",
"ipython",
"lxml",
# https://github.com/ahupp/python-magic
"python_magic",
"python_dateutil",
"python_twitter",
"requests",
"six",
"tzlocal",
]
script_dir = os.path.dirname(os.path.realpath(__file__))
with open("README.rst", "r") as f:
readme_text = f.read()
setup(
name="sorno-py-scripts",
version="0.43.5",
description="Herman Tai's python scripts all prefixed with \"sorno_\"",
long_description=readme_text,
author="Herman Tai",
author_email="htaihm@gmail.com",
license="APLv2",
url="https://github.com/hermantai/sorno-py-scripts",
download_url="https://github.com/hermantai/sorno-py-scripts/archive/master.zip",
packages=[
"sorno",
],
scripts=[
os.path.join("scripts", f)
for f in os.listdir(os.path.join(script_dir, "scripts"))
if f.endswith(".py")
], # include all py scripts under the scripts directory
requires=dependency_libs,
install_requires=dependency_libs,
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"License :: OSI Approved :: Apache Software License",
],
)
| apache-2.0 | Python |
a8e472cfdec472d24e1f7b3b73c2c637a86e85dd | Fix xmldoc example | jaredly/pyjamas,jaredly/pyjamas,jaredly/pyjamas,jaredly/pyjamas | examples/xmldoc/XMLload.py | examples/xmldoc/XMLload.py | import pyjd
from pyjamas.ui.Button import Button
from pyjamas.ui.RootPanel import RootPanel
from pyjamas.ui.HTML import HTML
from pyjamas.ui.DockPanel import DockPanel
from pyjamas.ui import HasAlignment
from pyjamas.ui.Hyperlink import Hyperlink
from pyjamas.ui.VerticalPanel import VerticalPanel
from pyjamas.ui.ScrollPanel import ScrollPanel
from pyjamas import Window
from pyjamas.HTTPRequest import HTTPRequest
def create_xml_doc(text):
JS("""
var xmlDoc;
try { //Internet Explorer
xmlDoc=new ActiveXObject("Microsoft.XMLDOM");
xmlDoc.async="false";
xmlDoc.loadXML(text);
} catch(e) {
try { //Firefox, Mozilla, Opera, etc.
parser=new DOMParser();
xmlDoc=parser.parseFromString(text,"text/xml");
} catch(e) {
return null;
}
}
return xmlDoc;
""")
class XMLloader:
def __init__(self, panel):
self.panel = panel
def onCompletion(self, doc):
self.panel.doStuff(create_xml_doc(doc))
def onError(self, text, code):
self.panel.onError(text, code)
def onTimeout(self, text):
self.panel.onTimeout(text)
class XMLload:
def onModuleLoad(self):
HTTPRequest().asyncPost(None, None,
"contacts.xml", "",
XMLloader(self))
def onError(self, text, code):
# FIXME
pass
def onTimeout(self, text):
# FIXME
pass
def doStuff(self, xmldoc):
contacts = xmldoc.getElementsByTagName("contact")
len = contacts.length;
for i in range(len):
contactsDom = contacts.item(i)
firstNames = contactsDom.getElementsByTagName("firstname")
firstNameNode = firstNames.item(0)
firstName = firstNameNode.firstChild.nodeValue
RootPanel().add(HTML("firstname: %s" % str(firstName)))
if __name__ == '__main__':
pyjd.setup("./public/XMLload.html")
app = XMLload()
app.onModuleLoad()
pyjd.run()
| import pyjd
from pyjamas.ui.Button import Button
from pyjamas.ui.RootPanel import RootPanel
from pyjamas.ui.HTML import HTML
from pyjamas.ui.DockPanel import DockPanel
from pyjamas.ui import HasAlignment
from pyjamas.ui.Hyperlink import Hyperlink
from pyjamas.ui.VerticalPanel import VerticalPanel
from pyjamas.ui.ScrollPanel import ScrollPanel
from pyjamas import Window
from pyjamas.HTTPRequest import HTTPRequest
def create_xml_doc(text):
JS("""
try //Internet Explorer
{
xmlDoc=new ActiveXObject("Microsoft.XMLDOM");
xmlDoc.async="false";
xmlDoc.loadXML(text);
}
catch(e)
{
try //Firefox, Mozilla, Opera, etc.
{
parser=new DOMParser();
xmlDoc=parser.parseFromString(text,"text/xml");
}
catch(e)
{
return null;
}
}
""")
class XMLloader:
def __init__(self, panel):
self.panel = panel
def onCompletion(self, doc):
self.panel.doStuff(create_xml_doc(doc))
def onError(self, text, code):
self.panel.onError(text, code)
def onTimeout(self, text):
self.panel.onTimeout(text)
class XMLload:
def onModuleLoad(self):
HTTPRequest().asyncPost(None, None,
"contacts.xml", "",
XMLloader(self))
def onError(self, text, code):
# FIXME
pass
def onTimeout(self, text):
# FIXME
pass
def doStuff(self, xmldoc):
contacts = xmldoc.getElementsByTagName("contact")
len = contacts.length;
for i in range(len):
contactsDom = contacts.item(i)
firstNames = contactsDom.getElementsByTagName("firstname")
firstNameNode = firstNames.item(0)
firstName = firstNameNode.firstChild.nodeValue
RootPanel().add(HTML("firstname: %s" % str(firstName)))
if __name__ == '__main__':
pyjd.setup("./public/XMLload.html")
app = XMLload()
app.onModuleLoad()
pyjd.run()
| apache-2.0 | Python |
0e92abaf74fd79de127d783bd6a3814a3a23e08b | Include visualization component in setup.py | orbingol/NURBS-Python,orbingol/NURBS-Python | setup.py | setup.py | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import os
import re
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# Implemented from http://stackoverflow.com/a/41110107
def get_property(prop, project):
result = re.search(r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop), open(project + '/__init__.py').read())
return result.group(1)
project_name = 'geomdl'
setup(
name='NURBS-Python',
version=get_property('__version__', project_name),
description='B-Spline and NURBS library with grid generator',
author='Onur Rauf Bingol',
author_email='contact@onurbingol.net',
license='MIT',
url='https://github.com/orbingol/NURBS-Python',
packages=['geomdl', 'geomdl.visualization'],
long_description=read('README.rst'),
classifiers=[
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Mathematics'
]
)
| try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import os
import re
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# Implemented from http://stackoverflow.com/a/41110107
def get_property(prop, project):
result = re.search(r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop), open(project + '/__init__.py').read())
return result.group(1)
project_name = 'geomdl'
setup(
name='NURBS-Python',
version=get_property('__version__', project_name),
description='B-Spline and NURBS library with grid generator',
author='Onur Rauf Bingol',
author_email='contact@onurbingol.net',
license='MIT',
url='https://github.com/orbingol/NURBS-Python',
packages=['geomdl'],
long_description=read('README.rst'),
classifiers=[
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Mathematics'
]
)
| mit | Python |
78526e31cd18ac710ff74002741dd6e65b105233 | Use include_dirs in setup.py | terrelln/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard,terrelln/python-zstandard,indygreg/python-zstandard,terrelln/python-zstandard,terrelln/python-zstandard,indygreg/python-zstandard | setup.py | setup.py | #!/usr/bin/env python
# Copyright (c) 2016-present, Gregory Szorc
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
from setuptools import setup, Extension
zstd_sources = ['zstd/%s' % p for p in (
'common/entropy_common.c',
'common/fse_decompress.c',
'common/xxhash.c',
'common/zstd_common.c',
'compress/fse_compress.c',
'compress/huf_compress.c',
'compress/zbuff_compress.c',
'compress/zstd_compress.c',
'decompress/huf_decompress.c',
'decompress/zbuff_decompress.c',
'decompress/zstd_decompress.c',
'dictBuilder/divsufsort.c',
'dictBuilder/zdict.c',
)]
sources = zstd_sources + ['zstd.c']
# TODO compile with optimizations.
ext = Extension('zstd', sources,
include_dirs=[
'zstd',
'zstd/common',
'zstd/compress',
'zstd/decompress',
'zstd/dictBuilder',
],
)
setup(
name='zstandard',
version='0.0.1',
description='Zstandard bindings for Python',
long_description=open('README.rst', 'r').read(),
url='https://github.com/indygreg/python-zstandard',
author='Gregory Szorc',
author_email='gregory.szorc@gmail.com',
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: C',
'Programming Language :: Python :: 3.5',
],
keywords='zstandard zstd compression',
ext_modules=[ext],
test_suite='tests',
)
| #!/usr/bin/env python
# Copyright (c) 2016-present, Gregory Szorc
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
from setuptools import setup, Extension
zstd_sources = ['zstd/%s' % p for p in (
'common/entropy_common.c',
'common/fse_decompress.c',
'common/xxhash.c',
'common/zstd_common.c',
'compress/fse_compress.c',
'compress/huf_compress.c',
'compress/zbuff_compress.c',
'compress/zstd_compress.c',
'decompress/huf_decompress.c',
'decompress/zbuff_decompress.c',
'decompress/zstd_decompress.c',
'dictBuilder/divsufsort.c',
'dictBuilder/zdict.c',
)]
sources = zstd_sources + ['zstd.c']
# TODO compile with optimizations.
ext = Extension('zstd', sources,
extra_compile_args=[
'-Izstd',
'-Izstd/common',
'-Izstd/compress',
'-Izstd/decompress',
'-Izstd/dictBuilder',
],
)
setup(
name='zstandard',
version='0.0.1',
description='Zstandard bindings for Python',
long_description=open('README.rst', 'r').read(),
url='https://github.com/indygreg/python-zstandard',
author='Gregory Szorc',
author_email='gregory.szorc@gmail.com',
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: C',
'Programming Language :: Python :: 3.5',
],
keywords='zstandard zstd compression',
ext_modules=[ext],
test_suite='tests',
)
| bsd-3-clause | Python |
ce5ec0555c80b79f0d329b7b0b58e33d5226cd41 | Update our support list. | jeamland/wsproto,jeamland/wsproto,python-hyper/wsproto | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import sys
try:
from setuptools import setup
except ImportError:
from distuitls.core import setup
# Get the version
version_regex = r'__version__ = ["\']([^"\']*)["\']'
with open('wsproto/__init__.py', 'r') as f:
text = f.read()
match = re.search(version_regex, text)
if match:
version = match.group(1)
else:
raise RuntimeError("No version number found!")
# Stealing this from Cory Benfield who stole it from Kenneth Reitz
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
packages = [
'wsproto',
]
setup(
name='wsproto',
version=version,
description='WebSockets state-machine based protocol implementation',
author='Benno Rice',
author_email='benno@jeamland.net',
packages=packages,
package_data={'': ['LICENSE', 'README.md']},
package_dir={'wsproto': 'wsproto'},
include_package_data=True,
license='BSD License',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
install_requires=[
'h11 ~= 0.7.0', # means: 0.7.x where x >= 0
],
extras_require={
':python_version == "2.7" or python_version == "3.3"':
['enum34>=1.0.4, <2'],
}
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import sys
try:
from setuptools import setup
except ImportError:
from distuitls.core import setup
# Get the version
version_regex = r'__version__ = ["\']([^"\']*)["\']'
with open('wsproto/__init__.py', 'r') as f:
text = f.read()
match = re.search(version_regex, text)
if match:
version = match.group(1)
else:
raise RuntimeError("No version number found!")
# Stealing this from Cory Benfield who stole it from Kenneth Reitz
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
packages = [
'wsproto',
]
setup(
name='wsproto',
version=version,
description='WebSockets state-machine based protocol implementation',
author='Benno Rice',
author_email='benno@jeamland.net',
packages=packages,
package_data={'': ['LICENSE', 'README.md']},
package_dir={'wsproto': 'wsproto'},
include_package_data=True,
license='BSD License',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
install_requires=[
'h11 ~= 0.7.0', # means: 0.7.x where x >= 0
],
extras_require={
':python_version == "2.7" or python_version == "3.3"':
['enum34>=1.0.4, <2'],
}
)
| mit | Python |
9bc56a273804cf48a282b266d8206179f7b09ede | include all files on install and some minor fixes | kimus/django-blocks,kimus/django-blocks | setup.py | setup.py | #!/usr/bin/env python
import os
from setuptools import setup
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "django-blocks",
version = "1.0",
packages = ['blocks'],
include_package_data=True,
author = "Helder Rossa",
author_email = "kimus.linuxus@gmail.com",
description = "An easier way to build Web apps more quickly and with almost no code.",
license = "MIT License",
url = "https://github.com/kimus/django-blocks",
classifiers = [
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "django-blocks",
version = "1.0",
packages = find_packages(),
author = "Helder Rossa",
author_email = "kimus.linuxus@gmail.com",
description = "An easier way to build Web apps more quickly and with almost no code.",
license = "MIT License",
url = "https://github.com/kimus/django-blocks",
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
)
| mit | Python |
04c21325c15a7100b4bce154e06d6eda8b468778 | Fix 'TypeError: a bytes-like object is required, not 'str'' issue | marteinn/Skeppa,marteinn/Skeppa | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup, find_packages
import skeppa
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
package_exclude = ("tests*", "examples*")
packages = find_packages(exclude=package_exclude)
# Handle requirements
install_requires = [
"Fabric==1.10.2",
"PyCrypto==2.6.1",
"Jinja2==2.8",
"PyYAML==3.11",
"six==1.10.0",
]
# Convert markdown to rst
try:
from pypandoc import convert
long_description = convert("README.md", "rst")
except:
long_description = ""
setup(
name="skeppa",
version=skeppa.__version__,
description=("A docker deployment tool based on fabric and "
"docker-compose"),
long_description=long_description,
author="Marteinn",
author_email="martin@marteinn.se",
url="https://github.com/marteinn/skeppa",
packages=packages,
include_package_data=True,
install_requires=install_requires,
entry_points={
"console_scripts": [
"skeppa = skeppa.scripts.skeppa:main",
]
},
license="MIT",
zip_safe=False,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries",
"Topic :: System :: Software Distribution",
"Topic :: System :: Systems Administration",
],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup, find_packages
import skeppa
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
package_exclude = ("tests*", "examples*")
packages = find_packages(exclude=package_exclude)
# Handle requirements
install_requires = [
"Fabric==1.10.2",
"PyCrypto==2.6.1",
"Jinja2==2.8",
"PyYAML==3.11",
"six==1.10.0",
]
# Convert markdown to rst
try:
from pypandoc import convert
long_description = convert("README.md", "rst")
except:
long_description = ""
setup(
name="skeppa",
version=skeppa.__version__,
description=("A docker deployment tool based on fabric and "
"docker-compose"),
long_description=long_description,
author="Marteinn",
author_email="martin@marteinn.se",
url="https://github.com/marteinn/skeppa",
packages=packages,
include_package_data=True,
install_requires=install_requires,
entry_points={
"console_scripts": [
"skeppa = skeppa.scripts.skeppa:main",
]
},
license="MIT",
zip_safe=False,
classifiers=(
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries",
"Topic :: System :: Software Distribution",
"Topic :: System :: Systems Administration",
),
)
| mit | Python |
e70bf63ad7d7067b5cf940b7ad1888ad2033c30c | Mejora :P | carlosplanchon/outfancy | py-test.py | py-test.py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import outfancy
recordset = [(1, 'Feisbuk', '18-10-2015', '21:57:17', '18-10-2015', '21:57:17', 1234, 'Red social bla bla bla utilizada gente bla bla'), (2, 'Gugle', '18-10-2015', '21:57:44', '18-10-2015', '21:57:44', 12323, 'Motor de busqueda que categoriza resultados por links bla bla'), (3, 'Opera', '18-10-2015', '21:58:39', '18-10-2015', '21:58:39', 4324, 'Navegador de internerd, también es una disciplina musical, que, valga la redundancia, requiere de una brutal disciplina por parte de los interpretes.'), (4, 'Audi', '18-10-2015', '21:59:51', '18-10-2015', '21:59:51', 0, 'OOOO <-- Fabricante alemán de vehiculos de alta gama'), (5, 'The Simpsons', '18-10-2015', '22:0:44', '18-10-2015', '22:0:44', 0, 'Una sitcom que lleva veintipico de temporadas, si no la viste, se puede presumir que vivís bajo una piedra.'), (6, 'BMW', '18-10-2015', '22:1:18', '18-10-2015', '22:1:18', 98765, 'Fabricante alemán de autos de lujo'), (7, 'Yahoo', '18-10-2015', '22:1:56', '18-10-2015', '22:1:56', 53430, 'Expresión de alegría, o compañía gringolandesa.'), (8, 'Coca Cola', '18-10-2015', '22:3:19', '18-10-2015', '22:3:19', 200, 'Compañía que fabrica bebidas, y que no nos paga por ponerla en py-test :c'), (9, 'Pepsi', '18-10-2015', '22:3:40', '18-10-2015', '22:3:40', 340, 'Competidora de la anterior compañía mencionada, y que tampoco nos paga :c'), (10, 'GitHub', '18-10-2015', '22:4:42', '18-10-2015', '22:4:42', 563423, 'Plataforma de gestión de co0o0o0ó0digo'), (11, 'Johnny Walker', '18-10-2015', '22:5:34', '18-10-2015', '22:5:34', 4252, 'Whisky escocés'), (12, 'Mercury', '18-10-2015', '22:5:51', '18-10-2015', '22:5:51', 23423, 'Fabricante de motores para lanchas'), (13, 'Rolls Royce', '18-10-2015', '22:6:7', '18-10-2015', '22:6:7', 75832, 'Fabricante de motores para aviones, y autos de alta gama')]
# Show the raw recordset first, then the same data rendered by Outfancy.
input('--- Presione ENTER para ver el recordset tal cual es ---')
print(recordset)
input('--- Ahora presione ENTER para ver el recordset renderizado por Outfancy ---')
outfancy.render.render_recordset(recordset)
| #!/usr/bin/python3
# -*- coding: utf-8 -*-
import outfancy
recordset = [(1, 'Google', '18-10-2015', '21:56:51', '18-10-2015', '21:56:51', 0, ''), (2, 'Feisbuk', '18-10-2015', '21:57:17', '18-10-2015', '21:57:17', 0, 'Red social bla bla bla utilizada gente bla bla'), (3, 'Gugle', '18-10-2015', '21:57:44', '18-10-2015', '21:57:44', 0, 'Motor de busqueda que categoriza resultados por links bla bla'), (4, 'Opera', '18-10-2015', '21:58:39', '18-10-2015', '21:58:39', 0, 'Navegador de internerd, también es una disciplina musical, que, valga la redundancia, requiere de una brutal disciplina por parte de los interpretes.'), (5, 'Audi', '18-10-2015', '21:59:51', '18-10-2015', '21:59:51', 0, 'OOOO <-- Fabricante alemán de vehiculos de alta gama'), (6, 'The Simpsons', '18-10-2015', '22:0:44', '18-10-2015', '22:0:44', 0, 'Una sitcom que lleva veintipico de temporadas, si no la viste, se puede presumir que vivís bajo una piedra.'), (7, 'BMW', '18-10-2015', '22:1:18', '18-10-2015', '22:1:18', 0, 'Fabricante alemán de autos de lujo'), (8, 'Yahoo', '18-10-2015', '22:1:56', '18-10-2015', '22:1:56', 0, 'Expresión de alegría, o compañía gringolandesa.'), (9, 'Coca Cola', '18-10-2015', '22:3:19', '18-10-2015', '22:3:19', 0, 'Compañía que fabrica bebidas, y que no nos paga por ponerla en py-test :c'), (10, 'Pepsi', '18-10-2015', '22:3:40', '18-10-2015', '22:3:40', 0, 'Competidora de la anterior compañía mencionada, y que tampoco nos paga :c'), (11, 'GitHub', '18-10-2015', '22:4:42', '18-10-2015', '22:4:42', 0, 'Plataforma de gestión de co0o0o0ó0digo'), (12, 'Johnny Walker', '18-10-2015', '22:5:34', '18-10-2015', '22:5:34', 0, 'Whisky escocés'), (13, 'Mercury', '18-10-2015', '22:5:51', '18-10-2015', '22:5:51', 0, 'Fabricante de motores para lanchas'), (14, 'Rolls Royce', '18-10-2015', '22:6:7', '18-10-2015', '22:6:7', 0, 'Fabricante de motores para aviones, y autos de alta gama')]
# Show the raw recordset first, then the same data rendered by Outfancy.
input('--- Presione ENTER para ver el recordset tal cual es ---')
print(recordset)
input('--- Ahora presione ENTER para ver el recordset renderizado por Outfancy ---')
outfancy.render.render_recordset(recordset)
| cc0-1.0 | Python |
427292a82aea2a2291833ca0cb3f30cee2afd497 | Fix NR deploy notification bug | infoxchange/ixdjango | ixdjango/management/commands/newrelic_notify_deploy.py | ixdjango/management/commands/newrelic_notify_deploy.py | """
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
    """
    Notify New Relic of the new version
    """
    def handle_noargs(self, **options):
        # get the current git version
        git = Popen(('git', 'describe'), stdout=PIPE)
        ver, _ = git.communicate()
        ver = ver.strip()
        # Get the tagger name and email of that version
        git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
        username, _ = git.communicate()
        username = username.strip()
        # Environment variable wins; the Django setting is only consulted
        # when the variable is absent (avoids touching settings eagerly).
        try:
            ini_file = os.environ['NEW_RELIC_CONFIG_FILE']
        except KeyError:
            ini_file = settings.NEW_RELIC_CONFIG
        print "Informing New Relic...",
        call(['newrelic-admin',
              'record-deploy',
              ini_file,
              ver,  # description
              ver,  # revision
              '',  # changelog
              username])
| """
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
    """
    Notify New Relic of the new version
    """
    def handle_noargs(self, **options):
        # get the current git version
        git = Popen(('git', 'describe'), stdout=PIPE)
        ver, _ = git.communicate()
        ver = ver.strip()
        # Get the tagger name and email of that version
        git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
        username, _ = git.communicate()
        username = username.strip()
        # NOTE(review): .get() evaluates its default eagerly, so
        # settings.NEW_RELIC_CONFIG is accessed even when the environment
        # variable is set — confirm that is intended.
        ini_file = os.environ.get('NEW_RELIC_CONFIG_FILE',
                                  settings.NEW_RELIC_CONFIG)
        print "Informing New Relic...",
        call(['newrelic-admin',
              'record-deploy',
              ini_file,
              ver,  # description
              ver,  # revision
              '',  # changelog
              username])
| mit | Python |
153955d3c8ca8db233c073f4f3288efd2a16c828 | Update nessus_invoker.py | fedex279/OpenDXL | Nessus/nessus_invoker.py | Nessus/nessus_invoker.py | /* ===================================================
* Copyright 2017 Uha Durbha
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
import json
import logging
import os
import sys
import requests
from nessus_common import *
from dxlclient.client import DxlClient
from dxlclient.client_config import DxlClientConfig
from dxlclient.message import Message, Request
from bs4 import BeautifulSoup
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Bypasses the certificate related warnings while using requests
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# Define the DXL service names
service_name = "/tenable/service/nessus"
service_newscan = service_name + "/new_scan"
# Import common logging and configuration
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/..")
from common import *
# Configure local logger
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger(__name__)
# Create DXL configuration from file
config = DxlClientConfig.create_dxl_config_from_file(CONFIG_FILE)
# Create the client
with DxlClient(config) as client:
    # Connect to the fabric
    client.connect()
    # NOTE(review): `target` and `scan_type` presumably come from the star
    # imports above (nessus_common / common) — confirm.
    req = Request(service_newscan)
    query = "{0} {1}".format(target, scan_type)
    print query
    req.payload = query.encode()
    # Send the request and wait for a response (synchronous)
    res = client.sync_request(req)
    # Decode the received response
    response = res.payload.decode(encoding="UTF-8")
    soup = BeautifulSoup(response,'lxml')
    print "Scan results ready"
    # Writing scan results to an XML file
    filename = "nessus_report.xml"
    print('Saving scan results to {0}.'.format(filename))
    with open(filename, 'w') as f:
        f.write(response)
    # Displaying the scan results
    syn = soup.find_all('synopsis')
    out = soup.find_all('plugin_output')
    sol = soup.find_all('solution')
    name = soup.find_all('plugin_name')
    for x in range(0,len(syn),1):
        print name[x].string
        print "Synopsis :{0}".format(syn[x].string)
        print "Output :{0}".format(out[x].string)
        print "Solution :{0}".format(sol[x].string)
        print "\n"
    # Additionally searching for desired attributes
    attr = raw_input("Enter attribute to look for : ")
    attr = soup.find_all(attr)
    for x in range(0,len(attr),1):
        print attr[x].string
| /*Copyright 2017 Uha Durbha
*
*Licensed under the Apache License, Version 2.0 (the "License");
*you may not use this file except in compliance with the License.
*You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing, software
*distributed under the License is distributed on an "AS IS" BASIS,
*WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*See the License for the specific language governing permissions and
*limitations under the License.
*/
import json
import logging
import os
import sys
import requests
from nessus_common import *
from dxlclient.client import DxlClient
from dxlclient.client_config import DxlClientConfig
from dxlclient.message import Message, Request
from bs4 import BeautifulSoup
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Bypasses the certificate related warnings while using requests
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# Define the DXL service names
service_name = "/tenable/service/nessus"
service_newscan = service_name + "/new_scan"
# Import common logging and configuration
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/..")
from common import *
# Configure local logger
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger(__name__)
# Create DXL configuration from file
config = DxlClientConfig.create_dxl_config_from_file(CONFIG_FILE)
# Create the client
with DxlClient(config) as client:
    # Connect to the fabric
    client.connect()
    # NOTE(review): `target` and `scan_type` presumably come from the star
    # imports above (nessus_common / common) — confirm.
    req = Request(service_newscan)
    query = "{0} {1}".format(target, scan_type)
    print query
    req.payload = query.encode()
    # Send the request and wait for a response (synchronous)
    res = client.sync_request(req)
    # Decode the received response
    response = res.payload.decode(encoding="UTF-8")
    soup = BeautifulSoup(response,'lxml')
    print "Scan results ready"
    # Writing scan results to an XML file
    filename = "nessus_report.xml"
    print('Saving scan results to {0}.'.format(filename))
    with open(filename, 'w') as f:
        f.write(response)
    # Displaying the scan results
    syn = soup.find_all('synopsis')
    out = soup.find_all('plugin_output')
    sol = soup.find_all('solution')
    name = soup.find_all('plugin_name')
    for x in range(0,len(syn),1):
        print name[x].string
        print "Synopsis :{0}".format(syn[x].string)
        print "Output :{0}".format(out[x].string)
        print "Solution :{0}".format(sol[x].string)
        print "\n"
    # Additionally searching for desired attributes
    attr = raw_input("Enter attribute to look for : ")
    attr = soup.find_all(attr)
    for x in range(0,len(attr),1):
        print attr[x].string
| apache-2.0 | Python |
6d1dc290ec02f756d83ed7f0a69070943e273c5f | Fix api time format | TornikeNatsvlishvili/skivri.ge,TornikeNatsvlishvili/skivri.ge,TornikeNatsvlishvili/skivri.ge,TornikeNatsvlishvili/skivri.ge | frontend/app/blueprints/analytics.py | frontend/app/blueprints/analytics.py | from flask import Blueprint
from flask import request, abort, jsonify
import model.analytics as analytics
from playhouse.shortcuts import model_to_dict
import config
analytic_api = Blueprint('analytic_api', __name__)
PageViewTable, UserTable = analytics.initialize(config.settings['MYSQL_DB'], config.settings['MYSQL_USER'], config.settings['MYSQL_PASS'])
@analytic_api.route('/a.gif', methods=["GET"])
def report_pageview():
    """Tracking-pixel endpoint: record one page view and return 204."""
    if not request.args.get('url'):
        abort(404)
    # Prefer the proxy-forwarded address over the direct socket peer.
    ip = request.headers.get('X-Forwarded-For', request.remote_addr)
    url = request.args['url']
    title = request.args.get('t', 'unknown')
    referrer = request.args.get('ref', 'unknown')
    PageViewTable.connect()
    UserTable.connect()
    # Ensure a user row exists for this IP before recording the view.
    user, created = UserTable.get_or_create(ip=ip)
    UserTable.disconnect()
    PageViewTable.create(url=url, title=title, user=user.ip, referrer=referrer)
    PageViewTable.disconnect()
    return '', 204
@analytic_api.route('/pageviews', methods=["GET"])
def page_views():
pageviews = []
PageViewTable.connect()
for view in PageViewTable.select(PageViewTable.date):
pageviews.append(view.date.strftime('%B %d, %Y %H:%M:%S'))
return jsonify({'pageviews': pageviews}) | from flask import Blueprint
from flask import request, abort, jsonify
import model.analytics as analytics
from playhouse.shortcuts import model_to_dict
import config
analytic_api = Blueprint('analytic_api', __name__)
PageViewTable, UserTable = analytics.initialize(config.settings['MYSQL_DB'], config.settings['MYSQL_USER'], config.settings['MYSQL_PASS'])
@analytic_api.route('/a.gif', methods=["GET"])
def report_pageview():
    """Tracking-pixel endpoint: record one page view and return 204."""
    if not request.args.get('url'):
        abort(404)
    # Prefer the proxy-forwarded address over the direct socket peer.
    ip = request.headers.get('X-Forwarded-For', request.remote_addr)
    url = request.args['url']
    title = request.args.get('t', 'unknown')
    referrer = request.args.get('ref', 'unknown')
    PageViewTable.connect()
    UserTable.connect()
    # Ensure a user row exists for this IP before recording the view.
    user, created = UserTable.get_or_create(ip=ip)
    UserTable.disconnect()
    PageViewTable.create(url=url, title=title, user=user.ip, referrer=referrer)
    PageViewTable.disconnect()
    return '', 204
@analytic_api.route('/pageviews', methods=["GET"])
def page_views():
pageviews = []
PageViewTable.connect()
for view in PageViewTable.select(PageViewTable.date):
pageviews.append(view.date)
return jsonify({'pageviews': pageviews}) | mit | Python |
72ad6a987923c75ea97119dc4238d7d832c596a3 | Update AudioCapture.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | service/AudioCapture.py | service/AudioCapture.py | audiocapture = Runtime.createAndStart("audiocapture","AudioCapture")
# Demo 1: record five seconds with the default format, then play it back.
# Start capturing audio
audiocapture.captureAudio()
# It will record for 5 seconds
sleep(5)
# Then stop recording audio
audiocapture.stopAudioCapture()
# Play the audio that was recorded
audiocapture.playAudio()
sleep(5)
# Demo 2: same record/play cycle after setting an explicit audio format.
# Setting the recording audio format
# Sample rate: 8000, 11025, 16000, 22050 or 44100
sampleRate = 16000;
# 8 bit or 16 bit
sampleSizeInBits = 16;
# 1 or 2 channels
channels = 1;
# bits are signed or unsigned
bitSigned = True;
# bigEndian or littleEndian
bigEndian = False;
# Apply the audio format before capturing again
audiocapture.setAudioFormat(sampleRate, sampleSizeInBits, channels, bitSigned, bigEndian)
# Start capturing audio
audiocapture.captureAudio()
# It will record for 5 seconds
sleep(5)
# Then stop recording audio
audiocapture.stopAudioCapture()
# Play the audio that was recorded
audiocapture.playAudio()
| from org.myrobotlab.service import Runtime
from org.myrobotlab.service import AudioCapture
from time import sleep
# Start the MyRobotLab AudioCapture service.
audiocapture = Runtime.createAndStart("audiocapture","AudioCapture")
# Start capturing audio
audiocapture.captureAudio()
# It will record for 5 seconds
sleep(5)
# Then stop recording audio
audiocapture.stopAudioCapture()
# Play the audio that was recorded
audiocapture.playAudio()
| apache-2.0 | Python |
658745b45ae46753b3894edc011947c8092f96a9 | Update motion_sensor.py | jdp7689/motion_security,jdp7689/motion_security | src/motion_sensor.py | src/motion_sensor.py | #!/usr/bin/python
'''
MIT License
Copyright (c) 2016 Joshua Palmer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import RPi.GPIO as GPIO
import time
INPUT_PIN = 7
SLEEP_TIME_SECONDS = .1
class MotionSensorTimeoutException(Exception):
    # Raised by MotionSensor.wait_for_motion() when no motion is seen in time.
    pass
class MotionSensor():
    """Thin wrapper around a GPIO-wired motion sensor input."""

    def __init__(self, input_pin=INPUT_PIN):
        # Board-numbered pin carrying the sensor's digital output.
        self.input_pin = input_pin
        GPIO.setmode(GPIO.BOARD)
        GPIO.setup(input_pin, GPIO.IN)

    def wait_for_motion(self, timeout=5.0):
        '''
        This is a blocking function
        param timeout: timeout in seconds. Set to false to block forever.
        '''
        waited = 0.0
        while True:
            # Give up once the deadline has passed (unless blocking forever).
            if timeout is not False and float(waited) > timeout:
                raise MotionSensorTimeoutException
            if GPIO.input(self.input_pin):
                break
            time.sleep(SLEEP_TIME_SECONDS)
            if timeout is not False:
                waited += SLEEP_TIME_SECONDS
def main():
    # Demo entry point: wait (default 5s) for motion and report the outcome.
    motion_sensor = MotionSensor()
    try:
        motion_sensor.wait_for_motion()
        print "Motion sensed!"
    except MotionSensorTimeoutException:
        print "No motion detected!"
if __name__ == "__main__":
    main()
| #!/usr/bin/python
import RPi.GPIO as GPIO
import time
INPUT_PIN = 7
SLEEP_TIME_SECONDS = .1
class MotionSensorTimeoutException(Exception):
    # Raised by MotionSensor.wait_for_motion() when no motion is seen in time.
    pass
class MotionSensor():
    # Thin wrapper around a GPIO-wired motion sensor input.
    def __init__(self, input_pin=INPUT_PIN):
        self.input_pin = input_pin
        GPIO.setmode(GPIO.BOARD)
        GPIO.setup(input_pin, GPIO.IN)
    def wait_for_motion(self, timeout=5.0):
        '''
        This is a blocking function
        param timeout: timeout in seconds. Set to false to block forever.
        '''
        curr_time = 0.0
        while True:
            #Raise exception for timeout
            if timeout is not False and float(curr_time)>timeout:
                raise MotionSensorTimeoutException
            elif GPIO.input(self.input_pin):
                break
            else:
                time.sleep(SLEEP_TIME_SECONDS)
                if timeout is not False:
                    curr_time += SLEEP_TIME_SECONDS
def main():
    # Demo entry point: wait (default 5s) for motion and report the outcome.
    motion_sensor = MotionSensor()
    try:
        motion_sensor.wait_for_motion()
        print "Motion sensed!"
    except MotionSensorTimeoutException:
        print "No motion detected!"
if __name__ == "__main__":
    main()
| mit | Python |
1b8ad8da3d42b38ef1a2a6bbcc156e8fc820db0c | add dbaccessor to get full reverse indices from a list of case ids | dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/ex-submodules/casexml/apps/case/dbaccessors/related.py | corehq/ex-submodules/casexml/apps/case/dbaccessors/related.py | from collections import namedtuple
from casexml.apps.case.sharedmodels import CommCareCaseIndex
def get_indexed_case_ids(domain, case_ids):
    """
    Given a base list of case ids, gets all ids of cases they reference (parent cases)
    """
    from casexml.apps.case.models import CommCareCase
    view_keys = []
    for case_id in case_ids:
        view_keys.append([domain, case_id, 'index'])
    rows = CommCareCase.get_db().view(
        'case/related',
        keys=view_keys,
        reduce=False,
    )
    return [row['value']['referenced_id'] for row in rows]
def get_reverse_indexed_case_ids(domain, case_ids):
    """
    Given a base list of case ids, gets all ids of cases that reference them (child cases)
    """
    index_infos = get_all_reverse_indices_info(domain, case_ids)
    return [index_info.case_id for index_info in index_infos]
def get_reverse_indexed_cases(domain, case_ids):
    """
    Given a base list of cases, gets all wrapped cases that directly
    reference them (child cases).
    """
    from casexml.apps.case.models import CommCareCase
    view_keys = []
    for case_id in case_ids:
        view_keys.append([domain, case_id, 'reverse_index'])
    return CommCareCase.view(
        'case/related',
        keys=view_keys,
        reduce=False,
        include_docs=True,
    )
# Lightweight description of a single case index (one edge in the case graph).
IndexInfo = namedtuple('IndexInfo', ['case_id', 'identifier', 'referenced_id', 'referenced_type'])
def get_all_reverse_indices_info(domain, case_ids):
    """Return IndexInfo tuples for every reverse index pointing at case_ids."""
    from casexml.apps.case.models import CommCareCase

    def _index_info_from_row(row):
        # row['key'] is the view key [domain, referenced_id, 'reverse_index'].
        return IndexInfo(
            case_id=row['id'],
            identifier=row['value']['identifier'],
            referenced_id=row['key'][1],
            referenced_type=row['value']['referenced_type'],
        )

    rows = CommCareCase.get_db().view(
        'case/related',
        keys=[[domain, case_id, 'reverse_index'] for case_id in case_ids],
        reduce=False,
    )
    return map(_index_info_from_row, rows)
def get_reverse_indices_json(case):
    # Raw view rows (index dicts) for indices that point at `case`.
    from casexml.apps.case.models import CommCareCase
    return CommCareCase.get_db().view(
        "case/related",
        key=[case['domain'], case['_id'], "reverse_index"],
        reduce=False,
        wrapper=lambda r: r['value']
    ).all()
def get_reverse_indices(case):
    # Same rows as above, wrapped into CommCareCaseIndex objects.
    return [CommCareCaseIndex.wrap(raw)
            for raw in get_reverse_indices_json(case)]
| from casexml.apps.case.sharedmodels import CommCareCaseIndex
def get_indexed_case_ids(domain, case_ids):
    """
    Given a base list of case ids, gets all ids of cases they reference (parent cases)
    """
    from casexml.apps.case.models import CommCareCase
    keys = [[domain, case_id, 'index'] for case_id in case_ids]
    return [r['value']['referenced_id'] for r in CommCareCase.get_db().view(
        'case/related',
        keys=keys,
        reduce=False,
    )]
def get_reverse_indexed_case_ids(domain, case_ids):
    """
    Given a base list of case ids, gets all ids of cases that reference them (child cases)
    """
    from casexml.apps.case.models import CommCareCase
    keys = [[domain, case_id, 'reverse_index'] for case_id in case_ids]
    return [r['value']['referenced_id'] for r in CommCareCase.get_db().view(
        'case/related',
        keys=keys,
        reduce=False,
    )]
def get_reverse_indexed_cases(domain, case_ids):
    """
    Given a base list of cases, gets all wrapped cases that directly
    reference them (child cases).
    """
    from casexml.apps.case.models import CommCareCase
    keys = [[domain, case_id, 'reverse_index'] for case_id in case_ids]
    return CommCareCase.view(
        'case/related',
        keys=keys,
        reduce=False,
        include_docs=True,
    )
def get_reverse_indices_json(case):
    # Raw view rows (index dicts) for indices that point at `case`.
    from casexml.apps.case.models import CommCareCase
    return CommCareCase.get_db().view(
        "case/related",
        key=[case['domain'], case['_id'], "reverse_index"],
        reduce=False,
        wrapper=lambda r: r['value']
    ).all()
def get_reverse_indices(case):
    # Same rows as above, wrapped into CommCareCaseIndex objects.
    return [CommCareCaseIndex.wrap(raw)
            for raw in get_reverse_indices_json(case)]
| bsd-3-clause | Python |
2555d56010df92a4a76e56522e9576cfb0e5446b | Remove junk and fix wrong method name | midokura/python-midonetclient,midonet/python-midonetclient,midokura/python-midonetclient,midonet/python-midonetclient | test/functional/test_dhcp.py | test/functional/test_dhcp.py | # Copyright 2012 Midokura Japan KK
import os
import sys
import unittest
from webob import exc
TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir,
os.pardir))
sys.path.insert(0, TOPDIR)
from midonet.client import MidonetClient
from midonet import utils
class TestBridge(unittest.TestCase):
    """End-to-end checks for the Midonet DHCP REST resources."""
    # NOTE(review): 'tenent' looks like a typo for 'tenant'; it appears
    # unused in this file.
    tenent = None
    bridge = None
    bridge_uuid = None
    test_tenant_name = "TEST_TENANT"
    test_bridge_name = "TEST_BRIDGE"
    @classmethod
    def setUpClass(cls):
        mc = MidonetClient()
        cls.tenant = mc.tenants()
        cls.bridge = mc.bridges()
        cls.dhcp = mc.dhcps()
        try:
            cls.tenant.create(cls.test_tenant_name)
            r, c = cls.bridge.create(cls.test_tenant_name, cls.test_bridge_name)
            cls.bridge_uuid = utils.get_uuid(r)
        except:
            # NOTE(review): bare except silently ignores setup failures —
            # presumably to tolerate fixtures left over from a previous
            # run; confirm.
            pass
    @classmethod
    def tearDownClass(cls):
        cls.tenant.delete(cls.test_tenant_name)
    def test_create_list_get_delete(self):
        # Exercise the full lifecycle of a DHCP subnet on the test bridge.
        r, c = self.dhcp.create(self.test_tenant_name, self.bridge_uuid,
                                '172.16.0.0', 16, '172.16.0.1')
        r, c = self.dhcp.list(self.test_tenant_name, self.bridge_uuid)
        self.dhcp.get(self.test_tenant_name, self.bridge_uuid, '172.16.0.0_16')
        self.dhcp.delete(self.test_tenant_name, self.bridge_uuid, '172.16.0.0_16')
        #TODO: Fix mgmt to return 404. Currently server return 500
        #self.assertRaises(exc.HTTPNotFound, self.dhcp.get, self.test_tenant_name, self.bridge_uuid, '172.16.0.0_16')
if __name__ == '__main__':
    unittest.main()
| # Copyright 2012 Midokura Japan KK
import os
import sys
import unittest
from webob import exc
TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir,
os.pardir))
sys.path.insert(0, TOPDIR)
from midonet.client import MidonetClient
from midonet import utils
class TestBridge(unittest.TestCase):
    """End-to-end checks for the Midonet DHCP REST resources."""
    # NOTE(review): 'tenent' looks like a typo for 'tenant'; it appears
    # unused in this file.
    tenent = None
    bridge = None
    bridge_uuid = None
    test_tenant_name = "TEST_TENANT"
    test_bridge_name = "TEST_BRIDGE"
    @classmethod
    def setUpClass(cls):
        mc = MidonetClient()
        cls.tenant = mc.tenants()
        cls.bridge = mc.bridges()
        cls.dhcp = mc.dhcps()
        try:
            cls.tenant.create(cls.test_tenant_name)
            r, c = cls.bridge.create(cls.test_tenant_name, cls.test_bridge_name)
            cls.bridge_uuid = utils.get_uuid(r)
        except:
            # NOTE(review): bare except silently ignores setup failures —
            # presumably to tolerate fixtures left over from a previous
            # run; confirm.
            pass
    @classmethod
    def tearDownClass(cls):
        cls.tenant.delete(cls.test_tenant_name)
    def test_list(self):
        self.bridge.list(self.test_tenant_name)
    def test_create_get_delete(self):
        # Exercise the full lifecycle of a DHCP subnet on the test bridge.
        r, c = self.dhcp.create(self.test_tenant_name, self.bridge_uuid,
                                '172.16.0.0', 16, '172.16.0.1')
        r, c = self.dhcp.list(self.test_tenant_name, self.bridge_uuid)
        self.dhcp.get(self.test_tenant_name, self.bridge_uuid, '172.16.0.0_16')
        self.dhcp.delete(self.test_tenant_name, self.bridge_uuid, '172.16.0.0_16')
        #TODO: Fix mgmt to return 404. Currently server return 500
        #self.assertRaises(exc.HTTPNotFound, self.dhcp.get, self.test_tenant_name, self.bridge_uuid, '172.16.0.0_16')
if __name__ == '__main__':
    unittest.main()
| apache-2.0 | Python |
f6f9ce94c33ad959cfd3f9cbb1b19e8bff17126d | Fix python object searilization problem in yaml | torgartor21/solar,Mirantis/solar,torgartor21/solar,loles/solar,zen/solar,openstack/solar,pigmej/solar,Mirantis/solar,loles/solar,dshulyak/solar,loles/solar,pigmej/solar,zen/solar,zen/solar,Mirantis/solar,Mirantis/solar,openstack/solar,CGenie/solar,pigmej/solar,openstack/solar,dshulyak/solar,CGenie/solar,loles/solar,zen/solar | solar/solar/utils.py | solar/solar/utils.py | import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
    """Ensure that directory *dir_path* exists, creating it if needed."""
    logger.debug(u'Creating directory %s', dir_path)
    if os.path.isdir(dir_path):
        return
    os.makedirs(dir_path)
def yaml_load(file_path):
    # Parse one YAML document from file_path.
    # NOTE(review): yaml.load without an explicit Loader can construct
    # arbitrary Python objects; only use on trusted files.
    with io.open(file_path) as f:
        result = yaml.load(f)
    return result
def yaml_dump(yaml_data):
    # Serialize to block-style YAML; safe_dump rejects arbitrary Python
    # objects so the output stays plain, portable YAML.
    return yaml.safe_dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
    """Overwrite *file_path* with the given text."""
    with open(file_path, 'w') as output_file:
        output_file.write(data)
def yaml_dump_to(data, file_path):
    # Serialize `data` with yaml_dump() and persist it to file_path.
    write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
    """Yield the absolute path of every filesystem entry matching *mask*."""
    matches = glob.glob(mask)
    for match in matches:
        yield os.path.abspath(match)
def load_by_mask(mask):
    """Parse every YAML file matching *mask*; returns a list of documents."""
    return [yaml_load(path) for path in find_by_mask(mask)]
def generate_uuid():
    """Return a freshly generated random UUID (version 4) as a string."""
    new_id = uuid4()
    return str(new_id)
def render_template(template_path, params):
    # Render a Jinja2 template file with `params` as the template variables.
    with io.open(template_path) as f:
        temp = Template(f.read())
    return temp.render(**params)
def read_config():
    # Load the project configuration (path hard-coded for the Vagrant box).
    return yaml_load('/vagrant/config.yml')
| import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
    # Create dir_path (including parents) if it does not exist yet.
    logger.debug(u'Creating directory %s', dir_path)
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)
def yaml_load(file_path):
    # Parse one YAML document from file_path.
    with io.open(file_path) as f:
        result = yaml.load(f)
    return result
def yaml_dump(yaml_data):
    # NOTE(review): yaml.dump may emit Python-specific object tags for
    # non-plain objects; yaml.safe_dump would keep the output plain YAML.
    return yaml.dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
    # Overwrite file_path with data.
    with open(file_path, 'w') as f:
        f.write(data)
def yaml_dump_to(data, file_path):
    # Serialize and persist in one step.
    write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
    # Yield absolute paths for every glob match.
    for file_path in glob.glob(mask):
        yield os.path.abspath(file_path)
def load_by_mask(mask):
    # Parse every YAML file matching the glob mask.
    result = []
    for file_path in find_by_mask(mask):
        result.append(yaml_load(file_path))
    return result
def generate_uuid():
    # Random UUID4 rendered as a string.
    return str(uuid4())
def render_template(template_path, params):
    # Render a Jinja2 template file with params.
    with io.open(template_path) as f:
        temp = Template(f.read())
    return temp.render(**params)
def read_config():
    # Hard-coded config location for the Vagrant environment.
    return yaml_load('/vagrant/config.yml')
| apache-2.0 | Python |
bef1a3311d1bee0597690a7e7b7c0024ac499463 | update test | Yubico/yubikey-manager,Yubico/yubikey-manager | test/on_yubikey/test_fido.py | test/on_yubikey/test_fido.py | import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.U2F))
class TestFidoFunctions(DestructiveYubikeyTestCase):
    """FIDO PIN management smoke test (runs against a real YubiKey)."""
    def test_fido_set_pin(self):
        # Device is expected to start without a PIN, then report one once set.
        output = ykman_cli('fido', 'info')
        self.assertIn('PIN is not set.', output)
        ykman_cli('fido', 'set-pin', '--new-pin', '123abc')
        output = ykman_cli('fido', 'info')
        self.assertIn('PIN is set', output)
| import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.U2F))
class TestFidoFunctions(DestructiveYubikeyTestCase):
    """FIDO PIN management smoke test (runs against a real YubiKey)."""
    def test_fido_set_pin(self):
        # Device is expected to start without a PIN, then report one once set.
        output = ykman_cli('--log-level', 'DEBUG', 'fido', 'info')
        self.assertIn('PIN is not set.', output)
        ykman_cli('fido', 'set-pin', '--new-pin', '123abc')
        output = ykman_cli('fido', 'info')
        self.assertIn('PIN is set', output)
| bsd-2-clause | Python |
e055bf11abb78919a4a4dc08a40cfc9a0a209656 | fix credentials retrieval from xml file | lakewik/storj-gui-client | UI/utilities/account_manager.py | UI/utilities/account_manager.py | import xml.etree.cElementTree as ET
class AccountManager:
    """Persists Storj account credentials in storj_account_conf.xml."""
    def __init__(self, login_email=None, password=None):
        self.login_email = login_email
        self.password = password
    def save_account_credentials(self):
        # Serialize <account><credentials>...</credentials></account>.
        # NOTE(review): the password is written to disk in plain text.
        root = ET.Element("account")
        doc = ET.SubElement(root, "credentials")
        i = 0  # NOTE(review): unused leftover variable.
        ET.SubElement(doc, "login_email").text = str(self.login_email)
        ET.SubElement(doc, "password").text = str(self.password)
        ET.SubElement(doc, "logged_in").text = str("1")
        tree = ET.ElementTree(root)
        tree.write("storj_account_conf.xml")
    def if_logged_in(self):
        # Read the logged_in flag from the saved config; a missing or
        # unreadable file counts as logged out.
        logged_in = "0"
        try:
            et = ET.parse("storj_account_conf.xml")
            for tags in et.iter('logged_in'):
                logged_in = tags.text
        except:
            logged_in = "0"
            print "Error in Account Manager login"
        if logged_in == "1":
            return True
        else:
            return False
    def logout(self):
        # NOTE(review): placeholder — prints a debug value, no real logout.
        print 1
    def get_user_password(self):
        # Return the stored password, or "" when unavailable.
        password = ""
        try:
            et = ET.parse("storj_account_conf.xml")
            for tags in et.iter('password'):
                password = tags.text
        except:
            print "Error in Account Manager get password"
        return password
    def get_user_email(self):
        # Return the stored login email, or "" when unavailable.
        email = ""
        try:
            et = ET.parse("storj_account_conf.xml")
            for tags in et.iter('login_email'):
                email = tags.text
        except:
            print "Error in Account Manager get email"
        return email
# NOTE(review): stray module-level debug print left behind.
print 1
| import xml.etree.cElementTree as ET
class AccountManager:
    """Persists Storj account credentials in storj_account_conf.xml."""
    def __init__(self, login_email=None, password=None):
        self.login_email = login_email
        self.password = password
    def save_account_credentials(self):
        # Serialize <account><credentials>...</credentials></account>.
        # NOTE(review): the password is written to disk in plain text.
        root = ET.Element("account")
        doc = ET.SubElement(root, "credentials")
        i = 0  # NOTE(review): unused leftover variable.
        ET.SubElement(doc, "login_email").text = str(self.login_email)
        ET.SubElement(doc, "password").text = str(self.password)
        ET.SubElement(doc, "logged_in").text = str("1")
        tree = ET.ElementTree(root)
        tree.write("storj_account_conf.xml")
    def if_logged_in(self):
        # NOTE(review): `etree` is undefined here (the module is imported
        # as ET), so parsing always raises NameError and falls into the
        # except branch — every read reports "logged out".
        logged_in = "0"
        try:
            et = etree.parse("storj_account_conf.xml")
            for tags in et.iter('logged_in'):
                logged_in = tags.text
        except:
            logged_in = "0"
            print "Unspecified error"
        if logged_in == "1":
            return True
        else:
            return False
    def logout(self):
        # NOTE(review): placeholder — prints a debug value, no real logout.
        print 1
    def get_user_password(self):
        # NOTE(review): same undefined `etree` problem as if_logged_in().
        password = ""
        try:
            et = etree.parse("storj_account_conf.xml")
            for tags in et.iter('password'):
                password = tags.text
        except:
            print "Unspecified error"
        return password
    def get_user_email(self):
        # NOTE(review): same undefined `etree` problem as if_logged_in().
        email = ""
        try:
            et = etree.parse("storj_account_conf.xml")
            for tags in et.iter('login_email'):
                email = tags.text
        except:
            print "Unspecified error"
        return email
# NOTE(review): stray module-level debug print left behind.
print 1
| mit | Python |
572dff2273532a39d797a63008522fd632c62a3b | fix unit tests. | tobyqin/testcube,tobyqin/testcube,tobyqin/testcube,tobyqin/testcube | testcube/tests/test_utils.py | testcube/tests/test_utils.py | from django.test import TestCase as TC
from testcube.core.models import *
from testcube.utils import *
class ModelsTestCase(TC):
    # Unit tests for testcube.utils helpers; get_domain/read_document and
    # the Configuration model come from the wildcard imports above.
    def setUp(self):
        pass

    def test_get_domain(self):
        # Default domain first, then an override stored via Configuration.
        assert get_domain() == 'company.com'
        Configuration.objects.create(key='domain', value='my.com')
        assert get_domain() == 'my.com'

    def test_read_document(self):
        # The FAQ document should mention "what" (case-insensitive).
        content = read_document('faq')
        assert 'what' in content.lower()
| from django.test import TestCase as TC
from testcube.utils import *
class ModelsTestCase(TC):
    # NOTE(review): Configuration is referenced below but this revision
    # only imports testcube.utils -- test_get_domain fails with NameError
    # unless the models module is imported as well.
    def setUp(self):
        pass

    def test_get_domain(self):
        assert get_domain() == 'company.com'
        Configuration.objects.create(key='domain', value='my.com')
        assert get_domain() == 'my.com'

    def test_read_document(self):
        content = read_document('faq')
        assert 'what' in content.lower()
| mit | Python |
e771ae7cba6bca2082dbfd000f6726f53e6f7eb1 | bump version | John-Lin/snortunsock | snortunsock/__init__.py | snortunsock/__init__.py | __author__ = 'John Lin'
# Package metadata consumed by setup.py / packaging tools.
__author_email__ = 'linton.tw@gmail.com'
__license__ = 'Apache License 2.0'
__url__ = 'https://github.com/John-Lin/snortunsock'
__version__ = '0.0.3'
| __author__ = 'John Lin'
# Package metadata consumed by setup.py / packaging tools.
__author_email__ = 'linton.tw@gmail.com'
__license__ = 'Apache License 2.0'
__url__ = 'https://github.com/John-Lin/snortunsock'
__version__ = '0.0.2'
| apache-2.0 | Python |
5dfcd4e8cea8d821d2476700f2c661c27cae3557 | make CI pass for now | plotly/plotly.py,plotly/plotly.py,plotly/plotly.py | packages/python/plotly/plotly/tests/test_core/test_px/test_pandas_backend.py | packages/python/plotly/plotly/tests/test_core/test_px/test_pandas_backend.py | import plotly.express as px
import numpy as np
import pandas as pd
import pytest
# Skip on pandas versions that predate pluggable plotting backends.
@pytest.mark.skipif(
    not hasattr(pd.options.plotting, "backend"),
    reason="Currently installed pandas doesn't support plotting backends.",
)
# Each case pairs a pandas plotting call with the plotly.express call
# expected to produce an identical figure.
@pytest.mark.parametrize(
    "pandas_fn,px_fn",
    [
        (lambda df: df.plot(), px.line),
        (lambda df: df.plot.scatter("A", "B"), lambda df: px.scatter(df, "A", "B"),),
        (lambda df: df.plot.line(), px.line),
        (lambda df: df.plot.area(), px.area),
        (lambda df: df.plot.bar(), px.bar),
        (lambda df: df.plot.barh(), lambda df: px.bar(df, orientation="h")),
        (lambda df: df.plot.box(), px.box),
        (lambda df: df.plot.hist(), px.histogram),
        (lambda df: df.boxplot(), px.box),
        (lambda df: df.hist(), px.histogram),
        (lambda df: df["A"].hist(), lambda df: px.histogram(df["A"])),
    ],
)
def test_pandas_equiv(pandas_fn, px_fn):
    # With the plotly backend active, pandas plotting must delegate to the
    # equivalent plotly.express builder and yield an equal figure.
    pd.options.plotting.backend = "plotly"
    df = pd.DataFrame(np.random.randn(100, 4), columns=list("ABCD")).cumsum()
    assert pandas_fn(df) == px_fn(df)
| import plotly.express as px
import numpy as np
import pandas as pd
import pytest
# NOTE(review): assumes the installed pandas supports plotting backends;
# there is no skipif guard in this revision -- verify against target
# pandas versions.
@pytest.mark.parametrize(
    "pandas_fn,px_fn",
    [
        (lambda df: df.plot(), px.line),
        (lambda df: df.plot.scatter("A", "B"), lambda df: px.scatter(df, "A", "B"),),
        (lambda df: df.plot.line(), px.line),
        (lambda df: df.plot.area(), px.area),
        (lambda df: df.plot.bar(), px.bar),
        (lambda df: df.plot.barh(), lambda df: px.bar(df, orientation="h")),
        (lambda df: df.plot.box(), px.box),
        (lambda df: df.plot.hist(), px.histogram),
        (lambda df: df.boxplot(), px.box),
        (lambda df: df.hist(), px.histogram),
        (lambda df: df["A"].hist(), lambda df: px.histogram(df["A"])),
    ],
)
def test_pandas_equiv(pandas_fn, px_fn):
    # Pandas plotting with the plotly backend must match plotly.express.
    pd.options.plotting.backend = "plotly"
    df = pd.DataFrame(np.random.randn(100, 4), columns=list("ABCD")).cumsum()
    assert pandas_fn(df) == px_fn(df)
| mit | Python |
3fc20d745ee67e4784645f27ec433fd0c7f9ae61 | fix errors | silenius/amnesia,silenius/amnesia,silenius/amnesia | amnesia/modules/account/views/login.py | amnesia/modules/account/views/login.py | # -*- coding: utf-8 -*-
from marshmallow import ValidationError
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
from pyramid.view import view_defaults
from pyramid.view import view_config
from amnesia.utils.forms import render_form
from amnesia.modules.account.validation import LoginSchema
from amnesia.modules.account import AuthResource
from amnesia.views import BaseView
def includeme(config):
    # Pyramid hook: register the view declarations in this module.
    config.scan(__name__)


@view_defaults(context=AuthResource, name='login', permission='login',
               renderer='amnesia:templates/account/login.pt')
class Login(BaseView):
    """Login form view: GET renders the form, POST authenticates."""

    form_tmpl = 'amnesia:templates/account/_form_login.pt'

    def form(self, data=None, errors=None):
        # Render the login form fragment, optionally pre-filled with
        # submitted data and validation errors.
        return render_form(self.form_tmpl, self.request, data, errors=errors)

    @view_config(request_method='GET')
    def get(self):
        return {'form': self.form()}

    @view_config(request_method='POST')
    def post(self):
        params = self.request.POST.mixed()
        # Validate the raw POST payload; on schema failure re-render the
        # form with the submitted values and per-field error messages.
        try:
            data = LoginSchema().load(params)
        except ValidationError as error:
            return {'form': self.form(params, error.messages)}
        login = data['login']
        password = data['password']
        user = self.context.find_login(login)
        if user:
            if not user.enabled:
                errors = {'login': 'Error: login must be enabled by an administrator'}
            elif self.context.check_user_password(user, password):
                # Success: set the auth cookie and redirect to the site root.
                headers = remember(self.request, str(user.id))
                location = self.request.application_url
                return HTTPFound(location=location, headers=headers)
            else:
                errors = {'password': "Password doesn't match"}
        else:
            errors = {'login': 'Login failed'}
        # Authentication failed: re-render the form with errors.
        form = self.form(data, errors)
        return {'form': form}
| # -*- coding: utf-8 -*-
from marshmallow import ValidationError
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
from pyramid.view import view_defaults
from pyramid.view import view_config
from amnesia.utils.forms import render_form
from amnesia.modules.account.validation import LoginSchema
from amnesia.modules.account import AuthResource
from amnesia.views import BaseView
def includeme(config):
    # Pyramid hook: register the view declarations in this module.
    config.scan(__name__)


@view_defaults(context=AuthResource, name='login', permission='login',
               renderer='amnesia:templates/account/login.pt')
class Login(BaseView):
    """Login form view: GET renders the form, POST authenticates."""

    form_tmpl = 'amnesia:templates/account/_form_login.pt'

    def form(self, data=None, errors=None):
        return render_form(self.form_tmpl, self.request, data, errors=errors)

    @view_config(request_method='GET')
    def get(self):
        return {'form': self.form()}

    @view_config(request_method='POST')
    def post(self):
        # NOTE(review): in the except branch below, `result` was never
        # assigned (the load() call raised), so result.data raises
        # UnboundLocalError instead of re-rendering the form.
        try:
            result = LoginSchema().load(self.request.POST.mixed())
        except ValidationError as error:
            return {'form': self.form(result.data, error.messages)}
        login = result.data['login']
        password = result.data['password']
        user = self.context.find_login(login)
        if user:
            if not user.enabled:
                errors = {'login': 'Error: login must be enabled by an administrator'}
            elif self.context.check_user_password(user, password):
                headers = remember(self.request, str(user.id))
                location = self.request.application_url
                return HTTPFound(location=location, headers=headers)
            else:
                errors = {'password': "Password doesn't match"}
        else:
            errors = {'login': 'Login failed'}
        form = self.form(result.data, errors)
        return {'form': form}
| bsd-2-clause | Python |
da3c7132f5baef052c290cf0e70587ca05e96443 | Bump version to 0.7.0-alpha-3 | thombashi/tcconfig,thombashi/tcconfig | tcconfig/__init__.py | tcconfig/__init__.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
VERSION = "0.7.0-alpha-3"
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
VERSION = "0.7.0-alpha-2"
| mit | Python |
9bdd0d5189b3f81183869bf678836631f85c2ca4 | send notif only if it is an insert not an update | UrLab/DocHub,UrLab/DocHub,UrLab/beta402,UrLab/DocHub,UrLab/beta402,UrLab/DocHub,UrLab/beta402 | telepathy/signals.py | telepathy/signals.py | import models
from notify.models import PreNotification
from django.core.urlresolvers import reverse
def thread_save(**kwargs):
    # post_save handler for Thread: create a notification only on
    # insertion (kwargs['created']), not on updates.
    assert kwargs['sender'] == models.Thread
    if kwargs['created']:
        thread = kwargs['instance']
        #TODO: use truncate instead of str[:N]
        PreNotification.objects.create(
            node=thread,
            text="Nouvelle discussion: "+thread.name[:50].encode('utf-8')+"...",
            url=reverse('thread_show', args=[thread.id]),
            user=thread.user.user
        )
    else:
        #TODO : maybe send a notif when a post is edited
        pass


def message_save(**kwargs):
    # post_save handler for Message: notify only for newly inserted
    # replies (messages that have a previous message).
    assert kwargs['sender'] == models.Message
    message = kwargs['instance']
    thread = message.thread
    poster = message.user
    if kwargs['created']:
        if message.previous:
            #TODO: use truncate instead of str[:N]
            PreNotification.objects.create(
                node=thread,
                text="Answer to {} by {}".format(
                    thread.name[:50].encode('utf-8'), poster.name.encode('utf-8')
                ),
                url=reverse('thread_show', args=[thread.id]),
                user=message.user.user
            )
    else:
        #TODO : maybe send a notif when a message is edited
        pass
from notify.models import PreNotification
from django.core.urlresolvers import reverse
def thread_save(**kwargs):
assert kwargs['sender'] == models.Thread
thread = kwargs['instance']
#TODO: use truncate instead of str[:N]
PreNotification.objects.create(
node=thread,
text="Nouvelle discussion: "+thread.name[:50].encode('utf-8')+"...",
url=reverse('thread_show', args=[thread.id]),
user=thread.user.user
)
def message_save(**kwargs):
assert kwargs['sender'] == models.Message
message = kwargs['instance']
thread = message.thread
poster = message.user
if message.previous:
#TODO: use truncate instead of str[:N]
PreNotification.objects.create(
node=thread,
text="Answer to {} by {}".format(
thread.name[:50].encode('utf-8'), poster.name.encode('utf-8')
),
url=reverse('thread_show', args=[thread.id]),
user=message.user.user
) | agpl-3.0 | Python |
4eb9cb10943e1b1b25aa1fa89686d3fa41d535b8 | Make CACHE_KEY_PREFIX work for RedisCache. It is available for Werkzeug >= 0.8.1. | thadeusb/flask-cache,kazeeki/mezmorize,thadeusb/flask-cache,alexey-sveshnikov/flask-cache,kazeeki/mezmorize,j-fuentes/flask-cache,ordbogen/flask-cache,alexey-sveshnikov/flask-cache,gerasim13/flask-cache,j-fuentes/flask-cache,gerasim13/flask-cache,ordbogen/flask-cache | flaskext/cache/backends.py | flaskext/cache/backends.py | from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache,
GAEMemcachedCache, FileSystemCache)
def null(app, args, kwargs):
    # Backend factory: caching disabled.
    return NullCache()


def simple(app, args, kwargs):
    # In-process dict cache, bounded by CACHE_THRESHOLD entries.
    kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
    return SimpleCache(*args, **kwargs)


def memcached(app, args, kwargs):
    # Memcached backend using CACHE_MEMCACHED_SERVERS and a key prefix.
    args.append(app.config['CACHE_MEMCACHED_SERVERS'])
    kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
    return MemcachedCache(*args, **kwargs)


def gaememcached(app, args, kwargs):
    # Google App Engine flavour of memcached.
    kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
    return GAEMemcachedCache(*args, **kwargs)


def filesystem(app, args, kwargs):
    # On-disk cache under CACHE_DIR, bounded by CACHE_THRESHOLD.
    args.append(app.config['CACHE_DIR'])
    kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
    return FileSystemCache(*args, **kwargs)


# RedisCache is supported since Werkzeug 0.7.
try:
    from werkzeug.contrib.cache import RedisCache
except ImportError:
    pass
else:
    def redis(app, args, kwargs):
        kwargs.update(dict(
            host=app.config.get('CACHE_REDIS_HOST', 'localhost'),
            port=app.config.get('CACHE_REDIS_PORT', 6379),
        ))
        password = app.config.get('CACHE_REDIS_PASSWORD')
        if password:
            kwargs['password'] = password
        # key_prefix requires Werkzeug >= 0.8.1; only pass it when set.
        key_prefix = app.config.get('CACHE_KEY_PREFIX')
        if key_prefix:
            kwargs['key_prefix'] = key_prefix
        return RedisCache(*args, **kwargs)
| from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache,
GAEMemcachedCache, FileSystemCache)
def null(app, args, kwargs):
    # Backend factory: caching disabled.
    return NullCache()


def simple(app, args, kwargs):
    # In-process dict cache, bounded by CACHE_THRESHOLD entries.
    kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
    return SimpleCache(*args, **kwargs)


def memcached(app, args, kwargs):
    # Memcached backend using CACHE_MEMCACHED_SERVERS and a key prefix.
    args.append(app.config['CACHE_MEMCACHED_SERVERS'])
    kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
    return MemcachedCache(*args, **kwargs)


def gaememcached(app, args, kwargs):
    # Google App Engine flavour of memcached.
    kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
    return GAEMemcachedCache(*args, **kwargs)


def filesystem(app, args, kwargs):
    # On-disk cache under CACHE_DIR, bounded by CACHE_THRESHOLD.
    args.append(app.config['CACHE_DIR'])
    kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
    return FileSystemCache(*args, **kwargs)


# RedisCache is supported since Werkzeug 0.7.
try:
    from werkzeug.contrib.cache import RedisCache
except ImportError:
    pass
else:
    def redis(app, args, kwargs):
        # NOTE(review): this revision never forwards CACHE_KEY_PREFIX
        # to RedisCache.
        kwargs.update(dict(
            host=app.config.get('CACHE_REDIS_HOST', 'localhost'),
            port=app.config.get('CACHE_REDIS_PORT', 6379),
        ))
        password = app.config.get('CACHE_REDIS_PASSWORD')
        if password:
            kwargs['password'] = password
        return RedisCache(*args, **kwargs)
| bsd-3-clause | Python |
13a26fce9640122006e7c4bd7ea35eb40ed78f67 | Remove unused import | vladimirgamalian/pictools | renamer.py | renamer.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import click
import os
from utils import TempDir
@click.command()
@click.argument('path', type=click.Path(exists=True, file_okay=False))
@click.option('--start', type=click.IntRange(min=0), prompt=True)
def renamer(path, start):
    # Renumber every .png file in `path` to a zero-padded sequence
    # beginning at `start`, preserving sorted order and extensions.
    files = sorted([f for f in os.listdir(path) if os.path.isfile(os.path.join(path, f)) and f.lower().endswith('.png')])
    files = [(f, ('%04d' % (start + i)) + os.path.splitext(f)[1]) for i, f in enumerate(files)]
    # Two-phase rename through a temp dir so new names may overlap old ones.
    with TempDir(path) as t:
        for f in files:
            os.rename(os.path.join(path, f[0]), os.path.join(t, f[1]))
        for f in files:
            os.rename(os.path.join(t, f[1]), os.path.join(path, f[1]))


if __name__ == '__main__':
    renamer()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import click
import os
import shutil
from utils import TempDir
@click.command()
@click.argument('path', type=click.Path(exists=True, file_okay=False))
@click.option('--start', type=click.IntRange(min=0), prompt=True)
def renamer(path, start):
files = sorted([f for f in os.listdir(path) if os.path.isfile(os.path.join(path, f)) and f.lower().endswith('.png')])
files = [(f, ('%04d' % (start + i)) + os.path.splitext(f)[1]) for i, f in enumerate(files)]
with TempDir(path) as t:
for f in files:
os.rename(os.path.join(path, f[0]), os.path.join(t, f[1]))
for f in files:
os.rename(os.path.join(t, f[1]), os.path.join(path, f[1]))
if __name__ == '__main__':
renamer()
| mit | Python |
5ccbec054c01cda00f3b4ef94c393f57c5bafb42 | Tidy up FlaskMustache class | bradwright/flask-mustachejs,bradleywright/flask-mustachejs,bradwright/flask-mustachejs,bradleywright/flask-mustachejs | flask_mustache/__init__.py | flask_mustache/__init__.py | # flask-mustache Flask plugin
import os
from jinja2 import Template
from flask import current_app
__all__ = ('FlaskMustache',)


class FlaskMustache(object):
    "Wrapper to inject Mustache stuff into Flask"
    def __init__(self, app=None):
        self.app = app
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        # NOTE(review): `app.jinja2` -- Flask exposes `app.jinja_env`;
        # confirm which attribute is intended here.
        self.app = app
        # set up global `mustache` function
        app.jinja2.globals['mustache'] = mustache
        # attach context processor with template content
        app.context_processor(mustache_templates)


# context processor
def mustache_templates():
    "Returns the content of all Mustache templates in the Jinja environment"
    # short circuit development
    if current_app.debug:
        return {}
    # get all the templates this env knows about
    all_templates = current_app.jinja_loader.list_templates()
    mustache_templates = {}
    for template_name in all_templates:
        # TODO: make this configurable
        # we only want a specific extension
        if template_name.endswith('mustache'):
            # throw away everything except the file content
            template, _, _ = \
                current_app.jinja_loader.get_source(current_app.jinja_env,
                                                    template_name)
            mustache_templates[template_name] = template
    # now we need to render the templates
    template_string = """{% if mustache_templates %}
{% for template_name, content in mustache_templates.items() %}
<script type="text/x-mustache-template" id="{{ template_name|replace('/', '-') }}" charset="utf-8">
{{ content|e }}
</script>
{% endfor %}
{% endif %}"""
    context = {
        'mustache_templates': mustache_templates
    }
    # returns the full HTML, ready to use in JavaScript
    return {'mustache_templates': Template(template_string).render(context)}


# template helper function
def mustache(template, **kwargs):
    """Usage:

    {{ mustache('path/to/whatever.mustache', key=value, key1=value1.. keyn=valuen) }}

    This uses the regular Jinja2 loader to find the templates, so your *.mustache files
    will need to be available in that path.
    """
    # NOTE(review): pystache is used here but not imported in the visible
    # imports at the top of this module -- verify it is imported elsewhere.
    template, _, _ = current_app.jinja_loader.get_source(current_app.jinja_env, template)
    return pystache.render(template, kwargs, encoding='utf-8')
| # flask-mustache Flask plugin
import os
from jinja2 import Template
from flask import current_app
class FlaskMustache(object):
    "Wrapper to inject Mustache stuff into Flask"
    def __init__(self, app=None, configure_jinja=True):
        # configure_jinja toggles Jinja integration in init_app.
        self._configure_jinja = configure_jinja
        self.app = app
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        # NOTE(review): `app.jinja2` -- Flask exposes `app.jinja_env`;
        # confirm which attribute is intended here.
        self.app = app
        if self._configure_jinja:
            app.jinja2.globals['mustache'] = mustache
            app.context_processor(mustache_templates)


# context processor
def mustache_templates():
    "Returns the content of all Mustache templates in the Jinja environment"
    # short circuit development
    if current_app.debug:
        return {}
    # get all the templates this env knows about
    all_templates = current_app.jinja_loader.list_templates()
    mustache_templates = {}
    for template_name in all_templates:
        # TODO: make this configurable
        # we only want a specific extension
        if template_name.endswith('mustache'):
            # throw away everything except the file content
            template, _, _ = \
                current_app.jinja_loader.get_source(current_app.jinja_env,
                                                    template_name)
            mustache_templates[template_name] = template
    # now we need to render the templates
    template_string = """{% if mustache_templates %}
{% for template_name, content in mustache_templates.items() %}
<script type="text/x-mustache-template" id="{{ template_name|replace('/', '-') }}" charset="utf-8">
{{ content|e }}
</script>
{% endfor %}
{% endif %}"""
    context = {
        'mustache_templates': mustache_templates
    }
    # returns the full HTML, ready to use in JavaScript
    return {'mustache_templates': Template(template_string).render(context)}


# template helper function
def mustache(template, **kwargs):
    """Usage:

    {{ mustache('path/to/whatever.mustache', key=value, key1=value1.. keyn=valuen) }}

    This uses the regular Jinja2 loader to find the templates, so your *.mustache files
    will need to be available in that path.
    """
    # NOTE(review): pystache is used here but not imported in the visible
    # imports at the top of this module -- verify it is imported elsewhere.
    template, _, _ = current_app.jinja_loader.get_source(current_app.jinja_env, template)
    return pystache.render(template, kwargs, encoding='utf-8')
| bsd-3-clause | Python |
1873f8eca4ff96ff987d50e119882bfee363d0b4 | return better Changes and support test=True | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/states/nfs_export.py | salt/states/nfs_export.py | # -*- coding: utf-8 -*-
'''
Management of NFS exports
===============================================
To ensure an NFS export exists:
.. code-block:: yaml
add_simple_export:
nfs_export.present:
- name: '/srv/nfs'
- hosts: '10.0.2.0/24'
- options: 'rw'
For more complex exports with multiple groups of hosts:
.. code-block:: yaml
add_complex_export:
nfs_export.present:
- name: '/srv/nfs'
- exports:
# First export, same as simple one above
- hosts:
- '10.0.2.0/24'
options:
- 'rw'
# Second export
- hosts:
- '192.168.0.0/24'
- '172.19.0.0/16'
options:
- 'ro'
- 'subtree_check'
This creates the following in /etc/exports:
.. code-block:: bash
/srv/nfs 10.0.2.0/24(rw)
Any export of the given path will be modified to match the one specified.
To ensure an NFS export is absent:
.. code-block:: yaml
delete_export:
nfs_export.absent:
- name: '/srv/nfs'
'''
#from __future__ import absolute_import
def absent(name, exports='/etc/exports'):
    # Salt state: ensure `name` is not present in the exports file.
    # Honours test mode (__opts__['test']) by reporting what would change
    # without editing the file. Returns the standard state dict
    # (name/changes/result/comment).
    path = name
    ret = {'name': name,
           'changes': {},
           'result': None,
           'comment': ''}

    old = __salt__['nfs3.list_exports'](exports)
    if path in old:
        if __opts__['test']:
            ret['comment'] = 'Export {0} would be removed'.format(path)
            ret['result'] = None
            return ret

        __salt__['nfs3.del_export'](exports, path)
        ret['comment'] = 'Export {0} removed'.format(path)
        # Record the removed entry so the state report shows what was lost.
        ret['changes'][path] = old[path]
        ret['result'] = True
    else:
        ret['comment'] = 'Export {0} already absent'.format(path)
        ret['result'] = True

    return ret
| # -*- coding: utf-8 -*-
'''
Management of NFS exports
===============================================
To ensure an NFS export exists:
.. code-block:: yaml
add_simple_export:
nfs_export.present:
- name: '/srv/nfs'
- hosts: '10.0.2.0/24'
- options: 'rw'
For more complex exports with multiple groups of hosts:
.. code-block:: yaml
add_complex_export:
nfs_export.present:
- name: '/srv/nfs'
- exports:
# First export, same as simple one above
- hosts:
- '10.0.2.0/24'
options:
- 'rw'
# Second export
- hosts:
- '192.168.0.0/24'
- '172.19.0.0/16'
options:
- 'ro'
- 'subtree_check'
This creates the following in /etc/exports:
.. code-block:: bash
/srv/nfs 10.0.2.0/24(rw)
Any export of the given path will be modified to match the one specified.
To ensure an NFS export is absent:
.. code-block:: yaml
delete_export:
nfs_export.absent:
- name: '/srv/nfs'
'''
#from __future__ import absolute_import
def absent(name, exports='/etc/exports'):
    # Salt state: ensure `name` is not present in the exports file.
    # NOTE(review): no __opts__['test'] support in this revision, and the
    # changes dict only records the path, not the removed export entry.
    path = name
    ret = {'name': name,
           'changes': {},
           'result': None,
           'comment': ''}

    old = __salt__['nfs3.list_exports'](exports)
    if path in old:
        __salt__['nfs3.del_export'](exports, path)
        ret['comment'] = 'Export {0} removed'.format(path)
        ret['changes'] = {'path': path}
        ret['result'] = True
    else:
        ret['comment'] = 'Export {0} already absent'.format(path)
        ret['result'] = True

    return ret
| apache-2.0 | Python |
7a0ec66727644e26948d237e62c3ed69c47931bc | Update main.py | gameplex/game | src/client/main.py | src/client/main.py | #!/usr/bin/env python
from __future__ import print_function
def main():
    """Client main loop stub: report that the loop is running."""
    status_message = "Running main loop"
    print(status_message)


if __name__ == "__main__":
    # Banner, main loop, farewell.
    print("Hello Client!")
    main()
    print("Goodbye Client!")
| #!/usr/bin/env python
from __future__ import print_function
if __name__ == "__main__":
print("Hello Client")
| agpl-3.0 | Python |
622967954d3813f6b40c4fdb869becbf663733d1 | Fix global import cards | rdsathene/SchoolIdolAPI,rdsathene/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,dburr/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,dburr/SchoolIdolAPI,dburr/SchoolIdolAPI,rdsathene/SchoolIdolAPI | api/management/commands/importcards.py | api/management/commands/importcards.py | # -*- coding: utf-8 -*-
from api.management.commands.importbasics import *
from api.management.commands.importcardstats import importcardstats
from api.management.commands.import_jp_events import import_jp_events
from api.management.commands.import_en_events import import_en_events
from api.management.commands.import_wikia import import_wikia
from api.management.commands.importcards_japanese import importcards_japanese
from api.management.commands.import_video_stories import import_video_stories
from api.management.commands.import_idols import import_idols
from api.management.commands.import_songs import import_songs
class Command(BaseCommand):
    # Aggregate importer: pulls card stats, JP/EN events, idols and songs,
    # then loads the raw DB dump. The parsed 'delete' option instead wipes
    # the imported tables and stops.
    can_import_settings = True

    def handle(self, *args, **options):
        opt = opt_parse(args)

        if opt['delete']:
            models.Card.objects.all().delete()
            models.Event.objects.all().delete()
            models.Idol.objects.all().delete()
            models.Song.objects.all().delete()
            return

        importcardstats(opt)
        import_jp_events(opt)
        import_en_events(opt)
        import_idols(opt)
        import_songs(opt)
        import_raw_db()
| # -*- coding: utf-8 -*-
from api.management.commands.importbasics import *
from api.management.commands.importcardstats import importcardstats
from api.management.commands.import_jp_events import import_jp_events
from api.management.commands.import_en_events import import_en_events
from api.management.commands.import_wikia import import_wikia
from api.management.commands.importcards_japanese import importcards_japanese
from api.management.commands.import_transparent_images import import_transparent_images
from api.management.commands.import_video_stories import import_video_stories
from api.management.commands.import_idols import import_idols
from api.management.commands.import_songs import import_songs
class Command(BaseCommand):
    # Aggregate importer: pulls card stats, JP/EN events, idols and songs,
    # then loads the raw DB dump. The parsed 'delete' option instead wipes
    # the imported tables and stops.
    can_import_settings = True

    def handle(self, *args, **options):
        opt = opt_parse(args)

        if opt['delete']:
            models.Card.objects.all().delete()
            models.Event.objects.all().delete()
            models.Idol.objects.all().delete()
            models.Song.objects.all().delete()
            return

        importcardstats(opt)
        import_jp_events(opt)
        import_en_events(opt)
        import_idols(opt)
        import_songs(opt)
        import_raw_db()
| apache-2.0 | Python |
97c200c3b242b8794b14642099ba51e0085b0ff2 | Remove debugging lines | eregs/regulations-parser,tadhg-ohiggins/regulations-parser,cmc333333/regulations-parser,tadhg-ohiggins/regulations-parser,cmc333333/regulations-parser,eregs/regulations-parser | regparser/eregs_index.py | regparser/eregs_index.py | """The eregs_index directory contains the output for many of the shell
commands. This module provides a quick interface to this index"""
import os
from lxml import etree
ROOT = ".eregs_index"
class Path(object):
    """Accessor for one directory inside the eregs index.

    Files are read and written relative to ROOT joined with the given
    sub-directory names; the directory is created lazily on access.
    """

    def __init__(self, *dirs):
        self.path = os.path.join(ROOT, *dirs)

    def _create(self):
        # Materialise the directory the first time it is touched.
        if not os.path.exists(self.path):
            os.makedirs(self.path)

    def write(self, label, content):
        """Store `content` in a file called `label` in this directory."""
        self._create()
        target = os.path.join(self.path, label)
        with open(target, "w") as handle:
            handle.write(content)

    def read(self, label):
        """Return the text content of the file called `label`."""
        self._create()
        source = os.path.join(self.path, label)
        with open(source) as handle:
            return handle.read()

    def read_xml(self, label):
        """Parse the file called `label` as XML and return the root node."""
        return etree.fromstring(self.read(label))

    def __len__(self):
        """Number of regular files (sub-directories excluded)."""
        self._create()
        entries = os.listdir(self.path)
        return sum(1 for entry in entries
                   if os.path.isfile(os.path.join(self.path, entry)))
| """The eregs_index directory contains the output for many of the shell
commands. This module provides a quick interface to this index"""
import os
from lxml import etree
ROOT = ".eregs_index"
class Path(object):
    """Encapsulates access to a particular directory within the index"""
    # NOTE(review): Python 2 print statements in __len__ below are
    # leftover debugging output.
    def __init__(self, *dirs):
        self.path = os.path.join(ROOT, *dirs)

    def _create(self):
        # Materialise the directory the first time it is touched.
        if not os.path.exists(self.path):
            os.makedirs(self.path)

    def write(self, label, content):
        # Store `content` in a file called `label` in this directory.
        self._create()
        with open(os.path.join(self.path, label), "w") as f:
            f.write(content)

    def read(self, label):
        # Return the text content of the file called `label`.
        self._create()
        with open(os.path.join(self.path, label)) as f:
            return f.read()

    def read_xml(self, label):
        # Parse the file called `label` as XML and return the root node.
        return etree.fromstring(self.read(label))

    def __len__(self):
        # Number of regular files (sub-directories excluded).
        self._create()
        print self.path
        print os.listdir(self.path)
        return sum(1 for name in os.listdir(self.path)
                   if os.path.isfile(os.path.join(self.path, name)))
| cc0-1.0 | Python |
d71b5207d7ae22a26006b6e4f67e9e3ee0200a77 | Update plot history. | benigls/spam,benigls/spam | spam/common/utils.py | spam/common/utils.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
import matplotlib.pyplot as plt
from sklearn.cross_validation import train_test_split
from spam.common.collections import Dataset, Data
def split_dataset(x, y, seed=0):
    """ Split the dataset into unlabel, train, and test sets. """
    # 90% unlabeled / 10% labeled, then the labeled part is split
    # 80% train / 20% test; `seed` makes both splits reproducible.
    # split the data into label and unlabel
    x_unlabel, x_label, _, y_label = \
        train_test_split(
            x,
            y,
            test_size=0.1,
            random_state=seed,
        )

    # split data into train and test data
    x_train, x_test, y_train, y_test = \
        train_test_split(
            x_label,
            y_label,
            test_size=0.2,
            random_state=seed,
        )

    return Dataset(
        x_unlabel,
        Data(x_train, None, y_train),
        Data(x_test, None, y_test)
    )


def get_dataset_meta(dataset=None):
    """ Get the dataset meta. """
    # Summarise counts per split; y labels are assumed to be 1 for spam
    # and 0 for ham, so sum(y) counts the spam examples.
    data_meta = {}
    data_meta['unlabeled_count'] = len(dataset.unlabel)
    data_meta['labeled_count'] = \
        len(dataset.train.X) + len(dataset.test.X)

    data_meta['train_data'] = {}
    data_meta['test_data'] = {}

    data_meta['train_data']['spam_count'] = int(sum(dataset.train.y))
    data_meta['train_data']['ham_count'] = \
        int(len(dataset.train.y) - sum(dataset.train.y))
    data_meta['train_data']['total_count'] = \
        data_meta['train_data']['spam_count'] + \
        data_meta['train_data']['ham_count']

    data_meta['test_data']['spam_count'] = int(sum(dataset.test.y))
    data_meta['test_data']['ham_count'] = \
        int(len(dataset.test.y) - sum(dataset.test.y))
    data_meta['test_data']['total_count'] = \
        data_meta['test_data']['spam_count'] + \
        data_meta['test_data']['ham_count']

    return data_meta


def plot_loss_history(data=None, title=None, name=None, path=None):
    """ Plot and export loss history. """
    # A random figure id avoids drawing into a previously used figure.
    # TODO: add labels to loss history
    plt.figure(random.randint(a=1, b=100))
    plt.title(title)
    plt.plot(data)
    plt.savefig('{}/{}.png'.format(path, name))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
from sklearn.cross_validation import train_test_split
from spam.common.collections import Dataset, Data
def split_dataset(x, y, seed=0):
    """ Split the dataset into unlabel, train, and test sets. """
    # 90% unlabeled / 10% labeled, then the labeled part is split
    # 80% train / 20% test; `seed` makes both splits reproducible.
    # split the data into label and unlabel
    x_unlabel, x_label, _, y_label = \
        train_test_split(
            x,
            y,
            test_size=0.1,
            random_state=seed,
        )

    # split data into train and test data
    x_train, x_test, y_train, y_test = \
        train_test_split(
            x_label,
            y_label,
            test_size=0.2,
            random_state=seed,
        )

    return Dataset(
        x_unlabel,
        Data(x_train, None, y_train),
        Data(x_test, None, y_test)
    )


def get_dataset_meta(self, dataset=None):
    """ Get the dataset meta. """
    # NOTE(review): module-level function with a stray `self` parameter
    # (looks copied from a method) -- callers must pass an extra first
    # argument or this raises TypeError.
    data_meta = {}
    data_meta['unlabeled_count'] = len(dataset.unlabel)
    data_meta['labeled_count'] = \
        len(dataset.train.X) + len(dataset.test.X)

    data_meta['train_data'] = {}
    data_meta['test_data'] = {}

    data_meta['train_data']['spam_count'] = int(sum(dataset.train.y))
    data_meta['train_data']['ham_count'] = \
        int(len(dataset.train.y) - sum(dataset.train.y))
    data_meta['train_data']['total_count'] = \
        data_meta['train_data']['spam_count'] + \
        data_meta['train_data']['ham_count']

    data_meta['test_data']['spam_count'] = int(sum(dataset.test.y))
    data_meta['test_data']['ham_count'] = \
        int(len(dataset.test.y) - sum(dataset.test.y))
    data_meta['test_data']['total_count'] = \
        data_meta['test_data']['spam_count'] + \
        data_meta['test_data']['ham_count']

    return data_meta


def plot_loss_history(data=None, title=None, name=None, path=None):
    """ Plot and export loss history. """
    # NOTE(review): no plt.figure() call -- successive calls draw into
    # the same active figure.
    # TODO: add labels to loss history
    plt.title(title)
    plt.plot(data)
    plt.savefig('{}/{}.png'.format(path, name))
| mit | Python |
2de4a2e3ddc304cbc6e68cd8ccfda75d06ef9514 | Add related_name to part_of field | stefanw/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide | froide/georegion/models.py | froide/georegion/models.py | from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.db import models
class GeoRegion(models.Model):
    # Hierarchical geographic region (country down to zipcode); the
    # hierarchy is modelled via the self-referencing part_of FK.
    name = models.CharField(_('Name'), max_length=255)
    slug = models.SlugField(_('Slug'), max_length=255)
    description = models.TextField(blank=True)
    kind = models.CharField(_('Kind of Region'), max_length=255,
        choices=(
            ('country', _('country')),
            ('state', _('state')),
            ('admin_district', _('administrative district')),
            ('district', _('district')),
            ('admin_cooperation', _('administrative cooperation')),
            ('municipality', _('municipality')),
            ('borough', _('borough')),
            ('zipcode', _('zipcode')),
            ('admin_court_jurisdiction', _('administrative court jurisdiction')),
        )
    )
    kind_detail = models.CharField(max_length=255, blank=True)
    level = models.IntegerField(default=0)
    region_identifier = models.CharField(max_length=255, blank=True)
    global_identifier = models.CharField(max_length=255, blank=True)

    area = models.FloatField(_('Area'), default=0.0)  # in Sqm
    population = models.IntegerField(null=True, blank=True)

    valid_on = models.DateTimeField(null=True, blank=True)

    geom = models.MultiPolygonField(_('geometry'), geography=True)
    gov_seat = models.PointField(_('gov seat'), null=True, blank=True, geography=True)

    part_of = models.ForeignKey('self', verbose_name=_('Part of'), null=True,
        on_delete=models.SET_NULL, blank=True, related_name='sub_regions'
    )

    class Meta:
        verbose_name = _('Geo Region')
        verbose_name_plural = _('Geo Regions')

    def __str__(self):
        return '%s (%s)' % (self.name, self.pk)

    def get_all_sub_regions(self):
        # Depth-first generator over the whole sub-region tree, using the
        # part_of reverse relation (sub_regions).
        def get_subregions(region):
            for sub_region in region.sub_regions.all():
                yield sub_region
                yield from get_subregions(sub_region)

        yield from get_subregions(self)
| from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.db import models
class GeoRegion(models.Model):
name = models.CharField(_('Name'), max_length=255)
slug = models.SlugField(_('Slug'), max_length=255)
description = models.TextField(blank=True)
kind = models.CharField(_('Kind of Region'), max_length=255,
choices=(
('country', _('country')),
('state', _('state')),
('admin_district', _('administrative district')),
('district', _('district')),
('admin_cooperation', _('administrative cooperation')),
('municipality', _('municipality')),
('borough', _('borough')),
('zipcode', _('zipcode')),
('admin_court_jurisdiction', _('administrative court jurisdiction')),
)
)
kind_detail = models.CharField(max_length=255, blank=True)
level = models.IntegerField(default=0)
region_identifier = models.CharField(max_length=255, blank=True)
global_identifier = models.CharField(max_length=255, blank=True)
area = models.FloatField(_('Area'), default=0.0) # in Sqm
population = models.IntegerField(null=True, blank=True)
valid_on = models.DateTimeField(null=True, blank=True)
geom = models.MultiPolygonField(_('geometry'), geography=True)
gov_seat = models.PointField(_('gov seat'), null=True, blank=True, geography=True)
part_of = models.ForeignKey('self', verbose_name=_('Part of'), null=True,
on_delete=models.SET_NULL, blank=True
)
class Meta:
verbose_name = _('Geo Region')
verbose_name_plural = _('Geo Regions')
def __str__(self):
return '%s (%s)' % (self.name, self.pk)
| mit | Python |
ba022365174ea6037c76e909f05cb0ed5bd4addb | Set raster plotter in __init__.py | spectralpython/spectral | spectral/__init__.py | spectral/__init__.py | #########################################################################
#
# spectral/__init__.py - This file is part of the Spectral Python (SPy)
# package.
#
# Copyright (C) 2001-2010 Thomas Boggs
#
# Spectral Python is free software; you can redistribute it and/
# or modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Spectral Python is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; if not, write to
#
# Free Software Foundation, Inc.
# 59 Temple Place, Suite 330
# Boston, MA 02111-1307
# USA
#
#########################################################################
#
# Send comments to:
# Thomas Boggs, tboggs@users.sourceforge.net
#
__version__ = '0.9+'
import sys
if sys.byteorder == 'little':
byte_order = 0 # little endian
else:
byte_order = 1 # big endian
BSQ = 0
BIL = 1
BIP = 2
#from numpy import *
from spectral import image, load_training_sets, save_training_sets, settings, \
tile_image, spy_colors, BandInfo
from io import *
from algorithms import *
from graphics import *
from database import *
try:
import pylab
from graphics import spypylab
pylab.ion()
spectral.settings.plotter = spypylab
spectral.settings.viewer = graphics
except:
warn('Unable to import orconfigure pylab plotter. Spectrum plots will be '
'unavailable.', UserWarning)
import utilities.status
status = utilities.status.StatusDisplay()
| #########################################################################
#
# spectral/__init__.py - This file is part of the Spectral Python (SPy)
# package.
#
# Copyright (C) 2001-2010 Thomas Boggs
#
# Spectral Python is free software; you can redistribute it and/
# or modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Spectral Python is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; if not, write to
#
# Free Software Foundation, Inc.
# 59 Temple Place, Suite 330
# Boston, MA 02111-1307
# USA
#
#########################################################################
#
# Send comments to:
# Thomas Boggs, tboggs@users.sourceforge.net
#
__version__ = '0.9+'
import sys
if sys.byteorder == 'little':
byte_order = 0 # little endian
else:
byte_order = 1 # big endian
BSQ = 0
BIL = 1
BIP = 2
#from numpy import *
from spectral import image, load_training_sets, save_training_sets, settings, \
tile_image, spy_colors, BandInfo
from io import *
from algorithms import *
from graphics import *
from database import *
try:
import pylab
from graphics import spypylab
pylab.ion()
spectral.settings.plotter = spypylab
except:
warn('Unable to import orconfigure pylab plotter. Spectrum plots will be '
'unavailable.', UserWarning)
import utilities.status
status = utilities.status.StatusDisplay()
| mit | Python |
98ec51a18c96bc68b9a088a7d23d8aae058ed1f5 | Create tmp dir if it does not exist | 20tab/django-spoolgore | spoolgore/backend.py | spoolgore/backend.py | from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
import threading
import os
import time
spoolgore_local = threading.local()
class EmailBackend(BaseEmailBackend):
__tmp__ = "%s/tmp" % settings.SPOOLGORE_DIRECTORY
def send_messages(self, email_messages):
pid = os.getpid()
tid = threading.current_thread().ident
num_sent = 0
if not email_messages:
return
for email_message in email_messages:
if self._send(email_message.message().as_string(), pid, tid):
num_sent += 1
return num_sent
def fsyncspool(self):
"""
Call fsync() on the spool directory
"""
fd = -1
try:
fd = os.open(settings.SPOOLGORE_DIRECTORY, os.O_RDONLY)
os.fsync(fd)
finally:
if fd > -1: os.close(fd)
def _send(self, data, pid, tid):
if not hasattr(spoolgore_local, 'counter'):
spoolgore_local.counter = 0
spoolgore_local.counter += 1
filename = "%f_%s_%d_%d_%d" % (time.time(), time.strftime("%Y.%m.%d.%H.%M.%S"), pid, tid, spoolgore_local.counter)
tmp = "%s/%s" % (self.__tmp__, filename)
if not os.path.exists(self.__tmp__):
os.makedirs(self.__tmp__)
spool = "%s/%s" % (settings.SPOOLGORE_DIRECTORY, filename)
with open(tmp, 'w') as f:
f.write(data)
try:
os.link(tmp, spool)
self.fsyncspool()
finally:
os.unlink(tmp)
return True
| from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
import threading
import os
import time
spoolgore_local = threading.local()
class EmailBackend(BaseEmailBackend):
__tmp__ = "%s/tmp" % settings.SPOOLGORE_DIRECTORY
def send_messages(self, email_messages):
pid = os.getpid()
tid = threading.current_thread().ident
num_sent = 0
if not email_messages:
return
for email_message in email_messages:
if self._send(email_message.message().as_string(), pid, tid):
num_sent += 1
return num_sent
def fsyncspool(self):
"""
Call fsync() on the spool directory
"""
fd = -1
try:
fd = os.open(settings.SPOOLGORE_DIRECTORY, os.O_RDONLY)
os.fsync(fd)
finally:
if fd > -1: os.close(fd)
def _send(self, data, pid, tid):
if not hasattr(spoolgore_local, 'counter'):
spoolgore_local.counter = 0
spoolgore_local.counter += 1
filename = "%f_%s_%d_%d_%d" % (time.time(), time.strftime("%Y.%m.%d.%H.%M.%S"), pid, tid, spoolgore_local.counter)
tmp = "%s/%s" % (self.__tmp__, filename)
spool = "%s/%s" % (settings.SPOOLGORE_DIRECTORY, filename)
with open(tmp, 'w') as f:
f.write(data)
try:
os.link(tmp, spool)
self.fsyncspool()
finally:
os.unlink(tmp)
return True
| mit | Python |
551abd09046b613cdd187ffeef4f2448fc60001a | Fix test | delta2323/chainer-deepmark,delta2323/chainer-deepmark | tests/net_tests/test_alex.py | tests/net_tests/test_alex.py | import unittest
import numpy
import chainer
from chainer import cuda
from chainer.testing import attr
from deepmark_chainer.net import alex
class TestAlex(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (1, 3, 224, 224)).astype(numpy.float32)
self.l = alex.Alex()
def check_forward(self, xp):
x = chainer.Variable(xp.asarray(self.x))
self.l(x)
def test_forward_cpu(self):
self.check_forward(numpy)
@attr.gpu
def test_forward_gpu(self):
self.l.to_gpu()
self.check_forward(cuda.cupy)
| import unittest
import numpy
import chainer
from chainer import cuda
from chainer.testing import attr
from deepmark_chainer.net import alex
class TestAlex(unittest.TestCase)
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (1, 3, 224, 224)).astype(numpy.float32)
self.l = alex.Alex()
def check_forward(self, xp):
x = chainer.Variable(xp.asarray(self.x))
self.l(x)
def test_forward_cpu(self):
self.check_forward(numpy)
@attr.gpu
def test_forward_gpu(self):
self.l.to_gpu()
self.check_forward(cuda.cupy)
| mit | Python |
8959c0146470e27e494573df0c3ceb370da70305 | Implement Domains on Nameserver API | krmaxwell/spyonweb | spyonweb/spyonweb.py | spyonweb/spyonweb.py | import os
from argparse import ArgumentParser
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
full_url = self.url + "analytics/" + code + "?access_token=" + self.token
if limit:
full_url = full_url + "&limit=" + str(limit)
data = requests.get(full_url)
# TODO: implement paging
return data.json()
def ipaddress(self, ipaddr, limit=None):
full_url = self.url + "ip/" + ipaddr + "?access_token=" + self.token
if limit:
full_url = full_url + "&limit=" + str(limit)
data = requests.get(full_url)
# TODO: implement paging
return data.json()
def dns_domain(self, name, limit=None):
full_url = self.url + "dns_domain/" + name + "?access_token=" + self.token
if limit:
full_url = full_url + "&limit=" + str(limit)
data = requests.get(full_url)
# TODO: implement paging
return data.json()
def main():
parser = ArgumentParser()
parser.add_argument('-s', '--summary', type=str, help="Specify a domain for the Request Summary API")
parser.add_argument('-d', '--domain', type=str, help="Specify a domain for the Domain API")
parser.add_argument('-a', '--analytics', type=str, help="Specify a code for the Analytics API")
parser.add_argument('-i', '--ipaddress', type=str, help="Specify an address for the IP Address API")
parser.add_argument('-n', '--dns_domain', type=str, help="Specify a name for the Domains on Nameserver API")
args, _ = parser.parse_known_args()
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
if args.summary:
print s.summary(args.summary)
if args.domain:
print s.domain(args.domain)
if args.analytics:
print s.analytics(args.analytics)
if args.ipaddress:
print s.ipaddress(args.ipaddress)
if args.dns_domain:
print s.dns_domain(args.dns_domain)
if __name__ == "__main__":
main()
| import os
from argparse import ArgumentParser
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
full_url = self.url + "analytics/" + code + "?access_token=" + self.token
if limit:
full_url = full_url + "&limit=" + str(limit)
data = requests.get(full_url)
# TODO: implement paging
return data.json()
def ipaddress(self, ipaddr, limit=None):
full_url = self.url + "ip/" + ipaddr + "?access_token=" + self.token
if limit:
full_url = full_url + "&limit=" + str(limit)
data = requests.get(full_url)
# TODO: implement paging
return data.json()
def main():
parser = ArgumentParser()
parser.add_argument('-s', '--summary', type=str, help="Specify a domain for the Request Summary API")
parser.add_argument('-d', '--domain', type=str, help="Specify a domain for the Domain API")
parser.add_argument('-a', '--analytics', type=str, help="Specify a code for the Analytics API")
parser.add_argument('-i', '--ipaddress', type=str, help="Specify an address for the IP Address API")
args, _ = parser.parse_known_args()
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
if args.summary:
print s.summary(args.summary)
if args.domain:
print s.domain(args.domain)
if args.analytics:
print s.analytics(args.analytics)
if args.ipaddress:
print s.ipaddress(args.ipaddress)
if __name__ == "__main__":
main()
| apache-2.0 | Python |
94c3a5df4e5634cf256e52a2fcbc1ffc38bf6770 | Add GlassDataSetTest | lucasdavid/Manifold-Learning,lucasdavid/Manifold-Learning | tests/real/1_dataset_in_r.py | tests/real/1_dataset_in_r.py | import os
from unittest import TestCase
from manifold.infrastructure import Retriever, Displayer
class GlassDataSetTest(TestCase):
def test_display_dimensions(self):
data_sets_dir = '../datasets'
data_set = 'glass/glass.data'
file = os.path.join(data_sets_dir, data_set)
print('Displaying data set {%s} in the Rn' % file)
glass = Retriever(file, delimiter=',')
# Glass has the samples' ids in the first column.
glass.split_column(0)
# Additionally, its last column represents the target feature.
glass.split_target()
data, color = glass.retrieve()
d = Displayer(title=data_set)
# Scatter all dimensions (3-by-3), using as many graphs as necessary.
for begin in range(0, glass.features, 3):
end = min(glass.features, begin + 3)
d.load('Dimensions: d e [%i, %i]' % (begin + 1, end), data[:, begin:end], color=color)
d.render()
| import os
from manifold.infrastructure import Retriever, Displayer
DATA_SETS_DIR = '../datasets'
DATA_SET = 'glass/glass.data'
def main():
data_set_file = os.path.join(DATA_SETS_DIR, DATA_SET)
print('Displaying data set {%s} in the Rn' % data_set_file)
glass = Retriever(data_set_file, delimiter=',')
# Glass has the samples' ids in the first column.
glass.split_column(0)
# Additionally, its last column represents the target feature.
glass.split_target()
data, color = glass.retrieve()
d = Displayer(title=DATA_SET)
# Scatter all dimensions (3-by-3), using as many graphs as necessary.
for begin in range(0, glass.features, 3):
end = min(glass.features, begin + 3)
d.load('Dimensions: d e [%i, %i]' % (begin+1, end), data[:, begin:end], color=color)
d.render()
if __name__ == '__main__':
main()
| mit | Python |
329e74f280537aab41d5b810f8650bfd8d6d81f5 | Add teardown specific to the former TestCase class | michaeljoseph/cookiecutter,christabor/cookiecutter,cguardia/cookiecutter,janusnic/cookiecutter,michaeljoseph/cookiecutter,cguardia/cookiecutter,vincentbernat/cookiecutter,drgarcia1986/cookiecutter,Vauxoo/cookiecutter,cichm/cookiecutter,benthomasson/cookiecutter,0k/cookiecutter,terryjbates/cookiecutter,atlassian/cookiecutter,lucius-feng/cookiecutter,Springerle/cookiecutter,hackebrot/cookiecutter,moi65/cookiecutter,0k/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,jhermann/cookiecutter,ramiroluz/cookiecutter,kkujawinski/cookiecutter,agconti/cookiecutter,sp1rs/cookiecutter,lgp171188/cookiecutter,kkujawinski/cookiecutter,jhermann/cookiecutter,venumech/cookiecutter,sp1rs/cookiecutter,luzfcb/cookiecutter,janusnic/cookiecutter,vintasoftware/cookiecutter,atlassian/cookiecutter,stevepiercy/cookiecutter,pjbull/cookiecutter,ionelmc/cookiecutter,takeflight/cookiecutter,letolab/cookiecutter,letolab/cookiecutter,pjbull/cookiecutter,hackebrot/cookiecutter,luzfcb/cookiecutter,audreyr/cookiecutter,takeflight/cookiecutter,lgp171188/cookiecutter,agconti/cookiecutter,vintasoftware/cookiecutter,Springerle/cookiecutter,cichm/cookiecutter,ionelmc/cookiecutter,benthomasson/cookiecutter,lucius-feng/cookiecutter,audreyr/cookiecutter,terryjbates/cookiecutter,foodszhang/cookiecutter,foodszhang/cookiecutter,vincentbernat/cookiecutter,ramiroluz/cookiecutter,tylerdave/cookiecutter,tylerdave/cookiecutter,nhomar/cookiecutter,dajose/cookiecutter,stevepiercy/cookiecutter,nhomar/cookiecutter,willingc/cookiecutter,Vauxoo/cookiecutter,drgarcia1986/cookiecutter,moi65/cookiecutter,christabor/cookiecutter,dajose/cookiecutter | tests/test_generate_files.py | tests/test_generate_files.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
from __future__ import unicode_literals
import os
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
def remove_additional_folders():
if os.path.exists('inputpizzä'):
utils.rmtree('inputpizzä')
if os.path.exists('inputgreen'):
utils.rmtree('inputgreen')
if os.path.exists('inputbinary_files'):
utils.rmtree('inputbinary_files')
if os.path.exists('tests/custom_output_dir'):
utils.rmtree('tests/custom_output_dir')
if os.path.exists('inputpermissions'):
utils.rmtree('inputpermissions')
request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures("clean_system")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
| bsd-3-clause | Python |
cfe013915332b7d1d6d5fa208f2dc476dcc3afba | Backup to new hard drive | cmu-delphi/operations,cmu-delphi/operations,cmu-delphi/operations | src/backup.py | src/backup.py | """Makes backups of Delphi files and databases."""
# standard library
import datetime
import os
import subprocess
import time
# first party
import delphi.operations.secrets as secrets
#General setup
dest = '/home/automation/backups'
tag = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
print('Destination: %s | Tag: %s'%(dest, tag))
#Directories
keys_path, keys_name = os.path.split(secrets.apache.keys_dir)
dirs = [
{
'label': 'auto',
'path': '/home/automation',
'name': 'driver',
'exclude': 'flu_data',
}, {
'label': 'html',
'path': '/var/www',
'name': 'html',
}, {
'label': 'data',
'path': '/home/automation/driver',
'name': 'flu_data',
}, {
'label': 'keys',
'path': keys_path,
'name': keys_name,
},
]
#Databases
dbs = [
'automation',
'epicast2',
'epidata',
'utils',
]
#Utils
def get_size(file):
return str(subprocess.check_output('du -h %s/%s'%(dest, file), shell=True), 'utf-8').split('\t')[0]
#Each directory is backed up to it's own archive (*.tgz)
#All databases are backed up at once to a single archive (*.sql.gz)
#This list of archives is used to create a final archive containing all the others
archives = []
#Backup directories
for dir in dirs:
print(' Directory: %s/%s'%(dir['path'], dir['name']))
if 'exclude' not in dir or dir['exclude'] is None:
exclude = ''
else:
exclude = '--exclude %s'%(dir['exclude'])
file = 'backup_%s_%s.tgz'%(tag, dir['label'])
subprocess.check_call('tar -czf %s/%s -C %s %s %s'%(dest, file, dir['path'], exclude, dir['name']), shell=True)
print(' %s'%(get_size(file)))
archives.append(file)
#Backup databases
list = ' '.join(dbs)
print(' Databases: %s'%(list))
file = 'backup_%s_database.sql'%(tag)
u, p = secrets.db.backup
subprocess.check_call('mysqldump --user=%s --password=%s --databases %s > %s/%s'%(u, p, list, dest, file), shell=True)
subprocess.check_call('gzip %s/%s'%(dest, file), shell=True)
file += '.gz'
print(' %s'%(get_size(file)))
archives.append(file)
#Create the final archive
print(' Building final archive')
file = 'backup_%s.tar'%(tag)
final_archive = '%s/%s'%(dest, file)
subprocess.check_call('tar -cf %s -C %s %s'%(final_archive, dest, ' '.join(archives)), shell=True)
print(' %s'%(get_size(file)))
#Delete the intermediate archives
for file in archives:
os.remove('%s/%s'%(dest, file))
#Send the backup to an external drive
subprocess.check_call('cp -v %s /mnt/usb2t/backups/'%(final_archive), shell=True)
# TODO: Send the backup offsite
#Success
print('Backup completed successfully!')
| """Makes backups of Delphi files and databases."""
# standard library
import datetime
import os
import subprocess
import time
# first party
import delphi.operations.secrets as secrets
#General setup
dest = '/home/automation/backups'
tag = datetime.datetime.today().strftime('%Y%m%d_%H%M%S')
print('Destination: %s | Tag: %s'%(dest, tag))
#Directories
keys_path, keys_name = os.path.split(secrets.apache.keys_dir)
dirs = [
{
'label': 'auto',
'path': '/home/automation',
'name': 'driver',
'exclude': 'flu_data',
}, {
'label': 'html',
'path': '/var/www',
'name': 'html',
}, {
'label': 'data',
'path': '/home/automation/driver',
'name': 'flu_data',
}, {
'label': 'keys',
'path': keys_path,
'name': keys_name,
},
]
#Databases
dbs = [
'automation',
'epicast2',
'epidata',
'utils',
]
#Utils
def get_size(file):
return str(subprocess.check_output('du -h %s/%s'%(dest, file), shell=True), 'utf-8').split('\t')[0]
#Each directory is backed up to it's own archive (*.tgz)
#All databases are backed up at once to a single archive (*.sql.gz)
#This list of archives is used to create a final archive containing all the others
archives = []
#Backup directories
for dir in dirs:
print(' Directory: %s/%s'%(dir['path'], dir['name']))
if 'exclude' not in dir or dir['exclude'] is None:
exclude = ''
else:
exclude = '--exclude %s'%(dir['exclude'])
file = 'backup_%s_%s.tgz'%(tag, dir['label'])
subprocess.check_call('tar -czf %s/%s -C %s %s %s'%(dest, file, dir['path'], exclude, dir['name']), shell=True)
print(' %s'%(get_size(file)))
archives.append(file)
#Backup databases
list = ' '.join(dbs)
print(' Databases: %s'%(list))
file = 'backup_%s_database.sql'%(tag)
u, p = secrets.db.backup
subprocess.check_call('mysqldump --user=%s --password=%s --databases %s > %s/%s'%(u, p, list, dest, file), shell=True)
subprocess.check_call('gzip %s/%s'%(dest, file), shell=True)
file += '.gz'
print(' %s'%(get_size(file)))
archives.append(file)
#Create the final archive
print(' Building final archive')
file = 'backup_%s.tar'%(tag)
final_archive = '%s/%s'%(dest, file)
subprocess.check_call('tar -cf %s -C %s %s'%(final_archive, dest, ' '.join(archives)), shell=True)
print(' %s'%(get_size(file)))
#Delete the intermediate archives
for file in archives:
os.remove('%s/%s'%(dest, file))
#Send the backup to an external drive
subprocess.check_call('cp -v %s /mnt/usb500g/backups/'%(final_archive), shell=True)
# TODO: Send the backup offsite
#Success
print('Backup completed successfully!')
| mit | Python |
2c0947fcba8e260e01420182fe0da307af5a8426 | Rename sys modules access to web module | thatsIch/sublime-rainmeter | tests/test_online_checker.py | tests/test_online_checker.py | """This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.web.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
| """This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.http.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
| mit | Python |
a4ade93206171559fa2558a70635d336d0c4b9c8 | Fix docker build invocation. | markrcote/pulseguardian,mozilla/pulseguardian,markrcote/pulseguardian,mozilla/pulseguardian,markrcote/pulseguardian,mozilla/pulseguardian,mozilla/pulseguardian,markrcote/pulseguardian | test/docker_setup.py | test/docker_setup.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from subprocess import call, Popen, PIPE
def create_image():
create_image_command = 'docker build -t=pulse:testing test'
call(create_image_command.split(' '))
def delete_image():
delete_image_pulse_command = 'docker rmi pulse:testing'
delete_image_ubuntu_command = 'docker rmi ubuntu:14.04'
call(delete_image_pulse_command.split(' '))
call(delete_image_ubuntu_command.split(' '))
def setup_container():
setup_command = 'docker run -d -p 5672:5672 -p 15672:15672 --name pulse pulse:testing'
call(setup_command.split(' '))
def teardown_container():
stop_command = 'docker stop pulse'
remove_command = 'docker rm pulse'
call(stop_command.split(' '))
call(remove_command.split(' '))
def check_rabbitmq():
string_to_check = 'Starting broker... completed'
get_logs_command = 'docker logs pulse'
logs_output = Popen(get_logs_command.split(' '),
stdout=PIPE).communicate()[0]
return string_to_check in logs_output
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from subprocess import call, Popen, PIPE
def create_image():
create_image_command = 'docker build -t="pulse:testing" test'
call(create_image_command.split(' '))
def delete_image():
delete_image_pulse_command = 'docker rmi pulse:testing'
delete_image_ubuntu_command = 'docker rmi ubuntu:14.04'
call(delete_image_pulse_command.split(' '))
call(delete_image_ubuntu_command.split(' '))
def setup_container():
setup_command = 'docker run -d -p 5672:5672 -p 15672:15672 --name pulse pulse:testing'
call(setup_command.split(' '))
def teardown_container():
stop_command = 'docker stop pulse'
remove_command = 'docker rm pulse'
call(stop_command.split(' '))
call(remove_command.split(' '))
def check_rabbitmq():
string_to_check = 'Starting broker... completed'
get_logs_command = 'docker logs pulse'
logs_output = Popen(get_logs_command.split(' '),
stdout=PIPE).communicate()[0]
return string_to_check in logs_output
| mpl-2.0 | Python |
dbf43ddd75a9c634eb00c8cf548d32b782baa9af | correct 404 handling | nbio/nbio-django | views.py | views.py | __license__ = "Apache 2.0"
__copyright__ = "Copyright 2008 nb.io"
__author__ = "Randy Reddig - ydnar@nb.io"
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.http import HttpResponse, Http404
from django.template import loader
import os.path
import re
DEFAULT_CONTENT_TYPE = 'text/html'
def null():
return
def auto(request, **kwargs):
try:
t = kwargs['template']
except:
raise Http404
c = RequestContext(request)
return HttpResponse(t.render(c), DEFAULT_CONTENT_TYPE)
| __license__ = "Apache 2.0"
__copyright__ = "Copyright 2008 nb.io"
__author__ = "Randy Reddig - ydnar@nb.io"
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.http import HttpResponse, Http404
from django.template import loader
import os.path
import re
DEFAULT_CONTENT_TYPE = 'text/html'
def null():
return
def auto(request, **kwargs):
t = kwargs['template']
if not t:
raise Http404
c = RequestContext(request)
return HttpResponse(t.render(c), DEFAULT_CONTENT_TYPE)
| apache-2.0 | Python |
ca9f4391c0bb0d0484ffce822f11a938a2e5f1d8 | Add to records queue as they come in from the vehicle. | openxc/web-logging-example,openxc/web-logging-example | views.py | views.py | import json
from collections import deque
from flask import request, render_template
from flask import current_app as app, abort
from util import make_status_response, generate_filename, jsonify
RECORDS_QUEUE = deque(maxlen=100)
def _prime_records_queue(q):
with open(generate_filename(app.config), 'r') as trace_file:
for line in trace_file:
if len(RECORDS_QUEUE) == RECORDS_QUEUE.maxlen:
break
timestamp, record = line.split(':', 1)
record = json.loads(record)
RECORDS_QUEUE.append(record)
def add_record():
if not request.json:
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
records = request.json.get('records', None)
if records is None or not hasattr(records, '__iter__'):
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
with open(generate_filename(app.config), 'a') as trace_file:
for record in records:
timestamp = record.pop('timestamp')
trace_file.write("%s: %s\r\n" % (timestamp, json.dumps(record)))
record['timestamp'] = timestamp
RECORDS_QUEUE.append(record)
return make_status_response(201)
def show_records():
_prime_records_queue(RECORDS_QUEUE)
return jsonify(records=list(RECORDS_QUEUE))
def visualization():
return render_template('visualization.html')
| import json
from collections import deque
from flask import request, render_template
from flask import current_app as app, abort
from util import make_status_response, generate_filename, jsonify
RECORDS_QUEUE = deque(maxlen=100)
def _prime_records_queue(q):
with open(generate_filename(app.config), 'r') as trace_file:
for line in trace_file:
if len(RECORDS_QUEUE) == RECORDS_QUEUE.maxlen:
break
timestamp, record = line.split(':', 1)
record = json.loads(record)
RECORDS_QUEUE.append(record)
def add_record():
if not request.json:
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
records = request.json.get('records', None)
if records is None or not hasattr(records, '__iter__'):
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
with open(generate_filename(app.config), 'a') as trace_file:
for record in records:
timestamp = record.pop('timestamp')
trace_file.write("%s: %s\r\n" % (timestamp, json.dumps(record)))
return make_status_response(201)
def show_records():
_prime_records_queue(RECORDS_QUEUE)
return jsonify(records=list(RECORDS_QUEUE))
def visualization():
return render_template('visualization.html')
| bsd-3-clause | Python |
1c6480a9b46acb3966af7fe5fb4146aa99335756 | Fix the test script path in the awesome JSON script | quicktype/quicktype,quicktype/quicktype,quicktype/quicktype,quicktype/quicktype,quicktype/quicktype | test/test-awesome.py | test/test-awesome.py | #!/usr/bin/env python3
from urllib.request import urlopen
from urllib.error import URLError
import hashlib
import os
import json
import subprocess
test_dir = os.path.dirname(os.path.realpath(__file__))
list_file = os.path.join(test_dir, "awesome-json-datasets")
outdir = os.path.join(test_dir, "awesome-json-results")
os.chdir(os.path.join(test_dir, ".."))
with open(list_file) as f:
urls = [u.rstrip() for u in f.readlines()]
if not os.path.exists(outdir):
os.makedirs(outdir)
for url in urls:
digest = hashlib.new("sha1")
digest.update(url.encode("utf-8"))
filename = os.path.join(outdir, digest.hexdigest() + ".json")
if os.path.exists(filename):
continue
try:
resp = urlopen(url)
content_type = resp.getheader("Content-Type")
if not (content_type == "application/json" or content_type.startswith("application/json;") or content_type == "text/plain"):
print("%s: wrong content type %s" % (url, content_type))
continue
data = resp.read()
except:
print("%s: could not fetch - skipping" % url)
continue
try:
json.loads(data)
except:
print("%s: not valid JSON - skipping" % url)
with open(filename, 'wb') as f:
f.write(data)
print("%s: %d bytes" % (filename, len(data)))
subprocess.call(["node", "test/test.js", "test/awesome-json-results"])
| #!/usr/bin/env python3
from urllib.request import urlopen
from urllib.error import URLError
import hashlib
import os
import json
import subprocess
test_dir = os.path.dirname(os.path.realpath(__file__))
list_file = os.path.join(test_dir, "awesome-json-datasets")
outdir = os.path.join(test_dir, "awesome-json-results")
os.chdir(os.path.join(test_dir, ".."))
with open(list_file) as f:
urls = [u.rstrip() for u in f.readlines()]
if not os.path.exists(outdir):
os.makedirs(outdir)
for url in urls:
digest = hashlib.new("sha1")
digest.update(url.encode("utf-8"))
filename = os.path.join(outdir, digest.hexdigest() + ".json")
if os.path.exists(filename):
continue
try:
resp = urlopen(url)
content_type = resp.getheader("Content-Type")
if not (content_type == "application/json" or content_type.startswith("application/json;") or content_type == "text/plain"):
print("%s: wrong content type %s" % (url, content_type))
continue
data = resp.read()
except:
print("%s: could not fetch - skipping" % url)
continue
try:
json.loads(data)
except:
print("%s: not valid JSON - skipping" % url)
with open(filename, 'wb') as f:
f.write(data)
print("%s: %d bytes" % (filename, len(data)))
subprocess.call(["node", "bin/test.js", "test/awesome-json-results"])
| apache-2.0 | Python |
59dd2d1b8db23fd00058f343317f6eaa5e18618b | Fix UnknownLocaleError on pt-BR | indictranstech/Das_frappe,vjFaLk/frappe,RicardoJohann/frappe,praba230890/frappe,sbkolate/sap_frappe_v6,bohlian/frappe,hatwar/buyback-frappe,indictranstech/fbd_frappe,chdecultot/frappe,sbktechnology/sap_frappe,paurosello/frappe,indictranstech/omnitech-frappe,tundebabzy/frappe,yashodhank/frappe,gangadharkadam/saloon_frappe_install,chdecultot/frappe,saguas/frappe,indictranstech/ebuy-now-frappe,sbktechnology/trufil-frappe,yashodhank/frappe,almeidapaulopt/frappe,neilLasrado/frappe,RicardoJohann/frappe,reachalpineswift/frappe-bench,indictranstech/internal-frappe,sbktechnology/sap_frappe,ShashaQin/frappe,gangadharkadam/saloon_frappe_install,sbktechnology/trufil-frappe,ESS-LLP/frappe,ESS-LLP/frappe,RicardoJohann/frappe,shitolepriya/test-frappe,gangadharkadam/vlinkfrappe,jevonearth/frappe,indictranstech/osmosis-frappe,vCentre/vFRP-6233,elba7r/builder,mbauskar/helpdesk-frappe,gangadharkadam/vlinkfrappe,vqw/frappe,indictranstech/internal-frappe,indictranstech/reciphergroup-frappe,sbktechnology/sap_frappe,elba7r/frameworking,rohitwaghchaure/frappe,hatwar/buyback-frappe,pombredanne/frappe,aboganas/frappe,vjFaLk/frappe,shitolepriya/test-frappe,indictranstech/omnitech-frappe,maxtorete/frappe,aboganas/frappe,gangadharkadam/saloon_frappe,gangadharkadam/v6_frappe,indictranstech/trufil-frappe,indictranstech/osmosis-frappe,drukhil/frappe,praba230890/frappe,elba7r/frameworking,nerevu/frappe,MaxMorais/frappe,mhbu50/frappe,Amber-Creative/amber-frappe,gangadhar-kadam/helpdesk-frappe,ESS-LLP/frappe,indictranstech/tele-frappe,hernad/frappe,mbauskar/omnitech-frappe,ashokrajbathu/secondrep,mbauskar/omnitech-demo-frappe,elba7r/frameworking,indictranstech/ebuy-now-frappe,mhbu50/frappe,sbkolate/sap_frappe_v6,indictranstech/Das_frappe,adityahase/frappe,gangadharkadam/vlinkfrappe,gangadharkadam/v6_frappe,mhbu50/frappe,mbauskar/omnitech-frappe,vqw/frappe,elba7r/builder,tmimori/frappe,mbauskar/tele-frappe,frappe/frappe,sagua
s/frappe,gangadhar-kadam/helpdesk-frappe,frappe/frappe,reachalpineswift/frappe-bench,indictranstech/omnitech-frappe,vqw/frappe,indictranstech/reciphergroup-frappe,jevonearth/frappe,chdecultot/frappe,saurabh6790/frappe,mbauskar/omnitech-demo-frappe,shitolepriya/test-frappe,indictranstech/ebuy-now-frappe,ShashaQin/frappe,indictranstech/ebuy-now-frappe,indictranstech/internal-frappe,gangadhar-kadam/helpdesk-frappe,reachalpineswift/frappe-bench,tmimori/frappe,saurabh6790/test-frappe,saurabh6790/test-frappe,indictranstech/tele-frappe,indictranstech/trufil-frappe,saguas/frappe,indictranstech/internal-frappe,adityahase/frappe,frappe/frappe,neilLasrado/frappe,drukhil/frappe,chdecultot/frappe,MaxMorais/frappe,paurosello/frappe,indautgrp/frappe,mbauskar/helpdesk-frappe,nerevu/frappe,indictranstech/frappe,indictranstech/trufil-frappe,gangadharkadam/frappecontribution,praba230890/frappe,maxtorete/frappe,rmehta/frappe,suyashphadtare/propshikhari-frappe,shitolepriya/test-frappe,hernad/frappe,neilLasrado/frappe,tundebabzy/frappe,bcornwellmott/frappe,MaxMorais/frappe,saurabh6790/frappe,mbauskar/frappe,suyashphadtare/propshikhari-frappe,bohlian/frappe,gangadharkadam/saloon_frappe_install,mbauskar/tele-frappe,indictranstech/frappe,indautgrp/frappe,sbkolate/sap_frappe_v6,anandpdoshi/frappe,manassolanki/frappe,bcornwellmott/frappe,saurabh6790/frappe,adityahase/frappe,indictranstech/osmosis-frappe,mbauskar/frappe,pombredanne/frappe,elba7r/frameworking,drukhil/frappe,nerevu/frappe,sbktechnology/trufil-frappe,aboganas/frappe,vjFaLk/frappe,mbauskar/omnitech-demo-frappe,tmimori/frappe,indictranstech/frappe,adityahase/frappe,indictranstech/trufil-frappe,paurosello/frappe,gangadharkadam/frappecontribution,Amber-Creative/amber-frappe,gangadharkadam/frappecontribution,vjFaLk/frappe,indictranstech/fbd_frappe,mbauskar/omnitech-demo-frappe,mbauskar/Das_frappe,vCentre/vFRP-6233,ShashaQin/frappe,pombredanne/frappe,sbktechnology/trufil-frappe,mbauskar/Das_frappe,bcornwellmott/frappe,indictranstech/Da
s_frappe,suyashphadtare/propshikhari-frappe,hernad/frappe,saguas/frappe,ashokrajbathu/secondrep,indictranstech/frappe,indictranstech/reciphergroup-frappe,mbauskar/tele-frappe,indictranstech/omnitech-frappe,elba7r/builder,gangadhar-kadam/helpdesk-frappe,mhbu50/frappe,suyashphadtare/propshikhari-frappe,manassolanki/frappe,mbauskar/helpdesk-frappe,saurabh6790/test-frappe,saurabh6790/test-frappe,sbktechnology/sap_frappe,tundebabzy/frappe,rmehta/frappe,mbauskar/frappe,maxtorete/frappe,rohitwaghchaure/frappe,gangadharkadam/vlinkfrappe,ashokrajbathu/secondrep,ShashaQin/frappe,hatwar/buyback-frappe,StrellaGroup/frappe,neilLasrado/frappe,indictranstech/fbd_frappe,hatwar/buyback-frappe,aboganas/frappe,MaxMorais/frappe,mbauskar/Das_frappe,maxtorete/frappe,mbauskar/Das_frappe,vCentre/vFRP-6233,tundebabzy/frappe,gangadharkadam/v6_frappe,indictranstech/tele-frappe,mbauskar/tele-frappe,vqw/frappe,saurabh6790/frappe,almeidapaulopt/frappe,paurosello/frappe,mbauskar/omnitech-frappe,gangadharkadam/frappecontribution,jevonearth/frappe,StrellaGroup/frappe,gangadharkadam/saloon_frappe,mbauskar/helpdesk-frappe,yashodhank/frappe,mbauskar/frappe,gangadharkadam/saloon_frappe_install,anandpdoshi/frappe,manassolanki/frappe,ESS-LLP/frappe,sbkolate/sap_frappe_v6,elba7r/builder,indautgrp/frappe,StrellaGroup/frappe,pombredanne/frappe,indictranstech/fbd_frappe,bohlian/frappe,rohitwaghchaure/frappe,rohitwaghchaure/frappe,ashokrajbathu/secondrep,bohlian/frappe,praba230890/frappe,vCentre/vFRP-6233,gangadharkadam/v6_frappe,mbauskar/omnitech-frappe,rmehta/frappe,RicardoJohann/frappe,yashodhank/frappe,anandpdoshi/frappe,rmehta/frappe,indictranstech/reciphergroup-frappe,manassolanki/frappe,hernad/frappe,gangadharkadam/saloon_frappe,indictranstech/osmosis-frappe,Amber-Creative/amber-frappe,nerevu/frappe,indictranstech/Das_frappe,indictranstech/tele-frappe,almeidapaulopt/frappe,reachalpineswift/frappe-bench,almeidapaulopt/frappe,Amber-Creative/amber-frappe,drukhil/frappe,anandpdoshi/frappe,tmimori/frappe,je
vonearth/frappe,gangadharkadam/saloon_frappe,indautgrp/frappe,bcornwellmott/frappe | frappe/geo/country_info.py | frappe/geo/country_info.py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# all country info
from __future__ import unicode_literals
import os, json, frappe
from frappe.utils.momentjs import get_all_timezones
def get_country_info(country=None):
data = get_all()
data = frappe._dict(data.get(country, {}))
if not 'date_format' in data:
data.date_format = "dd-mm-yyyy"
return data
def get_all():
with open(os.path.join(os.path.dirname(__file__), "country_info.json"), "r") as local_info:
all_data = json.loads(local_info.read())
return all_data
@frappe.whitelist()
def get_country_timezone_info():
return {
"country_info": get_all(),
"all_timezones": get_all_timezones()
}
def get_translated_dict():
from babel.dates import get_timezone, get_timezone_name, Locale
translated_dict = {}
locale = Locale(frappe.local.lang, sep="-")
# timezones
for tz in get_all_timezones():
timezone_name = get_timezone_name(get_timezone(tz), locale=frappe.local.lang, width='short')
if timezone_name:
translated_dict[tz] = timezone_name + ' - ' + tz
# country names && currencies
for country, info in get_all().items():
country_name = locale.territories.get((info.get("code") or "").upper())
if country_name:
translated_dict[country] = country_name
currency = info.get("currency")
currency_name = locale.currencies.get(currency)
if currency_name:
translated_dict[currency] = currency_name
return translated_dict
def update():
with open(os.path.join(os.path.dirname(__file__), "currency_info.json"), "r") as nformats:
nformats = json.loads(nformats.read())
all_data = get_all()
for country in all_data:
data = all_data[country]
data["number_format"] = nformats.get(data.get("currency", "default"),
nformats.get("default"))["display"]
with open(os.path.join(os.path.dirname(__file__), "country_info.json"), "w") as local_info:
local_info.write(json.dumps(all_data, indent=1))
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# all country info
from __future__ import unicode_literals
import os, json, frappe
from frappe.utils.momentjs import get_all_timezones
def get_country_info(country=None):
data = get_all()
data = frappe._dict(data.get(country, {}))
if not 'date_format' in data:
data.date_format = "dd-mm-yyyy"
return data
def get_all():
with open(os.path.join(os.path.dirname(__file__), "country_info.json"), "r") as local_info:
all_data = json.loads(local_info.read())
return all_data
@frappe.whitelist()
def get_country_timezone_info():
return {
"country_info": get_all(),
"all_timezones": get_all_timezones()
}
def get_translated_dict():
from babel.dates import get_timezone, get_timezone_name, Locale
translated_dict = {}
locale = Locale(frappe.local.lang)
# timezones
for tz in get_all_timezones():
timezone_name = get_timezone_name(get_timezone(tz), locale=frappe.local.lang, width='short')
if timezone_name:
translated_dict[tz] = timezone_name + ' - ' + tz
# country names && currencies
for country, info in get_all().items():
country_name = locale.territories.get((info.get("code") or "").upper())
if country_name:
translated_dict[country] = country_name
currency = info.get("currency")
currency_name = locale.currencies.get(currency)
if currency_name:
translated_dict[currency] = currency_name
return translated_dict
def update():
with open(os.path.join(os.path.dirname(__file__), "currency_info.json"), "r") as nformats:
nformats = json.loads(nformats.read())
all_data = get_all()
for country in all_data:
data = all_data[country]
data["number_format"] = nformats.get(data.get("currency", "default"),
nformats.get("default"))["display"]
with open(os.path.join(os.path.dirname(__file__), "country_info.json"), "w") as local_info:
local_info.write(json.dumps(all_data, indent=1))
| mit | Python |
ebcb55fe8ad4028bd6699fdc5db60f9dc7d7c6e4 | Add color to Slack notifications | rshk/freight,rshk/freight,klynton/freight,klynton/freight,klynton/freight,getsentry/freight,rshk/freight,klynton/freight,getsentry/freight,getsentry/freight,getsentry/freight,getsentry/freight,rshk/freight | freight/notifiers/slack.py | freight/notifiers/slack.py | from __future__ import absolute_import
__all__ = ['SlackNotifier']
import json
from freight import http
from freight.models import App, TaskStatus
from .base import Notifier, NotifierEvent
STATUS_TO_COLOR = {
'finished': '#2ab27b',
'failed': '#f43f20',
}
class SlackNotifier(Notifier):
def get_options(self):
return {
'webhook_url': {'required': True},
}
def send(self, task, config, event):
webhook_url = config['webhook_url']
app = App.query.get(task.app_id)
params = {
'number': task.number,
'app_name': app.name,
'task_name': task.name,
'env': task.environment,
'ref': task.ref,
'sha': task.sha[:7] if task.sha else task.ref,
'status_label': task.status_label,
'duration': task.duration,
'link': http.absolute_uri('/tasks/{}/{}/{}/'.format(app.name, task.environment, task.number)),
}
# TODO(dcramer): show the ref when it differs from the sha
if event == NotifierEvent.TASK_QUEUED:
title = "[{app_name}/{env}] Queued deploy <{link}|#{number}> ({sha})".format(**params)
elif event == NotifierEvent.TASK_STARTED:
title = "[{app_name}/{env}] Starting deploy <{link}|#{number}> ({sha})".format(**params)
elif task.status == TaskStatus.failed:
title = "[{app_name}/{env}] Failed to deploy <{link}|#{number}> ({sha}) after {duration}s".format(**params)
elif task.status == TaskStatus.cancelled:
title = "[{app_name}/{env}] Deploy <{link}|#{number}> ({sha}) was cancelled after {duration}s".format(**params)
elif task.status == TaskStatus.finished:
title = "[{app_name}/{env}] Successfully deployed <{link}|#{number}> ({sha}) after {duration}s".format(**params)
else:
raise NotImplementedError(task.status)
payload = {
'parse': 'none',
'attachments': [{
'fallback': title,
'title': title,
'color': STATUS_TO_COLOR.get(task.status),
}]
}
values = {'payload': json.dumps(payload)}
http.post(webhook_url, values)
| from __future__ import absolute_import
__all__ = ['SlackNotifier']
import json
from freight import http
from freight.models import App, TaskStatus
from .base import Notifier, NotifierEvent
class SlackNotifier(Notifier):
def get_options(self):
return {
'webhook_url': {'required': True},
}
def send(self, task, config, event):
webhook_url = config['webhook_url']
app = App.query.get(task.app_id)
params = {
'number': task.number,
'app_name': app.name,
'task_name': task.name,
'env': task.environment,
'ref': task.ref,
'sha': task.sha[:7] if task.sha else task.ref,
'status_label': task.status_label,
'duration': task.duration,
'link': http.absolute_uri('/tasks/{}/{}/{}/'.format(app.name, task.environment, task.number)),
}
# TODO(dcramer): show the ref when it differs from the sha
if event == NotifierEvent.TASK_QUEUED:
title = "[{app_name}/{env}] Queued deploy <{link}|#{number}> ({sha})".format(**params)
elif event == NotifierEvent.TASK_STARTED:
title = "[{app_name}/{env}] Starting deploy <{link}|#{number}> ({sha})".format(**params)
elif task.status == TaskStatus.failed:
title = "[{app_name}/{env}] Failed to deploy <{link}|#{number}> ({sha}) after {duration}s".format(**params)
elif task.status == TaskStatus.cancelled:
title = "[{app_name}/{env}] Deploy <{link}|#{number}> ({sha}) was cancelled after {duration}s".format(**params)
elif task.status == TaskStatus.finished:
title = "[{app_name}/{env}] Successfully deployed <{link}|#{number}> ({sha}) after {duration}s".format(**params)
else:
raise NotImplementedError(task.status)
payload = {
'parse': 'none',
'text': title,
}
values = {'payload': json.dumps(payload)}
http.post(webhook_url, values)
| apache-2.0 | Python |
52243269fc080af7094debc63121357035af9c82 | Make test/test_logging work on python 2.6 | multikatt/beets,moodboom/beets,parapente/beets,dfc/beets,madmouser1/beets,mosesfistos1/beetbox,YetAnotherNerd/beets,jcoady9/beets,ibmibmibm/beets,sampsyo/beets,imsparsh/beets,kelvinhammond/beets,mathstuf/beets,shamangeorge/beets,moodboom/beets,sampsyo/beets,Freso/beets,PierreRust/beets,dfc/beets,MyTunesFreeMusic/privacy-policy,dfc/beets,tima/beets,parapente/beets,untitaker/beets,parapente/beets,arabenjamin/beets,PierreRust/beets,ruippeixotog/beets,marcuskrahl/beets,Andypsamp/CODfinalJUNIT,sampsyo/beets,mosesfistos1/beetbox,ttsda/beets,sadatay/beets,kareemallen/beets,mried/beets,xsteadfastx/beets,shanemikel/beets,diego-plan9/beets,randybias/beets,YetAnotherNerd/beets,swt30/beets,jcoady9/beets,lengtche/beets,gabrielaraujof/beets,m-urban/beets,PierreRust/beets,Andypsamp/CODfinalJUNIT,pkess/beets,asteven/beets,ruippeixotog/beets,tima/beets,ibmibmibm/beets,tima/beets,pkess/beets,jmwatte/beets,gabrielaraujof/beets,Andypsamp/CODfinalJUNIT,Andypsamp/CODjunit,kelvinhammond/beets,shamangeorge/beets,multikatt/beets,lengtche/beets,mosesfistos1/beetbox,Andypsamp/CODjunit,sadatay/beets,swt30/beets,xsteadfastx/beets,shanemikel/beets,jmwatte/beets,untitaker/beets,mathstuf/beets,Andypsamp/CODfinalJUNIT,marcuskrahl/beets,Kraymer/beets,randybias/beets,Andypsamp/CODjunit,multikatt/beets,LordSputnik/beets,beetbox/beets,randybias/beets,artemutin/beets,madmouser1/beets,drm00/beets,jackwilsdon/beets,asteven/beets,imsparsh/beets,multikatt/beets,artemutin/beets,imsparsh/beets,imsparsh/beets,jackwilsdon/beets,andremiller/beets,mosesfistos1/beetbox,arabenjamin/beets,kareemallen/beets,Dishwishy/beets,xsteadfastx/beets,pkess/beets,mathstuf/beets,ttsda/beets,gabrielaraujof/beets,marcuskrahl/beets,andremiller/beets,Dishwishy/beets,andremiller/beets,diego-plan9/beets,arabenjamin/beets,arabenjamin/beets,drm00/beets,untitaker/beets,m-urban/beets,SusannaMaria/beets,LordSputnik/beets,ttsda/beets,Andypsamp/CODjunit,Kr
aymer/beets,Kraymer/beets,ibmibmibm/beets,Dishwishy/beets,pkess/beets,LordSputnik/beets,jackwilsdon/beets,m-urban/beets,madmouser1/beets,asteven/beets,jmwatte/beets,marcuskrahl/beets,SusannaMaria/beets,drm00/beets,shanemikel/beets,swt30/beets,sampsyo/beets,shamangeorge/beets,diego-plan9/beets,MyTunesFreeMusic/privacy-policy,ibmibmibm/beets,lightwang1/beets,artemutin/beets,asteven/beets,tima/beets,YetAnotherNerd/beets,Andypsamp/CODjunit,Freso/beets,Dishwishy/beets,shamangeorge/beets,sadatay/beets,mried/beets,mried/beets,artemutin/beets,swt30/beets,mried/beets,moodboom/beets,Andypsamp/CODfinalJUNIT,MyTunesFreeMusic/privacy-policy,jcoady9/beets,jackwilsdon/beets,drm00/beets,m-urban/beets,kelvinhammond/beets,randybias/beets,shanemikel/beets,lengtche/beets,lightwang1/beets,parapente/beets,xsteadfastx/beets,madmouser1/beets,ttsda/beets,kareemallen/beets,PierreRust/beets,lightwang1/beets,Freso/beets,Kraymer/beets,MyTunesFreeMusic/privacy-policy,untitaker/beets,ruippeixotog/beets,beetbox/beets,beetbox/beets,kelvinhammond/beets,gabrielaraujof/beets,jmwatte/beets,moodboom/beets,LordSputnik/beets,SusannaMaria/beets,Freso/beets,dfc/beets,ruippeixotog/beets,mathstuf/beets,beetbox/beets,diego-plan9/beets,sadatay/beets,lengtche/beets,jcoady9/beets,SusannaMaria/beets,lightwang1/beets,YetAnotherNerd/beets,kareemallen/beets | test/test_logging.py | test/test_logging.py | """Stupid tests that ensure logging works as expected"""
import logging as log
from StringIO import StringIO
import beets.logging as blog
from _common import unittest, TestCase
class LoggingTest(TestCase):
def test_logging_management(self):
l1 = log.getLogger("foo123")
l2 = blog.getLogger("foo123")
self.assertEqual(l1, l2)
self.assertEqual(type(l1), log.Logger)
l3 = blog.getLogger("bar123")
l4 = log.getLogger("bar123")
self.assertEqual(l3, l4)
self.assertEqual(type(l3), blog.StrFormatLogger)
l5 = l3.getChild("shalala")
self.assertEqual(type(l5), blog.StrFormatLogger)
def test_str_format_logging(self):
l = blog.getLogger("baz123")
stream = StringIO()
handler = log.StreamHandler(stream)
l.addHandler(handler)
l.propagate = False
l.warning("foo {} {bar}", "oof", bar="baz")
handler.flush()
self.assertTrue(stream.getvalue(), "foo oof baz")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| """Stupid tests that ensure logging works as expected"""
import logging as log
from StringIO import StringIO
import beets.logging as blog
from _common import unittest, TestCase
class LoggingTest(TestCase):
def test_logging_management(self):
l1 = log.getLogger("foo123")
l2 = blog.getLogger("foo123")
self.assertEqual(l1, l2)
self.assertEqual(type(l1), log.Logger)
l3 = blog.getLogger("bar123")
l4 = log.getLogger("bar123")
self.assertEqual(l3, l4)
self.assertEqual(type(l3), blog.StrFormatLogger)
l5 = l3.getChild("shalala")
self.assertEqual(type(l5), blog.StrFormatLogger)
def test_str_format_logging(self):
l = blog.getLogger("baz123")
stream = StringIO()
handler = log.StreamHandler(stream)
l.addHandler(handler)
l.propagate = False
l.warning("foo {} {bar}", "oof", bar="baz")
handler.flush()
self.assertTrue(stream.getvalue(), "foo oof baz")
if __name__ == '__main__':
unittest.main()
| mit | Python |
c640143c726f63c09a721247bdca1ae5c9ed847a | fix byte+str error on python3.x | hhatto/poyonga | test/test_poyonga.py | test/test_poyonga.py | import struct
import unittest
from mock import patch, Mock
from poyonga import Groonga, GroongaResult
class PoyongaHTTPTestCase(unittest.TestCase):
def setUp(self):
self.g = Groonga()
@patch('poyonga.client.urlopen')
def test_json_result_with_http(self, mock_urlopen):
m = Mock()
m.read.side_effect = ['[[0, 1337566253.89858, 0.000354], {}]']
mock_urlopen.return_value = m
ret = self.g.call('status')
self.assertEqual(type(ret), GroongaResult)
self.assertEqual(ret.status, 0)
class PoyongaGQTPTestCase(unittest.TestCase):
def setUp(self):
self.g = Groonga(protocol='gqtp')
@patch('poyonga.client.socket.socket')
def test_json_result_with_gqtp(self, mock_socket):
m = Mock()
_proto, _qtype, _keylen, _level, _flags, _status, _size, _opaque, _cas, _data = \
0xc7, 0x02, 0, 0, 0, 0, 2, 0, 0, "{}"
packdata = struct.pack("!BBHBBHIIQ2s",
_proto, _qtype, _keylen, _level, _flags, _status, _size, _opaque, _cas, _data)
m.recv.return_value = packdata
mock_socket.return_value = m
ret = self.g.call('status')
self.assertEqual(type(ret), GroongaResult)
self.assertEqual(ret.status, 0)
if __name__ == '__main__':
unittest.main()
| import struct
import unittest
from mock import patch, Mock
from poyonga import Groonga, GroongaResult
class PoyongaHTTPTestCase(unittest.TestCase):
def setUp(self):
self.g = Groonga()
@patch('poyonga.client.urlopen')
def test_json_result_with_http(self, mock_urlopen):
m = Mock()
m.read.side_effect = ['[[0, 1337566253.89858, 0.000354], {}]']
mock_urlopen.return_value = m
ret = self.g.call('status')
self.assertEqual(type(ret), GroongaResult)
self.assertEqual(ret.status, 0)
class PoyongaGQTPTestCase(unittest.TestCase):
def setUp(self):
self.g = Groonga(protocol='gqtp')
@patch('poyonga.client.socket.socket')
def test_json_result_with_gqtp(self, mock_socket):
m = Mock()
_proto, _qtype, _keylen, _level, _flags, _status, _size, _opaque, _cas = \
0xc7, 0x02, 0, 0, 0, 0, 2, 0, 0
packdata = struct.pack("!BBHBBHIIQ",
_proto, _qtype, _keylen, _level, _flags, _status, _size, _opaque, _cas)
m.recv.return_value = packdata + "{}"
mock_socket.return_value = m
ret = self.g.call('status')
self.assertEqual(type(ret), GroongaResult)
self.assertEqual(ret.status, 0)
print(dir(ret))
print(ret.raw_result)
if __name__ == '__main__':
unittest.main()
| mit | Python |
3dc59aa4c999f477c45edb2dcb6c5b9302fd8ac7 | Remove useless compatible code | what-studio/profiling,what-studio/profiling | test/test_tracing.py | test/test_tracing.py | # -*- coding: utf-8 -*-
import sys
import pytest
import six
from _utils import factorial, find_stats, foo
from profiling.stats import RecordingStatistics
from profiling.tracing import TracingProfiler
def test_setprofile():
profiler = TracingProfiler()
assert sys.getprofile() is None
with profiler:
assert sys.getprofile() == profiler._profile
assert sys.getprofile() is None
sys.setprofile(lambda *x: x)
with pytest.raises(RuntimeError):
profiler.start()
sys.setprofile(None)
def test_profile():
profiler = TracingProfiler()
frame = foo()
profiler._profile(frame, 'call', None)
profiler._profile(frame, 'return', None)
assert len(profiler.stats) == 1
stats1 = find_stats(profiler.stats, 'foo')
stats2 = find_stats(profiler.stats, 'bar')
stats3 = find_stats(profiler.stats, 'baz')
assert stats1.own_hits == 0
assert stats2.own_hits == 0
assert stats3.own_hits == 1
assert stats1.deep_hits == 1
assert stats2.deep_hits == 1
assert stats3.deep_hits == 1
def test_profiler():
profiler = TracingProfiler(base_frame=sys._getframe())
assert isinstance(profiler.stats, RecordingStatistics)
stats, cpu_time, wall_time = profiler.result()
assert len(stats) == 0
with profiler:
factorial(1000)
factorial(10000)
stats1 = find_stats(profiler.stats, 'factorial')
stats2 = find_stats(profiler.stats, '__enter__')
stats3 = find_stats(profiler.stats, '__exit__')
assert stats1.deep_time != 0
assert stats1.deep_time == stats1.own_time
assert stats1.own_time > stats2.own_time
assert stats1.own_time > stats3.own_time
assert stats1.own_hits == 2
assert stats2.own_hits == 0 # entering to __enter__() wasn't profiled.
assert stats3.own_hits == 1
| # -*- coding: utf-8 -*-
import sys
import pytest
import six
from _utils import factorial, find_stats, foo
from profiling.stats import RecordingStatistics
from profiling.tracing import TracingProfiler
if six.PY3:
map = lambda *x: list(six.moves.map(*x))
def test_setprofile():
profiler = TracingProfiler()
assert sys.getprofile() is None
with profiler:
assert sys.getprofile() == profiler._profile
assert sys.getprofile() is None
sys.setprofile(lambda *x: x)
with pytest.raises(RuntimeError):
profiler.start()
sys.setprofile(None)
def test_profile():
profiler = TracingProfiler()
frame = foo()
profiler._profile(frame, 'call', None)
profiler._profile(frame, 'return', None)
assert len(profiler.stats) == 1
stats1 = find_stats(profiler.stats, 'foo')
stats2 = find_stats(profiler.stats, 'bar')
stats3 = find_stats(profiler.stats, 'baz')
assert stats1.own_hits == 0
assert stats2.own_hits == 0
assert stats3.own_hits == 1
assert stats1.deep_hits == 1
assert stats2.deep_hits == 1
assert stats3.deep_hits == 1
def test_profiler():
profiler = TracingProfiler(base_frame=sys._getframe())
assert isinstance(profiler.stats, RecordingStatistics)
stats, cpu_time, wall_time = profiler.result()
assert len(stats) == 0
with profiler:
factorial(1000)
factorial(10000)
stats1 = find_stats(profiler.stats, 'factorial')
stats2 = find_stats(profiler.stats, '__enter__')
stats3 = find_stats(profiler.stats, '__exit__')
assert stats1.deep_time != 0
assert stats1.deep_time == stats1.own_time
assert stats1.own_time > stats2.own_time
assert stats1.own_time > stats3.own_time
assert stats1.own_hits == 2
assert stats2.own_hits == 0 # entering to __enter__() wasn't profiled.
assert stats3.own_hits == 1
| bsd-3-clause | Python |
aa4d4cdfb646a53c59790d29066fddca6fec688b | Fix and extend tests | zamiam69/otc | tests/00_test_otc.py | tests/00_test_otc.py | import otc
def test_shade_works():
"""Test shade works"""
cloud = otc.OtcCloud(cloud='test')
def test_otc():
"""Basic Test"""
cloud = otc.OtcCloud(cloud='test')
images = cloud.list_images()
assert len(images) >= 0
def test_otcclient():
"""Test otcclient"""
cloud = otc.OtcCloud(cloud='test')
vpcs = cloud.otcclient.vpcs()
assert len(vpcs) >= 0
def test_elb():
"""Test otcclient"""
cloud = otc.OtcCloud(cloud='test')
elbs = cloud.otcclient.elbs()
assert len(elbs) >= 0
def test_listeners():
"""Test otcclient"""
cloud = otc.OtcCloud(cloud='test')
listeners = cloud.otcclient.elb_listeners()
assert len(listeners) >= 0
# vim: sts=4 sw=4 ts=4 et:
| import otc
def test_shade_works():
"""Test shade works"""
cloud = otc.OTCCloud()
def test_otc():
"""Basic Test"""
cloud = otc.OTCCloud()
images = cloud.list_images()
assert len(images) >= 0
# vim: sts=4 sw=4 ts=4 et:
| apache-2.0 | Python |
2046a04c9f6dbba4d1f327b1202a36624dc2e8f5 | Update main.py | samshadwell/TrumpScript,samshadwell/TrumpScript | src/trumpscript/main.py | src/trumpscript/main.py | import argparse
import os
import sys
from trumpscript.compiler import *
from trumpscript.utils import *
__author__ = 'github.com/samshadwell'
def main():
parser = argparse.ArgumentParser(prog='TRUMP', description='Making programming great again')
parser.add_argument('--Wall', action='store_true', help='If set, prevents running program from Mexican locales')
parser.add_argument('--shut-up', action='store_true', help='If set, ignore all system warnings and run program. '
'Overrides --Wall')
parser.add_argument('program', nargs=1, help='TrumpScript program to run')
args = parser.parse_args()
if not os.path.isfile(args.program[0]):
print("Invalid file specified,")
return
# Decide whether to ignore system warnings
if not args.shut_up:
Utils.verify_system(args.Wall)
# Compile and go
Compiler().compile(sys.argv[-1])
if __name__ == "__main__":
main()
| import argparse
import os
import sys
from trumpscript.compiler import *
from trumpscript.utils import *
__author__ = 'github.com/samshadwell'
def main():
parser = argparse.ArgumentParser(prog='TRUMP', description='Making programming great again')
parser.add_argument('--Wall', action='store_true', help='If set, prevents running program from Mexican locales')
parser.add_argument('--shut_up', action='store_true', help='If set, ignore all system warnings and run program. '
'Overrides --Wall')
parser.add_argument('program', nargs=1, help='TrumpScript program to run')
args = parser.parse_args()
if not os.path.isfile(args.program[0]):
print("Invalid file specified,")
return
# Decide whether to ignore system warnings
if not args.shut_up:
Utils.verify_system(args.Wall)
# Compile and go
Compiler().compile(sys.argv[-1])
if __name__ == "__main__":
main()
| mit | Python |
c8b2327cae9a04a97cfd051fe37a47c94459a5b3 | fix test docstring | infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore | tests/font/BULLET.py | tests/font/BULLET.py | #!/usr/bin/env python
'''Test that rendering of bullet glyphs works.
You should see 5 bullet glyphs rendered in the bottom-left of the window.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import unittest
from pyglet import font
import base_text
class TEST_HALIGN(base_text.TextTestBase):
font_name = ''
font_size = 60
text = u'\u2022'*5
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
'''Test that font.Text horizontal alignment works.
Three labels will be rendered aligned left, center and right.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import unittest
from pyglet import font
import base_text
class TEST_HALIGN(base_text.TextTestBase):
font_name = ''
font_size = 60
text = u'\u2022'*5
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
c309be03dba1c257275cceae9bf383105629fbf6 | add comment | ckaus/EpiPy | src/utils/csvmanager.py | src/utils/csvmanager.py | # -*- coding: utf-8 -*-
import os.path
import csv
import logger
current_dir = os.path.abspath(os.path.dirname(__file__))
resources_dir = os.path.abspath(current_dir + "/../../resources/data")
class Template:
"""Valid CSV templates"""
from collections import OrderedDict
# python sort dictionary keys, thats why I use OrderedDict
# http://stackoverflow.com/questions/1867861/python-dictionary-keep-keys-values-in-same-order-as-declared
SIR = OrderedDict([("Time", []), ("Suspectable", []), ("Infected", []), ("Recovered", [])])
def _check_header_fields(header, template):
"""
This function checks if the header match the template.
:param header: a CSV header
:param type: list of str
:param template: a CSV template
:param type: *Template*
:raises: *Error* if header fields not match template
:returns True if the header match the template header:
"""
for h in header:
if h not in template.keys():
logger.error('Given header field %s not exist in template: %s'
% (h, template))
return False
return True
def read(file_name, template, header_fields, seperator=';'):
"""
This function reads a CSV file.
:param file_name: a file name
:param type: str
:param template: a CSV template
:param type: *Template*
:param: header_fields: header fields must match *Template*
:param type: list of str
:param seperator: a delimiter
:param type: str
:returns: the content of CSV as *Dictionary*
:raises: *Error* if csv file cannot read
Example:
data = csvmanager.read(file_name='data1.csv',
template=csvmanager.Template.SIR,
seperator=';',
header_fields=["Time","Recovered"])
"""
result = {}
try:
# read input file
file = open(resources_dir+"/"+file_name, "rb")
reader = csv.reader(file, delimiter=seperator)
result = {}
header = reader.next()
if len(header_fields) > 0:
# use given header fields as header
header = header_fields
if _check_header_fields(header,template):
# header
for h in header:
result[h] = []
# content
reader.next() # jump to content of csv
for row in reader:
for h in header:
# match content with header
result[h].append(row[template.keys().index(h)])
return result
except csv.Error as e:
logger.error("Can not read file %s, %s" % (filename, e)) | # -*- coding: utf-8 -*-
import os.path
import csv
import logger
current_dir = os.path.abspath(os.path.dirname(__file__))
resources_dir = os.path.abspath(current_dir + "/../../resources/data")
class Template:
"""Valid CSV templates"""
from collections import OrderedDict
# python sort dictionary keys, thats why I use OrderedDict
# http://stackoverflow.com/questions/1867861/python-dictionary-keep-keys-values-in-same-order-as-declared
SIR = OrderedDict([("Time", []), ("Suspectable", []), ("Infected", []), ("Recovered", [])])
def _check_header_fields(header, template):
"""
This function checks if the header match the template.
:param header: a CSV header
:param type: list of str
:param template: a CSV template
:param type: *Template*
:returns True if the header match the template header:
"""
for h in header:
if h not in template.keys():
logger.error('Given header field %s not exist in template: %s'
% (h, template))
return False
return True
def read(file_name, template, header_fields, seperator=';'):
"""
This function reads a CSV file.
:param file_name: a file name
:param type: str
:param template: a CSV template
:param type: *Template*
:param: header_fields: header fields must match *Template*
:param type: list of str
:param seperator: a delimiter
:param type: str
:returns: the content of CSV as *Dictionary*
:raises: *Error* if csv file cannot read
or header fields not match template
Example:
data = csvmanager.read(file_name='data1.csv',
template=csvmanager.Template.SIR,
seperator=';',
header_fields=["Time","Recovered"])
"""
result = {}
try:
# read input file
file = open(resources_dir+"/"+file_name, "rb")
reader = csv.reader(file, delimiter=seperator)
result = {}
header = reader.next()
if len(header_fields) > 0:
# use given header fields as header
header = header_fields
if _check_header_fields(header,template):
# header
for h in header:
result[h] = []
# content
reader.next() # jump to content of csv
for row in reader:
for h in header:
# match content with header
result[h].append(row[template.keys().index(h)])
return result
except csv.Error as e:
logger.error("Can not read file %s, %s" % (filename, e)) | mit | Python |
465587e9a1c2dba14ea4f060a4f5b5d1be03ee54 | connect to mysql database | varnish/zipnish,varnish/zipnish,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/varnish-microservice-monitor | zipkin/zipkin-generate-false-data.py | zipkin/zipkin-generate-false-data.py | import time
import random
import MySQLdb
def ts_microseconds():
return int( time.time() * 1000000 )
def generate_id():
return random.getrandbits(64)
# reference to database connector
def get_db_connection():
return MySQLdb.connect('localhost', 'zipkin', 'kinect', 'zipkin')
db = get_db_connection()
print db
# print 'timestamp: ' + str(ts_microseconds())
# print 'random id: ' + str(generate_id())
| import time
import random
def ts_microseconds():
return int( time.time() * 1000000 )
def generate_id():
return random.getrandbits(64)
print 'timestamp: ' + str(ts_microseconds())
print 'random id: ' + str(generate_id())
| bsd-2-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.