commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
7787db13cb5e130c52cc192664ffdd49c3c1cd09 | add output URL | benofben/azure-resource-manager-dse,benofben/azure-resource-manager-dse | multidc/main.py | multidc/main.py | import json
import opsCenterNode
import dseNodes
# This python script generates an ARM template that deploys DSE across multiple datacenters.
with open('clusterParameters.json') as inputFile:
clusterParameters = json.load(inputFile)
locations = clusterParameters['locations']
vmSize = clusterParameters['vmSize']
nodeCount = clusterParameters['nodeCount']
adminUsername = clusterParameters['adminUsername']
adminPassword = clusterParameters['adminPassword']
nodeType = clusterParameters['nodeType']
# This is the skeleton of the template that we're going to add resources to
generatedTemplate = {
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {},
"variables": {},
"resources": [],
"outputs": {}
}
# Create DSE nodes in each location
for location in locations:
# This is the 1 in 10.1.0.0 and corresponds to the data center we are deploying to
# 10.0.x.y is reserved for the OpsCenter resources.
datacenterIndex = locations.index(location) + 1
resources = dseNodes.generate_template(location, datacenterIndex, vmSize, nodeCount, adminUsername, adminPassword)
generatedTemplate['resources'] += resources
# Create the OpsCenter node
resources = opsCenterNode.generate_template(clusterParameters)
generatedTemplate['resources'] += resources
with open('generatedTemplate.json', 'w') as outputFile:
json.dump(generatedTemplate, outputFile, sort_keys=True, indent=4, ensure_ascii=False)
def opsCenterURL():
return {
"opsCenterURL": {
"type": "string",
"value": "[concat('http://opsc', variables('uniqueString'), '.', " + locations[0] + ", '.cloudapp.azure.com:8888')]"
}
}
generatedTemplate['outputs'] += opsCenterURL()
| import json
import opsCenterNode
import dseNodes
# This python script generates an ARM template that deploys DSE across multiple datacenters.
with open('clusterParameters.json') as inputFile:
clusterParameters = json.load(inputFile)
locations = clusterParameters['locations']
vmSize = clusterParameters['vmSize']
nodeCount = clusterParameters['nodeCount']
adminUsername = clusterParameters['adminUsername']
adminPassword = clusterParameters['adminPassword']
nodeType = clusterParameters['nodeType']
# This is the skeleton of the template that we're going to add resources to
generatedTemplate = {
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {},
"variables": {},
"resources": [],
"outputs": {}
}
# Create DSE nodes in each location
for location in locations:
# This is the 1 in 10.1.0.0 and corresponds to the data center we are deploying to
# 10.0.x.y is reserved for the OpsCenter resources.
datacenterIndex = locations.index(location) + 1
resources = dseNodes.generate_template(location, datacenterIndex, vmSize, nodeCount, adminUsername, adminPassword)
generatedTemplate['resources'] += resources
# Create the OpsCenter node
resources = opsCenterNode.generate_template(clusterParameters)
generatedTemplate['resources'] += resources
with open('generatedTemplate.json', 'w') as outputFile:
json.dump(generatedTemplate, outputFile, sort_keys=True, indent=4, ensure_ascii=False)
# Populate the opsCenterURL in outputs
| mit | Python |
48749f16d2f51d0915614ad5c1c4033077d3cfa6 | make api require unique case number, not urn | ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas | api/v0/serializers.py | api/v0/serializers.py | from rest_framework import serializers
from apps.plea.models import Case, UsageStats, Offence
class OffenceSerializer(serializers.ModelSerializer):
class Meta:
model = Offence
exclude = ("case",)
class CaseSerializer(serializers.ModelSerializer):
case_number = serializers.CharField(required=True)
offences = OffenceSerializer(many=True)
class Meta:
model = Case
fields = ("offences", "urn", "title", "name", "forenames", "surname",
"case_number")
def create(self, validated_data):
# Create the case instance
offences = validated_data.pop("offences", [])
case = Case.objects.create(**validated_data)
# Create or update each page instance
for item in offences:
offence = Offence(**item)
offence.case = case
offence.save()
return case
def validate_case_number(self, value, key):
"""
Make sure case number is unique
"""
#import pdb; pdb.set_trace()
try:
Case.objects.get(case_number=value[key])
except (Case.DoesNotExist, Case.MultipleObjectsReturned ):
return value
else:
raise serializers.ValidationError("Case with this case number already exists")
class UsageStatsSerializer(serializers.ModelSerializer):
class Meta:
model = UsageStats | from rest_framework import serializers
from apps.plea.models import Case, UsageStats, Offence
class OffenceSerializer(serializers.ModelSerializer):
class Meta:
model = Offence
exclude = ("case",)
class CaseSerializer(serializers.ModelSerializer):
urn = serializers.RegexField("^\d{2}/[a-zA-Z]{2}/\d+/\d{2}$", max_length=16, min_length=14)\
offences = OffenceSerializer(many=True)
class Meta:
model = Case
fields = ("offences", "urn", "title", "name", "forenames", "surname",
"case_number")
def create(self, validated_data):
# Create the case instance
offences = validated_data.pop("offences", [])
case = Case.objects.create(**validated_data)
# Create or update each page instance
for item in offences:
offence = Offence(**item)
offence.case = case
offence.save()
return case
def validate(self, attrs):
if not Case.objects.can_use_urn(attrs["urn"]):
raise serializers.ValidationError("Case data already exists")
return attrs
class UsageStatsSerializer(serializers.ModelSerializer):
class Meta:
model = UsageStats | mit | Python |
f1048ee71cc5ffa6d38045e6931cb620216ead4d | Handle GET RESPONSE | LedgerHQ/blue-loader-python | ledgerblue/commTCP.py | ledgerblue/commTCP.py | """
*******************************************************************************
* Ledger Blue
* (c) 2019 Ledger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
********************************************************************************
"""
from .commException import CommException
from binascii import hexlify
import socket
import struct
class DongleServer(object):
def __init__(self, server, port, debug=False):
self.server = server
self.port = port
self.debug = debug
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.opened = True
try:
self.socket.connect((self.server, self.port))
except:
raise CommException("Proxy connection failed")
def exchange(self, apdu, timeout=20000):
def send_apdu(apdu):
if self.debug:
print("=> %s" % hexlify(apdu))
self.socket.send(struct.pack(">I", len(apdu)))
self.socket.send(apdu)
def get_data():
size = struct.unpack(">I", self.socket.recv(4))[0]
response = self.socket.recv(size)
sw = struct.unpack(">H", self.socket.recv(2))[0]
if self.debug:
print("<= %s%.2x" % (hexlify(response), sw))
return (sw, response)
send_apdu(apdu)
(sw, response) = get_data()
if sw != 0x9000:
# handle the get response case:
# When more data is available, the chip sends 0x61XX
# So 0x61xx as a SW must not be interpreted as an error
if (sw & 0xFF00) != 0x6100:
raise CommException("Invalid status %04x" % sw, sw)
else:
while (sw & 0xFF00) == 0x6100:
send_apdu(bytes.fromhex("00c0000000")) # GET RESPONSE
(sw, data) = get_data()
response += data
return bytearray(response)
def apduMaxDataSize(self):
return 240
def close(self):
try:
self.socket.close()
self.socket = None
except:
pass
self.opened = False
def getDongle(server="127.0.0.1", port=9999, debug=False):
return DongleServer(server, port, debug)
| """
*******************************************************************************
* Ledger Blue
* (c) 2019 Ledger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
********************************************************************************
"""
from .commException import CommException
from binascii import hexlify
import socket
import struct
class DongleServer(object):
def __init__(self, server, port, debug=False):
self.server = server
self.port = port
self.debug = debug
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.opened = True
try:
self.socket.connect((self.server, self.port))
except:
raise CommException("Proxy connection failed")
def exchange(self, apdu, timeout=20000):
if self.debug:
print("=> %s" % hexlify(apdu))
self.socket.send(struct.pack(">I", len(apdu)))
self.socket.send(apdu)
size = struct.unpack(">I", self.socket.recv(4))[0]
response = self.socket.recv(size)
sw = struct.unpack(">H", self.socket.recv(2))[0]
if self.debug:
print("<= %s%.2x" % (hexlify(response), sw))
if sw != 0x9000:
raise CommException("Invalid status %04x" % sw, sw)
return bytearray(response)
def apduMaxDataSize(self):
return 240
def close(self):
try:
self.socket.close()
self.socket = None
except:
pass
self.opened = False
def getDongle(server="127.0.0.1", port=9999, debug=False):
return DongleServer(server, port, debug)
| apache-2.0 | Python |
8316a60ba2887a511579e8cedb90b3a02fc1889a | Drop dashes from download urls. | mbr/dope,mbr/dope | dope/util.py | dope/util.py | from uuid import UUID
from werkzeug.routing import BaseConverter
class UUIDConverter(BaseConverter):
to_python = UUID
def to_url(self, obj):
return str(obj).replace('-', '')
| from uuid import UUID
from werkzeug.routing import BaseConverter
class UUIDConverter(BaseConverter):
to_python = UUID
to_url = str
| mit | Python |
75af5ca68ba2238bc8efb2a5f9cf2aa24276fb41 | add bcrypt password encryption | bmwachajr/bucketlist | application/models.py | application/models.py | from flask_sqlalchemy import SQLAlchemy
from application import app, db
from sqlalchemy.sql import func
import bcrypt
class User(db.Model):
""" Creates users on the system """
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
password = db.Column(db.String)
bucketlists = db.relationship('Bucketlist', backref='author', lazy='dynamic')
def set_password(self, password):
""" hash and set the new users password """
self.password = bcrypt.hashpw(password.encode(), bcrypt.gensalt())
def save(self):
""" Save a user into the database """
db.session.add(self)
db.session.commit()
def __repr__(self):
return '<User %r>' % self.username
class Bucketlist(db.Model):
""" creates bucket lists """
__tablename__ = 'bucketlists'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80))
created_by = db.Column(db.Integer, db.ForeignKey('users.email'))
date_created = db.Column(db.DateTime, server_default=func.now())
date_modified = db.Column(db.DateTime, server_onupdate=func.now())
items = db.relationship('Item', backref='bucketlist', lazy='dynamic')
def __repr__(self):
return '<Bucketlist %r>' % self.title
class Item(db.Model):
""" Creates bucketlist items """
__tablename__ = 'items'
id = db.Column(db.Integer, primary_key=True)
description = db.column(db.String(120))
is_done = False
date_created = db.Column(db.DateTime, server_default=func.now())
date_modified = db.Column(db.DateTime, server_onupdate=func.now())
bucketlist_id = db.Column(db.Integer, db.ForeignKey('bucketlists.id'))
def __repr__(self):
return '<item %r>' % self.title
| from flask_sqlalchemy import SQLAlchemy
from application import app, db
from sqlalchemy.sql import func
class User(db.Model):
""" Creates users on the system """
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
password = db.Column(db.String)
bucketlists = db.relationship('Bucketlist', backref='author', lazy='dynamic')
def set_password(self, password):
""" hash and set the new users password """
self.password = password
def save(self):
""" Save a user into the database """
db.session.add(self)
db.session.commit()
def __repr__(self):
return '<User %r>' % self.username
class Bucketlist(db.Model):
""" creates bucket lists """
__tablename__ = 'bucketlists'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80))
created_by = db.Column(db.Integer, db.ForeignKey('users.email'))
date_created = db.Column(db.DateTime, server_default=func.now())
date_modified = db.Column(db.DateTime, server_onupdate=func.now())
items = db.relationship('Item', backref='bucketlist', lazy='dynamic')
def __repr__(self):
return '<Bucketlist %r>' % self.title
class Item(db.Model):
""" Creates bucketlist items """
__tablename__ = 'items'
id = db.Column(db.Integer, primary_key=True)
description = db.column(db.String(120))
is_done = False
date_created = db.Column(db.DateTime, server_default=func.now())
date_modified = db.Column(db.DateTime, server_onupdate=func.now())
bucketlist_id = db.Column(db.Integer, db.ForeignKey('bucketlists.id'))
def __repr__(self):
return '<item %r>' % self.title
| mit | Python |
01099fc95bbce7119e71fe65d608a809e819552e | update domain model | apipanda/openssl,apipanda/openssl,apipanda/openssl,apipanda/openssl | apps/domain/models.py | apps/domain/models.py | from __future__ import unicode_literals, absolute_import
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from apps.certificate.models import Certificate
# Create your models here.
class Domain(models.Model):
domain_name = models.CharField(max_length=200)
domain_url = models.CharField(max_length=200)
domain_registrar = models.CharField(max_length=200)
support_email = models.EmailField(max_length=200)
tld = models.CharField(max_length=200)
slug = models.SlugField(max_length=100)
date_registered = models.DateField()
expiration_date = models.DateField()
last_updated = models.DateField(auto_now=True)
date_entered = models.DateField(auto_now_add=True)
verification_type = models.CharField(choices=settings.DOMAIN_VERIFICATION_OPTIONS, max_length=16)
is_active = models.BooleanField(default=True)
domain_certificate = models.ForeignKey(
Certificate, related_name='certified_domain')
admin = models.ForeignKey(User, related_name='domains')
def get_webmaster(self):
return self.admin
@property
def is_expired(self):
return True if self.expiration_date > self.date_registered else False
def save(self, *args, **kwargs):
if not self.slug:
self.slug = (self.domain_name + self.tld).replace('.', '-')
return super(Domain, self).save(*args, **kwargs) | from __future__ import unicode_literals, absolute_import
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from apps.certificate.models import Certificate
# Create your models here.
class Domain(models.Model):
domain_name = models.CharField(max_length=200)
domain_url = models.CharField(max_length=200)
domain_registerer = models.CharField(max_length=200)
support_email = models.EmailField(max_length=200)
tld = models.CharField(max_length=200)
slug = models.SlugField(max_length=100)
date_registered = models.DateField()
expiration_date = models.DateField()
last_updated = models.DateField(auto_now=True)
date_entered = models.DateField(auto_now_add=True)
verification_type = models.CharField(choices=settings.DOMAIN_VERIFICATION_OPTIONS, max_length=16)
is_active = models.BooleanField(default=True)
domain_certificate = models.ForeignKey(
Certificate, related_name='certified_domain')
admin = models.ForeignKey(User, related_name='domains')
def get_webmaster(self):
return self.admin
@property
def is_expired(self):
return True if self.expiration_date > self.date_registered else False
def save(self, *args, **kwargs):
if not self.slug:
self.slug = (self.domain_name + self.tld).replace('.', '-')
return super(Domain, self).save(*args, **kwargs)
# {
# domain_name: data.domain_name || $localStorage.domain.host,
# domain_url: $localStorage.domain.host,,
# domain_registerer: data.register,
# support_email: (lambda emails: data.emails[0] if isinstance(emails, list) else emails)(data.emails),
# } | mit | Python |
80710617b0dbfb862aa2e7367785c9be3e4cbd3d | Use external exceptions | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/modules/node.py | salt/modules/node.py | # -*- coding: utf-8 -*-
#
# Copyright 2015 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Module for full system inspection.
'''
from __future__ import absolute_import
import logging
import importlib
from salt.modules.inspectlib.exceptions import (InspectorQueryException,
InspectorSnapshotException)
# Import Salt libs
import salt.utils
import salt.utils.fsutils
from salt.exceptions import CommandExecutionError
log = logging.getLogger(__name__)
def __virtual__():
'''
Only work on POSIX-like systems
'''
return not salt.utils.is_windows()
def _(module):
'''
Get inspectlib module for the lazy loader.
:param module:
:return:
'''
mod = importlib.import_module("salt.modules.inspectlib.{0}".format(module))
mod.__grains__ = __grains__
mod.__pillar__ = __pillar__
mod.__salt__ = __salt__
return mod
def inspect(mode='all', priority=19):
'''
Start node inspection and save the data to the database for further query.
Parameters:
* **mode**: Clarify inspection mode: configuration, payload, all (default)
* **priority**: (advanced) Set priority of the inspection. Default is low priority.
CLI Example:
.. code-block:: bash
salt '*' node.inspect
salt '*' node.inspect configuration
'''
collector = _("collector")
try:
return collector.Inspector().request_snapshot(mode, priority=priority)
except collector.Inspector.InspectorSnapshotException as ex:
raise CommandExecutionError(ex)
except Exception as ex:
raise Exception(ex)
def query(scope, **kwargs):
'''
Query the node for specific information.
Parameters:
* **scope**: Specify scope of the query.
CLI Example:
.. code-block:: bash
salt '*' node.query scope=os
'''
query = _("query")
try:
return query.Query(scope)(**kwargs)
except query.Query.InspectorQueryException as ex:
raise CommandExecutionError(ex)
except Exception as ex:
raise Exception(ex)
| # -*- coding: utf-8 -*-
#
# Copyright 2015 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Module for full system inspection.
'''
from __future__ import absolute_import
import logging
import importlib
# Import Salt libs
import salt.utils
import salt.utils.fsutils
from salt.exceptions import CommandExecutionError
log = logging.getLogger(__name__)
def __virtual__():
'''
Only work on POSIX-like systems
'''
return not salt.utils.is_windows()
def _(module):
'''
Get inspectlib module for the lazy loader.
:param module:
:return:
'''
mod = importlib.import_module("salt.modules.inspectlib.{0}".format(module))
mod.__grains__ = __grains__
mod.__pillar__ = __pillar__
mod.__salt__ = __salt__
return mod
def inspect(mode='all', priority=19):
'''
Start node inspection and save the data to the database for further query.
Parameters:
* **mode**: Clarify inspection mode: configuration, payload, all (default)
* **priority**: (advanced) Set priority of the inspection. Default is low priority.
CLI Example:
.. code-block:: bash
salt '*' node.inspect
salt '*' node.inspect configuration
'''
collector = _("collector")
try:
return collector.Inspector().request_snapshot(mode, priority=priority)
except collector.Inspector.InspectorSnapshotException as ex:
raise CommandExecutionError(ex)
except Exception as ex:
raise Exception(ex)
def query(scope, **kwargs):
'''
Query the node for specific information.
Parameters:
* **scope**: Specify scope of the query.
CLI Example:
.. code-block:: bash
salt '*' node.query scope=os
'''
query = _("query")
try:
return query.Query(scope)(**kwargs)
except query.Query.InspectorQueryException as ex:
raise CommandExecutionError(ex)
except Exception as ex:
raise Exception(ex)
| apache-2.0 | Python |
38e349f04257d1d35d431a2754c6c249a7b4650c | Allow runner to update_ca_bundle | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/runners/http.py | salt/runners/http.py | # -*- coding: utf-8 -*-
'''
Module for making various web calls. Primarily designed for webhooks and the
like, but also useful for basic http testing.
'''
from __future__ import absolute_import
# Import Python libs
import logging
# Import salt libs
import salt.utils.http
log = logging.getLogger(__name__)
def query(url, output=True, **kwargs):
'''
Query a resource, and decode the return data
CLI Example:
.. code-block:: bash
salt-run http.query http://somelink.com/
salt-run http.query http://somelink.com/ method=POST \
params='key1=val1&key2=val2'
salt-run http.query http://somelink.com/ method=POST \
data='<xml>somecontent</xml>'
'''
if output is not True:
log.warn('Output option has been deprecated. Please use --quiet.')
if 'node' not in kwargs:
kwargs['node'] = 'master'
ret = salt.utils.http.query(url=url, opts=__opts__, **kwargs)
return ret
def update_ca_bundle(target=None, source=None, merge_files=None):
'''
Update the local CA bundle file from a URL
CLI Example:
.. code-block:: bash
salt-run http.update_ca_bundle
salt-run http.update_ca_bundle target=/path/to/cacerts.pem
salt-run http.update_ca_bundle source=https://example.com/cacerts.pem
If the ``target`` is not specified, it will be pulled from the ``ca_cert``
configuration variable available to the master. If it cannot be found there,
it will be placed at ``<<FILE_ROOTS>>/cacerts.pem``.
If the ``source`` is not specified, it will be pulled from the
``ca_cert_url`` configuration variable available to the master. If it cannot
be found, it will be downloaded from the cURL website, using an http (not
https) URL. USING THE DEFAULT URL SHOULD BE AVOIDED!
``merge_files`` may also be specified, which includes a string or list of
strings representing a file or files to be appended to the end of the CA
bundle, once it is downloaded.
CLI Example:
.. code-block:: bash
salt-run http.update_ca_bundle merge_files=/path/to/mycert.pem
'''
return salt.utils.http.update_ca_bundle(
target, source, __opts__, merge_files
)
| # -*- coding: utf-8 -*-
'''
Module for making various web calls. Primarily designed for webhooks and the
like, but also useful for basic http testing.
'''
from __future__ import absolute_import
# Import Python libs
import logging
# Import salt libs
import salt.utils.http
log = logging.getLogger(__name__)
def query(url, output=True, **kwargs):
'''
Query a resource, and decode the return data
CLI Example:
.. code-block:: bash
salt-run http.query http://somelink.com/
salt-run http.query http://somelink.com/ method=POST \
params='key1=val1&key2=val2'
salt-run http.query http://somelink.com/ method=POST \
data='<xml>somecontent</xml>'
'''
if output is not True:
log.warn('Output option has been deprecated. Please use --quiet.')
if 'node' not in kwargs:
kwargs['node'] = 'master'
ret = salt.utils.http.query(url=url, opts=__opts__, **kwargs)
return ret
| apache-2.0 | Python |
6c096bdd89b2e276eafe019b90c9088c49e1d6f2 | Update version number | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | saltcloud/version.py | saltcloud/version.py | import sys
__version_info__ = (0, 8, 6)
__version__ = '.'.join(map(str, __version_info__))
def versions_report():
libs = (
("Salt", "salt", "__version__"),
("Apache Libcloud", "libcloud", "__version__"),
("PyYAML", "yaml", "__version__"),
)
padding = len(max([lib[0] for lib in libs], key=len)) + 1
fmt = '{0:>{pad}}: {1}'
yield fmt.format("Salt Cloud", __version__, pad=padding)
yield fmt.format(
"Python", sys.version.rsplit('\n')[0].strip(), pad=padding
)
for name, imp, attr in libs:
try:
imp = __import__(imp)
version = getattr(imp, attr)
if not isinstance(version, basestring):
version = '.'.join(map(str, version))
yield fmt.format(name, version, pad=padding)
except ImportError:
yield fmt.format(name, "not installed", pad=padding)
| import sys
__version_info__ = (0, 8, 5)
__version__ = '.'.join(map(str, __version_info__))
def versions_report():
libs = (
("Salt", "salt", "__version__"),
("Apache Libcloud", "libcloud", "__version__"),
("PyYAML", "yaml", "__version__"),
)
padding = len(max([lib[0] for lib in libs], key=len)) + 1
fmt = '{0:>{pad}}: {1}'
yield fmt.format("Salt Cloud", __version__, pad=padding)
yield fmt.format(
"Python", sys.version.rsplit('\n')[0].strip(), pad=padding
)
for name, imp, attr in libs:
try:
imp = __import__(imp)
version = getattr(imp, attr)
if not isinstance(version, basestring):
version = '.'.join(map(str, version))
yield fmt.format(name, version, pad=padding)
except ImportError:
yield fmt.format(name, "not installed", pad=padding)
| apache-2.0 | Python |
e5d30636c29cde0c4fa9e81f213b1fc008237229 | Update libchromiumcontent for linking with msvcrt dll | joaomoreno/atom-shell,bbondy/electron,noikiy/electron,GoooIce/electron,jannishuebl/electron,Jacobichou/electron,michaelchiche/electron,tinydew4/electron,arturts/electron,vaginessa/electron,electron/electron,soulteary/electron,natgolov/electron,aliib/electron,sshiting/electron,roadev/electron,shockone/electron,arusakov/electron,bobwol/electron,abhishekgahlot/electron,brenca/electron,soulteary/electron,voidbridge/electron,arusakov/electron,oiledCode/electron,mrwizard82d1/electron,seanchas116/electron,xfstudio/electron,Zagorakiss/electron,kenmozi/electron,Neron-X5/electron,bruce/electron,christian-bromann/electron,synaptek/electron,ervinb/electron,RIAEvangelist/electron,icattlecoder/electron,Jonekee/electron,davazp/electron,saronwei/electron,jcblw/electron,bwiggs/electron,pombredanne/electron,arturts/electron,leethomas/electron,nicholasess/electron,matiasinsaurralde/electron,fomojola/electron,saronwei/electron,faizalpribadi/electron,simongregory/electron,nekuz0r/electron,sircharleswatson/electron,shiftkey/electron,leolujuyi/electron,tincan24/electron,stevekinney/electron,JussMee15/electron,rreimann/electron,shockone/electron,nicholasess/electron,RIAEvangelist/electron,robinvandernoord/electron,fritx/electron,jcblw/electron,evgenyzinoviev/electron,tinydew4/electron,d-salas/electron,iftekeriba/electron,thompsonemerson/electron,dahal/electron,preco21/electron,cos2004/electron,RobertJGabriel/electron,kazupon/electron,robinvandernoord/electron,hokein/atom-shell,nicholasess/electron,GoooIce/electron,IonicaBizauKitchen/electron,xiruibing/electron,aichingm/electron,noikiy/electron,mjaniszew/electron,bright-sparks/electron,SufianHassan/electron,vipulroxx/electron,etiktin/electron,mhkeller/electron,kostia/electron,leftstick/electron,nicholasess/electron,baiwyc119/electron,mattotodd/electron,webmechanicx/electron,JussMee15/electron,christian-bromann/electron,rreimann/electron,twolfson/electron,MaxWhere/electron,dongjoon-hyun/electron,Ivshti/electron,sshiting/electron,the-ress/electron,dkfiresky/electron,simonfork/electron,farmisen/electron,simonfork/electron,posix4e/electron,takashi/electron,posix4e/electron,IonicaBizauKitchen/electron,Rokt33r/electron,jiaz/electron,stevemao/electron,joaomoreno/atom-shell,tonyganch/electron,miniak/electron,vaginessa/electron,synaptek/electron,aecca/electron,bobwol/electron,etiktin/electron,adcentury/electron,anko/electron,rreimann/electron,takashi/electron,mattdesl/electron,iftekeriba/electron,mattdesl/electron,jlord/electron,carsonmcdonald/electron,nekuz0r/electron,nekuz0r/electron,twolfson/electron,zhakui/electron,gabrielPeart/electron,BionicClick/electron,vHanda/electron,robinvandernoord/electron,bruce/electron,yalexx/electron,aecca/electron,nicobot/electron,adcentury/electron,fomojola/electron,Evercoder/electron,the-ress/electron,smczk/electron,webmechanicx/electron,beni55/electron,fabien-d/electron,MaxWhere/electron,Jacobichou/electron,sshiting/electron,christian-bromann/electron,tonyganch/electron,cqqccqc/electron,trigrass2/electron,MaxGraey/electron,carsonmcdonald/electron,Rokt33r/electron,shiftkey/electron,tonyganch/electron,noikiy/electron,shockone/electron,jhen0409/electron,fritx/electron,brave/muon,Andrey-Pavlov/electron,ervinb/electron,gbn972/electron,etiktin/electron,brave/muon,fritx/electron,electron/electron,RIAEvangelist/electron,gabriel/electron,astoilkov/electron,dahal/electron,John-Lin/electron,evgenyzinoviev/electron,voidbridge/electron,RobertJGabriel/electron,leolujuyi/electron,yan-foto/electron,kostia/electron,meowlab/electron,aliib/electron,medixdev/electron,howmuchcomputer/electron,BionicClick/electron,bitemyapp/electron,Floato/electron,jsutcodes/electron,Zagorakiss/electron,chriskdon/electron,thompsonemerson/electron,Floato/electron,deed02392/electron,gbn972/electron,miniak/electron,shiftkey/electron,aaron-goshine/electron,shaundunne/electron,systembugtj/electron,the-ress/electron,timruffles/electron,wan-qy/electron,soulteary/electron,greyhwndz/electron,jlhbaseball15/electron,bright-sparks/electron,tylergibson/electron,shiftkey/electron,bpasero/electron,astoilkov/electron,gamedevsam/electron,felixrieseberg/electron,sircharleswatson/electron,faizalpribadi/electron,arusakov/electron,kokdemo/electron,LadyNaggaga/electron,iftekeriba/electron,icattlecoder/electron,benweissmann/electron,coderhaoxin/electron,anko/electron,JesselJohn/electron,Evercoder/electron,adamjgray/electron,mrwizard82d1/electron,soulteary/electron,leethomas/electron,jjz/electron,rsvip/electron,trankmichael/electron,felixrieseberg/electron,jaanus/electron,eric-seekas/electron,stevekinney/electron,vipulroxx/electron,gerhardberger/electron,synaptek/electron,jsutcodes/electron,biblerule/UMCTelnetHub,simongregory/electron,carsonmcdonald/electron,nicobot/electron,jsutcodes/electron,carsonmcdonald/electron,setzer777/electron,rsvip/electron,nicobot/electron,jsutcodes/electron,JesselJohn/electron,SufianHassan/electron,kcrt/electron,DivyaKMenon/electron,abhishekgahlot/electron,felixrieseberg/electron,Evercoder/electron,trigrass2/electron,wolfflow/electron,leethomas/electron,bright-sparks/electron,shaundunne/electron,IonicaBizauKitchen/electron,dongjoon-hyun/electron,joneit/electron,trankmichael/electron,deed02392/electron,benweissmann/electron,bitemyapp/electron,mjaniszew/electron,adamjgray/electron,rhencke/electron,joaomoreno/atom-shell,bbondy/electron,mirrh/electron,preco21/electron,Evercoder/electron,minggo/electron,biblerule/UMCTelnetHub,digideskio/electron,eric-seekas/electron,rajatsingla28/electron,MaxWhere/electron,dongjoon-hyun/electron,anko/electron,the-ress/electron,lrlna/electron,RobertJGabriel/electron,kokdemo/electron,kostia/electron,faizalpribadi/electron,bbondy/electron,Faiz7412/electron,miniak/electron,BionicClick/electron,bobwol/electron,shockone/electron,leolujuyi/electron,LadyNaggaga/electron,edulan/electron,fritx/electron,farmisen/electron,aliib/electron,fffej/electron,electron/electron,davazp/electron,benweissmann/electron,Jonekee/electron,leftstick/electron,coderhaoxin/electron,eric-seekas/electron,thomsonreuters/electron,lzpfmh/electron,fffej/electron,bpasero/electron,mjaniszew/electron,gamedevsam/electron,egoist/electron,seanchas116/electron,kcrt/electron,astoilkov/electron,systembugtj/electron,roadev/electron,thomsonreuters/electron,Faiz7412/electron,JussMee15/electron,mirrh/electron,thompsonemerson/electron,ianscrivener/electron,jiaz/electron,jsutcodes/electron,aaron-goshine/electron,joneit/electron,astoilkov/electron,cqqccqc/electron,kostia/electron,kcrt/electron,anko/electron,beni55/electron,fffej/electron,tincan24/electron,egoist/electron,trigrass2/electron,simonfork/electron,tylergibson/electron,jlord/electron,oiledCode/electron,medixdev/electron,rreimann/electron,ankitaggarwal011/electron,simongregory/electron,gerhardberger/electron,miniak/electron,jiaz/electron,micalan/electron,gabriel/electron,LadyNaggaga/electron,kazupon/electron,eriser/electron,shennushi/electron,etiktin/electron,thompsonemerson/electron,michaelchiche/electron,darwin/electron,bbondy/electron,takashi/electron,dkfiresky/electron,DivyaKMenon/electron,thingsinjars/electron,jaanus/electron,trankmichael/electron,Andrey-Pavlov/electron,gabriel/electron,vaginessa/electron,minggo/electron,benweissmann/electron,BionicClick/electron,gabriel/electron,twolfson/electron,vipulroxx/electron,destan/electron,jtburke/electron,edulan/electron,soulteary/electron,renaesop/electron,christian-bromann/electron,adcentury/electron,lzpfmh/electron,jhen0409/electron,stevemao/electron,digideskio/electron,greyhwndz/electron,leftstick/electron,bpasero/electron,hokein/atom-shell,jacksondc/electron,jjz/electron,Jonekee/electron,noikiy/electron,minggo/electron,jannishuebl/electron,abhishekgahlot/electron,fireball-x/atom-shell,egoist/electron,jtburke/electron,arturts/electron,nicobot/electron,fffej/electron,sircharleswatson/electron,rsvip/electron,micalan/electron,saronwei/electron,egoist/electron,eric-seekas/electron,mjaniszew/electron,Neron-X5/electron,bright-sparks/electron,tincan24/electron,DivyaKMenon/electron,simonfork/electron,evgenyzinoviev/electron,leolujuyi/electron,howmuchcomputer/electron,destan/electron,stevemao/electron,etiktin/electron,shennushi/electron,Gerhut/electron,faizalpribadi/electron,tinydew4/electron,lzpfmh/electron,systembugtj/electron,micalan/electron,neutrous/electron,bright-sparks/electron,kokdemo/electron,baiwyc119/electron,cqqccqc/electron,icattlecoder/electron,smczk/electron,kokdemo/electron,Evercoder/electron,jjz/electron,oiledCode/electron,seanchas116/electron,mattotodd/electron,felixrieseberg/electron,abhishekgahlot/electron,Andrey-Pavlov/electron,tomashanacek/electron,meowlab/electron,wan-qy/electron,coderhaoxin/electron,minggo/electron,jlord/electron,nekuz0r/electron,dongjoon-hyun/electron,aaron-goshine/electron,Gerhut/electron,jlhbaseball15/electron,gabrielPeart/electron,Jacobichou/electron,synaptek/electron,bitemyapp/electron,ervinb/electron,eric-seekas/electron,thingsinjars/electron,sky7sea/electron,JesselJohn/electron,dahal/electron,BionicClick/electron,gerhardberger/electron,Jonekee/electron,yalexx/electron,IonicaBizauKitchen/electron,chriskdon/electron,rhencke/electron,pirafrank/electron,joneit/electron,wan-qy/electron,biblerule/UMCTelnetHub,jhen0409/electron,michaelchiche/electron,kenmozi/electron,SufianHassan/electron,RIAEvangelist/electron,Andrey-Pavlov/electron,smczk/electron,bpasero/electron,renaesop/electron,bruce/electron,matiasinsaurralde/electron,noikiy/electron,gabrielPeart/electron,egoist/electron,d-salas/electron,fomojola/electron,stevekinney/electron,aecca/electron,voidbridge/electron,thingsinjars/electron,voidbridge/electron,jlord/electron,rhencke/electron,DivyaKMenon/electron,RobertJGabriel/electron,shaundunne/electron,jhen0409/electron,subblue/electron,astoilkov/electron,takashi/electron,beni55/electron,adamjgray/electron,ankitaggarwal011/electron,Ivshti/electron,electron/electron,bwiggs/electron,JesselJohn/electron,Ivshti/electron,brave/muon,shaundunne/electron,JesselJohn/electron,twolfson/electron,kazupon/electron,arusakov/electron,miniak/electron,bwiggs/electron,sshiting/electron,DivyaKMenon/electron,xfstudio/electron,benweissmann/electron,dongjoon-hyun/electron,howmuchcomputer/electron,aichingm/electron,faizalpribadi/electron,Floato/electron,posix4e/electron,rsvip/electron,aichingm/electron,sircharleswatson/electron,shennushi/electron,wan-qy/electron,rhencke/electron,baiwyc119/electron,leftstick/electron,gbn972/electron,aecca/electron,electron/electron,bobwol/electron,jcblw/electron,medixdev/electron,bwiggs/electron,nicobot/electron,wan-qy/electron,gerhardberger/electron,aichingm/electron,jacksondc/electron,kenmozi/electron,etiktin/electron,matiasinsaurralde/electron,kazupon/electron,Jonekee/electron,stevekinney/electron,meowlab/electron,subblue/electron,seanchas116/electron,shiftkey/electron,mhkeller/electron,Zagorakiss/electron,JussMee15/electron,MaxGraey/electron,iftekeriba/electron,leolujuyi/electron,nicholasess/electron,Neron-X5/electron,kostia/electron,stevemao/electron,fritx/electron,aichingm/electron,yan-foto/electron,tylergibson/electron,jaanus/electron,ankitaggarwal011/electron,Neron-X5/electron,joaomoreno/atom-shell,subblue/electron,adcentury/electron,Rokt33r/electron,Jacobichou/electron,RobertJGabriel/electron,gerhardberger/electron,adamjgray/electron,pandoraui/electron,bbondy/electron,Gerhut/electron,roadev/electron,jonatasfreitasv/electron,jiaz/electron,tincan24/electron,leolujuyi/electron,natgolov/electron,aaron-goshine/electron,oiledCode/electron,tomashanacek/electron,bruce/electron,trankmichael/electron,brave/electron,tylergibson/electron,jaanus/electron,MaxWhere/electron,Evercoder/electron,JussMee15/electron,jtburke/electron,yan-foto/electron,bbondy/electron,bobwol/electron,wolfflow/electron,xiruibing/electron,gabrielPeart/electron,coderhaoxin/electron,leethomas/electron,mirrh/electron,shennushi/electron,evgenyzinoviev/electron,setzer777/electron,digideskio/electron,dahal/electron,brenca/electron,coderhaoxin/electron,farmisen/electron,rajatsingla28/electron,Ivshti/electron,IonicaBizauKitchen/electron,takashi/electron,gerhardberger/electron,adamjgray/electron,sky7sea/electron,natgolov/electron,jacksondc/electron,stevemao/electron,yan-foto/electron,bright-sparks/electron,biblerule/UMCTelnetHub,Neron-X5/electron,digideskio/electron,brave/muon,michaelchiche/electron,ankitaggarwal011/electron,joaomoreno/atom-shell,jhen0409/electron,pandoraui/electron,destan/electron,preco21/electron,zhakui/electron,simongregory/electron,lzpfmh/electron,yan-foto/electron,jaanus/electron,farmisen/electron,jannishuebl/electron,zhakui/electron,medixdev/electron,joaomoreno/atom-shell,fritx/electron,howmuchcomputer/electron,rhencke/electron,greyhwndz/electron,michaelchiche/electron,jonatasfreitasv/electron,matiasinsaurralde/electron,vipulroxx/electron,dkfiresky/electron,greyhwndz/electron,roadev/electron,davazp/electron,egoist/electron,setzer777/electron,thingsinjars/electron,minggo/electron,eriser/electron,jlord/electron,pirafrank/electron,gerhardberger/electron,cqqccqc/electron,vHanda/electron,vipulroxx/electron,gabriel/electron,rajatsingla28/electron,mirrh/electron,darwin/electron,rajatsingla28/electron,nekuz0r/electron,shiftkey/electron,Jonekee/electron,aecca/electron,evgenyzinoviev/electron,pombredanne/electron,brave/muon,darwin/electron,tomashanacek/electron,thingsinjars/electron,twolfson/electron,timruffles/electron,thomsonreuters/electron,fireball-x/atom-shell,greyhwndz/electron,xfstudio/electron,vHanda/electron,Floato/electron,benweissmann/electron,xiruibing/electron,fffej/electron,synaptek/electron,jannishuebl/electron,pirafrank/electron,nekuz0r/electron,jtburke/electron,JesselJohn/electron,shennushi/electron,brave/electron,MaxWhere/electron,gbn972/electron,arturts/electron,fomojola/electron,Zagorakiss/electron,ianscrivener/electron,Jacobichou/electron,Faiz7412/electron,pandoraui/electron,leftstick/electron,mattotodd/electron,John-Lin/electron,medixdev/electron,xiruibing/electron,renaesop/electron,timruffles/electron,thompsonemerson/electron,hokein/atom-shell,dongjoon-hyun/electron,smczk/electron,kcrt/electron,ervinb/electron,cos2004/electron,jannishuebl/electron,wolfflow/electron,preco21/electron,adcentury/electron,brenca/electron,JussMee15/electron,yalexx/electron,takashi/electron,John-Lin/electron,natgolov/electron,jsutcodes/electron,christian-bromann/electron,shockone/electron,posix4e/electron,shockone/electron,matiasinsaurralde/electron,thingsinjars/electron,jacksondc/electron,baiwyc119/electron,faizalpribadi/electron,preco21/electron,micalan/electron,felixrieseberg/electron,Neron-X5/electron,fffej/electron,neutrous/electron,wolfflow/electron,brave/electron,Faiz7412/electron,lzpfmh/electron,rreimann/electron,icattlecoder/electron,fabien-d/electron,IonicaBizauKitchen/electron,dkfiresky/electron,mhkeller/electron,mattdesl/electron,carsonmcdonald/electron,coderhaoxin/electron,destan/electron,vaginessa/electron,destan/electron,deed02392/electron,baiwyc119/electron,kokdemo/electron,zhakui/electron,simongregory/electron,icattlecoder/electron,xiruibing/electron,aliib/electron,Rokt33r/electron,saronwei/electron,bwiggs/electron,wan-qy/electron,twolfson/electron,systembugtj/electron,miniak/electron,zhakui/electron,eriser/electron,timruffles/electron,bruce/electron,oiledCode/electron,ervinb/electron,edulan/electron,deed02392/electron,fireball-x/atom-shell,webmechanicx/electron,xfstudio/electron,meowlab/electron,MaxGraey/electron,fabien-d/electron,abhishekgahlot/electron,sshiting/electron,fomojola/electron,bitemyapp/electron,michaelchiche/electron,thomsonreuters/electron,jacksondc/electron,beni55/electron,digideskio/electron,vHanda/electron,kazupon/electron,Gerhut/electron,howmuchcomputer/electron,kcrt/electron,kazupon/electron,ankitaggarwal011/electron,eriser/electron,renaesop/electron,simongregory/electron,webmechanicx/electron,sky7sea/electron,aaron-goshine/electron,LadyNaggaga/electron,jtburke/electron,mhkeller/electron,subblue/electron,subblue/electron,sshiting/electron,the-ress/electron,matiasinsaurralde/electron,neutrous/electron,shaundunne/electron,mhkeller/electron,meowlab/electron,brave/electron,xiruibing/electron,Gerhut/electron,kenmozi/electron,yalexx/electron,joneit/electron,neutrous/electron,mrwizard82d1/electron,Ivshti/electron,John-Lin/electron,noikiy/electron,adamjgray/electron,posix4e/electron,SufianHassan/electron,bruce/electron,cos2004/electron,shennushi/electron,smczk/electron,Zagorakiss/electron,jonatasfreitasv/electron,farmisen/electron,greyhwndz/electron,darwin/electron,MaxWhere/electron,ianscrivener/electron,robinvandernoord/electron,electron/electron,chriskdon/electron,webmechanicx/electron,yalexx/electron,Andrey-Pavlov/electron,ianscrivener/electron,dahal/electron,tonyganch/electron,kostia/electron,jaanus/electron,brenca/electron,leethomas/electron,bwiggs/electron,Andrey-Pavlov/electron,adcentury/electron,eriser/electron,jlhbaseball15/electron,seanchas116/electron,saronwei/electron,jjz/electron,sky7sea/electron,lrlna/electron,ervinb/electron,eric-seekas/electron,gabriel/electron,darwin/electron,preco21/electron,brave/electron,xfstudio/electron,d-salas/electron,cos2004/electron,seanchas116/electron,MaxGraey/electron,LadyNaggaga/electron,micalan/electron,edulan/electron,rsvip/electron,tonyganch/electron,fireball-x/atom-shell,stevekinney/electron,cqqccqc/electron,subblue/electron,SufianHassan/electron,stevemao/electron,jiaz/electron,arusakov/electron,roadev/electron,thomsonreuters/electron,trigrass2/electron,jlhbaseball15/electron,arusakov/electron,biblerule/UMCTelnetHub,lzpfmh/electron,icattlecoder/electron,renaesop/electron,systembugtj/electron,gabrielPeart/electron,DivyaKMenon/electron,gamedevsam/electron,joneit/electron,pirafrank/electron,evgenyzinoviev/electron,Floato/electron,gabrielPeart/electron,the-ress/electron,dahal/electron,bpasero/electron,beni55/electron,robinvandernoord/electron,posix4e/electron,wolfflow/electron,jlhbaseball15/electron,tomashanacek/electron,deed02392/electron,pandoraui/electron,jonatasfreitasv/electron,gamedevsam/electron,John-Lin/electron,digideskio/electron,tonyganch/electron,christian-bromann/electron,yan-foto/electron,mrwizard82d1/electron,chriskdon/electron,brenca/electron,dkfiresky/electron,aaron-goshine/electron,jcblw/electron,pombredanne/electron,chriskdon/electron,vaginessa/electron,aichingm/electron,simonfork/electron,Jacobichou/electron,sircharleswatson/electron,carsonmcdonald/electron,sky7sea/electron,hokein/atom-shell,Rokt33r/electron,brave/muon,brave/electron,mjaniszew/electron,Floato/electron,cos2004/electron,vipulroxx/electron,cos2004/electron,mirrh/electron,trankmichael/electron,natgolov/electron,edulan/electron,mrwizard82d1/electron,kenmozi/electron,kcrt/electron,d-salas/electron,edulan/electron,GoooIce/electron,fabien-d/electron,chriskdon/electron,setzer777/electron,mrwizard82d1/electron,aliib/electron,baiwyc119/electron,lrlna/electron,abhishekgahlot/electron,mattotodd/electron,eriser/electron,Gerhut/electron,bobwol/electron,pombredanne/electron,gamedevsam/electron,vaginessa/electron,voidbridge/electron,nicobot/electron,beni55/electron,the-ress/electron,fireball-x/atom-shell,shaundunne/electron,mhkeller/electron,robinvandernoord/electron,LadyNaggaga/electron,leftstick/electron,destan/electron,trigrass2/electron,systembugtj/electron,bitemyapp/electron,ankitaggarwal011/electron,davazp/electron,renaesop/electron,pirafrank/electron,oiledCode/electron,GoooIce/electron,d-salas/electron,micalan/electron,tinydew4/electron,jlhbaseball15/electron,sky7sea/electron,roadev/electron,gamedevsam/electron,zhakui/electron,anko/electron,gbn972/electron,astoilkov/electron,kokdemo/electron,rajatsingla28/electron,vHanda/electron,RobertJGabriel/electron,RIAEvangelist/electron,arturts/electron,medixdev/electron,jannishuebl/electron,d-salas/electron,trigrass2/electron,RIAEvangelist/electron,davazp/electron,jcblw/electron,fabien-d/electron,tylergibson/electron,sircharleswatson/electron,smczk/electron,MaxGraey/electron,wolfflow/electron,tincan24/electron,tomashanacek/electron,synaptek/electron,jiaz/electron,hokein/atom-shell,biblerule/UMCTelnetHub,mattotodd/electron,stevekinney/electron,fomojola/electron,voidbridge/electron,lrlna/electron,webmechanicx/electron,arturts/electron,John-Lin/electron,GoooIce/electron,pombredanne/electron,leethomas/electron,lrlna/electron,pirafrank/electron,tomashanacek/electron,saronwei/electron,minggo/electron,BionicClick/electron,simonfork/electron,ianscrivener/electron,howmuchcomputer/electron,xfstudio/electron,mattdesl/electron,Zagorakiss/electron,tinydew4/electron,bitemyapp/electron,cqqccqc/electron,thomsonreuters/electron,farmisen/electron,kenmozi/electron,SufianHassan/electron,anko/electron,davazp/electron,pandoraui/electron,pombredanne/electron,neutrous/electron,tylergibson/electron,Rokt33r/electron,deed02392/electron,meowlab/electron,thompsonemerson/electron,soulteary/electron,setzer777/electron,Faiz7412/electron,iftekeriba/electron,iftekeriba/electron,ianscrivener/electron,felixrieseberg/electron,electron/electron,jonatasfreitasv/electron,jacksondc/electron,dkfiresky/electron,rajatsingla28/electron,lrlna/electron,setzer777/electron,jjz/electron,aliib/electron,mjaniszew/electron,rhencke/electron,tincan24/electron,aecca/electron,mattdesl/electron,joneit/electron,neutrous/electron,jhen0409/electron,tinydew4/electron,rreimann/electron,bpasero/electron,natgolov/electron,brenca/electron,yalexx/electron,jjz/electron,jtburke/electron,jonatasfreitasv/electron,mattotodd/electron,GoooIce/electron,trankmichael/electron,jcblw/electron,mirrh/electron,nicholasess/electron,timruffles/electron,pandoraui/electron,mattdesl/electron,vHanda/electron,bpasero/electron,gbn972/electron | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
import errno
import os
import platform
import sys
BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '90a5b9c3792645067ad9517e60cf5eb99730e0f9'
PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
verbose_mode = False
def get_target_arch():
# Always build 64bit on OS X.
if PLATFORM == 'darwin':
return 'x64'
# Only build for host's arch on Linux.
elif PLATFORM == 'linux':
if platform.architecture()[0] == '32bit':
return 'ia32'
else:
return 'x64'
# On Windows it depends on user.
elif PLATFORM == 'win32':
try:
target_arch_path = os.path.join(__file__, '..', '..', '..', 'vendor',
'brightray', 'vendor', 'download',
'libchromiumcontent', '.target_arch')
with open(os.path.normpath(target_arch_path)) as f:
return f.read().strip()
except IOError as e:
if e.errno != errno.ENOENT:
raise
# Build 32bit by default.
return 'ia32'
# Maybe we will support other platforms in future.
else:
return 'x64'
def s3_config():
config = (os.environ.get('ATOM_SHELL_S3_BUCKET', ''),
os.environ.get('ATOM_SHELL_S3_ACCESS_KEY', ''),
os.environ.get('ATOM_SHELL_S3_SECRET_KEY', ''))
message = ('Error: Please set the $ATOM_SHELL_S3_BUCKET, '
'$ATOM_SHELL_S3_ACCESS_KEY, and '
'$ATOM_SHELL_S3_SECRET_KEY environment variables')
assert all(len(c) for c in config), message
return config
def enable_verbose_mode():
print 'Running in verbose mode'
global verbose_mode
verbose_mode = True
def is_verbose_mode():
return verbose_mode
| #!/usr/bin/env python
import errno
import os
import platform
import sys
BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '07a73a610496e4a1b4f3abc3c2fb0516187ec460'
PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
verbose_mode = False
def get_target_arch():
# Always build 64bit on OS X.
if PLATFORM == 'darwin':
return 'x64'
# Only build for host's arch on Linux.
elif PLATFORM == 'linux':
if platform.architecture()[0] == '32bit':
return 'ia32'
else:
return 'x64'
# On Windows it depends on user.
elif PLATFORM == 'win32':
try:
target_arch_path = os.path.join(__file__, '..', '..', '..', 'vendor',
'brightray', 'vendor', 'download',
'libchromiumcontent', '.target_arch')
with open(os.path.normpath(target_arch_path)) as f:
return f.read().strip()
except IOError as e:
if e.errno != errno.ENOENT:
raise
# Build 32bit by default.
return 'ia32'
# Maybe we will support other platforms in future.
else:
return 'x64'
def s3_config():
config = (os.environ.get('ATOM_SHELL_S3_BUCKET', ''),
os.environ.get('ATOM_SHELL_S3_ACCESS_KEY', ''),
os.environ.get('ATOM_SHELL_S3_SECRET_KEY', ''))
message = ('Error: Please set the $ATOM_SHELL_S3_BUCKET, '
'$ATOM_SHELL_S3_ACCESS_KEY, and '
'$ATOM_SHELL_S3_SECRET_KEY environment variables')
assert all(len(c) for c in config), message
return config
def enable_verbose_mode():
print 'Running in verbose mode'
global verbose_mode
verbose_mode = True
def is_verbose_mode():
return verbose_mode
| mit | Python |
64c3a1df70907d857f2eff5cf6a010075eba797f | Update version to 20160713 | pidydx/artifacts,pidydx/artifacts,destijl/artifacts,destijl/artifacts | artifacts/__init__.py | artifacts/__init__.py | # -*- coding: utf-8 -*-
__version__ = '20160713'
| # -*- coding: utf-8 -*-
__version__ = '20160114'
| apache-2.0 | Python |
fabe0a071ce6e0e3d92098669dac33c2c9bc9b62 | add folder docs | MarineLasbleis/GrowYourIC | GrowYourIC/__init__.py | GrowYourIC/__init__.py | """
GrowYourIC
====
GrowYourIC is a tool to model seismic observation through inner core geodynamical modelisation.
"""
from __future__ import absolute_import
__version__ = "0.1.1"
from . import data
from . import geodyn
from . import intersection
from . import positions
from . import plot_data
from . import mineral_phys_data
from . import geodyn_trg
from . import geodyn_static
from . import cross_sections
| """
GrowYourIC
====
GrowYourIC is a tool to model seismic observation through inner core geodynamical modelisation.
"""
from __future__ import absolute_import
__version__ = "0.1.0"
from . import data
from . import geodyn
from . import intersection
from . import positions
from . import plot_data
from . import mineral_phys_data
from . import geodyn_trg
from . import geodyn_static
from . import cross_sections
| mit | Python |
9d46df1680e3d799971e73ec73043c2a6c0590ce | Fix building tar in deployment | vmalloc/mailboxer,Infinidat/lanister,vmalloc/mailboxer,Infinidat/lanister,getslash/mailboxer,vmalloc/mailboxer,getslash/mailboxer,getslash/mailboxer | scripts/build_tar.py | scripts/build_tar.py | #! /usr/bin/python
import os
import subprocess
root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tarfile = os.path.join(root_dir, "src_pkg.tar")
def _is_dir_newer(directory, filename):
file_mtime = os.stat(filename).st_mtime
for dirname, _, filenames in os.walk(directory):
if _is_file_newer(dirname, file_mtime):
return True
for filename in filenames:
if filename.endswith(".pyc"):
continue
if _is_file_newer(os.path.join(dirname, filename), file_mtime):
return True
return False
def _is_file_newer(filename, file_mtime):
returned = os.stat(filename).st_mtime > file_mtime
return returned
def _tar():
if 0 != subprocess.call("tar cvf {0} flask_app manage.py static".format(tarfile), shell=True, cwd=root_dir):
raise Exception("Tar failed")
if __name__ == '__main__':
if not os.path.exists(tarfile) or \
_is_dir_newer(os.path.join(root_dir, "flask_app"), tarfile) or \
_is_dir_newer(os.path.join(root_dir, "static"), tarfile) or \
_is_file_newer(os.path.join(root_dir, "manage.py"), os.stat(tarfile).st_mtime):
_tar()
| #! /usr/bin/python
import os
import subprocess
root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tarfile = os.path.join(root_dir, "src_pkg.tar")
def _is_dir_newer(directory, filename):
file_mtime = os.stat(filename).st_mtime
for dirname, _, filenames in os.walk(directory):
for filename in filenames:
if filename.endswith(".pyc"):
continue
if _is_file_newer(os.path.join(dirname, filename), file_mtime):
return True
return False
def _is_file_newer(filename, file_mtime):
return os.stat(filename).st_mtime > file_mtime
def _tar():
if 0 != subprocess.call("tar cvf {0} flask_app manage.py static".format(tarfile), shell=True, cwd=root_dir):
raise Exception("Tar failed")
if __name__ == '__main__':
if not os.path.exists(tarfile) or \
_is_dir_newer(os.path.join(root_dir, "flask_app"), tarfile) or \
_is_dir_newer(os.path.join(root_dir, "static"), tarfile) or \
_is_file_newer(os.path.join(root_dir, "manage.py"), os.stat(tarfile).st_mtime):
_tar()
| bsd-3-clause | Python |
6b0746b82d7a085655a0f594201a7efb471dc3c5 | Fix attachment representation | Hackfmi/Diaphanum,Hackfmi/Diaphanum | attachments/models.py | attachments/models.py | from django.db import models
from datetime import datetime
class Attachment(models.Model):
file_name = models.FileField(upload_to='attachments')
def __unicode__(self):
return unicode(self.file_name)
| from django.db import models
class Attachment(models.Model):
file_name = models.FileField(upload_to='attachments')
def __unicode__(self):
return self.file_name
| mit | Python |
494f497b4aee2189bffd6f916fb3d9ec4090fc6a | Update task_9_21.py | Mariaanisimova/pythonintask | IVTp/2014/task_9_21.py | IVTp/2014/task_9_21.py | #Задача 9. Вариант 21.
#Создайте игру, в которой компьютер выбирает какое-либо слово, а игрок должен его отгадать.
#Компьютер сообщает игроку, сколько букв в слове, и дает пять попыток узнать, есть ли какая-либо буква в слове, причем программа может отвечать только "Да" и "Нет".
#Вслед за тем игрок должен попробовать отгадать слово.
#Шпенькова А.С.
#23.05.2016
import random
word = ("зебра","слон","кот","носорог","динозавр","комар")
variant=""
computer_selection=random.choice (word)
tr1=5
x=1
i=1
print("\n Угадайте заданное слово...\n")
print ("\n",word, "\n")
print(computer_selection,"\n")
while variant!= computer_selection:
if tr1==5 :
if (input("Нужны ли Вам подсказки?\n")) == "да" :
print("Длина заданного слова = :",len(computer_selection),"\n")
tr1=tr1-1
variant=input("\nВаш вариант:")
if variant==computer_selection :
print("Вы угадали!")
else :
print("Вы ошиблись!\n")
input("\n\nНажмите Enter, чтобы выйти...")
| #Задача 9. Вариант 21.
#Создайте игру, в которой компьютер выбирает какое-либо слово, а игрок должен его отгадать.
#Компьютер сообщает игроку, сколько букв в слове, и дает пять попыток узнать, есть ли какая-либо буква в слове, причем программа может отвечать только "Да" и "Нет".
#Вслед за тем игрок должен попробовать отгадать слово.
#Шпенькова А.С.
#23.05.2016
import random
word = ("зебра","слон","кот","носорог","динозавр","комар")
variant=""
computer_selection=random.choice (word)
tr1=5
x=1
i=1
print("\n Угадайте заданное слово...\n")
print ("\n",word, "\n")
print(computer_selection,"\n")
while variant!= computer_selection:
if tr1==5 :
if (input("Нужны ли Вам подсказки?\n")) == "да" :
print("Длина заданного слова = :",len(computer_selection),"\n")
tr1=tr1-1
variant=input("\nВаш вариант:")
if variant==computer_selection :
print("Вы угадали!")
else :
"Вы ошиблись\n"
input("\n\nНажмите Enter, чтобы выйти...")
| apache-2.0 | Python |
bc5e9ab68cfc3ad4372a5dd37cff768f3008f6e8 | Test Change 2 | iambillal/inf1340_2015_asst1 | exercise1.py | exercise1.py | #!/usr/bin/env python
""" Assignment 1, Exercise 1, INF1340, Fall, 2014. Grade to gpa conversion
This module prints the amount of money that Lakshmi has remaining
after the stock transactions
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
money = 3000.00
print(money)
| #!/usr/bin/env python
""" Assignment 1, Exercise 1, INF1340, Fall, 2014. Grade to gpa conversion
This module prints the amount of money that Lakshmi has remaining
after the stock transactions
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
money = 2000.00
print(money)
| mit | Python |
dc59fd211e7881706ea2629dd34bcaeac4260d1a | Fix typo | lowRISC/fusesoc,lowRISC/fusesoc,olofk/fusesoc,olofk/fusesoc | orpsoc/utils.py | orpsoc/utils.py | import subprocess
class Launcher:
def __init__(self, cmd, args=[], shell=False, cwd=None, stderr=None, errormsg=None):
self.cmd = cmd
self.args = args
self.shell = shell
self.cwd = cwd
self.stderr = stderr
self.errormsg = errormsg
def run(self):
try:
subprocess.check_call([self.cmd] + self.args,
cwd = self.cwd,
shell = self.shell,
stderr = self.stderr,
stdin=subprocess.PIPE),
except OSError:
raise RuntimeError("Error: Command " + self.cmd + " not found. Make sure it is in $PATH")
except subprocess.CalledProcessError:
if self.stderr is None:
self.stderr = "stderr"
if self.errormsg:
raise RuntimeError(self.errormsg)
else:
raise RuntimeError("Error: " + self.cmd + ' '.join(self.args) + " returned errors. See " + self.stderr + " for details")
def __str__(self):
return self.cmd + ' ' + ' '.join(self.args)
def launch(cmd, args=[], shell=False, cwd=None, stderr=None):
try:
subprocess.check_call([cmd] + args,
cwd = cwd,
shell = shell,
stderr = stderr),
except OSError:
print("Error: Command " + cmd + " not found. Make sure it is in $PATH")
exit(1)
except subprocess.CalledProcessError:
if stderr is None:
stderr = "stderr"
print("Error: " + cmd + ' '.join(args) + " returned errors. See " + stderr + " for details")
exit(1)
| import subprocess
class Launcher:
def __init__(self, cmd, args=[], shell=False, cwd=None, stderr=None, errormsg=None):
self.cmd = cmd
self.args = args
self.shell = shell
self.cwd = cwd
self.stderr = stderr
self.errormsg = errormsg
def run(self):
try:
subprocess.check_call([self.cmd] + self.args,
cwd = self.cwd,
shell = self.shell,
stderr = self.stderr,
stdin=subprocess.PIPE),
except OSError:
raise RuntimeError("Error: Command " + self.cmd + " not found. Make sure it is in $PATH")
except subprocess.CalledProcessError:
if self.stderr is None:
self.stderr = "stderr"
if self.errormsg:
raise RuntimeError(self.errormsg)
else:
raise RuntimeError("Error: " + self.cmd + ' '.join(args) + " returned errors. See " + self.stderr + " for details")
def __str__(self):
return self.cmd + ' ' + ' '.join(self.args)
def launch(cmd, args=[], shell=False, cwd=None, stderr=None):
try:
subprocess.check_call([cmd] + args,
cwd = cwd,
shell = shell,
stderr = stderr),
except OSError:
print("Error: Command " + cmd + " not found. Make sure it is in $PATH")
exit(1)
except subprocess.CalledProcessError:
if stderr is None:
stderr = "stderr"
print("Error: " + cmd + ' '.join(args) + " returned errors. See " + stderr + " for details")
exit(1)
| bsd-2-clause | Python |
aa43737b06ccdff8fd4d60be67d09ba4a05bbc65 | Add Asciinema for GofmtBear | horczech/coala-bears,LWJensen/coala-bears,horczech/coala-bears,seblat/coala-bears,srisankethu/coala-bears,kaustubhhiware/coala-bears,kaustubhhiware/coala-bears,sounak98/coala-bears,Shade5/coala-bears,meetmangukiya/coala-bears,madhukar01/coala-bears,ankit01ojha/coala-bears,shreyans800755/coala-bears,coala-analyzer/coala-bears,aptrishu/coala-bears,horczech/coala-bears,horczech/coala-bears,damngamerz/coala-bears,yash-nisar/coala-bears,incorrectusername/coala-bears,shreyans800755/coala-bears,damngamerz/coala-bears,shreyans800755/coala-bears,ankit01ojha/coala-bears,incorrectusername/coala-bears,madhukar01/coala-bears,ankit01ojha/coala-bears,arjunsinghy96/coala-bears,ankit01ojha/coala-bears,aptrishu/coala-bears,yash-nisar/coala-bears,Vamshi99/coala-bears,refeed/coala-bears,LWJensen/coala-bears,damngamerz/coala-bears,arjunsinghy96/coala-bears,aptrishu/coala-bears,horczech/coala-bears,horczech/coala-bears,coala-analyzer/coala-bears,naveentata/coala-bears,refeed/coala-bears,seblat/coala-bears,coala-analyzer/coala-bears,coala-analyzer/coala-bears,aptrishu/coala-bears,yashtrivedi96/coala-bears,refeed/coala-bears,vijeth-aradhya/coala-bears,refeed/coala-bears,yash-nisar/coala-bears,aptrishu/coala-bears,Shade5/coala-bears,seblat/coala-bears,ankit01ojha/coala-bears,yash-nisar/coala-bears,vijeth-aradhya/coala-bears,yashtrivedi96/coala-bears,aptrishu/coala-bears,coala-analyzer/coala-bears,yashtrivedi96/coala-bears,gs0510/coala-bears,Shade5/coala-bears,Asnelchristian/coala-bears,sounak98/coala-bears,refeed/coala-bears,vijeth-aradhya/coala-bears,madhukar01/coala-bears,madhukar01/coala-bears,madhukar01/coala-bears,yashtrivedi96/coala-bears,madhukar01/coala-bears,yash-nisar/coala-bears,gs0510/coala-bears,seblat/coala-bears,LWJensen/coala-bears,meetmangukiya/coala-bears,Vamshi99/coala-bears,srisankethu/coala-bears,Vamshi99/coala-bears,vijeth-aradhya/coala-bears,refeed/coala-bears,damngamerz/coala-bears,incorrectusername/coala-bears,arjunsinghy96/coala-bears,coala-analyzer/coala-bears,Vamshi99/coala-bears,meetmangukiya/coala-bears,yash-nisar/coala-bears,aptrishu/coala-bears,arjunsinghy96/coala-bears,aptrishu/coala-bears,srisankethu/coala-bears,naveentata/coala-bears,coala/coala-bears,naveentata/coala-bears,vijeth-aradhya/coala-bears,LWJensen/coala-bears,Vamshi99/coala-bears,arjunsinghy96/coala-bears,shreyans800755/coala-bears,naveentata/coala-bears,damngamerz/coala-bears,madhukar01/coala-bears,aptrishu/coala-bears,kaustubhhiware/coala-bears,coala/coala-bears,gs0510/coala-bears,vijeth-aradhya/coala-bears,shreyans800755/coala-bears,Vamshi99/coala-bears,srisankethu/coala-bears,Asnelchristian/coala-bears,damngamerz/coala-bears,yash-nisar/coala-bears,sounak98/coala-bears,ankit01ojha/coala-bears,coala/coala-bears,kaustubhhiware/coala-bears,kaustubhhiware/coala-bears,srisankethu/coala-bears,Asnelchristian/coala-bears,srisankethu/coala-bears,incorrectusername/coala-bears,meetmangukiya/coala-bears,Vamshi99/coala-bears,gs0510/coala-bears,seblat/coala-bears,LWJensen/coala-bears,horczech/coala-bears,refeed/coala-bears,damngamerz/coala-bears,madhukar01/coala-bears,refeed/coala-bears,horczech/coala-bears,seblat/coala-bears,ankit01ojha/coala-bears,horczech/coala-bears,yashtrivedi96/coala-bears,meetmangukiya/coala-bears,meetmangukiya/coala-bears,Shade5/coala-bears,meetmangukiya/coala-bears,shreyans800755/coala-bears,Asnelchristian/coala-bears,yash-nisar/coala-bears,coala/coala-bears,incorrectusername/coala-bears,LWJensen/coala-bears,Vamshi99/coala-bears,naveentata/coala-bears,Shade5/coala-bears,ankit01ojha/coala-bears,arjunsinghy96/coala-bears,seblat/coala-bears,gs0510/coala-bears,refeed/coala-bears,incorrectusername/coala-bears,sounak98/coala-bears,ankit01ojha/coala-bears,yashtrivedi96/coala-bears,yash-nisar/coala-bears,coala/coala-bears,sounak98/coala-bears,yashtrivedi96/coala-bears,coala/coala-bears,srisankethu/coala-bears,shreyans800755/coala-bears,shreyans800755/coala-bears,LWJensen/coala-bears,Shade5/coala-bears,yashtrivedi96/coala-bears,naveentata/coala-bears,coala-analyzer/coala-bears,coala/coala-bears,yash-nisar/coala-bears,coala-analyzer/coala-bears,Asnelchristian/coala-bears,Vamshi99/coala-bears,naveentata/coala-bears,Shade5/coala-bears,Shade5/coala-bears,coala/coala-bears,horczech/coala-bears,coala/coala-bears,srisankethu/coala-bears,shreyans800755/coala-bears,kaustubhhiware/coala-bears,naveentata/coala-bears,coala/coala-bears,meetmangukiya/coala-bears,gs0510/coala-bears,kaustubhhiware/coala-bears,incorrectusername/coala-bears,coala/coala-bears,damngamerz/coala-bears,sounak98/coala-bears,shreyans800755/coala-bears,meetmangukiya/coala-bears,LWJensen/coala-bears,seblat/coala-bears,srisankethu/coala-bears,shreyans800755/coala-bears,vijeth-aradhya/coala-bears,aptrishu/coala-bears,madhukar01/coala-bears,sounak98/coala-bears,yashtrivedi96/coala-bears,vijeth-aradhya/coala-bears,arjunsinghy96/coala-bears,arjunsinghy96/coala-bears,Asnelchristian/coala-bears,gs0510/coala-bears,ankit01ojha/coala-bears,arjunsinghy96/coala-bears,kaustubhhiware/coala-bears,Asnelchristian/coala-bears,coala/coala-bears,srisankethu/coala-bears,horczech/coala-bears,Asnelchristian/coala-bears,coala-analyzer/coala-bears,Vamshi99/coala-bears,gs0510/coala-bears,incorrectusername/coala-bears,refeed/coala-bears,yash-nisar/coala-bears,Vamshi99/coala-bears,incorrectusername/coala-bears,kaustubhhiware/coala-bears,damngamerz/coala-bears,ankit01ojha/coala-bears,naveentata/coala-bears,refeed/coala-bears,damngamerz/coala-bears,sounak98/coala-bears,sounak98/coala-bears,gs0510/coala-bears,Asnelchristian/coala-bears,srisankethu/coala-bears,damngamerz/coala-bears,Shade5/coala-bears,aptrishu/coala-bears,LWJensen/coala-bears,vijeth-aradhya/coala-bears | bears/go/GofmtBear.py | bears/go/GofmtBear.py | from coalib.bearlib.abstractions.Linter import linter
from coalib.bears.requirements.GoRequirement import GoRequirement
@linter(executable='gofmt',
use_stdin=True,
output_format='corrected',
result_message='Formatting can be improved.')
class GofmtBear:
"""
Suggest better formatting options in Go code. Basic checks like alignment,
indentation, and redundant parentheses are provided.
This is done using the ``gofmt`` utility. For more information visit
<https://golang.org/cmd/gofmt/>.
"""
LANGUAGES = {'Go'}
REQUIREMENTS = {GoRequirement(package='golang.org/cmd/gofmt', flag='-u')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
LICENSE = 'AGPL-3.0'
CAN_FIX = {'Formatting'}
ASCIINEMA_URL = 'https://asciinema.org/a/94812'
@staticmethod
def create_arguments(filename, file, config_file):
return ()
| from coalib.bearlib.abstractions.Linter import linter
from coalib.bears.requirements.GoRequirement import GoRequirement
@linter(executable='gofmt',
use_stdin=True,
output_format='corrected',
result_message='Formatting can be improved.')
class GofmtBear:
"""
Suggest better formatting options in Go code. Basic checks like alignment,
indentation, and redundant parentheses are provided.
This is done using the ``gofmt`` utility. For more information visit
<https://golang.org/cmd/gofmt/>.
"""
LANGUAGES = {'Go'}
REQUIREMENTS = {GoRequirement(package='golang.org/cmd/gofmt', flag='-u')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
LICENSE = 'AGPL-3.0'
CAN_FIX = {'Formatting'}
@staticmethod
def create_arguments(filename, file, config_file):
return ()
| agpl-3.0 | Python |
00d87e000c2abcc772c40e5c0d9faaf1e3cd5758 | Bump version | slash-testing/backslash-python,vmalloc/backslash-python | backslash/__version__.py | backslash/__version__.py | __version__ = '2.25.3'
| __version__ = '2.25.2'
| bsd-3-clause | Python |
a1390619619a364b9fab13504fb5c2464491d449 | Refactor Largest Palindrome Product for range of n is [1,8] | Kunal57/Python_Algorithms | Largest_Palindrome_Product.py | Largest_Palindrome_Product.py | # Find the largest palindrome made from the product of two n-digit numbers.
# Since the result could be very large, you should return the largest palindrome mod 1337.
# Example:
# Input: 2
# Output: 987
# Explanation: 99 x 91 = 9009, 9009 % 1337 = 987
# Note:
# The range of n is [1,8].
from itertools import product
def largestPalindrome(n):
"""
:type n: int
:rtype: int
"""
number = ""
for x in range(n):
number += "9"
number = int(number)
palindrome = 0
for x in range(number, 1, -2):
if (x*x) < palindrome:
break
for i in range(number, x - 1, -2):
product = x * i
if product < palindrome:
break
elif isPalindrome(product):
palindrome = product
break
return palindrome % 1337
def isPalindrome(num):
""" Return True is number is Palindrome, else return False """
return str(num) == str(num)[::-1]
n = 7
print(largestPalindrome(n)) | # Find the largest palindrome made from the product of two n-digit numbers.
# Since the result could be very large, you should return the largest palindrome mod 1337.
# Example:
# Input: 2
# Output: 987
# Explanation: 99 x 91 = 9009, 9009 % 1337 = 987
# Note:
# The range of n is [1,8].
def largestPalindrome(n):
"""
:type n: int
:rtype: int
"""
number = ""
for x in range(n):
number += "9"
minNum = int(number[:-1])
number = int(number)
palindrome = 0
for x in range(number, minNum, -2):
if (x**2) < palindrome:
break
for i in range(number, x - 1, -2):
product = x * i
if product <= palindrome or product % 11 != 0:
break
elif isPalindrome(product):
palindrome = product
print(palindrome, palindrome % 1337)
break
return (palindrome, palindrome % 1337)
def isPalindrome(num):
""" Return True is number is Palindrome, else return False """
numString = str(num)
if numString == numString[::-1]:
return True
return False
n = 8
print(largestPalindrome(n))
# for i in range(upper, int((x*x)**.5), -2):
# 990090099 152 99999 9901 99998 76865 | mit | Python |
de4af7935c1c8d6751c5a71ad90dd5f531f7a1b0 | Fix the script function args | fedora-infra/fedimg,fedora-infra/fedimg | bin/trigger_upload.py | bin/trigger_upload.py | #!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(url, compose_id, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
fedimg.uploader.upload(upload_pool, [url],
compose_id=compose_id,
push_notifications=push_notifications)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
| #!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
fedimg.uploader.upload(upload_pool, [url],
compose_id=compose_id,
push_notifications=push_notifications)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
| agpl-3.0 | Python |
166bff52496bfb47c5a3a03585bd10fb449b8d77 | Add wrapper for initscr() to copy the ACS_ and LINES,COLS bindings | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/curses/__init__.py | Lib/curses/__init__.py | """curses
The main package for curses support for Python. Normally used by importing
the package, and perhaps a particular module inside it.
import curses
from curses import textpad
curses.initwin()
...
"""
__revision__ = "$Id$"
from _curses import *
from curses.wrapper import wrapper
# Some constants, most notably the ACS_* ones, are only added to the C
# _curses module's dictionary after initscr() is called. (Some
# versions of SGI's curses don't define values for those constants
# until initscr() has been called.) This wrapper function calls the
# underlying C initscr(), and then copies the constants from the
# _curses module to the curses package's dictionary. Don't do 'from
# curses import *' if you'll be needing the ACS_* constants.
def initscr():
import _curses, curses
stdscr = _curses.initscr()
for key, value in _curses.__dict__.items():
if key[0:4] == 'ACS_' or key in ('LINES', 'COLS'):
setattr(curses, key, value)
return stdscr
| """curses
The main package for curses support for Python. Normally used by importing
the package, and perhaps a particular module inside it.
import curses
from curses import textpad
curses.initwin()
...
"""
__revision__ = "$Id$"
from _curses import *
from curses.wrapper import wrapper
| mit | Python |
1e3e0ee1f2966a121027eda405325e9cf627c147 | use original 'path' when printing errors if os.path.relpath fails | googlefonts/fontmake,googlei18n/fontmake,googlefonts/fontmake,googlei18n/fontmake | Lib/fontmake/errors.py | Lib/fontmake/errors.py | import os
def _try_relative_path(path):
# Try to return 'path' relative to the current working directory, or
# return input 'path' if we can't make a relative path.
# E.g. on Windows, os.path.relpath fails when path and "." are on
# different mount points, C: or D: etc.
try:
return os.path.relpath(path)
except ValueError:
return path
class FontmakeError(Exception):
"""Base class for all fontmake exceptions.
This exception is intended to be chained to the original exception. The
main purpose is to provide a source file trail that points to where the
explosion came from.
"""
def __init__(self, msg, source_file):
self.msg = msg
self.source_trail = [source_file]
def __str__(self):
trail = " -> ".join(
f"'{str(_try_relative_path(s))}'"
for s in reversed(self.source_trail)
if s is not None
)
cause = str(self.__cause__) if self.__cause__ is not None else None
message = ""
if trail:
message = f"In {trail}: "
message += f"{self.msg}"
if cause:
message += f": {cause}"
return message
class TTFAError(FontmakeError):
def __init__(self, exitcode, source_file):
self.exitcode = exitcode
self.source_trail = source_file
def __str__(self):
return (
f"ttfautohint failed for '{str(_try_relative_path(self.source_trail))}': "
f"error code {str(self.exitcode)}."
)
| import os
class FontmakeError(Exception):
"""Base class for all fontmake exceptions.
This exception is intended to be chained to the original exception. The
main purpose is to provide a source file trail that points to where the
explosion came from.
"""
def __init__(self, msg, source_file):
self.msg = msg
self.source_trail = [source_file]
def __str__(self):
trail = " -> ".join(
f"'{str(os.path.relpath(s))}'"
for s in reversed(self.source_trail)
if s is not None
)
cause = str(self.__cause__) if self.__cause__ is not None else None
message = ""
if trail:
message = f"In {trail}: "
message += f"{self.msg}"
if cause:
message += f": {cause}"
return message
class TTFAError(FontmakeError):
def __init__(self, exitcode, source_file):
self.exitcode = exitcode
self.source_trail = source_file
def __str__(self):
return (
f"ttfautohint failed for '{str(os.path.relpath(self.source_trail))}': "
f"error code {str(self.exitcode)}."
)
| apache-2.0 | Python |
727c266d123d7c6afdd4697e72d79a7e3659f9a1 | Update basic-calculator-ii.py | yiwen-luo/LeetCode,kamyu104/LeetCode,githubutilities/LeetCode,yiwen-luo/LeetCode,jaredkoontz/leetcode,kamyu104/LeetCode,githubutilities/LeetCode,githubutilities/LeetCode,githubutilities/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,githubutilities/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,jaredkoontz/leetcode,jaredkoontz/leetcode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,jaredkoontz/leetcode,yiwen-luo/LeetCode,kamyu104/LeetCode | Python/basic-calculator-ii.py | Python/basic-calculator-ii.py | # Time: O(n)
# Space: O(n)
#
# Implement a basic calculator to evaluate a simple expression string.
#
# The expression string contains only non-negative integers, +, -, *, /
# operators and empty spaces . The integer division should truncate toward zero.
#
# You may assume that the given expression is always valid.
#
# Some examples:
# "3+2*2" = 7
# " 3/2 " = 1
# " 3+5 / 2 " = 5
# Note: Do not use the eval built-in library function.
#
class Solution:
# @param {string} s
# @return {integer}
def calculate(self, s):
operands, operators = [], []
operand = ""
for i in reversed(xrange(len(s))):
if s[i].isdigit():
operand += s[i]
if i == 0 or not s[i-1].isdigit():
operands.append(int(operand[::-1]))
operand = ""
elif s[i] == ')' or s[i] == '*' or s[i] == '/':
operators.append(s[i])
elif s[i] == '+' or s[i] == '-':
while operators and \
(operators[-1] == '*' or operators[-1] == '/'):
self.compute(operands, operators)
operators.append(s[i])
elif s[i] == '(':
while operators[-1] != ')':
self.compute(operands, operators)
operators.pop()
while operators:
self.compute(operands, operators)
return operands[-1]
def compute(self, operands, operators):
left, right = operands.pop(), operands.pop()
op = operators.pop()
if op == '+':
operands.append(left + right)
elif op == '-':
operands.append(left - right)
elif op == '*':
operands.append(left * right)
elif op == '/':
operands.append(left / right)
| # Time: O(n)
# Space: O(n)
#
# Implement a basic calculator to evaluate a simple expression string.
#
# The expression string contains only non-negative integers, +, -, *, /
# operators and empty spaces . The integer division should truncate toward zero.
#
# You may assume that the given expression is always valid.
#
# Some examples:
# "3+2*2" = 7
# " 3/2 " = 1
# " 3+5 / 2 " = 5
# Note: Do not use the eval built-in library function.
#
class Solution:
# @param {string} s
# @return {integer}
def calculate(self, s):
operands, operators = [], []
operand = ""
for i in reversed(xrange(len(s))):
if s[i].isdigit():
operand += s[i]
if i == 0 or not s[i-1].isdigit():
operands.append(int(operand[::-1]))
operand = ""
elif s[i] == ')' or s[i] == '*' or s[i] == '/':
operators.append(s[i])
elif s[i] == '+' or s[i] == '-':
while operators and \
(operators[-1] == '*' or operators[-1] == '/'):
self.compute(operands, operators)
operators.append(s[i])
elif s[i] == '(':
while operators[-1] != ')':
self.compute(operands, operators)
operators.pop()
while operators:
self.compute(operands, operators)
return operands[-1]
def compute(self, operands, operators):
left, right = operands.pop(), operands.pop()
op = operators.pop()
if op == '+':
operands.append(left + right)
elif op == '-':
operands.append(left - right)
elif op == '*':
operands.append(left * right)
elif op == '/':
operands.append(left / right)
| mit | Python |
dec61efc0259c94186d5f704ac272dbb646bd824 | Make the "monitor" a positional argument that is not automatically converted to a monitor object | enthought/pikos,enthought/pikos,enthought/pikos | pikos/runner.py | pikos/runner.py | import argparse
import os
import sys
from pikos.monitors.api import (FunctionMonitor, LineMonitor,
FunctionMemoryMonitor,
LineMemoryMonitor)
from pikos.recorders.api import TextStreamRecorder
MONITORS = {'functions': FunctionMonitor,
'lines': LineMonitor,
'function_memory': FunctionMemoryMonitor,
'line_memory': LineMemoryMonitor}
def run_code_under_monitor(script, monitor):
"""Compile the file and run inside the monitor context.
Parameters
----------
filename : str
The filename of the script to run.
monitor : object
The monitor (i.e. context manager object) to use.
"""
sys.path.insert(0, os.path.dirname(script))
with open(script, 'rb') as handle:
code = compile(handle.read(), script, 'exec')
globs = {
'__file__': script,
'__name__': '__main__',
'__package__': None}
with monitor:
exec code in globs, None
def main():
description = "Execute the python script inside the pikos monitor context."
parser = argparse.ArgumentParser(description=description)
parser.add_argument('monitor', choices=MONITORS.keys(),
help='The monitor to use')
parser.add_argument('-o', '--output', type=argparse.FileType('w'),
help='Output results to a file')
parser.add_argument('--buffered', action='store_true',
help='Use a buffered stream.')
parser.add_argument('script', help='The script to run')
args = parser.parse_args()
stream = args.output if args.output is not None else sys.stdout
recorder = TextStreamRecorder(stream, auto_flush=(not args.buffered), formated=True)
monitor = MONITORS[args.monitor](recorder=recorder)
run_code_under_monitor(args.script, monitor)
if __name__ == '__main__':
main()
| import argparse
from pikos.monitors.api import (FunctionMonitor, LineMonitor, FunctionMemoryMonitor,
LineMemoryMonitor)
MONITORS = {'functions': FunctionMonitor,
'lines': LineMonitor,
'function_memory': FunctionMemoryMonitor,
'line_memory': LineMemoryMonitor}
def run_code_under_monitor(script, monitor):
"""Compile the file and run inside the monitor context.
Parameters
----------
filename : str
The filename of the script to run.
monitor : object
The monitor (i.e. context manager object) to use.
"""
sys.path.insert(0, os.path.dirname(script))
with open(script, 'rb') as handle:
code = compile(handle.read(), script, 'exec')
globs = {
'__file__': script,
'__name__': '__main__',
'__package__': None}
with monitor:
exec cmd in globs, None
def MonitorType(monitor_name):
"""Create the monitor from the command arguments. """
return MONITORS[monitor_name]
def main():
import sys
description = "Execute the python script inside the pikos monitor context. "
parser = argparse.ArgumentParser(description=description)
parser.add_argument('-m', '--monitor', type=MonitorType, default='functions',
choices=MONITORS.keys(), help='The monitor to use')
parser.add_argument('-o', '--output', type=argparse.FileType('w'),
help='Output results to a file')
parser.add_argument('--buffered', action='store_true',
help='Use a buffered stream.')
parser.add_argument('script', help='The script to run')
args = parser.parse_args()
stream = args.output if args.output is not None else sys.stdout
recorder = TextStreamRecorder(stream, auto_flush=(not args.buffered))
monitor = args.monitor(recorder=recorder)
run_code_under_monitor(args.script, monitor)
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
ad480bd4707771a416f474997225a55d92cf676a | Bump version number. | smarkets/smk_python_sdk | smarkets/__init__.py | smarkets/__init__.py | "Smarkets API package"
# Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
import inspect
import os
if 'READTHEDOCS' in os.environ:
from mock import Mock
import sys
eto = seto = \
sys.modules['smarkets.eto'] = sys.modules['smarkets.eto.piqi_pb2'] = \
sys.modules['smarkets.seto'] = sys.modules['smarkets.seto.piqi_pb2'] = Mock()
from smarkets.clients import Smarkets
from smarkets.events import (
EventsRequest,
Politics,
CurrentAffairs,
TvAndEntertainment,
SportByDate,
FootballByDate,
HorseRacingByDate,
TennisByDate,
BasketballByDate,
AmericanFootballByDate,
BaseballByDate,
CricketByDate,
HandballByDate,
RugbyByDate,
RugbyLeagueByDate,
VolleyballByDate,
SportOther,
)
from smarkets.exceptions import (
Error,
ConnectionError,
DecodeError,
ParseError,
SocketDisconnected,
InvalidCallbackError,
)
from smarkets.orders import OrderCreate
from smarkets.sessions import Session, SessionSettings
__version__ = '0.4.5'
__all__ = sorted(name for name, obj in locals().items()
if not (name.startswith('_') or inspect.ismodule(obj)))
VERSION = tuple((int(x) for x in __version__.split('.')))
| "Smarkets API package"
# Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
import inspect
import os
if 'READTHEDOCS' in os.environ:
from mock import Mock
import sys
eto = seto = \
sys.modules['smarkets.eto'] = sys.modules['smarkets.eto.piqi_pb2'] = \
sys.modules['smarkets.seto'] = sys.modules['smarkets.seto.piqi_pb2'] = Mock()
from smarkets.clients import Smarkets
from smarkets.events import (
EventsRequest,
Politics,
CurrentAffairs,
TvAndEntertainment,
SportByDate,
FootballByDate,
HorseRacingByDate,
TennisByDate,
BasketballByDate,
AmericanFootballByDate,
BaseballByDate,
CricketByDate,
HandballByDate,
RugbyByDate,
RugbyLeagueByDate,
VolleyballByDate,
SportOther,
)
from smarkets.exceptions import (
Error,
ConnectionError,
DecodeError,
ParseError,
SocketDisconnected,
InvalidCallbackError,
)
from smarkets.orders import OrderCreate
from smarkets.sessions import Session, SessionSettings
__version__ = '0.4.4'
__all__ = sorted(name for name, obj in locals().items()
if not (name.startswith('_') or inspect.ismodule(obj)))
VERSION = tuple((int(x) for x in __version__.split('.')))
| mit | Python |
7c12b5fe6ee5a5cd9761482cc2d3e3363cab7cd3 | Save the figure | eggplantbren/TwinPeaks3,eggplantbren/TwinPeaks3,eggplantbren/TwinPeaks3 | Paper/figures/joint.py | Paper/figures/joint.py | import numpy as np
import numpy.random as rng
from matplotlib import rc
import matplotlib.pyplot as plt
"""
Make a plot of \pi(L1, L2)
Based on a similar plot from ABCNS
"""
rng.seed(0)
rc("font", size=18, family="serif", serif="Computer Sans")
rc("text", usetex=True)
# Resolution
N = 256
[x, y] = np.meshgrid(np.linspace(0., 5., N), np.linspace(5., 0., N))
f = np.exp(-0.5*(x-3.5)**2/1.**2)*np.exp(-0.5*((y - 5*(x/5)**2)**2)/0.3**2)
f /= f.sum()
# Generate samples
M = 20
xx = 3.5 + 1.*rng.randn(M)
yy = 5*(xx/5)**2 + 0.3*rng.randn(M)
keep = (xx > 0.) & (xx < 5.) & (yy > 0.) & (yy < 5.)
xx = xx[keep]
yy = yy[keep]
plt.imshow(f, extent=[x.min(), x.max(), y.min(), y.max()], cmap='Blues')
plt.plot(xx, yy, 'ko')
plt.xlabel(r'$L_1$')
plt.ylabel(r'$L_2$')
plt.title(r'Prior $\pi(L_1, L_2)$')
plt.axhline(1.5, xmin=3./5., xmax=5./5., color='k')
plt.axvline(3., ymin=1.5/5., ymax=5./5., color='k')
plt.fill_between(x[0, :][x[0, :] > 3.], 1.5, 5., color=[0.6, 0.6, 0.6], alpha=0.2)
plt.savefig('joint1.pdf', bbox_inches='tight')
plt.show()
#plt.axhline(1.3095, xmin=0., xmax=2.6455/5., color='k')
#plt.axvline(2.6455, ymin=0., ymax=1.3095/5., color='k')
#plt.axhline(1.3095, linestyle='--', color='k')
#plt.axvline(2.6455, linestyle='--', color='k')
#plt.axhline(0.841, xmin=0., xmax=2.6455/5., linestyle='-.', color='k')
#plt.fill_between(x[0, :][x[0, :] < 2.6455], 0., 1.3095, color=[0.6, 0.6, 0.6], alpha=0.2)
#plt.plot(2.645, 1.31, 'r*', markersize=15)
#plt.plot(2.525, 0.84, 'gH', markersize=10)
#plt.savefig('joint2.pdf', bbox_inches='tight')
#plt.show()
| import numpy as np
import numpy.random as rng
from matplotlib import rc
import matplotlib.pyplot as plt
"""
Make a plot of \pi(L1, L2)
Based on a similar plot from ABCNS
"""
rng.seed(0)
rc("font", size=18, family="serif", serif="Computer Sans")
rc("text", usetex=True)
# Resolution
N = 256
[x, y] = np.meshgrid(np.linspace(0., 5., N), np.linspace(5., 0., N))
f = np.exp(-0.5*(x-3.5)**2/1.**2)*np.exp(-0.5*((y - 5*(x/5)**2)**2)/0.3**2)
f /= f.sum()
# Generate samples
M = 20
xx = 3.5 + 1.*rng.randn(M)
yy = 5*(xx/5)**2 + 0.3*rng.randn(M)
keep = (xx > 0.) & (xx < 5.) & (yy > 0.) & (yy < 5.)
xx = xx[keep]
yy = yy[keep]
plt.imshow(f, extent=[x.min(), x.max(), y.min(), y.max()], cmap='Blues')
plt.plot(xx, yy, 'ko')
plt.xlabel(r'$L_1$')
plt.ylabel(r'$L_2$')
plt.title(r'Prior $\pi(L_1, L_2)$')
plt.axhline(1.5, xmin=3./5., xmax=5./5., color='k')
plt.axvline(3., ymin=1.5/5., ymax=5./5., color='k')
plt.fill_between(x[0, :][x[0, :] > 3.], 1.5, 5., color=[0.6, 0.6, 0.6], alpha=0.2)
plt.show()
#plt.savefig('joint1.pdf', bbox_inches='tight')
#plt.axhline(1.3095, xmin=0., xmax=2.6455/5., color='k')
#plt.axvline(2.6455, ymin=0., ymax=1.3095/5., color='k')
#plt.axhline(1.3095, linestyle='--', color='k')
#plt.axvline(2.6455, linestyle='--', color='k')
#plt.axhline(0.841, xmin=0., xmax=2.6455/5., linestyle='-.', color='k')
#plt.fill_between(x[0, :][x[0, :] < 2.6455], 0., 1.3095, color=[0.6, 0.6, 0.6], alpha=0.2)
#plt.plot(2.645, 1.31, 'r*', markersize=15)
#plt.plot(2.525, 0.84, 'gH', markersize=10)
#plt.savefig('joint2.pdf', bbox_inches='tight')
#plt.show()
| mit | Python |
af1b840c17febd851785c95b872faa99ededd184 | Remove repeated argument | adw0rd/django-social-auth,VishvajitP/django-social-auth,duoduo369/django-social-auth,qas612820704/django-social-auth,vxvinh1511/django-social-auth,michael-borisov/django-social-auth,qas612820704/django-social-auth,gustavoam/django-social-auth,caktus/django-social-auth,beswarm/django-social-auth,dongguangming/django-social-auth,dongguangming/django-social-auth,vuchau/django-social-auth,vxvinh1511/django-social-auth,caktus/django-social-auth,gustavoam/django-social-auth,VishvajitP/django-social-auth,omab/django-social-auth,getsentry/django-social-auth,vuchau/django-social-auth,michael-borisov/django-social-auth,omab/django-social-auth,beswarm/django-social-auth | social_auth/views.py | social_auth/views.py | from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_POST
from social.utils import setting_name
from social.actions import do_auth, do_complete, do_disconnect
from social.strategies.utils import get_strategy
from social.apps.django_app.utils import strategy, BACKENDS, STORAGE
from social.apps.django_app.views import _do_login
STRATEGY = getattr(settings, setting_name('STRATEGY'),
'social_auth.strategy.DSAStrategy')
def load_strategy(*args, **kwargs):
return get_strategy(BACKENDS, STRATEGY, STORAGE, *args, **kwargs)
@strategy('socialauth_complete', load_strategy=load_strategy)
def auth(request, backend):
return do_auth(request.strategy, redirect_name=REDIRECT_FIELD_NAME)
@csrf_exempt
@strategy('socialauth_complete', load_strategy=load_strategy)
def complete(request, backend, *args, **kwargs):
return do_complete(request.strategy, _do_login, request.user,
redirect_name=REDIRECT_FIELD_NAME, *args, **kwargs)
@login_required
@strategy(load_strategy=load_strategy)
@require_POST
@csrf_protect
def disconnect(request, backend, association_id=None):
return do_disconnect(request.strategy, request.user, association_id,
redirect_name=REDIRECT_FIELD_NAME)
| from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_POST
from social.utils import setting_name
from social.actions import do_auth, do_complete, do_disconnect
from social.strategies.utils import get_strategy
from social.apps.django_app.utils import strategy, BACKENDS, STORAGE
from social.apps.django_app.views import _do_login
STRATEGY = getattr(settings, setting_name('STRATEGY'),
'social_auth.strategy.DSAStrategy')
def load_strategy(*args, **kwargs):
return get_strategy(BACKENDS, STRATEGY, STORAGE, *args, **kwargs)
@strategy('socialauth_complete', load_strategy=load_strategy)
def auth(request, backend):
return do_auth(request.strategy, redirect_name=REDIRECT_FIELD_NAME)
@csrf_exempt
@strategy('socialauth_complete', load_strategy=load_strategy)
def complete(request, backend, *args, **kwargs):
return do_complete(request.strategy, _do_login, request.user,
redirect_name=REDIRECT_FIELD_NAME, request=request,
*args, **kwargs)
@login_required
@strategy(load_strategy=load_strategy)
@require_POST
@csrf_protect
def disconnect(request, backend, association_id=None):
return do_disconnect(request.strategy, request.user, association_id,
redirect_name=REDIRECT_FIELD_NAME)
| bsd-3-clause | Python |
17faea99343e37036b7ee35e5d3273f98a52dba9 | Fix numpy related errors on Mavericks. | cryos/tomviz,thewtex/tomviz,cjh1/tomviz,cryos/tomviz,cryos/tomviz,Hovden/tomviz,Hovden/tomviz,yijiang1/tomviz,cjh1/tomviz,thewtex/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,yijiang1/tomviz,cjh1/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,thewtex/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,OpenChemistry/tomviz | Python/tomviz/utils.py | Python/tomviz/utils.py | import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa
import vtk.util.numpy_support as np_s
def get_scalars(dataobject):
do = dsa.WrapDataObject(dataobject)
# get the first
rawarray = do.PointData.GetScalars()
vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
vtkarray.Association = dsa.ArrayAssociation.POINT
return vtkarray
def set_scalars(dataobject, newscalars):
do = dsa.WrapDataObject(dataobject)
oldscalars = do.PointData.GetScalars()
name = oldscalars.GetName()
del oldscalars
# handle the case if the newscalars array has a type that
# cannot be passed on to VTK. In which case, we convert to
# convert to float64
vtk_typecode = np_s.get_vtk_array_type(newscalars.dtype)
if vtk_typecode is None:
newscalars = newscalars.astype(np.float64)
do.PointData.append(newscalars, name)
do.PointData.SetActiveScalars(name)
| import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa
def get_scalars(dataobject):
do = dsa.WrapDataObject(dataobject)
# get the first
rawarray = do.PointData.GetScalars()
vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
vtkarray.Association = dsa.ArrayAssociation.POINT
return vtkarray
def set_scalars(dataobject, newscalars):
do = dsa.WrapDataObject(dataobject)
oldscalars = do.PointData.GetScalars()
name = oldscalars.GetName()
del oldscalars
do.PointData.append(newscalars, name)
do.PointData.SetActiveScalars(name)
| bsd-3-clause | Python |
5a305a9063f20eacdcce255b650b0246286a54bc | add gender filter | praekelt/molo-gem,praekelt/molo-gem,praekelt/molo-gem | gem/admin.py | gem/admin.py | from django.contrib import admin
from django.contrib.auth.models import User
from gem.models import GemUserProfile, GemCommentReport
from molo.commenting.admin import MoloCommentAdmin
from molo.commenting.models import MoloComment
from molo.profiles.admin import ProfileUserAdmin
from django.http import HttpResponse
import csv
def download_as_csv_gem(GemUserAdmin, request, queryset):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment;filename=export.csv'
writer = csv.writer(response)
user_model_fields = (
'username', 'email', 'first_name',
'last_name', 'is_staff', 'date_joined')
profile_fields = ('alias', 'mobile_number', 'date_of_birth')
gem_profile_fields = ('gender',)
field_names = user_model_fields + profile_fields + gem_profile_fields
writer.writerow(field_names)
for obj in queryset:
if hasattr(obj, 'gem_profile'):
if obj.profile.alias:
obj.profile.alias = obj.profile.alias.encode('utf-8')
obj.username = obj.username.encode('utf-8')
obj.date_joined = obj.date_joined.strftime("%Y-%m-%d %H:%M")
writer.writerow(
[getattr(obj, field) for field in user_model_fields] +
[getattr(obj.profile, field) for field in profile_fields] +
[getattr(
obj.gem_profile, field) for field in gem_profile_fields])
return response
download_as_csv_gem.short_description = "Download selected as csv gem"
class GemUserProfileInlineModelAdmin(admin.StackedInline):
model = GemUserProfile
can_delete = False
class GemCommentReportModelAdmin(admin.StackedInline):
model = GemCommentReport
can_delete = True
max_num = 0
actions = None
readonly_fields = ["user", "reported_reason", ]
class GemUserAdmin(ProfileUserAdmin):
inlines = (GemUserProfileInlineModelAdmin, )
list_display = ProfileUserAdmin.list_display + ('gender',)
actions = ProfileUserAdmin.actions + [download_as_csv_gem]
list_filter = ('gem_profile__gender',)
def gender(self, obj):
return obj.gem_profile.get_gender_display()
class GemCommentReportAdmin(MoloCommentAdmin):
inlines = (GemCommentReportModelAdmin,)
admin.site.unregister(User)
admin.site.register(User, GemUserAdmin)
admin.site.unregister(MoloComment)
admin.site.register(MoloComment, GemCommentReportAdmin)
| from django.contrib import admin
from django.contrib.auth.models import User
from gem.models import GemUserProfile, GemCommentReport
from molo.commenting.admin import MoloCommentAdmin
from molo.commenting.models import MoloComment
from molo.profiles.admin import ProfileUserAdmin
from django.http import HttpResponse
import csv
def download_as_csv_gem(GemUserAdmin, request, queryset):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment;filename=export.csv'
writer = csv.writer(response)
user_model_fields = (
'username', 'email', 'first_name',
'last_name', 'is_staff', 'date_joined')
profile_fields = ('alias', 'mobile_number', 'date_of_birth')
gem_profile_fields = ('gender',)
field_names = user_model_fields + profile_fields + gem_profile_fields
writer.writerow(field_names)
for obj in queryset:
if hasattr(obj, 'gem_profile'):
if obj.profile.alias:
obj.profile.alias = obj.profile.alias.encode('utf-8')
obj.username = obj.username.encode('utf-8')
obj.date_joined = obj.date_joined.strftime("%Y-%m-%d %H:%M")
writer.writerow(
[getattr(obj, field) for field in user_model_fields] +
[getattr(obj.profile, field) for field in profile_fields] +
[getattr(
obj.gem_profile, field) for field in gem_profile_fields])
return response
download_as_csv_gem.short_description = "Download selected as csv gem"
class GemUserProfileInlineModelAdmin(admin.StackedInline):
model = GemUserProfile
can_delete = False
class GemCommentReportModelAdmin(admin.StackedInline):
model = GemCommentReport
can_delete = True
max_num = 0
actions = None
readonly_fields = ["user", "reported_reason", ]
class GemUserAdmin(ProfileUserAdmin):
inlines = (GemUserProfileInlineModelAdmin, )
list_display = ProfileUserAdmin.list_display + ('gender',)
actions = ProfileUserAdmin.actions + [download_as_csv_gem]
def gender(self, obj):
return obj.gem_profile.get_gender_display()
class GemCommentReportAdmin(MoloCommentAdmin):
inlines = (GemCommentReportModelAdmin,)
admin.site.unregister(User)
admin.site.register(User, GemUserAdmin)
admin.site.unregister(MoloComment)
admin.site.register(MoloComment, GemCommentReportAdmin)
| bsd-2-clause | Python |
edb4bfb06882dfe4c9ec6690ab6825c802514981 | set svn:propset | landryb/psutil,landryb/psutil,tomprince/psutil,mindw/psutil,packages/psutil,packages/psutil,msarahan/psutil,cloudbase/psutil,0-wiz-0/psutil,mindw/psutil,giampaolo/psutil,msarahan/psutil,jamesblunt/psutil,mindw/psutil,qbit/psutil,msarahan/psutil,jorik041/psutil,cloudbase/psutil,Q-Leap-Networks/psutil,landryb/psutil,mrjefftang/psutil,mindw/psutil,0-wiz-0/psutil,qbit/psutil,qbit/psutil,tomprince/psutil,mrjefftang/psutil,jorik041/psutil,mrjefftang/psutil,landryb/psutil,giampaolo/psutil,tomprince/psutil,jamesblunt/psutil,Q-Leap-Networks/psutil,packages/psutil,tomprince/psutil,tomprince/psutil,jamesblunt/psutil,mindw/psutil,jorik041/psutil,cloudbase/psutil,landryb/psutil | psutil/error.py | psutil/error.py | # $Id
# this exception get overriden by the platform specific modules if necessary
class Error(Exception):
pass
class NoSuchProcess(Error):
"""No process was found for the given parameters."""
def __init__(self, pid=None, msg=None):
self.pid = pid
self.msg = msg
def __str__(self):
if self.msg:
return self.msg
elif self.pid:
return "no such process with pid %d" % self.pid
else:
return ""
class AccessDenied(Error):
"""Exception raised when permission to perform an action is denied."""
def __init__(self, pid=None, msg=None):
self.pid = pid
self.msg = msg
def __str__(self):
return self.msg or ""
| # $Id$
# this exception get overriden by the platform specific modules if necessary
class Error(Exception):
pass
class NoSuchProcess(Error):
"""No process was found for the given parameters."""
def __init__(self, pid=None, msg=None):
self.pid = pid
self.msg = msg
def __str__(self):
if self.msg:
return self.msg
elif self.pid:
return "no such process with pid %d" % self.pid
else:
return ""
class AccessDenied(Error):
"""Exception raised when permission to perform an action is denied."""
def __init__(self, pid=None, msg=None):
self.pid = pid
self.msg = msg
def __str__(self):
return self.msg or ""
| bsd-3-clause | Python |
e7c1b294f5c3fb825a301e89365de7f19b3ddf3b | Add config file reading | peterfpeterson/finddata | publish_plot.py | publish_plot.py | #!/usr/bin/env python
import json
import os
CONFIG_FILE = '/etc/autoreduce/post_processing.conf'
class Configuration(object):
"""
Read and process configuration file and provide an easy way to create a configured Client object
"""
def __init__(self, config_file):
if os.access(config_file, os.R_OK) == False:
raise RuntimeError, "Configuration file doesn't exist or is not readable: %s" % config_file
cfg = open(config_file, 'r')
json_encoded = cfg.read()
config = json.loads(json_encoded)
# Keep a record of which config file we are using
self.config_file = config_file
# plot publishing
self.publish_url = config['publish_url_template'] if 'publish_url_template' in config else ''
self.publisher_username = config['publisher_username'] if 'publisher_username' in config else ''
self.publisher_password = config['publisher_password'] if 'publisher_password' in config else ''
def read_configuration(config_file=None):
"""
Returns a new configuration object for a given
configuration file
@param config_file: configuration file to process
"""
if config_file is None:
# Make sure we have a configuration file to read
config_file = CONFIG_FILE
if os.access(config_file, os.R_OK) == False:
raise RuntimeError("Configuration file doesn't exist or is not readable: %s" % CONFIG_FILE)
return Configuration(config_file)
def loadDiv(filename):
if not os.path.exists(filename):
raise RuntimeError('\'%s\' does not exist' % filename)
print 'loading \'%s\'' % filename
with file(filename, 'r') as handle:
div = handle.read()
return div
def _getURL(url_template, instrument, run_number):
import string
url_template=string.Template(url_template)
url = url_template.substitute(instrument=instrument,
run_number=str(run_number))
return url
def publish_plot(instrument, run_number, files, config=None):
if config is None:
config = read_configuration()
run_number = str(run_number)
url = _getURL(config.publish_url, instrument, run_number)
print 'posting to \'%s\'' % url
# these next 2 lines are explicity bad - and doesn't seem
# to do ANYTHING
# https://urllib3.readthedocs.org/en/latest/security.html
import urllib3
urllib3.disable_warnings()
import requests
request = requests.post(url, data={'username': config.publisher_username,
'password': config.publisher_password},
files=files, verify=False)
return request
if __name__ == '__main__':
import sys
div = loadDiv(sys.argv[1])
name = os.path.split(sys.argv[1])[-1]
(instr, runnumber) = name.split('_')[:2]
#print '**********'
#print div
config = read_configuration('post_processing.conf')
request = publish_plot(instr, runnumber, {'file':div}, config)
print 'request returned', request.status_code
| #!/usr/bin/env python
import os
import json
def loadDiv(filename):
if not os.path.exists(filename):
raise RuntimeError('\'%s\' does not exist' % filename)
print 'loading \'%s\'' % filename
with file(filename, 'r') as handle:
div = handle.read()
return div
def getURL(instrument, run_number):
import string
url_template='https://livedata.sns.gov/plots/$instrument/$run_number/upload_plot_data/'
url_template=string.Template(url_template)
url = url_template.substitute(instrument=instrument,
run_number=str(run_number))
return url
def publish_plot(instrument, run_number, files):
run_number = str(run_number)
url = getURL(instrument, run_number)
print 'posting to \'%s\'' % url
# these next 2 lines are explicity bad - and doesn't seem
# to do ANYTHING
# https://urllib3.readthedocs.org/en/latest/security.html
import urllib3
urllib3.disable_warnings()
import requests
return request
if __name__ == '__main__':
import sys
div = loadDiv(sys.argv[1])
#print '**********'
#print div
request = publish_plot('PG3', '29574', {'file':div})
print 'request returned', request.status_code
| mit | Python |
311c8db11c984cc9a8ed21ae42b85b1d5a8cdc59 | index views | anselmobd/fo2,anselmobd/fo2,anselmobd/fo2,anselmobd/fo2 | src/fo2/views/views.py | src/fo2/views/views.py | from pprint import pprint
from django.contrib.auth import logout
from django.contrib.auth.models import User
from django.http import HttpResponse, JsonResponse
from django.shortcuts import redirect, render
from django.utils import timezone
from django.views.generic import TemplateView, View
from base.models import Colaborador
from utils.classes import AcessoInterno
from utils.functions import get_client_ip, fo2logger
from utils.functions.ssh import router_add_ip_apoio_auth
def index_tussor_view(request):
fo2logger.info('index')
return redirect('apoio_ao_erp')
def index_agator_view(request):
fo2logger.info('index')
return HttpResponse("Agator")
def test_view(request):
context = {}
return render(request, 'test.html', context)
class ApoioAoErpView(TemplateView):
template_name = "index.html"
# def get_context_data(self, *args, **kwargs):
# context = super(ApoioAoErpView, self).get_context_data(
# *args, **kwargs)
# context['logged_count'] = Colaborador.objects.filter(
# logged=True).count()
# return context
class IntranetView(TemplateView):
template_name = "intranet.html"
def myip_view(request):
return HttpResponse("Your IP is : {}".format(get_client_ip(request)))
def meuip_view(request):
return HttpResponse("Seu IP é : {}".format(get_client_ip(request)))
def ack_view(request):
return HttpResponse("Ack")
class SystextilView(View):
def get(self, request, *args, **kwargs):
acesso_interno = AcessoInterno()
try:
acesso_externo = not acesso_interno.current_interno
except Exception:
acesso_externo = False
context = {
'externo': acesso_externo,
}
return render(request, "oficial_systextil.html", context)
def router_ip_to_apoio_auth(request):
result = router_add_ip_apoio_auth(get_client_ip(request))
return JsonResponse(result, safe=False)
| from pprint import pprint
from django.contrib.auth import logout
from django.contrib.auth.models import User
from django.http import HttpResponse, JsonResponse
from django.shortcuts import redirect, render
from django.utils import timezone
from django.views.generic import TemplateView, View
from base.models import Colaborador
from utils.classes import AcessoInterno
from utils.functions import get_client_ip, fo2logger
from utils.functions.ssh import router_add_ip_apoio_auth
def index_view(request):
fo2logger.info('index')
return redirect('apoio_ao_erp')
def test_view(request):
context = {}
return render(request, 'test.html', context)
class ApoioAoErpView(TemplateView):
template_name = "index.html"
# def get_context_data(self, *args, **kwargs):
# context = super(ApoioAoErpView, self).get_context_data(
# *args, **kwargs)
# context['logged_count'] = Colaborador.objects.filter(
# logged=True).count()
# return context
class IntranetView(TemplateView):
template_name = "intranet.html"
def myip_view(request):
return HttpResponse("Your IP is : {}".format(get_client_ip(request)))
def meuip_view(request):
return HttpResponse("Seu IP é : {}".format(get_client_ip(request)))
def ack_view(request):
return HttpResponse("Ack")
class SystextilView(View):
def get(self, request, *args, **kwargs):
acesso_interno = AcessoInterno()
try:
acesso_externo = not acesso_interno.current_interno
except Exception:
acesso_externo = False
context = {
'externo': acesso_externo,
}
return render(request, "oficial_systextil.html", context)
def router_ip_to_apoio_auth(request):
result = router_add_ip_apoio_auth(get_client_ip(request))
return JsonResponse(result, safe=False)
| mit | Python |
f24b80a7f8804560c8703da4e31321411b609611 | Bump to 0.7.0 and add Python 3 classifiers | web-push-libs/vapid,jrconlin/vapid,jrconlin/vapid,web-push-libs/vapid,jrconlin/vapid,jrconlin/vapid,web-push-libs/vapid,web-push-libs/vapid,web-push-libs/vapid | python/setup.py | python/setup.py | import io
import os
from setuptools import setup, find_packages
__version__ = "0.7.0"
def read_from(file):
reply = []
with io.open(os.path.join(here, file), encoding='utf8') as f:
for l in f:
l = l.strip()
if not l:
break
if l[0] != '#' or l[:2] != '//':
reply.append(l)
return reply
here = os.path.abspath(os.path.dirname(__file__))
with io.open(os.path.join(here, 'README.md'), encoding='utf8') as f:
README = f.read()
with io.open(os.path.join(here, 'CHANGELOG.md'), encoding='utf8') as f:
CHANGES = f.read()
setup(name="py-vapid",
version=__version__,
description='Simple VAPID header generation library',
long_description=README + '\n\n' + CHANGES,
classifiers=["Topic :: Internet :: WWW/HTTP",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
],
keywords='vapid push webpush',
author="JR Conlin",
author_email="src+vapid@jrconlin.com",
url='https://github.com/mozilla-services/vapid',
license="MPL2",
test_suite="nose.collector",
include_package_data=True,
zip_safe=False,
packages=find_packages(),
package_data={'': ['README.md', 'CHANGELOG.md',
'requirements.txt', 'test-requirements.txt']},
install_requires=read_from('requirements.txt'),
tests_require=read_from('test-requirements.txt'),
entry_points="""
[console_scripts]
vapid = py_vapid.main:main
[nose.plugins]
object-tracker = autopush.noseplugin:ObjectTracker
""",
)
| import io
import os
from setuptools import setup, find_packages
__version__ = "0.6.0"
def read_from(file):
reply = []
with io.open(os.path.join(here, file), encoding='utf8') as f:
for l in f:
l = l.strip()
if not l:
break
if l[0] != '#' or l[:2] != '//':
reply.append(l)
return reply
here = os.path.abspath(os.path.dirname(__file__))
with io.open(os.path.join(here, 'README.md'), encoding='utf8') as f:
README = f.read()
with io.open(os.path.join(here, 'CHANGELOG.md'), encoding='utf8') as f:
CHANGES = f.read()
setup(name="py-vapid",
version=__version__,
description='Simple VAPID header generation library',
long_description=README + '\n\n' + CHANGES,
classifiers=["Topic :: Internet :: WWW/HTTP",
'Programming Language :: Python',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7"
],
keywords='vapid push webpush',
author="JR Conlin",
author_email="src+vapid@jrconlin.com",
url='https://github.com/mozilla-services/vapid',
license="MPL2",
test_suite="nose.collector",
include_package_data=True,
zip_safe=False,
packages=find_packages(),
package_data={'': ['README.md', 'CHANGELOG.md',
'requirements.txt', 'test-requirements.txt']},
install_requires=read_from('requirements.txt'),
tests_require=read_from('test-requirements.txt'),
entry_points="""
[console_scripts]
vapid = py_vapid.main:main
[nose.plugins]
object-tracker = autopush.noseplugin:ObjectTracker
""",
)
| mpl-2.0 | Python |
edd74c12aa04e9d28fbcea9d8df21885abeab890 | Add a #! thingy and make executable. This is easier for me to get the right Python (i.e. not Cygwin) to execute the script on Windows. | zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb | python/setup.py | python/setup.py | #!/usr/bin/env python
# The contents of this file are subject to the MonetDB Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://monetdb.cwi.nl/Legal/MonetDBLicense-1.1.html
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is the MonetDB Database System.
#
# The Initial Developer of the Original Code is CWI.
# Portions created by CWI are Copyright (C) 1997-July 2008 CWI.
# Copyright August 2008-2010 MonetDB B.V.
# All Rights Reserved.
from distutils.core import setup
setup(name='python-monetdb',
version='1.0',
description='Native MonetDB client Python API',
long_description='''\
MonetDB is a database management system that is developed from a
main-memory perspective with use of a fully decomposed storage model,
automatic index management, extensibility of data types and search
accelerators, SQL- and XML- frontends.
This package contains the files needed to use MonetDB from a Python
program.
''',
author='MonetDB BV',
author_email='info@monetdb.org',
url='http://monetdb.cwi.nl/',
packages=['monetdb', 'monetdb.sql'],
download_url='<will be filled in before a release>/python-monetdb-1.0.tar.gz',
)
| # The contents of this file are subject to the MonetDB Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://monetdb.cwi.nl/Legal/MonetDBLicense-1.1.html
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is the MonetDB Database System.
#
# The Initial Developer of the Original Code is CWI.
# Portions created by CWI are Copyright (C) 1997-July 2008 CWI.
# Copyright August 2008-2010 MonetDB B.V.
# All Rights Reserved.
from distutils.core import setup
setup(name='python-monetdb',
version='1.0',
description='Native MonetDB client Python API',
long_description='''\
MonetDB is a database management system that is developed from a
main-memory perspective with use of a fully decomposed storage model,
automatic index management, extensibility of data types and search
accelerators, SQL- and XML- frontends.
This package contains the files needed to use MonetDB from a Python
program.
''',
author='MonetDB BV',
author_email='info@monetdb.org',
url='http://monetdb.cwi.nl/',
packages=['monetdb', 'monetdb.sql'],
download_url='<will be filled in before a release>/python-monetdb-1.0.tar.gz',
)
| mpl-2.0 | Python |
9d8a299db591fadd729fc4ec78a1de1fe2cca869 | Fix delete_selected KeyError in locations admin | mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign | src/locations/admin.py | src/locations/admin.py | from django.contrib import admin
from django.db.models import F, Sum, IntegerField
from django.utils.translation import ugettext_lazy as _
import nested_admin
from .models import District, Location
from campaigns.models import CampaignLocationShift
from coordinators.models import filter_by_district
from campaigns.admin import (
VolunteerParticipantInlineBase,
CampaignLocationShiftForm
)
from .admin_actions import list_volunteers_by_shift_and_location
class VolunteerParticipantInline(VolunteerParticipantInlineBase, nested_admin.NestedTabularInline):
pass
class CampaignShiftInline(nested_admin.NestedTabularInline):
form = CampaignLocationShiftForm
model = CampaignLocationShift
fields = ['day', 'start', 'end', 'total_places', 'shift_leader']
readonly_fields = ['day', 'start', 'end', 'total_places']
exclude = ['volunteers']
inlines = [VolunteerParticipantInline]
extra = 0
class LocationAdmin(nested_admin.NestedModelAdmin):
search_fields = ['name', 'address',
'campaignlocationshift__volunteers__first_name',
'campaignlocationshift__volunteers__last_name']
list_filter = ['district']
inlines = [CampaignShiftInline]
list_display = ['name', 'volunteers_count', 'free_places']
save_on_top = True
actions = [list_volunteers_by_shift_and_location]
def volunteers_count(self, obj):
return obj.campaignlocationshift_set.aggregate(
count=Sum('volunteers__participant_count'))['count']
volunteers_count.short_description = _('Volunteers count')
def free_places(self, obj):
total_places = obj.campaignlocationshift_set.aggregate(total=Sum(
'total_places'))['total']
taken_places = obj.campaignlocationshift_set.aggregate(taken=Sum(
'volunteers__participant_count'))['taken']
return int(total_places or 0) - int(taken_places or 0)
free_places.short_description = _('Free places')
def get_queryset(self, request):
qs = super().get_queryset(request)
return filter_by_district(qs, request.user, 'district')
def get_actions(self, request):
actions = super().get_actions(request)
if 'delete_selected' in actions:
del actions['delete_selected']
return actions
admin.site.register(District)
admin.site.register(Location, LocationAdmin)
| from django.contrib import admin
from django.db.models import F, Sum, IntegerField
from django.utils.translation import ugettext_lazy as _
import nested_admin
from .models import District, Location
from campaigns.models import CampaignLocationShift
from coordinators.models import filter_by_district
from campaigns.admin import (
VolunteerParticipantInlineBase,
CampaignLocationShiftForm
)
from .admin_actions import list_volunteers_by_shift_and_location
class VolunteerParticipantInline(VolunteerParticipantInlineBase, nested_admin.NestedTabularInline):
pass
class CampaignShiftInline(nested_admin.NestedTabularInline):
form = CampaignLocationShiftForm
model = CampaignLocationShift
fields = ['day', 'start', 'end', 'total_places', 'shift_leader']
readonly_fields = ['day', 'start', 'end', 'total_places']
exclude = ['volunteers']
inlines = [VolunteerParticipantInline]
extra = 0
class LocationAdmin(nested_admin.NestedModelAdmin):
search_fields = ['name', 'address',
'campaignlocationshift__volunteers__first_name',
'campaignlocationshift__volunteers__last_name']
list_filter = ['district']
inlines = [CampaignShiftInline]
list_display = ['name', 'volunteers_count', 'free_places']
save_on_top = True
actions = [list_volunteers_by_shift_and_location]
def volunteers_count(self, obj):
return obj.campaignlocationshift_set.aggregate(
count=Sum('volunteers__participant_count'))['count']
volunteers_count.short_description = _('Volunteers count')
def free_places(self, obj):
total_places = obj.campaignlocationshift_set.aggregate(total=Sum(
'total_places'))['total']
taken_places = obj.campaignlocationshift_set.aggregate(taken=Sum(
'volunteers__participant_count'))['taken']
return int(total_places or 0) - int(taken_places or 0)
free_places.short_description = _('Free places')
def get_queryset(self, request):
qs = super().get_queryset(request)
return filter_by_district(qs, request.user, 'district')
def get_actions(self, request):
actions = super().get_actions(request)
del actions['delete_selected']
return actions
admin.site.register(District)
admin.site.register(Location, LocationAdmin)
| mit | Python |
3bc097096c97fbf8936bea9c61f55c67cb0dd34e | Update cnn_functions.py | AlperenAydin/GenreRecognition,AlperenAydin/GenreRecognition | src/cnn_functions.py | src/cnn_functions.py | import numpy as np
import tensorflow as tf
# Weight variable
def weight_variable(shape):
initial = tf.truncated_normal(shape, stddev=0.1)
return tf.Variable(initial)
# Bias variable
def bias_variable(shape):
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
# These two for the basis for most of the other function
# This convolution has the same size for the output as the input
def conv2d(x, W):
return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
# This pooling layer halves the size
def max_pool_2x1(x):
return tf.nn.max_pool(x, ksize=[1, 2, 1, 1],
strides=[1, 2, 1, 1], padding='SAME')
# A 2d connvolutional layer with bias
def conv2d_bias(x, shape):
W_conv = weight_variable(shape)
b_conv = bias_variable([shape[3]])
return (conv2d(x,W_conv) + b_conv)
# Everything needed for a convolutional in a single function
def cnm2x1Layer(x, shape):
h_conv = tf.nn.relu(conv2d_bias(x, shape))
h_pool = max_pool_2x1(h_conv)
return h_pool
# A fully connected neural network
def fc_nn(x,shape):
W_fc = weight_variable(shape)
b_fc = bias_variable([shape[1]])
return tf.matmul(x, W_fc) + b_fc
| import numpy as np
import tensorflow as tf
# Weight variable
def weight_variable(shape):
initial = tf.truncated_normal(shape, stddev=0.1)
return tf.Variable(initial)
# Bias variable
def bias_variable(shape):
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
# These two for the basis for most of the other function
# This convolution has the same size for the output as the input
def conv2d(x, W):
return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
# This pooling layer halves the size
def max_pool_2x1(x):
return tf.nn.max_pool(x, ksize=[1, 2, 1, 1],
strides=[1, 2, 1, 1], padding='SAME')
# A 2d connvolutional layer with bias
def conv2d_bias(x, shape):
W_conv = weight_variable(shape)
b_conv = bias_variable([shape[3]])
return (conv2d(x,W_conv) + b_conv)
# I am lazy so I put everything I need for a convolutional in a single function
def cnm2x1Layer(x, shape):
h_conv = tf.nn.relu(conv2d_bias(x, shape))
h_pool = max_pool_2x1(h_conv)
return h_pool
# A fully connected neural network
def fc_nn(x,shape):
W_fc = weight_variable(shape)
b_fc = bias_variable([shape[1]])
return tf.matmul(x, W_fc) + b_fc
| mit | Python |
85264809758c173280ffd0e2fbc0978b66c59f57 | Fix mod_wsgi queue time stats. | ENCODE-DCC/encoded,kidaa/encoded,ClinGen/clincoded,philiptzou/clincoded,4dn-dcic/fourfront,ENCODE-DCC/encoded,hms-dbmi/fourfront,ClinGen/clincoded,philiptzou/clincoded,ClinGen/clincoded,ENCODE-DCC/snovault,ClinGen/clincoded,T2DREAM/t2dream-portal,4dn-dcic/fourfront,4dn-dcic/fourfront,ENCODE-DCC/snovault,T2DREAM/t2dream-portal,ENCODE-DCC/encoded,kidaa/encoded,kidaa/encoded,hms-dbmi/fourfront,T2DREAM/t2dream-portal,ENCODE-DCC/snovault,4dn-dcic/fourfront,philiptzou/clincoded,hms-dbmi/fourfront,ENCODE-DCC/snovault,T2DREAM/t2dream-portal,ClinGen/clincoded,kidaa/encoded,philiptzou/clincoded,philiptzou/clincoded,ENCODE-DCC/encoded,ENCODE-DCC/snovault,hms-dbmi/fourfront,hms-dbmi/fourfront,kidaa/encoded | src/encoded/stats.py | src/encoded/stats.py | import pyramid.tweens
import time
from pyramid.settings import asbool
from pyramid.threadlocal import manager as threadlocal_manager
from sqlalchemy import event
from sqlalchemy.engine import Engine
from urllib import urlencode
def includeme(config):
config.add_tween('.stats.stats_tween_factory',
under=pyramid.tweens.INGRESS)
def get_root_request():
if threadlocal_manager.stack:
return threadlocal_manager.stack[0]['request']
def requests_timing_hook(prefix='requests'):
count_key = prefix + '_count'
time_key = prefix + '_time'
def response_hook(r, *args, **kwargs):
request = get_root_request()
if request is None:
return
stats = request._stats
stats[count_key] = stats.get(count_key, 0) + 1
# requests response.elapsed is a timedelta
e = r.elapsed
duration = (e.days * 86400 + e.seconds) * 1000000 + e.microseconds
stats[time_key] = stats.get(time_key, 0) + duration
return response_hook
# See http://www.sqlalchemy.org/trac/wiki/UsageRecipes/Profiling
@event.listens_for(Engine, 'before_cursor_execute')
def before_cursor_execute(
conn, cursor, statement, parameters, context, executemany):
context._query_start_time = int(time.time() * 1e6)
@event.listens_for(Engine, 'after_cursor_execute')
def after_cursor_execute(
conn, cursor, statement, parameters, context, executemany):
end = int(time.time() * 1e6)
request = get_root_request()
if request is None:
return
stats = request._stats
stats['db_count'] = stats.get('db_count', 0) + 1
duration = end - context._query_start_time
stats['db_time'] = stats.get('db_time', 0) + duration
# http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/hooks.html#creating-a-tween-factory
def stats_tween_factory(handler, registry):
def stats_tween(request):
stats = request._stats = {}
begin = stats['wsgi_begin'] = int(time.time() * 1e6)
response = handler(request)
end = stats['wsgi_end'] = int(time.time() * 1e6)
stats['wsgi_time'] = end - begin
environ = request.environ
if 'mod_wsgi.queue_start' in environ:
queue_begin = int(environ['mod_wsgi.queue_start'])
stats['queue_begin'] = queue_begin
stats['queue_time'] = begin - queue_begin
response.headers['X-Stats'] = urlencode(sorted(stats.items()))
return response
return stats_tween
| import pyramid.tweens
import time
from pyramid.settings import asbool
from pyramid.threadlocal import manager as threadlocal_manager
from sqlalchemy import event
from sqlalchemy.engine import Engine
from urllib import urlencode
def includeme(config):
config.add_tween('.stats.stats_tween_factory',
under=pyramid.tweens.INGRESS)
def get_root_request():
if threadlocal_manager.stack:
return threadlocal_manager.stack[0]['request']
def requests_timing_hook(prefix='requests'):
count_key = prefix + '_count'
time_key = prefix + '_time'
def response_hook(r, *args, **kwargs):
request = get_root_request()
if request is None:
return
stats = request._stats
stats[count_key] = stats.get(count_key, 0) + 1
# requests response.elapsed is a timedelta
e = r.elapsed
duration = (e.days * 86400 + e.seconds) * 1000000 + e.microseconds
stats[time_key] = stats.get(time_key, 0) + duration
return response_hook
# See http://www.sqlalchemy.org/trac/wiki/UsageRecipes/Profiling
@event.listens_for(Engine, 'before_cursor_execute')
def before_cursor_execute(
conn, cursor, statement, parameters, context, executemany):
context._query_start_time = int(time.time() * 1e6)
@event.listens_for(Engine, 'after_cursor_execute')
def after_cursor_execute(
conn, cursor, statement, parameters, context, executemany):
end = int(time.time() * 1e6)
request = get_root_request()
if request is None:
return
stats = request._stats
stats['db_count'] = stats.get('db_count', 0) + 1
duration = end - context._query_start_time
stats['db_time'] = stats.get('db_time', 0) + duration
# http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/hooks.html#creating-a-tween-factory
def stats_tween_factory(handler, registry):
def stats_tween(request):
stats = request._stats = {}
begin = stats['wsgi_begin'] = int(time.time() * 1e6)
response = handler(request)
end = stats['wsgi_end'] = int(time.time() * 1e6)
stats['wsgi_time'] = end - begin
environ = request.environ
if 'mod_wsgi.queue_start' in environ:
queue_begin = int(environ['mod_wsgi.queue_start'])
stats.append(('queue_begin', queue_begin))
stats.append(('queue_time', begin - queue_begin))
response.headers['X-Stats'] = urlencode(sorted(stats.items()))
return response
return stats_tween
| mit | Python |
ca962b6994314ac3d3a5a5bcbf9044d52cd88ad9 | Remove unnecessary TODO | liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4 | adhocracy4/actions/signals.py | adhocracy4/actions/signals.py | from itertools import chain
from django.apps import apps
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_delete, post_save
from .models import Action
from .verbs import Verbs
# Actions resulting from the create_system_actions management call
SYSTEM_ACTIONABLES = (
('a4phases', 'Phase'),
('a4projects', 'Project')
)
def _extract_target(instance):
target = None
if hasattr(instance, 'content_object'):
target = instance.content_object
elif hasattr(instance, 'project'):
target = instance.project
return target
def _add_action(sender, instance, created, **kwargs):
actor = instance.creator if hasattr(instance, 'creator') else None
target = None
if created:
target = _extract_target(instance)
if target:
verb = Verbs.ADD.value
else:
verb = Verbs.CREATE.value
else:
verb = Verbs.UPDATE.value
action = Action(
actor=actor,
verb=verb,
obj=instance,
target=target,
)
if hasattr(instance, 'project'):
action.project = instance.project
action.save()
for app, model in settings.A4_ACTIONABLES:
post_save.connect(_add_action, apps.get_model(app, model))
def _delete_action(sender, instance, **kwargs):
contenttype = ContentType.objects.get_for_model(sender)
Action.objects\
.filter(obj_content_type=contenttype, obj_object_id=instance.id)\
.delete()
for app, model in chain(SYSTEM_ACTIONABLES, settings.A4_ACTIONABLES):
post_delete.connect(_delete_action, apps.get_model(app, model))
| from itertools import chain
from django.apps import apps
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_delete, post_save
from .models import Action
from .verbs import Verbs
# Actions resulting from the create_system_actions management call
SYSTEM_ACTIONABLES = (
('a4phases', 'Phase'),
('a4projects', 'Project')
)
def _extract_target(instance):
target = None
if hasattr(instance, 'content_object'):
target = instance.content_object
elif hasattr(instance, 'project'):
target = instance.project
return target
def _add_action(sender, instance, created, **kwargs):
actor = instance.creator if hasattr(instance, 'creator') else None
target = None
if created:
target = _extract_target(instance)
if target:
verb = Verbs.ADD.value
else:
verb = Verbs.CREATE.value
else:
verb = Verbs.UPDATE.value
action = Action(
actor=actor,
verb=verb,
obj=instance,
target=target,
)
# TODO: this could be extended
if hasattr(instance, 'project'):
action.project = instance.project
action.save()
for app, model in settings.A4_ACTIONABLES:
post_save.connect(_add_action, apps.get_model(app, model))
def _delete_action(sender, instance, **kwargs):
contenttype = ContentType.objects.get_for_model(sender)
Action.objects\
.filter(obj_content_type=contenttype, obj_object_id=instance.id)\
.delete()
for app, model in chain(SYSTEM_ACTIONABLES, settings.A4_ACTIONABLES):
post_delete.connect(_delete_action, apps.get_model(app, model))
| agpl-3.0 | Python |
abd428583d73d7c34025ab28428922473de219d3 | Improve sanitize_html test | alephdata/aleph,alephdata/aleph,alephdata/aleph,pudo/aleph,alephdata/aleph,alephdata/aleph,pudo/aleph,pudo/aleph | aleph/tests/test_view_util.py | aleph/tests/test_view_util.py | from lxml.html import document_fromstring
from aleph.views.util import get_best_next_url, sanitize_html
from aleph.tests.util import TestCase, UI_URL
class ViewUtilTest(TestCase):
def setUp(self):
super(ViewUtilTest, self).setUp()
def test_get_best_next_url_blank(self):
self.assertEqual(UI_URL, get_best_next_url(''))
def test_get_best_next_url_unsafe(self):
self.assertEqual(UI_URL, get_best_next_url(self.fake.url())) # noqa
def test_get_best_next_url_unsafe_safe(self):
self.assertEqual(
UI_URL + 'next', get_best_next_url(self.fake.url(), '/next'))
def test_get_best_next_url_all_unsafe(self):
self.assertEqual(UI_URL, get_best_next_url(self.fake.url(), self.fake.url())) # noqa
def test_sanitize_html(self):
html_str = '<!doctype html><html><head><title>Article</title><style type="text/css">body { }</style><script>alert("We love Angular")</script><link rel="stylesheet" href="http://xss.rocks/xss.css"></head><body><article id="story"><h1>We welcome our new React overlords</h1><img src=" javascript:alert(\'XSS\');" alt="" /><p>Published on <time onmouseover="alert(\'XSS\')">1 January 2018</time></p><p>Really the only thing better than the <a href="/blockchain">blockchain</a> is ReactJS.</p></article><video> <source onerror = "javascript: alert (XSS)"></video></body></html>'
processed = sanitize_html(
html_str, 'https://example.org/welcome-react')
html = document_fromstring(processed)
assert html.find('.//img') is None, html
assert html.find('.//video') is None, html
assert html.find('.//style') is None, html
assert html.find('.//script') is None, html
assert len(html.findall('.//article')) == 1, html
assert html.find('.//time').get('onmouseover') == None, html
assert html.find(
'.//a').get('href') == 'https://example.org/blockchain', html
assert html.find('.//a').get('target') == '_blank', html
assert html.find('.//a').get('rel') == 'nofollow', html
| from aleph.views.util import get_best_next_url, sanitize_html
from aleph.tests.util import TestCase, UI_URL
class ViewUtilTest(TestCase):
def setUp(self):
super(ViewUtilTest, self).setUp()
def test_get_best_next_url_blank(self):
self.assertEqual(UI_URL, get_best_next_url(''))
def test_get_best_next_url_unsafe(self):
self.assertEqual(UI_URL, get_best_next_url(self.fake.url())) # noqa
def test_get_best_next_url_unsafe_safe(self):
self.assertEqual(
UI_URL + 'next', get_best_next_url(self.fake.url(), '/next'))
def test_get_best_next_url_all_unsafe(self):
self.assertEqual(UI_URL, get_best_next_url(self.fake.url(), self.fake.url())) # noqa
def test_sanitize_html(self):
html = '<!doctype html><html><head><title>Article</title><style type="text/css">body { }</style><script>alert("We love Angular")</script><link rel="stylesheet" href="http://xss.rocks/xss.css"></head><body><article id="story"><h1>We welcome our new React overlords</h1><img src=" javascript:alert(\'XSS\');" alt="" /><p>Published on <time onmouseover="alert(\'XSS\')">1 January 2018</time></p><p>Really the only thing better than the <a href="/blockchain">blockchain</a> is ReactJS.</p></article><video> <source onerror = "javascript: alert (XSS)"></video></body></html>'
cleaned = '<div><article id="story"><h1>We welcome our new React overlords</h1><p>Published on <time>1 January 2018</time></p><p>Really the only thing better than the <a href="https://example.org/blockchain" target="_blank" rel="nofollow">blockchain</a> is ReactJS.</p></article></div>'
processed = sanitize_html(html, 'https://example.org/welcome-react')
self.assertEqual(processed, cleaned)
| mit | Python |
b5f840ad02d96714721f0558409f770161f4a8e0 | add test for update notebook | aligot-project/aligot,skitoo/aligot,aligot-project/aligot,aligot-project/aligot | aligot/tests/test_notebook.py | aligot/tests/test_notebook.py | # coding: utf-8
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from rest_framework import status
from ..models import NoteBook, User
import logging
# Get an instance of a logger
logger = logging.getLogger(__name__)
class TestNoteBookApi(TestCase):
def setUp(self):
self.client = APIClient()
self.user = User.objects.create(username='user', password='pass')
self.client.force_authenticate(user=self.user)
self.url = reverse('notebook-list')
def test_create_without_params(self):
self.assertEquals(status.HTTP_400_BAD_REQUEST, self.client.post(self.url).status_code)
self.assertEquals(0, NoteBook.objects.count())
def test_create(self):
response = self.client.post(self.url, {'title': 'a title', 'created_by': reverse('user-detail', args=[self.user.id])})
self.assertEquals(status.HTTP_201_CREATED, response.status_code, response.content)
self.assertEquals(1, NoteBook.objects.count())
def test_update(self):
notebook = NoteBook.objects.create(title='a title', created_by=self.user)
self.assertEquals(1, NoteBook.objects.count())
response = self.client.put(reverse('notebook-detail', args=[notebook.id]), {'title': 'new title', 'created_by': reverse('user-detail', args=[self.user.id])})
self.assertEquals(status.HTTP_200_OK, response.status_code, response.content)
self.assertEquals(1, NoteBook.objects.count())
self.assertEquals('new title', NoteBook.objects.all()[0].title)
| # coding: utf-8
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from rest_framework import status
from ..models import NoteBook, User
import logging
# Get an instance of a logger
logger = logging.getLogger(__name__)
class TestNoteBookApi(TestCase):
def setUp(self):
self.client = APIClient()
self.user = User.objects.create(username='user', password='pass')
self.client.force_authenticate(user=self.user)
self.url = reverse('notebook-list')
def test_create_without_params(self):
self.assertEquals(status.HTTP_400_BAD_REQUEST, self.client.post(self.url).status_code)
self.assertEquals(0, NoteBook.objects.count())
def test_create(self):
response = self.client.post(self.url, {'title': 'a title', 'created_by': reverse('user-detail', args=[self.user.id])})
self.assertEquals(status.HTTP_201_CREATED, response.status_code, response.content)
self.assertEquals(1, NoteBook.objects.count())
| mit | Python |
05c509cb2c298425f141e11f9af1d20962f2bb7e | Update __init__.py | linkedin/iris,linkedin/iris,linkedin/iris,linkedin/iris | src/iris/__init__.py | src/iris/__init__.py | __version__ = "1.0.15"
| __version__ = "1.0.14"
| bsd-2-clause | Python |
7d1dc9bcdaf0a533b0b52ae9fb6e2d37940fad3c | Update email check regex to be more thorough | charlesthk/python-mailchimp | mailchimp3/helpers.py | mailchimp3/helpers.py | # coding=utf-8
"""
Helper functions to perform simple tasks for multiple areas of the API
"""
import hashlib
import re
HTTP_METHOD_ACTION_MATCHING = {
'get': 'GET',
'create': 'POST',
'update': 'PATCH',
'create_or_update': 'PUT',
'delete': 'DELETE'
}
def get_subscriber_hash(member_email):
"""
The MD5 hash of the lowercase version of the list member's email.
Used as subscriber_hash
:param member_email: The member's email address
:type member_email: :py:class:`str`
:returns: The md5 hash in hex
:rtype: :py:class:`str`
"""
check_subscriber_email(member_email)
member_email = member_email.lower().encode()
m = hashlib.md5(member_email)
return m.hexdigest()
def check_subscriber_hash(potential_hash):
if re.match('^[0-9a-f]{32}$', potential_hash):
return potential_hash
else:
return get_subscriber_hash(potential_hash)
def check_subscriber_email(email):
if not re.search(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)", email):
raise ValueError('String passed is not a valid email address')
return
def merge_two_dicts(x, y):
"""
Given two dicts, merge them into a new dict as a shallow copy.
:param x: The first dictionary
:type x: :py:class:`dict`
:param y: The second dictionary
:type y: :py:class:`dict`
:returns: The merged dictionary
:rtype: :py:class:`dict`
"""
z = x.copy()
z.update(y)
return z
| # coding=utf-8
"""
Helper functions to perform simple tasks for multiple areas of the API
"""
import hashlib
import re
HTTP_METHOD_ACTION_MATCHING = {
'get': 'GET',
'create': 'POST',
'update': 'PATCH',
'create_or_update': 'PUT',
'delete': 'DELETE'
}
def get_subscriber_hash(member_email):
"""
The MD5 hash of the lowercase version of the list member's email.
Used as subscriber_hash
:param member_email: The member's email address
:type member_email: :py:class:`str`
:returns: The md5 hash in hex
:rtype: :py:class:`str`
"""
check_subscriber_email(member_email)
member_email = member_email.lower().encode()
m = hashlib.md5(member_email)
return m.hexdigest()
def check_subscriber_hash(potential_hash):
if re.match('^[0-9a-f]{32}$', potential_hash):
return potential_hash
else:
return get_subscriber_hash(potential_hash)
def check_subscriber_email(email):
if not re.search('@', email):
raise ValueError('String passed is not a valid email address')
return
def merge_two_dicts(x, y):
"""
Given two dicts, merge them into a new dict as a shallow copy.
:param x: The first dictionary
:type x: :py:class:`dict`
:param y: The second dictionary
:type y: :py:class:`dict`
:returns: The merged dictionary
:rtype: :py:class:`dict`
"""
z = x.copy()
z.update(y)
return z
| mit | Python |
1b89b0e6fe89cff56736ba88edc6422ce6078979 | Bump version to 0.2.0 | opendxl/opendxl-epo-service-python | dxleposervice/_version.py | dxleposervice/_version.py | __version__ = "0.2.0"
| __version__ = "0.1.4"
| apache-2.0 | Python |
6f14527aaa18ca3f707523535d596acc1c8fb847 | update version to 0.9.6 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/version.py | salt/version.py | __version_info__ = (0, 9, 6)
__version__ = '.'.join(map(str, __version_info__))
| __version_info__ = (0, 9, 5)
__version__ = '.'.join(map(str, __version_info__))
| apache-2.0 | Python |
1c434432977f0e3e126ec4d22be0bdccd4c44463 | Remove request processor. | serathius/sanic-sentry | sanic_sentry.py | sanic_sentry.py | import logging
import sanic
import raven
import raven_aiohttp
from raven.handlers.logging import SentryHandler
class SanicSentry:
def __init__(self, app=None):
self.app = None
self.handler = None
self.client = None
if app is not None:
self.init_app(app)
def init_app(self, app: sanic.Sanic):
self.client = raven.Client(
dsn=app.config['SENTRY_DSN'],
transport=raven_aiohttp.AioHttpTransport,
)
self.handler = SentryHandler(client=self.client, level=app.config.get('SENTRY_LEVEL', logging.ERROR))
logger = logging.getLogger('sanic')
logger.addHandler(self.handler)
self.app = app
self.app.sentry = self
| import logging
import sanic
import raven
import raven_aiohttp
from raven.handlers.logging import SentryHandler
from raven.processors import Processor
class SanicSentry:
def __init__(self, app=None):
self.app = None
self.handler = None
self.client = None
if app is not None:
self.init_app(app)
def init_app(self, app: sanic.Sanic):
self.client = raven.Client(
dsn=app.config['SENTRY_DSN'],
transport=raven_aiohttp.AioHttpTransport,
)
self.handler = SentryHandler(client=self.client, level=app.config.get('SENTRY_LEVEL', logging.ERROR))
logger = logging.getLogger('sanic')
logger.addHandler(self.handler)
self.app = app
self.app.sentry = self
class RequestProcessor(Processor):
async def process(self, data, request=None):
if request is None:
return {}
data = {
'request': {
'url': "%s://%s%s" % (request.scheme, request.host, request.path),
'method': request.method,
'data': (await request.read()),
'query_string': request.query_string,
'headers': {k.title(): str(v) for k, v in request.headers.items()},
}
}
return data
| mit | Python |
62df776433ccb4b5de1e586f03381bbfe0c56817 | Remove unneccessary iteration | janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system | src/webapp/public.py | src/webapp/public.py | import json
from flask import Blueprint, render_template
import database as db
from database.model import Team
bp = Blueprint('public', __name__)
@bp.route("/map")
def map_page():
return render_template("public/map.html")
@bp.route("/map_teams")
def map_teams():
qry = db.session.query(Team).filter_by(confirmed=True).filter_by(deleted=False).filter_by(backup=False)
data_dict = {}
for item in qry:
if item.location is not None:
ident = "%s%s" % (item.location.lat, item.location.lon)
if ident not in data_dict:
data_dict[ident] = {"lat": item.location.lat,
"lon": item.location.lon,
"name": item.name}
else:
data_dict[ident]["name"] += "<br>" + item.name
return json.dumps(data_dict.values())
| import json
from flask import Blueprint, render_template
import database as db
from database.model import Team
bp = Blueprint('public', __name__)
@bp.route("/map")
def map_page():
return render_template("public/map.html")
@bp.route("/map_teams")
def map_teams():
qry = db.session.query(Team).filter_by(confirmed=True).filter_by(deleted=False).filter_by(backup=False)
data_dict = {}
for item in qry:
if item.location is not None:
ident = "%s%s" % (item.location.lat, item.location.lon)
if ident not in data_dict:
data_dict[ident] = {"lat": item.location.lat,
"lon": item.location.lon,
"name": item.name}
else:
data_dict[ident]["name"] += "<br>" + item.name
data = [entry for entry in data_dict.itervalues()]
return json.dumps(data)
| bsd-3-clause | Python |
91e107e209f2c4ce1341cc0dc3ec45055c2c3d27 | Fix deprecation warning | springload/madewithwagtail,springload/madewithwagtail,springload/madewithwagtail,springload/madewithwagtail | core/tests/test_template_tags.py | core/tests/test_template_tags.py | from django.template import Context, Template
from bs4 import BeautifulSoup
from core.tests.utils import *
from core.models import *
class TemplateTagsTestCase(WagtailTest):
def setUp(self):
super(TemplateTagsTestCase, self).setUp()
self.home_page = HomePage.objects.all()[0]
def test_set_var(self):
"""
Test set_var(parser, token) tag
{% set <var_name> = <var_value> %}
"""
# We just assign a simple string to a ver and check it gets printed nicely
rendered = Template("{% load core_tags %}{% set x = 'The hammer of Zeus' %}{{ x }}").render(Context({}))
self.assertEqual(rendered, "The hammer of Zeus")
def test_footer_menu(self):
"""
Test footer_menu(context) tag
{% footer_menu %}
"""
response = self.client.get(self.home_page.url)
# Render and check footer has 2 items
rendered = Template("{% load core_tags %}{% footer_menu name='Footer' current_page=self.home_page %}").render(Context({"request": response.request}))
soup = BeautifulSoup(rendered, 'html5lib')
self.assertEqual(
len(soup.findAll("a")),
4 # Number of links in the footer, should be 4 but fixtures are bit outdated
)
def test_main_menu(self):
"""
Test main_menu(context) tag
{% main_menu %}
"""
response = self.client.get(self.home_page.url)
# Render and check footer has 2 items
rendered = Template("{% load core_tags %}{% menu name='Main' current_page=self.home_page %}").render(Context({"request": response.request}))
soup = BeautifulSoup(rendered, 'html5lib')
self.assertEqual(
len(soup.findAll("a")),
5 # Number of links in the main nav
)
| from django.template import Context, Template
from bs4 import BeautifulSoup
from core.tests.utils import *
from core.models import *
class TemplateTagsTestCase(WagtailTest):
def setUp(self):
super(TemplateTagsTestCase, self).setUp()
self.home_page = HomePage.objects.all()[0]
def test_set_var(self):
"""
Test set_var(parser, token) tag
{% set <var_name> = <var_value> %}
"""
# We just assign a simple string to a ver and check it gets printed nicely
rendered = Template("{% load core_tags %}{% set x = 'The hammer of Zeus' %}{{ x }}").render(Context({}))
self.assertEqual(rendered, "The hammer of Zeus")
def test_footer_menu(self):
"""
Test footer_menu(context) tag
{% footer_menu %}
"""
response = self.client.get(self.home_page.url)
# Render and check footer has 2 items
rendered = Template("{% load core_tags %}{% footer_menu name='Footer' current_page=self.home_page %}").render(Context({"request": response.request}))
soup = BeautifulSoup(rendered)
self.assertEqual(
len(soup.findAll("a")),
4 # Number of links in the footer, should be 4 but fixtures are bit outdated
)
def test_main_menu(self):
"""
Test main_menu(context) tag
{% main_menu %}
"""
response = self.client.get(self.home_page.url)
# Render and check footer has 2 items
rendered = Template("{% load core_tags %}{% menu name='Main' current_page=self.home_page %}").render(Context({"request": response.request}))
soup = BeautifulSoup(rendered)
self.assertEqual(
len(soup.findAll("a")),
5 # Number of links in the main nav
)
| mit | Python |
8e5892e2fd5d3560ed4ffe3e8f11cfdb393865bf | Make it easier to see what the error was on failed module load. | mininet/mininet,mininet/mininet,mininet/mininet | mininet/moduledeps.py | mininet/moduledeps.py | "Module dependency utility functions for Mininet."
from mininet.util import quietRun
from mininet.log import info, error, debug
from os import environ
def lsmod():
"Return output of lsmod."
return quietRun( 'lsmod' )
def rmmod( mod ):
"""Return output of lsmod.
mod: module string"""
return quietRun( [ 'rmmod', mod ] )
def modprobe( mod ):
"""Return output of modprobe
mod: module string"""
return quietRun( [ 'modprobe', mod ] )
OF_KMOD = 'ofdatapath'
OVS_KMOD = 'openvswitch_mod'
TUN = 'tun'
def moduleDeps( subtract=None, add=None ):
"""Handle module dependencies.
subtract: string or list of module names to remove, if already loaded
add: string or list of module names to add, if not already loaded"""
subtract = subtract if subtract is not None else []
add = add if add is not None else []
if type( subtract ) is str:
subtract = [ subtract ]
if type( add ) is str:
add = [ add ]
for mod in subtract:
if mod in lsmod():
info( '*** Removing ' + mod + '\n' )
rmmodOutput = rmmod( mod )
if rmmodOutput:
error( 'Error removing ' + mod + ': <%s>\n' % rmmodOutput )
exit( 1 )
if mod in lsmod():
error( 'Failed to remove ' + mod + '; still there!\n' )
exit( 1 )
for mod in add:
if mod not in lsmod():
info( '*** Loading ' + mod + '\n' )
modprobeOutput = modprobe( mod )
if modprobeOutput:
error( 'Error inserting ' + mod + '- is it installed?\n' +
'Error was: <%s>\n' % modprobeOutput )
if mod not in lsmod():
error( 'Failed to insert ' + mod + '\n' )
exit( 1 )
else:
debug( '*** ' + mod + ' already loaded\n' )
def pathCheck( *args ):
"Make sure each program in *args can be found in $PATH."
for arg in args:
if not quietRun( 'which ' + arg ):
error( 'Cannot find required executable %s -'
' is it installed somewhere in your $PATH?\n(%s)\n' %
( arg, environ[ 'PATH' ] ) )
exit( 1 )
| "Module dependency utility functions for Mininet."
from mininet.util import quietRun
from mininet.log import info, error, debug
from os import environ
def lsmod():
"Return output of lsmod."
return quietRun( 'lsmod' )
def rmmod( mod ):
"""Return output of lsmod.
mod: module string"""
return quietRun( [ 'rmmod', mod ] )
def modprobe( mod ):
"""Return output of modprobe
mod: module string"""
return quietRun( [ 'modprobe', mod ] )
OF_KMOD = 'ofdatapath'
OVS_KMOD = 'openvswitch_mod'
TUN = 'tun'
def moduleDeps( subtract=None, add=None ):
"""Handle module dependencies.
subtract: string or list of module names to remove, if already loaded
add: string or list of module names to add, if not already loaded"""
subtract = subtract if subtract is not None else []
add = add if add is not None else []
if type( subtract ) is str:
subtract = [ subtract ]
if type( add ) is str:
add = [ add ]
for mod in subtract:
if mod in lsmod():
info( '*** Removing ' + mod + '\n' )
rmmodOutput = rmmod( mod )
if rmmodOutput:
error( 'Error removing ' + mod + ': %s\n' % rmmodOutput )
exit( 1 )
if mod in lsmod():
error( 'Failed to remove ' + mod + '; still there!\n' )
exit( 1 )
for mod in add:
if mod not in lsmod():
info( '*** Loading ' + mod + '\n' )
modprobeOutput = modprobe( mod )
if modprobeOutput:
error( 'Error inserting ' + mod + '- is it installed?\n' +
'Error was: %s\n' % modprobeOutput )
if mod not in lsmod():
error( 'Failed to insert ' + mod + '\n' )
exit( 1 )
else:
debug( '*** ' + mod + ' already loaded\n' )
def pathCheck( *args ):
"Make sure each program in *args can be found in $PATH."
for arg in args:
if not quietRun( 'which ' + arg ):
error( 'Cannot find required executable %s -'
' is it installed somewhere in your $PATH?\n(%s)\n' %
( arg, environ[ 'PATH' ] ) )
exit( 1 )
| bsd-3-clause | Python |
98649d486b9e2eb2c83e594e73cf6bbaa29213e5 | Make the server example listen on 0.0.0.0 by default. | mwicat/python2-osc,attwad/python-osc,ragnarula/python-osc,emlyn/python-osc | examples/simple_server.py | examples/simple_server.py | import argparse
import math
from pythonosc import dispatcher
from pythonosc import osc_server
def print_volume_handler(args, volume):
print("[{0}] ~ {1}".format(args[0], volume))
def print_compute_handler(args, volume):
try:
print("[{0}] ~ {1}".format(args[0], args[1](volume)))
except ValueError: pass
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--ip",
default="0.0.0.0", help="The ip to listen on")
parser.add_argument("--port",
type=int, default=5005, help="The port to listen on")
args = parser.parse_args()
dispatcher = dispatcher.Dispatcher()
dispatcher.map("/debug", print)
dispatcher.map("/volume", print_volume_handler, "Volume")
dispatcher.map("/logvolume", print_compute_handler, "Log volume", math.log)
server = osc_server.ThreadingOSCUDPServer(
(args.ip, args.port), dispatcher)
print("Serving on {}".format(server.server_address))
server.serve_forever()
| import argparse
import math
from pythonosc import dispatcher
from pythonosc import osc_server
def print_volume_handler(args, volume):
print("[{0}] ~ {1}".format(args[0], volume))
def print_compute_handler(args, volume):
try:
print("[{0}] ~ {1}".format(args[0], args[1](volume)))
except ValueError: pass
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--ip",
default="127.0.0.1", help="The ip to listen on")
parser.add_argument("--port",
type=int, default=5005, help="The port to listen on")
args = parser.parse_args()
dispatcher = dispatcher.Dispatcher()
dispatcher.map("/debug", print)
dispatcher.map("/volume", print_volume_handler, "Volume")
dispatcher.map("/logvolume", print_compute_handler, "Log volume", math.log)
server = osc_server.ThreadingOSCUDPServer(
(args.ip, args.port), dispatcher)
print("Serving on {}".format(server.server_address))
server.serve_forever()
| unlicense | Python |
1988798c80a123572c3fa19873b3b028504575dc | Improve error message when drmaa cannot be loaded. | galaxyproject/pulsar,natefoo/pulsar,galaxyproject/pulsar,natefoo/pulsar | pulsar/managers/util/drmaa/__init__.py | pulsar/managers/util/drmaa/__init__.py | try:
from drmaa import Session, JobControlAction
except OSError as e:
LOAD_ERROR_MESSAGE = "OSError - problem loading shared library [%s]." % e
Session = None
except ImportError as e:
LOAD_ERROR_MESSAGE = "ImportError - problem importing library (`pip install drmaa` may fix this) [%s]." % e
# Will not be able to use DRMAA
Session = None
NO_DRMAA_MESSAGE = "Attempt to use DRMAA, but DRMAA Python library cannot be loaded. "
class DrmaaSessionFactory(object):
"""
Abstraction used to production DrmaaSession wrappers.
"""
def __init__(self):
self.session_constructor = Session
def get(self, **kwds):
session_constructor = self.session_constructor
if session_constructor is None:
raise Exception(NO_DRMAA_MESSAGE + LOAD_ERROR_MESSAGE)
return DrmaaSession(session_constructor(), **kwds)
class DrmaaSession(object):
"""
Abstraction around `drmaa` module `Session` objects.
"""
def __init__(self, session, **kwds):
self.session = session
session.initialize()
def run_job(self, **kwds):
"""
Create a DRMAA job template, populate with specified properties,
run the job, and return the external_job_id.
"""
template = self.session.createJobTemplate()
try:
for key in kwds:
setattr(template, key, kwds[key])
return self.session.runJob(template)
finally:
self.session.deleteJobTemplate(template)
def kill(self, external_job_id):
return self.session.control(str(external_job_id), JobControlAction.TERMINATE)
def job_status(self, external_job_id):
return self.session.jobStatus(str(external_job_id))
def close(self):
return self.session.exit()
__all__ = ['DrmaaSessionFactory']
| try:
from drmaa import Session, JobControlAction
except ImportError as e:
# Will not be able to use DRMAA
Session = None
NO_DRMAA_MESSAGE = "Attempt to use DRMAA, but DRMAA Python library cannot be loaded."
class DrmaaSessionFactory(object):
"""
Abstraction used to production DrmaaSession wrappers.
"""
def __init__(self):
self.session_constructor = Session
def get(self, **kwds):
session_constructor = self.session_constructor
if not session_constructor:
raise Exception(NO_DRMAA_MESSAGE)
return DrmaaSession(session_constructor(), **kwds)
class DrmaaSession(object):
"""
Abstraction around `drmaa` module `Session` objects.
"""
def __init__(self, session, **kwds):
self.session = session
session.initialize()
def run_job(self, **kwds):
"""
Create a DRMAA job template, populate with specified properties,
run the job, and return the external_job_id.
"""
template = self.session.createJobTemplate()
try:
for key in kwds:
setattr(template, key, kwds[key])
return self.session.runJob(template)
finally:
self.session.deleteJobTemplate(template)
def kill(self, external_job_id):
return self.session.control(str(external_job_id), JobControlAction.TERMINATE)
def job_status(self, external_job_id):
return self.session.jobStatus(str(external_job_id))
def close(self):
return self.session.exit()
__all__ = ['DrmaaSessionFactory']
| apache-2.0 | Python |
e56764c6e3393c105fd5acb439323b89f8775161 | Update __openerp__.py | Elico-Corp/openerp-7.0,Elico-Corp/openerp-7.0,Elico-Corp/openerp-7.0 | purchase_double_confirm/__openerp__.py | purchase_double_confirm/__openerp__.py | # -*- encoding: utf-8 -*-
# © 2014 Elico corp(www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
{
'name': 'Purchase Order Approvement',
'version': '7.0.1.0.0',
'category': 'Purchases',
'description': """
Button on PO will pop a question, give the operator a possibility to confirm.
""",
'author': 'Elico Corp',
'website': 'www.elico-corp.com',
'depends': ['purchase'],
'update_xml': [
'purchase_confirm_view.xml',
],
'installable': True,
'active': False,
}
| # -*- encoding: utf-8 -*-
# © 2014 Elico corp(www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
{
'name': 'Purchase Order Approvement',
'version': '7.0.1.0.0',
'category': 'Purchases',
'description': """
Button on PO will pop a question, give the operator a chance to confirm.
""",
'author': 'Elico Corp',
'website': 'http://www.openerp.net.cn',
'depends': ['purchase'],
'update_xml': [
'purchase_confirm_view.xml',
],
'installable': True,
'active': False,
}
| agpl-3.0 | Python |
e173868deff0672b2f5540d6409f4c87d1fe1827 | fix couchdb json encoding | wangjun/pyspider,wangjun/pyspider,binux/pyspider,luoq/pyspider,binux/pyspider,wangjun/pyspider,luoq/pyspider,binux/pyspider,luoq/pyspider | pyspider/database/couchdb/projectdb.py | pyspider/database/couchdb/projectdb.py | import time, requests, json
from pyspider.database.base.projectdb import ProjectDB as BaseProjectDB
class ProjectDB(BaseProjectDB):
__collection_name__ = 'projectdb'
def __init__(self, url, database='projectdb'):
self.url = url
self.database = database
self.insert('', {})
def _default_fields(self, each):
if each is None:
return each
each.setdefault('group', None)
each.setdefault('status', 'TODO')
each.setdefault('script', '')
each.setdefault('comments', None)
each.setdefault('rate', 0)
each.setdefault('burst', 0)
each.setdefault('updatetime', 0)
return each
def insert(self, name, obj={}):
url = self.url + self.__collection_name__ + "/" + name
obj = dict(obj)
obj['name'] = name
obj['updatetime'] = time.time()
res = requests.put(url, data = json.dumps(obj), headers = {"Content-Type": "application/json"}).json()
print('[couchdb projectdb insert] - res: {}'.format(res))
return res
def update(self, name, obj={}, **kwargs):
obj = dict(obj)
obj.update(kwargs)
self.insert(name, obj)
def get_all(self, fields=None):
payload = {
"selector": {},
"fields": fields
}
res = requests.post(self.url+"_find", data=json.dumps(payload)).json()
print('[couchdb projectdb get_all] - res: {}'.format(res))
return res
def get(self, name, fields=None):
payload = {
"selector": {"name": name},
"fields": fields,
"limit": 1
}
res = requests.post(self.url + "_find", data=json.dumps(payload)).json()
print('[couchdb projectdb get] - res: {}'.format(res))
return res
def check_update(self, timestamp, fields=None):
for project in self.get_all(fields=('updatetime', 'name')):
if project['updatetime'] > timestamp:
project = self.get(project['name'], fields)
yield self._default_fields(project)
def drop(self, name):
doc = json.loads(self.get(name))
res = requests.delete(self.url+name+"/"+doc["_rev"]).json()
print('[couchdb projectdb drop] - res: {}'.format(res))
return res
| import time, requests, json
from pyspider.database.base.projectdb import ProjectDB as BaseProjectDB
class ProjectDB(BaseProjectDB):
__collection_name__ = 'projectdb'
def __init__(self, url, database='projectdb'):
self.url = url
self.database = database
self.insert('', {})
def _default_fields(self, each):
if each is None:
return each
each.setdefault('group', None)
each.setdefault('status', 'TODO')
each.setdefault('script', '')
each.setdefault('comments', None)
each.setdefault('rate', 0)
each.setdefault('burst', 0)
each.setdefault('updatetime', 0)
return each
def insert(self, name, obj={}):
url = self.url + self.__collection_name__ + "/" + name
obj = dict(obj)
obj['name'] = name
obj['updatetime'] = time.time()
print("[couchdb insert] - insert url: {} obj: {}".format(url, json.dumps(obj)))
return requests.put(url, data = json.dumps(obj), headers = {"Content-Type": "application/json"})
def update(self, name, obj={}, **kwargs):
obj = dict(obj)
obj.update(kwargs)
self.insert(name, obj)
def get_all(self, fields=None):
payload = {
"selector": {},
"fields": fields
}
return json.loads(requests.post(self.url+"_find", data=json.dumps(payload)).json())
def get(self, name, fields=None):
payload = {
"selector": {"name": name},
"fields": fields,
"limit": 1
}
return json.loads(requests.post(self.url + "_find", data=json.dumps(payload)).json())
def check_update(self, timestamp, fields=None):
for project in self.get_all(fields=('updatetime', 'name')):
if project['updatetime'] > timestamp:
project = self.get(project['name'], fields)
yield self._default_fields(project)
def drop(self, name):
doc = json.loads(self.get(name))
return json.loads(requests.delete(self.url+name+"/"+doc["_rev"]).json())
| apache-2.0 | Python |
e55d04760daaefb6dda7f26d1ebcd66d7af45abc | Fix docstring in projection.py | 1024jp/LensCalibrator | modules/projection.py | modules/projection.py | #!/usr/bin/env python
"""
(C) 2007-2018 1024jp
"""
import numpy as np
import cv2
class Projector:
def __init__(self, image_points, ideal_points):
self.homography = self._estimate_homography(image_points, ideal_points)
@staticmethod
def _estimate_homography(image_points, ideal_points):
"""Find homography matrix.
"""
fp = np.array(image_points)
tp = np.array(ideal_points)
H, _ = cv2.findHomography(fp, tp, 0)
return H
def project_point(self, x, y):
"""Project x, y coordinates using homography matrix.
Arguments:
x (float) -- x coordinate to project.
y (float) -- y coordinate to project.
"""
result = np.dot(self.homography, [x, y, 1])
projected_x = result[0] / result[2]
projected_y = result[1] / result[2]
return projected_x, projected_y
def project_image(self, image, size, offset=(0, 0)):
"""Remove parspective from given image.
Arguments:
image numpy.array -- Image source in numpy image form.
size ([int]) -- Size of the output image.
"""
translation = np.matrix([
[1.0, 0.0, -offset[0]],
[0.0, 1.0, -offset[1]],
[0.0, 0.0, 1.0]
])
matrix = translation * self.homography
return cv2.warpPerspective(image, matrix, tuple(size))
| #!/usr/bin/env python
"""
(C) 2007-2017 1024jp
"""
import numpy as np
import cv2
class Projector:
def __init__(self, image_points, ideal_points):
self.homography = self._estimate_homography(image_points, ideal_points)
@staticmethod
def _estimate_homography(image_points, ideal_points):
"""Find homography matrix.
"""
fp = np.array(image_points)
tp = np.array(ideal_points)
H, _ = cv2.findHomography(fp, tp, 0)
return H
def project_point(self, x, y):
"""Project x, y coordinates using homography matrix.
Arguments:
homography (list[list[float]]) -- 3x3 homography matrix.
x (float) -- x coordinate to project.
y (float) -- y coordinate to project.
"""
result = np.dot(self.homography, [x, y, 1])
projected_x = result[0] / result[2]
projected_y = result[1] / result[2]
return projected_x, projected_y
def project_image(self, image, size, offset=(0, 0)):
"""Remove parspective from given image.
Arguments:
image numpy.array -- Image source in numpy image form.
size ([int]) -- Size of the output image.
"""
translation = np.matrix([
[1.0, 0.0, -offset[0]],
[0.0, 1.0, -offset[1]],
[0.0, 0.0, 1.0]
])
matrix = translation * self.homography
return cv2.warpPerspective(image, matrix, tuple(size))
| mit | Python |
1cdf903ef028be519a6d3eee1b30b53234de51f2 | Fix valueerrors on pyquery. | mhils/readthedocs.org,sils1297/readthedocs.org,fujita-shintaro/readthedocs.org,d0ugal/readthedocs.org,safwanrahman/readthedocs.org,fujita-shintaro/readthedocs.org,GovReady/readthedocs.org,takluyver/readthedocs.org,atsuyim/readthedocs.org,istresearch/readthedocs.org,LukasBoersma/readthedocs.org,CedarLogic/readthedocs.org,istresearch/readthedocs.org,singingwolfboy/readthedocs.org,cgourlay/readthedocs.org,titiushko/readthedocs.org,cgourlay/readthedocs.org,nikolas/readthedocs.org,mrshoki/readthedocs.org,davidfischer/readthedocs.org,nikolas/readthedocs.org,kenshinthebattosai/readthedocs.org,johncosta/private-readthedocs.org,nikolas/readthedocs.org,Tazer/readthedocs.org,cgourlay/readthedocs.org,nyergler/pythonslides,tddv/readthedocs.org,stevepiercy/readthedocs.org,raven47git/readthedocs.org,clarkperkins/readthedocs.org,dirn/readthedocs.org,espdev/readthedocs.org,safwanrahman/readthedocs.org,nyergler/pythonslides,asampat3090/readthedocs.org,royalwang/readthedocs.org,jerel/readthedocs.org,CedarLogic/readthedocs.org,sils1297/readthedocs.org,Carreau/readthedocs.org,emawind84/readthedocs.org,Tazer/readthedocs.org,rtfd/readthedocs.org,sils1297/readthedocs.org,rtfd/readthedocs.org,jerel/readthedocs.org,atsuyim/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,mhils/readthedocs.org,soulshake/readthedocs.org,wijerasa/readthedocs.org,soulshake/readthedocs.org,Carreau/readthedocs.org,sid-kap/readthedocs.org,royalwang/readthedocs.org,michaelmcandrew/readthedocs.org,cgourlay/readthedocs.org,nyergler/pythonslides,wanghaven/readthedocs.org,dirn/readthedocs.org,VishvajitP/readthedocs.org,wanghaven/readthedocs.org,soulshake/readthedocs.org,techtonik/readthedocs.org,kenwang76/readthedocs.org,Carreau/readthedocs.org,kdkeyser/readthedocs.org,kenshinthebattosai/readthedocs.org,davidfischer/readthedocs.org,emawind84/readthedocs.org,singingwolfboy/readthedocs.org,sunnyzwh/readthedocs.org,clarkperkins/readthedocs.org,singingwolfboy/readthedocs.org,attakei/readthedocs-oauth,titiushko/readthedocs.org,michaelmcandrew/readthedocs.org,sid-kap/readthedocs.org,titiushko/readthedocs.org,mhils/readthedocs.org,Tazer/readthedocs.org,GovReady/readthedocs.org,SteveViss/readthedocs.org,attakei/readthedocs-oauth,soulshake/readthedocs.org,LukasBoersma/readthedocs.org,hach-que/readthedocs.org,kenwang76/readthedocs.org,gjtorikian/readthedocs.org,VishvajitP/readthedocs.org,royalwang/readthedocs.org,tddv/readthedocs.org,asampat3090/readthedocs.org,kdkeyser/readthedocs.org,mrshoki/readthedocs.org,LukasBoersma/readthedocs.org,gjtorikian/readthedocs.org,espdev/readthedocs.org,emawind84/readthedocs.org,nyergler/pythonslides,GovReady/readthedocs.org,ojii/readthedocs.org,asampat3090/readthedocs.org,CedarLogic/readthedocs.org,mhils/readthedocs.org,sunnyzwh/readthedocs.org,Tazer/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,fujita-shintaro/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,stevepiercy/readthedocs.org,d0ugal/readthedocs.org,atsuyim/readthedocs.org,KamranMackey/readthedocs.org,SteveViss/readthedocs.org,laplaceliu/readthedocs.org,clarkperkins/readthedocs.org,sid-kap/readthedocs.org,KamranMackey/readthedocs.org,kenshinthebattosai/readthedocs.org,wanghaven/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,stevepiercy/readthedocs.org,safwanrahman/readthedocs.org,LukasBoersma/readthedocs.org,wijerasa/readthedocs.org,Carreau/readthedocs.org,laplaceliu/readthedocs.org,pombredanne/readthedocs.org,GovReady/readthedocs.org,d0ugal/readthedocs.org,asampat3090/readthedocs.org,wijerasa/readthedocs.org,hach-que/readthedocs.org,espdev/readthedocs.org,rtfd/readthedocs.org,VishvajitP/readthedocs.org,ojii/readthedocs.org,VishvajitP/readthedocs.org,techtonik/readthedocs.org,techtonik/readthedocs.org,ojii/readthedocs.org,nikolas/readthedocs.org,mrshoki/readthedocs.org,safwanrahman/readthedocs.org,laplaceliu/readthedocs.org,royalwang/readthedocs.org,istresearch/readthedocs.org,raven47git/readthedocs.org,atsuyim/readthedocs.org,agjohnson/readthedocs.org,kenshinthebattosai/readthedocs.org,pombredanne/readthedocs.org,raven47git/readthedocs.org,rtfd/readthedocs.org,istresearch/readthedocs.org,singingwolfboy/readthedocs.org,CedarLogic/readthedocs.org,mrshoki/readthedocs.org,laplaceliu/readthedocs.org,agjohnson/readthedocs.org,KamranMackey/readthedocs.org,espdev/readthedocs.org,SteveViss/readthedocs.org,agjohnson/readthedocs.org,KamranMackey/readthedocs.org,titiushko/readthedocs.org,hach-que/readthedocs.org,fujita-shintaro/readthedocs.org,dirn/readthedocs.org,takluyver/readthedocs.org,johncosta/private-readthedocs.org,kenwang76/readthedocs.org,attakei/readthedocs-oauth,gjtorikian/readthedocs.org,stevepiercy/readthedocs.org,johncosta/private-readthedocs.org,sunnyzwh/readthedocs.org,pombredanne/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,sid-kap/readthedocs.org,sils1297/readthedocs.org,kdkeyser/readthedocs.org,emawind84/readthedocs.org,hach-que/readthedocs.org,gjtorikian/readthedocs.org,sunnyzwh/readthedocs.org,agjohnson/readthedocs.org,tddv/readthedocs.org,takluyver/readthedocs.org,d0ugal/readthedocs.org,jerel/readthedocs.org,techtonik/readthedocs.org,davidfischer/readthedocs.org,dirn/readthedocs.org,michaelmcandrew/readthedocs.org,wanghaven/readthedocs.org,kdkeyser/readthedocs.org,ojii/readthedocs.org,takluyver/readthedocs.org | readthedocs/projects/search_indexes.py | readthedocs/projects/search_indexes.py | # -*- coding: utf-8-*-
import codecs
import os
from django.utils.html import strip_tags
from haystack import site
from haystack.indexes import *
from celery_haystack.indexes import CelerySearchIndex
from pyquery import PyQuery
from projects.models import File, ImportedFile, Project
import logging
log = logging.getLogger(__name__)
class ProjectIndex(CelerySearchIndex):
text = CharField(document=True, use_template=True)
author = CharField()
title = CharField(model_attr='name')
description = CharField(model_attr='description')
repo_type = CharField(model_attr='repo_type')
def prepare_author(self, obj):
return obj.users.all()[0]
class FileIndex(CelerySearchIndex):
text = CharField(document=True, use_template=True)
author = CharField()
project = CharField(model_attr='project__name', faceted=True)
title = CharField(model_attr='heading')
def prepare_author(self, obj):
return obj.project.users.all()[0]
#Should prob make a common subclass for this and FileIndex
class ImportedFileIndex(CelerySearchIndex):
text = CharField(document=True)
author = CharField()
project = CharField(model_attr='project__name', faceted=True)
title = CharField(model_attr='name')
def prepare_author(self, obj):
return obj.project.users.all()[0]
def prepare_text(self, obj):
"""
Prepare the text of the html file.
This only works on machines that have the html
files for the projects checked out.
"""
full_path = obj.project.rtd_build_path()
file_path = os.path.join(full_path, obj.path.lstrip('/'))
try:
with codecs.open(file_path, encoding='utf-8', mode='r') as f:
content = f.read()
except IOError as e:
log.info('Unable to index file: %s, error :%s' % (file_path, e))
return
log.debug('Indexing %s' % obj.slug)
try:
to_index = strip_tags(PyQuery(content)("div.document").html()).replace(u'¶', '')
except ValueError:
#Pyquery returns ValueError if div.document doesn't exist.
return
return to_index
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
site.register(Project, ProjectIndex)
| # -*- coding: utf-8-*-
import codecs
import os
from django.utils.html import strip_tags
from haystack import site
from haystack.indexes import *
from celery_haystack.indexes import CelerySearchIndex
from pyquery import PyQuery
from projects.models import File, ImportedFile, Project
import logging
log = logging.getLogger(__name__)
class ProjectIndex(CelerySearchIndex):
text = CharField(document=True, use_template=True)
author = CharField()
title = CharField(model_attr='name')
description = CharField(model_attr='description')
repo_type = CharField(model_attr='repo_type')
def prepare_author(self, obj):
return obj.users.all()[0]
class FileIndex(CelerySearchIndex):
text = CharField(document=True, use_template=True)
author = CharField()
project = CharField(model_attr='project__name', faceted=True)
title = CharField(model_attr='heading')
def prepare_author(self, obj):
return obj.project.users.all()[0]
#Should prob make a common subclass for this and FileIndex
class ImportedFileIndex(CelerySearchIndex):
text = CharField(document=True)
author = CharField()
project = CharField(model_attr='project__name', faceted=True)
title = CharField(model_attr='name')
def prepare_author(self, obj):
return obj.project.users.all()[0]
def prepare_text(self, obj):
"""
Prepare the text of the html file.
This only works on machines that have the html
files for the projects checked out.
"""
full_path = obj.project.rtd_build_path()
file_path = os.path.join(full_path, obj.path.lstrip('/'))
try:
with codecs.open(file_path, encoding='utf-8', mode='r') as f:
content = f.read()
except IOError as e:
log.info('Unable to index file: %s, error :%s' % (file_path, e))
return
log.debug('Indexing %s' % obj.slug)
to_index = strip_tags(PyQuery(content)("div.document").html()).replace(u'¶', '')
return to_index
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
site.register(Project, ProjectIndex)
| mit | Python |
6e06e335257e4a370d815b733ccce184a3541f81 | fix unhandled calls | mylokin/servy | servy/server.py | servy/server.py | from __future__ import absolute_import
import webob.exc
import webob.dec
import webob.response
import pickle
class Server(object):
def __init__(self, **services):
self.services = services
@webob.dec.wsgify
def __call__(self, request):
if request.method == 'POST':
return self.rpc(request)
raise webob.exc.HTTPMethodNotAllowed
def rpc(self, request):
service = request.path[1:]
if service not in self.services:
raise webob.exc.HTTPNotFound
service = self.services[service]
try:
procedure, args, kw = pickle.load(request.body_file)
except:
raise webob.exc.HTTPBadRequest
for attr in procedure.split('.'):
if not hasattr(service, attr):
raise webob.exc.HTTPNotImplemented
service = getattr(service, attr)
content = service(*args, **kw)
content = pickle.dumps(content)
return webob.response.Response(content)
| from __future__ import absolute_import
import webob.exc
import webob.dec
import webob.response
import pickle
class Server(object):
def __init__(self, **services):
self.services = services
@webob.dec.wsgify
def __call__(self, request):
if request.method == 'POST':
return self.rpc(request)
def rpc(self, request):
service = request.path[1:]
if service not in self.services:
raise webob.exc.HTTPNotFound
service = self.services[service]
try:
procedure, args, kw = pickle.load(request.body_file)
except:
raise webob.exc.HTTPBadRequest
for attr in procedure.split('.'):
if not hasattr(service, attr):
raise webob.exc.HTTPNotImplemented
service = getattr(service, attr)
content = service(*args, **kw)
content = pickle.dumps(content)
return webob.response.Response(content)
| mit | Python |
a1406c93c2cbdc0da73c8bc5412f695a0ab18d53 | add session keys file source code UTF-8 encoding | unStatiK/TorrentBOX,unStatiK/TorrentBOX,unStatiK/TorrentBOX | session_keys.py | session_keys.py | # -*- coding: utf-8 -*-
USER_TOKEN = 'user'
USER_ID_TOKEN = 'id_user'
|
USER_TOKEN = 'user'
USER_ID_TOKEN = 'id_user' | mit | Python |
8b77e1e865d72720a602b7b7cc5912cb852d68cf | Revert back to original settings for Celery Broker | pythonindia/junction,pythonindia/junction,pythonindia/junction,pythonindia/junction | settings/dev.py | settings/dev.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
from .common import * # noqa
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(ROOT_DIR, 'db.sqlite3'),
}
}
ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'http'
TEMPLATE_CONTEXT_PROCESSORS += (
"django.core.context_processors.debug",
)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += ('django_extensions',)
# settings for celery
BROKER_URL = os.environ.get("BROKER_URL", "redis://redis:6379/0")
CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", 'redis://redis:6379/0')
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
from .common import * # noqa
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(ROOT_DIR, 'db.sqlite3'),
}
}
ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'http'
TEMPLATE_CONTEXT_PROCESSORS += (
"django.core.context_processors.debug",
)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += ('django_extensions',)
# settings for celery
BROKER_URL = os.environ.get("BROKER_URL", "redis://127.0.0.1:6379/0")
CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", 'redis://127.0.0.1:6379/0')
| mit | Python |
e753038de039fd23f0d59bb0094f59fc73efe22b | Set a custom JSON Encoder to serialize date class. | viniciuschiele/flask-apscheduler | flask_apscheduler/json.py | flask_apscheduler/json.py | import datetime
import flask
import json
from apscheduler.job import Job
from .utils import job_to_dict
loads = json.loads
def dumps(obj, indent=None):
return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
indent = None
if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
indent = 2
return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
if isinstance(obj, datetime.date):
return obj.isoformat()
if isinstance(obj, Job):
return job_to_dict(obj)
return super(JSONEncoder, self).default(obj)
| import flask
import json
from datetime import datetime
from apscheduler.job import Job
from .utils import job_to_dict
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
return obj.isoformat()
if isinstance(obj, Job):
return job_to_dict(obj)
return super(JSONEncoder, self).default(obj)
def dumps(obj, indent=None):
return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
indent = None
if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
indent = 2
return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
| apache-2.0 | Python |
edcfe2b156af23943478bc86592b4c8d5dc07e10 | Support older versions of MongoEngine | gerasim13/flask-mongoengine-1,rochacbruno/flask-mongoengine,quokkaproject/flask-mongoengine,quokkaproject/flask-mongoengine,gerasim13/flask-mongoengine-1,losintikfos/flask-mongoengine,rochacbruno/flask-mongoengine,losintikfos/flask-mongoengine | flask_mongoengine/json.py | flask_mongoengine/json.py | from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
try:
from mongoengine.base import BaseQuerySet
except ImportError as ie: # support mongoengine < 0.7
from mongoengine.queryset import QuerySet as BaseQuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
documents and queryset objects.
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
elif isinstance(obj, BaseQuerySet):
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
| from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
from mongoengine import QuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
documents and querysets.
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
elif isinstance(obj, QuerySet):
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
| bsd-3-clause | Python |
6f31cb8cad05f1713fbdef861c194385bc875ad9 | Clean up comment | ppapadeas/wprevents,yvan-sraka/wprevents,yvan-sraka/wprevents,yvan-sraka/wprevents,ppapadeas/wprevents,yvan-sraka/wprevents,ppapadeas/wprevents | mozcal/events/urls.py | mozcal/events/urls.py | from django.conf.urls.defaults import url, patterns
from . import views
urlpatterns = patterns('',
url(r'^(?P<slug>[a-z0-9-]+)$', views.one, name='event_one'),
url(r'^', views.all, name='event_all'),
)
| from django.conf.urls.defaults import url, patterns
from . import views
urlpatterns = patterns('',
# Match
url(r'^(?P<slug>[a-z0-9-]+)$', views.one, name='event_one'),
url(r'^', views.all, name='event_all'),
)
| bsd-3-clause | Python |
7e5672e6527ad2fce8b1cb97b07228b4db8770e2 | Update to version 0.1.1 | Anthony25/mpd_muspy | mpd_muspy/__init__.py | mpd_muspy/__init__.py | #!/usr/bin/python
# Author: Anthony Ruhier
import __main__
import os
import sys
# Check that the configuration exists
try:
_current_dir = os.path.dirname(__main__.__file__)
except AttributeError:
_current_dir = os.getcwd()
if not os.path.exists(os.path.join(_current_dir, "config.py")):
print("Configuration file config.py not found. Please copy the "
"config.py.default as config.py.", file=sys.stderr)
sys.exit(1)
_version = "0.1.1"
_release_name = "mpd_muspy"
| #!/usr/bin/python
# Author: Anthony Ruhier
import __main__
import os
import sys
# Check that the configuration exists
try:
_current_dir = os.path.dirname(__main__.__file__)
except AttributeError:
_current_dir = os.getcwd()
if not os.path.exists(os.path.join(_current_dir, "config.py")):
print("Configuration file config.py not found. Please copy the "
"config.py.default as config.py.", file=sys.stderr)
sys.exit(1)
_version = "0.1beta"
_release_name = "mpd_muspy"
| bsd-2-clause | Python |
6cd3e3128be2308bf13029b01b7ac6cc0a4cd7be | Fix setupcluster.py script. | ymap/aioredis | setupcluster.py | setupcluster.py | import argparse
import os
import sys
from aioredis.cluster.testcluster import TestCluster
assert sys.version >= '3.5', 'Please use Python 3.5 or higher.'
START_PORT = 7001
REDIS_COUNT = 6
def parse_arguments():
parser = argparse.ArgumentParser(
description="Set up a Redis cluster to run the examples.")
parser.add_argument(
'--dir',
default='redis-cluster',
help='Directory for the Redis cluster. '
'Must be empty or nonexistent, unless -f is specified.'
)
return parser.parse_args()
def setup_test_cluster(args):
directory = os.path.abspath(os.path.expanduser(args.dir))
cluster = TestCluster(list(range(START_PORT, START_PORT + REDIS_COUNT)), directory)
cluster.setup()
if __name__ == '__main__':
args = parse_arguments()
setup_test_cluster(args)
print(
"Cluster has been set up."
"To stop the cluster, simply kill the processes."
)
| import argparse
import os
import sys
from aioredis.cluster import testcluster
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
START_PORT = 7001
REDIS_COUNT = 6
def parse_arguments():
parser = argparse.ArgumentParser(
description="Set up a Redis cluster for the unittests.")
parser.add_argument(
'--dir',
default='redis-cluster',
help='Directory for the Redis cluster. '
'Must be empty or nonexistent, unless -f is specified.'
)
return parser.parse_args()
def setup_test_cluster(args):
directory = os.path.abspath(os.path.expanduser(args.dir))
testcluster.setup_test_cluster(REDIS_COUNT, START_PORT, directory)
if __name__ == '__main__':
args = parse_arguments()
setup_test_cluster(args)
print(
"Cluster has been set up. Use 'python runtests.py --cluster' "
" to run the tests. "
"To stop the cluster, simply kill the processes."
)
| mit | Python |
abfcf0bdee7183ee46d784fb53918e95b477554e | fix pages/admin.py for InterestedForm | raccoongang/socraticqs2,cjlee112/socraticqs2,raccoongang/socraticqs2,raccoongang/socraticqs2,raccoongang/socraticqs2,cjlee112/socraticqs2,cjlee112/socraticqs2,cjlee112/socraticqs2 | mysite/pages/admin.py | mysite/pages/admin.py | from django.contrib import admin
from models import InterestedForm
class InterestedFormAdmin(admin.ModelAdmin):
list_display = ('first_name', 'last_name', 'email', 'timezone')
admin.site.register(InterestedForm, InterestedFormAdmin)
| from django.contrib import admin
from models import InterestedForm
class InterestedFormAdmin(admin.ModelAdmin):
pass
admin.register(InterestedFormAdmin, InterestedForm)
# Register your models here.
| apache-2.0 | Python |
6607258b78002637372f67ee238bdb35aba7ce61 | check both /v0/ and /v0/legacy | NCI-GDC/gdc-client,NCI-GDC/gdc-client | gdc_client/query/index.py | gdc_client/query/index.py | import requests
from urlparse import urljoin
from ..log import get_logger
# Logging
log = get_logger('query')
class GDCIndexClient(object):
def __init__(self, uri):
self.uri = uri if uri.endswith('/') else uri + '/'
def get_related_files(self, file_id):
"""Query the GDC api for related files.
:params str file_id: String containing the id of the primary entity
:returns: A list of related file ids
"""
r = self.get('files', file_id, fields=['related_files.file_id'])
return [rf['file_id'] for rf in r['data'].get('related_files', [])]
def get_annotations(self, file_id):
"""Query the GDC api for annotations and download them to a file.
:params str file_id: String containing the id of the primary entity
:returns: A list of related file ids
"""
r = self.get('files', file_id, fields=['annotations.annotation_id'])
return [a['annotation_id'] for a in r['data'].get('annotations', [])]
def get(self, path, ID, fields=[]):
url = urljoin(self.uri, 'v0/{}/{}'.format(path, ID))
params = {'fields': ','.join(fields)} if fields else {}
r = requests.get(url, verify=False, params=params)
if r.status_code != requests.codes.ok:
url = urljoin(self.uri, 'v0/legacy/{}/{}'.format(path, ID))
r = requests.get(url, verify=False, params=params)
r.raise_for_status()
return r.json()
| import requests
from urlparse import urljoin
from ..log import get_logger
# Logging
log = get_logger('query')
class GDCIndexClient(object):
def __init__(self, uri):
self.uri = uri if uri.endswith('/') else uri + '/'
def get_related_files(self, file_id):
"""Query the GDC api for related files.
:params str file_id: String containing the id of the primary entity
:returns: A list of related file ids
"""
r = self.get('files', file_id, fields=['related_files.file_id'])
return [rf['file_id'] for rf in r['data'].get('related_files', [])]
def get_annotations(self, file_id):
"""Query the GDC api for annotations and download them to a file.
:params str file_id: String containing the id of the primary entity
:returns: A list of related file ids
"""
r = self.get('files', file_id, fields=['annotations.annotation_id'])
return [a['annotation_id'] for a in r['data'].get('annotations', [])]
def get(self, path, ID, fields=[]):
url = urljoin(self.uri, '{}/{}'.format(path, ID))
params = {'fields': ','.join(fields)} if fields else {}
r = requests.get(url, verify=False, params=params)
r.raise_for_status()
return r.json()
| apache-2.0 | Python |
8fec68740ab7f8467f952f478a971a8418d48108 | Add run_cmd logging statement | sjktje/sjkscan,sjktje/sjkscan | sjkscan/scan.py | sjkscan/scan.py | #!/usr/bin/env python
# encoding: utf-8
import logging
import os
from datetime import datetime
from .config import config, load_config
from .utils import run_cmd
from .logging import init_logging
def run_scan(output_directory):
"""Run scanimage in batch mode.
:param string output_directory: directory to write scanned images to
"""
logging.info('Scanning to %s', output_directory)
command = [
'scanimage',
'--resolution {}'.format(config['Scanimage']['resolution']),
'--batch={}/scan_%03d.pnm'.format(output_directory),
'--format=pnm',
'--mode Gray',
'--brightness {}'.format(config['Scanimage']['brightness']),
'--contrast {}'.format(config['Scanimage']['contrast']),
'--source "ADF Duplex"',
'-v'
]
run_cmd(command)
def main():
"""
Scan documents.
Documents are placed in data_dir/YYYY-MM-DD_HH-MM-SS.unfinished.
Once the scan has been completed, the '.unfinished' is removed.
"""
load_config()
init_logging()
timestamp = datetime.today().strftime('%Y-%m-%d_%H-%M-%S')
unfinished = os.path.join(config['Paths']['data'], timestamp + '.unfinished')
finished = os.path.join(config['Paths']['data'], timestamp)
output_dir = os.path.join(config['Paths']['data'], unfinished)
try:
os.mkdir(output_dir)
except OSError as e:
print('Could not create {}: {}', output_dir, e)
run_scan(output_dir)
try:
os.rename(unfinished, finished)
except OSError as e:
print('Could not rename {} to {}: {}', unfinished, finished, e)
| #!/usr/bin/env python
# encoding: utf-8
import os
from datetime import datetime
from .config import config, load_config
from .utils import run_cmd
def run_scan(output_directory):
"""Run scanimage in batch mode.
:param string output_directory: directory to write scanned images to
"""
command = [
'scanimage',
'--resolution {}'.format(config['Scanimage']['resolution']),
'--batch={}/scan_%03d.pnm'.format(output_directory),
'--format=pnm',
'--mode Gray',
'--brightness {}'.format(config['Scanimage']['brightness']),
'--contrast {}'.format(config['Scanimage']['contrast']),
'--source "ADF Duplex"',
'-v'
]
run_cmd(command)
def main():
"""
Scan documents.
Documents are placed in data_dir/YYYY-MM-DD_HH-MM-SS.unfinished.
Once the scan has been completed, the '.unfinished' is removed.
"""
load_config()
timestamp = datetime.today().strftime('%Y-%m-%d_%H-%M-%S')
unfinished = os.path.join(config['Paths']['data'], timestamp + '.unfinished')
finished = os.path.join(config['Paths']['data'], timestamp)
output_dir = os.path.join(config['Paths']['data'], unfinished)
try:
os.mkdir(output_dir)
except OSError as e:
print('Could not create {}: {}', output_dir, e)
run_scan(output_dir)
try:
os.rename(unfinished, finished)
except OSError as e:
print('Could not rename {} to {}: {}', unfinished, finished, e)
| bsd-2-clause | Python |
f22267a1bed6520a1531f33b30130da556dd1348 | Fix gunicorn.http.__all__. | mvaled/gunicorn,malept/gunicorn,gtrdotmcs/gunicorn,WSDC-NITWarangal/gunicorn,zhoucen/gunicorn,harrisonfeng/gunicorn,elelianghh/gunicorn,GitHublong/gunicorn,malept/gunicorn,MrKiven/gunicorn,tejasmanohar/gunicorn,mvaled/gunicorn,keakon/gunicorn,zhoucen/gunicorn,ccl0326/gunicorn,ephes/gunicorn,mvaled/gunicorn,gtrdotmcs/gunicorn,gtrdotmcs/gunicorn,prezi/gunicorn,prezi/gunicorn,ccl0326/gunicorn,tempbottle/gunicorn,ccl0326/gunicorn,prezi/gunicorn,zhoucen/gunicorn,malept/gunicorn,z-fork/gunicorn | gunicorn/http/__init__.py | gunicorn/http/__init__.py | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from gunicorn.http.message import Message, Request
from gunicorn.http.parser import RequestParser
__all__ = ['Message', 'Request', 'RequestParser']
| # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from gunicorn.http.message import Message, Request
from gunicorn.http.parser import RequestParser
__all__ = [Message, Request, RequestParser]
| mit | Python |
62bb9d7785e3ba9241b6a8531685e7f3266db7cd | Revert "Add initial notification's target #19" | nnsnodnb/django-ios-notifications,nnsnodnb/django-ios-notifications | notification/forms.py | notification/forms.py | from django import forms
from .models import DeviceToken
class CertFileUploadForm(forms.Form):
cert_file = forms.FileField()
target = forms.ChoiceField(choices=((0, 'Develop'), (1, 'Distribute')), required=True, widget=forms.RadioSelect)
class NotificationSendForm(forms.Form):
target = forms.ChoiceField(choices=((0, 'Develop'), (1, 'Distribute')), required=True, widget=forms.RadioSelect)
device_token = forms.ModelMultipleChoiceField(label='Device Token',
queryset=DeviceToken.objects.all().values_list('device_token',
flat=True)
)
title = forms.CharField(required=True)
subtitle = forms.CharField()
body = forms.CharField()
sound = forms.CharField(initial='default', required=True)
badge = forms.IntegerField(initial=1, required=True)
content_available = forms.BooleanField(initial=False)
mutable_content = forms.BooleanField(initial=False)
extra = forms.CharField(widget=forms.Textarea)
| from django import forms
from .models import DeviceToken
class CertFileUploadForm(forms.Form):
cert_file = forms.FileField()
target = forms.ChoiceField(choices=((0, 'Develop'), (1, 'Distribute')), required=True, widget=forms.RadioSelect)
class NotificationSendForm(forms.Form):
target = forms.ChoiceField(choices=((0, 'Develop'), (1, 'Distribute')),
required=True,
widget=forms.RadioSelect,
initial=0)
device_token = forms.ModelMultipleChoiceField(label='Device Token',
queryset=DeviceToken.objects.all().values_list('device_token',
flat=True)
)
title = forms.CharField(required=True)
subtitle = forms.CharField()
body = forms.CharField()
sound = forms.CharField(initial='default', required=True)
badge = forms.IntegerField(initial=1, required=True)
content_available = forms.BooleanField(initial=False)
mutable_content = forms.BooleanField(initial=False)
extra = forms.CharField(widget=forms.Textarea)
| mit | Python |
c095525de6f28feb3cb34ca63c4c13091a10a726 | Fix error caused by missing "-static" libraries defines for some platform e.g. OSX and BSD | cpcloud/numba,gmarkall/numba,cpcloud/numba,gmarkall/numba,gmarkall/numba,stuartarchibald/numba,IntelLabs/numba,stonebig/numba,gmarkall/numba,numba/numba,cpcloud/numba,numba/numba,IntelLabs/numba,IntelLabs/numba,stonebig/numba,stonebig/numba,cpcloud/numba,IntelLabs/numba,numba/numba,stuartarchibald/numba,gmarkall/numba,stonebig/numba,seibert/numba,stonebig/numba,cpcloud/numba,numba/numba,numba/numba,stuartarchibald/numba,IntelLabs/numba,seibert/numba,stuartarchibald/numba,seibert/numba,seibert/numba,seibert/numba,stuartarchibald/numba | numba/misc/findlib.py | numba/misc/findlib.py | import sys
import os
import re
def get_lib_dirs():
"""
Anaconda specific
"""
if sys.platform == 'win32':
# on windows, historically `DLLs` has been used for CUDA libraries,
# since approximately CUDA 9.2, `Library\bin` has been used.
dirnames = ['DLLs', os.path.join('Library', 'bin')]
else:
dirnames = ['lib', ]
libdirs = [os.path.join(sys.prefix, x) for x in dirnames]
return libdirs
DLLNAMEMAP = {
'linux': r'lib%(name)s\.so\.%(ver)s$',
'linux2': r'lib%(name)s\.so\.%(ver)s$',
'linux-static': r'lib%(name)s\.a$',
'darwin': r'lib%(name)s\.%(ver)s\.dylib$',
'win32': r'%(name)s%(ver)s\.dll$',
'win32-static': r'%(name)s\.lib$',
'bsd': r'lib%(name)s\.so\.%(ver)s$',
}
RE_VER = r'[0-9]*([_\.][0-9]+)*'
def find_lib(libname, libdir=None, platform=None, static=False):
platform = platform or sys.platform
platform = 'bsd' if 'bsd' in platform else platform
if static:
platform = f"{platform}-static"
if platform not in DLLNAMEMAP:
# Return empty list if platform name is undefined.
# Not all platforms define their static library paths.
return []
pat = DLLNAMEMAP[platform] % {"name": libname, "ver": RE_VER}
regex = re.compile(pat)
return find_file(regex, libdir)
def find_file(pat, libdir=None):
if libdir is None:
libdirs = get_lib_dirs()
elif isinstance(libdir, str):
libdirs = [libdir,]
else:
libdirs = list(libdir)
files = []
for ldir in libdirs:
entries = os.listdir(ldir)
candidates = [os.path.join(ldir, ent)
for ent in entries if pat.match(ent)]
files.extend([c for c in candidates if os.path.isfile(c)])
return files
| import sys
import os
import re
def get_lib_dirs():
"""
Anaconda specific
"""
if sys.platform == 'win32':
# on windows, historically `DLLs` has been used for CUDA libraries,
# since approximately CUDA 9.2, `Library\bin` has been used.
dirnames = ['DLLs', os.path.join('Library', 'bin')]
else:
dirnames = ['lib', ]
libdirs = [os.path.join(sys.prefix, x) for x in dirnames]
return libdirs
DLLNAMEMAP = {
'linux': r'lib%(name)s\.so\.%(ver)s$',
'linux2': r'lib%(name)s\.so\.%(ver)s$',
'linux-static': r'lib%(name)s\.a$',
'darwin': r'lib%(name)s\.%(ver)s\.dylib$',
'win32': r'%(name)s%(ver)s\.dll$',
'win32-static': r'%(name)s\.lib$',
'bsd': r'lib%(name)s\.so\.%(ver)s$',
}
RE_VER = r'[0-9]*([_\.][0-9]+)*'
def find_lib(libname, libdir=None, platform=None, static=False):
platform = platform or sys.platform
platform = 'bsd' if 'bsd' in platform else platform
if static:
platform = f"{platform}-static"
pat = DLLNAMEMAP[platform] % {"name": libname, "ver": RE_VER}
regex = re.compile(pat)
return find_file(regex, libdir)
def find_file(pat, libdir=None):
if libdir is None:
libdirs = get_lib_dirs()
elif isinstance(libdir, str):
libdirs = [libdir,]
else:
libdirs = list(libdir)
files = []
for ldir in libdirs:
entries = os.listdir(ldir)
candidates = [os.path.join(ldir, ent)
for ent in entries if pat.match(ent)]
files.extend([c for c in candidates if os.path.isfile(c)])
return files
| bsd-2-clause | Python |
4e080dc896737d9bd60fe0f02004489bbfecedd4 | Fix article admin. | opps/opps,jeanmask/opps,opps/opps,jeanmask/opps,YACOWS/opps,opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps | opps/article/admin.py | opps/article/admin.py | # -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from opps.article.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostSourceInline(admin.TabularInline):
model = PostSource
fk_name = 'post'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ["title",]}
list_display = ['title', 'channel', 'date_available', 'published']
list_filter = ['published', 'date_available', 'channel']
search_fields = ['title', 'headline']
inlines = [PostImageInline, PostSourceInline]
exclude = ('user',)
raw_id_fields = ['main_image', 'channel']
fieldsets = (
(_(u'Identification'), {
'fields': ('title', 'slug',)}),
(_(u'Content'), {
'fields': ('short_title', 'headline', 'content',
'main_image')}),
(_(u'Relationships'), {
'fields': ('channel',)}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available')}),
)
def save_model(self, request, obj, form, change):
try:
obj.site = obj.channel.site
if obj.user:
pass
except User.DoesNotExist:
obj.user = request.user
super(PostAdmin, self).save_model(request, obj, form, change)
admin.site.register(Post, PostAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.contrib.auth.models import User
from opps.article.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostSourceInline(admin.TabularInline):
model = PostSource
fk_name = 'post'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline, PostSourceInline]
exclude = ('user',)
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
try:
obj.site = obj.channel.site
if obj.user:
pass
except User.DoesNotExist:
obj.user = request.user
super(PostAdmin, self).save_model(request, obj, form, change)
admin.site.register(Post, PostAdmin)
| mit | Python |
c63cf3972ecd26438c3825bdf67ebd0e8b4751f1 | update docstring formatting | incuna/django-orderable,incuna/django-orderable | orderable/managers.py | orderable/managers.py | from django.db import models
class OrderableManager(models.Manager):
"""
Adds additional functionality to `Orderable.objects`.
Provides access to the next and previous ordered object within the queryset.
"""
def before(self, orderable):
return self.filter(sort_order__lt=orderable.sort_order).last()
def after(self, orderable):
return self.filter(sort_order__gt=orderable.sort_order).first()
| from django.db import models
class OrderableManager(models.Manager):
'''
Adds additional functionality to `Orderable.objects` providing access to the next and
previous ordered object within the queryset.
'''
def before(self, orderable):
return self.filter(sort_order__lt=orderable.sort_order).last()
def after(self, orderable):
return self.filter(sort_order__gt=orderable.sort_order).first()
| bsd-2-clause | Python |
ab4ba6e453b68fd084dc4cafdc193a17557fcf1d | Update WitForJBMC.py | ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec | benchexec/tools/WitForJBMC.py | benchexec/tools/WitForJBMC.py | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2021 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool2):
"""
Tool info for Wit4JBMC
(https://github.com/Anthonysdu/MSc-project/blob/main/jbmc/Wit4JBMC.py).
"""
def executable(self, tool_locator):
return tool_locator.find_executable("Wit4JBMC.py")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "Wit4JBMC"
def cmdline(self, executable, options, task, rlimits):
return [executable] + options + list(task.input_files)
def determine_result(self, run):
output = run.output
validation = "unknown"
for line in output:
if "Exception" in line:
if "AssertionError" in line:
validation = "false"
else:
validation = "unknown"
break
else:
validation = "true"
if validation == "false":
status = result.RESULT_FALSE_PROP
# print(exit_code)
elif validation == "true":
status = result.RESULT_TRUE_PROP
elif validation == "unknown":
status = result.RESULT_UNKNOWN
else:
status = result.RESULT_ERROR
return status
| import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool2):
def executable(self, tool_locator):
return tool_locator.find_executable("Wit4JBMC.py")
def name(self):
return "Wit4JBMC"
def cmdline(self, executable, options, task, rlimits):
return [executable] + options + list(task.input_files_or_identifier)
def determine_result(self, run):
output = run.output
validation = 'unknown'
for line in output:
if 'Exception' in line:
if 'AssertionError' in line:
validation = 'false'
else:
validation = 'unknown'
break
else:
validation = 'true'
if validation == 'false':
status = result.RESULT_FALSE_PROP
#print(exit_code)
elif validation == 'true':
status = result.RESULT_TRUE_PROP
else:
status = result.RESULT_UNKNOWN
return status
| apache-2.0 | Python |
42988dc093c688ad1975116fc0c0e85e943e70b5 | Complete a test case | biothings/biothings.api,biothings/biothings.api | biothings/tests/test_query.py | biothings/tests/test_query.py | '''
Biothings Query Component Common Tests
'''
import os
from nose.core import main
from biothings.tests import BiothingsTestCase
class QueryTests(BiothingsTestCase):
''' Test against server specified in environment variable BT_HOST
and BT_API or MyGene.info production server V3 by default '''
host = os.getenv("BT_HOST", "http://mygene.info")
api = os.getenv("BT_API", "/v3")
def test_01(self):
''' KWARGS CTRL Format Json '''
self.query(q='__all__', size='1')
def test_02(self):
''' KWARGS CTRL Format Yaml '''
res = self.request('query?q=__all__&size=1&format=yaml').text
assert res.startswith('max_score:')
def test_03(self):
''' KWARGS CTRL Format Html '''
res = self.request('query?q=__all__&size=1&format=html').text
assert '<html>' in res
def test_04(self):
''' KWARGS CTRL Format Msgpack '''
res = self.request('query?q=__all__&size=1&format=msgpack').content
self.msgpack_ok(res)
def test_11(self):
''' HANDLE Unmatched Quotes'''
# Sentry
# Issue 529121368
# Event 922fc99638cb4987bccbfd30c914ff03
_q = 'query?q=c("ZNF398", "U2AF...'
self.request(_q, expect_status=400)
if __name__ == '__main__':
main(defaultTest='__main__.QueryTests', argv=['', '-v'])
| '''
Biothings Query Component Common Tests
'''
import os
from nose.core import main
from biothings.tests import BiothingsTestCase
class QueryTests(BiothingsTestCase):
''' Test against server specified in environment variable BT_HOST
and BT_API or MyGene.info production server V3 by default '''
host = os.getenv("BT_HOST", "http://mygene.info")
api = os.getenv("BT_API", "/v3")
def test_01(self):
''' KWARGS CTRL Format Json '''
self.query(q='__all__', size='1')
def test_02(self):
''' KWARGS CTRL Format Yaml '''
res = self.request('query?q=__all__&size=1&format=yaml').text
assert res.startswith('max_score:')
def test_03(self):
''' KWARGS CTRL Format Html '''
res = self.request('query?q=__all__&size=1&format=html').text
assert '<html>' in res
def test_04(self):
''' KWARGS CTRL Format Msgpack '''
res = self.request('query?q=__all__&size=1&format=msgpack').content
self.msgpack_ok(res)
def test_11(self):
''' HANDLE Unmatched Quotes'''
# TODO example: "U2AF...
# self.query('"U2AF...', expect_hits=False)
if __name__ == '__main__':
main(defaultTest='__main__.QueryTests', argv=['', '-v'])
| apache-2.0 | Python |
9f09f21c592bc2a0da6a4f56bcf4b971411474b1 | increase min processing threshold | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/form_processor/management/commands/run_submission_reprocessing_queue.py | corehq/form_processor/management/commands/run_submission_reprocessing_queue.py | from datetime import timedelta, datetime
from time import sleep
from django.core.management import BaseCommand
from django.db.models import F
from django.db.models import Q
from corehq.form_processor.tasks import reprocess_submission
from corehq.util.metrics import metrics_gauge
from couchforms.models import UnfinishedSubmissionStub
from dimagi.utils.logging import notify_exception
ENQUEUING_TIMEOUT = 14 * 24 * 60 # 14 days (in minutes)
BATCH_SIZE = 1000
def _record_datadog_metrics():
count = UnfinishedSubmissionStub.objects.count()
metrics_gauge('commcare.submission_reprocessing.queue_size', count)
class SubmissionReprocessingEnqueuingOperation(BaseCommand):
help = "Runs the Submission Reprocessing Queue"
def handle(self, **options):
while True:
try:
num_processed = self.create_tasks()
except Exception:
num_processed = 0
notify_exception(None, message="Could not queue unprocessed submissions")
sleep_time = 10 if num_processed < BATCH_SIZE else 0
sleep(sleep_time)
def create_tasks(self):
stub_ids = self.get_items_to_be_processed()
for stub_id in stub_ids:
reprocess_submission.delay(stub_id)
return len(stub_ids)
def get_items_to_be_processed(self):
_record_datadog_metrics()
utcnow = datetime.utcnow()
queued_threshold = utcnow - timedelta(minutes=ENQUEUING_TIMEOUT)
filters = Q(date_queued__isnull=True) | Q(date_queued__lte=queued_threshold)
# wait before processing to avoid processing during form submission and hopefully after any
# current infra issues
min_processing_age = utcnow - timedelta(minutes=30)
filters = Q(timestamp__lt=min_processing_age) & filters
query = UnfinishedSubmissionStub.objects.filter(filters).order_by('timestamp')
stub_ids = list(query.values_list('id', flat=True)[:BATCH_SIZE])
if stub_ids:
UnfinishedSubmissionStub.objects.filter(pk__in=stub_ids).update(
date_queued=utcnow, attempts=F('attempts') + 1
)
return stub_ids
class Command(SubmissionReprocessingEnqueuingOperation):
pass
| from datetime import timedelta, datetime
from time import sleep
from django.core.management import BaseCommand
from django.db.models import F
from django.db.models import Q
from corehq.form_processor.tasks import reprocess_submission
from corehq.util.metrics import metrics_gauge
from couchforms.models import UnfinishedSubmissionStub
from dimagi.utils.logging import notify_exception
ENQUEUING_TIMEOUT = 14 * 24 * 60 # 14 days (in minutes)
BATCH_SIZE = 1000
def _record_datadog_metrics():
count = UnfinishedSubmissionStub.objects.count()
metrics_gauge('commcare.submission_reprocessing.queue_size', count)
class SubmissionReprocessingEnqueuingOperation(BaseCommand):
help = "Runs the Submission Reprocessing Queue"
def handle(self, **options):
while True:
try:
num_processed = self.create_tasks()
except Exception:
num_processed = 0
notify_exception(None, message="Could not queue unprocessed submissions")
sleep_time = 10 if num_processed < BATCH_SIZE else 0
sleep(sleep_time)
def create_tasks(self):
stub_ids = self.get_items_to_be_processed()
for stub_id in stub_ids:
reprocess_submission.delay(stub_id)
return len(stub_ids)
def get_items_to_be_processed(self):
_record_datadog_metrics()
utcnow = datetime.utcnow()
queued_threshold = utcnow - timedelta(minutes=ENQUEUING_TIMEOUT)
min_processing_age = utcnow - timedelta(minutes=5)
filters = Q(date_queued__isnull=True) | Q(date_queued__lte=queued_threshold)
# wait 5 mins before processing to avoid processing during form submission
filters = Q(timestamp__lt=min_processing_age) & filters
query = UnfinishedSubmissionStub.objects.filter(filters).order_by('timestamp')
stub_ids = list(query.values_list('id', flat=True)[:BATCH_SIZE])
if stub_ids:
UnfinishedSubmissionStub.objects.filter(pk__in=stub_ids).update(
date_queued=utcnow, attempts=F('attempts') + 1
)
return stub_ids
class Command(SubmissionReprocessingEnqueuingOperation):
pass
| bsd-3-clause | Python |
3d7b5d61b7e985d409cd50c98d4bcbdc8ab9c723 | Use current user as email author | 2mv/raapija | mailer.py | mailer.py | from marrow.mailer import Mailer as MarrowMailer
from message import Message
import sys
import os
import pwd
import socket
class Mailer:
MAILER = MarrowMailer(dict(manager=dict(use='immediate'), transport=dict(use='sendmail')))
DEFAULT_AUTHOR = pwd.getpwuid(os.getuid()).pw_name + '@' + socket.getfqdn()
@staticmethod
def send(message):
Mailer.MAILER.send(message)
@staticmethod
def start():
Mailer.MAILER.start()
@staticmethod
def stop():
Mailer.MAILER.stop()
@staticmethod
def send_transactions(transactions, to_addr):
Mailer.start()
message = Message(
author=Mailer.DEFAULT_AUTHOR,
to=to_addr,
subject='New transactions',
plain=repr(transactions)
)
Mailer.send(message)
Mailer.stop()
@staticmethod
def get_cli_email_addr():
try:
return sys.argv[1]
except IndexError:
return None
| from marrow.mailer import Mailer as MarrowMailer
from message import Message
import sys
class Mailer:
MAILER = MarrowMailer(dict(manager=dict(use='immediate'), transport=dict(use='sendmail')))
@staticmethod
def send(message):
Mailer.MAILER.send(message)
@staticmethod
def start():
Mailer.MAILER.start()
@staticmethod
def stop():
Mailer.MAILER.stop()
@staticmethod
def send_transactions(transactions, to_addr):
Mailer.start()
message = Message(
to=to_addr,
subject='New transactions',
plain=repr(transactions)
)
Mailer.send(message)
Mailer.stop()
@staticmethod
def get_cli_email_addr():
try:
return sys.argv[1]
except IndexError:
return None
| isc | Python |
169bd20e0b0158b67d0dce226730c48dd4f2763b | Update the deploy code | tanayseven/personal_website,tanayseven/personal_website,tanayseven/personal_website,tanayseven/personal_website | manage.py | manage.py | #!/usr/bin/env python3
import os
from getpass import getpass
from flask_frozen import Freezer
from manager import Manager
from personal_website.endpoint_freezer import freeze_endpoints
from personal_website.flask_app import app, process_static
from personal_website.routes import attach_routes
css_file = process_static()
attach_routes(app, css_file)
manager = Manager()
freezer = Freezer(app)
freeze_endpoints(freezer)
@manager.command
def build():
"""build the project and saves the output in the build directory"""
freezer.freeze()
@manager.command
def serve(host='0.0.0.0', port='8000'):
"""performs a build and hosts a server that runs locally on your system"""
build()
print("Running a server on " + host + ":" + port)
freezer.serve(host=host, port=int(port), debug=True)
@manager.command
def clean():
"""removes all the files that are created by the `build` command"""
os.system("rm -rf personal_website/build/*")
@manager.command
def test():
"""runs the complete test suite which includes the unit tests and the smoke tests"""
clean()
build()
os.system("py.test; behave")
@manager.command
def deploy(ip_address, user_name):
"""does an rsync to deploy to the <user>@<ip_address>:~/tanayseven.com"""
password = getpass('Enter your password: ')
os.system('sshpass -p "{password}" rsync -arvP personal_website/build/ {user_name}@{ip_address}:~/tanayseven.com'.format(
user_name=user_name,
ip_address=ip_address,
password=password,
))
if __name__ == '__main__':
manager.main()
| #!/usr/bin/env python3
import os
from getpass import getpass
from flask_frozen import Freezer
from manager import Manager
from personal_website.endpoint_freezer import freeze_endpoints
from personal_website.flask_app import app, process_static
from personal_website.routes import attach_routes
css_file = process_static()
attach_routes(app, css_file)
manager = Manager()
freezer = Freezer(app)
freeze_endpoints(freezer)
@manager.command
def build():
"""build the project and saves the output in the build directory"""
freezer.freeze()
@manager.command
def serve(host='0.0.0.0', port='8000'):
"""performs a build and hosts a server that runs locally on your system"""
build()
print("Running a server on " + host + ":" + port)
freezer.serve(host=host, port=int(port), debug=True)
@manager.command
def clean():
"""removes all the files that are created by the `build` command"""
os.system("rm -rf personal_website/build/*")
@manager.command
def test():
"""runs the complete test suite which includes the unit tests and the smoke tests"""
clean()
build()
os.system("py.test; behave")
@manager.command
def deploy(ip_address, user_name):
"""does an rsync to deploy to the <user>@<ip_address>:~/website"""
password = getpass('Enter your password: ')
os.system('sshpass -p "{password}" rsync -arvP personal_website/build/ {user_name}@{ip_address}:~/website'.format(
user_name=user_name,
ip_address=ip_address,
password=password,
))
if __name__ == '__main__':
manager.main()
| mit | Python |
516c84d3b13262e59e9be59580ac5f8959d2e650 | Add back the Django seamless virtualenv snippet to provide update_ve command #33 | tovmeod/anaf,thiagof/treeio,Sofcom/treeio,thiagof/treeio,Sofcom/treeio,thiagof/treeio,treeio/treeio,treeio/treeio,nuwainfo/treeio,treeio/treeio,nuwainfo/treeio,treeio/treeio,Sofcom/treeio,Sofcom/treeio,tovmeod/anaf,nuwainfo/treeio,treeio/treeio,nuwainfo/treeio,Sofcom/treeio,thiagof/treeio,tovmeod/anaf,tovmeod/anaf,tovmeod/anaf,thiagof/treeio | manage.py | manage.py | # encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
#!/usr/bin/env python
import sys
import shutil
try:
import virtualenv
except ImportError:
print 'Error: virtualenv module not found. Please install virtualenv (e.g. pip install virtualenv)'
import subprocess
from os import path
PROJECT_ROOT = path.abspath(path.dirname(__file__))
REQUIREMENTS = path.join(PROJECT_ROOT, 'requirements.pip')
VE_ROOT = path.join(PROJECT_ROOT, '.ve')
VE_TIMESTAMP = path.join(VE_ROOT, 'timestamp')
VE_ACTIVATE = path.join(VE_ROOT, 'bin', 'activate_this.py')
envtime = path.exists(VE_ROOT) and path.getmtime(VE_ROOT) or 0
envreqs = path.exists(VE_TIMESTAMP) and path.getmtime(VE_TIMESTAMP) or 0
envspec = path.getmtime(REQUIREMENTS)
def go_to_ve():
# going into ve
if not VE_ROOT in sys.prefix:
if sys.platform == 'win32':
python = path.join(VE_ROOT, 'Scripts', 'python.exe')
else:
python = path.join(VE_ROOT, 'bin', 'python')
try:
retcode = subprocess.call([python, __file__] + sys.argv[1:])
except KeyboardInterrupt:
retcode = 1
sys.exit(retcode)
update_ve = 'update_ve' in sys.argv
if update_ve or envtime < envspec or envreqs < envspec:
if update_ve:
# install ve
if envtime < envspec:
if path.exists(VE_ROOT):
shutil.rmtree(VE_ROOT)
virtualenv.logger = virtualenv.Logger(consumers=[])
virtualenv.create_environment(VE_ROOT, site_packages=True)
go_to_ve()
# check requirements
if update_ve or envreqs < envspec:
import pip
pip.main(initial_args=['install', '-r', REQUIREMENTS, '--upgrade'])
file(VE_TIMESTAMP, 'w').close()
sys.exit(0)
else:
print "VirtualEnv need to be updated"
print "Run ./manage.py update_ve"
sys.exit(1)
go_to_ve()
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
| # encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings) | bsd-3-clause | Python |
7337c3ab453347e8b70804dc2b5b1dd3797c0102 | fix test | jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk | bustimes/management/tests/test_tnds.py | bustimes/management/tests/test_tnds.py | import mock
import time_machine
from datetime import timedelta
from tempfile import TemporaryDirectory
from pathlib import Path
from django.test import TestCase, override_settings
from django.core.management import call_command
class TNDSTest(TestCase):
@mock.patch('bustimes.management.commands.import_tnds.call_command')
@mock.patch('ftplib.FTP', autospec=True)
@mock.patch('boto3.client', autospec=True)
def test_import_tnds(self, boto3, ftp, mock_call_command):
ftp.return_value.mlsd = mock.Mock(return_value=[
('EA.zip', {'type': 'file', 'modify': '20210719162822', 'size': '4879294'}),
('EM.zip', {'type': 'file', 'modify': '20210719162823', 'size': '21222664'}),
('IOM.zip', {'type': 'file', 'modify': '20210719162823', 'size': '501649'}),
])
with time_machine.travel('2021-01-01', tick=False):
with mock.patch('builtins.print') as mocked_print:
with TemporaryDirectory() as directory:
with override_settings(TNDS_DIR=Path(directory)):
call_command('import_tnds', 'u', 'p')
boto3.assert_called_with('s3', endpoint_url='https://ams3.digitaloceanspaces.com')
ftp.assert_called_with(host='ftp.tnds.basemap.co.uk', user='u', passwd='p')
mock_call_command.assert_called()
mocked_print.assert_called_with(timedelta())
| import mock
import time_machine
from datetime import timedelta
from django.test import TestCase
from django.core.management import call_command
class TNDSTest(TestCase):
@mock.patch('bustimes.management.commands.import_tnds.call_command')
@mock.patch('ftplib.FTP', autospec=True)
@mock.patch('boto3.client', autospec=True)
def test_import_tnds(self, boto3, ftp, mock_call_command):
ftp.return_value.mlsd = mock.Mock(return_value=[
('EA.zip', {'type': 'file', 'modify': '20210719162822', 'size': '4879294'}),
('EM.zip', {'type': 'file', 'modify': '20210719162823', 'size': '21222664'}),
('IOM.zip', {'type': 'file', 'modify': '20210719162823', 'size': '501649'}),
])
with time_machine.travel('2021-01-01', tick=False):
with mock.patch('builtins.print') as mocked_print:
call_command('import_tnds', 'u', 'p')
boto3.assert_called_with('s3', endpoint_url='https://ams3.digitaloceanspaces.com')
ftp.assert_called_with(host='ftp.tnds.basemap.co.uk', user='u', passwd='p')
mock_call_command.assert_called()
mocked_print.assert_called_with(timedelta())
| mpl-2.0 | Python |
d5a1acbae91431ca781b659243013a5928ece5b9 | fix args splitting in next cmd | Djiit/err-meetup | meetup.py | meetup.py | # coding: utf-8
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from datetime import datetime
import json
try:
from http import client
except ImportError:
import httplib as client
from jinja2 import Environment
from errbot import BotPlugin, botcmd
MEETUP_API_HOST = 'api.meetup.com'
class MeetUpPlugin(BotPlugin):
"""Basic Err integration with Jenkins CI"""
min_err_version = '3.2.3'
# max_err_version = '3.3.0'
@botcmd(split_args_with=None)
def meetup_next(self, mess, args):
"""TODO"""
if len(args) == 0:
return 'Which MeetUp group would you like to query ?'
conn = client.HTTPSConnection(MEETUP_API_HOST)
conn.request("GET", "/{name}/events".format(name=args[0]))
r = conn.getresponse()
if r.status != 200:
return "Oops, something went wrong."
res = json.loads(r.read().decode())
return self.format_events(res)
@staticmethod
def datetimeformat(timestamp):
return datetime.fromtimestamp(timestamp/1000).strftime('%d/%m/%Y')
@staticmethod
def format_events(results):
env = Environment()
env.filters['datetimeformat'] = MeetUpPlugin.datetimeformat
EVENTS_TEMPLATE = env.from_string("""Next events for {{results['0'].group.name}}:
{% for e in results%}[{{results[e].time|datetimeformat}}] \
"{{results[e].name}}" at {{results[e].venue.name}} - \
{{results[e].venue.city}} ({{results[e].link}})
{% endfor %}
""")
return EVENTS_TEMPLATE.render({"results": results})
| # coding: utf-8
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from datetime import datetime
import json
try:
from http import client
except ImportError:
import httplib as client
from jinja2 import Environment
from errbot import BotPlugin, botcmd
MEETUP_API_HOST = 'api.meetup.com'
class MeetUpPlugin(BotPlugin):
"""Basic Err integration with Jenkins CI"""
min_err_version = '3.2.3'
# max_err_version = '3.3.0'
@botcmd
def meetup_next(self, mess, args):
"""TODO"""
if len(args) == 0:
return 'Which MeetUp group would you like to query ?'
conn = client.HTTPSConnection(MEETUP_API_HOST)
conn.request("GET", "/{name}/events".format(name=args[0]))
r = conn.getresponse()
if r.status != 200:
return "Oops, something went wrong."
res = json.loads(r.read().decode())
return self.format_events(res)
@staticmethod
def datetimeformat(timestamp):
return datetime.fromtimestamp(timestamp/1000).strftime('%d/%m/%Y')
@staticmethod
def format_events(results):
env = Environment()
env.filters['datetimeformat'] = MeetUpPlugin.datetimeformat
EVENTS_TEMPLATE = env.from_string("""Next events for {{results['0'].group.name}}:
{% for e in results%}[{{results[e].time|datetimeformat}}] \
"{{results[e].name}}" at {{results[e].venue.name}} - \
{{results[e].venue.city}} ({{results[e].link}})
{% endfor %}
""")
return EVENTS_TEMPLATE.render({"results": results})
| mit | Python |
0a2b548f4c637f803ddbd19ac899fcc7ecf54caf | make the version information available to "piped -v" | alexbrasetvik/Piped,foundit/Piped,foundit/Piped,alexbrasetvik/Piped | contrib/cyclone/piped/plugins/cyclone_provider.py | contrib/cyclone/piped/plugins/cyclone_provider.py | from piped_cyclone import version
from piped_cyclone.providers import * | from piped_cyclone.providers import * | mit | Python |
696a2b19e70fdbefc416cb6b519589e404708f92 | Add www.censusreporter.org to allowed_hosts | censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter | censusreporter/config/prod/settings.py | censusreporter/config/prod/settings.py | from config.base.settings import *
DEBUG = False
ROOT_URLCONF = 'config.prod.urls'
WSGI_APPLICATION = "config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'.compute-1.amazonaws.com', # allows viewing of instances directly
'cr-prod-409865157.us-east-1.elb.amazonaws.com', # from the load balancer
]
# From https://forums.aws.amazon.com/thread.jspa?messageID=423533:
# "The Elastic Load Balancer HTTP health check will use the instance's internal IP."
# From https://dryan.com/articles/elb-django-allowed-hosts/
import requests
EC2_PRIVATE_IP = None
try:
EC2_PRIVATE_IP = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4', timeout=0.01).text
except requests.exceptions.RequestException:
pass
if EC2_PRIVATE_IP:
ALLOWED_HOSTS.append(EC2_PRIVATE_IP)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': 'localhost:11211',
}
}
| from config.base.settings import *
DEBUG = False
ROOT_URLCONF = 'config.prod.urls'
WSGI_APPLICATION = "config.prod.wsgi.application"
ALLOWED_HOSTS = [
'.censusreporter.org',
'.compute-1.amazonaws.com', # allows viewing of instances directly
'cr-prod-409865157.us-east-1.elb.amazonaws.com', # from the load balancer
]
# From https://forums.aws.amazon.com/thread.jspa?messageID=423533:
# "The Elastic Load Balancer HTTP health check will use the instance's internal IP."
# From https://dryan.com/articles/elb-django-allowed-hosts/
import requests
EC2_PRIVATE_IP = None
try:
EC2_PRIVATE_IP = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4', timeout=0.01).text
except requests.exceptions.RequestException:
pass
if EC2_PRIVATE_IP:
ALLOWED_HOSTS.append(EC2_PRIVATE_IP)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': 'localhost:11211',
}
}
| mit | Python |
27a54a2545100cbc755721275441a7cf4b93a306 | make sure we get the right service **before** closing the connection. | shenhequnying/ceph-deploy,osynge/ceph-deploy,ceph/ceph-deploy,shenhequnying/ceph-deploy,zhouyuan/ceph-deploy,imzhulei/ceph-deploy,Vicente-Cheng/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,trhoden/ceph-deploy,SUSE/ceph-deploy,ghxandsky/ceph-deploy,alfredodeza/ceph-deploy,rtulke/ceph-deploy,ddiss/ceph-deploy,rtulke/ceph-deploy,SUSE/ceph-deploy,Vicente-Cheng/ceph-deploy,imzhulei/ceph-deploy,isyippee/ceph-deploy,branto1/ceph-deploy,ktdreyer/ceph-deploy,codenrhoden/ceph-deploy,zhouyuan/ceph-deploy,ktdreyer/ceph-deploy,alfredodeza/ceph-deploy,jumpstarter-io/ceph-deploy,ghxandsky/ceph-deploy,codenrhoden/ceph-deploy,ceph/ceph-deploy,isyippee/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,ddiss/ceph-deploy,osynge/ceph-deploy,trhoden/ceph-deploy,jumpstarter-io/ceph-deploy,branto1/ceph-deploy | ceph_deploy/hosts/debian/mon/create.py | ceph_deploy/hosts/debian/mon/create.py | from ceph_deploy.hosts import common
from ceph_deploy.misc import remote_shortname
from ceph_deploy.lib.remoto import process
from ceph_deploy.connection import get_connection
def create(distro, logger, args, monitor_keyring):
hostname = remote_shortname(distro.sudo_conn.modules.socket)
common.mon_create(distro, logger, args, monitor_keyring, hostname)
service = common.which_service(distro.sudo_conn, logger)
distro.sudo_conn.close()
# TODO transition this once pushy is out
rconn = get_connection(hostname, logger)
if distro.init == 'upstart': # Ubuntu uses upstart
process.run(
rconn,
[
'initctl',
'emit',
'ceph-mon',
'cluster={cluster}'.format(cluster=args.cluster),
'id={hostname}'.format(hostname=hostname),
],
exit=True,
timeout=7,
)
elif distro.init == 'sysvinit': # Debian uses sysvinit
process.run(
rconn,
[
service,
'ceph',
'start',
'mon.{hostname}'.format(hostname=hostname)
],
exit=True,
timeout=7,
)
else:
raise RuntimeError('create cannot use init %s' % distro.init)
| from ceph_deploy.hosts import common
from ceph_deploy.misc import remote_shortname
from ceph_deploy.lib.remoto import process
from ceph_deploy.connection import get_connection
def create(distro, logger, args, monitor_keyring):
hostname = remote_shortname(distro.sudo_conn.modules.socket)
common.mon_create(distro, logger, args, monitor_keyring, hostname)
distro.sudo_conn.close()
# TODO transition this once pushy is out
rconn = get_connection(hostname, logger)
if distro.init == 'upstart': # Ubuntu uses upstart
process.run(
rconn,
[
'initctl',
'emit',
'ceph-mon',
'cluster={cluster}'.format(cluster=args.cluster),
'id={hostname}'.format(hostname=hostname),
],
exit=True,
timeout=7,
)
elif distro.init == 'sysvinit': # Debian uses sysvinit
service = common.which_service(distro.sudo_conn, logger)
process.run(
rconn,
[
service,
'ceph',
'start',
'mon.{hostname}'.format(hostname=hostname)
],
exit=True,
timeout=7,
)
else:
raise RuntimeError('create cannot use init %s' % distro.init)
| mit | Python |
b07d46dd5c20bbf98d867595c138ec95b3e17f2b | Update mo_sms.py | darkpioneer/aaisp | mo_sms.py | mo_sms.py | #!/usr/bin/python
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import cgi, sys
class PostHandler(BaseHTTPRequestHandler):
def do_POST(self):
# Parse the form data posted
form = cgi.FieldStorage(
fp=self.rfile,
headers=self.headers,
environ={'REQUEST_METHOD':'POST','CONTENT_TYPE':self.headers['Content-Type'],})
# Get data from fields, posted values from https://support.aa.net.uk/SMS_API
username = form.getvalue('username')
password = form.getvalue('password')
source = form.getvalue('oa')
destination = form.getvalue('da')
text = form.getvalue('ud')
limit = form.getvalue('limit')
costcentre = form.getvalue('costcentre')
private = form.getvalue('private')
udh = form.getvalue('udh')
srr = form.getvalue('srr')
# Begin the response
self.send_response(200)
self.end_headers()
# Print the sms received
print "SMS Received From ICCID: " + source + " sent to: " + destination + " containing the text: " + text
def run(server_class=HTTPServer, handler_class=PostHandler, port=8001):
try:
server_address = ('', port)
httpd = server_class(server_address, handler_class)
print 'Starting server, use <Ctrl-C> to stop'
httpd.serve_forever()
except KeyboardInterrupt:
print "Shutting Down"
except Exception:
trackback.print_exc(file=sys.stdout)
sys.exit(0)
if __name__ == "__main__":
from sys import argv
if len(argv) == 2:
run(port=int(argv[1]))
else:
run()
| #!/usr/bin/python
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import cgi, sys
class PostHandler(BaseHTTPRequestHandler):
def do_POST(self):
# Parse the form data posted
form = cgi.FieldStorage(
fp=self.rfile,
headers=self.headers,
environ={'REQUEST_METHOD':'POST','CONTENT_TYPE':self.headers['Content-Type'],})
# Get data from fields, posted values from https://support.aa.net.uk/SMS_API
username = form.getvalue('username')
password = form.getvalue('password')
source = form.getvalue('oa')
destination = form.getvalue('da')
text = form.getvalue('ud')
limit = form.getvalue('limit')
costcentre = form.getvalue('costcentre')
private = form.getvalue('private')
udh = form.getvalue('udh')
srr = form.getvalue('srr')
# Begin the response
self.send_response(200)
self.end_headers()
# Print the sms received
print "SMS Received From ICCID: " + source + " sent to: " + destination + " containing the text: " + text
def run(server_class=HTTPServer, handler_class=PostHandler, port=8001):
try:
server_address = ('', port)
httpd = server_class(server_address, handler_class)
print 'Starting server, use <Ctrl-C> to stop'
httpd.serve_forever()
except KeyboardInterrupt:
print "Shutting Down"
except Exception:
trackback.print_exc(file=sys.stdout)
sys.exit(0)
if __name__ == "__main__":
from sys import argv
if len(argv) == 2:
run(port=int(argv[1]))
else:
run()
| agpl-3.0 | Python |
3023f630fd615dffb44ac75d593d9c2677b8a0b6 | Add rest to import | hydroshare/hydroshare,hydroshare/hydroshare,FescueFungiShare/hydroshare,RENCI/xDCIShare,RENCI/xDCIShare,hydroshare/hydroshare,hydroshare/hydroshare,ResearchSoftwareInstitute/MyHPOM,FescueFungiShare/hydroshare,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,FescueFungiShare/hydroshare,FescueFungiShare/hydroshare,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare | hs_core/tests/__init__.py | hs_core/tests/__init__.py | from .api.native import *
from .api.rest import *
| __author__ = 'jeffersonheard'
from .api.native import *
#from .api.http import *
| bsd-3-clause | Python |
21ec2389f430b67b0380ef43c05e3c82de24cd23 | Check for mariadb file descriptor limit | coolsvap/clapper,coolsvap/clapper,rthallisey/clapper,coolsvap/clapper,rthallisey/clapper | check_overcloud_controller_settings.py | check_overcloud_controller_settings.py | #!/usr/bin/env python
import os
import ConfigParser
MARIADB_MAX_CONNECTIONS_MIN = 4096
MARIADB_OPEN_FILES_LIMIT_MIN = 16384
def find_mariadb_config_file():
potential_locations = [
'/etc/my.cnf.d/galera.cnf',
'/etc/my.cnf.d/server.cnf',
'/etc/my.cnf',
]
for filepath in potential_locations:
if os.access(filepath, os.R_OK):
return filepath
raise Exception(
"Can't find mariadb config at %s" %
potential_locations
)
def check_mariadb_config():
config_file = find_mariadb_config_file()
config = ConfigParser.SafeConfigParser()
config.read(config_file)
print "Checking settings in {}".format(config_file)
if not config.has_option('mysqld', 'max_connections'):
print "WARNING max_connections is unset, it should be at least {}" \
.format(MARIADB_MAX_CONNECTIONS_MIN)
elif config.getint('mysqld', 'max_connections') < MARIADB_MAX_CONNECTIONS_MIN:
print "WARNING max_connections is {}, it should be at least {}".format(
config.getint('mysqld', 'max_connections'),
MARIADB_MAX_CONNECTIONS_MIN)
if config.has_option('mysqld', 'open_files_limit') and \
config.getint('mysqld', 'open_files_limit') < MARIADB_OPEN_FILES_LIMIT_MIN:
print "WARNING open_files_limit is {}, it should be at least {}".format(
config.getint('mysqld', 'open_files_limit'),
MARIADB_OPEN_FILES_LIMIT_MIN)
check_mariadb_config()
| #!/usr/bin/env python
import os
import ConfigParser
MARIADB_MAX_CONNECTIONS_MIN = 4096
def find_mariadb_config_file():
potential_locations = [
'/etc/my.cnf.d/galera.cnf',
'/etc/my.cnf.d/server.cnf',
'/etc/my.cnf',
]
for filepath in potential_locations:
if os.access(filepath, os.R_OK):
return filepath
raise Exception(
"Can't find mariadb config at %s" %
potential_locations
)
def check_mariadb_config():
config_file = find_mariadb_config_file()
config = ConfigParser.SafeConfigParser()
config.read(config_file)
print "Checking settings in {}".format(config_file)
if not config.has_option('mysqld', 'max_connections'):
print "WARNING max_connections is unset, it should be at least {}" \
.format(MARIADB_MAX_CONNECTIONS_MIN)
elif config.getint('mysqld', 'max_connections') < MARIADB_MAX_CONNECTIONS_MIN:
print "WARNING max_connections is {}, it should be at least {}".format(
config.getint('mysqld', 'max_connections'),
MARIADB_MAX_CONNECTIONS_MIN)
check_mariadb_config()
| apache-2.0 | Python |
aa8fb0d70a6fcbee4f5bd3ddfd940c7093abd0de | delete image file on deletion of Image object | allo-/django-imagehoster | models.py | models.py | from django.db import models
from django.db.models.signals import pre_delete
from django.dispatch import receiver
import settings
assert hasattr(settings, "UPLOAD_DIR"), "you need to set UPLOAD_DIR (relative to MEDIA_URL) in settings"
assert len(settings.UPLOAD_DIR) and settings.UPLOAD_DIR[-1] == "/", "UPLOAD_DIR must have a trailing slash"
class Image(models.Model):
date=models.DateTimeField(auto_now_add=True)
image=models.ImageField(upload_to=settings.UPLOAD_DIR)
def get_filename(self):
return str(self.image.name).split("/")[-1]
@receiver(pre_delete, sender=Image)
def image_deleted(sender, instance, **kwargs):
instance.image.delete()
| from django.db import models
import settings
assert hasattr(settings, "UPLOAD_DIR"), "you need to set UPLOAD_DIR (relative to MEDIA_URL) in settings"
assert len(settings.UPLOAD_DIR) and settings.UPLOAD_DIR[-1] == "/", "UPLOAD_DIR must have a trailing slash"
class Image(models.Model):
date=models.DateTimeField(auto_now_add=True)
image=models.ImageField(upload_to=settings.UPLOAD_DIR)
def get_filename(self):
return str(self.image.name).split("/")[-1]
| agpl-3.0 | Python |
65973802a3e68e23f9a903937ef94f8afa277013 | Create documentation of DataSource Settings | stoeps13/ibmcnx2,stoeps13/ibmcnx2 | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.splitlines()
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines().split('('):
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | apache-2.0 | Python |
0016c305f872ae63e5bb2126630f0742b9221113 | change module iface by environment variable GDPY3_IFACE | shmilee/gdpy3,shmilee/gdpy3,shmilee/gdpy3,shmilee/gdpy3 | src/__main__.py | src/__main__.py | # -*- coding: utf-8 -*-
# Copyright (c) 2020 shmilee
if __name__ == "__main__":
import os
iface = os.getenv('GDPY3_IFACE', default='gui')
if iface == 'gui':
from .GUI import gui_script as i_script
else:
from .cli import cli_script as i_script
i_script()
| # -*- coding: utf-8 -*-
# Copyright (c) 2020 shmilee
if __name__ == "__main__":
iface = 'cli'
if iface == 'gui':
from .GUI import gui_script as i_script
else:
from .cli import cli_script as i_script
i_script()
| mit | Python |
b19f86e13d0fca2f2ea5dbbc99e4cf8b08a3a81d | add validate for store view | Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup | storemanage/views.py | storemanage/views.py | from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.models import User
from .models import Currency, Ticket
from django.utils.dateparse import parse_date
@login_required()
@permission_required('usermanage.store_rights',raise_exception=True)
def index(request):
user = request.user
tickets = Ticket.objects.filter(store=user)
return render(request,'store/index.html', {
'user': user,
'tickets': tickets
})
@login_required()
@permission_required('usermanage.store_rights',raise_exception=True)
def currencyRegister(request):
user = request.user
if request.method == 'GET':
return render(request,'storemanage/currency-form.html')
data = request.POST
currency = Currency(store=user,name=data['name'])
currency.save()
return redirect('index:index')
def validateForm(data):
error = {}
if not data['name']:
error['name'] = True
if not data['detail']:
error['price'] = True
if not data['expire_date']:
error['expire_date'] = True
if not data['currency']:
error['currency'] = True
if not data['ticket_image_url']:
error['ticket_image_url'] = True
if not data['content_image_url']:
error['content_image_url'] = True
return error
@login_required()
@permission_required('usermanage.store_rights',raise_exception=True)
def ticketRegister(request):
user = request.user
currency_list = [{'pk':c.pk,'name':c.name} for c in Currency.objects.filter(store=user)]
context = {
'currency_list':currency_list
}
if request.method == 'GET':
return render(request,'store/add.html',context)
data = request.POST
error = validateForm(data)
if error:
return render(request,'store/add.html', {
'error': error,
'currency_list':currency_list
})
ticket_attrib = {k:v for k,v in data.items() if v != ''}
ticket_attrib.pop('csrfmiddlewaretoken')
ticket_attrib['is_period'] = True if 'is_period' in ticket_attrib else False
ticket_attrib['is_limit'] = True if 'is_limit' in ticket_attrib else False
ticket_attrib['currency'] = Currency.objects.get(pk=ticket_attrib['currency'])
ticket_attrib['store'] = user
ticket = Ticket(**ticket_attrib)
ticket.save()
return redirect('store:index')
@login_required()
@permission_required('usermanage.store_rights',raise_exception=True)
def ticketEdit(request, ticket_id):
user = request.user
currency_list = [{'pk':c.pk,'name':c.name} for c in Currency.objects.filter(store=user)]
ticket = Ticket.objects.get(pk=ticket_id)
return render(request, 'store/edit.html', {
'ticket': ticket,
'currency_list':currency_list
}) | from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.models import User
from .models import Currency, Ticket
from django.utils.dateparse import parse_date
@login_required()
@permission_required('usermanage.store_rights',raise_exception=True)
def index(request):
user = request.user
tickets = Ticket.objects.filter(store=user)
return render(request,'store/index.html', {
'user': user,
'tickets': tickets
})
def currencyRegister(request):
user = request.user
if request.method == 'GET':
return render(request,'storemanage/currency-form.html')
data = request.POST
currency = Currency(store=user,name=data['name'])
currency.save()
return redirect('index:index')
def validateForm(data):
error = {}
if not data['name']:
error['name'] = True
if not data['detail']:
error['price'] = True
if not data['expire_date']:
error['expire_date'] = True
if not data['currency']:
error['currency'] = True
if not data['ticket_image_url']:
error['ticket_image_url'] = True
if not data['content_image_url']:
error['content_image_url'] = True
return error
def ticketRegister(request):
user = request.user
currency_list = [{'pk':c.pk,'name':c.name} for c in Currency.objects.filter(store=user)]
context = {
'currency_list':currency_list
}
if request.method == 'GET':
return render(request,'store/add.html',context)
data = request.POST
error = validateForm(data)
if error:
return render(request,'store/add.html', {
'error': error,
'currency_list':currency_list
})
ticket_attrib = {k:v for k,v in data.items() if v != ''}
ticket_attrib.pop('csrfmiddlewaretoken')
ticket_attrib['is_period'] = True if 'is_period' in ticket_attrib else False
ticket_attrib['is_limit'] = True if 'is_limit' in ticket_attrib else False
ticket_attrib['currency'] = Currency.objects.get(pk=ticket_attrib['currency'])
ticket_attrib['store'] = user
ticket = Ticket(**ticket_attrib)
ticket.save()
return redirect('store:index')
def ticketEdit(request, ticket_id):
user = request.user
currency_list = [{'pk':c.pk,'name':c.name} for c in Currency.objects.filter(store=user)]
ticket = Ticket.objects.get(pk=ticket_id)
return render(request, 'store/edit.html', {
'ticket': ticket,
'currency_list':currency_list
}) | mit | Python |
7cf05ec8b45e2f50f2e45ef13354f2b54a02c712 | support stormlocal -t in streamparse.cmdln | eric7j/streamparse,scrapinghub/streamparse,eric7j/streamparse,petchat/streamparse,phanib4u/streamparse,msmakhlouf/streamparse,msmakhlouf/streamparse,scrapinghub/streamparse,hodgesds/streamparse,msmakhlouf/streamparse,Parsely/streamparse,codywilbourn/streamparse,crohling/streamparse,phanib4u/streamparse,scrapinghub/streamparse,scrapinghub/streamparse,msmakhlouf/streamparse,petchat/streamparse,scrapinghub/streamparse,msmakhlouf/streamparse,crohling/streamparse,Parsely/streamparse,codywilbourn/streamparse,petchat/streamparse,hodgesds/streamparse,petchat/streamparse,petchat/streamparse | streamparse/cmdln.py | streamparse/cmdln.py | from docopt import docopt
from invoke import run
def main():
"""sparse: manage streamparse clusters.
sparse provides a front-end to streamparse, a framework for creating Python
projects for running, debugging, and submitting computation topologies against
real-time streams, using Apache Storm.
It requires the lein (Clojure build tool) to be on your $PATH, and uses
lein and Clojure under the hood for JVM interop.
Usage:
sparse quickstart <proj_name>
sparse setup [-e ENV]
sparse run [-e ENV] [-t TIME]
sparse debug [-e ENV]
sparse kill [-e ENV]
sparse restart [-e ENV]
sparse attach [-e ENV]
sparse list [-e ENV]
sparse submit [-e ENV]
sparse logs [-e ENV]
sparse (-h | --help)
sparse --version
Options:
-h --help Show this screen.
--version Show version.
-e ENV Set environment; as described in config.json [default: local].
-t TIME Time (in milliseconds) to keep cluster running [default: 5000].
--verbose Verbose output.
--debug Debug output.
"""
args = docopt(main.__doc__, version="sparse 0.1")
print args
if args["run"]:
print "Running wordcount topology..."
word_count = "topologies/wordcount.clj"
run("lein run -s {topology} -t {time}".format(
topology=word_count, time=args["-t"]))
elif args["debug"]:
print "Debugging wordcount topology..."
run("lein run -s topologies/wordcount.clj")
elif args["list"]:
print "invoke tasks:"
run("invoke -l")
print
print "fabric tasks:"
run("fab -l")
elif args["setup"]:
print "Setting up virtualenv on remote cluster..."
run("fab workers setup_virtualenv")
elif args["quickstart"]:
print "Starting a new sparse project..."
run("echo mkdir -p yourproj ...")
if __name__ == "__main__":
main()
| from docopt import docopt
from invoke import run
def main():
"""sparse: manage StreamParse clusters.
sparse provides a front-end to StreamParse, a framework for creating Python
projects for running, debugging, and submitting Storm topologies for data
processing.
It requires the lein (Clojure build tool) to be on your $PATH, and uses
lein and Clojure under the hood for JVM interop.
Usage:
sparse quickstart <proj_name>
sparse setup [-e ENV]
sparse run [-e ENV]
sparse debug [-e ENV]
sparse kill [-e ENV]
sparse restart [-e ENV]
sparse attach [-e ENV]
sparse list [-e ENV]
sparse submit [-e ENV]
sparse logs [-e ENV]
sparse (-h | --help)
sparse --version
Options:
-h --help Show this screen.
--version Show version.
-e ENV Set environment [default: local].
--verbose Verbose output.
--debug Debug output.
"""
args = docopt(main.__doc__, version="sparse 0.1")
print args
if args["run"]:
print "Running wordcount topology..."
run("lein run -s topologies/wordcount.clj")
elif args["debug"]:
print "Debugging wordcount topology..."
run("lein run -s topologies/wordcount.clj")
elif args["list"]:
print "invoke tasks:"
run("invoke -l")
print
print "fabric tasks:"
run("fab -l")
elif args["setup"]:
print "Setting up virtualenv on remote cluster..."
run("fab workers setup_virtualenv")
elif args["quickstart"]:
print "Starting a new sparse project..."
run("echo mkdir -p yourproj ...")
if __name__ == "__main__":
main()
| apache-2.0 | Python |
d6fd0ee9e38ccc4391eea4e73dfdda45a365621f | Fix errors in tasks | p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles | studygroups/tasks.py | studygroups/tasks.py | from __future__ import absolute_import
from celery import shared_task
from django.utils import timezone
from django.conf import settings
from django.utils import translation
from django.contrib.auth.models import User
from studygroups.models import StudyGroup
from studygroups.models import StudyGroupMeeting
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from studygroups.models import send_weekly_update
from studygroups.models import send_new_studygroup_email
from studygroups.models import send_new_facilitator_email
import datetime
@shared_task
def send_reminders():
now = timezone.now()
translation.activate(settings.LANGUAGE_CODE)
# TODO - should this be set here or closer to where the language matters?
# TODO - make sure both the StudyGroup and StudyGroupMeeting is still available
for reminder in Reminder.objects.filter(sent_at__isnull=True, study_group__in=StudyGroup.objects.active(), study_group_meeting__in=StudyGroupMeeting.objects.active()):
if reminder.study_group_meeting and reminder.study_group_meeting.meeting_datetime() - now < datetime.timedelta(days=2):
send_reminder(reminder)
@shared_task
def gen_reminders():
for study_group in StudyGroup.objects.active():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
@shared_task
def weekly_update():
# Create a report for the previous week
send_weekly_update()
@shared_task
def send_new_facilitator_emails():
# send email to organizers who signed up a week ago
now = timezone.now()
seven_days_ago = now.date() - datetime.timedelta(days=7)
six_days_ago = now.date() - datetime.timedelta(days=6)
for facilitator in User.objects.filter(date_joined__gte=seven_days_ago, date_joined__lt=six_days_ago):
send_new_facilitator_email(facilitator)
@shared_task
def send_new_studygroup_emails():
# send email to organizers who signed up a week ago
now = timezone.now()
seven_days_ago = now.date() - datetime.timedelta(days=7)
six_days_ago = now.date() - datetime.timedelta(days=6)
for studygroup in StudyGroup.objects.filter(created_at__gte=seven_days_ago, created_at__lt=six_days_ago):
send_new_studygroup_email(studygroup)
| from __future__ import absolute_import
from celery import shared_task
from django.utils import timezone
from django.conf import settings
from django.utils import translation
from django.contrib.auth.models import User
from studygroups.models import StudyGroup
from studygroups.models import StudyGroupMeeting
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from studygroups.models import send_weekly_update
from studygroups.models import send_new_studygroup_email
from studygroups.models import send_new_facilitator_email
import datetime
@shared_task
def send_reminders():
now = timezone.now()
translation.activate(settings.LANGUAGE_CODE)
# TODO - should this be set here or closer to where the language matters?
# TODO - make sure both the StudyGroup and StudyGroupMeeting is still available
for reminder in Reminder.objects.filter(sent_at__isnull=True, study_group__in=StudyGroup.objects.active(), study_group_meeting__in=StudyGroupMeeting.objects.active()):
if reminder.study_group_meeting and reminder.study_group_meeting.meeting_datetime() - now < datetime.timedelta(days=2):
send_reminder(reminder)
@shared_task
def gen_reminders():
for study_group in StudyGroup.objects.active():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
@shared_task
def weekly_update():
# Create a report for the previous week
send_weekly_update()
@shared_task
def send_new_facilitator_emails():
# send email to organizers who signed up a week ago
now = timezone.now()
seven_days_ago = now.date() - datetime.timeteldta(days=7)
six_days_ago = now.date() - datetime.timeteldta(days=6)
for faciltator in User.objects.filter(date_joined__gte=seven_days_ago, date_joined__lt=six_days_ago):
send_new_facilitator_email(facilitator)
@shared_task
def send_new_studygroup_emails():
# send email to organizers who signed up a week ago
now = timezone.now()
seven_days_ago = now.date() - datetime.timeteldta(days=7)
six_days_ago = now.date() - datetime.timeteldta(days=6)
for studygroup in StudyGroup.objects.filter(created_at__gte=seven_days_ago, created_at__lt=six_days_ago):
send_new_studygroup_email(studygroup)
| mit | Python |
99ce8dedeec67b46c7e7f90709bedaf132d9fcd5 | Fix syntax error | p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles | studygroups/tasks.py | studygroups/tasks.py | from django.utils import timezone
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_group_message
import datetime
def send_reminders():
now = timezone.now()
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.meeting_date - now < datetime.timedelta(days=2):
send_group_message(
reminder.study_group,
reminder.email_subject,
reminder.email_body,
reminder.sms_body
)
reminder.sent_at = now
reminder.save()
def gen_reminders():
for study_group in StudyGroup.objects.all():
generate_reminder(study_group)
| from django.utils import timezone
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.generate_reminder
from studygroups.models import send_group_message
import datetime
def send_reminders():
now = timezone.now()
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.meeting_date - now < datetime.timedelta(days=2):
send_group_message(
reminder.study_group,
reminder.email_subject,
reminder.email_body,
reminder.sms_body
)
reminder.sent_at = now
reminder.save()
def gen_reminders():
for study_group in StudyGroup.objects.all():
generate_reminder(study_group)
| mit | Python |
5c180b5048df69f9ae37289c5ae49cb266ba10cf | Replace use of deprecated Pillow constant `Image.ANTIALIAS` | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | byceps/util/image/__init__.py | byceps/util/image/__init__.py | """
byceps.util.image
~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from io import BytesIO
from typing import BinaryIO, Union
from PIL import Image, ImageFile
from .models import Dimensions
FilenameOrStream = Union[str, BinaryIO]
def read_dimensions(filename_or_stream: FilenameOrStream) -> Dimensions:
"""Return the dimensions of the image."""
image = Image.open(filename_or_stream)
return Dimensions(*image.size)
def create_thumbnail(
filename_or_stream: FilenameOrStream,
image_type: str,
maximum_dimensions: Dimensions,
*,
force_square: bool = False,
) -> BinaryIO:
"""Create a thumbnail from the given image and return the result stream."""
output_stream = BytesIO()
image = Image.open(filename_or_stream)
if force_square:
image = _crop_to_square(image)
image.thumbnail(maximum_dimensions, resample=Image.Resampling.LANCZOS)
image.save(output_stream, format=image_type)
output_stream.seek(0)
return output_stream
def _crop_to_square(image: ImageFile) -> ImageFile:
"""Crop image to be square."""
dimensions = Dimensions(*image.size)
if dimensions.is_square:
return image
edge_length = min(*dimensions)
crop_box = (0, 0, edge_length, edge_length)
return image.crop(crop_box)
| """
byceps.util.image
~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from io import BytesIO
from typing import BinaryIO, Union
from PIL import Image, ImageFile
from .models import Dimensions
FilenameOrStream = Union[str, BinaryIO]
def read_dimensions(filename_or_stream: FilenameOrStream) -> Dimensions:
"""Return the dimensions of the image."""
image = Image.open(filename_or_stream)
return Dimensions(*image.size)
def create_thumbnail(
filename_or_stream: FilenameOrStream,
image_type: str,
maximum_dimensions: Dimensions,
*,
force_square: bool = False,
) -> BinaryIO:
"""Create a thumbnail from the given image and return the result stream."""
output_stream = BytesIO()
image = Image.open(filename_or_stream)
if force_square:
image = _crop_to_square(image)
image.thumbnail(maximum_dimensions, resample=Image.ANTIALIAS)
image.save(output_stream, format=image_type)
output_stream.seek(0)
return output_stream
def _crop_to_square(image: ImageFile) -> ImageFile:
"""Crop image to be square."""
dimensions = Dimensions(*image.size)
if dimensions.is_square:
return image
edge_length = min(*dimensions)
crop_box = (0, 0, edge_length, edge_length)
return image.crop(crop_box)
| bsd-3-clause | Python |
b5f9f36daf680eb1873558dcd853b99ccf85d1a7 | change path to stored re data | the-it/WS_THEbotIT,the-it/WS_THEbotIT | scripts/service/ws_re/download/base.py | scripts/service/ws_re/download/base.py | import os
from abc import ABC, abstractmethod
from pathlib import Path
BASE_PATH = Path("/mnt/temp_erik/re")
if not os.path.isdir(BASE_PATH):
os.mkdir(BASE_PATH)
class DownloadTarget(ABC):
@abstractmethod
def get_source(self):
pass
@abstractmethod
def get_target(self):
pass
| import os
from abc import ABC, abstractmethod
from pathlib import Path
BASE_PATH = Path.home().joinpath("re")
if not os.path.isdir(BASE_PATH):
os.mkdir(BASE_PATH)
class DownloadTarget(ABC):
@abstractmethod
def get_source(self):
pass
@abstractmethod
def get_target(self):
pass
| mit | Python |
f9c198194d8fd64b7e5c43eaf252edf79a16e07f | Update setup.py for new pip package release (#262) | google/osv.dev,google/osv.dev,google/osv.dev,google/osv.dev,google/osv.dev | lib/setup.py | lib/setup.py | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""setup.py for OSV."""
import setuptools
with open('README.md', 'r') as fh:
long_description = fh.read()
setuptools.setup(
name='osv',
version='0.0.7',
author='OSV authors',
author_email='osv-discuss@googlegroups.com',
description='Open Source Vulnerabilities library',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/google/osv',
packages=setuptools.find_packages(),
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
],
install_requires=[
'google-cloud-ndb',
'pygit2',
'PyYAML',
'semver',
],
package_dir={
'': '.',
},
python_requires='>=3.7',
zip_safe=False,
)
| # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""setup.py for OSV."""
import setuptools
with open('README.md', 'r') as fh:
long_description = fh.read()
setuptools.setup(
name='osv',
version='0.0.6',
author='OSV authors',
author_email='osv-discuss@googlegroups.com',
description='Open Source Vulnerabilities library',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/google/osv',
packages=setuptools.find_packages(),
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
],
install_requires=[
'google-cloud-ndb',
'pygit2',
'PyYAML',
'semver',
],
package_dir={
'': '.',
},
python_requires='>=3.7',
zip_safe=False,
)
| apache-2.0 | Python |
1d0800c2fada6d88a9650c85bfd5b4e78c3b9973 | Fix Flake8 violation | Commonists/pageview-api | pageviewapi/period.py | pageviewapi/period.py | """Helper functions on period."""
import datetime
import pageviewapi.client
def sum_last(project, page, last=30, agent='all-agents', access='all-access'):
"""Page views during last days."""
views = pageviewapi.client.per_article(project, page,
__days_ago__(last),
__today__(),
access=access, agent=agent)
return sum([daily['views'] for daily in views['items']])
def avg_last(project, page, last=30, agent='all-agents', access='all-access'):
"""Page views during last days."""
views = pageviewapi.client.per_article(project, page,
__days_ago__(last),
__today__(),
access=access, agent=agent)
return __avg__([daily['views'] for daily in views['items']])
def __today__():
"""Date of the day as YYYYmmdd format."""
return datetime.date.today().strftime('%Y%m%d')
def __days_ago__(days):
"""Days ago as YYYYmmdd format."""
today = datetime.date.today()
delta = datetime.timedelta(days=days)
ago = today - delta
return ago.strftime('%Y%m%d')
def __avg__(numericlist):
"""Basic average function."""
return sum(numericlist) / float(len(numericlist))
| """Helper functions on period."""
import datetime
import pageviewapi.client
def sum_last(project, page, last=30, agent='all-agents', access='all-access'):
"""Page views during last days."""
views = pageviewapi.client.per_article(project, page,
__days_ago__(last),
__today__(),
access=access, agent=agent)
return sum([daily['views'] for daily in views['items']])
def avg_last(project, page, last=30, agent='all-agents', access='all-access'):
"""Page views during last days."""
views = pageviewapi.client.per_article(project, page,
__days_ago__(last),
__today__(),
access=access, agent=agent)
return __avg__([daily['views'] for daily in views['items']])
def __today__():
"""Date of the day as YYYYmmdd format."""
return datetime.date.today().strftime('%Y%m%d')
def __days_ago__(days):
"""Days ago as YYYYmmdd format."""
today = datetime.date.today()
delta = datetime.timedelta(days=days)
ago = today - delta
return ago.strftime('%Y%m%d')
def __avg__(numericlist):
"""Basic average function."""
return sum(numericlist)/float(len(numericlist))
| mit | Python |
6ae5d15c00ce6bd1aa21f522db5a6b423e772bff | add note about assumption re: energy | mattjj/pybasicbayes,fivejjs/pybasicbayes,michaelpacer/pybasicbayes | parallel_tempering.py | parallel_tempering.py | from __future__ import division
import numpy as np
from collections import defaultdict
from util.text import progprint_xrange
class ParallelTempering(object):
def __init__(self,model,temperatures):
temperatures = [1.] + list(sorted(temperatures))
self.models = [model.copy_sample() for T in temperatures]
for m,T in zip(self.models,temperatures):
m.temperature = T
self.swapcounts = defaultdict(int)
self.itercount = 0
@property
def unit_temp_model(self):
return self.models[0]
@property
def temperatures(self):
return [m.temperature for m in self.models]
@property
def energies(self):
# NOTE: this line assumes that only the likelihood terms are
# temperature-raised (and not the priors, so they cancel!)
return [m.log_likelihood() for m in self.models]
@property
def triples(self):
return zip(self.models,self.energies,self.temperatures)
def step(self,intermediate_resamples):
for m in self.models:
for itr in xrange(intermediate_resamples):
m.resample_model()
for (M1,E1,T1), (M2,E2,T2) in zip(self.triples[:-1],self.triples[1:]):
swap_logprob = min(0., (E1-E2)*(1./T1 - 1./T2) )
if np.log(np.random.random()) < swap_logprob:
M1.swap_sample_with(M2)
self.swapcounts[(T1,T2)] += 1
self.itercount += 1
def run(self,niter,intermediate_resamples):
samples = []
for itr in progprint_xrange(niter):
self.step(intermediate_resamples)
samples.append(self.unit_temp_model.copy_sample())
return samples
| from __future__ import division
import numpy as np
from collections import defaultdict
from util.text import progprint_xrange
class ParallelTempering(object):
def __init__(self,model,temperatures):
temperatures = [1.] + list(sorted(temperatures))
self.models = [model.copy_sample() for T in temperatures]
for m,T in zip(self.models,temperatures):
m.temperature = T
self.swapcounts = defaultdict(int)
self.itercount = 0
@property
def unit_temp_model(self):
return self.models[0]
@property
def temperatures(self):
return [m.temperature for m in self.models]
@property
def energies(self):
return [m.log_likelihood() for m in self.models]
@property
def triples(self):
return zip(self.models,self.energies,self.temperatures)
def step(self,intermediate_resamples):
for m in self.models:
for itr in xrange(intermediate_resamples):
m.resample_model()
for (M1,E1,T1), (M2,E2,T2) in zip(self.triples[:-1],self.triples[1:]):
swap_logprob = min(0., (E1-E2)*(1./T1 - 1./T2) )
if np.log(np.random.random()) < swap_logprob:
M1.swap_sample_with(M2)
self.swapcounts[(T1,T2)] += 1
self.itercount += 1
def run(self,niter,intermediate_resamples):
samples = []
for itr in progprint_xrange(niter):
self.step(intermediate_resamples)
samples.append(self.unit_temp_model.copy_sample())
return samples
| mit | Python |
b6fa47b0a2195c754c0770316af5130cdad4f8af | Test for `Every` | jscott1989/django-periodically,hzdg/django-periodically | periodically/tests.py | periodically/tests.py | from django.test import TestCase
from . import schedules
from datetime import datetime
now = datetime(1983, 7, 1, 3, 41)
class ScheduleTest(TestCase):
def test_hourly(self):
sched = schedules.Hourly(20, 2, 4)
self.assertEqual(sched.time_before(now), datetime(1983, 7, 1, 3, 20, 2, 4))
self.assertEqual(sched.time_after(now), datetime(1983, 7, 1, 4, 20, 2, 4))
def test_every(self):
sched = schedules.Every(minutes=1)
self.assertEqual(sched.time_before(now), datetime(1983, 7, 1, 3, 41))
self.assertEqual(sched.time_after(now), datetime(1983, 7, 1, 3, 42))
| from django.test import TestCase
from . import schedules
from datetime import datetime
now = datetime(1983, 7, 1, 3, 41)
class ScheduleTest(TestCase):
def test_hourly(self):
sched = schedules.Hourly(20, 2, 4)
self.assertEqual(sched.time_before(now), datetime(1983, 7, 1, 3, 20, 2, 4))
self.assertEqual(sched.time_after(now), datetime(1983, 7, 1, 4, 20, 2, 4))
| mit | Python |
3943a81a96dbed18ac14a85420a1094c6ea2f8d1 | Fix dumb typo | twneale/tater,twneale/tater | tater/core/config.py | tater/core/config.py | import os
import logging
LOG_MSG_MAXWIDTH = 300
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'tater.utils.ansiterm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'tater': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': False
},
'tater.Lexer': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': False
},
},
}
LOGLEVEL = None
if not os.environ.get('TATER_IGNORE_COMMANDLINE'):
import argparse
parser = argparse.ArgumentParser(description='Tater')
parser.add_argument('--fatal', action='store_true')
parser.add_argument('--critical', action='store_true')
parser.add_argument('--warning', action='store_true')
parser.add_argument('--error', action='store_true')
parser.add_argument('--info', action='store_true')
parser.add_argument('--debug', action='store_true')
args, unknown = parser.parse_known_args()
loglevels = []
for level in 'critical info debug error fatal warning'.split():
if getattr(args, level):
loglevel = getattr(logging, level.upper())
loglevels.append(loglevel)
if loglevels:
LOGLEVEL = max(loglevels)
| import os
import logging
LOG_MSG_MAXWIDTH = 300
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'form at': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'tater.utils.ansiterm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'tater': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': False
},
'tater.Lexer': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': False
},
},
}
LOGLEVEL = None
if not os.environ.get('TATER_IGNORE_COMMANDLINE'):
import argparse
parser = argparse.ArgumentParser(description='Tater')
parser.add_argument('--fatal', action='store_true')
parser.add_argument('--critical', action='store_true')
parser.add_argument('--warning', action='store_true')
parser.add_argument('--error', action='store_true')
parser.add_argument('--info', action='store_true')
parser.add_argument('--debug', action='store_true')
args, unknown = parser.parse_known_args()
loglevels = []
for level in 'critical info debug error fatal warning'.split():
if getattr(args, level):
loglevel = getattr(logging, level.upper())
loglevels.append(loglevel)
if loglevels:
LOGLEVEL = max(loglevels)
| bsd-3-clause | Python |
860215950666e283126b934011b79e23e88ed16b | Fix filter not being optional, display job offers count | esabouraud/scripts,esabouraud/scripts | jsonjobs/jobs/__main__.py | jsonjobs/jobs/__main__.py | """Get jobs offer from json stream"""
import re
import argparse
import json
import urllib3
urllib3.disable_warnings()
def get_jobs_json(url):
"""Get jobs json payload"""
http = urllib3.PoolManager()
req = http.request("GET", url)
return req.data
def display_jobs(jobs_json, joburl_prefix, filter_rx):
"""Filter and display job offers"""
jobs = json.loads(jobs_json)
selected_jobs = [job for job in jobs["items"] if not filter_rx or not filter_rx.search(json.dumps(job))]
print("%d job offers selected:" % len(selected_jobs))
for job in selected_jobs:
print("\t%s - %s => %s%s" % (job["title"], job["fulllocation"], joburl_prefix, job["id"]))
def main():
"""Run script"""
parser = argparse.ArgumentParser(
prog="jobs", description="jobs filter")
parser.add_argument(
"-f", "--filter", dest="filter", default=None, help="Exclude jobs that match this regexp")
parser.add_argument(
dest="url", default=None, help="URL for jobs json stream")
parser.add_argument(
dest="joburl_prefix", default=None, help="URL prefix for outgoing link")
options = parser.parse_args()
print("Fetching job offers from: %s" % options.url)
jobs_json = get_jobs_json(options.url)
if options.filter:
filter_rx = re.compile(options.filter)
else:
filter_rx = None
display_jobs(jobs_json, options.joburl_prefix, filter_rx)
if __name__ == "__main__":
main()
| """Get jobs offer from json stream"""
import re
import argparse
import json
import urllib3
urllib3.disable_warnings()
def get_jobs_json(url):
"""Get jobs json payload"""
http = urllib3.PoolManager()
req = http.request("GET", url)
return req.data
def display_jobs(jobs_json, joburl_prefix, filter_rx):
"""Filter and display job offers"""
jobs = json.loads(jobs_json)
for job in jobs["items"]:
match = filter_rx.search(json.dumps(job))
if not match:
print("%s - %s => %s%s" % (job["title"], job["fulllocation"], joburl_prefix, job["id"]))
def main():
"""Run script"""
parser = argparse.ArgumentParser(
prog="jobs", description="jobs filter")
parser.add_argument(
"-f", "--filter", dest="filter", default=None, help="Exclude jobs that match this regexp")
parser.add_argument(
dest="url", default=None, help="URL for jobs json stream")
parser.add_argument(
dest="joburl_prefix", default=None, help="URL prefix for outgoing link")
options = parser.parse_args()
print("Fetching jobs from: %s" % options.url)
jobs_json = get_jobs_json(options.url)
if options.filter:
filter_rx = re.compile(options.filter)
else:
filter_rx = None
display_jobs(jobs_json, options.joburl_prefix, filter_rx)
if __name__ == "__main__":
main()
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.