text
stringlengths 4
1.02M
| meta
dict |
|---|---|
import logging
import traceback
import json
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from helpers.exceptions import NotAuthorizedException, FailedAuthenticationException
logger = logging.getLogger(__name__)
# TODO: this middleware is the catch-all, last-resort error reporter. That is
# fine, except the raw exception stack trace is not particularly user-friendly.
# We should not depend on it too much; instead, handle as many exceptions as
# we can in the code itself and generate user-friendly messages there.
class ExceptionHandlerMiddleware(object):
    """Last-resort handler for exceptions that escape the views.

    AJAX requests get a JSON error fragment; NotAuthorizedException gets a
    dedicated page; FailedAuthenticationException flushes the session and
    redirects home; everything else gets a generic error page with the
    formatted traceback.
    """

    def process_exception(self, request, exception):
        # Lazy %-formatting: the message is only built if the record is emitted.
        logger.exception('Exception thrown when handling request %s', request)

        # Error is displayed as a fragment over the related feature area.
        if request.is_ajax():
            # str(exception) instead of exception.message: the .message
            # attribute was deprecated in Python 2.6 and removed in Python 3.
            ajax_vars = {'success': False, 'error': str(exception)}
            # content_type replaces the mimetype kwarg removed in Django 1.7.
            return HttpResponse(json.dumps(ajax_vars),
                                content_type='application/javascript')

        # Not authorized: show a friendly page instead of a stack trace.
        if isinstance(exception, NotAuthorizedException):
            return render(request, 'users/not_authorized.html', {
                "message": str(exception),
            })

        # Failed authentication: drop the session and send the user home.
        if isinstance(exception, FailedAuthenticationException):
            request.session.modified = True
            request.session.flush()
            return HttpResponseRedirect("/")

        # Catch-all error page including the traceback for debugging.
        return render(request, 'error.html', {
            'message': str(exception),
            'stacktrace': traceback.format_exc(),
        })
|
{
"content_hash": "f117ba77048ad59afb4f2494455a7870",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 89,
"avg_line_length": 42.78947368421053,
"alnum_prop": 0.6703567035670357,
"repo_name": "brennentsmith/teletraan",
"id": "6f67094403821f2fa90aea847e1eafe5f7dbbca9",
"size": "2206",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "deploy-board/deploy_board/webapp/error_views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "158864"
},
{
"name": "Dockerfile",
"bytes": "723"
},
{
"name": "HTML",
"bytes": "364998"
},
{
"name": "Java",
"bytes": "1183331"
},
{
"name": "JavaScript",
"bytes": "2563404"
},
{
"name": "Makefile",
"bytes": "185"
},
{
"name": "Python",
"bytes": "719124"
},
{
"name": "Ruby",
"bytes": "1124"
},
{
"name": "Shell",
"bytes": "21183"
}
],
"symlink_target": ""
}
|
"""
LOMAP: fingerprint calculations
=====
Alchemical free energy calculations hold increasing promise as an aid to drug
discovery efforts. However, applications of these techniques in discovery
projects have been relatively few, partly because of the difficulty of planning
and setting up calculations. The Lead Optimization Mapper (LOMAP) is an
automated algorithm to plan efficient relative free energy calculations between
potential ligands within a substantial set of compounds.
"""
# *****************************************************************************
# Lomap2: A toolkit to plan alchemical relative binding affinity calculations
# Copyright 2015 - 2016 UC Irvine and the Authors
#
# Authors: Dr Gaetano Calabro' and Dr David Mobley
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see http://www.gnu.org/licenses/
# *****************************************************************************
# ****************
# MODULE IMPORTS
# ****************
from rdkit import Chem
from rdkit.Chem import rdFMCS
from rdkit.Chem import AllChem
from rdkit.Chem.Draw.MolDrawing import DrawingOptions
from rdkit.Chem import Draw
from rdkit import DataStructs
from rdkit.Chem.Fingerprints import FingerprintMols
import sys
import math
from rdkit import RDLogger
import logging
import argparse
# *******************************
# Figureprint Class
# *******************************

# Export the actual class name. The previous value 'FIGUREPRINT' did not match
# any defined name, so `from lomap.fp import *` raised AttributeError.
__all__ = ['Figureprint']
class Figureprint(object):
    """
    Computes RDKit topological fingerprints for a pair of molecules and
    scores their similarity with the Tanimoto coefficient.
    """

    def __init__(self, moli, molj, options=None):
        """
        Initialization function

        Parameters
        ----------
        moli : RDKit molecule object
            the first molecule used to perform the Figureprint calculation
        molj : RDKit molecule object
            the second molecule used to perform the Figureprint calculation
        options : argparse python object, optional
            the list of user options. Previously this was referenced without
            being a parameter, which raised NameError at run time; it is now
            an optional argument (None keeps RDKit quiet, as before).
        """
        # Set logging level and format
        logging.basicConfig(format='%(levelname)s:\t%(message)s', level=logging.INFO)

        # Local pointers to the passed molecules
        self.moli = moli
        self.molj = molj

        # Silence RDKit unless the user explicitly asked for pedantic output.
        if options is None or options.verbose != 'pedantic':
            lg = RDLogger.logger()
            lg.setLevel(RDLogger.CRITICAL)

        # Fingerprints of both molecules and their Tanimoto similarity.
        self.fps_moli = FingerprintMols.FingerprintMol(self.moli)
        self.fps_molj = FingerprintMols.FingerprintMol(self.molj)
        self.fps_tan = DataStructs.FingerprintSimilarity(self.fps_moli, self.fps_molj)

    def get_fps_tan(self):
        """Return the Tanimoto similarity between the two fingerprints."""
        return self.fps_tan
|
{
"content_hash": "d9d07da7132486a1e11059ec3bd7bbd4",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 86,
"avg_line_length": 31.95098039215686,
"alnum_prop": 0.6655415771709113,
"repo_name": "MobleyLab/Lomap",
"id": "eafce410e5070fc9d13230b33fd6097ed31201cf",
"size": "3321",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "lomap/fp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "274742"
},
{
"name": "Shell",
"bytes": "534"
}
],
"symlink_target": ""
}
|
import sys
import atexit
import logging
import json
import time
from kafka import KafkaProducer
from kafka.errors import KafkaError, KafkaTimeoutError
from pyspark import SparkContext # how to talk to spark
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils
logger_format = "%(asctime)-15s %(message)s"
logging.basicConfig(format=logger_format)
logger = logging.getLogger('stream-processing')
logger.setLevel(logging.INFO)
topic = ""
new_topic = ""
kafka_broker = ""
kafka_producer = ""
def shutdown_hook(producer):
    """Flush pending messages and close the Kafka producer at process exit.

    :param producer: the KafkaProducer instance to flush and close.
    """
    try:
        logger.info('flush pending messages to kafka')
        producer.flush(10)
        logger.info('finish flushing pending messages')
    except KafkaError:
        # Was `except kafkaError` - a NameError: the imported name is KafkaError.
        logger.warning('Failed to flush pending messages to kafka')
    finally:
        try:
            producer.close(10)
        except Exception:
            logger.warning('Failed to close kafka connection')
def process(timeobj, rdd):
    """Compute the average LastTradePrice over one RDD batch and publish it.

    Each record is a (key, value) pair whose value is UTF-8 JSON: an array
    whose first element carries 'LastTradePrice'. Empty batches are skipped.
    Publishes {timestamp, average} to `new_topic` via `kafka_producer`.
    """
    # - calculate the average
    num_of_records = rdd.count()
    if num_of_records == 0:
        return
    price_sum = rdd.map(
        lambda record: float(
            json.loads(record[1].decode('utf-8'))[0].get('LastTradePrice'))
    ).reduce(lambda a, b: a + b)
    average = price_sum / num_of_records
    logger.info('Received %d records from Kafka, average price is %f',
                num_of_records, average)

    # - write back to kafka as {timestamp, average}. Encode to bytes: the
    # producer has no value_serializer, and kafka-python requires bytes
    # values on Python 3 (str works on Python 2 only because str is bytes).
    data = json.dumps({
        'timestamp': time.time(),
        'average': average
    })
    kafka_producer.send(new_topic, value=data.encode('utf-8'))
if __name__ == "__main__":
    # Expect: stream-processing [topic] [new topic] [kafka-broker]
    if len(sys.argv) != 4:
        print('Usage: stream-processing [topic] [new topic] [kafka-broker]')
        exit(1)
    topic, new_topic, kafka_broker = sys.argv[1:]

    # - setup connection to the spark cluster
    # local[x] - x is the number of worker cores to use
    sc = SparkContext("local[2]", "StockAveragePrice")
    sc.setLogLevel('ERROR')

    # StreamingContext(sc, x) - deliver a micro-batch every x seconds
    ssc = StreamingContext(sc, 5)

    # - create a data stream from spark
    # we could plug in our own kafka consumer here, but that is not
    # recommended because of the additional layer it introduces
    directKafkaStream = KafkaUtils.createDirectStream(ssc, [topic], {'metadata.broker.list':kafka_broker})

    # - for each RDD, compute and publish the batch average (action)
    directKafkaStream.foreachRDD(process)

    # - instantiate the kafka producer used by process() to write back
    kafka_producer = KafkaProducer(bootstrap_servers=kafka_broker)

    # - register a shutdown hook so pending messages are flushed at exit
    atexit.register(shutdown_hook, kafka_producer)

    ssc.start()
    ssc.awaitTermination()
|
{
"content_hash": "5e307bce44488a2acd08793bdbd3b9f1",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 131,
"avg_line_length": 27.573033707865168,
"alnum_prop": 0.7294213528932355,
"repo_name": "samli6479/bigdata",
"id": "8e33944434700ee5f105bf03250a7274ea89e3db",
"size": "2559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stream-processing.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "446"
},
{
"name": "JavaScript",
"bytes": "1966"
},
{
"name": "Python",
"bytes": "14446"
},
{
"name": "Shell",
"bytes": "1306"
}
],
"symlink_target": ""
}
|
from django.db import models
class School(models.Model):
    # Display name; Child.school points here.
    name = models.CharField(max_length=100)
class Parent(models.Model):
    # Display name; Child has two ForeignKeys (mother/father) to this model.
    name = models.CharField(max_length=100)
class Child(models.Model):
    # Two ForeignKeys to the same model: related_name disambiguates the
    # reverse accessors, and the ambiguity forces callers of
    # inlineformset_factory to pass fk_name (exercised by the doctests below).
    mother = models.ForeignKey(Parent, related_name='mothers_children')
    father = models.ForeignKey(Parent, related_name='fathers_children')
    school = models.ForeignKey(School)
    name = models.CharField(max_length=100)
class Poet(models.Model):
    name = models.CharField(max_length=100)

    def __unicode__(self):
        # Python 2 string representation (this is legacy Django test code).
        return self.name
class Poem(models.Model):
    poet = models.ForeignKey(Poet)
    name = models.CharField(max_length=100)

    def __unicode__(self):
        # Python 2 string representation (this is legacy Django test code).
        return self.name
# Doctest suite executed by Django's test runner. The string content is
# runtime data consumed by doctest, so it is reproduced verbatim.
__test__ = {'API_TESTS': """
>>> from django.forms.models import inlineformset_factory
Child has two ForeignKeys to Parent, so if we don't specify which one to use
for the inline formset, we should get an exception.
>>> ifs = inlineformset_factory(Parent, Child)
Traceback (most recent call last):
...
Exception: <class 'regressiontests.inline_formsets.models.Child'> has more than 1 ForeignKey to <class 'regressiontests.inline_formsets.models.Parent'>
These two should both work without a problem.
>>> ifs = inlineformset_factory(Parent, Child, fk_name='mother')
>>> ifs = inlineformset_factory(Parent, Child, fk_name='father')
If we specify fk_name, but it isn't a ForeignKey from the child model to the
parent model, we should get an exception.
>>> ifs = inlineformset_factory(Parent, Child, fk_name='school')
Traceback (most recent call last):
...
Exception: fk_name 'school' is not a ForeignKey to <class 'regressiontests.inline_formsets.models.Parent'>
If the field specified in fk_name is not a ForeignKey, we should get an
exception.
>>> ifs = inlineformset_factory(Parent, Child, fk_name='test')
Traceback (most recent call last):
...
Exception: <class 'regressiontests.inline_formsets.models.Child'> has no field named 'test'
# Regression test for #9171.
>>> ifs = inlineformset_factory(Parent, Child, exclude=('school',), fk_name='mother')
"""
}
|
{
"content_hash": "4198e530e3ef9f33bfde217721b1a1cc",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 151,
"avg_line_length": 30.420289855072465,
"alnum_prop": 0.7208194378275369,
"repo_name": "aprefontaine/TMScheduler",
"id": "9b1f8b4932f3b8018b1a8e49b6a51630344cc6d2",
"size": "2115",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "tests/regressiontests/inline_formsets/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "589667"
},
{
"name": "Python",
"bytes": "5970832"
},
{
"name": "Shell",
"bytes": "3531"
}
],
"symlink_target": ""
}
|
import sys
import os
import urllib2
import traceback
import types
# Appengine monkey patch
sys.path.append(os.path.join(os.path.dirname(__file__), 'appengine-monkey'))
import appengine_monkey
# Appengine imports
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import memcache
from google.appengine.ext import db
from google.appengine.ext.webapp import template
# Django imports
from django.utils import simplejson
# Local imports
import pycmds
import main
import modules
main.CACHING = False
class PythonModule(db.Model):
    # Datastore entity keyed by module name (see get_by_key_name calls
    # below); stores that module's Python source code.
    source = db.TextProperty()
def load_source(name, source):
    """Build a fresh module object named *name* by exec'ing *source*.

    pycmds is reloaded first so command definitions are picked up fresh, and
    the new module's namespace is pre-populated with everything from the
    `modules` module before the source runs. Raises whatever the exec'd
    source raises (callers rely on that for validation).
    Python 2 only: uses the `exec ... in ...` statement and builtin reload().
    """
    reload(pycmds)
    mod = types.ModuleType(str(name))
    mod.__dict__.update(modules.__dict__)
    exec source in mod.__dict__
    return mod
class SandboxIndex(webapp.RequestHandler):
    """Landing page: lists the stored Python modules."""

    def get(self):
        stored = PythonModule.all().fetch(10)
        names = [entity.key().name() for entity in stored]
        tmpl = os.path.join(os.path.dirname(__file__), 'templates', 'sandbox.html')
        self.response.out.write(template.render(tmpl, {"mods": names}))
class ModuleEditor(webapp.RequestHandler):
    """Displays and saves the source of a single named module."""

    def get(self, name):
        # Render the editor pre-filled with the module's current source
        # (empty for a module that does not exist yet).
        mod = PythonModule.get_by_key_name(name)
        path = os.path.join(os.path.dirname(__file__), 'templates','editor.html')
        template_values = {
            "name": name,
            "source": mod.source if mod else ""
        }
        self.response.out.write(template.render(path, template_values))

    def post(self, name):
        # Normalize CRLF line endings from the browser form.
        source = self.request.get('source', '').replace("\r\n", "\n")
        mod = PythonModule.get_by_key_name(name)
        if not mod:
            mod = PythonModule(key_name=name)
        try:
            # Load the source first so broken modules are never persisted.
            load_source(name, source)
            mod.source = source
            mod.put()
            out = "Module Saved"
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed; the traceback is shown to the user by design.
            out = traceback.format_exc()
        self.response.out.write(out)
class CommandTester(main.CommandHandler):
    """Runs the command handler against a freshly loaded module."""

    def get(self, name):
        data = PythonModule.get_by_key_name(name)
        source = data.source if data else ""
        # Called for its side effects (reload of pycmds and validation of
        # the source); the returned module object was previously bound to an
        # unused local (`mod2`), now dropped.
        load_source(name, source)
        main.pycmds = pycmds
        main.CommandHandler.get(self)
# Route table: order matters - the catch-all '/(.*)' editor route must come
# after the more specific '/(.*)/commands' route.
application = webapp.WSGIApplication([
    ('/', SandboxIndex),
    ('/(.*)/commands', CommandTester),
    ('/(.*)', ModuleEditor)
], debug=True)

if __name__ == "__main__":
    run_wsgi_app(application)
|
{
"content_hash": "1b0d78142b5b840820eca0736a549a5f",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 82,
"avg_line_length": 28.693181818181817,
"alnum_prop": 0.6261386138613861,
"repo_name": "csytan/pycmds",
"id": "64f41a5920452bfb241e7c286d566a245f9c9a96",
"size": "2542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sandbox.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1SecurityContext(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Python attribute name -> swagger type name (generated).
    swagger_types = {
        'allow_privilege_escalation': 'bool',
        'capabilities': 'V1Capabilities',
        'privileged': 'bool',
        'read_only_root_filesystem': 'bool',
        'run_as_non_root': 'bool',
        'run_as_user': 'int',
        'se_linux_options': 'V1SELinuxOptions'
    }

    # Python attribute name -> JSON key in the API definition (generated).
    attribute_map = {
        'allow_privilege_escalation': 'allowPrivilegeEscalation',
        'capabilities': 'capabilities',
        'privileged': 'privileged',
        'read_only_root_filesystem': 'readOnlyRootFilesystem',
        'run_as_non_root': 'runAsNonRoot',
        'run_as_user': 'runAsUser',
        'se_linux_options': 'seLinuxOptions'
    }

    def __init__(self, allow_privilege_escalation=None, capabilities=None, privileged=None, read_only_root_filesystem=None, run_as_non_root=None, run_as_user=None, se_linux_options=None):
        """
        V1SecurityContext - a model defined in Swagger
        """
        # All backing fields start as None ("unset"); only non-None kwargs
        # are assigned, going through the generated property setters.
        self._allow_privilege_escalation = None
        self._capabilities = None
        self._privileged = None
        self._read_only_root_filesystem = None
        self._run_as_non_root = None
        self._run_as_user = None
        self._se_linux_options = None
        self.discriminator = None

        if allow_privilege_escalation is not None:
            self.allow_privilege_escalation = allow_privilege_escalation
        if capabilities is not None:
            self.capabilities = capabilities
        if privileged is not None:
            self.privileged = privileged
        if read_only_root_filesystem is not None:
            self.read_only_root_filesystem = read_only_root_filesystem
        if run_as_non_root is not None:
            self.run_as_non_root = run_as_non_root
        if run_as_user is not None:
            self.run_as_user = run_as_user
        if se_linux_options is not None:
            self.se_linux_options = se_linux_options

    @property
    def allow_privilege_escalation(self):
        """
        Gets the allow_privilege_escalation of this V1SecurityContext.
        AllowPrivilegeEscalation controls whether a process can gain more privileges than its parent process. This bool directly controls if the no_new_privs flag will be set on the container process. AllowPrivilegeEscalation is true always when the container is: 1) run as Privileged 2) has CAP_SYS_ADMIN

        :return: The allow_privilege_escalation of this V1SecurityContext.
        :rtype: bool
        """
        return self._allow_privilege_escalation

    @allow_privilege_escalation.setter
    def allow_privilege_escalation(self, allow_privilege_escalation):
        """
        Sets the allow_privilege_escalation of this V1SecurityContext.
        AllowPrivilegeEscalation controls whether a process can gain more privileges than its parent process. This bool directly controls if the no_new_privs flag will be set on the container process. AllowPrivilegeEscalation is true always when the container is: 1) run as Privileged 2) has CAP_SYS_ADMIN

        :param allow_privilege_escalation: The allow_privilege_escalation of this V1SecurityContext.
        :type: bool
        """
        self._allow_privilege_escalation = allow_privilege_escalation

    @property
    def capabilities(self):
        """
        Gets the capabilities of this V1SecurityContext.
        The capabilities to add/drop when running containers. Defaults to the default set of capabilities granted by the container runtime.

        :return: The capabilities of this V1SecurityContext.
        :rtype: V1Capabilities
        """
        return self._capabilities

    @capabilities.setter
    def capabilities(self, capabilities):
        """
        Sets the capabilities of this V1SecurityContext.
        The capabilities to add/drop when running containers. Defaults to the default set of capabilities granted by the container runtime.

        :param capabilities: The capabilities of this V1SecurityContext.
        :type: V1Capabilities
        """
        self._capabilities = capabilities

    @property
    def privileged(self):
        """
        Gets the privileged of this V1SecurityContext.
        Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false.

        :return: The privileged of this V1SecurityContext.
        :rtype: bool
        """
        return self._privileged

    @privileged.setter
    def privileged(self, privileged):
        """
        Sets the privileged of this V1SecurityContext.
        Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false.

        :param privileged: The privileged of this V1SecurityContext.
        :type: bool
        """
        self._privileged = privileged

    @property
    def read_only_root_filesystem(self):
        """
        Gets the read_only_root_filesystem of this V1SecurityContext.
        Whether this container has a read-only root filesystem. Default is false.

        :return: The read_only_root_filesystem of this V1SecurityContext.
        :rtype: bool
        """
        return self._read_only_root_filesystem

    @read_only_root_filesystem.setter
    def read_only_root_filesystem(self, read_only_root_filesystem):
        """
        Sets the read_only_root_filesystem of this V1SecurityContext.
        Whether this container has a read-only root filesystem. Default is false.

        :param read_only_root_filesystem: The read_only_root_filesystem of this V1SecurityContext.
        :type: bool
        """
        self._read_only_root_filesystem = read_only_root_filesystem

    @property
    def run_as_non_root(self):
        """
        Gets the run_as_non_root of this V1SecurityContext.
        Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.

        :return: The run_as_non_root of this V1SecurityContext.
        :rtype: bool
        """
        return self._run_as_non_root

    @run_as_non_root.setter
    def run_as_non_root(self, run_as_non_root):
        """
        Sets the run_as_non_root of this V1SecurityContext.
        Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.

        :param run_as_non_root: The run_as_non_root of this V1SecurityContext.
        :type: bool
        """
        self._run_as_non_root = run_as_non_root

    @property
    def run_as_user(self):
        """
        Gets the run_as_user of this V1SecurityContext.
        The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.

        :return: The run_as_user of this V1SecurityContext.
        :rtype: int
        """
        return self._run_as_user

    @run_as_user.setter
    def run_as_user(self, run_as_user):
        """
        Sets the run_as_user of this V1SecurityContext.
        The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.

        :param run_as_user: The run_as_user of this V1SecurityContext.
        :type: int
        """
        self._run_as_user = run_as_user

    @property
    def se_linux_options(self):
        """
        Gets the se_linux_options of this V1SecurityContext.
        The SELinux context to be applied to the container. If unspecified, the container runtime will allocate a random SELinux context for each container. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.

        :return: The se_linux_options of this V1SecurityContext.
        :rtype: V1SELinuxOptions
        """
        return self._se_linux_options

    @se_linux_options.setter
    def se_linux_options(self, se_linux_options):
        """
        Sets the se_linux_options of this V1SecurityContext.
        The SELinux context to be applied to the container. If unspecified, the container runtime will allocate a random SELinux context for each container. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.

        :param se_linux_options: The se_linux_options of this V1SecurityContext.
        :type: V1SELinuxOptions
        """
        self._se_linux_options = se_linux_options

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        # Recursively serialize nested generated models (anything exposing
        # to_dict) inside lists and dicts; plain values pass through.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Attribute-wise comparison via __dict__; type must match exactly.
        if not isinstance(other, V1SecurityContext):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
{
"content_hash": "4427ef59b8c47e84abb3f545b55b53e6",
"timestamp": "",
"source": "github",
"line_count": 292,
"max_line_length": 421,
"avg_line_length": 39.5513698630137,
"alnum_prop": 0.6511386267209283,
"repo_name": "mbohlool/client-python",
"id": "56cef94e407f5ea521529c7d75ed8da16a530fb0",
"size": "11566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v1_security_context.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8417639"
},
{
"name": "Shell",
"bytes": "16830"
}
],
"symlink_target": ""
}
|
import logging
from hypnotoad.core import plugin
LOG = logging.getLogger('root')
class disk_model_plugin(plugin.data_model_plugin):
    """Data-model plugin skeleton that will report disk information."""

    def setup(self, config, model_version):
        """Called before the plugin is asked to do anything."""
        if config.getboolean('Data Model Options', 'disk_model_plugin_enabled'):
            self.plugin_enabled = True
            LOG.debug("Disk model plugin enabled")
            self.config = config
            self.model_version = model_version
        else:
            self.plugin_enabled = False

    def teardown(self):
        """Called to allow the plugin to free anything."""
        if self.plugin_enabled:
            LOG.debug("Got to disk model plugin teardown")
            # Bug fix: this plugin never creates an LDAP context (the
            # unconditional self.ldap_ctx.unbind_s() looks copied from an
            # LDAP plugin) and raised AttributeError. Unbind only if a
            # context was actually attached.
            ldap_ctx = getattr(self, 'ldap_ctx', None)
            if ldap_ctx is not None:
                ldap_ctx.unbind_s()

    def get_model(self):
        """Look up information in this data model; returns a list of entries."""
        model = []
        if self.plugin_enabled:
            LOG.debug("Got to disk model plugin get_model")
        return model

# EOF
|
{
"content_hash": "0314d66755a831ceffb09cddadbfb8b5",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 80,
"avg_line_length": 25.307692307692307,
"alnum_prop": 0.6058763931104356,
"repo_name": "hpc/hypnotoad",
"id": "60cb54abb545fef101dfa37c3c6d60ec151e142f",
"size": "1040",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hypnotoad/plugins/datamodels/disk_model/disk_model_plugin.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "143307"
}
],
"symlink_target": ""
}
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Textfont(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "scattergl"
_path_str = "scattergl.textfont"
_valid_props = {"color", "colorsrc", "family", "familysrc", "size", "sizesrc"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `color`.
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on Chart Studio Cloud for `family`.
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on Chart Studio Cloud for `size`.
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on Chart Studio Cloud for
`color`.
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
`family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud for
`size`.
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs,
):
"""
Construct a new Textfont object
Sets the text font.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.scattergl.Textfont`
color
colorsrc
Sets the source reference on Chart Studio Cloud for
`color`.
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
`family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud for
`size`.
Returns
-------
Textfont
"""
super(Textfont, self).__init__("textfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.scattergl.Textfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scattergl.Textfont`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("colorsrc", None)
_v = colorsrc if colorsrc is not None else _v
if _v is not None:
self["colorsrc"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("familysrc", None)
_v = familysrc if familysrc is not None else _v
if _v is not None:
self["familysrc"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
_v = arg.pop("sizesrc", None)
_v = sizesrc if sizesrc is not None else _v
if _v is not None:
self["sizesrc"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
|
{
"content_hash": "d51a92e04f82bd3a186c9cf5889484e5",
"timestamp": "",
"source": "github",
"line_count": 330,
"max_line_length": 82,
"avg_line_length": 33.833333333333336,
"alnum_prop": 0.5516345723242275,
"repo_name": "plotly/plotly.py",
"id": "f4de12c9947f3d06a7701d9f7805c908b2fd8229",
"size": "11165",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/graph_objs/scattergl/_textfont.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
"""
It creates an opennlp formatted POS dataset from ancora-dep corpus
Rodrigo Agerri (rodrigo.agerri@ehu.es)
19/0/2013
"""
import argparse
import os
import re
def clean_wsj(infile):
    """Convert CoNLL-style dependency lines into space-separated word_POS pairs.

    Comment lines (starting with '#') are dropped.  Each remaining token line
    contributes one "form_tag" pair (columns 1 and 3), with literal
    underscores escaped as '#'.  Blank lines mark sentence boundaries and
    survive as newlines, yielding one sentence per output line.
    """
    pairs = []
    for raw in infile:
        if raw.startswith("#"):
            continue
        # Columns become tab-separated; the padding appended after the
        # newline guarantees at least four fields even on blank
        # (sentence-boundary) lines.
        padded = raw.replace(" ", "\t").replace("\n", "\n\t\n\t\n\t\n")
        padded = padded.replace("__elliptic__", "").replace("_", "#")
        # No-op after the '_' -> '#' replacement; kept from the original.
        padded = re.sub("#_", "", padded)
        fields = padded.split("\t")
        pair = "_".join([fields[1], fields[3]])
        # Blank lines reduce to "\n_\n"; collapse them to a bare newline.
        pairs.append(pair.replace("\n_\n", "\n"))
    joined = " ".join(pairs)
    # Drop the separator space that `join` inserts after each newline.
    return re.sub(r'\n ', '\n', joined)
def main(arguments):
    """Clean every input file and write the converted output next to it.

    ``arguments.file`` is a list of file paths; ``arguments.dir`` optionally
    adds every document found (recursively) under the given directories.
    Output goes to ``<basename><ext>`` when ``--ext`` is given, otherwise
    to ``<input>.open``.
    """
    if arguments.dir:
        for folder in arguments.dir:
            print("processing folder: {0} ...".format(folder))
            for dirpath, dirs, docs in sorted(os.walk(folder)):
                for doc in sorted(docs):
                    arguments.file.append(os.path.join(dirpath, doc))
    for elem in arguments.file:
        pathname, basename = os.path.split(elem)
        basefile, extension = os.path.splitext(basename)
        print("processing file: {0} ...".format(elem))
        # Close handles deterministically (the original leaked both files).
        with open(elem, 'r') as inhandle:
            infile = inhandle.readlines()
        cleaned = clean_wsj(infile)
        if arguments.ext:
            # --ext is collected with nargs='*', so it is a list; use the
            # first entry as the output suffix (the original concatenated
            # str + list, a TypeError).
            outname = basefile + arguments.ext[0]
        else:
            outname = elem + '.open'
        with open(outname, 'w') as outfile:
            # Write the whole string at once (the original iterated it
            # character by character).
            outfile.write(cleaned)
    print("END")
# Command-line entry point: positional input files plus optional --dir / --ext.
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="creates opennlp POS dataset from WSJ pos corpus")
    parser.add_argument('file',nargs='*',help="processes one or more files given as positional argument")
    parser.add_argument('--dir',nargs='*',help="processes one or more dirs listed after --dir")
    # NOTE(review): nargs="*" makes --ext a list even for a single value.
    parser.add_argument('--ext',nargs="*",help="specify the extensions of files to be processed: .txt, .csv ...")
    parsed_arguments = parser.parse_args()
    main(parsed_arguments)
|
{
"content_hash": "53a8e4a578c4129bc36884510b29a632",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 113,
"avg_line_length": 36.84848484848485,
"alnum_prop": 0.5851151315789473,
"repo_name": "ragerri/ixa-pipe-convert",
"id": "a6832ea96fdd65a5e821a2b09624545ea976c630",
"size": "2479",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/ancora-dep2pos.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "182909"
},
{
"name": "Python",
"bytes": "14911"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.contrib.auth.decorators import permission_required, login_required
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.urls import reverse
from djshop.apps.base import views as base_views
from djshop.apps.store.forms.products import NewProductForm, EditProductForm, DeleteProductForm
from djshop.apps.store.models import Product
# List of products
@login_required
def index(request):
    """Render the product list, ordered alphabetically by name."""
    context = {"products": Product.objects.all().order_by("name")}
    return render(request, "store/products/index.html", context)
# View a product
@login_required
def view(request, product_id):
    """Render the detail page for one product (404 when it does not exist)."""
    product = get_object_or_404(Product, id=product_id)
    return render(request, "store/products/view.html", {"product": product})
# New product
@login_required
def new(request):
    """Show/process the creation form for a product owned by the current user."""
    unsaved_product = Product(creator=request.user)
    return base_views.edit(
        request,
        instance=unsaved_product,
        form_class=NewProductForm,
        template_path="store/products/new.html",
        ok_url=reverse("store:view_products"),
    )
# Edition of product
@login_required
def edit(request, product_id):
    """Show/process the edit form for an existing product (404 when missing)."""
    product = get_object_or_404(Product, id=product_id)
    return base_views.edit(
        request,
        instance=product,
        form_class=EditProductForm,
        template_path="store/products/edit.html",
        ok_url=reverse("store:view_products"),
    )
# Delete a product
@login_required
def delete(request, product_id):
    """Delete the product identified by ``product_id`` (404 when missing)."""
    return base_views.delete(
        request, instance=get_object_or_404(Product, id=product_id)
    )
|
{
"content_hash": "65bdc9b5f29bdb78e8937f21bb5bc7af",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 107,
"avg_line_length": 32.15094339622642,
"alnum_prop": 0.7288732394366197,
"repo_name": "diegojromerolopez/djshop",
"id": "df02a4cab971ad52e928f771117f64aca15886f5",
"size": "1729",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/djshop/apps/store/views/products.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "31194"
},
{
"name": "JavaScript",
"bytes": "778"
},
{
"name": "Python",
"bytes": "95348"
},
{
"name": "Shell",
"bytes": "90"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from __future__ import absolute_import
from yarom import yarom_import
import rdflib as R
from .DataTestTemplate import _DataTest
DataObject = yarom_import('PyOpenWorm.dataObject.DataObject')
class SimplePropertyTest(_DataTest):
    """Checks that property identifiers are stable: two properties with the
    same name attached to objects with the same ident must produce equal
    identifiers, regardless of the values set or their insertion order."""

    ctx_classes = (DataObject,)

    # XXX: auto generate some of these tests...
    def test_same_value_same_id_empty(self):
        # Same ident + same property name => same property identifier,
        # even before any value is set.
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        do1 = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        c = DataObject.DatatypeProperty("boots", do)
        c1 = DataObject.DatatypeProperty("boots", do1)
        self.assertEqual(c.identifier, c1.identifier)

    def test_same_value_same_id_not_empty(self):
        # Setting the same value on both sides must not change the ids.
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        do1 = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        c = DataObject.DatatypeProperty("boots", do)
        c1 = DataObject.DatatypeProperty("boots", do1)
        do.boots('partition')
        do1.boots('partition')
        self.assertEqual(c.identifier, c1.identifier)

    def test_same_value_same_id_not_empty_object_property(self):
        # As above, but with object-valued (rather than datatype) properties.
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        do1 = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        dz = self.ctx.DataObject(ident=R.URIRef("http://example.org/vip"))
        dz1 = self.ctx.DataObject(ident=R.URIRef("http://example.org/vip"))
        c = DataObject.ObjectProperty("boots", do)
        c1 = DataObject.ObjectProperty("boots", do1)
        do.boots(dz)
        do1.boots(dz1)
        self.assertEqual(c.identifier, c1.identifier)

    def test_diff_value_diff_id_equal(self):
        # Different values on the two sides still yield equal identifiers.
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        do1 = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        c = DataObject.DatatypeProperty("boots", do)
        c1 = DataObject.DatatypeProperty("boots", do1)
        do.boots('join')
        do1.boots('partition')
        self.assertEqual(c.identifier, c1.identifier)

    def test_diff_prop_same_name_same_object_same_value_same_id(self):
        # Two distinct property objects on the SAME owner, same value.
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        c = DataObject.DatatypeProperty("boots", do)
        c1 = DataObject.DatatypeProperty("boots", do)
        c('join')
        c1('join')
        self.assertEqual(c.identifier, c1.identifier)

    def test_diff_prop_same_name_same_object_diff_value_same_id(self):
        # Two distinct property objects on the SAME owner, different values.
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        c = DataObject.DatatypeProperty("boots", do)
        c1 = DataObject.DatatypeProperty("boots", do)
        c('partition')
        c1('join')
        self.assertEqual(c.identifier, c1.identifier)

    def test_diff_value_insert_order_same_id(self):
        # Insertion order of multiple values must not affect the identifier.
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        do1 = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        print (list(self.context.contents_triples()))
        c = DataObject.DatatypeProperty("boots", do, multiple=True)
        c1 = DataObject.DatatypeProperty("boots", do1, multiple=True)
        do.boots('join')
        do.boots('simile')
        do.boots('partition')
        do1.boots('partition')
        do1.boots('join')
        do1.boots('simile')
        self.assertEqual(c.identifier, c1.identifier)

    def test_object_property_diff_value_insert_order_same_id(self):
        # Order-independence again, for object-valued properties.
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        do1 = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        oa = self.ctx.DataObject(ident=R.URIRef("http://example.org/a"))
        ob = self.ctx.DataObject(ident=R.URIRef("http://example.org/b"))
        oc = self.ctx.DataObject(ident=R.URIRef("http://example.org/c"))
        c = DataObject.ObjectProperty("boots", do, multiple=True)
        c1 = DataObject.ObjectProperty("boots", do1, multiple=True)
        do.boots(oa)
        do.boots(ob)
        do.boots(oc)
        do1.boots(oc)
        do1.boots(oa)
        do1.boots(ob)
        self.assertEqual(c.identifier, c1.identifier)

    def test_property_get_returns_collection(self):
        """
        This is for issue #175.
        """
        # The result of a property getter is listed twice below, so it must
        # remain usable after the first traversal.
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        do.boots = DataObject.DatatypeProperty(multiple=True)
        do.boots(4)
        # self.save()
        do = self.ctx.DataObject(ident=R.URIRef("http://example.org"))
        do.boots = DataObject.DatatypeProperty(multiple=True)
        x = do.boots()
        l1 = list(x)
        print(l1)
        b = list(x)
        self.assertEqual([4], b)
class POCacheTest(_DataTest):
    """Checks that a DataObject's property-value cache (``po_cache``) is
    invalidated when the backing RDF graph changes (or is explicitly
    cleared), and reused when nothing changed."""

    ctx_classes = (DataObject,)

    def setUp(self):
        super(POCacheTest, self).setUp()
        # Seed the store with one object carrying a single property value.
        o = self.ctx.DataObject(ident=R.URIRef("http://example.org/a"))
        DataObject.DatatypeProperty("boots", o)
        o.boots('h')
        self.save()

    def test_cache_refresh_after_triple_add(self):
        # Any mutation of the graph should produce a fresh cache object.
        o = self.ctx.DataObject(ident=R.URIRef("http://example.org/a"))
        DataObject.DatatypeProperty("boots", o)
        o.boots()
        c1 = o.po_cache
        self.assertIsNotNone(c1)
        self.config['rdf.graph'].add((R.URIRef('http://example.org/a'),
                                      R.URIRef('http://bluhbluh.com'),
                                      R.URIRef('http://bluhah.com')))
        o.boots()
        self.assertIsNot(c1, o.po_cache)

    def test_cache_no_refresh_for_no_change(self):
        # With no graph mutation between reads, the cache object is reused.
        o = self.ctx.DataObject(ident=R.URIRef("http://example.org/a"))
        DataObject.DatatypeProperty("boots", o)
        o.boots()
        c1 = o.po_cache
        self.assertIsNotNone(c1)
        o.boots()
        self.assertIs(c1, o.po_cache)

    def test_cache_refresh_after_triple_remove(self):
        o = self.ctx.DataObject(ident=R.URIRef("http://example.org/a"))
        DataObject.DatatypeProperty("boots", o)
        o.boots()
        c1 = o.po_cache
        self.assertIsNotNone(c1)
        # XXX: Note that it doesn't matter if the triple was
        # actually in the graph
        self.config['rdf.graph'].remove((R.URIRef('/not/in'),
                                         R.URIRef('/the'),
                                         R.URIRef('/graph')))
        o.boots()
        self.assertIsNot(c1, o.po_cache)

    def test_cache_refresh_clear(self):
        # Explicitly clearing the cache also forces a fresh cache object.
        o = self.ctx.DataObject(ident=R.URIRef("http://example.org/a"))
        DataObject.DatatypeProperty("boots", o)
        o.boots()
        c1 = o.po_cache
        self.assertIsNotNone(c1)
        # XXX: Note that it doesn't matter if the triple was
        # actually in the graph
        o.clear_po_cache()
        o.boots()
        self.assertIsNot(c1, o.po_cache)
|
{
"content_hash": "0b3caac619d90c326da04211ac368200",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 75,
"avg_line_length": 37.88826815642458,
"alnum_prop": 0.6070480684163964,
"repo_name": "gsarma/PyOpenWorm",
"id": "7e4854f03277f7e6324e554ee2026a07966efc44",
"size": "6782",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tests/SimplePropertyTest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Prolog",
"bytes": "149462"
},
{
"name": "Python",
"bytes": "422141"
},
{
"name": "Shell",
"bytes": "493"
},
{
"name": "TeX",
"bytes": "7280"
}
],
"symlink_target": ""
}
|
# SWIG-generated import shim (do not hand-edit the logic; regeneration will
# discard changes): locate the compiled extension module next to this file
# on Python >= 2.6, falling back to a plain import.
from sys import version_info
if version_info >= (2, 6, 0):
    def swig_import_helper():
        # Look for the C extension in this package's own directory first.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SimFeatureElementSubtraction_Void_Opening', [dirname(__file__)])
        except ImportError:
            import _SimFeatureElementSubtraction_Void_Opening
            return _SimFeatureElementSubtraction_Void_Opening
        if fp is not None:
            try:
                _mod = imp.load_module('_SimFeatureElementSubtraction_Void_Opening', fp, pathname, description)
            finally:
                # Always release the file handle opened by find_module.
                fp.close()
            return _mod
    _SimFeatureElementSubtraction_Void_Opening = swig_import_helper()
    del swig_import_helper
else:
    import _SimFeatureElementSubtraction_Void_Opening
del version_info
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """SWIG attribute setter: route writes through ``__swig_setmethods__``;
    with ``static=1`` reject attributes the wrapped class does not declare."""
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Dynamic variant: allows setting attributes not declared to SWIG.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
    """SWIG attribute getter: resolve reads via ``__swig_getmethods__``."""
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    # Dynamic variant of the getter above.
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
    # Best-effort repr including the underlying SWIG proxy pointer; falls
    # back to an empty descriptor if the proxy is not yet attached.
    try:
        strthis = "proxy of " + self.this.__repr__()
    except:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Old/new-style class detection (SWIG keeps compatibility with pre-2.2
# interpreters that lack the `object` builtin).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0
# weakref may be absent on minimal interpreters; degrade to identity.
try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x
import base
class SimFeatureElementSubtraction(base.SimFeatureElement):
    """SWIG proxy for the C++ SimFeatureElementSubtraction class.

    NOTE: SWIG-generated; every method forwards to the compiled extension
    module. Do not hand-edit the logic — regeneration discards changes.
    """
    __swig_setmethods__ = {}
    for _s in [base.SimFeatureElement]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimFeatureElementSubtraction, name, value)
    __swig_getmethods__ = {}
    for _s in [base.SimFeatureElement]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimFeatureElementSubtraction, name)
    __repr__ = _swig_repr

    def Name(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Name(self, *args)

    def ObjectType(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_ObjectType(self, *args)

    def Representation(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Representation(self, *args)

    def Tag(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Tag(self, *args)

    def __init__(self, *args):
        # Adopt the freshly constructed C++ object (SWIG ownership dance).
        this = _SimFeatureElementSubtraction_Void_Opening.new_SimFeatureElementSubtraction(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction__clone(self, f, c)
    __swig_destroy__ = _SimFeatureElementSubtraction_Void_Opening.delete_SimFeatureElementSubtraction
    __del__ = lambda self: None
SimFeatureElementSubtraction_swigregister = _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_swigregister
SimFeatureElementSubtraction_swigregister(SimFeatureElementSubtraction)
class SimFeatureElementSubtraction_Void(SimFeatureElementSubtraction):
    """SWIG proxy for the C++ SimFeatureElementSubtraction_Void class
    (void/opening geometry accessors). SWIG-generated; do not hand-edit."""
    __swig_setmethods__ = {}
    for _s in [SimFeatureElementSubtraction]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimFeatureElementSubtraction_Void, name, value)
    __swig_getmethods__ = {}
    for _s in [SimFeatureElementSubtraction]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimFeatureElementSubtraction_Void, name)
    __repr__ = _swig_repr

    def OpeningHeight(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_OpeningHeight(self, *args)

    def OpeningWidth(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_OpeningWidth(self, *args)

    def OpeningDepth(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_OpeningDepth(self, *args)

    def OpeningPerimeter(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_OpeningPerimeter(self, *args)

    def OpeningArea(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_OpeningArea(self, *args)

    def OpeningVolume(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_OpeningVolume(self, *args)

    def TypeDefinition(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_TypeDefinition(self, *args)

    def Placement(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Placement(self, *args)

    def GeometricRepresentations(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_GeometricRepresentations(self, *args)

    def VoidedBldgElement(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_VoidedBldgElement(self, *args)

    def __init__(self, *args):
        # Adopt the freshly constructed C++ object (SWIG ownership dance).
        this = _SimFeatureElementSubtraction_Void_Opening.new_SimFeatureElementSubtraction_Void(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void__clone(self, f, c)
    __swig_destroy__ = _SimFeatureElementSubtraction_Void_Opening.delete_SimFeatureElementSubtraction_Void
    __del__ = lambda self: None
SimFeatureElementSubtraction_Void_swigregister = _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_swigregister
SimFeatureElementSubtraction_Void_swigregister(SimFeatureElementSubtraction_Void)
class SimFeatureElementSubtraction_Void_Opening(SimFeatureElementSubtraction_Void):
    """SWIG proxy for the concrete C++ ..._Void_Opening class; adds no
    methods beyond construction/cloning. SWIG-generated; do not hand-edit."""
    __swig_setmethods__ = {}
    for _s in [SimFeatureElementSubtraction_Void]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimFeatureElementSubtraction_Void_Opening, name, value)
    __swig_getmethods__ = {}
    for _s in [SimFeatureElementSubtraction_Void]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimFeatureElementSubtraction_Void_Opening, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        # Adopt the freshly constructed C++ object (SWIG ownership dance).
        this = _SimFeatureElementSubtraction_Void_Opening.new_SimFeatureElementSubtraction_Void_Opening(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening__clone(self, f, c)
    __swig_destroy__ = _SimFeatureElementSubtraction_Void_Opening.delete_SimFeatureElementSubtraction_Void_Opening
    __del__ = lambda self: None
SimFeatureElementSubtraction_Void_Opening_swigregister = _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_swigregister
SimFeatureElementSubtraction_Void_Opening_swigregister(SimFeatureElementSubtraction_Void_Opening)
class SimFeatureElementSubtraction_Void_Opening_sequence(base.sequence_common):
    """SWIG proxy for a C++ sequence (vector-like container) of
    SimFeatureElementSubtraction_Void_Opening elements. SWIG-generated;
    each method forwards to the extension module. Do not hand-edit."""
    __swig_setmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimFeatureElementSubtraction_Void_Opening_sequence, name, value)
    __swig_getmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimFeatureElementSubtraction_Void_Opening_sequence, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        # Adopt the freshly constructed C++ container (SWIG ownership dance).
        this = _SimFeatureElementSubtraction_Void_Opening.new_SimFeatureElementSubtraction_Void_Opening_sequence(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def assign(self, n, x):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_assign(self, n, x)

    def begin(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_begin(self, *args)

    def end(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_end(self, *args)

    def rbegin(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_rbegin(self, *args)

    def rend(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_rend(self, *args)

    def at(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_at(self, *args)

    def front(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_front(self, *args)

    def back(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_back(self, *args)

    def push_back(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_push_back(self, *args)

    def pop_back(self):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_pop_back(self)

    def detach_back(self, pop=True):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_detach_back(self, pop)

    def insert(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_insert(self, *args)

    def erase(self, *args):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_erase(self, *args)

    def detach(self, position, r, erase=True):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_detach(self, position, r, erase)

    def swap(self, x):
        return _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_swap(self, x)
    __swig_destroy__ = _SimFeatureElementSubtraction_Void_Opening.delete_SimFeatureElementSubtraction_Void_Opening_sequence
    __del__ = lambda self: None
SimFeatureElementSubtraction_Void_Opening_sequence_swigregister = _SimFeatureElementSubtraction_Void_Opening.SimFeatureElementSubtraction_Void_Opening_sequence_swigregister
SimFeatureElementSubtraction_Void_Opening_sequence_swigregister(SimFeatureElementSubtraction_Void_Opening_sequence)
# This file is compatible with both classic and new-style classes.
|
{
"content_hash": "5e7f4fcaf17264f315efdc6278815283",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 172,
"avg_line_length": 45.31046931407942,
"alnum_prop": 0.7161979125169309,
"repo_name": "EnEff-BIM/EnEffBIM-Framework",
"id": "aab6606006ae2976b2d861fc010d6e75eff42bdc",
"size": "12758",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SimModel_Python_API/simmodel_swig/Release/SimFeatureElementSubtraction_Void_Opening.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "397980994"
},
{
"name": "HTML",
"bytes": "124134"
},
{
"name": "Python",
"bytes": "2480972"
}
],
"symlink_target": ""
}
|
from MovieData import MovieData
from pymongo import MongoClient
import json
import os
client = MongoClient()
db = client['IR']
collection = db['Movies']

# Root folder containing one *.txt metadata file per movie.
directory_path = r"D:\info-ret\information-retrival-search-engine\informationRetrival\indexing\IR\IR"

for filename in os.listdir(directory_path):
    # Process only the per-movie text dumps (the original used a substring
    # test, which also matched names merely containing ".txt").
    if not filename.endswith(".txt"):
        continue
    file_path = os.path.join(directory_path, filename)
    print(file_path)
    current_doc = MovieData(file_path)
    if current_doc.data_ast is None:
        continue
    # Build the document as a plain dict. The previous code spliced values
    # into a JSON string by hand, which failed with "Decoding JSON has
    # failed" whenever a title or overview contained quotes or backslashes.
    movie = {
        "title": current_doc.get("title"),
        "overview": current_doc.get("overview"),
        "genre": current_doc.get("genres"),
    }
    print(movie)
    movies_id = collection.insert_one(movie)
|
{
"content_hash": "59f6928182133e3217fb67b26376c94d",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 145,
"avg_line_length": 33.270270270270274,
"alnum_prop": 0.620633631194151,
"repo_name": "BhavyaLight/information-retrival-search-engine",
"id": "a3dfbb19a640be5e7a255e9e00852b89a78db091",
"size": "1231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "informationRetrival/indexing/mongoimp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1953"
},
{
"name": "HTML",
"bytes": "10000"
},
{
"name": "JavaScript",
"bytes": "102"
},
{
"name": "Python",
"bytes": "60117"
},
{
"name": "Shell",
"bytes": "606"
}
],
"symlink_target": ""
}
|
import jinja2
from django_jinja import library
from django.conf import settings
from django.utils.safestring import mark_safe
from django.utils import formats
from django.utils.translation import gettext_lazy as _
from translitua import translit
from dateutil.parser import parse as dt_parse
from catalog.utils import parse_family_member
@library.global_function
def updated_querystring(request, params):
    """Return the current querystring, urlencoded, with ``params`` merged in.

    Existing occurrences of the given keys are removed first, so each key
    ends up with only its new value."""
    qs = request.GET.copy()
    for key in params:
        qs.pop(key, None)
    qs.update(params)
    return qs.urlencode()
@library.global_function
@jinja2.contextfunction
def context_or_settings(context, name):
    """Return template-context variable ``name``; when the context does not
    define it, fall back to ``settings.DEFAULT_<NAME>``."""
    if name not in context:
        return getattr(settings, "DEFAULT_" + name.upper())
    return context[name]
# Metric-suffix thresholds used by `curformat` when abbreviating large
# amounts; ordered from largest divider to smallest so the first match wins.
ranges = [
    {"divider": 1e18, "suffix": "E"},
    {"divider": 1e15, "suffix": "P"},
    {"divider": 1e12, "suffix": "T"},
    {"divider": 1e9, "suffix": "G"},
    {"divider": 1e6, "suffix": "M"},
    {"divider": 1e3, "suffix": "k"},
]
@library.filter
def curformat(value, with_suffix=False):
    """Format a currency string (comma decimal separator) with two decimals,
    a space as thousands separator, and a currency prefix detected from
    $/pound/euro symbols in the input.  With ``with_suffix=True``, large
    amounts are abbreviated using the metric suffixes in ``ranges``.

    Empty or zero values render as a placeholder dash; unparseable values
    are returned unchanged.
    """
    if not value or value == "0":
        return mark_safe('<i class="i-value-empty">β€”</i>')
    currency = ""
    if "$" in value:
        value = value.replace("$", "")
        currency = "USD "
    if "Β£" in value:
        value = value.replace("Β£", "")
        currency = "GBP "
    if "β¬" in value or "Π" in value:
        value = value.replace("β¬", "").replace("Π", "")
        currency = "EUR "
    # Parse once (the original converted the string to float up to three
    # times); comma is treated as the decimal separator.
    try:
        num = float(value.replace(",", "."))
    except ValueError:
        return value
    formatted = "{}{:,.2f}".format(currency, num)
    if with_suffix:
        for order in ranges:
            if num >= order["divider"]:
                formatted = "{}{:,.2f}{}".format(
                    currency, num / order["divider"], order["suffix"]
                )
                break
    # Swap separators: ',' thousands -> space, '.' decimal -> ','.
    return formatted.replace(",", " ").replace(".", ",")
@library.filter
def emptyformat(value):
    """Pass non-empty values through; render empty/zero as a dash placeholder."""
    if not value or value == "0":
        return mark_safe('<i class="i-value-empty">β€”</i>')
    return value
@library.filter
def date(value):
    """Format ``value`` using Django's localized DATE_FORMAT.

    Accepts datetime-like objects or strings (parsed with dateutil);
    empty values yield "".
    """
    if value in (None, ""):
        return ""
    if isinstance(value, str):
        value = dt_parse(value)
    # NOTE(review): the original had an unreachable `format(value, arg)`
    # fallback after `except AttributeError: raise`; the dead branch is
    # removed here — an AttributeError still propagates, behavior unchanged.
    return formats.date_format(value, "DATE_FORMAT")
@library.filter
def datetime(value):
    """Format ``value`` using Django's localized DATETIME_FORMAT, falling
    back to DATE_FORMAT when the time component is exactly midnight.

    Accepts datetime-like objects or strings (parsed with dateutil);
    empty values yield "".
    """
    if value in (None, ""):
        return ""
    if isinstance(value, str):
        value = dt_parse(value)
    if value.hour == 0 and value.minute == 0 and value.second == 0:
        arg = "DATE_FORMAT"
    else:
        arg = "DATETIME_FORMAT"
    # NOTE(review): the original had an unreachable `format(value, arg)`
    # fallback after `except AttributeError: raise`; the dead branch is
    # removed here — an AttributeError still propagates, behavior unchanged.
    return formats.date_format(value, arg)
@library.filter
def extract(value, key, default=0):
    """Pull ``aggregated_data[key]`` from every row in ``value``, using
    ``default`` for rows that lack the key."""
    return [row["aggregated_data"].get(key, default) for row in value]
@library.global_function
def parse_raw_family_string(family_raw):
    """Split the raw ``;``-separated family field and parse each non-empty
    chunk into a family-member structure (lazy iterator)."""
    members = (chunk for chunk in family_raw.split(";") if chunk)
    return map(parse_family_member, members)
@library.filter
def translit_to_en(value):
    # Thin wrapper over translitua.translit — presumably Ukrainian-to-Latin
    # transliteration; see the `translitua` package docs to confirm.
    return translit(value)
@library.global_function
def display_val(value):
    """Return ``value`` unchanged when truthy, otherwise an empty string."""
    return value or ""
@library.filter
def amount_format(value):
    """Render a numeric string with exactly two decimals, accepting a comma
    as the decimal separator; non-numeric input is returned untouched."""
    normalized = str(value).replace(",", ".")
    try:
        return "{:.2f}".format(float(normalized))
    except ValueError:
        return value
@library.filter
def maybe_year(value):
    """Collapse numeric strings like "2014.0" to "2014"; non-numeric input
    is returned unchanged."""
    try:
        return str(int(float(value)))
    except ValueError:
        return value
@library.filter
def extended_status(value):
    """Map numeric extended-status codes to human-readable labels: 0 renders
    as "", 1-3 map to fixed translated strings, and anything unknown or
    unparseable is returned as-is."""
    try:
        code = int(value)
    except ValueError:
        return value
    labels = {
        0: "",
        1: _("[ΠΠ΅ Π·Π°ΡΡΠΎΡΠΎΠ²ΡΡΡΡΡΡ]"),
        2: _("[ΠΠ΅ Π²ΡΠ΄ΠΎΠΌΠΎ]"),
        3: _("[Π§Π»Π΅Π½ ΡΡΠΌ'Ρ Π½Π΅ Π½Π°Π΄Π°Π² ΡΠ½ΡΠΎΡΠΌΠ°ΡΡΡ]"),
    }
    return labels.get(code, value)
|
{
"content_hash": "21adb28bc38eb100bfb8bd82bbb440d9",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 78,
"avg_line_length": 25.010362694300518,
"alnum_prop": 0.5661901802361715,
"repo_name": "dchaplinsky/declarations.com.ua",
"id": "a05bbef416c59ec0ddec6221f3efedae63095d32",
"size": "4888",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "declarations_site/catalog/templatetags/catalog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "3235"
},
{
"name": "HTML",
"bytes": "3593"
},
{
"name": "JavaScript",
"bytes": "1559768"
},
{
"name": "Jinja",
"bytes": "591048"
},
{
"name": "Python",
"bytes": "561374"
},
{
"name": "SCSS",
"bytes": "546001"
},
{
"name": "Shell",
"bytes": "1517"
}
],
"symlink_target": ""
}
|
"""
Utilities for handling ISO 8601 duration format.
"""
import re
# Raw string avoids the invalid-escape SyntaxWarning that '\d' in a plain
# string literal triggers on modern Python (3.12+).
iso_duration_re = re.compile(r'PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?$')


def parse_isoduration(duration):
    """
    Convert duration in ISO 8601 format to second(s).

    Year, Month, Week, and Day designators are not supported.
    Example: 'PT12H30M5S'

    Raises ValueError for any string not of the form PT#H#M#S.
    """
    result = iso_duration_re.match(duration)
    if not result:
        raise ValueError(_('Only ISO 8601 duration format of the form '
                           'PT#H#M#S is supported.'))
    hours, minutes, seconds = result.groups()
    total = 0
    total += 3600 * int(hours) if hours else 0
    total += 60 * int(minutes) if minutes else 0
    total += int(seconds) if seconds else 0
    return total
|
{
"content_hash": "3f310fceac896b26a231d2332c297fc9",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 71,
"avg_line_length": 26.142857142857142,
"alnum_prop": 0.5942622950819673,
"repo_name": "savi-dev/heat",
"id": "df24de72ef7242614098863d0910275698f3fd51",
"size": "1350",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "heat/common/timeutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
# NOTE(review): presumably a marker attribute read by the plugin-loading
# tests to confirm this file was imported as a plugin -- verify against
# the test harness before renaming.
a = 'py file plugin'
|
{
"content_hash": "2d8311b2ae846932e34476d9abfe441f",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 20,
"avg_line_length": 21,
"alnum_prop": 0.6190476190476191,
"repo_name": "hohehohe2/hohe2PyUtils",
"id": "7380b1e2ca4d83ed1e708b8cd1d88f6b58c4c56d",
"size": "21",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/plugins/testPlugin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "57235"
}
],
"symlink_target": ""
}
|
from .. import cubes
import numpy as np
from astropy.io import fits
from astropy import wcs
import os
def test_subimage_integ_header():
    """Check that subimage_integ produces a cutout whose header (CRPIX and
    WCS) stays consistent with the original cube's coordinate system."""
    # getting a dummy .fits file
    if not os.path.exists('foo.fits'):
        from astropy.utils.data import download_file
        # NOTE(review): db.tt is a Dropbox short-link service; this fixture
        # URL may no longer resolve -- consider hosting the file elsewhere.
        tmp_path = download_file('https://db.tt/oleS9xD6')
        try:
            os.rename(tmp_path, 'foo.fits')
        except OSError:
            # os.rename doesn't like cross-device links
            import shutil
            shutil.move(tmp_path, 'foo.fits')

    cube = fits.getdata('foo.fits')
    header = fits.getheader('foo.fits')

    # cutout center and half-widths, in pixels
    xcen, ycen = 4.5, 4.5
    xwidth, ywidth = 2.5, 2.5

    # saving results from subimage_integ:
    cutData, cutHead = cubes.subimage_integ(cube, xcen, xwidth, ycen, ywidth,
                                            vrange=(0,9), zunits='pixels',
                                            units='pixels', header=header)

    # reference pixel must shift to reflect the cutout's origin
    assert cutHead['CRPIX1'] == 7.0
    assert cutHead['CRPIX2'] == -2.0

    w1 = wcs.WCS(header)
    w2 = wcs.WCS(cutHead)

    # pixel 2,2 in the original image should be pixel 0,0 in the new one
    x1,y1,z1 = w1.wcs_pix2world(2,2,0,0)
    x2,y2 = w2.wcs_pix2world(0,0,0)

    np.testing.assert_almost_equal(x1,x2)
    np.testing.assert_almost_equal(y1,y2)
|
{
"content_hash": "27d467396c686086bdc194587c80313a",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 77,
"avg_line_length": 30.209302325581394,
"alnum_prop": 0.5896843725943033,
"repo_name": "mikelum/pyspeckit",
"id": "86f808cc1b0d1e671049be0920d6e039e89fba71",
"size": "1299",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyspeckit/cubes/tests/test_cubetools.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "53"
},
{
"name": "Python",
"bytes": "1198082"
},
{
"name": "Shell",
"bytes": "313"
}
],
"symlink_target": ""
}
|
"""
Cost-Senstive Multi-Class Active Learning
"""
import copy
import os
import numpy as np
import matplotlib
matplotlib.use('tkAgg')
import matplotlib.pyplot as plt
try:
from sklearn.model_selection import train_test_split
except ImportError:
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import StandardScaler
import sklearn.datasets
from sklearn.svm import SVR
# libact classes
from libact.base.dataset import Dataset, import_libsvm_sparse
from libact.models import SVM, LogisticRegression
from libact.query_strategies.multiclass import ActiveLearningWithCostEmbedding as ALCE
from libact.query_strategies import UncertaintySampling, RandomSampling
from libact.labelers import IdealLabeler
from libact.utils import calc_cost
def run(trn_ds, tst_ds, lbr, model, qs, quota, cost_matrix):
    """Run one active-learning experiment for ``quota`` queries.

    On each round after the first, ask the query strategy ``qs`` for a
    sample, label it with ``lbr``, add it to ``trn_ds``, retrain ``model``,
    and record the cost-sensitive error on both datasets.

    Returns a pair of 1-D ndarrays ``(C_in, C_out)`` with ``quota + 1``
    entries each (round 0 is the initial model before any query):
    in-sample and out-of-sample costs per ``cost_matrix``.
    """
    # Accumulate in plain lists: calling np.append on a growing array
    # reallocates the whole array every round (accidental O(n^2)).
    in_costs, out_costs = [], []
    for i in range(quota + 1):
        # Standard usage of libact objects
        if i > 0:
            ask_id = qs.make_query()
            lb = lbr.label(trn_ds.data[ask_id][0])
            trn_ds.update(ask_id, lb)
        model.train(trn_ds)
        trn_X, trn_y = zip(*trn_ds.get_labeled_entries())
        tst_X, tst_y = zip(*tst_ds.get_labeled_entries())
        in_costs.append(calc_cost(trn_y, model.predict(trn_X), cost_matrix))
        out_costs.append(calc_cost(tst_y, model.predict(tst_X), cost_matrix))
    return np.asarray(in_costs), np.asarray(out_costs)
def split_train_test(test_size):
    """Load the 'vehicle' dataset and prepare it for active learning.

    Returns ``(trn_ds, tst_ds, fully_labeled_trn_ds, cost_matrix)``:
    ``trn_ds`` starts with exactly one labeled example per class and the
    remainder unlabeled; ``fully_labeled_trn_ds`` is the same split with
    all labels revealed (for the ideal labeler); ``cost_matrix`` is a
    random zero-diagonal misclassification-cost matrix.
    """
    # choose a dataset with unbalanced class instances
    # data = sklearn.datasets.fetch_mldata('segment')
    data = sklearn.datasets.fetch_mldata('vehicle')
    X = StandardScaler().fit_transform(data['data'])
    target = np.unique(data['target'])
    # remap the raw labels onto the contiguous range 0..n_classes-1
    y = np.array([np.where(target == lbl)[0][0] for lbl in data['target']])

    X_trn, X_tst, y_trn, y_tst = train_test_split(
        X, y, test_size=test_size, stratify=y)

    # guarantee every class appears once among the initially labeled rows
    init_y_ind = np.array(
        [np.where(y_trn == cls)[0][0] for cls in range(len(target))])
    y_ind = np.array(
        [idx for idx in range(len(X_trn)) if idx not in init_y_ind])
    trn_ds = Dataset(
        np.vstack((X_trn[init_y_ind], X_trn[y_ind])),
        np.concatenate((y_trn[init_y_ind], [None] * (len(y_ind)))))
    tst_ds = Dataset(X_tst, y_tst)

    fully_labeled_trn_ds = Dataset(
        np.vstack((X_trn[init_y_ind], X_trn[y_ind])),
        np.concatenate((y_trn[init_y_ind], y_trn[y_ind])))

    cost_matrix = 2000. * np.random.rand(len(target), len(target))
    np.fill_diagonal(cost_matrix, 0)

    return trn_ds, tst_ds, fully_labeled_trn_ds, cost_matrix
def main():
    """Average 20 repetitions of three query strategies (uncertainty,
    random, ALCE) on a cost-sensitive multi-class task and plot the
    out-of-sample cost against the number of queries."""
    test_size = 0.25    # the percentage of samples in the dataset that will be
    # randomly selected and assigned to the test set

    result = {'E1':[], 'E2':[], 'E3':[]}
    for i in range(20):
        # fresh random split + cost matrix per repetition; deep copies so
        # each strategy queries from an identical starting dataset
        trn_ds, tst_ds, fully_labeled_trn_ds, cost_matrix = \
            split_train_test(test_size)
        trn_ds2 = copy.deepcopy(trn_ds)
        trn_ds3 = copy.deepcopy(trn_ds)
        lbr = IdealLabeler(fully_labeled_trn_ds)
        model = SVM(kernel='rbf', decision_function_shape='ovr')

        quota = 100  # number of samples to query

        qs = UncertaintySampling(
            trn_ds, method='sm', model=SVM(decision_function_shape='ovr'))
        _, E_out_1 = run(trn_ds, tst_ds, lbr, model, qs, quota, cost_matrix)
        result['E1'].append(E_out_1)

        qs2 = RandomSampling(trn_ds2)
        _, E_out_2 = run(trn_ds2, tst_ds, lbr, model, qs2, quota, cost_matrix)
        result['E2'].append(E_out_2)

        qs3 = ALCE(trn_ds3, cost_matrix, SVR())
        _, E_out_3 = run(trn_ds3, tst_ds, lbr, model, qs3, quota, cost_matrix)
        result['E3'].append(E_out_3)

    # mean learning curve across the 20 repetitions, per strategy
    E_out_1 = np.mean(result['E1'], axis=0)
    E_out_2 = np.mean(result['E2'], axis=0)
    E_out_3 = np.mean(result['E3'], axis=0)

    print("Uncertainty: ", E_out_1[::5].tolist())
    print("Random: ", E_out_2[::5].tolist())
    print("ALCE: ", E_out_3[::5].tolist())

    query_num = np.arange(0, quota + 1)
    plt.figure(figsize=(10, 8))
    plt.plot(query_num, E_out_1, 'g', label='Uncertainty sampling')
    plt.plot(query_num, E_out_2, 'k', label='Random')
    plt.plot(query_num, E_out_3, 'r', label='ALCE')
    plt.xlabel('Number of Queries')
    plt.ylabel('Error')
    plt.title('Experiment Result')
    plt.legend(
        loc='upper center', bbox_to_anchor=(0.5, -0.05), fancybox=True, ncol=5)
    plt.show()


if __name__ == '__main__':
    main()
|
{
"content_hash": "4639658eb5dc01d2081d1bcfffe303c5",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 86,
"avg_line_length": 34.285714285714285,
"alnum_prop": 0.6199561403508772,
"repo_name": "ntucllab/libact",
"id": "b6e5b63c1c6e25ad01f4401ab910900fa6219bed",
"size": "4583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/alce_plot.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "24287"
},
{
"name": "C++",
"bytes": "37716"
},
{
"name": "Python",
"bytes": "206547"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from .accounts_api import AccountsApi
from .authentication_api import AuthenticationApi
from .billing_api import BillingApi
from .bulk_envelopes_api import BulkEnvelopesApi
from .bulk_process_data_api import BulkProcessDataApi
from .bulk_process_data_send_api import BulkProcessDataSendApi
from .cloud_storage_api import CloudStorageApi
from .connect_api import ConnectApi
from .custom_tabs_api import CustomTabsApi
from .diagnostics_api import DiagnosticsApi
from .email_archive_api import EmailArchiveApi
from .envelopes_api import EnvelopesApi
from .folders_api import FoldersApi
from .groups_api import GroupsApi
from .notary_api import NotaryApi
from .organizations_api import OrganizationsApi
from .power_forms_api import PowerFormsApi
from .signing_groups_api import SigningGroupsApi
from .templates_api import TemplatesApi
from .trust_service_providers_api import TrustServiceProvidersApi
from .users_api import UsersApi
from .workspaces_api import WorkspacesApi
|
{
"content_hash": "5179c74fc76d01f02fbebbda388694f2",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 65,
"avg_line_length": 39.18518518518518,
"alnum_prop": 0.8412098298676749,
"repo_name": "docusign/docusign-python-client",
"id": "53585e54640dabd594f45849b255a0bec9706152",
"size": "1058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docusign_esign/apis/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9687716"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class ZmidValidator(_plotly_utils.basevalidators.NumberValidator):
    """Auto-generated number validator for the ``zmid`` property of
    ``contourcarpet`` traces; edits default to triggering a full ``calc``."""

    def __init__(self, plotly_name="zmid", parent_name="contourcarpet", **kwargs):
        super(ZmidValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # pop() lets callers override the generated defaults via kwargs
            edit_type=kwargs.pop("edit_type", "calc"),
            implied_edits=kwargs.pop("implied_edits", {}),
            **kwargs,
        )
|
{
"content_hash": "ecba61f1c4d5515170d5e50a6a299efb",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 82,
"avg_line_length": 37.75,
"alnum_prop": 0.6114790286975718,
"repo_name": "plotly/plotly.py",
"id": "e830931e7942755d2c7b448b4a7f62ddc050fc04",
"size": "453",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/contourcarpet/_zmid.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
"""MNIST example with static subgraph optimizations.
This is a version of the Chainer MNIST example that has been modified
to support the static subgraph optimizations feature. Note that
the code is mostly unchanged except for the addition of the
`@static_graph` decorator to the model chain's `__call__()` method.
This code is a custom loop version of train_mnist.py. That is, we train
models without using the Trainer class in chainer and instead write a
training loop that manually computes the loss of minibatches and
applies an optimizer to update the model.
"""
from __future__ import print_function
import argparse
import warnings
import numpy
import chainer
from chainer import configuration
from chainer.dataset import convert
import chainer.links as L
from chainer import serializers
import chainerx
import train_mnist
def run_train_loop(
        optimizer, train_iter, test_iter, test_count, epoch, device):
    """Train ``optimizer.target`` for ``epoch`` sweeps of ``train_iter``,
    printing mean loss/accuracy per epoch and evaluating on ``test_iter``
    after each one.

    ``test_count`` is the number of test examples (used to average the
    evaluation metrics); ``device`` is where minibatches are transferred.
    """
    model = optimizer.target

    train_count = 0
    sum_accuracy = 0
    sum_loss = 0

    while train_iter.epoch < epoch:
        batch = train_iter.next()
        x_array, t_array = convert.concat_examples(batch, device)
        x = chainer.Variable(x_array)
        t = chainer.Variable(t_array, requires_grad=False)
        # One call runs forward (filling model.loss/model.accuracy),
        # backprop, and the optimizer step.
        optimizer.update(model, x, t)
        train_count += len(t)
        sum_loss += float(model.loss.array) * len(t)
        sum_accuracy += float(model.accuracy.array) * len(t)

        if train_iter.is_new_epoch:
            print('epoch: ', train_iter.epoch)
            print('train mean loss: {}, accuracy: {}'.format(
                sum_loss / train_count, sum_accuracy / train_count))
            # evaluation
            train_count = 0
            sum_accuracy = 0
            sum_loss = 0
            # It is good practice to turn off train mode during evaluation.
            with configuration.using_config('train', False):
                for batch in test_iter:
                    x_array, t_array = convert.concat_examples(
                        batch, device)
                    x = chainer.Variable(x_array)
                    t = chainer.Variable(t_array, requires_grad=False)
                    loss = model(x, t)
                    sum_loss += float(loss.array) * len(t)
                    sum_accuracy += float(model.accuracy.array) * len(t)
            # rewind the test iterator so the next epoch can evaluate again
            test_iter.reset()
            print('test mean loss: {}, accuracy: {}'.format(
                sum_loss / test_count, sum_accuracy / test_count))
            sum_accuracy = 0
            sum_loss = 0
def main():
    """Parse CLI flags, build the model/optimizer/iterators, run the custom
    MNIST training loop, and save the trained model and optimizer state.

    On ChainerX devices the static subgraph optimization is switched off
    before running the same loop (the feature does not support ChainerX).
    """
    parser = argparse.ArgumentParser(description='Chainer example: MNIST')
    parser.add_argument('--batchsize', '-b', type=int, default=100,
                        help='Number of images in each mini-batch')
    parser.add_argument('--epoch', '-e', type=int, default=20,
                        help='Number of sweeps over the dataset to train')
    parser.add_argument('--device', '-d', type=str, default='-1',
                        help='Device specifier. Either ChainerX device '
                        'specifier or an integer. If non-negative integer, '
                        'CuPy arrays with specified device id are used. If '
                        'negative integer, NumPy arrays are used')
    parser.add_argument('--out', '-o', default='result',
                        help='Directory to output the result')
    parser.add_argument('--model', '-m', default='MLP',
                        help='Choose the model: MLP or MLPSideEffect')
    parser.add_argument('--resume', '-r', default='',
                        help='Resume the training from snapshot')
    parser.add_argument('--unit', '-u', type=int, default=1000,
                        help='Number of units')
    group = parser.add_argument_group('deprecated arguments')
    group.add_argument('--gpu', '-g', dest='device',
                       type=int, nargs='?', const=0,
                       help='GPU ID (negative value indicates CPU)')
    args = parser.parse_args()

    if chainer.get_dtype() == numpy.float16:
        warnings.warn(
            'This example may cause NaN in FP16 mode.', RuntimeWarning)

    device = chainer.get_device(args.device)

    print('Device: {}'.format(device))
    print('# unit: {}'.format(args.unit))
    print('# Minibatch-size: {}'.format(args.batchsize))
    print('# epoch: {}'.format(args.epoch))
    print('')

    device.use()

    # Set up a neural network to train
    if args.model == 'MLP':
        model = L.Classifier(train_mnist.MLP(args.unit, 10))
    elif args.model == 'MLPSideEffect':
        model = L.Classifier(train_mnist.MLPSideEffect(args.unit, 10))
    model.to_device(device)

    # Setup an optimizer
    optimizer = chainer.optimizers.Adam()
    optimizer.setup(model)

    # Load the MNIST dataset
    train, test = chainer.datasets.get_mnist()
    test_count = len(test)

    train_iter = chainer.iterators.SerialIterator(train, args.batchsize)
    test_iter = chainer.iterators.SerialIterator(
        test, args.batchsize, repeat=False, shuffle=False)

    if device.xp is not chainerx:
        run_train_loop(
            optimizer, train_iter, test_iter, test_count, args.epoch, device)
    else:
        warnings.warn(
            'Static subgraph optimization does not support ChainerX and will'
            ' be disabled.', UserWarning)
        with chainer.using_config('use_static_graph', False):
            run_train_loop(
                optimizer, train_iter, test_iter, test_count, args.epoch,
                device)

    # Save the model and the optimizer
    print('save the model')
    serializers.save_npz('mlp.model', model)
    print('save the optimizer')
    serializers.save_npz('mlp.state', optimizer)


if __name__ == '__main__':
    main()
|
{
"content_hash": "4e95c1c66f3ddd744b1fc245309a3d7a",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 77,
"avg_line_length": 37.98026315789474,
"alnum_prop": 0.605231248917374,
"repo_name": "chainer/chainer",
"id": "4a420e5175550cbe01c109faba0cb58b411c346c",
"size": "5773",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/static_graph_optimizations/mnist/train_mnist_custom_loop.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3805"
},
{
"name": "C",
"bytes": "1099"
},
{
"name": "C++",
"bytes": "1688016"
},
{
"name": "CMake",
"bytes": "51351"
},
{
"name": "Cuda",
"bytes": "191633"
},
{
"name": "Dockerfile",
"bytes": "6102"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "6431941"
},
{
"name": "Shell",
"bytes": "50151"
}
],
"symlink_target": ""
}
|
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): a real secret key is committed here; outside of a demo it
# should be loaded from the environment or a secrets store, not from
# version control.
SECRET_KEY = 'i5b8&=mr&&j@*i#mkb20!x@0^5zlfnq*xy08(w0til0xmd@w%s'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'demo.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'demo.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'webservice',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
{
"content_hash": "d72e5eb2a4ad76ed088070adf946fe08",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 127,
"avg_line_length": 34.394904458598724,
"alnum_prop": 0.6866666666666666,
"repo_name": "orangle/Django-soap-webservice",
"id": "e07cb353ae458ec786714f7280183dc7382d71f0",
"size": "5437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/demo/settings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from .module import ExplorimmoModule
# Explicit public API of the package: only the module class is exported.
__all__ = ['ExplorimmoModule']
|
{
"content_hash": "c8d5aabde1aa9c57063a03e49d19bee3",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 36,
"avg_line_length": 17.5,
"alnum_prop": 0.7285714285714285,
"repo_name": "Phyks/Flatisfy",
"id": "46d87cee8b50157a06b3255450f688d62ef8dc47",
"size": "839",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/explorimmo/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1132"
},
{
"name": "HTML",
"bytes": "1656"
},
{
"name": "JavaScript",
"bytes": "22947"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "323687"
},
{
"name": "Shell",
"bytes": "1311"
},
{
"name": "Vue",
"bytes": "45806"
}
],
"symlink_target": ""
}
|
"""
Include Commons template in home wiki.
This bot functions mainly in the en.wikipedia, because it
compares the names of articles and category in English
language (standard language in Commons). If the name of
an article in Commons will not be in English but with
redirect, this also functions.
Run:
Syntax:
python pwb.py commons_link [action] [pagegenerator]
where action can be one of these:
* pages : Run over articles, include {{commons}}
* categories : Run over categories, include {{commonscat}}
and pagegenerator can be one of these:
¶ms;
"""
#
# (C) Leonardo Gregianin, 2006
# (C) Pywikibot team, 2007-2014
#
# Distributed under the terms of the MIT license.
#
# Ported by Geoffrey "GEOFBOT" Mon for Google Code-In 2013
# User:Sn1per
#
from __future__ import unicode_literals
__version__ = '$Id$'
import re
import pywikibot
from pywikibot import textlib, pagegenerators, i18n, Bot
docuReplacements = {
'¶ms;': pagegenerators.parameterHelp,
}
class CommonsLinkBot(Bot):

    """Commons linking bot."""

    def __init__(self, generator, **kwargs):
        # 'action' ('pages' or 'categories') selects which template to add.
        self.availableOptions.update({
            'action': None,
        })
        super(CommonsLinkBot, self).__init__(**kwargs)
        self.generator = generator
        # Pages already carrying one of these templates are skipped.
        self.findTemplate = re.compile(r'\{\{[Ss]isterlinks')
        self.findTemplate2 = re.compile(r'\{\{[Cc]ommonscat')
        self.findTemplate3 = re.compile(r'\{\{[Cc]ommons')

    def run(self):
        """For each generated page, add {{commons}} (pages mode) or
        {{commonscat}} (categories mode) when a same-titled Commons page
        exists and no equivalent template is present yet."""
        if not all((self.getOption('action'), self.generator)):
            return
        catmode = (self.getOption('action') == 'categories')
        for page in self.generator:
            try:
                self.current_page = page
                commons = page.site.image_repository()
                # Look up the same title on Commons, as a Page or Category.
                commonspage = getattr(pywikibot,
                                      ('Page', 'Category')[catmode]
                                      )(commons, page.title())
                try:
                    commonspage.get(get_redirect=True)
                    pagetitle = commonspage.title(withNamespace=not catmode)
                    if page.title() == pagetitle:
                        oldText = page.get()
                        text = oldText

                        # for Commons/Commonscat template
                        s = self.findTemplate.search(text)
                        # NOTE(review): (2, 3)[catmode] selects findTemplate2
                        # ({{commonscat) in pages mode and findTemplate3
                        # ({{commons) in categories mode, which looks inverted
                        # relative to the template inserted below -- confirm
                        # against upstream pywikibot before changing.
                        s2 = getattr(self, 'findTemplate%d'
                                     % (2, 3)[catmode]).search(text)
                        if s or s2:
                            pywikibot.output(u'** Already done.')
                        else:
                            # Insert the template just before the categories.
                            cats = textlib.getCategoryLinks(text, site=page.site)
                            text = textlib.replaceCategoryLinks(
                                u'%s{{commons%s|%s}}'
                                % (text, ('', 'cat')[catmode], pagetitle),
                                cats, site=page.site)
                            comment = i18n.twtranslate(page.site,
                                                       'commons_link%s-template-added'
                                                       % ('', '-cat')[catmode])
                            try:
                                self.userPut(page, oldText, text, summary=comment)
                            except pywikibot.EditConflict:
                                pywikibot.output(
                                    u'Skipping %s because of edit conflict'
                                    % page.title())
                except pywikibot.NoPage:
                    pywikibot.output(u'%s does not exist in Commons'
                                     % page.__class__.__name__)
            except pywikibot.NoPage:
                pywikibot.output(u'Page %s does not exist' % page.title())
            except pywikibot.IsRedirectPage:
                pywikibot.output(u'Page %s is a redirect; skipping.'
                                 % page.title())
            except pywikibot.LockedPage:
                pywikibot.output(u'Page %s is locked' % page.title())
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    opts = {}
    gen_factory = pagegenerators.GeneratorFactory()

    for argument in pywikibot.handle_args(args):
        if argument in ('pages', 'categories'):
            opts['action'] = argument
        elif argument == '-always':
            opts['always'] = True
        else:
            gen_factory.handleArg(argument)

    gen = gen_factory.getCombinedGenerator()
    if 'action' in opts and gen:
        bot = CommonsLinkBot(pagegenerators.PreloadingGenerator(gen), **opts)
        bot.run()
        return True

    pywikibot.bot.suggest_help(missing_action='action' not in opts,
                               missing_generator=not gen)
    return False


if __name__ == "__main__":
    main()
|
{
"content_hash": "84f3ff8e04860703a3d1ab538f5614ff",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 86,
"avg_line_length": 34.08108108108108,
"alnum_prop": 0.5222045995241872,
"repo_name": "trishnaguha/pywikibot-core",
"id": "4320e2db71cd835a5bc4ea88441f9c540e70ae7e",
"size": "5086",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "scripts/commons_link.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "97"
},
{
"name": "Python",
"bytes": "3821251"
}
],
"symlink_target": ""
}
|
"""Convert RTG coverage statistics into a BED file of callable regions.
Provides a supplement to VCF files indicating callable regions where
a no-call indicates reference.
Defaults to requiring more than 4 reads of coverage to be callable.
"""
import sys
import gzip
def main(cov_file):
    """Convert an RTG coverage BED into a BED of callable regions.

    Reads ``cov_file`` (gzipped BED with columns
    chrom/start/end/name/coverage), merges consecutive regions on the same
    chromosome whose coverage exceeds ``min_cov`` reads, and writes them to
    ``<prefix>-callable.bed``. Lines starting with '#' are skipped.

    Fixes vs. the original: gzip is opened in text mode ("rt") so lines
    are str on Python 3 (plain gzip.open yields bytes, breaking the
    startswith/split string operations), and the final pending region is
    flushed at end of file instead of being silently dropped.
    """
    min_cov = 4
    out_file = cov_file.replace(".bed.gz", "-callable.bed")
    with gzip.open(cov_file, "rt") as in_handle:
        with open(out_file, "w") as out_handle:
            cur_chrom, cur_start, cur_end = None, None, None
            for line in in_handle:
                if line.startswith("#"):
                    continue
                chrom, start, end, _, coverage = line.strip().split()
                if int(coverage) > min_cov:
                    if chrom == cur_chrom:
                        # extend the current callable region
                        cur_end = end
                    else:
                        if cur_chrom:
                            out_handle.write("%s\t%s\t%s\n" %
                                             (cur_chrom, cur_start, cur_end))
                        cur_chrom, cur_start, cur_end = chrom, start, end
                elif cur_chrom:
                    # coverage dropped: close out the pending region
                    out_handle.write("%s\t%s\t%s\n" %
                                     (cur_chrom, cur_start, cur_end))
                    cur_chrom, cur_start, cur_end = None, None, None
            # flush the trailing region at end of file
            if cur_chrom:
                out_handle.write("%s\t%s\t%s\n" %
                                 (cur_chrom, cur_start, cur_end))
if __name__ == "__main__":
main(*sys.argv[1:])
|
{
"content_hash": "6458de742a1e9d2b70545a61ec341664",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 98,
"avg_line_length": 41,
"alnum_prop": 0.5167682926829268,
"repo_name": "gifford-lab/bcbio-nextgen",
"id": "59dd8a15d03984bd1517f6cea218275240c46cbf",
"size": "1330",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "scripts/utils/rtg_to_callable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1610466"
},
{
"name": "Ruby",
"bytes": "624"
},
{
"name": "Shell",
"bytes": "14377"
}
],
"symlink_target": ""
}
|
from chatterbot.adapters.storage import DatabaseAdapter
from pymongo import MongoClient
# Use the default host and port
client = MongoClient()
# We can also specify the host and port explicitly
#client = MongoClient('localhost', 27017)
# Specify the name of the database
db = client['test-database']
# The mongo collection of statement documents
statements = db['statements']
class MongoDatabaseAdapter(DatabaseAdapter):
    """Storage adapter backed by the module-level MongoDB ``statements``
    collection. Work in progress -- several methods reference attributes
    that are never initialized; see the review notes below."""

    def __init__(self, **kwargs):
        # Stateless: the Mongo client and collection are module globals.
        pass

    def find(self, statement):
        #def find(self, key):
        # Looks up the statement document in the module-level collection.
        return statements.find_one(statement)

    def insert(self, key, values):
        # NOTE(review): `statement` is undefined in this scope and
        # `self.statements` is never assigned (the collection lives in the
        # module-level `statements`); as written this raises at runtime.
        statement_id = self.statements.insert_one(statement).inserted_id
        return statement_id

    def update(self, key, **kwargs):
        # NOTE(review): `self.database` is never initialized on this class;
        # this body appears copied from a dict-backed adapter -- verify.
        values = self.database.data(key=key)

        # Create the statement if it doesn't exist in the database
        if not values:
            self.database[key] = {}
            values = {}

        for parameter in kwargs:
            values[parameter] = kwargs.get(parameter)

        self.database[key] = values

        return values

    def keys(self):
        # NOTE(review): also relies on the missing `self.database`.
        # The value has to be cast as a list for Python 3 compatibility
        return list(self.database[0].keys())

    def get_random(self):
        """
        Returns a random statement from the database
        """
        from random import choice
        statement = choice(self.keys())
        return {statement: self.find(statement)}
|
{
"content_hash": "fe5537ed8a28f14daf86780536a80e77",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 72,
"avg_line_length": 25.275862068965516,
"alnum_prop": 0.6466575716234653,
"repo_name": "dgoncalves1/ChatterBot",
"id": "ce46f9494bf0501b70cdd56026d0ac63ae2e6bee",
"size": "1466",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "chatterbot/adapters/storage/mongodb.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "43017"
}
],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat module for Python3.x's unittest.mock module
'''
import sys
# Python 2.7
# Note: Could use the pypi mock library on python3.x as well as python2.x. It
# is the same as the python3 stdlib mock library
try:
# Allow wildcard import because we really do want to import all of mock's
# symbols into this compat shim
# pylint: disable=wildcard-import
from unittest.mock import *
except ImportError:
# Python 2
# pylint: disable=wildcard-import
try:
from mock import *
except ImportError:
print('You need the mock library installed on python2.x to run tests')
# Prior to 3.4.4, mock_open cannot handle binary read_data
if sys.version_info >= (3,) and sys.version_info < (3, 4, 4):
    # Spec (list of file-handle attribute names) built lazily on the first
    # mock_open() call and cached at module level.
    file_spec = None

    def _iterate_read_data(read_data):
        # Helper for mock_open:
        # Retrieve lines from read_data via a generator so that separate calls to
        # readline, read, and readlines are properly interleaved
        sep = b'\n' if isinstance(read_data, bytes) else '\n'
        data_as_list = [l + sep for l in read_data.split(sep)]
        if data_as_list[-1] == sep:
            # If the last line ended in a newline, the list comprehension will have an
            # extra entry that's just a newline. Remove this.
            data_as_list = data_as_list[:-1]
        else:
            # If there wasn't an extra newline by itself, then the file being
            # emulated doesn't have a newline to end the last line remove the
            # newline that our naive format() added
            data_as_list[-1] = data_as_list[-1][:-1]
        for line in data_as_list:
            yield line

    def mock_open(mock=None, read_data=''):
        """
        A helper function to create a mock to replace the use of `open`. It works
        for `open` called directly or used as a context manager.
        The `mock` argument is the mock object to configure. If `None` (the
        default) then a `MagicMock` will be created for you, with the API limited
        to methods or attributes available on standard file handles.
        `read_data` is a string for the `read`, `readline`, and `readlines`
        methods of the file handle to return. This is an empty string by default.
        """
        def _readlines_side_effect(*args, **kwargs):
            if handle.readlines.return_value is not None:
                return handle.readlines.return_value
            return list(_data)

        def _read_side_effect(*args, **kwargs):
            if handle.read.return_value is not None:
                return handle.read.return_value
            return type(read_data)().join(_data)

        def _readline_side_effect():
            if handle.readline.return_value is not None:
                while True:
                    yield handle.readline.return_value
            for line in _data:
                yield line

        global file_spec
        if file_spec is None:
            import _io
            file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
        if mock is None:
            mock = MagicMock(name='open', spec=open)
        handle = MagicMock(spec=file_spec)
        handle.__enter__.return_value = handle
        # Single shared generator: read/readline/readlines interleave over it.
        _data = _iterate_read_data(read_data)
        handle.write.return_value = None
        handle.read.return_value = None
        handle.readline.return_value = None
        handle.readlines.return_value = None
        # The side_effects defer to return_value when the caller overrides it.
        handle.read.side_effect = _read_side_effect
        handle.readline.side_effect = _readline_side_effect()
        handle.readlines.side_effect = _readlines_side_effect
        mock.return_value = handle
        return mock
|
{
"content_hash": "f591b0c37e4526c05a758aa6cb1bb0a1",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 86,
"avg_line_length": 36.26923076923077,
"alnum_prop": 0.6177094379639448,
"repo_name": "e-gob/plataforma-kioscos-autoatencion",
"id": "dd83e39167f0eb09f0bb88f6bf815bba7e1c833c",
"size": "4510",
"binary": false,
"copies": "50",
"ref": "refs/heads/master",
"path": "scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/compat/tests/mock.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "41110"
},
{
"name": "C++",
"bytes": "3804"
},
{
"name": "CSS",
"bytes": "34823"
},
{
"name": "CoffeeScript",
"bytes": "8521"
},
{
"name": "HTML",
"bytes": "61168"
},
{
"name": "JavaScript",
"bytes": "7206"
},
{
"name": "Makefile",
"bytes": "1347"
},
{
"name": "PowerShell",
"bytes": "584344"
},
{
"name": "Python",
"bytes": "25506593"
},
{
"name": "Ruby",
"bytes": "245726"
},
{
"name": "Shell",
"bytes": "5075"
}
],
"symlink_target": ""
}
|
"""
@package mi.dataset.parser.test.test_wavss_a_dcl
@file mi/dataset/parser/test/test_hyd_o_dcl.py
@author Emily Hahn
@brief A test parser for the hydrogen series o instrument through a DCL
"""
__author__ = 'Emily Hahn'
__license__ = 'Apache 2.0'
import os
from nose.plugins.attrib import attr
from mi.core.exceptions import SampleException
from mi.dataset.test.test_parser import ParserUnitTestCase, BASE_RESOURCE_PATH
from mi.dataset.parser.hyd_o_dcl import HydODclParser
RESOURCE_PATH = os.path.join(BASE_RESOURCE_PATH, 'hyd_o', 'dcl', 'resource')
@attr('UNIT', group='mi')
class HydODclParserUnitTestCase(ParserUnitTestCase):
    """Unit tests for the hyd_o_dcl parser.

    Uses ``self.assertEqual`` throughout: ``assertEquals`` is a deprecated
    alias removed in newer unittest versions.
    """

    def test_simple_telem(self):
        """
        Test a simple telemetered case
        """
        with open(os.path.join(RESOURCE_PATH, 'first.hyd1.log'), 'rU') as file_handle:
            parser = HydODclParser(file_handle, self.exception_callback, is_telemetered=True)
            particles = parser.get_records(8)
            self.assert_particles(particles, "first_telem.yml", RESOURCE_PATH)
            self.assertEqual(self.exception_callback_value, [])

    def test_simple_recov(self):
        """
        Test a simple recovered case
        """
        with open(os.path.join(RESOURCE_PATH, 'first.hyd1.log'), 'rU') as file_handle:
            parser = HydODclParser(file_handle, self.exception_callback, is_telemetered=False)
            particles = parser.get_records(8)
            self.assert_particles(particles, "first_recov.yml", RESOURCE_PATH)
            self.assertEqual(self.exception_callback_value, [])

    def test_long_telem(self):
        """
        Test with the full file and confirm the correct number of particles occurs
        """
        with open(os.path.join(RESOURCE_PATH, '20140904.hyd1.log'), 'rU') as file_handle:
            parser = HydODclParser(file_handle, self.exception_callback, is_telemetered=True)
            # there are 813 lines in the file, but 70 are ignored so we should get 743 particles
            particles = parser.get_records(813)
            self.assertEqual(len(particles), 743)
            self.assertEqual(self.exception_callback_value, [])

    def test_bad_format_telem(self):
        """
        Test a file with two lines not formatted properly
        """
        with open(os.path.join(RESOURCE_PATH, 'bad_format.hyd1.log'), 'rU') as file_handle:
            parser = HydODclParser(file_handle, self.exception_callback, is_telemetered=True)
            particles = parser.get_records(10)
            self.assertEqual(len(particles), 8)
            # particles in the file should still match the good data in first.hyd1.log, just skipping the bad lines
            self.assert_particles(particles, "first_telem.yml", RESOURCE_PATH)
            # confirm we get two exceptions, one for each bad line
            self.assertEqual(len(self.exception_callback_value), 2)
            self.assertIsInstance(self.exception_callback_value[0], SampleException)
            self.assertIsInstance(self.exception_callback_value[1], SampleException)

    def test_log_only(self):
        """
        Test with a file that only contains dcl logs, no data, and confirm no particles are returned
        """
        with open(os.path.join(RESOURCE_PATH, 'log_only.hyd1.log'), 'rU') as file_handle:
            parser = HydODclParser(file_handle, self.exception_callback, is_telemetered=True)
            particles = parser.get_records(10)
            self.assertEqual(len(particles), 0)
            self.assertEqual(self.exception_callback_value, [])
|
{
"content_hash": "432cb5f9f9f4f4761d3b2772c598fa75",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 115,
"avg_line_length": 39.611111111111114,
"alnum_prop": 0.65890603085554,
"repo_name": "JeffRoy/mi-dataset",
"id": "71dafda1509c0c94a3a5e25cccf0a81addda695f",
"size": "3565",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mi/dataset/parser/test/test_hyd_o_dcl.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "3610231"
}
],
"symlink_target": ""
}
|
"""Print every apt package required by the selected typeshed stubs.

Distributions may be passed as command-line arguments; with none given,
every directory under ``stubs/`` is inspected.
"""
import os
import sys

import tomli

targets = sys.argv[1:] or os.listdir("stubs")
for dist in targets:
    metadata_path = f"stubs/{dist}/METADATA.toml"
    with open(metadata_path, "rb") as fh:
        metadata = tomli.load(fh)
    for package in metadata.get("stubtest_apt_dependencies", []):
        print(package)
|
{
"content_hash": "0f7da2f0e8a2acbd249ff2cddbefd6cd",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 81,
"avg_line_length": 26.46153846153846,
"alnum_prop": 0.6947674418604651,
"repo_name": "mdaniel/intellij-community",
"id": "20cbed8a848d768a1a0e3481ed6a1af564da0a11",
"size": "367",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "python/helpers/typeshed/tests/get_apt_packages.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class WidthValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``bar.marker.line.width`` property
    (non-negative, animatable, array-ok, edit_type "style")."""

    def __init__(self, plotly_name="width", parent_name="bar.marker.line", **kwargs):
        # Collect the defaults, letting explicit kwargs override each one.
        params = dict(
            plotly_name=plotly_name,
            parent_name=parent_name,
            anim=kwargs.pop("anim", True),
            array_ok=kwargs.pop("array_ok", True),
            edit_type=kwargs.pop("edit_type", "style"),
            min=kwargs.pop("min", 0),
        )
        params.update(kwargs)
        super(WidthValidator, self).__init__(**params)
|
{
"content_hash": "900e870563305060ed4f178c27f9a0b6",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 85,
"avg_line_length": 38,
"alnum_prop": 0.5864661654135338,
"repo_name": "plotly/plotly.py",
"id": "0ada8567f270e54f5e3f0727635131701a970aa0",
"size": "532",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/bar/marker/line/_width.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
'''
Defines the structure of inbound messages.
@author: Peter Parente <parente@cs.unc.edu>
@copyright: Copyright (c) 2008 Peter Parente
@license: BSD License
All rights reserved. This program and the accompanying materials are made
available under the terms of The BSD License which accompanies this
distribution, and is available at
U{http://www.opensource.org/licenses/bsd-license.php}
'''
import Constants
class InboundMessage(object):
    '''
    Base class for all input messages.

    Records what kind of event occurred (ID/Name/Time), how it was
    modified, where it should be delivered, and bookkeeping flags used
    while the message travels through the input pipe.

    @ivar ResultData: Result of a completion macro returned to the owner task
      of the completed task
    @type ResultData: object
    @ivar UserData: Extra data attached to the message by an observer
    @type UserData: object
    @ivar Stop: Should this message stop immediately (True) or propogate (False)?
    @type Stop: boolean
    @ivar Seen: Has this message been handled by at least one object?
    @type Seen: boolean
    @ivar ID: ID of the event
    @type ID: 2-tuple of number
    @ivar Name: Name of the event
    @type Name: string
    @ivar Time: Time at which the event occurred
    @type Time: number
    @ivar Destination: Object that should this message should be routed to
    @type Destination: object
    @ivar Modified: Bit field of modifier values
    @type Modified: number
    @ivar StartPipe: Starting point for input message routing, or None to mean
      the top of the pipe
    @type StartPipe: object
    '''
    def __init__(self, ID):
        '''
        Initializes an instance with default bookkeeping values.

        See instance variables for parameter description.
        '''
        self.ResultData = None
        self.UserData = None
        self.Stop = True
        self.Seen = False
        self.ID = ID
        self.Name = None
        self.Time = 0
        self.Destination = None
        self.Modified = 0
        self.StartPipe = None

    def __eq__(self, o):
        '''
        Compares this message against another message or a raw ID tuple.

        @param o: Object to compare with this message
        @type o: L{Input.Messages.InboundMessage}
        @return: Whether or not the object and this message are equivalent
        @rtype: boolean
        '''
        # another message: compare by event ID
        if isinstance(o, InboundMessage):
            return self.ID == o.ID
        # a specific keyboard command given as a bare tuple
        if isinstance(o, tuple):
            return self.ID == o
        return False

    def RouteTo(self, dest):
        '''
        Stores a reference to an object that should receive this message without
        letting it filter through the entire system.

        @param dest: Object that should receive the message
        @type dest: object
        '''
        self.Destination = dest

    def RouteThrough(self, starting):
        '''
        Stores a reference to the starting point for routing a message. This can
        be used to get messages to last focused L{View}s when some other view
        has the focus.

        @param starting: Start of the input pipe through which the message
          should be routed
        @type starting: L{Input.Pipe}
        '''
        self.StartPipe = starting
class TextMessage(InboundMessage):
    '''
    Inbound message carrying a chunk of text for a L{View}, typically from
    the memory menu.

    @ivar Text: Text chunk
    @type Text: string
    '''
    def __init__(self, ID, text):
        '''
        Initializes the base message with the given ID and stores the text.

        @param text: Text chunk
        @type text: string
        '''
        InboundMessage.__init__(self, ID)
        self.Text = text
class KeyMessage(InboundMessage):
    '''
    A physical key press or release generated by the user and captured by
    the pyHook library.

    @ivar ascii: Character value for the virtual key code
    @type ascii: integer
    @ivar Press: Was the key pressed (True) or released (False)?
    @type Press: boolean
    '''
    def __init__(self, event, modified, press):
        '''
        Copies the relevant fields off the pyHook event object and stores
        the remaining parameters.

        @param event: Event object from the pyHook package
        @type event: pyHook.KeyboardEvent
        @param modified: Bit field of modifier values
        @type modified: number
        @param press: Was the key pressed (True) or released (False)?
        @type press: boolean
        '''
        InboundMessage.__init__(self, (event.KeyID, event.Extended))
        self.Name = event.Key
        self.Time = event.Time
        self.ascii = event.Ascii
        self.Modified = modified
        self.Press = press

    def GetAsciiChar(self):
        '''
        @return: The ASCII value converted to a character, or None for 0
        @rtype: string
        '''
        if not self.ascii:
            return None
        return chr(self.ascii)
    Char = property(GetAsciiChar)

    def GetShift(self):
        '''
        @return: Was shift held when the key was pressed?
        @rtype: boolean
        '''
        return self.Modified & Constants.MOD_SHIFT
    Shift = property(GetShift)
|
{
"content_hash": "5cc6576848f4493cc9433c8a2d6eddd4",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 80,
"avg_line_length": 29.713414634146343,
"alnum_prop": 0.6810999384362816,
"repo_name": "parente/clique",
"id": "fa5f726d43ae815cb57228312b0d93dae3c16b37",
"size": "4873",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Input/Messages.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "624768"
}
],
"symlink_target": ""
}
|
import sys
import os
import subprocess
import shlex
import logging
import re
import urlparse
import dateutil.parser
import time
import pprint
import dxpy
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
#logger.setLevel(logging.DEBUG)
#logger.addHandler(dxpy.DXLogHandler())
logger.propagate = True
def test():
    """Smoke-test hook: prints a marker proving this module was imported."""
    print("In common.test")
def flat(l):
    """Recursively flatten arbitrarily nested iterables into one list.

    Strings (``basestring``: str/unicode under Python 2) are treated as
    atoms rather than iterated character by character.
    """
    flattened = []
    for item in l:
        if hasattr(item, "__iter__") and not isinstance(item, basestring):
            flattened.extend(flat(item))
        else:
            flattened.append(item)
    return flattened
def rstrips(string, substring):
    """Return ``string`` with a single trailing ``substring`` removed,
    or ``string`` unchanged when it does not end with ``substring``."""
    if string.endswith(substring):
        return string[:len(string) - len(substring)]
    return string
def touch(fname, times=None):
    """Unix ``touch``: ensure ``fname`` exists and update its timestamps.

    ``times`` is passed straight through to ``os.utime`` (None = now).
    """
    # Append mode creates the file if missing without truncating it.
    open(fname, 'a').close()
    os.utime(fname, times)
def block_on(command):
    """Run ``command`` (a shell-style string, split with shlex), streaming
    its combined stdout/stderr to our stdout; block until it exits and
    return its exit code.
    """
    process = subprocess.Popen(shlex.split(command), stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
    # NOTE(review): the '' sentinel assumes text-mode pipes (Python 2);
    # under Python 3 readline() yields bytes and b'' != '', so this loop
    # would never terminate -- confirm this module only runs on Python 2.
    for line in iter(process.stdout.readline, ''):
        sys.stdout.write(line)
    process.wait()
    return process.returncode
def run_pipe(steps, outfile=None):
    """Execute ``steps`` (a list of shell-style command strings) as a Unix
    pipeline, each step's stdout feeding the next step's stdin.

    When ``outfile`` is given, the final step writes there instead of
    being captured.  Returns ``(stdout, stderr)`` from the final process
    (stderr is not currently captured -- see TODO).
    """
    # TODO: capture stderr
    from subprocess import Popen, PIPE
    p = None
    p_next = None
    first_step_n = 1
    last_step_n = len(steps)
    for n, step in enumerate(steps, start=first_step_n):
        logger.debug("step %d: %s" % (n, step))
        if n == first_step_n:
            if n == last_step_n and outfile:  # one-step pipeline with outfile
                with open(outfile, 'w') as fh:
                    print("one step shlex: %s to file: %s" % (shlex.split(step), outfile))
                    p = Popen(shlex.split(step), stdout=fh)
                break
            print("first step shlex to stdout: %s" % (shlex.split(step)))
            p = Popen(shlex.split(step), stdout=PIPE)
        elif n == last_step_n and outfile:  # only treat the last step specially if you're sending stdout to a file
            with open(outfile, 'w') as fh:
                print("last step shlex: %s to file: %s" % (shlex.split(step), outfile))
                p_last = Popen(shlex.split(step), stdin=p.stdout, stdout=fh)
                # Close our copy so the upstream process sees SIGPIPE if the
                # downstream one exits early.
                p.stdout.close()
                p = p_last
        else:  # handles intermediate steps and, in the case of a pipe to stdout, the last step
            print("intermediate step %d shlex to stdout: %s" % (n, shlex.split(step)))
            p_next = Popen(shlex.split(step), stdin=p.stdout, stdout=PIPE)
            p.stdout.close()
            p = p_next
    # communicate() on the last process drains the pipeline and waits.
    out, err = p.communicate()
    return out, err
def uncompress(filename):
    """Decompress ``filename`` (recognised suffixes: gz/Z/bz/bz2) next to
    itself via ``gzip -dc``, leaving the compressed file intact.

    Returns the decompressed filename, or the input name unchanged when
    it carries no recognised compression suffix.
    """
    match = re.match('(.*)(\.((gz)|(Z)|(bz)|(bz2)))', filename)
    if not match:
        return filename
    basename = match.group(1)
    logger.info("Decompressing %s" % (filename))
    # logger.info(subprocess.check_output(shlex.split('gzip -dc %s' %(filename))))
    out, err = run_pipe(
        ['gzip -dc %s' % (filename)],
        basename)
    return basename
def compress(fname):
    """Gzip ``fname`` and return the new '<fname>.gz' name, restoring the
    original uncompressed file afterwards.  Files that are already
    compressed (per libmagic MIME type) are returned unchanged.
    """
    # leaves uncompressed file intact
    from magic import from_file
    compressed_mimetypes = [
        "application/x-compress",
        "application/x-bzip2",
        "application/x-gzip"
    ]
    mime_type = from_file(fname, mime=True)
    if mime_type in compressed_mimetypes:
        # Already compressed; nothing to do.
        return fname
    else:
        # the gzip version shipped with Ubuntu 12 does not have --keep/-k so
        # have to do this copy manually
        from uuid import uuid4
        logger.info("Compressing %s" % (fname))
        # NOTE(review): uuid4() is a UUID object used directly as a temp
        # filename via %s interpolation; the temp copy is never cleaned up.
        tmpname = uuid4()
        subprocess.check_output(shlex.split('cp %s %s' % (fname, tmpname)))
        # gzip -n is used in order to make output deterministic
        subprocess.check_output(shlex.split('gzip -f -n %s' % (fname)))
        new_fname = fname + '.gz'
        # Put the original uncompressed file back in place.
        subprocess.check_output(shlex.split('cp %s %s' % (tmpname, fname)))
        return new_fname
def count_lines(fname):
    """Count the lines in ``fname`` with ``wc -l``, transparently
    decompressing gz/bz2/compress files (detected via libmagic)."""
    from magic import from_file
    compressed_mimetypes = [
        "application/x-compress",
        "application/x-bzip2",
        "application/x-gzip"
    ]
    if from_file(fname, mime=True) in compressed_mimetypes:
        catcommand = 'gzip -dc'
    else:
        catcommand = 'cat'
    out, err = run_pipe([
        '%s %s' % (catcommand, fname),
        'wc -l'
    ])
    return int(out)
def xcor_fraglen(filename):
    """Return the fragment length estimate: the third whitespace-delimited
    column of the first line of a cross-correlation scores file."""
    with open(filename, 'r') as fh:
        fields = fh.readline().split()
    return int(fields[2])
def frip(reads_filename, xcor_filename, peaks_filename, chrom_sizes_filename):
    """Compute the Fraction of Reads in Peaks (FRiP).

    Each read (tagAlign interval) is extended by half the cross-correlation
    fragment length in both directions before intersecting with the peaks.
    Returns the tuple ``(n_reads, n_reads_in_peaks, frip_score)``.
    """
    # calculate FRiP
    fraglen = xcor_fraglen(xcor_filename)
    # NOTE(review): integer division under Python 2; on Python 3 this would
    # produce a float -- confirm truncation is intended.
    half_fraglen = int(fraglen)/2
    reads_in_peaks_fn = 'reads_in_peaks.ta'
    out, err = run_pipe([
        # Extend each read by half a fragment length on both sides.
        'slopBed -i %s -g %s -s -l %s -r %s' % (
            reads_filename, chrom_sizes_filename, -half_fraglen, half_fraglen),
        # Drop intervals with negative or inverted coordinates.
        r"""awk '{if ($2>=0 && $3>=0 && $2<=$3) print $0}'""",
        # Keep each read once (-u) if it overlaps any peak.
        'intersectBed -a stdin -b %s -wa -u' % (peaks_filename)
    ], reads_in_peaks_fn)
    n_reads = count_lines(uncompress(reads_filename))
    n_reads_in_peaks = count_lines(reads_in_peaks_fn)
    frip_score = float(n_reads_in_peaks)/float(n_reads)
    return (n_reads, n_reads_in_peaks, frip_score)
def bed2bb(bed_filename, chrom_sizes, as_file, bed_type='bed6+4'):
    """Convert a BED file to bigBed with ``bedToBigBed``.

    Sorts the input, runs bedToBigBed with the given chrom.sizes and
    autoSql (.as) schema, and returns the .bb filename -- or None when
    the conversion fails or produces no output file.
    """
    if bed_filename.endswith('.bed'):
        bb_filename = bed_filename[:-4] + '.bb'
    else:
        bb_filename = bed_filename + '.bb'
    bed_filename_sorted = bed_filename + ".sorted"
    logger.debug("In bed2bb with bed_filename=%s, chrom_sizes=%s, as_file=%s" %(bed_filename, chrom_sizes, as_file))
    print "Sorting"
    # bedToBigBed requires chrom, then numeric-start sorted input.
    print subprocess.check_output(shlex.split("sort -k1,1 -k2,2n -o %s %s" %(bed_filename_sorted, bed_filename)), shell=False, stderr=subprocess.STDOUT)
    for fn in [bed_filename, bed_filename_sorted, chrom_sizes, as_file]:
        print "head %s" %(fn)
        print subprocess.check_output('head %s' %(fn), shell=True, stderr=subprocess.STDOUT)
    command = "bedToBigBed -type=%s -as=%s %s %s %s" %(bed_type, as_file, bed_filename_sorted, chrom_sizes, bb_filename)
    print command
    try:
        process = subprocess.Popen(shlex.split(command), stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
        for line in iter(process.stdout.readline, ''):
            sys.stdout.write(line)
        process.wait()
        returncode = process.returncode
        if returncode != 0:
            # NOTE(review): raising the exception class without arguments
            # would itself fail (CalledProcessError requires args), but the
            # resulting exception is swallowed by the bare except below.
            raise subprocess.CalledProcessError
    except:
        # NOTE(review): bare except treats any failure as "skip bb creation".
        e = sys.exc_info()[0]
        sys.stderr.write('%s: bedToBigBed failed. Skipping bb creation.' %(e))
        return None
    #print subprocess.check_output('ls -l', shell=True, stderr=subprocess.STDOUT)
    #this is necessary in case bedToBegBed failes to create the bb file but doesn't return a non-zero returncode
    try:
        os.remove(bed_filename_sorted)
    except:
        pass
    if not os.path.isfile(bb_filename):
        bb_filename = None
    print "Returning bb file %s" %(bb_filename)
    return bb_filename
def rescale_scores(fn, scores_col, new_min=10, new_max=1000):
    """Linearly rescale column ``scores_col`` (1-based) of peak file ``fn``
    into [new_min, new_max], writing a score-sorted output file.
    Returns the rescaled filename.
    """
    n_peaks = count_lines(fn)
    sorted_fn = '%s-sorted' % (fn)
    rescaled_fn = '%s-rescaled' % (fn)
    # Sort by score descending and drop empty lines.
    out, err = run_pipe([
        'sort -k %dgr,%dgr %s' % (scores_col, scores_col, fn),
        r"""awk 'BEGIN{FS="\t";OFS="\t"}{if (NF != 0) print $0}'"""],
        sorted_fn)
    # After the descending sort, line 1 holds the maximum score...
    out, err = run_pipe([
        'head -n 1 %s' % (sorted_fn),
        'cut -f %s' % (scores_col)])
    max_score = float(out.strip())
    logger.info("rescale_scores: max_score = %s" % (max_score))
    # ...and the last line holds the minimum score.
    out, err = run_pipe([
        'tail -n 1 %s' % (sorted_fn),
        'cut -f %s' % (scores_col)])
    min_score = float(out.strip())
    logger.info("rescale_scores: min_score = %s" % (min_score))
    a = min_score
    b = max_score
    x = new_min
    y = new_max
    if min_score == max_score:  # give all peaks new_min
        rescale_formula = "x"
    else:  # n is the unscaled score from scores_col
        rescale_formula = "((n-a)*(y-x)/(b-a))+x"
    # NOTE(review): a and b are floats interpolated with %d below, so they
    # are truncated when the awk variables are built -- confirm intended.
    out, err = run_pipe(
        [
            'cat %s' % (sorted_fn),
            r"""awk 'BEGIN{OFS="\t"}{n=$%d;a=%d;b=%d;x=%d;y=%d}"""
            % (scores_col, a, b, x, y) +
            r"""{$%d=int(%s) ; print $0}'"""
            % (scores_col, rescale_formula)
        ],
        rescaled_fn)
    return rescaled_fn
def slop_clip(filename, chrom_sizes, bed_type='bed'):
assert bed_type in ['bed', 'gappedPeak'], \
"slop_clip: unsupported bed_type %s" % (bed_type)
clipped_fn = '%s-clipped' % (filename)
# Remove coordinates outside chromosome sizes
pipe = ['slopBed -i %s -g %s -b 0' % (filename, chrom_sizes),
'bedClip stdin %s %s' % (chrom_sizes, clipped_fn)]
print pipe
out, err = run_pipe(pipe)
if bed_type == 'bed':
return clipped_fn
# MACS2 sometimes generates blocks that span outside of the peak
# this causes bedtobigbed to exit with an error
# need to clip those back
elif bed_type == 'gappedPeak':
clipped_gappedPeaks_fn = '%s-gapclipped' % (clipped_fn)
import csv
import copy
with open(clipped_fn, 'rb') as in_fh, open(clipped_gappedPeaks_fn, 'wb') as out_fh:
fieldnames = [
'chrom', 'chromStart', 'chromEnd', 'name', 'score',
'strand', 'thickStart', 'thickEnd', 'reserved',
'blockCount', 'blockSizes', 'blockStarts',
'signalValue', 'pValue', 'qValue']
reader = \
csv.DictReader(in_fh, fieldnames=fieldnames, delimiter='\t')
writer = \
csv.DictWriter(out_fh, fieldnames=fieldnames, delimiter='\t', lineterminator='\n')
for line in reader:
peak = dict(zip(fieldnames, [
line['chrom'],
int(line['chromStart']),
int(line['chromEnd']),
line['name'],
line['score'],
line['strand'],
int(line['thickStart']),
int(line['thickEnd']),
line['reserved'],
int(line['blockCount']),
[int(s) for s in line['blockSizes'].split(',')],
[int(s) for s in line['blockStarts'].split(',')],
line['signalValue'],
line['pValue'],
line['qValue']]))
chromStart = peak['chromStart']
chromEnd = peak['chromEnd']
if peak['thickStart'] < chromStart:
peak['thickStart'] = chromStart
if peak['thickEnd'] > chromEnd:
peak['thickEnd'] = chromEnd
# build blocks in absolute chromosome coordinates
# the peak's blockStarts are zero-based
blocks = [
dict(zip(
['start', 'end'],
[chromStart+peak['blockStarts'][i], chromStart+(peak['blockStarts'][i] + peak['blockSizes'][i])]))
for i in range(peak['blockCount'])]
newblocks = []
for block in blocks:
# drop blocks that are entirely outside the peak
if \
((block['start'] < chromStart and block['end'] < chromstart) or
(block['start'] > chromEnd and block['end'] > chromEnd)):
continue
# clip the end of blocks that extend past the peak
if block['end'] > chromEnd:
block['end'] = chromEnd
# clip the start of blocks that start before the peak
if block['start'] < chromStart:
block['start'] = chromStart
# drop zero-length blocks
if block['start'] == block['end']:
continue
newblocks.append(block)
# bed2bigbed requires a block at the beginning
if not [b for b in newblocks if b['start'] == chromStart]:
newblocks.insert(
0,
{'start': chromStart, 'end': chromStart+1})
# bed2bigbed requires a block at the end
if not [b for b in newblocks if b['end'] == chromEnd]:
newblocks.append(
{'start': chromEnd-1, 'end': chromEnd})
# chromStart + chromStarts[last] + blockSizes[last] must equal chromEnd
# rewrite the new blocks into the peak
peak['blockCount'] = len(newblocks)
peak['blockSizes'] = \
[(block['end']-block['start']) for block in newblocks]
peak['blockStarts'] = \
[block['start']-chromStart for block in newblocks]
if peak['blockCount']:
peak['blockSizes'] = \
','.join([str(x) for x in peak['blockSizes']])
peak['blockStarts'] = \
','.join([str(x) for x in peak['blockStarts']])
writer.writerow(peak)
return clipped_gappedPeaks_fn
def processkey(key=None, keyfile=None):
    """Resolve ENCODE portal credentials.

    Order of precedence: the ENCODE_AUTHID/ENCODE_AUTHPW/ENCODE_SERVER
    environment variables (only when neither ``key`` nor ``keyfile`` is
    given), then the named ``key`` entry in ``keyfile`` (defaulting to a
    global KEYFILE or ~/keypairs.json).

    Returns ``(authid, authpw, server)`` with ``server`` guaranteed to end
    in '/', or None when credentials cannot be resolved.

    Fixes: the IOError handler previously logged the literal string "e."
    instead of the exception, and the missing-key handler caught ValueError
    (``keys[key]`` raises KeyError) while referencing an undefined ``e``.
    """
    import json
    if not (key or keyfile) and os.getenv('ENCODE_AUTHID', None) and os.getenv('ENCODE_AUTHPW', None) and os.getenv('ENCODE_SERVER', None):
        authid = os.getenv('ENCODE_AUTHID', None)
        authpw = os.getenv('ENCODE_AUTHPW', None)
        server = os.getenv('ENCODE_SERVER', None)
    else:
        if not keyfile:
            if 'KEYFILE' in globals():  # this is to support scripts where KEYFILE is a global
                keyfile = KEYFILE
            elif os.path.isfile(os.path.expanduser('~/keypairs.json')):
                keyfile = os.path.expanduser('~/keypairs.json')
            else:
                logger.error("Keyfile must be specified, in ~/keypairs.json or in global KEYFILE.")
                return None
        if key:
            try:
                keysf = open(keyfile, 'r')
            except IOError as e:
                logger.error("Failed to open keyfile %s" % (keyfile))
                logger.error(e)  # fix: was logger.error("e.")
                return None
            except:
                raise
            keys_json_string = keysf.read()
            keysf.close()
            try:
                keys = json.loads(keys_json_string)
            except ValueError as e:
                logger.error(e.message)
                logger.error("Keyfile %s not in parseable JSON" % (keyfile))
                return None
            except:
                raise
            try:
                key_dict = keys[key]
            except KeyError:  # fix: a missing dict key raises KeyError, not ValueError
                logger.error("Keyfile %s has no key named %s" % (keyfile, key))
                return None
            except:
                raise
        else:
            key_dict = {}
        if key_dict:
            authid = key_dict.get('key')
            authpw = key_dict.get('secret')
            server = key_dict.get('server')
        else:
            return None
    # Callers join paths onto the server URL, so normalize the trailing slash.
    if not server.endswith("/"):
        server += "/"
    return (authid, authpw, server)
def encoded_get(url, keypair=None, frame='object', return_response=False):
    """GET ``url`` from an ENCODE portal with retries.

    The query string is normalized to request JSON (format=json,
    frame=<frame>, limit=all), mirroring what a browser request would
    send.  Connection/SSL errors and HTTP 500 are retried up to 10 times
    with increasing sleeps.  Returns the decoded JSON (falling back to
    raw text), or the requests.Response when ``return_response`` is True;
    returns None on unrecoverable errors.

    Fix: ``response`` is now initialized before the retry loop -- it was
    previously unbound (UnboundLocalError) when the very first attempt
    raised, or when every attempt raised a retryable exception.
    """
    import urlparse
    import urllib
    import requests
    # it is not strictly necessary to include both the accept header, and
    # format=json, but we do so as to get exactly the same URL as one would
    # use in a web browser
    RETRY_CODES = [500]
    RETRY_EXCEPTIONS = (requests.exceptions.ConnectionError, requests.exceptions.SSLError)
    HEADERS = {'accept': 'application/json'}
    url_obj = urlparse.urlsplit(url)
    new_url_list = list(url_obj)
    query = urlparse.parse_qs(url_obj.query)
    if 'format' not in query:
        new_url_list[3] += "&format=json"
    if 'frame' not in query:
        new_url_list[3] += "&frame=%s" % (frame)
    if 'limit' not in query:
        new_url_list[3] += "&limit=all"
    if new_url_list[3].startswith('&'):
        # The original query was empty; strip the leading '&'.
        new_url_list[3] = new_url_list[3].replace('&', '', 1)
    get_url = urlparse.urlunsplit(new_url_list)
    logger.debug('encoded_get: %s' % (get_url))
    max_retries = 10
    max_sleep = 10
    response = None  # fix: ensure the name is always bound
    while max_retries:
        try:
            if keypair:
                response = requests.get(get_url, auth=keypair, headers=HEADERS)
            else:
                response = requests.get(get_url, headers=HEADERS)
        except RETRY_EXCEPTIONS as e:
            logger.warning(
                "%s ... %d retries left." % (e, max_retries))
            time.sleep(max_sleep - max_retries)
            max_retries -= 1
            continue
        except Exception as e:
            logger.error("%s" % (e))
            # May be None when no request ever completed.
            if return_response:
                return response
            else:
                return None
        else:
            if response.status_code in RETRY_CODES:
                logger.warning(
                    "%d %s ... %d retries left."
                    % (response.status_code, response.text, max_retries))
                time.sleep(max_sleep - max_retries)
                max_retries -= 1
                continue
            if return_response:
                return response
            else:
                try:
                    return response.json()
                except:
                    return response.text
    logger.error("Max retries exhausted.")
    if return_response:
        return response
    else:
        return None
def encoded_update(method, url, keypair, payload, return_response):
    """Send ``payload`` as JSON to ``url`` with HTTP ``method`` ('patch',
    'post' or 'put'), retrying connection/SSL errors and HTTP 500 up to
    10 times.  Returns decoded JSON (or raw text), the Response when
    ``return_response`` is True, or None on failure/invalid method.
    """
    import urlparse, urllib, requests, json
    if method == 'patch':
        request_method = requests.patch
    elif method == 'post':
        request_method = requests.post
    elif method == 'put':
        request_method = requests.put
    else:
        logger.error('Invalid HTTP method: %s' %(method))
        return
    RETRY_CODES = [500]
    RETRY_EXCEPTIONS = (requests.exceptions.ConnectionError, requests.exceptions.SSLError)
    HEADERS = {'accept': 'application/json', 'content-type': 'application/json'}
    max_retries = 10
    max_sleep = 10
    while max_retries:
        try:
            response = request_method(
                url, auth=keypair, headers=HEADERS, data=json.dumps(payload))
        except RETRY_EXCEPTIONS as e:
            logger.warning(
                "%s ... %d retries left." % (e, max_retries))
            time.sleep(max_sleep - max_retries)
            max_retries -= 1
            continue
        except Exception as e:
            logger.error("%s" % (e))
            return None
        else:
            if response.status_code in RETRY_CODES:
                logger.warning(
                    "%d %s ... %d retries left."
                    % (response.status_code, response.text, max_retries))
                time.sleep(max_sleep - max_retries)
                max_retries -= 1
                continue
            if return_response:
                return response
            else:
                try:
                    return response.json()
                except:
                    return response.text
    logger.error("Max retries exhausted.")
    # NOTE(review): if every attempt raised a retryable exception,
    # ``response`` is unbound here and this raises UnboundLocalError.
    if return_response:
        return response
    else:
        return None
def encoded_patch(url, keypair, payload, return_response=False):
    """PATCH ``payload`` to ``url``; thin wrapper over ``encoded_update``."""
    return encoded_update('patch', url, keypair, payload, return_response)


def encoded_post(url, keypair, payload, return_response=False):
    """POST ``payload`` to ``url``; thin wrapper over ``encoded_update``."""
    return encoded_update('post', url, keypair, payload, return_response)


def encoded_put(url, keypair, payload, return_response=False):
    """PUT ``payload`` to ``url``; thin wrapper over ``encoded_update``."""
    return encoded_update('put', url, keypair, payload, return_response)
def pprint_json(JSON_obj):
    """Pretty-print ``JSON_obj`` as sorted, 4-space-indented JSON
    (Python 2 print statement)."""
    import json
    print json.dumps(JSON_obj, sort_keys=True, indent=4, separators=(',', ': '))
def merge_dicts(*dict_args):
    '''
    Shallow-merge any number of dicts into a brand-new dict.  On duplicate
    keys, the value from the later dict wins.  The inputs are not mutated.
    '''
    merged = {}
    for mapping in dict_args:
        merged.update(mapping)
    return merged
def md5(fn):
    """Return the hex MD5 digest of file ``fn`` using the system tool.

    The available command (BSD ``md5 -q`` vs GNU ``md5sum``) is probed
    with ``which`` once per process and cached in the module-level global
    ``md5_command``.
    """
    if 'md5_command' not in globals():
        global md5_command
        try:
            subprocess.check_call('which md5', shell=True)
        except:
            try:
                subprocess.check_call('which md5sum', shell=True)
            except:
                # Neither tool found; the call below will then fail.
                md5_command = None
            else:
                md5_command = 'md5sum'
        else:
            md5_command = 'md5 -q'
    md5_output = subprocess.check_output(' '.join([md5_command, fn]), shell=True)
    # 'md5 -q' prints only the digest; 'md5sum' prints "digest  filename",
    # so take everything before the first space and strip the newline.
    return md5_output.partition(' ')[0].rstrip()
def after(date1, date2):
    """Return True if timestamp string ``date1`` is strictly later than
    ``date2`` (parsed with dateutil).

    When comparing a naive with a timezone-aware timestamp raises
    TypeError, a midnight -07:00 suffix is appended to whichever operand
    lacks a timezone offset and the comparison is retried.

    Fixes: the retry previously appended the suffix to ``date1`` twice
    (never fixing ``date2``), and the successful retry fell off the end
    of the function returning None instead of the comparison result.
    """
    try:
        result = dateutil.parser.parse(date1) > dateutil.parser.parse(date2)
    except TypeError:
        if not re.search('\+.*$', date1):
            date1 += 'T00:00:00-07:00'
        if not re.search('\+.*$', date2):
            date2 += 'T00:00:00-07:00'  # fix: was date1 +=
        try:
            result = dateutil.parser.parse(date1) > dateutil.parser.parse(date2)
        except Exception as e:
            logger.error("%s Cannot compare %s with %s" % (e, date1, date2))
            raise
    # fix: return the result from either path (previously only the
    # no-exception path returned it).
    return result
def biorep_ns_generator(f, server, keypair):
    """Yield biological replicate numbers for file ``f``, recursing
    through each file's ``derived_from`` ancestry until files with a
    direct replicate are reached.

    ``f`` may be a file object (dict), a "/files/<acc>/" path, or any
    string containing an ENCFF accession.
    """
    if isinstance(f, dict):
        acc = f.get('accession')
    else:
        # Accept "/files/ENCFF...", "ENCFF...", or fall back to scanning
        # the string for an ENCFF accession anywhere inside it.
        m = re.match('^/?(files)?/?(\w*)', f)
        if m:
            acc = m.group(2)
        else:
            acc = re.search('ENCFF[0-9]{3}[A-Z]{3}', f).group(0)
    if not acc:
        return
    url = urlparse.urljoin(server, '/files/%s' % (acc))
    file_object = encoded_get(url, keypair)
    if file_object.get('derived_from'):
        # Derived files inherit replicate numbers from their ancestors.
        for derived_from in file_object.get('derived_from'):
            for repnum in biorep_ns_generator(derived_from, server, keypair):
                yield repnum
    else:
        url = urlparse.urljoin(server, '%s' % (file_object.get('replicate')))
        replicate_object = encoded_get(url, keypair)
        yield replicate_object.get('biological_replicate_number')
def biorep_ns(f, server, keypair):
    """Return the distinct biological replicate numbers for file ``f``,
    with any None entries removed."""
    distinct = set(biorep_ns_generator(f, server, keypair))
    return [num for num in distinct if num is not None]
def derived_from_references_generator(f, server, keypair):
    """Yield the @ids of reference files (output_category == "reference")
    found anywhere in the ``derived_from`` ancestry of file ``f``.

    ``f`` may be a file object (dict), a "/files/<acc>/" path, or any
    string containing an ENCFF accession.
    """
    if isinstance(f, dict):
        acc = f.get('accession')
    else:
        m = re.match('^/?(files)?/?(\w*)', f)
        if m:
            acc = m.group(2)
        else:
            acc = re.search('ENCFF[0-9]{3}[A-Z]{3}', f).group(0)
    if not acc:
        return
    url = urlparse.urljoin(server, '/files/%s' % (acc))
    file_object = encoded_get(url, keypair)
    if not file_object.get('derived_from'):
        # Root of the ancestry: nothing to yield.
        return
    else:
        for derived_from_uri in file_object.get('derived_from', []):
            derived_from_url = urlparse.urljoin(server, derived_from_uri)
            derived_from_file = encoded_get(derived_from_url, keypair)
            if derived_from_file.get('output_category') == "reference":
                yield derived_from_file.get('@id')
            else:
                # Keep walking up until a reference ancestor (or root).
                for derived_from_reference in derived_from_references_generator(derived_from_file, server, keypair):
                    yield derived_from_reference
def derived_from_references(f, server, keypair):
    """Return the distinct, non-None reference-file @ids in *f*'s derivation chain."""
    refs = set(derived_from_references_generator(f, server, keypair))
    return [ref for ref in refs if ref is not None]
def expired(credentials):
    """Return True if *credentials* have passed their 'expiration' timestamp.

    Compares the current wall-clock time (time.asctime()) against the
    credential payload's 'expiration' field via the module's `after` helper.
    """
    # `after` already returns the boolean we need; the original
    # `if ...: return True / else: return False` was redundant.
    return after(time.asctime(), credentials.get('expiration'))
def new_creds(file_object, server, keypair):
    """POST to the file's /upload/ endpoint and return fresh upload credentials.

    Raises on HTTP error; returns None if the response carries no credentials.
    """
    url = server + '/files/%s/upload/' % (file_object['accession'])
    response = encoded_post(url, keypair, {}, return_response=True)
    payload = response.json()
    logger.debug('POST to %s returned %s' % (url, pprint.pformat(payload)))
    response.raise_for_status()
    return payload['@graph'][0].get('upload_credentials')
def s3_cp(file_object, local_fname, server, keypair, overwrite=False):
    """Upload *local_fname* to the file's S3 upload_url via the aws CLI.

    Reuses credentials already on *file_object* when present, otherwise fetches
    them from the /upload/ endpoint, and regenerates them if expired. Returns
    the aws CLI exit code (0 on success, the non-zero code on failure).
    """
    # TODO check overwrite and regenerate credential if necessary
    logger.debug('in s3_cp with file_object:')
    logger.debug(pprint.pformat(file_object))
    creds = file_object.get('upload_credentials')
    if not creds:
        # No cached credentials on the object; fetch current ones from the server.
        url = server + '/files/%s/upload/' % (file_object['accession'])
        response_json = encoded_get(url, keypair)
        logger.debug('s3_cp: Got %s response_json %s' % (url, response_json))
        creds = response_json['@graph'][0]['upload_credentials']
        logger.debug('s3_cp: Got creds %s' % (creds))
    if expired(creds):
        # Credentials are time-limited; mint fresh ones before uploading.
        creds = new_creds(file_object, server, keypair)
        logger.debug('s3_cp: Got new creds %s' % (creds))
    # Pass the STS credentials to the aws CLI through its environment rather
    # than mutating os.environ for the whole process.
    env = os.environ.copy()
    env.update({
        'AWS_ACCESS_KEY_ID': creds['access_key'],
        'AWS_SECRET_ACCESS_KEY': creds['secret_key'],
        'AWS_SECURITY_TOKEN': creds['session_token'],
    })
    logger.info("Uploading file.")
    start = time.time()
    logger.debug('accession_file local_fname %s' % (local_fname))
    logger.debug('accession_file upload_url %s' % (creds['upload_url']))
    try:
        return_code = subprocess.check_call(
            ['aws', 's3', 'cp', local_fname, creds['upload_url'], '--quiet'],
            env=env)
    except subprocess.CalledProcessError as e:
        # The aws command returns a non-zero exit code on error.
        logger.error("Upload failed with exit code %d" % e.returncode)
        return e.returncode
    else:
        end = time.time()
        duration = end - start
        logger.info("Uploaded in %.2f seconds" % duration)
        return return_code
|
{
"content_hash": "089d011b2b7e727e3eaee232c6dce02e",
"timestamp": "",
"source": "github",
"line_count": 715,
"max_line_length": 152,
"avg_line_length": 36.506293706293704,
"alnum_prop": 0.558539575511455,
"repo_name": "ENCODE-DCC/pipeline-container",
"id": "c018b74ac1e63190cd5a2862ba0a8ac8e63bb0b1",
"size": "26126",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "dnanexus/common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "138701"
},
{
"name": "Python",
"bytes": "313714"
},
{
"name": "R",
"bytes": "75661"
},
{
"name": "Roff",
"bytes": "33540"
},
{
"name": "Shell",
"bytes": "2898"
}
],
"symlink_target": ""
}
|
'''
This checks if all command line args are documented.
Return value is 0 to indicate no error.
Author: @MarcoFalke
'''
from subprocess import check_output
import re
# Directory scanned for argument usage, and the test subtree excluded from it.
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
# Shell pipelines that locate argument *uses* and argument *documentation*.
CMD_ROOT_DIR = '`git rev-parse --show-toplevel`/%s' % FOLDER_GREP
CMD_GREP_ARGS = r"egrep -r -I '(map(Multi)?Args(\.count\(|\[)|Get(Bool)?Arg\()\"\-[^\"]+?\"' %s | grep -v '%s'" % (CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_DOCS = r"egrep -r -I 'HelpMessageOpt\(\"\-[^\"=]+?(=|\")' %s" % (CMD_ROOT_DIR)
# Regexes that pull the option name (e.g. "-foo") out of each grep hit.
REGEX_ARG = re.compile(r'(?:map(?:Multi)?Args(?:\.count\(|\[)|Get(?:Bool)?Arg\()\"(\-[^\"]+?)\"')
REGEX_DOC = re.compile(r'HelpMessageOpt\(\"(\-[^\"=]+?)(?:=|\")')
# list unsupported, deprecated and duplicate args as they need no documentation
SET_DOC_OPTIONAL = set(['-rpcssl', '-benchmark', '-h', '-help', '-socks', '-tor', '-debugnet', '-whitelistalwaysrelay', '-prematurewitness', '-walletprematurewitness', '-promiscuousmempoolflags', '-blockminsize'])
def main():
    """Diff command-line args used in the tree against documented ones.

    Exits with a status equal to the number of undocumented args, so a
    fully documented tree exits 0. Args in SET_DOC_OPTIONAL are treated
    as documented.
    """
    # decode() makes this work on Python 3, where check_output returns bytes
    # (the `print` statements below were also Python-2-only syntax); on
    # Python 2 it merely converts str -> unicode, which re.findall handles.
    used = check_output(CMD_GREP_ARGS, shell=True).decode()
    docd = check_output(CMD_GREP_DOCS, shell=True).decode()
    args_used = set(re.findall(REGEX_ARG, used))
    args_docd = set(re.findall(REGEX_DOC, docd)).union(SET_DOC_OPTIONAL)
    args_need_doc = args_used.difference(args_docd)
    args_unknown = args_docd.difference(args_used)
    print("Args used : %s" % len(args_used))
    print("Args documented : %s" % len(args_docd))
    print("Args undocumented: %s" % len(args_need_doc))
    print(args_need_doc)
    print("Args unknown : %s" % len(args_unknown))
    print(args_unknown)
    exit(len(args_need_doc))
if __name__ == "__main__":
main()
|
{
"content_hash": "0fce593c3f5cdca6c21049882c7d076f",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 213,
"avg_line_length": 40.275,
"alnum_prop": 0.6393544382371198,
"repo_name": "segwit/atbcoin-insight",
"id": "5d843fbcd48baebb207d45e53d666a111113e12b",
"size": "1824",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/devtools/check-doc.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "676425"
},
{
"name": "C++",
"bytes": "5246230"
},
{
"name": "CSS",
"bytes": "24011"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "M4",
"bytes": "175966"
},
{
"name": "Makefile",
"bytes": "102865"
},
{
"name": "Objective-C",
"bytes": "3967"
},
{
"name": "Objective-C++",
"bytes": "7240"
},
{
"name": "Protocol Buffer",
"bytes": "2308"
},
{
"name": "Python",
"bytes": "882249"
},
{
"name": "QMake",
"bytes": "2020"
},
{
"name": "Roff",
"bytes": "3792"
},
{
"name": "Shell",
"bytes": "35223"
}
],
"symlink_target": ""
}
|
import numpy
from chainer import cuda
from chainer import optimizer
class AdaGrad(optimizer.GradientMethod):

    """Optimizer implementing the AdaGrad update rule.

    A per-parameter sum of squared gradients ``h`` is accumulated, and each
    step is scaled by ``lr / (sqrt(h) + eps)``.

    See: http://jmlr.org/papers/v12/duchi11a.html
    """

    def __init__(self, lr=0.001, eps=1e-8):
        self.lr = lr
        self.eps = eps

    def init_state(self, param, state):
        # Keep the accumulator on the same device (CPU/GPU) as the parameter.
        xp = cuda.get_array_module(param.data)
        state['h'] = xp.zeros_like(param.data)

    def update_one_cpu(self, param, state):
        g = param.grad
        state['h'] += g * g
        param.data -= self.lr * g / (numpy.sqrt(state['h']) + self.eps)

    def update_one_gpu(self, param, state):
        cuda.elementwise(
            'T grad, T lr, T eps',
            'T param, T h',
            '''h += grad * grad;
               param -= lr * grad / (sqrt(h) + eps);''',
            'adagrad')(param.grad, self.lr, self.eps,
                       param.data, state['h'])
|
{
"content_hash": "cf0683a4e29c75b8e2357dd48f2c2426",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 65,
"avg_line_length": 26.055555555555557,
"alnum_prop": 0.5341151385927505,
"repo_name": "cemoody/chainer",
"id": "cce2b592d88ae497e3fedfba6f44983363bbdfa1",
"size": "938",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "chainer/optimizers/ada_grad.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "561717"
}
],
"symlink_target": ""
}
|
"""Wrapper module for running eecli.main() from the command line."""
import os
import sys
# Fail fast on unsupported interpreters; this wrapper targets Python 2.6/2.7 only.
if not (2, 6) <= sys.version_info[:3] < (3,):
  sys.exit('earthengine requires python 2.6 or 2.7.')
def OutputAndExit(message):
  """Write *message* plus a newline to stderr, then terminate with status 1."""
  sys.stderr.write('%s\n' % message)
  sys.exit(1)
# Directory containing this wrapper; used below to locate bundled libraries.
EECLI_DIR = os.path.dirname(os.path.abspath(os.path.realpath(__file__)))
if not EECLI_DIR:
  OutputAndExit('Unable to determine where earthengine CLI is installed. Sorry,'
                ' cannot run correctly without this.\n')
# The wrapper script adds all third_party libraries to the Python path, since
# we don't assume any third party libraries are installed system-wide.
THIRD_PARTY_DIR = os.path.join(EECLI_DIR, 'third_party')
sys.path.insert(0, THIRD_PARTY_DIR)
def RunMain():
  """Import eecli (deferred until sys.path includes third_party) and exit with its status."""
  import eecli  # pylint: disable=g-import-not-at-top
  sys.exit(eecli.main())
if __name__ == '__main__':
RunMain()
|
{
"content_hash": "d39037e679f5d09467c3d6cbfd0c89af",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 80,
"avg_line_length": 28.483870967741936,
"alnum_prop": 0.6908267270668177,
"repo_name": "gena/earthengine-api",
"id": "36f3905480e01f66543631fa1ac355d5c34e64eb",
"size": "906",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "python/ee/cli/eecli_wrapper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "1778744"
},
{
"name": "Jupyter Notebook",
"bytes": "3844"
},
{
"name": "Python",
"bytes": "439487"
},
{
"name": "Shell",
"bytes": "1447"
}
],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import collections
import contextlib
import multiprocessing
import os
import subprocess
from pants.backend.jvm.tasks.jvm_task import JvmTask
from pants.base.exceptions import TaskError
from pants.binaries import binary_util
from pants.util.dirutil import safe_mkdir, safe_walk
Jvmdoc = collections.namedtuple('Jvmdoc', ['tool_name', 'product_type'])
class JvmdocGen(JvmTask):
  """Abstract task that generates API documentation for JVM targets.

  Subclasses supply a Jvmdoc configuration (tool name + product type) and a
  command factory; this base handles option registration, invalidation,
  combined-vs-individual generation, and product registration.
  """

  @classmethod
  def jvmdoc(cls):
    """Subclasses should return their Jvmdoc configuration."""
    raise NotImplementedError()

  @classmethod
  def register_options(cls, register):
    """Register doc-generation options, with help text parameterized by the tool name."""
    super(JvmdocGen, cls).register_options(register)
    tool_name = cls.jvmdoc().tool_name
    register('--include-codegen', type=bool,
             fingerprint=True,
             help='Create {0} for generated code.'.format(tool_name))
    register('--transitive', default=True, type=bool,
             fingerprint=True,
             help='Create {0} for the transitive closure of internal targets reachable from the '
                  'roots specified on the command line.'.format(tool_name))
    register('--combined', type=bool,
             fingerprint=True,
             help='Generate {0} for all targets combined, instead of each target '
                  'individually.'.format(tool_name))
    register('--open', type=bool,
             help='Open the generated {0} in a browser (implies --combined).'.format(tool_name))
    register('--ignore-failure', type=bool,
             fingerprint=True,
             help='Do not consider {0} errors to be build errors.'.format(tool_name))
    # TODO(John Sirois): This supports the JarPublish task and is an abstraction leak.
    # It allows folks doing a local-publish to skip an expensive and un-needed step.
    # Remove this flag and instead support conditional requirements being registered against
    # the round manager. This may require incremental or windowed flag parsing that happens bit by
    # bit as tasks are recursively prepared vs. the current all-at once style.
    register('--skip', type=bool,
             fingerprint=True,
             help='Skip {0} generation.'.format(tool_name))

  @classmethod
  def product_types(cls):
    return [cls.jvmdoc().product_type]

  def __init__(self, *args, **kwargs):
    super(JvmdocGen, self).__init__(*args, **kwargs)
    options = self.get_options()
    self._include_codegen = options.include_codegen
    self.transitive = options.transitive
    self.open = options.open
    # --open implies --combined: a browser needs a single index.html to land on.
    self.combined = self.open or options.combined
    self.ignore_failure = options.ignore_failure
    self.skip = options.skip

  def generate_doc(self, language_predicate, create_jvmdoc_command):
    """
    Generate an execute method given a language predicate and command to create documentation
    language_predicate: a function that accepts a target and returns True if the target is of that
      language
    create_jvmdoc_command: (classpath, directory, *targets) -> command (string) that will generate
      documentation documentation for targets
    """
    if self.skip:
      return
    # Per-target product mappings are incompatible with one combined output tree.
    catalog = self.context.products.isrequired(self.jvmdoc().product_type)
    if catalog and self.combined:
      raise TaskError(
          'Cannot provide {} target mappings for combined output'.format(self.jvmdoc().product_type))

    def docable(tgt):
      return language_predicate(tgt) and (self._include_codegen or not tgt.is_codegen)

    targets = self.context.targets(predicate=docable)
    if not targets:
      return

    with self.invalidated(targets) as invalidation_check:
      safe_mkdir(self.workdir)

      def find_jvmdoc_targets():
        # Only regenerate docs for invalidated targets; optionally restrict to roots.
        invalid_targets = set()
        for vt in invalidation_check.invalid_vts:
          invalid_targets.update(vt.targets)
        if self.transitive:
          return invalid_targets
        else:
          return set(invalid_targets).intersection(set(self.context.target_roots))

      jvmdoc_targets = list(find_jvmdoc_targets())
      if self.combined:
        self._generate_combined(jvmdoc_targets, create_jvmdoc_command)
      else:
        self._generate_individual(jvmdoc_targets, create_jvmdoc_command)

    if catalog:
      # Register every generated doc file (relative to its gendir) as a product.
      for target in targets:
        gendir = self._gendir(target)
        jvmdocs = []
        for root, dirs, files in safe_walk(gendir):
          jvmdocs.extend(os.path.relpath(os.path.join(root, f), gendir) for f in files)
        self.context.products.get(self.jvmdoc().product_type).add(target, gendir, jvmdocs)

  def _generate_combined(self, targets, create_jvmdoc_command):
    """Generate one combined doc tree for all *targets* under workdir/combined."""
    gendir = os.path.join(self.workdir, 'combined')
    if targets:
      classpath = self.classpath(targets)
      safe_mkdir(gendir, clean=True)
      command = create_jvmdoc_command(classpath, gendir, *targets)
      if command:
        self.context.log.debug("Running create_jvmdoc in {} with {}".format(gendir, " ".join(command)))
        result, gendir = create_jvmdoc(command, gendir)
        self._handle_create_jvmdoc_result(targets, result, command)
    if self.open:
      binary_util.ui_open(os.path.join(gendir, 'index.html'))

  def _generate_individual(self, targets, create_jvmdoc_command):
    """Generate a separate doc tree per target, fanned out over a process pool."""
    jobs = {}
    for target in targets:
      gendir = self._gendir(target)
      classpath = self.classpath([target])
      command = create_jvmdoc_command(classpath, gendir, target)
      if command:
        jobs[gendir] = (target, command)
    if jobs:
      with contextlib.closing(
            multiprocessing.Pool(processes=min(len(jobs), multiprocessing.cpu_count()))) as pool:
        # map would be a preferable api here but fails after the 1st batch with an internal:
        # ...
        # File "...src/python/pants/backend/jvm/tasks/jar_create.py", line 170, in javadocjar
        #     pool.map(createjar, jobs)
        #   File "...lib/python2.6/multiprocessing/pool.py", line 148, in map
        #     return self.map_async(func, iterable, chunksize).get()
        #   File "...lib/python2.6/multiprocessing/pool.py", line 422, in get
        #     raise self._value
        # NameError: global name 'self' is not defined
        futures = []
        self.context.log.debug("Begin multiprocessing section; output may be misordered or garbled")
        try:
          for gendir, (target, command) in jobs.items():
            self.context.log.debug("Running create_jvmdoc in {} with {}"
                                   .format(gendir, " ".join(command)))
            futures.append(pool.apply_async(create_jvmdoc, args=(command, gendir)))
          for future in futures:
            result, gendir = future.get()
            target, command = jobs[gendir]
            self._handle_create_jvmdoc_result([target], result, command)
        finally:
          # In the event of an exception, we want to call terminate() because otherwise
          # we get errors on exit when multiprocessing tries to do it, because what
          # is dead may never die.
          pool.terminate()
          self.context.log.debug("End multiprocessing section")

  def _handle_create_jvmdoc_result(self, targets, result, command):
    """Warn or raise (per --ignore-failure) when a doc command exits non-zero."""
    if result != 0:
      targetlist = ", ".join(map(str, targets))
      message = 'Failed to process {} for {} [{}]: {}'.format(
          self.jvmdoc().tool_name, targetlist, result, command)
      if self.ignore_failure:
        self.context.log.warn(message)
      else:
        raise TaskError(message)

  def _gendir(self, target):
    """Return the per-target output directory under this task's workdir."""
    return os.path.join(self.workdir, target.id)
def create_jvmdoc(command, gendir):
  """Run *command* to generate docs into a freshly cleaned *gendir*.

  Returns an (exit_code, gendir) tuple. An OSError (e.g. the doc tool binary
  is missing) is reported as exit code 1 rather than raised, since this runs
  inside a multiprocessing pool.
  """
  try:
    safe_mkdir(gendir, clean=True)
    return subprocess.Popen(command).wait(), gendir
  except OSError:
    return 1, gendir
|
{
"content_hash": "458c450b2bd5c2b244f5198c4f044fcf",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 103,
"avg_line_length": 39.24875621890547,
"alnum_prop": 0.6543288122702497,
"repo_name": "dbentley/pants",
"id": "7495d335291c7c2d3a1bdec85b165866201579c1",
"size": "8036",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/jvm/tasks/jvmdoc_gen.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "781"
},
{
"name": "CSS",
"bytes": "11572"
},
{
"name": "Cucumber",
"bytes": "919"
},
{
"name": "GAP",
"bytes": "2459"
},
{
"name": "Go",
"bytes": "1569"
},
{
"name": "HTML",
"bytes": "64699"
},
{
"name": "Java",
"bytes": "290988"
},
{
"name": "JavaScript",
"bytes": "31040"
},
{
"name": "Protocol Buffer",
"bytes": "3783"
},
{
"name": "Python",
"bytes": "4277407"
},
{
"name": "Scala",
"bytes": "84066"
},
{
"name": "Shell",
"bytes": "50882"
},
{
"name": "Thrift",
"bytes": "2898"
}
],
"symlink_target": ""
}
|
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-gapcoinrpc"))
from decimal import Decimal
import json
import shutil
import subprocess
import time
from gapcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
# Base localhost ports for the regtest cluster; node i uses BASE+i for each.
START_P2P_PORT=11000
START_RPC_PORT=11100
def check_json_precision():
    """Make sure json library being used does not lose precision converting GAP values"""
    value = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(value)))
    # 2000000000000003 is the exact satoshi count the float must survive as.
    if int(round_tripped * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def sync_blocks(rpc_connections):
    """
    Wait until everybody has the same block count
    """
    while True:
        heights = [conn.getblockcount() for conn in rpc_connections]
        # All equal to the first means all nodes agree.
        if heights.count(heights[0]) == len(heights):
            break
        time.sleep(1)
def sync_mempools(rpc_connections):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    while True:
        reference = set(rpc_connections[0].getrawmempool())
        # Count nodes (including node 0) whose mempool matches node 0's.
        matches = 1 + sum(
            1 for conn in rpc_connections[1:]
            if set(conn.getrawmempool()) == reference)
        if matches == len(rpc_connections):
            break
        time.sleep(1)
gapcoind_processes = []
def initialize_chain(test_dir):
    """
    Create (or copy from cache) a 200-block-long chain and
    4 wallets.
    gapcoind and gapcoin-cli must be in search path.
    """
    # Only build the chain once; subsequent runs reuse the cache/ directory.
    if not os.path.isdir(os.path.join("cache", "node0")):
        devnull = open("/dev/null", "w+")
        # Create cache directories, run gapcoinds:
        for i in range(4):
            datadir = os.path.join("cache", "node"+str(i))
            os.makedirs(datadir)
            with open(os.path.join(datadir, "gapcoin.conf"), 'w') as f:
                f.write("regtest=1\n");
                f.write("rpcuser=rt\n");
                f.write("rpcpassword=rt\n");
                f.write("port="+str(START_P2P_PORT+i)+"\n");
                f.write("rpcport="+str(START_RPC_PORT+i)+"\n");
            args = [ "gapcoind", "-keypool=1", "-datadir="+datadir ]
            # Nodes 1..3 connect back to node 0 so they form one network.
            if i > 0:
                args.append("-connect=127.0.0.1:"+str(START_P2P_PORT))
            gapcoind_processes.append(subprocess.Popen(args))
            # -rpcwait blocks until the node's RPC interface is ready.
            subprocess.check_call([ "gapcoin-cli", "-datadir="+datadir,
                                    "-rpcwait", "getblockcount"], stdout=devnull)
        devnull.close()
        rpcs = []
        for i in range(4):
            try:
                url = "http://rt:rt@127.0.0.1:%d"%(START_RPC_PORT+i,)
                rpcs.append(AuthServiceProxy(url))
            except:
                sys.stderr.write("Error connecting to "+url+"\n")
                sys.exit(1)
        # Create a 200-block-long chain; each of the 4 nodes
        # gets 25 mature blocks and 25 immature.
        for i in range(4):
            rpcs[i].setgenerate(True, 25)
        sync_blocks(rpcs)
        for i in range(4):
            rpcs[i].setgenerate(True, 25)
        sync_blocks(rpcs)
        # Shut them down, and remove debug.logs:
        stop_nodes(rpcs)
        wait_gapcoinds()
        for i in range(4):
            os.remove(debug_log("cache", i))
    # Seed the test directory with a copy of each cached node datadir.
    for i in range(4):
        from_dir = os.path.join("cache", "node"+str(i))
        to_dir = os.path.join(test_dir, "node"+str(i))
        shutil.copytree(from_dir, to_dir)
def start_nodes(num_nodes, dir):
    """Launch *num_nodes* gapcoinds from dir/node<i> and return their RPC proxies."""
    # Start gapcoinds, and wait for RPC interface to be up and running:
    devnull = open("/dev/null", "w+")
    for i in range(num_nodes):
        datadir = os.path.join(dir, "node"+str(i))
        args = [ "gapcoind", "-datadir="+datadir ]
        gapcoind_processes.append(subprocess.Popen(args))
        # -rpcwait blocks until the node answers RPC, so the proxies below work.
        subprocess.check_call([ "gapcoin-cli", "-datadir="+datadir,
                                "-rpcwait", "getblockcount"], stdout=devnull)
    devnull.close()
    # Create&return JSON-RPC connections
    rpc_connections = []
    for i in range(num_nodes):
        url = "http://rt:rt@127.0.0.1:%d"%(START_RPC_PORT+i,)
        rpc_connections.append(AuthServiceProxy(url))
    return rpc_connections
def debug_log(dir, n_node):
    """Return the path to node *n_node*'s regtest debug.log under *dir*."""
    node_dir = "node" + str(n_node)
    return os.path.join(dir, node_dir, "regtest", "debug.log")
def stop_nodes(nodes):
    """Issue a stop RPC to every node, then empty the list in place."""
    for node in nodes:
        node.stop()
    nodes[:] = []  # Emptying array closes connections as a side effect
def wait_gapcoinds():
    """Block until every spawned gapcoind has exited, then forget the handles."""
    for proc in gapcoind_processes:
        proc.wait()
    gapcoind_processes[:] = []
def connect_nodes(from_connection, node_num):
    """Ask *from_connection* to attempt a one-shot connection to local node *node_num*."""
    ip_port = "127.0.0.1:" + str(START_P2P_PORT + node_num)
    from_connection.addnode(ip_port, "onetry")
def assert_equal(thing1, thing2):
    """Raise AssertionError (with both values in the message) unless equal."""
    if thing1 == thing2:
        return
    raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
|
{
"content_hash": "36fd1c85c0fa280fced46ec86351f70b",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 94,
"avg_line_length": 34.55555555555556,
"alnum_prop": 0.5886254019292605,
"repo_name": "gapcoin/gapcoin",
"id": "f1f77207d615e8ff1bef9c62c9afe735d7f39054",
"size": "5260",
"binary": false,
"copies": "1",
"ref": "refs/heads/v0.9.2-gap",
"path": "qa/rpc-tests/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "49034"
},
{
"name": "C++",
"bytes": "3038298"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Makefile",
"bytes": "8732"
},
{
"name": "Objective-C",
"bytes": "1052"
},
{
"name": "Objective-C++",
"bytes": "6330"
},
{
"name": "Python",
"bytes": "110326"
},
{
"name": "Shell",
"bytes": "41948"
}
],
"symlink_target": ""
}
|
"""Module for testing the request review command."""
import os
from shutil import rmtree
from subprocess import PIPE
from subprocess import Popen
import unittest
from brokertest import TestBrokerCommand
if __name__ == "__main__":
import utils
utils.import_depends()
class TestRequestReview(TestBrokerCommand):
    """End-to-end test of the review-request workflow against sandbox reviewtest1.

    Test methods are ordered by their numeric prefix: make a change, publish,
    verify review records against both the prod domain and a fresh test domain,
    then clean up.
    """

    @classmethod
    def setUpClass(cls):
        super(TestRequestReview, cls).setUpClass()
        # Run "make clean" on templates before anything else.
        testdir = os.path.join(cls.sandboxdir, "reviewtest1", "t")
        if os.path.exists(os.path.join(testdir, "Makefile")):
            p = Popen(('/usr/bin/make', 'clean'),
                      cwd=testdir, env=cls.gitenv(
                          env={'PATH': '/bin:/usr/bin'}),
                      stdout=PIPE, stderr=PIPE)
            (out, err) = p.communicate()
            cls.assertEqual(p.returncode, 0,
                            "Non-zero return code running "
                            "make clean in sandbox, "
                            "STDOUT:\n@@@'{}'\n@@@\nSTDERR:\n@@@'{}'@@@\n"
                            .format(out, err))

    def test_111_make_change(self):
        # Append a marker line to a template and commit it in the sandbox.
        sandboxdir = os.path.join(self.sandboxdir, "reviewtest1")
        template = self.find_template("aquilon", "archetype", "base",
                                      sandbox="reviewtest1")
        with open(template) as f:
            contents = f.readlines()
        contents.append("#Added by unittest\n")
        with open(template, 'w') as f:
            f.writelines(contents)
        self.gitcommand(["commit", "-a", "-m", "added unittest comment"],
                        cwd=sandboxdir)

    def test_121_publish_reviewtest1_sandbox(self):
        sandboxdir = os.path.join(self.sandboxdir, "reviewtest1")
        self.successtest(["publish", "--branch", "reviewtest1"],
                         env=self.gitenv(), cwd=sandboxdir)
        # FIXME: Check the branch on the broker directly?

    def test_131_publish_reviewtest1_sandbox_no_review_created(self):
        # Publishing alone must not create a review against prod.
        command = ["show_review",
                   "--source", "reviewtest1",
                   "--target", "prod"]
        self.notfoundtest(command)

    def test_141_verify_reviewtest1(self):
        # Re-fetch the sandbox from scratch and confirm the committed change survived.
        sandboxdir = os.path.join(self.sandboxdir, "reviewtest1")
        p = Popen(["/bin/rm", "-rf", sandboxdir], stdout=1, stderr=2)
        p.wait()
        self.successtest(["get", "--sandbox", "reviewtest1"])
        self.assertTrue(os.path.exists(sandboxdir))
        template = self.find_template("aquilon", "archetype", "base",
                                      sandbox="reviewtest1")
        self.assertTrue(os.path.exists(template),
                        "aq get did not retrive '%s'" % template)
        with open(template) as f:
            contents = f.readlines()
        self.assertEqual(contents[-1], "#Added by unittest\n")

    def test_151_show_review(self):
        review_head = self.head_commit("reviewtest1")
        command = ["show_review",
                   "--source", "reviewtest1",
                   "--target", "prod"]
        out = self.commandtest(command)
        self.output_equals(out, """
            Review request
              Target Domain: prod
              Source Sandbox: reviewtest1
                Published Commit: %s
              Code Review URL: TEST_GERRIT_PR_URL
              Testing Status: Untested
              Approval Status: No decision
            """ % review_head,
                           command)

    def test_161_add_reviewtest_domain(self):
        command = ["add_domain",
                   "--domain", "reviewtestdomain",
                   "--start", "prod"] + self.valid_just_tcm
        self.successtest(command)

    def test_171_reviewtest1_sandbox_no_review_created(self):
        # The new domain starts with no review attached to this sandbox.
        command = ["show_review",
                   "--source", "reviewtest1",
                   "--target", "reviewtestdomain"]
        self.notfoundtest(command)

    def test_181_request_review(self):
        command = ["request_review",
                   "--source", "reviewtest1",
                   "--target", "reviewtestdomain"]
        self.successtest(command)

    def test_191_show_review(self):
        review_head = self.head_commit("reviewtest1")
        command = ["show_review",
                   "--source", "reviewtest1",
                   "--target", "reviewtestdomain"]
        out = self.commandtest(command)
        self.output_equals(out, """
            Review request
              Target Domain: reviewtestdomain
              Source Sandbox: reviewtest1
                Published Commit: %s
              Code Review URL: TEST_GERRIT_PR_URL
              Testing Status: Untested
              Approval Status: No decision
            """ % review_head,
                           command)

    def test_999_cleanup(self):
        self.statustest(["del_sandbox", "--sandbox", "reviewtest1"])
        sandboxdir = os.path.join(self.sandboxdir, "reviewtest1")
        rmtree(sandboxdir, ignore_errors=True)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestRequestReview)
unittest.TextTestRunner(verbosity=2).run(suite)
|
{
"content_hash": "d1507c12506f7db08b58111f5e47e43f",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 74,
"avg_line_length": 37.97080291970803,
"alnum_prop": 0.5486351403306421,
"repo_name": "quattor/aquilon",
"id": "7a8a9572cae2af1407d3f055a629d3fabf58a72b",
"size": "5907",
"binary": false,
"copies": "1",
"ref": "refs/heads/upstream",
"path": "tests/broker/test_request_review.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "1823"
},
{
"name": "Makefile",
"bytes": "5732"
},
{
"name": "Mako",
"bytes": "4178"
},
{
"name": "PLSQL",
"bytes": "102109"
},
{
"name": "PLpgSQL",
"bytes": "8091"
},
{
"name": "Pan",
"bytes": "1058"
},
{
"name": "Perl",
"bytes": "6057"
},
{
"name": "Python",
"bytes": "5884984"
},
{
"name": "SQLPL",
"bytes": "869"
},
{
"name": "Shell",
"bytes": "33547"
},
{
"name": "Smarty",
"bytes": "4603"
}
],
"symlink_target": ""
}
|
import os
# Map of f2py extension-module name -> dict(directory=..., sources=[...]).
# Source paths are relative to each module's directory.
extension_modules = {}

directory = 'src/xxdata_11'
sources = ['xxdata_11.for', 'xxrptn.for', 'i4unit.for',
    'i4fctn.for', 'xxword.for', 'xxcase.for', 'xfelem.for', 'xxslen.for',
    '../xxdata_11.pyf', '../helper_functions.for']
extension_modules['_xxdata_11'] = dict(sources=sources, directory=directory)

directory = 'src/xxdata_15'
sources = ['xxdata_15.for', 'xxrptn.for', 'xxmkrp.for', 'i4unit.for',
    'i4fctn.for', 'r8fctn.for', 'xxhkey.for', 'xxword.for', 'xxcase.for',
    'i4eiz0.for', 'xfelem.for', 'xxslen.for',
    '../xxdata_15.pyf', '../helper_functions.for']
extension_modules['_xxdata_15'] = dict(sources=sources, directory=directory)
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils Configuration with all Fortran extensions registered."""
    from numpy.distutils.misc_util import Configuration
    config = Configuration('atomic', parent_package, top_path)
    for name, spec in extension_modules.items():
        base_dir = spec['directory']
        src_paths = [os.path.join(base_dir, src) for src in spec['sources']]
        config.add_extension(name, src_paths)
    return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
{
"content_hash": "e98a28ceb9ab159659d9e8a3fe8e4e07",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 76,
"avg_line_length": 34.05555555555556,
"alnum_prop": 0.6598694942903752,
"repo_name": "cfe316/atomic",
"id": "08eb55efbe4f14754abf2f7e5f8d2d115478c432",
"size": "1226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Fortran",
"bytes": "298"
},
{
"name": "Python",
"bytes": "87289"
}
],
"symlink_target": ""
}
|
import intrepyd as ip
import intrepyd.components
import intrepyd.trace
if __name__ == "__main__":
    # Build a saturating 8-bit counter with limit 10 and simulate 12 steps.
    ctx = ip.Context()
    int8type = ctx.mk_int8_type()
    ten = ctx.mk_number("10", int8type)
    counter, Q = ip.components.mk_counter(ctx, "counter", type=int8type, limit=ten)
    simulator = ctx.mk_simulator()
    tr = ctx.mk_trace()
    simulator.add_watch(counter)
    simulator.add_watch(Q)
    simulator.simulate(tr, 12)
    df = tr.get_as_dataframe(ctx.net2name)
    # print() works on both Python 2 and 3; the bare `print df` statement
    # was a SyntaxError under Python 3.
    print(df)
|
{
"content_hash": "0b59e1d6e52004906b999f05e47c3016",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 83,
"avg_line_length": 30.375,
"alnum_prop": 0.6604938271604939,
"repo_name": "formalmethods/intrepyd",
"id": "8b93671d2e6619bfafd2d47307370ade7639e03c",
"size": "486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/counter/counter_exe.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ANTLR",
"bytes": "29134"
},
{
"name": "Java",
"bytes": "82352"
},
{
"name": "Python",
"bytes": "844923"
},
{
"name": "Shell",
"bytes": "1049"
}
],
"symlink_target": ""
}
|
"""Tests for the Griddy Power integration."""
|
{
"content_hash": "5e5b9f6a82aa093ca94d419dd577898c",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 45,
"avg_line_length": 46,
"alnum_prop": 0.717391304347826,
"repo_name": "robbiet480/home-assistant",
"id": "415ddc3ba5cd9c3c98f5096747d08093fa72083a",
"size": "46",
"binary": false,
"copies": "13",
"ref": "refs/heads/dev",
"path": "tests/components/griddy/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18837456"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class YValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the contourcarpet colorbar's ``y`` position."""

    def __init__(self, plotly_name="y", parent_name="contourcarpet.colorbar", **kwargs):
        # Callers may override these; otherwise supply the schema defaults.
        edit_type = kwargs.pop("edit_type", "colorbars")
        upper_bound = kwargs.pop("max", 3)
        lower_bound = kwargs.pop("min", -2)
        super(YValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            max=upper_bound,
            min=lower_bound,
            **kwargs,
        )
|
{
"content_hash": "080199e45785bc4dd35ac1e718250f9f",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 88,
"avg_line_length": 36.61538461538461,
"alnum_prop": 0.5882352941176471,
"repo_name": "plotly/plotly.py",
"id": "d18682972c6bcff5840ede870f27c365208918d0",
"size": "476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/contourcarpet/colorbar/_y.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
from neutron_lib.api import extensions
from neutron_lib import constants
from neutron._i18n import _
# Fake extension attributes merged into the core network/subnet/port resources
# for tests; each is writable, visible, policy-enforced, and unset by default.
EXTENDED_ATTRIBUTES_2_0 = {
    'networks': {
        'network_extension': {'allow_post': True,
                              'allow_put': True,
                              'default': constants.ATTR_NOT_SPECIFIED,
                              'is_visible': True,
                              'enforce_policy': True},
    },
    'subnets': {
        'subnet_extension': {'allow_post': True,
                             'allow_put': True,
                             'default': constants.ATTR_NOT_SPECIFIED,
                             'is_visible': True,
                             'enforce_policy': True},
    },
    'ports': {
        'port_extension': {'allow_post': True,
                           'allow_put': True,
                           'default': constants.ATTR_NOT_SPECIFIED,
                           'is_visible': True,
                           'enforce_policy': True},
    },
}
class Fake_extension(extensions.ExtensionDescriptor):
    """Test-only extension descriptor that adds fake attributes to core resources."""

    @classmethod
    def get_name(cls):
        return "ML2 fake extension"

    @classmethod
    def get_alias(cls):
        return "fake_extension"

    @classmethod
    def get_description(cls):
        return _("Adds test attributes to core resources.")

    @classmethod
    def get_updated(cls):
        return "2014-07-16T10:00:00-00:00"

    def get_extended_resources(self, version):
        # Only the 2.0 API carries the fake attribute map.
        return EXTENDED_ATTRIBUTES_2_0 if version == "2.0" else {}
|
{
"content_hash": "391b676a76a4c7d230ed62ebbc94ed0b",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 70,
"avg_line_length": 29.037037037037038,
"alnum_prop": 0.4897959183673469,
"repo_name": "eayunstack/neutron",
"id": "7196996dd342a43f1de70a701408f99d95bf22ac",
"size": "2141",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/plugins/ml2/extensions/fake_extension.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "10593193"
},
{
"name": "Shell",
"bytes": "8804"
}
],
"symlink_target": ""
}
|
import unittest
import logging
import Configuration
from . import GeoNamesTestCase
''' Test suite for all tools in the Sun Position Analysis Tools toolbox '''
def getTestSuite():
    """Build and return the unittest suite with the GeoNames test case.

    Emits a trace line when Configuration.DEBUG is enabled.
    """
    # Truthiness test instead of "== True" (PEP 8: never compare to True).
    if Configuration.DEBUG:
        print("      GeoNamesTestSuite.getSuite")
    testSuite = unittest.TestSuite()
    # Add the GeoNames tests.  (This was previously a bare string literal,
    # which is a no-op expression statement, not a comment.)
    loader = unittest.TestLoader()
    testSuite.addTest(loader.loadTestsFromTestCase(GeoNamesTestCase.GeoNamesTestCase))
    return testSuite
|
{
"content_hash": "f16a9a115f24d2c36a322268e001e708",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 86,
"avg_line_length": 22.347826086956523,
"alnum_prop": 0.6926070038910506,
"repo_name": "Esri/solutions-geoprocessing-toolbox",
"id": "13dab69e0724f3ed4a361343488321952dcd894a",
"size": "1255",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "utils/test/geonames_tests/GeoNamesToolsTestSuite.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "13223"
},
{
"name": "Python",
"bytes": "464067"
}
],
"symlink_target": ""
}
|
from oslo_serialization import jsonutils as json
from tempest.lib.common import rest_client
from tempest.lib.services.network import base
class NetworkVersionsClient(base.BaseNetworkClient):
    """Client for Neutron's unversioned version-discovery endpoints."""

    def _get_and_parse(self, url):
        """GET *url*, require a 200 response, return the parsed JSON body.

        Shared by list_versions()/show_version(); both previously duplicated
        this request/parse/validate sequence line for line.
        """
        resp, body = self.get(url)
        body = json.loads(body)
        self.expected_success(200, resp.status)
        return rest_client.ResponseBody(resp, body)

    def list_versions(self):
        """Do a GET / to fetch available API version information.

        For more information, please refer to the official API reference:
        https://docs.openstack.org/api-ref/network/v2/index.html#list-api-versions
        """
        # Note: we do a self.get('/') here because we want to use
        # an unversioned URL, not "v2/$project_id/".
        return self._get_and_parse('/')

    def show_version(self, version):
        """Do a GET /<version> to fetch available resources.

        For more information, please refer to the official API reference:
        https://docs.openstack.org/api-ref/network/v2/index.html#show-api-v2-details
        """
        return self._get_and_parse(version + '/')
{
"content_hash": "7a4f879bd571c0a086fca347b32f6786",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 84,
"avg_line_length": 36.24242424242424,
"alnum_prop": 0.6630434782608695,
"repo_name": "openstack/tempest",
"id": "5aa79646a3e92db2a29d919f37cec509f19fc5a2",
"size": "1823",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tempest/lib/services/network/versions_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5364077"
},
{
"name": "Shell",
"bytes": "8684"
}
],
"symlink_target": ""
}
|
import io
import json
import mimetypes
import os
import warnings
from abc import ABC, abstractmethod
from multidict import CIMultiDict
from . import hdrs
from .helpers import (PY_36, content_disposition_header, guess_filename,
parse_mimetype, sentinel)
from .streams import DEFAULT_LIMIT
# Public API of aiohttp.payload re-exported via "from aiohttp.payload import *".
__all__ = ('PAYLOAD_REGISTRY', 'get_payload', 'payload_type', 'Payload',
           'BytesPayload', 'StringPayload',
           'IOBasePayload', 'BytesIOPayload', 'BufferedReaderPayload',
           'TextIOPayload', 'StringIOPayload', 'JsonPayload')
# Raw bytes bodies larger than 1 MiB trigger a ResourceWarning in
# BytesPayload: writing them in one go can block the event loop.
TOO_LARGE_BYTES_BODY = 2 ** 20
class LookupError(Exception):
    """Raised by PayloadRegistry.get() when no registered factory matches.

    NOTE: deliberately shadows the builtin ``LookupError`` within this module.
    """
    pass
def get_payload(data, *args, **kwargs):
    """Adapt *data* into a Payload via the module-level registry."""
    registry = PAYLOAD_REGISTRY
    return registry.get(data, *args, **kwargs)
def register_payload(factory, type):
    """Register *factory* for instances of *type* in the global registry."""
    registry = PAYLOAD_REGISTRY
    registry.register(factory, type)
class payload_type:
    """Class decorator factory: ``@payload_type(T)`` registers the decorated
    payload class as the factory for instances of ``T``.
    """
    def __init__(self, type):
        # Stored type is matched via isinstance() in PayloadRegistry.get().
        self.type = type
    def __call__(self, factory):
        register_payload(factory, self.type)
        # Return the class unchanged so decoration is transparent.
        return factory
class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    def __init__(self):
        self._registry = []

    def get(self, data, *args, **kwargs):
        """Return *data* as-is if it is already a Payload, else adapt it
        with the first registered factory whose type matches."""
        if isinstance(data, Payload):
            return data
        for factory, accepted_type in self._registry:
            if isinstance(data, accepted_type):
                return factory(data, *args, **kwargs)
        raise LookupError()

    def register(self, factory, type):
        """Append (factory, type); lookup order is registration order."""
        self._registry.append((factory, type))
class Payload(ABC):
    """Abstract base for all payload adapters.

    Subclasses set ``_size`` when it is known and implement ``write``.
    """
    _size = None
    _headers = None
    _content_type = 'application/octet-stream'
    def __init__(self, value, *, headers=None, content_type=sentinel,
                 filename=None, encoding=None, **kwargs):
        self._value = value
        self._encoding = encoding
        self._filename = filename
        if headers is not None:
            self._headers = CIMultiDict(headers)
            # sentinel (not None) marks "caller did not pass content_type",
            # so an explicit None is preserved while an omitted argument can
            # fall back to the Content-Type header supplied in *headers*.
            if content_type is sentinel and hdrs.CONTENT_TYPE in self._headers:
                content_type = self._headers[hdrs.CONTENT_TYPE]
        if content_type is sentinel:
            content_type = None
        self._content_type = content_type
    @property
    def size(self):
        """Size of the payload."""
        return self._size
    @property
    def filename(self):
        """Filename of the payload."""
        return self._filename
    @property
    def headers(self):
        """Custom item headers"""
        return self._headers
    @property
    def encoding(self):
        """Payload encoding"""
        return self._encoding
    @property
    def content_type(self):
        """Content type"""
        if self._content_type is not None:
            return self._content_type
        elif self._filename is not None:
            # Guess from the filename extension; fall back to octet-stream.
            mime = mimetypes.guess_type(self._filename)[0]
            return 'application/octet-stream' if mime is None else mime
        else:
            # Class-level default ('application/octet-stream').
            return Payload._content_type
    def set_content_disposition(self, disptype, quote_fields=True, **params):
        """Sets ``Content-Disposition`` header."""
        if self._headers is None:
            self._headers = CIMultiDict()
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, **params)
    @abstractmethod
    async def write(self, writer):
        """Write payload.
        writer is an AbstractStreamWriter instance:
        """
class BytesPayload(Payload):
    """Payload backed by an in-memory bytes-like object."""

    def __init__(self, value, *args, **kwargs):
        assert isinstance(value, (bytes, bytearray, memoryview)), \
            "value argument must be byte-ish (%r)" % type(value)
        kwargs.setdefault('content_type', 'application/octet-stream')
        super().__init__(value, *args, **kwargs)
        self._size = len(value)
        if self._size > TOO_LARGE_BYTES_BODY:
            # warnings.warn(source=...) only exists on 3.6+.
            warn_kwargs = {'source': self} if PY_36 else {}
            warnings.warn("Sending a large body directly with raw bytes might"
                          " lock the event loop. You should probably pass an "
                          "io.BytesIO object instead", ResourceWarning,
                          **warn_kwargs)

    async def write(self, writer):
        """Write the whole buffer in one call."""
        await writer.write(self._value)
class StringPayload(BytesPayload):
    """Payload for text: encodes *value* using an explicit or derived charset."""

    def __init__(self, value, *args,
                 encoding=None, content_type=None, **kwargs):
        # Resolve the (encoding, content_type) pair from whichever was given.
        if encoding is not None:
            if content_type is None:
                content_type = 'text/plain; charset=%s' % encoding
        elif content_type is None:
            # Neither given: default to UTF-8 plain text.
            encoding = 'utf-8'
            content_type = 'text/plain; charset=utf-8'
        else:
            # Only content_type given: pull the charset out of it.
            mimetype = parse_mimetype(content_type)
            encoding = mimetype.parameters.get('charset', 'utf-8')
        super().__init__(
            value.encode(encoding),
            encoding=encoding, content_type=content_type, *args, **kwargs)
class StringIOPayload(StringPayload):
    """Payload that eagerly drains a StringIO-like object at construction."""

    def __init__(self, value, *args, **kwargs):
        text = value.read()
        super().__init__(text, *args, **kwargs)
class IOBasePayload(Payload):
    """Payload streamed in chunks from a binary file-like object."""

    def __init__(self, value, disposition='attachment', *args, **kwargs):
        if 'filename' not in kwargs:
            kwargs['filename'] = guess_filename(value)
        super().__init__(value, *args, **kwargs)
        # Only advertise a Content-Disposition when we actually know a name.
        if self._filename is not None and disposition is not None:
            self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer):
        """Copy the file to *writer*; always close the source afterwards."""
        try:
            while True:
                chunk = self._value.read(DEFAULT_LIMIT)
                if not chunk:
                    break
                await writer.write(chunk)
        finally:
            self._value.close()
class TextIOPayload(IOBasePayload):
    """Payload streamed from a text-mode file object, encoded on the fly."""

    def __init__(self, value, *args,
                 encoding=None, content_type=None, **kwargs):
        # Same charset resolution as StringPayload.
        if encoding is not None:
            if content_type is None:
                content_type = 'text/plain; charset=%s' % encoding
        elif content_type is None:
            encoding = 'utf-8'
            content_type = 'text/plain; charset=utf-8'
        else:
            mimetype = parse_mimetype(content_type)
            encoding = mimetype.parameters.get('charset', 'utf-8')
        super().__init__(
            value,
            content_type=content_type, encoding=encoding, *args, **kwargs)

    @property
    def size(self):
        """Bytes remaining on disk past the current position, or None if the
        object has no real file descriptor."""
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    async def write(self, writer):
        """Stream the file, encoding each chunk; close the source at the end."""
        try:
            while True:
                chunk = self._value.read(DEFAULT_LIMIT)
                if not chunk:
                    break
                await writer.write(chunk.encode(self._encoding))
        finally:
            self._value.close()
class BytesIOPayload(IOBasePayload):
    """Payload over an in-memory BytesIO; size is what remains unread."""

    @property
    def size(self):
        here = self._value.tell()
        total = self._value.seek(0, os.SEEK_END)
        # Restore the original position before reporting.
        self._value.seek(here)
        return total - here
class BufferedReaderPayload(IOBasePayload):
    """Payload over a buffered binary reader backed by a real file."""

    @property
    def size(self):
        try:
            file_size = os.fstat(self._value.fileno()).st_size
            return file_size - self._value.tell()
        except OSError:
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            return None
class JsonPayload(BytesPayload):
    """Payload that serializes a Python object to JSON bytes."""

    def __init__(self, value,
                 encoding='utf-8', content_type='application/json',
                 dumps=json.dumps, *args, **kwargs):
        serialized = dumps(value).encode(encoding)
        super().__init__(
            serialized,
            content_type=content_type, encoding=encoding, *args, **kwargs)
# Module-level registry used by get_payload()/register_payload().
PAYLOAD_REGISTRY = PayloadRegistry()
# Registration order matters: PayloadRegistry.get() returns the first
# isinstance() match, so specific types come first and io.IOBase acts as the
# catch-all fallback registered last.
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(
    BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
|
{
"content_hash": "0754ccc14179eef14529c8f15a9f5053",
"timestamp": "",
"source": "github",
"line_count": 289,
"max_line_length": 79,
"avg_line_length": 29.058823529411764,
"alnum_prop": 0.5915694212907835,
"repo_name": "pfreixes/aiohttp",
"id": "a43e63798644e38dd9419b223460a61585ce45e3",
"size": "8398",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "aiohttp/payload.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "838"
},
{
"name": "Gherkin",
"bytes": "1819"
},
{
"name": "Makefile",
"bytes": "3179"
},
{
"name": "Python",
"bytes": "1351017"
},
{
"name": "Shell",
"bytes": "2846"
}
],
"symlink_target": ""
}
|
import datetime
import json
from staaxe.models import App, Connection, ConnectionInfo, Payload
def export_to_json(appname):
    """Print (to stdout) a compact JSON dump of every connection of the app
    named *appname*, including its payloads.

    Output schema:
      connections: [{ds, de, p: [(t, key, value), ...], nm}, ...]
      metadata:    {first_connection_date}
    If the app has no connections, first_connection_date is "now".
    """
    app = App.get(App.name == appname)
    ret_obj = {'connections': [], 'metadata': {}}
    first_connection_date = datetime.datetime.now()
    for connection in app.connection_set.order_by(Connection.date_start):
        ret_payloads = []
        payloads = connection.payload_set.order_by(Payload.time_since_connection.asc())
        # enumerate() was used here but msg_idx was never read; plain iteration.
        for payload in payloads:
            ret_payloads.append((
                int(payload.time_since_connection),
                payload.key,
                payload.value
            ))
        ret_obj["connections"].append({
            "ds": str(connection.date_start),
            "de": str(connection.date_end),
            "p": ret_payloads,
            "nm": connection.messages_count
        })
        first_connection_date = min(first_connection_date, connection.date_start)
    ret_obj["metadata"]["first_connection_date"] = str(first_connection_date)
    print(json.dumps(ret_obj, separators=(",", ":")))
|
{
"content_hash": "40d7564faf2cbad320172c151c7af60a",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 87,
"avg_line_length": 29.945945945945947,
"alnum_prop": 0.6083032490974729,
"repo_name": "sebbernery/staaxe-server",
"id": "301aed5be1ebfdc66d74b28b197eb27ca37833f1",
"size": "1108",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/export_to_json.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "7902"
}
],
"symlink_target": ""
}
|
import os
import bdb
import types
from tkinter import *
from idlelib.WindowList import ListedToplevel
from idlelib.ScrolledList import ScrolledList
from idlelib import macosxSupport
class Idb(bdb.Bdb):
    """bdb.Bdb subclass that forwards debugger stop events to the IDLE GUI."""

    def __init__(self, gui):
        self.gui = gui
        bdb.Bdb.__init__(self)

    def user_line(self, frame):
        """Called when the debugger stops at a line: notify the GUI, unless
        we are inside RPC plumbing, in which case keep stepping."""
        if self.in_rpc_code(frame):
            self.set_step()
            return
        message = self.__frame2message(frame)
        self.gui.interaction(message, frame)

    def user_exception(self, frame, info):
        """Called when an exception occurs: notify the GUI with exc info."""
        if self.in_rpc_code(frame):
            self.set_step()
            return
        message = self.__frame2message(frame)
        self.gui.interaction(message, frame, info)

    def in_rpc_code(self, frame):
        """Return True if *frame* is executing inside rpc.py plumbing.

        Walks up the call stack recursively.  Previously this crashed with
        AttributeError when the recursion reached the outermost frame
        (frame.f_back is None); now it returns False in that case.
        """
        if frame.f_code.co_filename.count('rpc.py'):
            return True
        prev_frame = frame.f_back
        if prev_frame is None:
            # Reached the top of the stack without finding rpc.py.
            return False
        if prev_frame.f_code.co_filename.count('Debugger.py'):
            # (that test will catch both Debugger.py and RemoteDebugger.py)
            return False
        return self.in_rpc_code(prev_frame)

    def __frame2message(self, frame):
        """Return a short "file.py:lineno: func()" label for *frame*."""
        code = frame.f_code
        filename = code.co_filename
        lineno = frame.f_lineno
        basename = os.path.basename(filename)
        message = "%s:%s" % (basename, lineno)
        if code.co_name != "?":
            message = "%s: %s()" % (message, code.co_name)
        return message
class Debugger:
    """Tk "Debug Control" window driving an Idb instance for IDLE.

    The BooleanVars below are stored on the class so view toggles persist
    across debugger sessions within one IDLE run.
    """
    vstack = vsource = vlocals = vglobals = None
    def __init__(self, pyshell, idb=None):
        if idb is None:
            idb = Idb(self)
        self.pyshell = pyshell
        self.idb = idb
        self.frame = None
        self.make_gui()
        self.interacting = 0
    def run(self, *args):
        """Run code under the debugger; interacting guards against close()."""
        try:
            self.interacting = 1
            return self.idb.run(*args)
        finally:
            self.interacting = 0
    def close(self, event=None):
        """Close the debugger window (refused while a run is in progress)."""
        if self.interacting:
            self.top.bell()
            return
        if self.stackviewer:
            self.stackviewer.close(); self.stackviewer = None
        # Clean up pyshell if user clicked debugger control close widget.
        # (Causes a harmless extra cycle through close_debugger() if user
        # toggled debugger from pyshell Debug menu)
        self.pyshell.close_debugger()
        # Now close the debugger control window....
        self.top.destroy()
    def make_gui(self):
        """Build the Debug Control toplevel: buttons, view checkboxes and
        the stack/locals/globals frames."""
        pyshell = self.pyshell
        self.flist = pyshell.flist
        self.root = root = pyshell.root
        self.top = top = ListedToplevel(root)
        self.top.wm_title("Debug Control")
        self.top.wm_iconname("Debug")
        top.wm_protocol("WM_DELETE_WINDOW", self.close)
        self.top.bind("<Escape>", self.close)
        #
        self.bframe = bframe = Frame(top)
        self.bframe.pack(anchor="w")
        self.buttons = bl = []
        #
        self.bcont = b = Button(bframe, text="Go", command=self.cont)
        bl.append(b)
        self.bstep = b = Button(bframe, text="Step", command=self.step)
        bl.append(b)
        self.bnext = b = Button(bframe, text="Over", command=self.next)
        bl.append(b)
        self.bret = b = Button(bframe, text="Out", command=self.ret)
        bl.append(b)
        # NOTE(review): self.bret is reassigned here, so the "Out" button
        # reference is lost; harmless since bret is never read elsewhere.
        self.bret = b = Button(bframe, text="Quit", command=self.quit)
        bl.append(b)
        #
        for b in bl:
            b.configure(state="disabled")
            b.pack(side="left")
        #
        self.cframe = cframe = Frame(bframe)
        self.cframe.pack(side="left")
        #
        if not self.vstack:
            self.__class__.vstack = BooleanVar(top)
            self.vstack.set(1)
        self.bstack = Checkbutton(cframe,
            text="Stack", command=self.show_stack, variable=self.vstack)
        self.bstack.grid(row=0, column=0)
        if not self.vsource:
            self.__class__.vsource = BooleanVar(top)
        self.bsource = Checkbutton(cframe,
            text="Source", command=self.show_source, variable=self.vsource)
        self.bsource.grid(row=0, column=1)
        if not self.vlocals:
            self.__class__.vlocals = BooleanVar(top)
            self.vlocals.set(1)
        self.blocals = Checkbutton(cframe,
            text="Locals", command=self.show_locals, variable=self.vlocals)
        self.blocals.grid(row=1, column=0)
        if not self.vglobals:
            self.__class__.vglobals = BooleanVar(top)
        self.bglobals = Checkbutton(cframe,
            text="Globals", command=self.show_globals, variable=self.vglobals)
        self.bglobals.grid(row=1, column=1)
        #
        self.status = Label(top, anchor="w")
        self.status.pack(anchor="w")
        self.error = Label(top, anchor="w")
        self.error.pack(anchor="w", fill="x")
        self.errorbg = self.error.cget("background")
        #
        self.fstack = Frame(top, height=1)
        self.fstack.pack(expand=1, fill="both")
        self.flocals = Frame(top)
        self.flocals.pack(expand=1, fill="both")
        self.fglobals = Frame(top, height=1)
        self.fglobals.pack(expand=1, fill="both")
        #
        if self.vstack.get():
            self.show_stack()
        if self.vlocals.get():
            self.show_locals()
        if self.vglobals.get():
            self.show_globals()
    def interaction(self, message, frame, info=None):
        """Called by Idb on each stop; blocks in mainloop() until the user
        presses a control button, which calls root.quit() to resume."""
        self.frame = frame
        self.status.configure(text=message)
        #
        if info:
            type, value, tb = info
            try:
                m1 = type.__name__
            except AttributeError:
                m1 = "%s" % str(type)
            if value is not None:
                try:
                    m1 = "%s: %s" % (m1, str(value))
                except:
                    pass
            bg = "yellow"
        else:
            m1 = ""
            tb = None
            bg = self.errorbg
        self.error.configure(text=m1, background=bg)
        #
        sv = self.stackviewer
        if sv:
            stack, i = self.idb.get_stack(self.frame, tb)
            sv.load_stack(stack, i)
        #
        self.show_variables(1)
        #
        if self.vsource.get():
            self.sync_source_line()
        #
        for b in self.buttons:
            b.configure(state="normal")
        #
        self.top.wakeup()
        # Nested mainloop: returns when a button handler calls root.quit().
        self.root.mainloop()
        #
        for b in self.buttons:
            b.configure(state="disabled")
        self.status.configure(text="")
        self.error.configure(text="", background=self.errorbg)
        self.frame = None
    def sync_source_line(self):
        """Scroll the editor to the current frame's source line, if on disk."""
        frame = self.frame
        if not frame:
            return
        filename, lineno = self.__frame2fileline(frame)
        if filename[:1] + filename[-1:] != "<>" and os.path.exists(filename):
            self.flist.gotofileline(filename, lineno)
    def __frame2fileline(self, frame):
        code = frame.f_code
        filename = code.co_filename
        lineno = frame.f_lineno
        return filename, lineno
    def cont(self):
        """'Go' button: continue execution."""
        self.idb.set_continue()
        self.root.quit()
    def step(self):
        """'Step' button: step into."""
        self.idb.set_step()
        self.root.quit()
    def next(self):
        """'Over' button: step over."""
        self.idb.set_next(self.frame)
        self.root.quit()
    def ret(self):
        """'Out' button: run until the current frame returns."""
        self.idb.set_return(self.frame)
        self.root.quit()
    def quit(self):
        """'Quit' button: abort the debugged program."""
        self.idb.set_quit()
        self.root.quit()
    stackviewer = None
    def show_stack(self):
        """Create or destroy the stack viewer to match the checkbox state."""
        if not self.stackviewer and self.vstack.get():
            self.stackviewer = sv = StackViewer(self.fstack, self.flist, self)
            if self.frame:
                stack, i = self.idb.get_stack(self.frame, None)
                sv.load_stack(stack, i)
        else:
            sv = self.stackviewer
            if sv and not self.vstack.get():
                self.stackviewer = None
                sv.close()
            self.fstack['height'] = 1
    def show_source(self):
        if self.vsource.get():
            self.sync_source_line()
    def show_frame(self, stackitem):
        self.frame = stackitem[0]  # lineno is stackitem[1]
        self.show_variables()
    localsviewer = None
    globalsviewer = None
    def show_locals(self):
        """Create or destroy the locals viewer to match the checkbox state."""
        lv = self.localsviewer
        if self.vlocals.get():
            if not lv:
                self.localsviewer = NamespaceViewer(self.flocals, "Locals")
        else:
            if lv:
                self.localsviewer = None
                lv.close()
                self.flocals['height'] = 1
        self.show_variables()
    def show_globals(self):
        """Create or destroy the globals viewer to match the checkbox state."""
        gv = self.globalsviewer
        if self.vglobals.get():
            if not gv:
                self.globalsviewer = NamespaceViewer(self.fglobals, "Globals")
        else:
            if gv:
                self.globalsviewer = None
                gv.close()
                self.fglobals['height'] = 1
        self.show_variables()
    def show_variables(self, force=0):
        """Refresh both namespace viewers from the current frame."""
        lv = self.localsviewer
        gv = self.globalsviewer
        frame = self.frame
        if not frame:
            ldict = gdict = None
        else:
            ldict = frame.f_locals
            gdict = frame.f_globals
            # At module level locals IS globals; show it only once.
            if lv and gv and ldict is gdict:
                ldict = None
        if lv:
            lv.load_dict(ldict, force, self.pyshell.interp.rpcclt)
        if gv:
            gv.load_dict(gdict, force, self.pyshell.interp.rpcclt)
    def set_breakpoint_here(self, filename, lineno):
        self.idb.set_break(filename, lineno)
    def clear_breakpoint_here(self, filename, lineno):
        self.idb.clear_break(filename, lineno)
    def clear_file_breaks(self, filename):
        self.idb.clear_all_file_breaks(filename)
    def load_breakpoints(self):
        "Load PyShellEditorWindow breakpoints into subprocess debugger"
        for editwin in self.pyshell.flist.inversedict:
            filename = editwin.io.filename
            try:
                for lineno in editwin.breakpoints:
                    self.set_breakpoint_here(filename, lineno)
            except AttributeError:
                continue
class StackViewer(ScrolledList):
    """Scrollable list of stack frames shown in the Debug Control window."""
    def __init__(self, master, flist, gui):
        if macosxSupport.runningAsOSXApp():
            # At least on with the stock AquaTk version on OSX 10.4 you'll
            # get an shaking GUI that eventually kills IDLE if the width
            # argument is specified.
            ScrolledList.__init__(self, master)
        else:
            ScrolledList.__init__(self, master, width=80)
        self.flist = flist
        self.gui = gui
        self.stack = []
    def load_stack(self, stack, index=None):
        """Fill the listbox from *stack* (list of (frame, lineno) pairs);
        the entry at *index* is marked as the current frame with '> '."""
        self.stack = stack
        self.clear()
        for i in range(len(stack)):
            frame, lineno = stack[i]
            try:
                modname = frame.f_globals["__name__"]
            except:
                modname = "?"
            code = frame.f_code
            filename = code.co_filename
            funcname = code.co_name
            import linecache
            sourceline = linecache.getline(filename, lineno)
            sourceline = sourceline.strip()
            if funcname in ("?", "", None):
                item = "%s, line %d: %s" % (modname, lineno, sourceline)
            else:
                item = "%s.%s(), line %d: %s" % (modname, funcname,
                                                 lineno, sourceline)
            if i == index:
                item = "> " + item
            self.append(item)
        if index is not None:
            self.select(index)
    def popup_event(self, event):
        "override base method"
        if self.stack:
            return ScrolledList.popup_event(self, event)
    def fill_menu(self):
        "override base method"
        menu = self.menu
        menu.add_command(label="Go to source line",
                         command=self.goto_source_line)
        menu.add_command(label="Show stack frame",
                         command=self.show_stack_frame)
    def on_select(self, index):
        "override base method"
        if 0 <= index < len(self.stack):
            self.gui.show_frame(self.stack[index])
    def on_double(self, index):
        "override base method"
        self.show_source(index)
    def goto_source_line(self):
        index = self.listbox.index("active")
        self.show_source(index)
    def show_stack_frame(self):
        index = self.listbox.index("active")
        if 0 <= index < len(self.stack):
            self.gui.show_frame(self.stack[index])
    def show_source(self, index):
        """Open the source file of the selected frame in an editor window."""
        if not (0 <= index < len(self.stack)):
            return
        frame, lineno = self.stack[index]
        code = frame.f_code
        filename = code.co_filename
        if os.path.isfile(filename):
            edit = self.flist.open(filename)
            if edit:
                edit.gotoline(lineno)
class NamespaceViewer:
    """Scrollable name/value table for a frame's locals or globals."""
    def __init__(self, master, title, dict=None):
        width = 0
        height = 40
        if dict:
            height = 20*len(dict) # XXX 20 == observed height of Entry widget
        self.master = master
        self.title = title
        import reprlib
        self.repr = reprlib.Repr()
        self.repr.maxstring = 60
        self.repr.maxother = 60
        self.frame = frame = Frame(master)
        self.frame.pack(expand=1, fill="both")
        self.label = Label(frame, text=title, borderwidth=2, relief="groove")
        self.label.pack(fill="x")
        self.vbar = vbar = Scrollbar(frame, name="vbar")
        vbar.pack(side="right", fill="y")
        self.canvas = canvas = Canvas(frame,
                                      height=min(300, max(40, height)),
                                      scrollregion=(0, 0, width, height))
        canvas.pack(side="left", fill="both", expand=1)
        vbar["command"] = canvas.yview
        canvas["yscrollcommand"] = vbar.set
        self.subframe = subframe = Frame(canvas)
        self.sfid = canvas.create_window(0, 0, window=subframe, anchor="nw")
        self.load_dict(dict)
    # Class-level sentinel so load_dict's "dict is self.dict" identity check
    # works on the very first call (before an instance attribute exists);
    # -1 can never be the dict being displayed.
    dict = -1
    def load_dict(self, dict, force=0, rpc_client=None):
        """Rebuild the table from *dict*; skipped if the same object is
        already displayed and *force* is false."""
        if dict is self.dict and not force:
            return
        subframe = self.subframe
        frame = self.frame
        for c in list(subframe.children.values()):
            c.destroy()
        self.dict = None
        if not dict:
            l = Label(subframe, text="None")
            l.grid(row=0, column=0)
        else:
            #names = sorted(dict)
            ###
            # Because of (temporary) limitations on the dict_keys type (not yet
            # public or pickleable), have the subprocess to send a list of
            # keys, not a dict_keys object.  sorted() will take a dict_keys
            # (no subprocess) or a list.
            #
            # There is also an obscure bug in sorted(dict) where the
            # interpreter gets into a loop requesting non-existing dict[0],
            # dict[1], dict[2], etc from the RemoteDebugger.DictProxy.
            ###
            keys_list = dict.keys()
            names = sorted(keys_list)
            ###
            row = 0
            for name in names:
                value = dict[name]
                svalue = self.repr.repr(value) # repr(value)
                # Strip extra quotes caused by calling repr on the (already)
                # repr'd value sent across the RPC interface:
                if rpc_client:
                    svalue = svalue[1:-1]
                l = Label(subframe, text=name)
                l.grid(row=row, column=0, sticky="nw")
                l = Entry(subframe, width=0, borderwidth=0)
                l.insert(0, svalue)
                l.grid(row=row, column=1, sticky="nw")
                row = row+1
        self.dict = dict
        # XXX Could we use a <Configure> callback for the following?
        subframe.update_idletasks() # Alas!
        width = subframe.winfo_reqwidth()
        height = subframe.winfo_reqheight()
        canvas = self.canvas
        self.canvas["scrollregion"] = (0, 0, width, height)
        if height > 300:
            canvas["height"] = 300
            frame.pack(expand=1)
        else:
            canvas["height"] = height
            frame.pack(expand=0)
    def close(self):
        self.frame.destroy()
|
{
"content_hash": "b70df6984f270f176fe9a049c6d8b987",
"timestamp": "",
"source": "github",
"line_count": 491,
"max_line_length": 79,
"avg_line_length": 33.33401221995927,
"alnum_prop": 0.5401111993645751,
"repo_name": "timm/timmnix",
"id": "d4872ed42af8201de11c0b764e70cc243e7ba900",
"size": "16367",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "pypy3-v5.5.0-linux64/lib-python/3/idlelib/Debugger.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1641"
},
{
"name": "Batchfile",
"bytes": "1234"
},
{
"name": "C",
"bytes": "436685"
},
{
"name": "CSS",
"bytes": "96"
},
{
"name": "Common Lisp",
"bytes": "4"
},
{
"name": "Emacs Lisp",
"bytes": "290698"
},
{
"name": "HTML",
"bytes": "111577"
},
{
"name": "Makefile",
"bytes": "1681"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "PowerShell",
"bytes": "1540"
},
{
"name": "Prolog",
"bytes": "14301"
},
{
"name": "Python",
"bytes": "21267592"
},
{
"name": "Roff",
"bytes": "21080"
},
{
"name": "Shell",
"bytes": "27687"
},
{
"name": "TeX",
"bytes": "3052861"
},
{
"name": "VBScript",
"bytes": "481"
}
],
"symlink_target": ""
}
|
import dredd_hooks as hooks
import sys
# HELPERS
# NOTE move in separated module
import os
import sys
sys.path.append("/opt/xos")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
import django
from core.models import *
from services.volt.models import *
from services.vsg.models import *
from services.vtr.models import *
import urllib2
import json
from django.utils import timezone
django.setup()
def doLogin(username, password):
    """Log in through the xoslib endpoint; return the CSRF token and
    session id as a dict with keys 'token' and 'sessionid'."""
    url = "http://127.0.0.1:8000/xoslib/login?username=%s&password=%s" % (username, password)
    raw = urllib2.urlopen(url).read()
    body = json.loads(raw)
    return {'token': body['xoscsrftoken'], 'sessionid': body['xossessionid']}
def cleanDB():
    """Purge every row of each model type touched by the API tests.

    Deletion order mirrors the original hand-written sequence:
    subscribers first, then slices/services/tenants, then network
    objects, finally address pools, flavors and images.
    """
    models_to_purge = (
        CordSubscriberRoot,
        Slice,
        Service,
        Tenant,
        Network,
        NetworkTemplate,
        NetworkSlice,
        AddressPool,
        Flavor,
        Image,
    )
    for model in models_to_purge:
        for obj in model.objects.all():
            obj.delete(purge=True)
def createTestSubscriber():
    """Reset the DB and build the full CORD service chain for one test
    subscriber: vRouter (+ address pool), vSG, vOLT, vBNG services, the
    vCPE slice/network wiring and a VOLTTenant with fixed s/c tags.
    Order matters: services must exist before slices/tenants reference them.
    """
    cleanDB()
    createFlavors()
    # load user
    user = User.objects.get(email="padmin@vicci.org")
    # network template
    private_template = NetworkTemplate()
    private_template.name = 'Private Network'
    private_template.save()
    # creating the test subscriber
    subscriber = CordSubscriberRoot(name='Test Subscriber 1', id=1)
    subscriber.created = timezone.now()
    subscriber.save()
    # vRouter service
    vrouter_service = VRouterService()
    vrouter_service.name = 'service_vrouter'
    vrouter_service.save()
    # address pools
    ap_vsg = AddressPool()
    ap_vsg.service = vrouter_service
    ap_vsg.name = 'addresses_vsg'
    ap_vsg.addresses = '10.168.0.0'
    ap_vsg.gateway_ip = '10.168.0.1'
    ap_vsg.gateway_mac = '02:42:0a:a8:00:01'
    ap_vsg.save()
    # print 'vRouter created'
    # Site
    site = Site.objects.get(name='mysite')
    # vSG service
    vsg_service = VSGService()
    vsg_service.name = 'service_vsg'
    # vSG slice
    vsg_slice = Slice(id=2)
    vsg_slice.name = site.login_base + "_testVsg"
    vsg_slice.service = vsg_service.id
    vsg_slice.site = site
    vsg_slice.caller = user
    vsg_slice.save()
    vsg_service.save()
    # volt service
    volt_service = VOLTService()
    volt_service.name = 'service_volt'
    volt_service.save()
    # cvpe image
    createImage('ubuntu-vcpe4')
    # vcpe slice
    vcpe_slice = Slice(id=3)
    vcpe_slice.name = site.login_base + "_testVcpe"
    vcpe_slice.service = Service.objects.get(kind='vCPE')
    vcpe_slice.site = site
    vcpe_slice.caller = user
    vcpe_slice.save()
    # print 'vcpe_slice created'
    # create a lan network
    lan_net = Network(id=1)
    lan_net.name = 'lan_network'
    lan_net.owner = vcpe_slice
    lan_net.template = private_template
    lan_net.save()
    # print 'lan_network created'
    # add relation between vcpe slice and lan network
    vcpe_network = NetworkSlice()
    vcpe_network.network = lan_net
    vcpe_network.slice = vcpe_slice
    vcpe_network.save()
    # print 'vcpe network relation added'
    # vbng service
    vbng_service = VBNGService()
    vbng_service.name = 'service_vbng'
    vbng_service.save()
    # print 'vbng_service creater'
    # volt tenant
    vt = VOLTTenant(subscriber=subscriber.id, id=1)
    vt.s_tag = "222"
    vt.c_tag = "432"
    vt.provider_service_id = volt_service.id
    vt.caller = user
    vt.save()
    # print "Subscriber Created"
def deleteTruckrolls():
    """Purge every VTRTenant (truckroll) record."""
    for tenant in VTRTenant.objects.all():
        tenant.delete(purge=True)
def setUpTruckroll():
    """Create the vTR service that truckroll tenants require."""
    vtr = VTRService()
    vtr.name = 'service_vtr'
    vtr.save()
def createTruckroll():
    """Create the vTR service plus a single VTRTenant (id=1)."""
    setUpTruckroll()
    tenant = VTRTenant(id=1)
    tenant.created = timezone.now()
    tenant.save()
def createFlavors():
    """Create the three standard m1 flavors with fixed primary keys."""
    for pk, flavor_name in ((1, "m1.small"), (2, "m1.medium"), (3, "m1.large")):
        flavor = Flavor(id=pk)
        flavor.name = flavor_name
        flavor.created = timezone.now()
        flavor.save()
def createSlice():
    """Create and return a test slice (id=1) at mysite owned by padmin."""
    site = Site.objects.get(name='mysite')
    user = User.objects.get(email="padmin@vicci.org")
    test_slice = Slice(id=1)
    test_slice.created = timezone.now()
    test_slice.name = site.login_base + "_testSlice"
    test_slice.site = site
    test_slice.caller = user
    test_slice.save()
    return test_slice
def createDeployment():
    """Create and return the test deployment (id=1)."""
    depl = Deployment(id=1)
    depl.created = timezone.now()
    depl.name = 'MyTestDeployment'
    depl.save()
    return depl
def createImage(name):
    """Create and return a QCOW2 vm image (id=1) called *name*."""
    image = Image(id=1)
    image.name = name
    image.created = timezone.now()
    image.disk_format = 'QCOW2'
    image.kind = 'vm'
    image.save()
    return image
def createNode(deployment):
    """Link mysite to *deployment* via a SiteDeployment, then create and
    return a test node (id=1) on that site deployment."""
    site = Site.objects.get(name='mysite')
    sd = SiteDeployment(id=1)
    sd.site = site
    sd.created = timezone.now()
    sd.deployment = deployment
    sd.save()
    test_node = Node(id=1)
    test_node.name = 'test-node'
    test_node.created = timezone.now()
    test_node.site = site
    test_node.site_deployment = sd
    test_node.save()
    return test_node
def setupInstance():
    """Create the deployment, slice, node and image an instance needs and
    return {'image', 'deployment', 'slice'} (the node is created as a side
    effect and fetched by createInstance via Node.objects)."""
    depl = createDeployment()
    test_slice = createSlice()
    createNode(depl)
    image = createImage('test-image')
    return {'image': image, 'deployment': depl, 'slice': test_slice}
def createInstance():
    """Create a test instance (id=1) on the first node, using the fixtures
    produced by setupInstance()."""
    reqs = setupInstance()
    user = User.objects.get(email="padmin@vicci.org")
    inst = Instance(id=1)
    inst.name = 'test-instance'
    inst.created = timezone.now()
    inst.node = Node.objects.all()[0]
    inst.image = reqs['image']
    inst.slice = reqs['slice']
    inst.deployment = reqs['deployment']
    inst.caller = user
    inst.save()
def createService():
    """Create a bare Service (id=1) named 'test-service'."""
    svc = Service(id=1)
    svc.name = 'test-service'
    svc.save()
# Module-level seeding: exactly one fixture routine runs at import time (the
# Dredd before-hooks import this module).  The commented-out calls below are
# the alternative fixtures; uncomment the one the test suite under
# development needs and comment the rest.
# setupInstance()
# depl = createDeployment()
# createTestSubscriber()
# createInstance()
# createSlice()
# createNode(depl)
# createImage('test-image')
# createFlavors()
# createTruckroll()
# setUpTruckroll()
createService()
|
{
"content_hash": "2e58a8a44c7a2c2b282d9520b4c0e317",
"timestamp": "",
"source": "github",
"line_count": 296,
"max_line_length": 93,
"avg_line_length": 23.375,
"alnum_prop": 0.6522618875560052,
"repo_name": "cboling/xos",
"id": "b81e239035e3913165d3b8d5b029db7731ec21c6",
"size": "6919",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "xos/tests/api/helpers/before_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "API Blueprint",
"bytes": "35778"
},
{
"name": "CSS",
"bytes": "932347"
},
{
"name": "HTML",
"bytes": "724460"
},
{
"name": "JavaScript",
"bytes": "1113855"
},
{
"name": "M4",
"bytes": "47443"
},
{
"name": "Makefile",
"bytes": "55516"
},
{
"name": "Python",
"bytes": "1748917"
},
{
"name": "Ruby",
"bytes": "512"
},
{
"name": "Shell",
"bytes": "67259"
}
],
"symlink_target": ""
}
|
import os
import coverage
import colorize
from django.conf import settings
from django.test.simple import DjangoTestSuiteRunner
class CoverageTestRunner(DjangoTestSuiteRunner):
    """Django test runner that wraps the suite run with coverage collection.

    Modules listed in settings.COVERAGE_MODULES are traced; after the run,
    an annotated HTML file per module is written to settings.COVERAGE_DIR
    (default ./build/coverage) and a textual summary is printed.
    """

    def run_tests(self, test_labels, verbosity=1, interactive=True, extra_tests=None):
        """Run the suite and return len(failures) + len(errors).

        extra_tests previously defaulted to a shared mutable list ([]);
        it now defaults to None and is normalized here.
        """
        if extra_tests is None:
            extra_tests = []
        coveragemodules = getattr(settings, 'COVERAGE_MODULES', [])
        if coveragemodules:
            coverage.start()
        self.setup_test_environment()
        suite = self.build_suite(test_labels, extra_tests)
        old_config = self.setup_databases()
        result = self.run_suite(suite)
        if coveragemodules:
            coverage.stop()
            coveragedir = getattr(settings, 'COVERAGE_DIR', './build/coverage')
            if not os.path.exists(coveragedir):
                os.makedirs(coveragedir)
            modules = []
            for module_string in coveragemodules:
                module = __import__(module_string, globals(), locals(), [""])
                modules.append(module)
                # analysis() -> (filename, executed, excluded, missing-formatted)
                f, _executed, _excluded, mf = coverage.analysis(module)
                # open() instead of the removed file() builtin; close the
                # handle even if colorize raises.
                fp = open(os.path.join(coveragedir, module_string + ".html"), "wb")
                try:
                    colorize.colorize_file(f, outstream=fp, not_covered=mf)
                finally:
                    fp.close()
            coverage.report(modules, show_missing=0)
            coverage.erase()
        self.teardown_databases(old_config)
        self.teardown_test_environment()
        return len(result.failures) + len(result.errors)
|
{
"content_hash": "ca8499b9ff11508d777ac8ec529ad352",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 84,
"avg_line_length": 34.61904761904762,
"alnum_prop": 0.6121045392022009,
"repo_name": "chop-dbhi/django-forkit",
"id": "afd6a3da4906418a9859615f9b81d578ffc9dc71",
"size": "1454",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "forkit/tests/coverage_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "48457"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.conf.urls import url
from . import views
# URLconf exercising the generic DetailView/ListView configurations under
# test; each pattern pins one view option (pagination, orphans, allow_empty,
# template overrides, context names, ordering).
urlpatterns = [
    # DetailView
    url(r'^detail/(?P<slug>[-\w]+)/$',
        views.ArticleDetailView.as_view(),
        name='article_detail'),
    # ListView
    url(r'^list/dict/$',
        views.DictList.as_view()),
    url(r'^list/dict/paginated/$',
        views.DictList.as_view(page_size=1)),
    url(r'^list/artists/$',
        views.ArtistList.as_view(),
        name="artists_list"),
    url(r'^list/authors/$',
        views.AuthorList.as_view(),
        name="authors_list"),
    url(r'^list/authors/paginated/$',
        views.AuthorList.as_view(page_size=30)),
    url(r'^list/authors/paginated-orphaned/$',
        views.AuthorList.as_view(page_size=30, orphans=2)),
    url(r'^list/authors/notempty/$',
        views.AuthorList.as_view(allow_empty=False)),
    url(r'^list/authors/notempty/paginated/$',
        views.AuthorList.as_view(allow_empty=False, page_size=2)),
    url(r'^list/authors/template_name/$',
        views.AuthorList.as_view(template_name='views_tests/list.html')),
    url(r'^list/authors/template_name_suffix/$',
        views.AuthorList.as_view(template_name_suffix='_objects')),
    url(r'^list/authors/context_object_name/$',
        views.AuthorList.as_view(context_object_name='author_list')),
    url(r'^list/authors/dupe_context_object_name/$',
        views.AuthorList.as_view(context_object_name='object_list')),
    url(r'^list/authors/invalid/$',
        views.AuthorList.as_view(queryset=None)),
    url(r'^list/authors/paginated/custom_page_kwarg/$',
        views.AuthorList.as_view(page_size=30, page_kwarg='pagina')),
    url(r'^list/books/sorted/$',
        views.BookList.as_view(ordering='name')),
    url(r'^list/books/sortedbypagesandnamedesc/$',
        views.BookList.as_view(ordering=('pages', '-name'))),
]
|
{
"content_hash": "8f953622cbaede9344dfe8c49f829fc2",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 73,
"avg_line_length": 38.204081632653065,
"alnum_prop": 0.6362179487179487,
"repo_name": "samuelmaudo/yepes",
"id": "1d0be41d48befb1a2b1eeee877a9b94134134854",
"size": "1896",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/views/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "1485"
},
{
"name": "CSS",
"bytes": "2805"
},
{
"name": "HTML",
"bytes": "18543"
},
{
"name": "JavaScript",
"bytes": "56039"
},
{
"name": "Python",
"bytes": "2415982"
}
],
"symlink_target": ""
}
|
from cStringIO import StringIO
from Operand import *
from Instruction import *
from FlowControl import *
from Constants import *
from Opcodes import *
from Formatter import *
from Disass import Disassembler
from Exceptions import *
class CubinFile(object):
    """In-memory representation of a .cubin module: header fields plus a
    list of kernels, written back out in the textual cubin format."""
    kernels = None        # list of Kernel objects, in file order
    architecture = None
    abiversion = None
    modname = None
    kernels_byname = None  # name -> Kernel map, filled in by load()
    def __init__(self):
        self.kernels = []
        self.kernels_byname = {}
    def write(self, f):
        """Write cubin data to f"""
        # Write intro
        # Write constant segments
        # NOTE(review): the header values are hard-coded (sm_10 / abi 0 /
        # modname "cubin") rather than taken from the instance attributes
        # above — confirm that is intentional.
        f.write("architecture {sm_10}\n")
        f.write("abiversion {0}\n")
        f.write("modname {cubin}\n")
        # test zone 0
        #f.write("consts {\n")
        #f.write("\tname = ww\n")
        #f.write("\tsegname = const\n")
        #f.write("\tsegnum = 0\n")
        #f.write("\toffset = 0\n")
        #f.write("\tbytes = 4\n")
        #f.write("\tmem {\n")
        #f.write("\t\t0x12345678 0x20000010 0x20000020 0x20000030\n") # same as for code
        #f.write("\t}\n")
        #f.write("}\n")
        # One "code { ... }" environment per kernel.
        for kernel in self.kernels:
            f.write("code {\n")
            f.write("\tname = %s\n" % kernel.name)
            f.write("\tlmem = %i\n" % kernel.lmem)
            f.write("\tsmem = %i\n" % kernel.smem)
            f.write("\treg = %i\n" % kernel.reg)
            f.write("\tbar = %i\n" % kernel.bar)
            f.write("\tbincode {\n")
            # kernel.bincode
            # up to four 32 bit values per line
            for i in xrange(0, len(kernel.bincode), 4):
                f.write("\t\t"+("".join(["0x%08x " % x for x in kernel.bincode[i:i+4]]))+"\n")
            f.write("\t}\n")
            # XXX write local constant stuff
            # test zone 1
            #f.write("\tconst {\n")
            #f.write("\t\tsegname = const\n")
            #f.write("\t\tsegnum = 1\n")
            #f.write("\t\toffset = 0\n")
            #f.write("\t\tbytes = 4\n")
            #f.write("\t\tmem {\n")
            #f.write("\t\t\t0x56789abc 0x10000010 0x10000020 0x10000030\n") # same as for code
            #f.write("\t\t}\n")
            #f.write("\t}\n")
            f.write("}\n")
class Label(object):
    """A named position in a kernel's instruction stream.

    Labels occupy no space in the assembled output; ``addr`` is assigned
    externally (``Kernel.assemble`` stamps it while laying out code).
    """
    # Class-level defaults, matching the declaration style of this file.
    name = None
    inst = None
    addr = None
    def __init__(self, name):
        self.name = name
    def assemble(self):
        """Labels emit no machine words at all."""
        self.inst = []
    def __repr__(self):
        return "%s:" % self.name
class Kernel(object):
    """One kernel inside a cubin module: resource usage, raw machine words
    (``bincode``), constant segments and, for the assembler path, a list of
    Instruction/Label objects."""
    name = None
    lmem = None # Amount of local mem used
    smem = None # Amount of shared mem used
    reg = None # Number of registers
    bar = None # Number of barriers
    bincode = None      # list of 32-bit words
    const = None        # list of Const segments
    instructions = None # Disassembled kernel
    def __init__(self):
        self.const = []
    def __repr__(self):
        # repr is the full disassembly listing.
        rv = StringIO()
        self.disassemble(rv)
        return rv.getvalue()
    # NOTE(review): the default Formatter() instance is created once at
    # definition time and shared across all calls — confirm Formatter is
    # stateless before relying on this.
    def disassemble(self, rv, formatter=Formatter()):
        """Disassemble the cubin instructions in this kernel"""
        # Phase 1 -- decode instructions
        ptr = 0
        disa = Disassembler()
        instructions = []
        while ptr < len(self.bincode):
            base = ptr*4
            inst = [self.bincode[ptr]]
            ptr += 1
            # Low bit of the first word set => two-word (64-bit) instruction.
            if inst[0] & 1:
                inst.append(self.bincode[ptr])
                ptr += 1
            instructions.append(disa.decode(base, inst))
        # Phase 2 -- labels, sort in order of address
        label_set = set()
        for i in instructions:
            for o in i.dst_operands:
                if o.indirection == OP_INDIRECTION_CODE and o.source == OP_SOURCE_IMMEDIATE:
                    label_set.add(o.value)
        labels = list(label_set)
        labels.sort()
        # Branch targets get sequential names label0, label1, ... by address.
        label_map = dict([(l, "label%i" % x) for x,l in enumerate(labels)])
        # Phase 3 -- fill in labels in program arguments
        for i in instructions:
            for o in i.dst_operands:
                if o.indirection == OP_INDIRECTION_CODE and o.source == OP_SOURCE_IMMEDIATE:
                    o.label = label_map[o.value]
        # Phase 4 -- print
        for i in instructions:
            formatter.address(rv, i.address)
            formatter.bincode(rv, (" ".join(["%08x" % x for x in i.inst])))
            if i.address in label_map:
                formatter.label(rv, label_map[i.address])
            i.dump(rv, formatter)
            formatter.newline(rv)
        # Phase 5 -- print constants
        for seg in self.const:
            formatter.const_hdr(rv, seg.segname, seg.segnum, seg.offset, seg.bytes)
            formatter.const_data(rv, seg.mem)
    def assemble(self):
        """Assemble ``self.instructions`` into ``self.bincode``, resolving
        label references in a second pass."""
        # Phase 1 -- assemble instructions, fill in addresses
        bincode = []
        label_map = {}
        for inst in self.instructions:
            inst.addr = len(bincode)*4 # Fill in addresses
            inst.assemble()
            bincode.extend(inst.inst)
            # Create label map
            if isinstance(inst, Label):
                if inst.name in label_map:
                    raise CompilationError(inst.line, "Duplicate label %s" % inst.name)
                label_map[inst.name] = inst
        # Phase 2 -- fill in labels
        for inst in self.instructions:
            if isinstance(inst, Instruction):
                dirty = False
                for o in inst.dst_operands:
                    if o.indirection == OP_INDIRECTION_CODE and o.source == OP_SOURCE_IMMEDIATE:
                        try:
                            o.value = label_map[o.label].addr
                        except KeyError:
                            raise CompilationError(inst.line, "Undefined label %s" % o.label)
                        dirty = True
                if dirty:
                    # Relocate: re-encode and patch the words in place.
                    inst.assemble()
                    idx = inst.addr>>2
                    bincode[idx:idx+len(inst.inst)] = inst.inst
        self.bincode = bincode
        #print self.instructions
class Const(object):
    """One constant-memory segment of a kernel, filled in field by field
    while ``load`` parses a ``const { ... }`` environment."""
    segname = None  # segment name string (e.g. "const")
    segnum = None   # segment number (parsed as int, see _numeric)
    offset = None   # byte offset within the segment (int)
    bytes = None    # size in bytes (int)
    mem = None      # list of numeric words parsed from the mem { } block
class Dummy:
    """Sink that absorbs parsed environments we are not interested in.

    ``load`` pushes one of these onto its nesting stack for ignored
    sections (consts/sampler/reloc/params_SMEM) so data lines inside
    them are silently discarded instead of landing on a real object.
    """
    def extend(self, x):
        # Deliberately drop the parsed words.
        pass
# Keys whose values load() parses as integers; all other values stay strings.
_numeric = ["offset","segnum","bytes","lmem","smem","reg","bar"]
def load(name):
    """Load a cubin binary assembly file.

    Parses the brace-delimited textual format with a simple nesting stack
    (``inside``): opening an environment pushes the object that should
    receive its contents, ``}`` pops it, ``name = value`` lines become
    attributes on the current object, and bare numeric lines extend it.

    Returns the populated CubinFile (with ``kernels_byname`` filled in).
    Raises ValueError on an unknown environment name.
    """
    ex = CubinFile()
    inside = [ex]
    # Fixes vs. the original: the file handle is now closed even when a
    # parse error is raised (with-block), and the final line is no longer
    # truncated by one character when it lacks a trailing newline
    # (rstrip("\n") instead of line[0:-1]).
    with open(name, "r") as f:
        for line in f:
            line = line.rstrip("\n")
            if line.strip() == "":
                # Empty line
                continue
            closebrace = line.rfind("}")
            openbrace = line.find("{")
            equalpos = line.find("=")
            if openbrace != -1:
                cmd = line[0:openbrace].strip()
                if closebrace != -1:
                    # Single-line environment: "cmd { value }" -> attribute.
                    value = line[openbrace+1:closebrace]
                    setattr(inside[-1], cmd, value)
                else:
                    #print cmd, "open"
                    if cmd == "code":
                        kernel = Kernel()
                        inside[-1].kernels.append(kernel)
                        inside.append(kernel)
                    elif cmd == "bincode":
                        inst = []
                        inside[-1].bincode = inst
                        inside.append(inst)
                    elif cmd == "const":
                        const = Const()
                        inside[-1].const.append(const)
                        inside.append(const)
                    elif cmd == "mem":
                        inst = []
                        inside[-1].mem = inst
                        inside.append(inst)
                    elif cmd == "consts" or cmd == "sampler" or cmd == "reloc":
                        # Ignore
                        inside.append(Dummy())
                    elif cmd == "params_SMEM":
                        # Ignore
                        inside.append(Dummy())
                    else:
                        raise ValueError("Invalid environment %s" % cmd)
            elif closebrace != -1:
                #print inside[-1], "closed"
                inside.pop()
            elif equalpos != -1:
                valname = line[0:equalpos].strip()
                valvalue = line[equalpos+1:].strip()
                if valname in _numeric:
                    valvalue = int(valvalue)
                setattr(inside[-1], valname, valvalue)
            else:
                # Bincode? Bare whitespace-separated words (0x.. hex or decimal).
                inst = line.strip().split(" ")
                inst = [int(x,0) for x in inst]
                inside[-1].extend(inst)
                #print "inst", inst
    # Fill in name->kernel map
    for kernel in ex.kernels:
        ex.kernels_byname[kernel.name] = kernel
    return ex
|
{
"content_hash": "cb4a3eba9fea2b8fd390bfbf82fbbe4c",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 96,
"avg_line_length": 33.25757575757576,
"alnum_prop": 0.49031890660592253,
"repo_name": "laanwj/decuda",
"id": "27304dc3ce3cebf9666fca88c6ea2ed0df26dd61",
"size": "8867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CubinFile.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1662"
},
{
"name": "C++",
"bytes": "1533"
},
{
"name": "Python",
"bytes": "207886"
}
],
"symlink_target": ""
}
|
from django import template
from geoprisma.models import Service
register = template.Library()
@register.simple_tag
def printWidgetOptionss(options):
    """Render widget options as a JavaScript object literal.

    Options sharing a name are grouped (via ``regroupOptionsByName``) and
    emitted as one pluralised array entry (``"names": [v1,v2]``); uniquely
    named options become plain ``"name":value`` entries.

    Args:
        options: iterable of option objects exposing ``name`` and ``value``.
    Returns:
        The formatted ``{...}`` literal as a string.
    """
    parts = []
    for option in regroupOptionsByName(options):
        if type(option) is list:
            # Grouped duplicates: one array entry keyed on the pluralised name.
            values = ','.join(printOptionByType(sub.value) for sub in option)
            parts.append('"' + option[0].name + 's": [' + values + ']')
        else:
            parts.append('"' + option.name + '":' + printOptionByType(option.value))
    # The original's manual index/comma bookkeeping is replaced by a single
    # join; the rendered string is byte-identical.
    return '{\n' + ',\n'.join(parts) + '}'
def regroupOptionsByName(options):
    """Group a list of options by their ``name`` attribute.

    Args:
        options: the option list.
    Returns:
        A list where options sharing a name are collapsed into a sub-list
        and uniquely named options stay as bare elements; order follows the
        first appearance of each name.
    """
    buckets = {}
    for option in options:
        buckets.setdefault(option.name, []).append(option)
    return [group if len(group) > 1 else group[0] for group in buckets.values()]
def printOptionByType(option):
    """Format a raw option value for JavaScript output.

    Digit-only strings and the literals ``true``/``false`` pass through
    unquoted; anything else is double-quoted, with embedded double quotes
    rewritten as single quotes.

    Args:
        option: the option value as a string.
    Returns:
        The formatted value.
    """
    if option.isdigit() or option in ("true", "false"):
        return option
    return '"%s"' % option.replace('"', "'")
|
{
"content_hash": "149c701f8d018e859e5e371ce6bb2ffa",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 84,
"avg_line_length": 26.826666666666668,
"alnum_prop": 0.5124254473161034,
"repo_name": "groupe-conseil-nutshimit-nippour/django-geoprisma",
"id": "3c767bc4f0588cad044250f354f7a25dd11dda0d",
"size": "2012",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "geoprisma/templatetags/widget_extras.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "4704"
},
{
"name": "C",
"bytes": "7006"
},
{
"name": "CSS",
"bytes": "5396402"
},
{
"name": "HTML",
"bytes": "50384401"
},
{
"name": "JavaScript",
"bytes": "15252022"
},
{
"name": "PHP",
"bytes": "5326"
},
{
"name": "Python",
"bytes": "375512"
},
{
"name": "Ruby",
"bytes": "5174"
},
{
"name": "Shell",
"bytes": "5151"
},
{
"name": "XSLT",
"bytes": "44334"
}
],
"symlink_target": ""
}
|
"""Device discovery functions for Zigbee Home Automation."""
from __future__ import annotations
from collections import Counter
from collections.abc import Callable
import logging
from typing import TYPE_CHECKING
from homeassistant.const import CONF_TYPE, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.entity_registry import async_entries_for_device
from homeassistant.helpers.typing import ConfigType
from . import const as zha_const, registries as zha_regs
from .. import ( # noqa: F401 pylint: disable=unused-import,
alarm_control_panel,
binary_sensor,
button,
climate,
cover,
device_tracker,
fan,
light,
lock,
number,
select,
sensor,
siren,
switch,
)
from .channels import base
if TYPE_CHECKING:
from ..entity import ZhaEntity
from .channels import ChannelPool
from .device import ZHADevice
from .gateway import ZHAGateway
from .group import ZHAGroup
_LOGGER = logging.getLogger(__name__)
# NOTE(review): an async def decorated with @callback is unusual — confirm
# callers always await/schedule this as intended.
@callback
async def async_add_entities(
    _async_add_entities: AddEntitiesCallback,
    entities: list[
        tuple[
            type[ZhaEntity],
            tuple[str, ZHADevice, list[base.ZigbeeChannel]],
        ]
    ],
) -> None:
    """Instantiate queued (entity_class, args) pairs and add them to HA.

    Entity classes whose ``create_entity`` returns None are skipped, and the
    queue list is drained in place so the same entries are not added twice.
    """
    if not entities:
        return
    to_add = [ent_cls.create_entity(*args) for ent_cls, args in entities]
    entities_to_add = [entity for entity in to_add if entity is not None]
    _async_add_entities(entities_to_add, update_before_add=False)
    entities.clear()
class ProbeEndpoint:
    """All discovered channels and entities of an endpoint."""
    def __init__(self) -> None:
        """Initialize instance."""
        # Per-device platform overrides keyed by unique_id; populated by
        # initialize() from the user's CONF_DEVICE_CONFIG settings.
        self._device_configs: ConfigType = {}
    @callback
    def discover_entities(self, channel_pool: ChannelPool) -> None:
        """Process an endpoint on a zigpy device.

        Runs the discovery passes in order: well-known device types,
        multi-channel entities, per-cluster fallbacks, and finally
        config/diagnostic entities.
        """
        self.discover_by_device_type(channel_pool)
        self.discover_multi_entities(channel_pool)
        self.discover_by_cluster_id(channel_pool)
        self.discover_multi_entities(channel_pool, config_diagnostic_entities=True)
        zha_regs.ZHA_ENTITIES.clean_up()
    @callback
    def discover_by_device_type(self, channel_pool: ChannelPool) -> None:
        """Process an endpoint on a zigpy device.

        Maps the endpoint's (profile, device_type) to a platform — unless a
        user override for this unique_id supplies one — and creates the
        matching entity, claiming its channels.
        """
        unique_id = channel_pool.unique_id
        # User override takes precedence over the profile/device-type lookup.
        component: str | None = self._device_configs.get(unique_id, {}).get(CONF_TYPE)
        if component is None:
            ep_profile_id = channel_pool.endpoint.profile_id
            ep_device_type = channel_pool.endpoint.device_type
            component = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type)
        if component and component in zha_const.PLATFORMS:
            channels = channel_pool.unclaimed_channels()
            entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity(
                component, channel_pool.manufacturer, channel_pool.model, channels
            )
            if entity_class is None:
                return
            channel_pool.claim_channels(claimed)
            channel_pool.async_new_entity(component, entity_class, unique_id, claimed)
    @callback
    def discover_by_cluster_id(self, channel_pool: ChannelPool) -> None:
        """Process an endpoint on a zigpy device.

        Fallback pass: maps each still-unclaimed channel to a platform via
        its input cluster id (or, for class-keyed registry entries, via
        isinstance on the cluster).
        """
        items = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.items()
        # Registry entries keyed by cluster *class* rather than numeric id.
        single_input_clusters = {
            cluster_class: match
            for cluster_class, match in items
            if not isinstance(cluster_class, int)
        }
        remaining_channels = channel_pool.unclaimed_channels()
        for channel in remaining_channels:
            # Channel-only clusters never produce entities; just claim them.
            if channel.cluster.cluster_id in zha_regs.CHANNEL_ONLY_CLUSTERS:
                channel_pool.claim_channels([channel])
                continue
            component = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.get(
                channel.cluster.cluster_id
            )
            if component is None:
                for cluster_class, match in single_input_clusters.items():
                    if isinstance(channel.cluster, cluster_class):
                        component = match
                        break
            self.probe_single_cluster(component, channel, channel_pool)
        # until we can get rid of registries
        self.handle_on_off_output_cluster_exception(channel_pool)
    @staticmethod
    def probe_single_cluster(
        component: Platform | None,
        channel: base.ZigbeeChannel,
        ep_channels: ChannelPool,
    ) -> None:
        """Probe specified cluster for specific component.

        Creates one entity for *channel* on *component*, with a unique id
        suffixed by the cluster id; no-op if the platform is unknown or no
        entity class matches.
        """
        if component is None or component not in zha_const.PLATFORMS:
            return
        channel_list = [channel]
        unique_id = f"{ep_channels.unique_id}-{channel.cluster.cluster_id}"
        entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity(
            component, ep_channels.manufacturer, ep_channels.model, channel_list
        )
        if entity_class is None:
            return
        ep_channels.claim_channels(claimed)
        ep_channels.async_new_entity(component, entity_class, unique_id, claimed)
    def handle_on_off_output_cluster_exception(self, ep_channels: ChannelPool) -> None:
        """Process output clusters of the endpoint.

        Special case: probes *output* clusters for entity platforms, except
        on remote-type devices, wrapping each in a freshly created channel.
        """
        profile_id = ep_channels.endpoint.profile_id
        device_type = ep_channels.endpoint.device_type
        if device_type in zha_regs.REMOTE_DEVICE_TYPES.get(profile_id, []):
            return
        for cluster_id, cluster in ep_channels.endpoint.out_clusters.items():
            component = zha_regs.SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS.get(
                cluster.cluster_id
            )
            if component is None:
                continue
            channel_class = zha_regs.ZIGBEE_CHANNEL_REGISTRY.get(
                cluster_id, base.ZigbeeChannel
            )
            channel = channel_class(cluster, ep_channels)
            self.probe_single_cluster(component, channel, ep_channels)
    @staticmethod
    @callback
    def discover_multi_entities(
        channel_pool: ChannelPool,
        config_diagnostic_entities: bool = False,
    ) -> None:
        """Process an endpoint on and discover multiple entities.

        Uses the multi-entity registry (or the config/diagnostic registry
        when *config_diagnostic_entities* is True) to create several
        entities from one endpoint's channels.
        """
        ep_profile_id = channel_pool.endpoint.profile_id
        ep_device_type = channel_pool.endpoint.device_type
        cmpt_by_dev_type = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type)
        if config_diagnostic_entities:
            matches, claimed = zha_regs.ZHA_ENTITIES.get_config_diagnostic_entity(
                channel_pool.manufacturer,
                channel_pool.model,
                list(channel_pool.all_channels.values()),
            )
        else:
            matches, claimed = zha_regs.ZHA_ENTITIES.get_multi_entity(
                channel_pool.manufacturer,
                channel_pool.model,
                channel_pool.unclaimed_channels(),
            )
        channel_pool.claim_channels(claimed)
        for component, ent_n_chan_list in matches.items():
            for entity_and_channel in ent_n_chan_list:
                _LOGGER.debug(
                    "'%s' component -> '%s' using %s",
                    component,
                    entity_and_channel.entity_class.__name__,
                    [ch.name for ch in entity_and_channel.claimed_channel],
                )
        for component, ent_n_chan_list in matches.items():
            for entity_and_channel in ent_n_chan_list:
                if component == cmpt_by_dev_type:
                    # for well known device types, like thermostats we'll take only 1st class
                    channel_pool.async_new_entity(
                        component,
                        entity_and_channel.entity_class,
                        channel_pool.unique_id,
                        entity_and_channel.claimed_channel,
                    )
                    break
                first_ch = entity_and_channel.claimed_channel[0]
                channel_pool.async_new_entity(
                    component,
                    entity_and_channel.entity_class,
                    f"{channel_pool.unique_id}-{first_ch.cluster.cluster_id}",
                    entity_and_channel.claimed_channel,
                )
    def initialize(self, hass: HomeAssistant) -> None:
        """Update device overrides config."""
        zha_config: ConfigType = hass.data[zha_const.DATA_ZHA].get(
            zha_const.DATA_ZHA_CONFIG, {}
        )
        if overrides := zha_config.get(zha_const.CONF_DEVICE_CONFIG):
            self._device_configs.update(overrides)
class GroupProbe:
    """Determine the appropriate component for a group."""
    _hass: HomeAssistant
    def __init__(self) -> None:
        """Initialize instance."""
        # Dispatcher unsubscribe callbacks, released in cleanup().
        self._unsubs: list[Callable[[], None]] = []
    def initialize(self, hass: HomeAssistant) -> None:
        """Initialize the group probe.

        Subscribes to group-entity-removed signals so groups are reprobed
        when their membership changes.
        """
        self._hass = hass
        self._unsubs.append(
            async_dispatcher_connect(
                hass, zha_const.SIGNAL_GROUP_ENTITY_REMOVED, self._reprobe_group
            )
        )
    def cleanup(self) -> None:
        """Clean up when ZHA shuts down: release all dispatcher subscriptions."""
        # Iterate a copy since we mutate the list while unsubscribing.
        for unsub in self._unsubs[:]:
            unsub()
            self._unsubs.remove(unsub)
    @callback
    def _reprobe_group(self, group_id: int) -> None:
        """Reprobe a group for entities after its members change."""
        zha_gateway: ZHAGateway = self._hass.data[zha_const.DATA_ZHA][
            zha_const.DATA_ZHA_GATEWAY
        ]
        if (zha_group := zha_gateway.groups.get(group_id)) is None:
            return
        self.discover_group_entities(zha_group)
    @callback
    def discover_group_entities(self, group: ZHAGroup) -> None:
        """Process a group and create any entities that are needed.

        Queues one group entity per eligible domain and signals the add.
        """
        # only create a group entity if there are 2 or more members in a group
        if len(group.members) < 2:
            _LOGGER.debug(
                "Group: %s:0x%04x has less than 2 members - skipping entity discovery",
                group.name,
                group.group_id,
            )
            return
        entity_domains = GroupProbe.determine_entity_domains(self._hass, group)
        if not entity_domains:
            return
        zha_gateway: ZHAGateway = self._hass.data[zha_const.DATA_ZHA][
            zha_const.DATA_ZHA_GATEWAY
        ]
        for domain in entity_domains:
            entity_class = zha_regs.ZHA_ENTITIES.get_group_entity(domain)
            if entity_class is None:
                continue
            self._hass.data[zha_const.DATA_ZHA][domain].append(
                (
                    entity_class,
                    (
                        group.get_domain_entity_ids(domain),
                        f"{domain}_zha_group_0x{group.group_id:04x}",
                        group.group_id,
                        zha_gateway.coordinator_zha_device,
                    ),
                )
            )
        async_dispatcher_send(self._hass, zha_const.SIGNAL_ADD_ENTITIES)
    @staticmethod
    def determine_entity_domains(hass: HomeAssistant, group: ZHAGroup) -> list[str]:
        """Determine the entity domains for this group.

        A domain qualifies when at least two member entities (coordinator
        excluded) belong to it.
        """
        entity_domains: list[str] = []
        zha_gateway: ZHAGateway = hass.data[zha_const.DATA_ZHA][
            zha_const.DATA_ZHA_GATEWAY
        ]
        all_domain_occurrences = []
        for member in group.members:
            if member.device.is_coordinator:
                continue
            entities = async_entries_for_device(
                zha_gateway.ha_entity_registry,
                member.device.device_id,
                include_disabled_entities=True,
            )
            all_domain_occurrences.extend(
                [
                    entity.domain
                    for entity in entities
                    if entity.domain in zha_regs.GROUP_ENTITY_DOMAINS
                ]
            )
        if not all_domain_occurrences:
            return entity_domains
        # keep the domains represented by two or more member entities
        counts = Counter(all_domain_occurrences)
        entity_domains = [domain[0] for domain in counts.items() if domain[1] >= 2]
        _LOGGER.debug(
            "The entity domains are: %s for group: %s:0x%04x",
            entity_domains,
            group.name,
            group.group_id,
        )
        return entity_domains
# Module-level singletons used by the rest of the ZHA integration.
PROBE = ProbeEndpoint()
GROUP_PROBE = GroupProbe()
|
{
"content_hash": "69566c8f15c2d9a6bc991e98d6924a5a",
"timestamp": "",
"source": "github",
"line_count": 347,
"max_line_length": 93,
"avg_line_length": 36.71757925072046,
"alnum_prop": 0.5923396907621066,
"repo_name": "nkgilley/home-assistant",
"id": "6b690f4da085acee1a4bfb6c04064421f2b2acd9",
"size": "12741",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/zha/core/discovery.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "51597279"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
import datetime
import random
import numpy as np
from hyperopt import hp, fmin, tpe
import os
import sys
from sklearn.linear_model import Ridge
sys.path.insert(0, os.getcwd())
import qml_workdir.classes.config
from qml.cv import QCV
from qml.helpers import get_engine
from qml.models import QXgb, QAvg, QRankedAvg, QRankedByLineAvg, QStackModel
from qml_workdir.classes.models import qm
if __name__ == "__main__":
    _, conn = get_engine()
    cv = QCV(qm)
    # Only models scoring better (lower) than this are eligible ensemble members.
    CV_SCORE_TO_SELECT = 0.53741
    # Abort cross-validation early once a fold's score exceeds this.
    CV_SCORE_TO_STOP = 0.542
    ROUNDS = 1000
    # Top-20 (by cv_score) level<=2 models per (data_id, cls, descr) group.
    res = conn.execute(
        """
        select data_id, cls, descr,
          substring_index(group_concat(model_id order by cv_score), ',', 20) as models
        from qml_results r
            inner join qml_models m using(model_id)
        where m.level<=2 and cv_score < {}
        group by data_id, cls, descr
        """.format(CV_SCORE_TO_SELECT)
    ).fetchall()
    results = []
    for r in res:
        for m in r['models'].split(','):
            results.append([int(m), r['data_id'], 1000])
    # Randomly sample candidate ensembles and register/evaluate them.
    # NOTE(review): the loop runs 5000 iterations but the progress line
    # prints "/ROUNDS" (1000) — the denominator looks stale.
    for i in range(5000):
        random.shuffle(results)
        models = list(results[:random.randint(2, 10)])
        models = sorted(models, key=lambda x: (x[0], x[1]))
        print('{}/{}'.format(i, ROUNDS), models)
        model_id = qm.add_by_params(
            QAvg(models)
        )
        print(cv.cross_val(model_id, -1, early_stop_cv=lambda x: x>CV_SCORE_TO_STOP))
        conn.execute("update qml_models set level=3 where model_id={}".format(model_id))
        # model_id = qm.add_by_params(
        #     QAvg(models, is_geom=True)
        # )
        # print(cv.cross_val(model_id, -1, early_stop_cv=lambda x: x>CV_SCORE_TO_STOP))
        # conn.execute("update qml_models set level=3 where model_id={}".format(model_id))
        #
        # model_id = qm.add_by_params(
        #     QRankedAvg(models)
        # )
        # print(cv.cross_val(model_id, -1, early_stop_cv=lambda x: x>CV_SCORE_TO_STOP))
        # conn.execute("update qml_models set level=3 where model_id={}".format(model_id))
        # model_id = qm.add_by_params(
        #     QRankedByLineAvg(models)
        # )
        # print(cv.cross_val(model_id, -1, early_stop_cv=lambda x: x>CV_SCORE_TO_STOP))
        # conn.execute("update qml_models set level=3 where model_id={}".format(model_id))
        # Larger candidate sets additionally get ridge-stacked ensembles.
        if len(models) > 8:
            model_id2 = qm.add_by_params(
                Ridge(alpha=0.01),
            )
            model_id = qm.add_by_params(
                QStackModel(models, second_layer_model=model_id2, nsplits=2)
            )
            print(cv.cross_val(model_id, -1, early_stop_cv=lambda x: x>CV_SCORE_TO_STOP))
            conn.execute("update qml_models set level=3 where model_id={}".format(model_id))
            model_id2 = qm.add_by_params(
                Ridge(alpha=0.05),
            )
            model_id = qm.add_by_params(
                QStackModel(models, second_layer_model=model_id2, nsplits=2)
            )
            print(cv.cross_val(model_id, -1, early_stop_cv=lambda x: x>CV_SCORE_TO_STOP))
            conn.execute("update qml_models set level=3 where model_id={}".format(model_id))
    # Promote any remaining level-1 ensemble models to level 3.
    conn.execute(
        "update qml_models set level=3 where level=1 and cls in ('qavg', 'qrankedavg', 'QRankedByLineAvg', 'QStackModel')")
|
{
"content_hash": "7c6b473b0fdef2271338c22c1b368a43",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 123,
"avg_line_length": 33.52,
"alnum_prop": 0.5781622911694511,
"repo_name": "quantum13/mlbootcamp5",
"id": "de18b7f746ee4c3caec017e2f3b541c93181e4f5",
"size": "3352",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qml_workdir/ensembling/level3_model01s.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "440087"
},
{
"name": "Python",
"bytes": "298477"
}
],
"symlink_target": ""
}
|
__author__ = 'abr'
# Directory containing the sample files to identify — presumably a govdocs1
# corpus subset; confirm against the harness that reads dataDir.
dataDir = '/home/abr/Downloads/000/'
# Launcher paths for the identification tools under test.
_droidPath = "/home/abr/Downloads/droid/droid.sh"
_tikaWrapperPath = "/home/abr/Projects/git/TikaWrapper/target/tikaWrapper-0.0.2-jar-with-dependencies.jar"
_fidoPath = "/home/abr/Downloads/openplanets-fido-991c16a/fido/fido.py"
# NOTE(review): "comleteFile" is misspelled but is the public name other
# modules import — do not rename silently.
comleteFile = "scapetesting/govdocs1/csv/complete.csv"
truthFile = '/home/abr/Downloads/groundtruth-fitools/govdocsDetails 6-25-2010.csv'
|
{
"content_hash": "9110b7d6d0ffc933fd911ce33606b8e9",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 106,
"avg_line_length": 30.714285714285715,
"alnum_prop": 0.7581395348837209,
"repo_name": "openpreserve/Scape-Tool-Tester",
"id": "7e529fa61e5e0b87491e440034b62f86f012889e",
"size": "430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "22459"
}
],
"symlink_target": ""
}
|
from django.core.exceptions import ValidationError
from django.http import HttpRequest, HttpResponse
from django.utils.translation import gettext as _
from zerver.actions.message_send import send_rate_limited_pm_notification_to_bot_owner
from zerver.decorator import webhook_view
from zerver.lib.exceptions import JsonableError
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.send_email import FromAddress
from zerver.lib.validator import WildValue, check_string, to_wild_value
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile
# Private message sent to the bot owner when a payload is missing required
# fields (see the ValidationError handler in api_zabbix_webhook).
MISCONFIGURED_PAYLOAD_ERROR_MESSAGE = """
Hi there! Your bot {bot_name} just received a Zabbix payload that is missing
some data that Zulip requires. This usually indicates a configuration issue
in your Zabbix webhook settings. Please make sure that you set the
script correctly and provide all the required parameters
when configuring the Zabbix webhook. Contact {support_email} if you
need further help!
"""
# The Zulip topic is simply the alerting host's name.
ZABBIX_TOPIC_TEMPLATE = "{hostname}"
# Markdown body of the alert message.
ZABBIX_MESSAGE_TEMPLATE = """
{status} ({severity}) alert on [{hostname}]({link}):
* {trigger}
* {item}
""".strip()
@webhook_view("Zabbix")
@has_request_variables
def api_zabbix_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: WildValue = REQ(argument_type="body", converter=to_wild_value),
) -> HttpResponse:
    """Handle an incoming Zabbix alert webhook.

    Builds the topic and message body from the payload and forwards them
    via check_send_webhook_message.  On a ValidationError (presumably
    raised by the tame() field extraction when required keys are missing
    — confirm), the bot owner is notified privately and the request is
    rejected as an invalid payload.
    """
    try:
        body = get_body_for_http_request(payload)
        subject = get_subject_for_http_request(payload)
    except ValidationError:
        message = MISCONFIGURED_PAYLOAD_ERROR_MESSAGE.format(
            bot_name=user_profile.full_name,
            support_email=FromAddress.SUPPORT,
        ).strip()
        send_rate_limited_pm_notification_to_bot_owner(user_profile, user_profile.realm, message)
        raise JsonableError(_("Invalid payload"))
    check_send_webhook_message(request, user_profile, subject, body)
    return json_success(request)
def get_subject_for_http_request(payload: WildValue) -> str:
    """Build the Zulip topic (the alerting host's name) from the payload."""
    hostname = payload["hostname"].tame(check_string)
    return ZABBIX_TOPIC_TEMPLATE.format(hostname=hostname)
def get_body_for_http_request(payload: WildValue) -> str:
    """Render the alert message body from the Zabbix payload fields."""
    # Extract the required string fields in the same order as before, so a
    # missing key fails identically.
    fields = ("hostname", "severity", "status", "item", "trigger", "link")
    data = {field: payload[field].tame(check_string) for field in fields}
    return ZABBIX_MESSAGE_TEMPLATE.format(**data)
|
{
"content_hash": "99d4d69bb6ae834304df0c9951a596bc",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 97,
"avg_line_length": 36.25,
"alnum_prop": 0.7263157894736842,
"repo_name": "zulip/zulip",
"id": "c76172313bd395810ba4f61ecc665422bc3a1604",
"size": "2755",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "zerver/webhooks/zabbix/view.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "509211"
},
{
"name": "Dockerfile",
"bytes": "4219"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "696430"
},
{
"name": "Handlebars",
"bytes": "384277"
},
{
"name": "JavaScript",
"bytes": "4098367"
},
{
"name": "Perl",
"bytes": "10163"
},
{
"name": "Puppet",
"bytes": "112433"
},
{
"name": "Python",
"bytes": "10336945"
},
{
"name": "Ruby",
"bytes": "3166"
},
{
"name": "Shell",
"bytes": "147162"
},
{
"name": "TypeScript",
"bytes": "286785"
}
],
"symlink_target": ""
}
|
import os
import boto3
from ..cli import echo
from .base import BaseRemoteStorage
class RemoteStorage(BaseRemoteStorage):
    """Remote storage backend backed by an Amazon S3 bucket.

    Remote object keys mirror local paths with the local ``root`` prefix
    stripped; used as a context manager so the bucket handle is created
    on entry.
    """
    def __init__(self, config, root):
        super(RemoteStorage, self).__init__(config, root)
        self.check_s3_config()
    def check_s3_config(self):
        """Fail fast when a required S3 setting is missing."""
        assert 'key' in self.config, '`key` not found in configuration'
        assert 'secret' in self.config, '`secret` not found in configuration'
        assert 'bucket' in self.config, '`bucket` not found in configuration'
    def build_s3_args(self):
        """Return the positional/keyword arguments for ``boto3.resource``."""
        kwargs = {
            'aws_access_key_id': self.config['key'],
            'aws_secret_access_key': self.config['secret']
        }
        # Region is optional; boto3 falls back to its own defaults otherwise.
        if 'region' in self.config:
            kwargs['region_name'] = self.config['region']
        return ('s3', ), kwargs
    def __enter__(self):
        args, kwargs = self.build_s3_args()
        self.bucket = boto3.resource(*args, **kwargs).Bucket(self.config['bucket'])
        return self
    def __exit__(self, *args):
        # Nothing to release: boto3 resources need no explicit teardown.
        pass
    def to_remote(self, path):
        """Map a local path to its key inside the bucket."""
        if path.startswith(self.root):
            return path[len(self.root):].lstrip('/')
        return path
    def to_local(self, path):
        """Map a bucket key back to a path under the local root."""
        return '%s/%s' % (self.root, path)
    def push(self, filename):
        """Upload one local file, echoing its name for progress feedback."""
        echo(filename)
        self.bucket.upload_file(filename, self.to_remote(filename))
    def pull(self, path):
        """Download every object under *path*, creating directories as needed."""
        prefix = self.to_remote(path)
        for summary in self.bucket.objects.filter(Prefix=prefix):
            target = self.to_local(summary.key)
            echo(target)
            parent = os.path.dirname(target)
            if not os.path.exists(parent):
                os.makedirs(parent)
            self.bucket.download_file(summary.key, target)
|
{
"content_hash": "34bbde393c8f3f2a48d9b17171491ff1",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 77,
"avg_line_length": 26.52054794520548,
"alnum_prop": 0.5945247933884298,
"repo_name": "saalaa/no-cloud",
"id": "af966cfdaa42ebe7e1f46e15bd0ff0880b8c3a37",
"size": "2042",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "no_cloud/remote/s3.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2561"
},
{
"name": "Python",
"bytes": "27424"
},
{
"name": "Shell",
"bytes": "206"
}
],
"symlink_target": ""
}
|
# Expose only the package version as public API.
__all__ = ['__version__']
import pbr.version
# pbr derives the version from package metadata / git tags.
version_info = pbr.version.VersionInfo('eclcli')
try:
    __version__ = version_info.version_string()
except AttributeError:
    # Older pbr releases may lack version_string(); degrade gracefully.
    __version__ = None
|
{
"content_hash": "e0254ae02eb73137f5faae878e96da01",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 48,
"avg_line_length": 21.666666666666668,
"alnum_prop": 0.6615384615384615,
"repo_name": "anythingrandom/eclcli",
"id": "8112e4265ecd81fe58d607fb8289979022724cde",
"size": "761",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "eclcli/__init__.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1647657"
}
],
"symlink_target": ""
}
|
"""
Tests of runtime settings.
"""
import sys
import os
import docutils_difflib
import pprint
import warnings
import unittest
from types import StringType
import DocutilsTestSupport # must be imported before docutils
from docutils import frontend, utils
from docutils.writers import html4css1, pep_html
from docutils.parsers import rst
warnings.filterwarnings(action='ignore',
category=frontend.ConfigDeprecationWarning)
def fixpath(path):
    """Convert a '/'-separated relative path into an absolute native path."""
    components = path.split('/')
    native = os.path.join(*components)
    return os.path.abspath(native)
class ConfigFileTests(unittest.TestCase):
    """Check that settings read from config files match expectations.

    Each named config file in ``config_files`` has a matching dict of
    expected settings in ``settings``.  (Python 2 code: uses ``has_key``
    and ``print >>`` syntax.)
    """
    # Named test config files, resolved to absolute native paths.
    config_files = {'old': fixpath('data/config_old.txt'),
                    'one': fixpath('data/config_1.txt'),
                    'two': fixpath('data/config_2.txt'),
                    'list': fixpath('data/config_list.txt'),
                    'list2': fixpath('data/config_list_2.txt'),
                    'error': fixpath('data/config_error_handler.txt')}
    # Expected parsed settings, keyed by the same names as config_files.
    settings = {
        'old': {'datestamp': '%Y-%m-%d %H:%M UTC',
                'generator': 1,
                'no_random': 1,
                'python_home': 'http://www.python.org',
                'source_link': 1,
                'stylesheet': None,
                'stylesheet_path': fixpath('data/stylesheets/pep.css'),
                'template': fixpath('data/pep-html-template')},
        'one': {'datestamp': '%Y-%m-%d %H:%M UTC',
                'generator': 1,
                'no_random': 1,
                'python_home': 'http://www.python.org',
                'record_dependencies': utils.DependencyList(),
                'source_link': 1,
                'stylesheet': None,
                'stylesheet_path': fixpath('data/stylesheets/pep.css'),
                'tab_width': 8,
                'template': fixpath('data/pep-html-template'),
                'trim_footnote_reference_space': 1},
        'two': {'footnote_references': 'superscript',
                'generator': 0,
                'record_dependencies': utils.DependencyList(),
                'stylesheet': None,
                'stylesheet_path': fixpath('data/test.css'),
                'trim_footnote_reference_space': None},
        'list': {'expose_internals': ['a', 'b', 'c', 'd', 'e']},
        'list2': {'expose_internals': ['a', 'b', 'c', 'd', 'e', 'f']},
        'error': {'error_encoding': 'ascii',
                  'error_encoding_error_handler': 'strict'},
        }
    compare = docutils_difflib.Differ().compare
    """Comparison method shared by all tests."""
    def setUp(self):
        # read_config_files=None keeps the user's real config out of the test.
        self.option_parser = frontend.OptionParser(
            components=(pep_html.Writer, rst.Parser), read_config_files=None)
    def files_settings(self, *names):
        """Parse the named config files in order and return merged settings."""
        settings = frontend.Values()
        for name in names:
            settings.update(self.option_parser.get_config_file_settings(
                self.config_files[name]), self.option_parser)
        return settings.__dict__
    def expected_settings(self, *names):
        """Merge the expected-settings dicts for the given names, in order."""
        expected = {}
        for name in names:
            expected.update(self.settings[name])
        return expected
    def compare_output(self, result, expected):
        """`result` and `expected` should both be dicts."""
        self.assert_(result.has_key('record_dependencies'))
        if not expected.has_key('record_dependencies'):
            # Delete it if we don't want to test it.
            del result['record_dependencies']
        result = pprint.pformat(result) + '\n'
        expected = pprint.pformat(expected) + '\n'
        try:
            self.assertEquals(result, expected)
        except AssertionError:
            # On failure, print a readable diff of the pretty-printed dicts.
            print >>sys.stderr, '\n%s\n' % (self,)
            print >>sys.stderr, '-: expected\n+: result'
            print >>sys.stderr, ''.join(self.compare(expected.splitlines(1),
                                                     result.splitlines(1)))
            raise
    def test_nofiles(self):
        self.compare_output(self.files_settings(),
                            self.expected_settings())
    def test_old(self):
        self.compare_output(self.files_settings('old'),
                            self.expected_settings('old'))
    def test_one(self):
        self.compare_output(self.files_settings('one'),
                            self.expected_settings('one'))
    def test_multiple(self):
        # Later files override earlier ones.
        self.compare_output(self.files_settings('one', 'two'),
                            self.expected_settings('one', 'two'))
    def test_old_and_new(self):
        self.compare_output(self.files_settings('old', 'two'),
                            self.expected_settings('old', 'two'))
    def test_list(self):
        self.compare_output(self.files_settings('list'),
                            self.expected_settings('list'))
    def test_list2(self):
        # 'list2' extends the list from 'list'; only the final value is checked.
        self.compare_output(self.files_settings('list', 'list2'),
                            self.expected_settings('list2'))
    def test_error_handler(self):
        self.compare_output(self.files_settings('error'),
                            self.expected_settings('error'))
class ConfigEnvVarFileTests(ConfigFileTests):
    """
    Repeats the tests of `ConfigFileTests` using the ``DOCUTILSCONFIG``
    environment variable and the standard Docutils config file mechanism.
    """
    def setUp(self):
        ConfigFileTests.setUp(self)
        # Work on a copy of the environment so tearDown can restore it.
        self.orig_environ = os.environ
        os.environ = os.environ.copy()
    def files_settings(self, *names):
        # Point DOCUTILSCONFIG at the selected files instead of passing
        # them to the parser directly.
        files = [self.config_files[name] for name in names]
        os.environ['DOCUTILSCONFIG'] = os.pathsep.join(files)
        settings = self.option_parser.get_standard_config_settings()
        return settings.__dict__
    def tearDown(self):
        # Restore the original (unmodified) environment.
        os.environ = self.orig_environ
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "cd0d3c2422c526b5841078ef4635eec4",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 77,
"avg_line_length": 36.57861635220126,
"alnum_prop": 0.5594910591471802,
"repo_name": "indro/t2c",
"id": "4ffc62e108a7dc48993584593e5dfd97d80c9945",
"size": "6055",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "libs/external_libs/docutils-0.4/test/test_settings.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "4084"
},
{
"name": "Assembly",
"bytes": "3294"
},
{
"name": "Boo",
"bytes": "1111"
},
{
"name": "C",
"bytes": "146718"
},
{
"name": "C#",
"bytes": "17611"
},
{
"name": "C++",
"bytes": "79372"
},
{
"name": "CSS",
"bytes": "165869"
},
{
"name": "Clojure",
"bytes": "21964"
},
{
"name": "Common Lisp",
"bytes": "48874"
},
{
"name": "D",
"bytes": "5475"
},
{
"name": "Dylan",
"bytes": "683"
},
{
"name": "Emacs Lisp",
"bytes": "126207"
},
{
"name": "Erlang",
"bytes": "8972"
},
{
"name": "FORTRAN",
"bytes": "27700"
},
{
"name": "Haskell",
"bytes": "40419"
},
{
"name": "Java",
"bytes": "81362"
},
{
"name": "JavaScript",
"bytes": "75388"
},
{
"name": "Logtalk",
"bytes": "7260"
},
{
"name": "Lua",
"bytes": "8677"
},
{
"name": "Matlab",
"bytes": "469"
},
{
"name": "OCaml",
"bytes": "42416"
},
{
"name": "Objective-C",
"bytes": "778"
},
{
"name": "PHP",
"bytes": "17078"
},
{
"name": "Pascal",
"bytes": "84519"
},
{
"name": "Perl",
"bytes": "37504"
},
{
"name": "Python",
"bytes": "8018145"
},
{
"name": "R",
"bytes": "3468"
},
{
"name": "Ruby",
"bytes": "91230"
},
{
"name": "Scala",
"bytes": "272"
},
{
"name": "Scheme",
"bytes": "45856"
},
{
"name": "Shell",
"bytes": "117254"
},
{
"name": "Smalltalk",
"bytes": "15501"
},
{
"name": "VimL",
"bytes": "16660"
},
{
"name": "Visual Basic",
"bytes": "846"
},
{
"name": "XSLT",
"bytes": "755"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Drop the obsolete ``RequesterRanking`` model."""
    dependencies = [('crowdsourcing', '0013_auto_20151221_2208')]
    operations = [migrations.DeleteModel(name='RequesterRanking')]
|
{
"content_hash": "23c8300fb7c1f52d3fbcce84216468cf",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 53,
"avg_line_length": 19.4375,
"alnum_prop": 0.6237942122186495,
"repo_name": "shirishgoyal/crowdsource-platform",
"id": "21094bf9aa4c129c27c2495ec54a90eaa721e056",
"size": "335",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop2",
"path": "crowdsourcing/migrations/0063_delete_requesterranking.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "63075"
},
{
"name": "HTML",
"bytes": "229504"
},
{
"name": "JavaScript",
"bytes": "312581"
},
{
"name": "Python",
"bytes": "748797"
},
{
"name": "Shell",
"bytes": "838"
}
],
"symlink_target": ""
}
|
from django.contrib.auth.models import Group, User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.shortcuts import reverse
from model_utils.models import TimeStampedModel
class WorkbenchUser(TimeStampedModel):
    """Profile extending Django's ``User`` with workbench-specific fields.

    Created automatically for each new ``User`` via the post_save signal
    handler below.  TimeStampedModel supplies created/modified timestamps.
    """
    # One profile per auth user; deleted along with the user.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    # Institutional network id.  NOTE(review): REQUIRED_FIELDS is normally an
    # AUTH_USER_MODEL attribute; confirm it has any effect on this profile model.
    netid = models.CharField(max_length=200)
    can_run_experiments = models.BooleanField(default=True)
    REQUIRED_FIELDS = ['netid',]
    def __str__(self):
        return self.user.username
    def get_absolute_url(self):
        """URL of this user's public profile page."""
        return reverse('view_profile', kwargs={'username': self.user.username})
def get_workbench_user(user):
    """Return the WorkbenchUser profile linked to *user*.

    Raises ``WorkbenchUser.DoesNotExist`` when no profile exists.
    """
    return WorkbenchUser.objects.get(user=user)
@receiver(post_save, sender=User)
def create_workbench_user(sender, instance, created, **kwargs):
    """Ensure every newly created ``User`` gets a ``WorkbenchUser`` profile.

    ``post_save`` signal handler.  Acts only when the user row was just
    inserted; the existence check guards against a profile already created
    elsewhere (e.g. by a fixture or data migration).
    """
    # .exists() avoids fetching rows just to test queryset truthiness.
    if created and not WorkbenchUser.objects.filter(user=instance).exists():
        # NOTE(review): 'superuser' looks like a placeholder netid -- confirm
        # the real netid is filled in later (e.g. on first login).
        WorkbenchUser.objects.create(user=instance, netid='superuser')
def get_researcher_group():
    """Return the 'Researcher' auth group.

    Raises ``Group.DoesNotExist`` if the group has not been created.
    """
    return Group.objects.get(name='Researcher')
|
{
"content_hash": "7ebf41b537697cefb218d6ea2e173f0c",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 80,
"avg_line_length": 31.27027027027027,
"alnum_prop": 0.7268798617113224,
"repo_name": "MOOCworkbench/MOOCworkbench",
"id": "f04280472b6a7103a17cd7fc057f85cd86498d0c",
"size": "1157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "user_manager/models.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1942"
},
{
"name": "HTML",
"bytes": "129189"
},
{
"name": "Python",
"bytes": "423140"
},
{
"name": "Shell",
"bytes": "952"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import copy
import datetime
import re
import threading
import unittest
import warnings
from decimal import Decimal, Rounded
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import no_style
from django.db import (
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connection, connections,
reset_queries, transaction,
)
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.postgresql_psycopg2 import version as pg_version
from django.db.backends.signals import connection_created
from django.db.backends.utils import CursorWrapper, format_number
from django.db.models import Avg, StdDev, Sum, Variance
from django.db.models.sql.constants import CURSOR
from django.db.utils import ConnectionHandler
from django.test import (
TestCase, TransactionTestCase, mock, override_settings, skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import ignore_warnings, str_prefix
from django.utils import six
from django.utils.deprecation import RemovedInDjango19Warning
from django.utils.six.moves import range
from . import models
class DummyBackendTest(TestCase):
    """Behaviour of the fallback dummy database backend."""
    def test_no_databases(self):
        """
        Test that empty DATABASES setting default to the dummy backend.
        """
        DATABASES = {}
        conns = ConnectionHandler(DATABASES)
        self.assertEqual(conns[DEFAULT_DB_ALIAS].settings_dict['ENGINE'],
                         'django.db.backends.dummy')
        # The dummy backend cannot actually connect.
        with self.assertRaises(ImproperlyConfigured):
            conns[DEFAULT_DB_ALIAS].ensure_connection()
@unittest.skipUnless(connection.vendor == 'oracle', "Test only for Oracle")
class OracleTests(unittest.TestCase):
    """Oracle-backend-specific regression tests (skipped on other vendors)."""
    def test_quote_name(self):
        # Check that '%' chars are escaped for query execution.
        name = '"SOME%NAME"'
        quoted_name = connection.ops.quote_name(name)
        self.assertEqual(quoted_name % (), name)
    def test_dbms_session(self):
        # If the backend is Oracle, test that we can call a standard
        # stored procedure through our cursor wrapper.
        from django.db.backends.oracle.base import convert_unicode
        with connection.cursor() as cursor:
            cursor.callproc(convert_unicode('DBMS_SESSION.SET_IDENTIFIER'),
                            [convert_unicode('_django_testing!')])
    def test_cursor_var(self):
        # If the backend is Oracle, test that we can pass cursor variables
        # as query parameters.
        from django.db.backends.oracle.base import Database
        with connection.cursor() as cursor:
            var = cursor.var(Database.STRING)
            cursor.execute("BEGIN %s := 'X'; END; ", [var])
            self.assertEqual(var.getvalue(), 'X')
    def test_long_string(self):
        # If the backend is Oracle, test that we can save a text longer
        # than 4000 chars and read it properly
        with connection.cursor() as cursor:
            cursor.execute('CREATE TABLE ltext ("TEXT" NCLOB)')
            long_str = ''.join(six.text_type(x) for x in range(4000))
            cursor.execute('INSERT INTO ltext VALUES (%s)', [long_str])
            cursor.execute('SELECT text FROM ltext')
            row = cursor.fetchone()
            # NCLOB columns come back as LOB handles; read() yields the text.
            self.assertEqual(long_str, row[0].read())
            cursor.execute('DROP TABLE ltext')
    def test_client_encoding(self):
        # If the backend is Oracle, test that the client encoding is set
        # correctly. This was broken under Cygwin prior to r14781.
        connection.ensure_connection()
        self.assertEqual(connection.connection.encoding, "UTF-8")
        self.assertEqual(connection.connection.nencoding, "UTF-8")
    def test_order_of_nls_parameters(self):
        # an 'almost right' datetime should work with configured
        # NLS parameters as per #18465.
        with connection.cursor() as cursor:
            query = "select 1 from dual where '1936-12-29 00:00' < sysdate"
            # Test that the query succeeds without errors - pre #18465 this
            # wasn't the case.
            cursor.execute(query)
            self.assertEqual(cursor.fetchone()[0], 1)
@unittest.skipUnless(connection.vendor == 'sqlite', "Test only for SQLite")
class SQLiteTests(TestCase):
    """SQLite-backend-specific regression tests (skipped on other vendors)."""
    longMessage = True
    def test_autoincrement(self):
        """
        Check that auto_increment fields are created with the AUTOINCREMENT
        keyword in order to be monotonically increasing. Refs #10164.
        """
        with connection.schema_editor(collect_sql=True) as editor:
            editor.create_model(models.Square)
            statements = editor.collected_sql
        match = re.search('"id" ([^,]+),', statements[0])
        self.assertIsNotNone(match)
        self.assertEqual('integer NOT NULL PRIMARY KEY AUTOINCREMENT',
            match.group(1), "Wrong SQL used to create an auto-increment "
            "column on SQLite")
    def test_aggregation(self):
        """
        #19360: Raise NotImplementedError when aggregating on date/time fields.
        """
        for aggregate in (Sum, Avg, Variance, StdDev):
            self.assertRaises(
                NotImplementedError,
                models.Item.objects.all().aggregate, aggregate('time'))
            self.assertRaises(
                NotImplementedError,
                models.Item.objects.all().aggregate, aggregate('date'))
            self.assertRaises(
                NotImplementedError,
                models.Item.objects.all().aggregate, aggregate('last_modified'))
            self.assertRaises(
                NotImplementedError,
                models.Item.objects.all().aggregate,
                **{'complex': aggregate('last_modified') + aggregate('last_modified')})
@unittest.skipUnless(connection.vendor == 'postgresql', "Test only for PostgreSQL")
class PostgreSQLTests(TestCase):
    """PostgreSQL-backend-specific regression tests (skipped on other vendors)."""
    def assert_parses(self, version_string, version):
        # Helper: one version-parsing assertion.
        self.assertEqual(pg_version._parse_version(version_string), version)
    def test_parsing(self):
        """Test PostgreSQL version parsing from `SELECT version()` output"""
        self.assert_parses("PostgreSQL 9.3 beta4", 90300)
        self.assert_parses("PostgreSQL 9.3", 90300)
        self.assert_parses("EnterpriseDB 9.3", 90300)
        self.assert_parses("PostgreSQL 9.3.6", 90306)
        self.assert_parses("PostgreSQL 9.4beta1", 90400)
        self.assert_parses("PostgreSQL 9.3.1 on i386-apple-darwin9.2.2, compiled by GCC i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 (Apple Inc. build 5478)", 90301)
    def test_nodb_connection(self):
        """
        Test that the _nodb_connection property fallbacks to the default connection
        database when access to the 'postgres' database is not granted.
        """
        def mocked_connect(self):
            if self.settings_dict['NAME'] is None:
                raise DatabaseError()
            return ''
        nodb_conn = connection._nodb_connection
        self.assertIsNone(nodb_conn.settings_dict['NAME'])
        # Now assume the 'postgres' db isn't available
        del connection._nodb_connection
        with warnings.catch_warnings(record=True) as w:
            with mock.patch('django.db.backends.base.base.BaseDatabaseWrapper.connect',
                            side_effect=mocked_connect, autospec=True):
                nodb_conn = connection._nodb_connection
        del connection._nodb_connection
        self.assertIsNotNone(nodb_conn.settings_dict['NAME'])
        self.assertEqual(nodb_conn.settings_dict['NAME'], settings.DATABASES[DEFAULT_DB_ALIAS]['NAME'])
        # Check a RuntimeWarning has been emitted
        self.assertEqual(len(w), 1)
        self.assertEqual(w[0].message.__class__, RuntimeWarning)
    def test_version_detection(self):
        """Test PostgreSQL version detection"""
        # Helper mocks
        class CursorMock(object):
            "Very simple mock of DB-API cursor"
            def execute(self, arg):
                pass
            def fetchone(self):
                return ["PostgreSQL 9.3"]
            def __enter__(self):
                return self
            def __exit__(self, type, value, traceback):
                pass
        class OlderConnectionMock(object):
            "Mock of psycopg2 (< 2.0.12) connection"
            def cursor(self):
                return CursorMock()
        # psycopg2 < 2.0.12 code path
        conn = OlderConnectionMock()
        self.assertEqual(pg_version.get_version(conn), 90300)
    def test_connect_and_rollback(self):
        """
        PostgreSQL shouldn't roll back SET TIME ZONE, even if the first
        transaction is rolled back (#17062).
        """
        databases = copy.deepcopy(settings.DATABASES)
        new_connections = ConnectionHandler(databases)
        new_connection = new_connections[DEFAULT_DB_ALIAS]
        try:
            # Ensure the database default time zone is different than
            # the time zone in new_connection.settings_dict. We can
            # get the default time zone by reset & show.
            cursor = new_connection.cursor()
            cursor.execute("RESET TIMEZONE")
            cursor.execute("SHOW TIMEZONE")
            db_default_tz = cursor.fetchone()[0]
            new_tz = 'Europe/Paris' if db_default_tz == 'UTC' else 'UTC'
            new_connection.close()
            # Fetch a new connection with the new_tz as default
            # time zone, run a query and rollback.
            new_connection.settings_dict['TIME_ZONE'] = new_tz
            new_connection.set_autocommit(False)
            cursor = new_connection.cursor()
            new_connection.rollback()
            # Now let's see if the rollback rolled back the SET TIME ZONE.
            cursor.execute("SHOW TIMEZONE")
            tz = cursor.fetchone()[0]
            self.assertEqual(new_tz, tz)
        finally:
            new_connection.close()
    def test_connect_non_autocommit(self):
        """
        The connection wrapper shouldn't believe that autocommit is enabled
        after setting the time zone when AUTOCOMMIT is False (#21452).
        """
        databases = copy.deepcopy(settings.DATABASES)
        databases[DEFAULT_DB_ALIAS]['AUTOCOMMIT'] = False
        new_connections = ConnectionHandler(databases)
        new_connection = new_connections[DEFAULT_DB_ALIAS]
        try:
            # Open a database connection.
            new_connection.cursor()
            self.assertFalse(new_connection.get_autocommit())
        finally:
            new_connection.close()
    def test_connect_isolation_level(self):
        """
        Regression test for #18130 and #24318.
        """
        from psycopg2.extensions import (
            ISOLATION_LEVEL_READ_COMMITTED as read_committed,
            ISOLATION_LEVEL_SERIALIZABLE as serializable,
        )
        # Since this is a django.test.TestCase, a transaction is in progress
        # and the isolation level isn't reported as 0. This test assumes that
        # PostgreSQL is configured with the default isolation level.
        # Check the level on the psycopg2 connection, not the Django wrapper.
        self.assertEqual(connection.connection.isolation_level, read_committed)
        databases = copy.deepcopy(settings.DATABASES)
        databases[DEFAULT_DB_ALIAS]['OPTIONS']['isolation_level'] = serializable
        new_connections = ConnectionHandler(databases)
        new_connection = new_connections[DEFAULT_DB_ALIAS]
        try:
            # Start a transaction so the isolation level isn't reported as 0.
            new_connection.set_autocommit(False)
            # Check the level on the psycopg2 connection, not the Django wrapper.
            self.assertEqual(new_connection.connection.isolation_level, serializable)
        finally:
            new_connection.close()
    def _select(self, val):
        # Round-trip a single value through the database.
        with connection.cursor() as cursor:
            cursor.execute("SELECT %s", (val,))
            return cursor.fetchone()[0]
    def test_select_ascii_array(self):
        a = ["awef"]
        b = self._select(a)
        self.assertEqual(a[0], b[0])
    def test_select_unicode_array(self):
        a = ["α²awef"]
        b = self._select(a)
        self.assertEqual(a[0], b[0])
    def test_lookup_cast(self):
        # Text lookups must cast the column to ::text on PostgreSQL.
        from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
        do = DatabaseOperations(connection=None)
        for lookup in ('iexact', 'contains', 'icontains', 'startswith',
                       'istartswith', 'endswith', 'iendswith', 'regex', 'iregex'):
            self.assertIn('::text', do.lookup_cast(lookup))
    def test_correct_extraction_psycopg2_version(self):
        # Version strings with and without a pre-release suffix.
        from django.db.backends.postgresql_psycopg2.base import psycopg2_version
        version_path = 'django.db.backends.postgresql_psycopg2.base.Database.__version__'
        with mock.patch(version_path, '2.6.9'):
            self.assertEqual(psycopg2_version(), (2, 6, 9))
        with mock.patch(version_path, '2.5.dev0'):
            self.assertEqual(psycopg2_version(), (2, 5))
class DateQuotingTest(TestCase):
    """Date functions must quote field names that clash with their keywords."""
    def test_django_date_trunc(self):
        """
        Test the custom ``django_date_trunc method``, in particular against
        fields which clash with strings passed to it (e.g. 'year') - see
        #12818__.
        __: http://code.djangoproject.com/ticket/12818
        """
        updated = datetime.datetime(2010, 2, 20)
        models.SchoolClass.objects.create(year=2009, last_updated=updated)
        years = models.SchoolClass.objects.dates('last_updated', 'year')
        self.assertEqual(list(years), [datetime.date(2010, 1, 1)])
    def test_django_date_extract(self):
        """
        Test the custom ``django_date_extract method``, in particular against fields
        which clash with strings passed to it (e.g. 'day') - see #12818__.
        __: http://code.djangoproject.com/ticket/12818
        """
        updated = datetime.datetime(2010, 2, 20)
        models.SchoolClass.objects.create(year=2009, last_updated=updated)
        classes = models.SchoolClass.objects.filter(last_updated__day=20)
        self.assertEqual(len(classes), 1)
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
    """Behaviour of ``ops.last_executed_query`` and the DEBUG query log."""
    def test_last_executed_query(self):
        """
        last_executed_query should not raise an exception even if no previous
        query has been run.
        """
        cursor = connection.cursor()
        try:
            connection.ops.last_executed_query(cursor, '', ())
        except Exception:
            self.fail("'last_executed_query' should not raise an exception.")
    def test_debug_sql(self):
        # DEBUG=True makes connection.queries record executed SQL.
        list(models.Reporter.objects.filter(first_name="test"))
        sql = connection.queries[-1]['sql'].lower()
        self.assertIn("select", sql)
        self.assertIn(models.Reporter._meta.db_table, sql)
    def test_query_encoding(self):
        """
        Test that last_executed_query() returns an Unicode string
        """
        data = models.RawData.objects.filter(raw_data=b'\x00\x46 \xFE').extra(select={'fΓΆΓΆ': 1})
        sql, params = data.query.sql_with_params()
        cursor = data.query.get_compiler('default').execute_sql(CURSOR)
        last_sql = cursor.db.ops.last_executed_query(cursor, sql, params)
        self.assertIsInstance(last_sql, six.text_type)
    @unittest.skipUnless(connection.vendor == 'sqlite',
                         "This test is specific to SQLite.")
    def test_no_interpolation_on_sqlite(self):
        # Regression for #17158
        # This shouldn't raise an exception
        query = "SELECT strftime('%Y', 'now');"
        connection.cursor().execute(query)
        self.assertEqual(connection.queries[-1]['sql'],
                         str_prefix("QUERY = %(_)s\"SELECT strftime('%%Y', 'now');\" - PARAMS = ()"))
class ParameterHandlingTest(TestCase):
    """Cursor parameter-count validation."""
    def test_bad_parameter_count(self):
        "An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
        cursor = connection.cursor()
        query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
            connection.introspection.table_name_converter('backends_square'),
            connection.ops.quote_name('root'),
            connection.ops.quote_name('square')
        ))
        self.assertRaises(Exception, cursor.executemany, query, [(1, 2, 3)])
        self.assertRaises(Exception, cursor.executemany, query, [(1,)])
# Unfortunately, the following tests would be a good test to run on all
# backends, but it breaks MySQL hard. Until #13711 is fixed, it can't be run
# everywhere (although it would be an effective test of #13711).
class LongNameTest(TransactionTestCase):
    """Long primary keys and model names can result in a sequence name
    that exceeds the database limits, which will result in truncation
    on certain databases (e.g., Postgres). The backend needs to use
    the correct sequence name in last_insert_id and other places, so
    check it is. Refs #8901.
    """
    available_apps = ['backends']
    def test_sequence_name_length_limits_create(self):
        """Test creation of model with long name and long pk name doesn't error. Ref #8901"""
        models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
    def test_sequence_name_length_limits_m2m(self):
        """Test an m2m save of a model with a long name and a long m2m field name doesn't error as on Django >=1.2 this now uses object saves. Ref #8901"""
        obj = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
        rel_obj = models.Person.objects.create(first_name='Django', last_name='Reinhardt')
        obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
    def test_sequence_name_length_limits_flush(self):
        """Test that sequence resetting as part of a flush with model with long name and long pk name doesn't error. Ref #8901"""
        # A full flush is expensive to the full test, so we dig into the
        # internals to generate the likely offending SQL and run it manually
        # Some convenience aliases
        VLM = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
        VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
        tables = [
            VLM._meta.db_table,
            VLM_m2m._meta.db_table,
        ]
        sequences = [
            {
                'column': VLM._meta.pk.column,
                'table': VLM._meta.db_table
            },
        ]
        cursor = connection.cursor()
        for statement in connection.ops.sql_flush(no_style(), tables, sequences):
            cursor.execute(statement)
class SequenceResetTest(TestCase):
    """Sequence-reset SQL must target the right sequence names."""
    def test_generic_relation(self):
        "Sequence names are correct when resetting generic relations (Ref #13941)"
        # Create an object with a manually specified PK
        models.Post.objects.create(id=10, name='1st post', text='hello world')
        # Reset the sequences for the database
        cursor = connection.cursor()
        commands = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(no_style(), [models.Post])
        for sql in commands:
            cursor.execute(sql)
        # If we create a new object now, it should have a PK greater
        # than the PK we specified manually.
        obj = models.Post.objects.create(name='New post', text='goodbye world')
        self.assertGreater(obj.pk, 10)
# This test needs to run outside of a transaction, otherwise closing the
# connection would implicitly rollback and cause problems during teardown.
class ConnectionCreatedSignalTest(TransactionTestCase):
    """The ``connection_created`` signal fires exactly when a connection opens."""
    available_apps = []
    # Unfortunately with sqlite3 the in-memory test database cannot be closed,
    # and so it cannot be re-opened during testing.
    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    def test_signal(self):
        data = {}
        def receiver(sender, connection, **kwargs):
            data["connection"] = connection
        connection_created.connect(receiver)
        connection.close()
        connection.cursor()
        self.assertIs(data["connection"].connection, connection.connection)
        # After disconnecting, reopening must not fire the receiver again.
        connection_created.disconnect(receiver)
        data.clear()
        connection.cursor()
        self.assertEqual(data, {})
class EscapingChecks(TestCase):
    """
    All tests in this test case are also run with settings.DEBUG=True in
    EscapingChecksDebug test case, to also test CursorDebugWrapper.
    """
    # Vendor-specific suffix needed for a bare SELECT (e.g. ' FROM DUAL').
    bare_select_suffix = connection.features.bare_select_suffix
    def test_paramless_no_escaping(self):
        # A literal '%s' with no params must pass through untouched.
        cursor = connection.cursor()
        cursor.execute("SELECT '%s'" + self.bare_select_suffix)
        self.assertEqual(cursor.fetchall()[0][0], '%s')
    def test_parameter_escaping(self):
        # '%%' is unescaped to '%'; the bound param stays verbatim.
        cursor = connection.cursor()
        cursor.execute("SELECT '%%', %s" + self.bare_select_suffix, ('%d',))
        self.assertEqual(cursor.fetchall()[0], ('%', '%d'))
    @unittest.skipUnless(connection.vendor == 'sqlite',
                         "This is an sqlite-specific issue")
    def test_sqlite_parameter_escaping(self):
        # '%s' escaping support for sqlite3 #13648
        cursor = connection.cursor()
        cursor.execute("select strftime('%s', date('now'))")
        response = cursor.fetchall()[0][0]
        # response should be an non-zero integer
        self.assertTrue(int(response))
# Re-run every EscapingChecks test with DEBUG=True so that the
# CursorDebugWrapper code path is also exercised.
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
    pass
class BackendTestCase(TransactionTestCase):
available_apps = ['backends']
def create_squares_with_executemany(self, args):
self.create_squares(args, 'format', True)
def create_squares(self, args, paramstyle, multiple):
cursor = connection.cursor()
opts = models.Square._meta
tbl = connection.introspection.table_name_converter(opts.db_table)
f1 = connection.ops.quote_name(opts.get_field('root').column)
f2 = connection.ops.quote_name(opts.get_field('square').column)
if paramstyle == 'format':
query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2)
elif paramstyle == 'pyformat':
query = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (tbl, f1, f2)
else:
raise ValueError("unsupported paramstyle in test")
if multiple:
cursor.executemany(query, args)
else:
cursor.execute(query, args)
def test_cursor_executemany(self):
# Test cursor.executemany #4896
args = [(i, i ** 2) for i in range(-5, 6)]
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 11)
for i in range(-5, 6):
square = models.Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
def test_cursor_executemany_with_empty_params_list(self):
# Test executemany with params=[] does nothing #4765
args = []
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 0)
def test_cursor_executemany_with_iterator(self):
# Test executemany accepts iterators #10320
args = iter((i, i ** 2) for i in range(-3, 2))
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 5)
args = iter((i, i ** 2) for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 9)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_execute_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = {'root': 3, 'square': 9}
self.create_squares(args, 'pyformat', multiple=False)
self.assertEqual(models.Square.objects.count(), 1)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)]
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 11)
for i in range(-5, 6):
square = models.Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat_iterator(self):
args = iter({'root': i, 'square': i ** 2} for i in range(-3, 2))
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 5)
args = iter({'root': i, 'square': i ** 2} for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 9)
def test_unicode_fetches(self):
# fetchone, fetchmany, fetchall return strings as unicode objects #6254
qn = connection.ops.quote_name
models.Person(first_name="John", last_name="Doe").save()
models.Person(first_name="Jane", last_name="Doe").save()
models.Person(first_name="Mary", last_name="Agnelline").save()
models.Person(first_name="Peter", last_name="Parker").save()
models.Person(first_name="Clark", last_name="Kent").save()
opts2 = models.Person._meta
f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
query2 = ('SELECT %s, %s FROM %s ORDER BY %s'
% (qn(f3.column), qn(f4.column), connection.introspection.table_name_converter(opts2.db_table),
qn(f3.column)))
cursor = connection.cursor()
cursor.execute(query2)
self.assertEqual(cursor.fetchone(), ('Clark', 'Kent'))
self.assertEqual(list(cursor.fetchmany(2)), [('Jane', 'Doe'), ('John', 'Doe')])
self.assertEqual(list(cursor.fetchall()), [('Mary', 'Agnelline'), ('Peter', 'Parker')])
def test_unicode_password(self):
old_password = connection.settings_dict['PASSWORD']
connection.settings_dict['PASSWORD'] = "franΓ§ois"
try:
connection.cursor()
except DatabaseError:
# As password is probably wrong, a database exception is expected
pass
except Exception as e:
self.fail("Unexpected error raised with unicode password: %s" % e)
finally:
connection.settings_dict['PASSWORD'] = old_password
    def test_database_operations_helper_class(self):
        # Ticket #13630
        # The connection exposes a DatabaseOperations helper as `ops`, and
        # that helper keeps a back-reference to the very same connection.
        self.assertTrue(hasattr(connection, 'ops'))
        self.assertTrue(hasattr(connection.ops, 'connection'))
        self.assertEqual(connection, connection.ops.connection)
    def test_database_operations_init(self):
        """
        Test that DatabaseOperations initialization doesn't query the database.
        See #17656.
        """
        # Instantiating the ops class directly must issue zero queries.
        with self.assertNumQueries(0):
            connection.ops.__class__(connection)
def test_cached_db_features(self):
self.assertIn(connection.features.supports_transactions, (True, False))
self.assertIn(connection.features.supports_stddev, (True, False))
self.assertIn(connection.features.can_introspect_foreign_keys, (True, False))
def test_duplicate_table_error(self):
""" Test that creating an existing table returns a DatabaseError """
cursor = connection.cursor()
query = 'CREATE TABLE %s (id INTEGER);' % models.Article._meta.db_table
with self.assertRaises(DatabaseError):
cursor.execute(query)
    def test_cursor_contextmanager(self):
        """
        Test that cursors can be used as a context manager
        """
        with connection.cursor() as cursor:
            self.assertIsInstance(cursor, CursorWrapper)
        # Leaving the 'with' block is expected to close the cursor.
        # Both InterfaceError and ProgrammingError seem to be used when
        # accessing closed cursor (psycopg2 has InterfaceError, rest seem
        # to use ProgrammingError).
        with self.assertRaises(connection.features.closed_cursor_error_class):
            # cursor should be closed, so no queries should be possible.
            cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
    @unittest.skipUnless(connection.vendor == 'postgresql',
                         "Psycopg2 specific cursor.closed attribute needed")
    def test_cursor_contextmanager_closing(self):
        # There isn't a generic way to test that cursors are closed, but
        # psycopg2 offers us a way to check that by closed attribute.
        # So, run only on psycopg2 for that reason.
        with connection.cursor() as cursor:
            self.assertIsInstance(cursor, CursorWrapper)
        # Exiting the 'with' block must have flipped the psycopg2 flag.
        self.assertTrue(cursor.closed)
    # Unfortunately with sqlite3 the in-memory test database cannot be closed.
    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    def test_is_usable_after_database_disconnects(self):
        """
        Test that is_usable() doesn't crash when the database disconnects.
        Regression for #21553.
        """
        # Open a connection to the database.
        with connection.cursor():
            pass
        # Emulate a connection close by the database.
        connection._close()
        # Even then is_usable() should not raise an exception.
        try:
            self.assertFalse(connection.is_usable())
        finally:
            # Clean up the mess created by connection._close(). Since the
            # connection is already closed, this crashes on some backends.
            # Swallow whatever close() raises so teardown proceeds.
            try:
                connection.close()
            except Exception:
                pass
@override_settings(DEBUG=True)
def test_queries(self):
"""
Test the documented API of connection.queries.
"""
with connection.cursor() as cursor:
reset_queries()
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
six.assertCountEqual(self, connection.queries[0].keys(), ['sql', 'time'])
reset_queries()
self.assertEqual(0, len(connection.queries))
    # Unfortunately with sqlite3 the in-memory test database cannot be closed.
    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    @override_settings(DEBUG=True)
    def test_queries_limit(self):
        """
        Test that the backend doesn't store an unlimited number of queries.
        Regression for #12581.
        """
        old_queries_limit = BaseDatabaseWrapper.queries_limit
        BaseDatabaseWrapper.queries_limit = 3
        # Use a fresh connection so the patched class attribute is picked up
        # without disturbing the shared default connection.
        new_connections = ConnectionHandler(settings.DATABASES)
        new_connection = new_connections[DEFAULT_DB_ALIAS]
        # Initialize the connection and clear initialization statements.
        with new_connection.cursor():
            pass
        new_connection.queries_log.clear()
        try:
            with new_connection.cursor() as cursor:
                cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
                cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)
            # Below the limit: full log, no warning emitted.
            with warnings.catch_warnings(record=True) as w:
                self.assertEqual(2, len(new_connection.queries))
                self.assertEqual(0, len(w))
            with new_connection.cursor() as cursor:
                cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
                cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)
            # Four queries issued but only the last 3 kept; reading the
            # truncated log emits exactly one warning.
            with warnings.catch_warnings(record=True) as w:
                self.assertEqual(3, len(new_connection.queries))
                self.assertEqual(1, len(w))
                self.assertEqual(str(w[0].message), "Limit for query logging "
                    "exceeded, only the last 3 queries will be returned.")
        finally:
            # Restore the class attribute and drop the extra connection.
            BaseDatabaseWrapper.queries_limit = old_queries_limit
            new_connection.close()
# We don't make these tests conditional because that means we would need to
# check and differentiate between:
# * MySQL+InnoDB, MySQL+MYISAM (something we currently can't do).
# * if sqlite3 (if/once we get #14204 fixed) has referential integrity turned
# on or not, something that would be controlled by runtime support and user
# preference.
# Instead, the tests below verify that any constraint failure that does occur
# is raised as django.db.IntegrityError.
class FkConstraintsTests(TransactionTestCase):
    # Exercises how the backend reports FK constraint violations; backends
    # that don't enforce constraints make the tests skip themselves.
    available_apps = ['backends']
    def setUp(self):
        # Create a Reporter.
        self.r = models.Reporter.objects.create(first_name='John', last_name='Smith')
    def test_integrity_checks_on_creation(self):
        """
        Try to create a model instance that violates a FK constraint. If it
        fails it should fail with IntegrityError.
        """
        # reporter_id=30 points at no existing Reporter row.
        a1 = models.Article(headline="This is a test", pub_date=datetime.datetime(2005, 7, 27), reporter_id=30)
        try:
            a1.save()
        except IntegrityError:
            pass
        else:
            # The save succeeding means constraints aren't enforced here.
            self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks we make sure
        # constraints are also enforced for proxy models. Refs #17519
        a2 = models.Article(headline='This is another test', reporter=self.r,
                            pub_date=datetime.datetime(2012, 8, 3),
                            reporter_proxy_id=30)
        self.assertRaises(IntegrityError, a2.save)
    def test_integrity_checks_on_update(self):
        """
        Try to update a model instance introducing a FK constraint violation.
        If it fails it should fail with IntegrityError.
        """
        # Create an Article.
        models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
        # Retrieve it from the DB
        a1 = models.Article.objects.get(headline="Test article")
        a1.reporter_id = 30
        try:
            a1.save()
        except IntegrityError:
            pass
        else:
            self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks we make sure
        # constraints are also enforced for proxy models. Refs #17519
        # Create another article
        r_proxy = models.ReporterProxy.objects.get(pk=self.r.pk)
        models.Article.objects.create(headline='Another article',
                                      pub_date=datetime.datetime(1988, 5, 15),
                                      reporter=self.r, reporter_proxy=r_proxy)
        # Retrieve the second article from the DB
        a2 = models.Article.objects.get(headline='Another article')
        a2.reporter_proxy_id = 30
        self.assertRaises(IntegrityError, a2.save)
    def test_disable_constraint_checks_manually(self):
        """
        When constraint checks are disabled, should be able to write bad data without IntegrityErrors.
        """
        with transaction.atomic():
            # Create an Article.
            models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
            # Retrieve it from the DB
            a = models.Article.objects.get(headline="Test article")
            a.reporter_id = 30
            try:
                connection.disable_constraint_checking()
                a.save()
                connection.enable_constraint_checking()
            except IntegrityError:
                self.fail("IntegrityError should not have occurred.")
            # Roll back so the dangling FK never persists past the test.
            transaction.set_rollback(True)
    def test_disable_constraint_checks_context_manager(self):
        """
        When constraint checks are disabled (using context manager), should be able to write bad data without IntegrityErrors.
        """
        with transaction.atomic():
            # Create an Article.
            models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
            # Retrieve it from the DB
            a = models.Article.objects.get(headline="Test article")
            a.reporter_id = 30
            try:
                with connection.constraint_checks_disabled():
                    a.save()
            except IntegrityError:
                self.fail("IntegrityError should not have occurred.")
            transaction.set_rollback(True)
    def test_check_constraints(self):
        """
        Constraint checks should raise an IntegrityError when bad data is in the DB.
        """
        with transaction.atomic():
            # Create an Article.
            models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
            # Retrieve it from the DB
            a = models.Article.objects.get(headline="Test article")
            a.reporter_id = 30
            with connection.constraint_checks_disabled():
                a.save()
                # Explicit whole-table validation must detect the bad row.
                with self.assertRaises(IntegrityError):
                    connection.check_constraints()
            transaction.set_rollback(True)
class ThreadTests(TransactionTestCase):
    # Verifies the thread-locality and sharing rules of database connections.
    available_apps = ['backends']
    def test_default_connection_thread_local(self):
        """
        Ensure that the default connection (i.e. django.db.connection) is
        different for each thread.
        Refs #17258.
        """
        # Map connections by id because connections with identical aliases
        # have the same hash.
        connections_dict = {}
        connection.cursor()
        connections_dict[id(connection)] = connection
        def runner():
            # Passing django.db.connection between threads doesn't work while
            # connections[DEFAULT_DB_ALIAS] does.
            from django.db import connections
            connection = connections[DEFAULT_DB_ALIAS]
            # Allow thread sharing so the connection can be closed by the
            # main thread.
            connection.allow_thread_sharing = True
            connection.cursor()
            connections_dict[id(connection)] = connection
        for x in range(2):
            t = threading.Thread(target=runner)
            t.start()
            t.join()
        # Check that each created connection got different inner connection.
        self.assertEqual(
            len(set(conn.connection for conn in connections_dict.values())),
            3)
        # Finish by closing the connections opened by the other threads (the
        # connection opened in the main thread will automatically be closed on
        # teardown).
        for conn in connections_dict.values():
            if conn is not connection:
                conn.close()
    def test_connections_thread_local(self):
        """
        Ensure that the connections are different for each thread.
        Refs #17258.
        """
        # Map connections by id because connections with identical aliases
        # have the same hash.
        connections_dict = {}
        for conn in connections.all():
            connections_dict[id(conn)] = conn
        def runner():
            from django.db import connections
            for conn in connections.all():
                # Allow thread sharing so the connection can be closed by the
                # main thread.
                conn.allow_thread_sharing = True
                connections_dict[id(conn)] = conn
        for x in range(2):
            t = threading.Thread(target=runner)
            t.start()
            t.join()
        # 6 distinct wrappers: (main thread + 2 workers) x the configured
        # aliases — presumably two aliases; verify against the test DB setup.
        self.assertEqual(len(connections_dict), 6)
        # Finish by closing the connections opened by the other threads (the
        # connection opened in the main thread will automatically be closed on
        # teardown).
        for conn in connections_dict.values():
            if conn is not connection:
                conn.close()
    def test_pass_connection_between_threads(self):
        """
        Ensure that a connection can be passed from one thread to the other.
        Refs #17258.
        """
        models.Person.objects.create(first_name="John", last_name="Doe")
        def do_thread():
            # `exceptions` is closed over from the enclosing test body.
            def runner(main_thread_connection):
                from django.db import connections
                connections['default'] = main_thread_connection
                try:
                    models.Person.objects.get(first_name="John", last_name="Doe")
                except Exception as e:
                    exceptions.append(e)
            t = threading.Thread(target=runner, args=[connections['default']])
            t.start()
            t.join()
        # Without touching allow_thread_sharing, which should be False by default.
        exceptions = []
        do_thread()
        # Forbidden!
        self.assertIsInstance(exceptions[0], DatabaseError)
        # If explicitly setting allow_thread_sharing to False
        connections['default'].allow_thread_sharing = False
        exceptions = []
        do_thread()
        # Forbidden!
        self.assertIsInstance(exceptions[0], DatabaseError)
        # If explicitly setting allow_thread_sharing to True
        connections['default'].allow_thread_sharing = True
        exceptions = []
        do_thread()
        # All good
        self.assertEqual(exceptions, [])
    def test_closing_non_shared_connections(self):
        """
        Ensure that a connection that is not explicitly shareable cannot be
        closed by another thread.
        Refs #17258.
        """
        # First, without explicitly enabling the connection for sharing.
        exceptions = set()
        def runner1():
            def runner2(other_thread_connection):
                try:
                    other_thread_connection.close()
                except DatabaseError as e:
                    exceptions.add(e)
            t2 = threading.Thread(target=runner2, args=[connections['default']])
            t2.start()
            t2.join()
        t1 = threading.Thread(target=runner1)
        t1.start()
        t1.join()
        # The exception was raised
        self.assertEqual(len(exceptions), 1)
        # Then, with explicitly enabling the connection for sharing.
        exceptions = set()
        def runner1():
            def runner2(other_thread_connection):
                try:
                    other_thread_connection.close()
                except DatabaseError as e:
                    exceptions.add(e)
            # Enable thread sharing
            connections['default'].allow_thread_sharing = True
            t2 = threading.Thread(target=runner2, args=[connections['default']])
            t2.start()
            t2.join()
        t1 = threading.Thread(target=runner1)
        t1.start()
        t1.join()
        # No exception was raised
        self.assertEqual(len(exceptions), 0)
class MySQLPKZeroTests(TestCase):
    """
    Zero as id for AutoField should raise exception in MySQL, because MySQL
    does not allow zero for autoincrement primary key.
    """
    # Only runs on backends whose feature flag says pk=0 is disallowed.
    @skipIfDBFeature('allows_auto_pk_0')
    def test_zero_as_autoval(self):
        with self.assertRaises(ValueError):
            models.Square.objects.create(id=0, root=0, square=1)
class DBConstraintTestCase(TestCase):
    """Behaviour of FK fields whose database-level constraint is absent."""

    def test_can_reference_existent(self):
        # A reference to a row that exists round-trips normally.
        target = models.Object.objects.create()
        reference = models.ObjectReference.objects.create(obj=target)
        self.assertEqual(reference.obj, target)
        self.assertEqual(models.ObjectReference.objects.get(obj=target).obj, target)

    def test_can_reference_non_existent(self):
        # Without a DB constraint a dangling obj_id can be stored...
        self.assertFalse(models.Object.objects.filter(id=12345).exists())
        dangling = models.ObjectReference.objects.create(obj_id=12345)
        self.assertEqual(models.ObjectReference.objects.get(obj_id=12345), dangling)
        # ...but dereferencing it raises DoesNotExist.
        with self.assertRaises(models.Object.DoesNotExist):
            dangling.obj

    def test_many_to_many(self):
        # Same idea for the hidden many-to-many through table.
        obj = models.Object.objects.create()
        obj.related_objects.create()
        self.assertEqual(models.Object.objects.count(), 2)
        self.assertEqual(obj.related_objects.count(), 1)
        through = models.Object._meta.get_field("related_objects").rel.through
        # Insert a dangling row directly into the through table.
        through.objects.create(from_object_id=obj.id, to_object_id=12345)
        self.assertEqual(obj.related_objects.count(), 1)
        self.assertEqual(through.objects.count(), 2)
class BackendUtilTests(TestCase):

    def test_format_number(self):
        """
        Test the format_number converter utility
        """
        # (value, max_digits, decimal_places, expected) rows.
        cases = [
            ('0', 12, 3, '0.000'),
            ('0', 12, 8, '0.00000000'),
            ('1', 12, 9, '1.000000000'),
            ('0.00000000', 12, 8, '0.00000000'),
            ('0.000000004', 12, 8, '0.00000000'),
            ('0.000000008', 12, 8, '0.00000001'),
            ('0.000000000000000000999', 10, 8, '0.00000000'),
            ('0.1234567890', 12, 10, '0.1234567890'),
            ('0.1234567890', 12, 9, '0.123456789'),
            ('0.1234567890', 12, 8, '0.12345679'),
            ('0.1234567890', 12, 5, '0.12346'),
            ('0.1234567890', 12, 3, '0.123'),
            ('0.1234567890', 12, 1, '0.1'),
            ('0.1234567890', 12, 0, '0'),
            ('0.1234567890', None, 0, '0'),
            ('1234567890.1234567890', None, 0, '1234567890'),
            ('1234567890.1234567890', None, 2, '1234567890.12'),
            ('0.1234', 5, None, '0.1234'),
            ('123.12', 5, None, '123.12'),
        ]
        for value, max_digits, places, expected in cases:
            self.assertEqual(format_number(Decimal(value), max_digits, places), expected)
        # Exceeding max_digits with no explicit decimal_places must raise.
        with self.assertRaises(Rounded):
            format_number(Decimal('0.1234567890'), 5, None)
        with self.assertRaises(Rounded):
            format_number(Decimal('1234567890.1234'), 5, None)
@ignore_warnings(category=UserWarning,
                 message="Overriding setting DATABASES can lead to unexpected behavior")
class DBTestSettingsRenamedTests(TestCase):
    # Error raised when deprecated TEST_* keys disagree with the TEST dict.
    mismatch_msg = ("Connection 'test-deprecation' has mismatched TEST "
                    "and TEST_* database settings.")

    def setUp(self):
        super(DBTestSettingsRenamedTests, self).setUp()
        self.handler = ConnectionHandler()
        self.db_settings = {'default': {}}

    def _prepare_deprecation_alias(self, settings_dict):
        # Install settings_dict under the 'test-deprecation' alias and run
        # prepare_test_settings() on it with DATABASES overridden.
        self.db_settings.update({'test-deprecation': settings_dict})
        with override_settings(DATABASES=self.db_settings):
            self.handler.prepare_test_settings('test-deprecation')

    def _assert_mismatch(self, settings_dict):
        # The given settings must be rejected with the mismatch message.
        with self.assertRaisesMessage(ImproperlyConfigured, self.mismatch_msg):
            self._prepare_deprecation_alias(settings_dict)

    def test_mismatched_database_test_settings_1(self):
        # if the TEST setting is used, all TEST_* keys must appear in it.
        self._assert_mismatch({'TEST': {}, 'TEST_NAME': 'foo'})

    def test_mismatched_database_test_settings_2(self):
        # if the TEST setting is used, all TEST_* keys must match.
        self._assert_mismatch({'TEST': {'NAME': 'foo'}, 'TEST_NAME': 'bar'})

    def test_mismatched_database_test_settings_3(self):
        # Verifies the mapping of an aliased key (TEST_CREATE -> CREATE_DB).
        self._assert_mismatch({'TEST': {'CREATE_DB': 'foo'}, 'TEST_CREATE': 'bar'})

    def test_mismatched_database_test_settings_4(self):
        # Verifies the mapping of an aliased key when the aliased key is missing.
        self._assert_mismatch({'TEST': {}, 'TEST_CREATE': 'bar'})

    def test_mismatched_settings_old_none(self):
        self._assert_mismatch({'TEST': {'CREATE_DB': None}, 'TEST_CREATE': ''})

    def test_mismatched_settings_new_none(self):
        self._assert_mismatch({'TEST': {}, 'TEST_CREATE': None})

    def test_matched_test_settings(self):
        # should be able to define new settings and the old, if they match
        self._prepare_deprecation_alias({'TEST': {'NAME': 'foo'}, 'TEST_NAME': 'foo'})

    def test_new_settings_only(self):
        # should be able to define new settings without the old
        self._prepare_deprecation_alias({'TEST': {'NAME': 'foo'}})

    @ignore_warnings(category=RemovedInDjango19Warning)
    def test_old_settings_only(self):
        # should be able to define old settings without the new
        self._prepare_deprecation_alias({'TEST_NAME': 'foo'})

    def test_empty_settings(self):
        with override_settings(DATABASES=self.db_settings):
            self.handler.prepare_test_settings('default')
@unittest.skipUnless(connection.vendor == 'sqlite', 'SQLite specific test.')
@skipUnlessDBFeature('can_share_in_memory_db')
class TestSqliteThreadSharing(TransactionTestCase):
    available_apps = ['backends']

    def test_database_sharing_in_threads(self):
        # Insert one row from the main thread and one from a worker; both
        # must land in the same shared in-memory database.
        def create_object():
            models.Object.objects.create()
        create_object()
        worker = threading.Thread(target=create_object)
        worker.start()
        worker.join()
        self.assertEqual(models.Object.objects.count(), 2)
|
{
"content_hash": "5c1faf4a8e372468310d837242d2139e",
"timestamp": "",
"source": "github",
"line_count": 1281,
"max_line_length": 161,
"avg_line_length": 40.53239656518345,
"alnum_prop": 0.6208928777782058,
"repo_name": "nzavagli/UnrealPy",
"id": "11136e3b0973a965a35867745607799a6be023c8",
"size": "52003",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/django-1.8.2/tests/backends/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "APL",
"bytes": "587"
},
{
"name": "ASP",
"bytes": "2753"
},
{
"name": "ActionScript",
"bytes": "5686"
},
{
"name": "Ada",
"bytes": "94225"
},
{
"name": "Agda",
"bytes": "3154"
},
{
"name": "Alloy",
"bytes": "6579"
},
{
"name": "ApacheConf",
"bytes": "12482"
},
{
"name": "AppleScript",
"bytes": "421"
},
{
"name": "Assembly",
"bytes": "1093261"
},
{
"name": "AutoHotkey",
"bytes": "3733"
},
{
"name": "AutoIt",
"bytes": "667"
},
{
"name": "Awk",
"bytes": "63276"
},
{
"name": "Batchfile",
"bytes": "147828"
},
{
"name": "BlitzBasic",
"bytes": "185102"
},
{
"name": "BlitzMax",
"bytes": "2387"
},
{
"name": "Boo",
"bytes": "1111"
},
{
"name": "Bro",
"bytes": "7337"
},
{
"name": "C",
"bytes": "108397183"
},
{
"name": "C#",
"bytes": "156749"
},
{
"name": "C++",
"bytes": "13535833"
},
{
"name": "CLIPS",
"bytes": "6933"
},
{
"name": "CMake",
"bytes": "12441"
},
{
"name": "COBOL",
"bytes": "114812"
},
{
"name": "CSS",
"bytes": "430375"
},
{
"name": "Ceylon",
"bytes": "1387"
},
{
"name": "Chapel",
"bytes": "4366"
},
{
"name": "Cirru",
"bytes": "2574"
},
{
"name": "Clean",
"bytes": "9679"
},
{
"name": "Clojure",
"bytes": "23871"
},
{
"name": "CoffeeScript",
"bytes": "20149"
},
{
"name": "ColdFusion",
"bytes": "9006"
},
{
"name": "Common Lisp",
"bytes": "49017"
},
{
"name": "Coq",
"bytes": "66"
},
{
"name": "Cucumber",
"bytes": "390"
},
{
"name": "Cuda",
"bytes": "776"
},
{
"name": "D",
"bytes": "7556"
},
{
"name": "DIGITAL Command Language",
"bytes": "425938"
},
{
"name": "DTrace",
"bytes": "6706"
},
{
"name": "Dart",
"bytes": "591"
},
{
"name": "Dylan",
"bytes": "6343"
},
{
"name": "Ecl",
"bytes": "2599"
},
{
"name": "Eiffel",
"bytes": "2145"
},
{
"name": "Elixir",
"bytes": "4340"
},
{
"name": "Emacs Lisp",
"bytes": "18303"
},
{
"name": "Erlang",
"bytes": "5746"
},
{
"name": "F#",
"bytes": "19156"
},
{
"name": "FORTRAN",
"bytes": "38458"
},
{
"name": "Factor",
"bytes": "10194"
},
{
"name": "Fancy",
"bytes": "2581"
},
{
"name": "Fantom",
"bytes": "25331"
},
{
"name": "GAP",
"bytes": "29880"
},
{
"name": "GLSL",
"bytes": "450"
},
{
"name": "Gnuplot",
"bytes": "11501"
},
{
"name": "Go",
"bytes": "5444"
},
{
"name": "Golo",
"bytes": "1649"
},
{
"name": "Gosu",
"bytes": "2853"
},
{
"name": "Groff",
"bytes": "3458639"
},
{
"name": "Groovy",
"bytes": "2586"
},
{
"name": "HTML",
"bytes": "92126540"
},
{
"name": "Haskell",
"bytes": "49593"
},
{
"name": "Haxe",
"bytes": "16812"
},
{
"name": "Hy",
"bytes": "7237"
},
{
"name": "IDL",
"bytes": "2098"
},
{
"name": "Idris",
"bytes": "2771"
},
{
"name": "Inform 7",
"bytes": "1944"
},
{
"name": "Inno Setup",
"bytes": "18796"
},
{
"name": "Ioke",
"bytes": "469"
},
{
"name": "Isabelle",
"bytes": "21392"
},
{
"name": "Jasmin",
"bytes": "9428"
},
{
"name": "Java",
"bytes": "4040623"
},
{
"name": "JavaScript",
"bytes": "223927"
},
{
"name": "Julia",
"bytes": "27687"
},
{
"name": "KiCad",
"bytes": "475"
},
{
"name": "Kotlin",
"bytes": "971"
},
{
"name": "LSL",
"bytes": "160"
},
{
"name": "Lasso",
"bytes": "18650"
},
{
"name": "Lean",
"bytes": "6921"
},
{
"name": "Limbo",
"bytes": "9891"
},
{
"name": "Liquid",
"bytes": "862"
},
{
"name": "LiveScript",
"bytes": "972"
},
{
"name": "Logos",
"bytes": "19509"
},
{
"name": "Logtalk",
"bytes": "7260"
},
{
"name": "Lua",
"bytes": "8677"
},
{
"name": "Makefile",
"bytes": "2053844"
},
{
"name": "Mask",
"bytes": "815"
},
{
"name": "Mathematica",
"bytes": "191"
},
{
"name": "Max",
"bytes": "296"
},
{
"name": "Modelica",
"bytes": "6213"
},
{
"name": "Modula-2",
"bytes": "23838"
},
{
"name": "Module Management System",
"bytes": "14798"
},
{
"name": "Monkey",
"bytes": "2587"
},
{
"name": "Moocode",
"bytes": "3343"
},
{
"name": "MoonScript",
"bytes": "14862"
},
{
"name": "Myghty",
"bytes": "3939"
},
{
"name": "NSIS",
"bytes": "7663"
},
{
"name": "Nemerle",
"bytes": "1517"
},
{
"name": "NewLisp",
"bytes": "42726"
},
{
"name": "Nimrod",
"bytes": "37191"
},
{
"name": "Nit",
"bytes": "55581"
},
{
"name": "Nix",
"bytes": "2448"
},
{
"name": "OCaml",
"bytes": "42416"
},
{
"name": "Objective-C",
"bytes": "104883"
},
{
"name": "Objective-J",
"bytes": "15340"
},
{
"name": "Opa",
"bytes": "172"
},
{
"name": "OpenEdge ABL",
"bytes": "49943"
},
{
"name": "PAWN",
"bytes": "6555"
},
{
"name": "PHP",
"bytes": "68611"
},
{
"name": "PLSQL",
"bytes": "45772"
},
{
"name": "Pan",
"bytes": "1241"
},
{
"name": "Pascal",
"bytes": "349743"
},
{
"name": "Perl",
"bytes": "5931502"
},
{
"name": "Perl6",
"bytes": "113623"
},
{
"name": "PigLatin",
"bytes": "6657"
},
{
"name": "Pike",
"bytes": "8479"
},
{
"name": "PostScript",
"bytes": "18216"
},
{
"name": "PowerShell",
"bytes": "14236"
},
{
"name": "Prolog",
"bytes": "43750"
},
{
"name": "Protocol Buffer",
"bytes": "3401"
},
{
"name": "Puppet",
"bytes": "130"
},
{
"name": "Python",
"bytes": "122886156"
},
{
"name": "QML",
"bytes": "3912"
},
{
"name": "R",
"bytes": "49247"
},
{
"name": "Racket",
"bytes": "11341"
},
{
"name": "Rebol",
"bytes": "17708"
},
{
"name": "Red",
"bytes": "10536"
},
{
"name": "Redcode",
"bytes": "830"
},
{
"name": "Ruby",
"bytes": "91403"
},
{
"name": "Rust",
"bytes": "6788"
},
{
"name": "SAS",
"bytes": "15603"
},
{
"name": "SaltStack",
"bytes": "1040"
},
{
"name": "Scala",
"bytes": "730"
},
{
"name": "Scheme",
"bytes": "50346"
},
{
"name": "Scilab",
"bytes": "943"
},
{
"name": "Shell",
"bytes": "2925097"
},
{
"name": "ShellSession",
"bytes": "320"
},
{
"name": "Smali",
"bytes": "832"
},
{
"name": "Smalltalk",
"bytes": "158636"
},
{
"name": "Smarty",
"bytes": "523"
},
{
"name": "SourcePawn",
"bytes": "130"
},
{
"name": "Standard ML",
"bytes": "36869"
},
{
"name": "Swift",
"bytes": "2035"
},
{
"name": "SystemVerilog",
"bytes": "265"
},
{
"name": "Tcl",
"bytes": "6077233"
},
{
"name": "TeX",
"bytes": "487999"
},
{
"name": "Tea",
"bytes": "391"
},
{
"name": "TypeScript",
"bytes": "535"
},
{
"name": "VHDL",
"bytes": "4446"
},
{
"name": "VimL",
"bytes": "32053"
},
{
"name": "Visual Basic",
"bytes": "19441"
},
{
"name": "XQuery",
"bytes": "4289"
},
{
"name": "XS",
"bytes": "178055"
},
{
"name": "XSLT",
"bytes": "1995174"
},
{
"name": "Xtend",
"bytes": "727"
},
{
"name": "Yacc",
"bytes": "25665"
},
{
"name": "Zephir",
"bytes": "485"
},
{
"name": "eC",
"bytes": "31545"
},
{
"name": "mupad",
"bytes": "2442"
},
{
"name": "nesC",
"bytes": "23697"
},
{
"name": "xBase",
"bytes": "3349"
}
],
"symlink_target": ""
}
|
"""
Gini based Inequality Metrics
"""
__author__ = "Sergio J. Rey <srey@asu.edu> "
import numpy as np
from scipy.stats import norm as NORM
__all__ = ['Gini', 'Gini_Spatial']
def _gini(x):
"""
Memory efficient calculation of Gini coefficient in relative mean difference form
Parameters
----------
x : array-like
Attributes
----------
g : float
Gini coefficient
Notes
-----
Based on http://www.statsdirect.com/help/default.htm#nonparametric_methods/gini.htm
"""
n = len(x)
try:
x_sum = x.sum()
except AttributeError:
x = np.asarray(x)
x_sum = x.sum()
n_x_sum = n * x_sum
r_x = (2. * np.arange(1, len(x)+1) * x[np.argsort(x)]).sum()
return (r_x - n_x_sum - x_sum) / n_x_sum
class Gini:
    """
    Classic Gini coefficient in absolute deviation form

    Parameters
    ----------
    y : array (n,1)
        attribute

    Attributes
    ----------
    g : float
        Gini coefficient
    """
    def __init__(self, x):
        # Thin wrapper: delegate to the module-level _gini helper.  Note the
        # parameter is named ``x`` although the docstring calls it ``y``.
        self.g = _gini(x)
class Gini_Spatial:
    """
    Spatial Gini coefficient
    Provides for computationally based inference regarding the contribution of
    spatial neighbor pairs to overall inequality across a set of regions. See :cite:`Rey_2013_sea`.
    Parameters
    ----------
    y : array (n,1)
        attribute
    w : binary spatial weights object
    permutations : int (default = 99)
        number of permutations for inference
    Attributes
    ----------
    g : float
        Gini coefficient
    wg : float
        Neighbor inequality component (geographic inequality)
    wcg : float
        Non-neighbor inequality component (geographic complement inequality)
    wcg_share : float
        Share of inequality in non-neighbor component
    If Permuations > 0
    p_sim : float
        pseudo p-value for spatial gini
    e_wcg : float
        expected value of non-neighbor inequality component (level) from permutations
    s_wcg : float
        standard deviation non-neighbor inequality component (level) from permutations
    z_wcg : float
        z-value non-neighbor inequality component (level) from permutations
    p_z_sim : float
        pseudo p-value based on standard normal approximation of permutation based values
    Examples
    --------
    >>> import pysal.lib
    >>> import numpy as np
    >>> from pysal.explore.inequality.gini import Gini_Spatial
    Use data from the 32 Mexican States, Decade frequency 1940-2010
    >>> f=pysal.lib.io.open(pysal.lib.examples.get_path("mexico.csv"))
    >>> vnames=["pcgdp%d"%dec for dec in range(1940,2010,10)]
    >>> y=np.transpose(np.array([f.by_col[v] for v in vnames]))
    Define regime neighbors
    >>> regimes=np.array(f.by_col('hanson98'))
    >>> w = pysal.lib.weights.block_weights(regimes)
    >>> np.random.seed(12345)
    >>> gs = Gini_Spatial(y[:,0],w)
    >>> gs.p_sim
    0.04
    >>> gs.wcg
    4353856.0
    >>> gs.e_wcg
    4170356.7474747472
    Thus, the amount of inequality between pairs of states that are not in the
    same regime (neighbors) is significantly higher than what is expected
    under the null of random spatial inequality.
    """
    def __init__(self, x, w, permutations=99):
        x = np.asarray(x)
        g = _gini(x)
        self.g = g
        n = len(x)
        # In sum-of-absolute-differences form g == (total pairwise
        # |x_i - x_j|) / den, so d recovers the total pairwise sum.
        den = x.mean() * 2 * n**2
        d = g * den
        # Split the total into neighbor (wg) and non-neighbor (wcg) parts.
        wg = self._calc(x, w)
        wcg = d - wg
        self.g = g
        self.wcg = wcg
        self.wg = wg
        self.dtotal = d
        self.den = den
        self.wcg_share = wcg / den
        if permutations:
            ids = np.arange(n)
            wcgp = np.zeros((permutations, ))
            for perm in range(permutations):
                # Relabel locations at random and recompute the non-neighbor
                # component under the null of no spatial structure.
                np.random.shuffle(ids)
                wcgp[perm] = d - self._calc(x[ids], w)
            above = wcgp >= self.wcg
            larger = above.sum()
            # Use the smaller tail so the pseudo p-value is two-sided.
            if (permutations - larger) < larger:
                larger = permutations - larger
            self.wcgp = wcgp
            self.p_sim = (larger + 1.) / (permutations + 1.)
            self.e_wcg = wcgp.mean()
            self.s_wcg = wcgp.std()
            self.z_wcg = (self.wcg - self.e_wcg) / self.s_wcg
            self.p_z_sim = 1.0 - NORM.cdf(self.z_wcg)
    def _calc(self, x, w):
        # Sum |x_i - x_j| over every stored neighbor pair.  NOTE(review):
        # with a symmetric weights object each unordered pair is visited
        # twice, matching the 2*n**2 denominator above — confirm for the
        # weights objects actually passed in.
        sad_sum = 0.0
        for i, js in w.neighbors.items():
            sad_sum += np.abs(x[i]-x[js]).sum()
        return sad_sum
|
{
"content_hash": "c8dc161020beb27e375b8d9e311b20b3",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 99,
"avg_line_length": 24.026881720430108,
"alnum_prop": 0.5679122846274335,
"repo_name": "lixun910/pysal",
"id": "3a2f4b0f645a4ffe13e20c1923febdf012a8a71d",
"size": "4469",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pysal/explore/inequality/gini.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "1315254"
},
{
"name": "Jupyter Notebook",
"bytes": "1407521"
},
{
"name": "Makefile",
"bytes": "526"
},
{
"name": "OpenEdge ABL",
"bytes": "595378"
},
{
"name": "Python",
"bytes": "3994938"
},
{
"name": "Shell",
"bytes": "3743"
}
],
"symlink_target": ""
}
|
# These two placeholders are expanded by `git archive` keyword substitution;
# in a plain checkout they keep the literal "$Format...$" strings.
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
# these strings are filled in when 'setup.py versioneer' creates _version.py
tag_prefix = ""
parentdir_prefix = "pystallone-"
versionfile_source = "pystallone/_version.py"
import os, sys, re, subprocess, errno
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Run the first launchable command from *commands* with *args*.

    Tries each candidate executable name in order (useful for e.g.
    ["git.cmd", "git.exe"] on Windows).  Returns the command's stripped
    stdout as text, or None if no candidate could be started or the
    command exited with a non-zero status.
    """
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                # This candidate does not exist; try the next one.
                continue
            if verbose:
                print("unable to run %s" % args[0])
                print(e)
            return None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = p.communicate()[0].strip()
    # Compare the interpreter version numerically; the original
    # `sys.version >= '3'` string comparison is fragile.
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose=False):
    """Derive the version from the name of the unpacked source directory.

    Source tarballs conventionally unpack into "<project>-<version>/", so
    the version is whatever follows *parentdir_prefix* in the directory
    name.  Returns {"version": ..., "full": ""} or None on mismatch.
    """
    dirname = os.path.basename(root)
    if dirname.startswith(parentdir_prefix):
        return {"version": dirname[len(parentdir_prefix):], "full": ""}
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
              (root, dirname, parentdir_prefix))
    return None
def git_get_keywords(versionfile_abs):
    """Extract the expanded git keyword strings from a _version.py file.

    The code embedded in _version.py can just read the module-level
    variables; when used from setup.py we must not import _version.py, so
    the values are scraped with a regexp instead.  Returns a dict with
    "refnames" and/or "full" keys; missing file or keys yield fewer keys.
    """
    keywords = {}
    try:
        # `with` guarantees the handle is closed even if a read raises
        # (the original leaked the handle on mid-read errors).
        with open(versionfile_abs, "r") as f:
            for line in f:
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        # Unreadable/missing file: return whatever was collected (usually {}).
        pass
    return keywords
def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
    """Derive version info from expanded git-archive keyword strings."""
    if not keywords:
        return {}  # keyword-finding function failed to find keywords
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        return {}  # unexpanded, so not in an unpacked git-archive tarball
    refs = set(r.strip() for r in refnames.strip("()").split(","))
    # Since git 1.8.3 tags are listed as "tag: foo-1.0" instead of just
    # "foo-1.0"; prefer entries carrying that marker.
    tag_marker = "tag: "
    tags = set(r[len(tag_marker):] for r in refs if r.startswith(tag_marker))
    if not tags:
        # Either git < 1.8.3 or there really are no tags.  Heuristic: the
        # old %d expansion strips refs/heads/ and refs/tags/, so branches
        # and tags are indistinguishable; keeping only refs that contain a
        # digit filters out names like "release", "HEAD" and "master".
        tags = set(r for r in refs if re.search(r'\d', r))
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        version = ref[len(tag_prefix):]
        if verbose:
            print("picking %s" % version)
        return {"version": version,
                "full": keywords["full"].strip()}
    # No tag matched the prefix: fall back to the full revision id.
    if verbose:
        print("no suitable tags, using full revision id")
    return {"version": keywords["full"].strip(),
            "full": keywords["full"].strip()}
def git_versions_from_vcs(tag_prefix, root, verbose=False):
    """Extract version info by running git inside a checked-out tree.

    Only used when the git-archive keywords were *not* expanded and
    _version.py has not been rewritten -- i.e. inside a real checkout.
    """
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        return {}
    git_commands = ["git"]
    if sys.platform == "win32":
        git_commands = ["git.cmd", "git.exe"]
    described = run_command(git_commands,
                            ["describe", "--tags", "--dirty", "--always"],
                            cwd=root)
    if described is None:
        return {}
    if not described.startswith(tag_prefix):
        if verbose:
            print("tag '%s' doesn't start with prefix '%s'"
                  % (described, tag_prefix))
        return {}
    version = described[len(tag_prefix):]
    revision = run_command(git_commands, ["rev-parse", "HEAD"], cwd=root)
    if revision is None:
        return {}
    full = revision.strip()
    # Propagate the dirty marker from `git describe` onto the revision id.
    if version.endswith("-dirty"):
        full += "-dirty"
    return {"version": version, "full": full}
def get_versions(default=None, verbose=False):
    """Return {"version": ..., "full": ...} using the best available source.

    Order of preference: expanded git-archive keywords, a live git
    checkout, the parent directory name, then *default*.  *default*
    defaults to {"version": "unknown", "full": ""} (computed per call to
    avoid the shared-mutable-default pitfall).
    """
    if default is None:
        default = {"version": "unknown", "full": ""}
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    keywords = {"refnames": git_refnames, "full": git_full}
    ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
    if ver:
        return ver
    try:
        root = os.path.abspath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in range(len(versionfile_source.split(os.sep))):
            root = os.path.dirname(root)
    except NameError:
        return default
    return (git_versions_from_vcs(tag_prefix, root, verbose)
            or versions_from_parentdir(parentdir_prefix, root, verbose)
            or default)
|
{
"content_hash": "97b1d8ed9bbf1d0b8ac6f0d97c2924bb",
"timestamp": "",
"source": "github",
"line_count": 172,
"max_line_length": 87,
"avg_line_length": 40.06976744186046,
"alnum_prop": 0.585896691816599,
"repo_name": "markovmodel/pystallone",
"id": "cce5532556b0d7cf01f0dc63c785b26584d86618",
"size": "7426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pystallone/_version.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "6787"
},
{
"name": "Python",
"bytes": "73848"
}
],
"symlink_target": ""
}
|
from fenalib.assert_utils import assert_type, assert_list_types
from fenalib.repr_utils import addrepr
@addrepr
class McFunction:
    """One mcfunction file under construction.

    Args:
        mfunc_name (str)
        full_path (str)
        debug (bool)

    Attributes:
        mfunc_name (str)
        full_path (str): The full path to the mcfunction file
        debug (bool): Whether to debug or not with the debug command line option
        commands (list or tuple): The full sequence of strings in an mcfunction
    """
    def __init__(self, mfunc_name, full_path, debug):
        assert_type(mfunc_name, str)
        assert_type(full_path, str)
        assert_type(debug, bool)
        self.mfunc_name = mfunc_name
        self.full_path = full_path
        self.debug = debug
        # Commands stay a mutable list until finalize() freezes them.
        self.commands = []
        self._finalized = False
    @property
    def finalized(self):
        """Whether this mcfunction has been frozen against further edits."""
        return self._finalized
    def add_command(self, command):
        """Append a single command string; only valid before finalization.

        Args:
            command (str)
        """
        assert_type(command, str)
        assert not self._finalized, "Cannot add a command if finalized"
        self.commands.append(command)
    def finalize(self):
        """Freeze the command sequence by converting the list to a tuple."""
        assert_type(self.commands, list)
        assert not self._finalized
        self.commands = tuple(self.commands)
        self._finalized = True
    def __str__(self):
        return "McFunction[%s]" % (self.mfunc_name,)
|
{
"content_hash": "f4628467eebe8d5dc53558babc72d56c",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 80,
"avg_line_length": 27.636363636363637,
"alnum_prop": 0.6013157894736842,
"repo_name": "Aquafina-water-bottle/Command-Compiler-Unlimited",
"id": "e99a363b3e37fbe925d2c3f03862a03206820079",
"size": "1520",
"binary": false,
"copies": "1",
"ref": "refs/heads/fena_v12",
"path": "fenalib/mcfunction.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "106276"
}
],
"symlink_target": ""
}
|
import unittest
import threading
import time
import yaml
import json
import time
import logging
from multiprocessing import Process
try:
from test.onboarding import fakeslm_onboarding
except:
from onboarding import fakeslm_onboarding
try:
from test.instantiation import fakeslm_instantiation
except:
from instantiation import fakeslm_instantiation
try:
from test.updating import fakeslm_updating
except:
from updating import fakeslm_updating
try:
from test.terminating import fakeslm_termination
except:
from terminating import fakeslm_termination
from sonmanobase.messaging import ManoBrokerRequestResponseConnection
from son_mano_specific_manager_registry.specificmanagerregistry import SpecificManagerRegistry
logging.basicConfig(level=logging.INFO)
# Keep broker and framework loggers at INFO so test output stays readable.
logging.getLogger('amqp-storm').setLevel(logging.INFO)
LOG = logging.getLogger("son-mano-plugins:smr_test")
logging.getLogger("son-mano-base:messaging").setLevel(logging.INFO)
logging.getLogger("son-mano-base:plugin").setLevel(logging.INFO)
LOG.setLevel(logging.INFO)
class test_SMR_functionalities(unittest.TestCase):
    """Integration tests for the Specific Manager Registry (SMR) plugin.

    The SMR is started in a child process and exercised over the message
    broker by fake Service Lifecycle Manager (SLM) processes; callbacks
    verify the SSM/FSM results the SMR publishes.
    """
    @classmethod
    def setUpClass(self):
        # NOTE(review): unittest passes the class object here, so `self`
        # is really the class; kept as-is to preserve the original style.
        print("SetUpClass triggered")
        self.manoconn = ManoBrokerRequestResponseConnection('smr-unittest')
        # Run the SMR in its own process so it owns its broker connection.
        self.smr_proc = Process(target=SpecificManagerRegistry)
        self.smr_proc.daemon = True
        self.smr_proc.start()
        # Events used by the broker callbacks to signal arriving results.
        self.wait_for_ssm_event = threading.Event()
        self.wait_for_ssm_event.clear()
        self.wait_for_fsm_event = threading.Event()
        self.wait_for_fsm_event.clear()
        self.event1 = False
        self.event2 = False
        LOG.info('SMR is active')
    @classmethod
    def tearDownClass(self):
        # Tear down the SMR process and the broker connection, tolerating
        # errors from an already-closed connection.
        if self.smr_proc is not None:
            self.smr_proc.terminate()
        del self.smr_proc
        try:
            self.manoconn.stop_connection()
        except Exception as e:
            LOG.exception("Stop connection exception.")
        del self.wait_for_fsm_event
        del self.wait_for_ssm_event
    def ssm_eventFinished(self):
        self.wait_for_ssm_event.set()
    def waitForSSMEvent(self, timeout=5, msg="Event timed out."):
        if not self.wait_for_ssm_event.wait(timeout):
            self.assertEqual(True, False, msg=msg)
    def fsm_eventFinished(self):
        self.wait_for_fsm_event.set()
    def waitForFSMEvent(self, timeout=5, msg="Event timed out."):
        if not self.wait_for_fsm_event.wait(timeout):
            self.assertEqual(True, False, msg=msg)
    def test_1_SMR_onboard(self):
        """On-board SSMs and FSMs via a fake SLM and check the results."""
        self.event1 = False
        self.event2 = False
        def on_ssm_onboarding_result(ch, method, properties, message):
            if properties.app_id == 'son-plugin.SpecificManagerRegistry':
                # safe_load: never execute arbitrary YAML tags off the bus.
                result = yaml.safe_load(message)
                self.assertTrue(list(result.keys()) == ['sonssmservice1dumb1','sonssmservice1placement1'] or
                                list(result.keys()) == ['sonssmservice1placement1', 'sonssmservice1dumb1'],
                                msg='not all SSMs results received')
                self.assertTrue(result['sonssmservice1dumb1']['status'] == 'On-boarded',
                                msg='error in onbording sonssmservice1dumb1')
                self.assertTrue(result['sonssmservice1dumb1']['error'] == 'None',
                                msg='error in onbording sonssmservice1dumb1')
                self.assertTrue(result['sonssmservice1placement1']['status'] == 'On-boarded',
                                msg='error in onbording sonssmservice1dumb1')
                self.assertTrue(result['sonssmservice1placement1']['error'] == 'None',
                                msg='error in onbording sonssmservice1placement1')
                self.ssm_eventFinished()
        def on_fsm_onboarding_result(ch, method, properties, message):
            if properties.app_id == 'son-plugin.SpecificManagerRegistry':
                result = yaml.safe_load(message)
                if list(result.keys()) == ['sonfsmservice1function1dumb1']:
                    self.assertTrue(list(result.keys()) == ['sonfsmservice1function1dumb1'],
                                    msg='not all FSMs results in VNFD1 received')
                    self.assertTrue(result['sonfsmservice1function1dumb1']['status'] == 'On-boarded',
                                    msg='error in onbording sonssmservice1dumb1')
                    self.assertTrue(result['sonfsmservice1function1dumb1']['error'] == 'None',
                                    msg='error in onbording sonfsmservice1function1dumb1')
                    self.event1 = True
                else:
                    self.assertTrue(list(result.keys()) ==
                                    ['sonfsmservice1function1monitoring1', 'sonfsmservice1firewallconfiguration1'] or
                                    list(result.keys()) ==
                                    ['sonfsmservice1firewallconfiguration1','sonfsmservice1function1monitoring1']
                                    , msg='not all FSMs results in VNFD2 received')
                    self.assertTrue(result['sonfsmservice1function1monitoring1']['status'] == 'On-boarded',
                                    msg='error in onbording sonssmservice1dumb1')
                    self.assertTrue(result['sonfsmservice1function1monitoring1']['error'] == 'None',
                                    msg='error in onbording sonfsmservice1function1monitoring1')
                    self.assertTrue(result['sonfsmservice1firewallconfiguration1']['status'] == 'On-boarded',
                                    msg='error in onbording sonssmservice1dumb1')
                    self.assertTrue(result['sonfsmservice1firewallconfiguration1']['error'] == 'None',
                                    msg='error in onbording sonfsmservice1firewallconfiguration1')
                    self.event2 = True
                # Finish only once both VNFD result batches have arrived.
                # (fixed redundant `self.event2 == True` comparison)
                if self.event1 and self.event2:
                    self.fsm_eventFinished()
        self.manoconn.subscribe(on_ssm_onboarding_result, 'specific.manager.registry.ssm.on-board')
        self.manoconn.subscribe(on_fsm_onboarding_result, 'specific.manager.registry.fsm.on-board')
        onboaring_proc = Process(target=fakeslm_onboarding)
        onboaring_proc.daemon = True
        onboaring_proc.start()
        self.waitForSSMEvent(timeout=70, msg='SSM Onboarding request not received.')
        self.waitForFSMEvent(timeout=70, msg='FSM Onboarding request not received.')
        self.wait_for_fsm_event.clear()
        self.wait_for_ssm_event.clear()
        onboaring_proc.terminate()
        del onboaring_proc
    # NOTE(review): large commented-out duplicates of the instantiation,
    # update and termination tests (test_2/test_3/test_4) were removed as
    # dead code; recover them from version control history if they are
    # ever revived.
if __name__ == "__main__":
    # Allow running this test module directly, without a test runner.
    unittest.main()
|
{
"content_hash": "8a748e481d24821b9d3f14590e438727",
"timestamp": "",
"source": "github",
"line_count": 407,
"max_line_length": 119,
"avg_line_length": 43.8009828009828,
"alnum_prop": 0.5927525663319684,
"repo_name": "tsoenen/son-mano-framework",
"id": "bde01d19e88f0a52d6c070fa32fff89f04ee5a63",
"size": "19281",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plugins/son-mano-specificmanager/test/test_smr.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "12205"
},
{
"name": "Python",
"bytes": "598734"
},
{
"name": "Shell",
"bytes": "43641"
}
],
"symlink_target": ""
}
|
from Main.models.categories import Category, Product_category
from Main.models.products import Product
from Main.utilities.logs import logError
from django.utils import timezone
import datetime
def getCategoryFromProduct(product):
    """Resolve the category hierarchy (up to three levels) for a product.

    Args:
        product: dict with at least an "id" key.

    Returns:
        dict with keys "category0" (root), "category1", "category2"
        (deepest); levels that do not apply are None.  On a failed parent
        lookup, only the directly linked category is returned.
    """
    category = Product_category.objects.filter(product_id=product["id"])[0].category.get_as_dict()
    category2 = None
    category1 = None
    category0 = None
    try:
        if category["level_depth"] == 2:
            category2 = category
            category1 = Category.objects.get(id=category["parent_id"]).get_as_dict()
            # BUG FIX: the root must be the parent of category1; the
            # original used category2["parent_id"], which just resolved
            # category1 a second time.
            category0 = Category.objects.get(id=category1["parent_id"]).get_as_dict()
        elif category["level_depth"] == 1:
            category1 = category
            category0 = Category.objects.get(id=category1["parent_id"]).get_as_dict()
        else:
            category0 = category
    except Exception:
        # Explicit broad catch (was a bare except): degrade gracefully on
        # a broken hierarchy instead of taking the product page down.
        logError("category not found on product page id:" + str(product["id"]))
        return {"category0": category}
    return {
        "category2": category2,
        "category1": category1,
        "category0": category0
    }
def getDailyDeals():
    """Return up to five daily-deal products and the deal end time
    (midnight of the next local day, formatted as MM/DD/YYYY 00:00:00)."""
    #todo modify to get the actual deals
    deals = Product.objects.filter(daily_deal=True)[:5]
    midnight = (timezone.localtime(timezone.now())
                + datetime.timedelta(days=1)).strftime('%m/%d/%Y 00:00:00')
    return {
        "products": [deal.get_as_big_dict() for deal in deals],
        "endtime": midnight
    }
def getLayoutCategories():
    """Active top-level categories ordered by position, as plain dicts."""
    top_level = Category.objects.filter(active=True, level_depth=0).order_by('position')
    return [category.get_as_dict() for category in top_level]
def getMainDict():
    """Context entries shared by every page layout."""
    return {"layoutCategories": getLayoutCategories()}
|
{
"content_hash": "f79763eebbde346c249ec8007935050f",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 105,
"avg_line_length": 29.984126984126984,
"alnum_prop": 0.6273160402329275,
"repo_name": "G4brym/Ecomerce",
"id": "0586848a0f657d182dee332c3f79994c00867b84",
"size": "1889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Main/utilities/general.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "416371"
},
{
"name": "HTML",
"bytes": "3709120"
},
{
"name": "JavaScript",
"bytes": "1241034"
},
{
"name": "Python",
"bytes": "79660"
}
],
"symlink_target": ""
}
|
"""Agent action implementations"""
import logging
import six
from cliff import command
from cliff import lister
from cliff import show
from openstackclient.common import utils
class CreateAgent(show.ShowOne):
    """Create compute agent command"""

    log = logging.getLogger(__name__ + ".CreateAgent")

    def get_parser(self, prog_name):
        """Build the argument parser for the create-agent command."""
        parser = super(CreateAgent, self).get_parser(prog_name)
        parser.add_argument(
            "os",
            metavar="<os>",
            help="Type of OS")
        parser.add_argument(
            "architecture",
            metavar="<architecture>",
            help="Type of architecture")
        parser.add_argument(
            "version",
            metavar="<version>",
            help="Version")
        parser.add_argument(
            "url",
            metavar="<url>",
            help="URL")
        parser.add_argument(
            "md5hash",
            metavar="<md5hash>",
            help="MD5 hash")
        # BUG FIX: argparse ignores `default` on a required positional;
        # nargs="?" makes the documented "xen" default effective while
        # remaining backward compatible for callers that pass a value.
        parser.add_argument(
            "hypervisor",
            metavar="<hypervisor>",
            nargs="?",
            help="Type of hypervisor (default: xen)",
            default="xen")
        return parser

    def take_action(self, parsed_args):
        """Create the agent and return its properties as (names, values)."""
        self.log.debug("take_action(%s)", parsed_args)
        compute_client = self.app.client_manager.compute
        args = (
            parsed_args.os,
            parsed_args.architecture,
            parsed_args.version,
            parsed_args.url,
            parsed_args.md5hash,
            parsed_args.hypervisor
        )
        agent = compute_client.agents.create(*args)._info.copy()
        return zip(*sorted(six.iteritems(agent)))
class DeleteAgent(command.Command):
    """Delete compute agent command"""

    log = logging.getLogger(__name__ + ".DeleteAgent")

    def get_parser(self, prog_name):
        """Build the argument parser for the delete-agent command."""
        parser = super(DeleteAgent, self).get_parser(prog_name)
        parser.add_argument(
            "id",
            metavar="<id>",
            help="ID of agent to delete")
        return parser

    def take_action(self, parsed_args):
        """Delete the agent; nothing is displayed on success."""
        self.log.debug("take_action(%s)", parsed_args)
        self.app.client_manager.compute.agents.delete(parsed_args.id)
        return
class ListAgent(lister.Lister):
    """List compute agent command"""

    log = logging.getLogger(__name__ + ".ListAgent")

    def get_parser(self, prog_name):
        """Build the argument parser for the list-agents command."""
        parser = super(ListAgent, self).get_parser(prog_name)
        parser.add_argument(
            "--hypervisor",
            metavar="<hypervisor>",
            help="Type of hypervisor")
        return parser

    def take_action(self, parsed_args):
        """Return (column names, row iterator) for the agent listing."""
        self.log.debug("take_action(%s)", parsed_args)
        compute_client = self.app.client_manager.compute
        columns = ("Agent ID", "Hypervisor", "OS", "Architecture",
                   "Version", "Md5Hash", "URL")
        agents = compute_client.agents.list(parsed_args.hypervisor)
        rows = (utils.get_item_properties(agent, columns) for agent in agents)
        return (columns, rows)
class SetAgent(show.ShowOne):
    """Set compute agent command"""

    log = logging.getLogger(__name__ + ".SetAgent")

    def get_parser(self, prog_name):
        """Build the argument parser for the set-agent command."""
        parser = super(SetAgent, self).get_parser(prog_name)
        # All four arguments are plain required positionals.
        for arg_name, arg_help in (
                ("id", "ID of the agent"),
                ("version", "Version of the agent"),
                ("url", "URL"),
                ("md5hash", "MD5 hash")):
            parser.add_argument(
                arg_name,
                metavar="<%s>" % arg_name,
                help=arg_help)
        return parser

    def take_action(self, parsed_args):
        """Update the agent and return its properties as (names, values)."""
        self.log.debug("take_action(%s)", parsed_args)
        compute_client = self.app.client_manager.compute
        updated = compute_client.agents.update(
            parsed_args.id,
            parsed_args.version,
            parsed_args.url,
            parsed_args.md5hash)
        agent_info = updated._info.copy()
        return zip(*sorted(six.iteritems(agent_info)))
|
{
"content_hash": "ade23f155bd58b1a2acea4b34e7c6ffb",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 65,
"avg_line_length": 28.7248322147651,
"alnum_prop": 0.5432242990654206,
"repo_name": "varunarya10/python-openstackclient",
"id": "14c4b2c7f5b8512f1760f8adda1308dc7950ba15",
"size": "4888",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "openstackclient/compute/v2/agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "927954"
},
{
"name": "Shell",
"bytes": "2264"
}
],
"symlink_target": ""
}
|
import ldap
import os
import logging
logger = logging.getLogger(__name__)
class Authenticator:
    """Authenticate users against an LDAP directory.

    In debug mode every login is accepted; otherwise credentials are
    checked with a simple LDAP bind, optionally preceded by a
    bind-then-search step to resolve the user's DN.
    """
    def __init__(self, config):
        if config.get('debug'):
            # Debug mode short-circuits authentication entirely.
            self.authenticate = self.debug_auth
            return
        self.authenticate = self.ldap_auth
        if 'ldap_cert_path' in config:
            self.cert_path = config['ldap_cert_path']
            if not os.access(self.cert_path, os.R_OK):
                logger.error("Failed to read ldap_cert_path certificate")
                raise IOError
        else:
            self.cert_path = None
        self.bind_user = config.get('ldap_bind_user')
        self.bind_password = config.get('ldap_bind_password')
        self.search_filter = config.get('ldap_search_filter')
        self.ldap_url = config.get('ldap_url')
        self.base_dn = config.get('ldap_base_dn')
        self.user_suffix = config.get('ldap_user_suffix')
    def ldap_auth(self, username, password):
        """Return True on success, False on bad credentials,
        None on server/DN errors."""
        if self.cert_path:
            ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, self.cert_path)
        connection = ldap.initialize(self.ldap_url)
        connection.set_option(ldap.OPT_REFERRALS, 0)
        # Reject empty passwords: many LDAP servers treat an empty
        # password as an anonymous bind that would otherwise "succeed".
        if not password:
            return False
        auth_user = username + self.user_suffix
        try:
            if self.bind_user:
                # use search filter to find DN of username
                connection.simple_bind_s(self.bind_user, self.bind_password)
                sfilter = self.search_filter % username
                result = connection.search_s(self.base_dn, ldap.SCOPE_SUBTREE, sfilter, ['dn'])
                if len(result) < 1:
                    return False
                auth_user = result[0][0]
            connection.simple_bind_s(auth_user, password)
        except ldap.INVALID_CREDENTIALS:
            return False
        except (ldap.SERVER_DOWN, ldap.INVALID_DN_SYNTAX) as err:
            # logger.warn is a deprecated alias of logger.warning
            logger.warning("%s", err)
            return None
        return True
    def debug_auth(self, username, password):
        """Debug backend: accept every credential pair."""
        return True
|
{
"content_hash": "fc48cd77a9d0006f2bcc4a8ff818e19c",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 95,
"avg_line_length": 32.36507936507937,
"alnum_prop": 0.5777341834232467,
"repo_name": "dwang159/oncall",
"id": "4f5ececb46ea789b0776ba4aafb80046eab60384",
"size": "2198",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/oncall/auth/modules/ldap_example.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "57828"
},
{
"name": "Dockerfile",
"bytes": "1236"
},
{
"name": "HTML",
"bytes": "104894"
},
{
"name": "JavaScript",
"bytes": "448003"
},
{
"name": "Makefile",
"bytes": "400"
},
{
"name": "Python",
"bytes": "468533"
},
{
"name": "Shell",
"bytes": "1494"
},
{
"name": "TSQL",
"bytes": "4158"
}
],
"symlink_target": ""
}
|
"""
aip public
"""
from .ocr import AipOcr
from .nlp import AipNlp
from .face import AipFace
from .imagecensor import AipImageCensor
from .imagecensor import AipImageCensor as AipContentCensor
from .kg import AipKg
from .speech import AipSpeech
from .imageclassify import AipImageClassify
from .imagesearch import AipImageSearch
from .bodyanalysis import AipBodyAnalysis
from .easydl import EasyDL
|
{
"content_hash": "4f086129cf638dd5e7134d1cbfcfc646",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 59,
"avg_line_length": 26.8,
"alnum_prop": 0.8233830845771144,
"repo_name": "xbed/Mixly_Arduino",
"id": "e0cf3daf34128fe70df4e7c6f9aff27ee03b48b4",
"size": "426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mixly_arduino/mixpyBuild/aip/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2804"
},
{
"name": "C",
"bytes": "40574522"
},
{
"name": "C++",
"bytes": "9024916"
},
{
"name": "CSS",
"bytes": "123363"
},
{
"name": "HTML",
"bytes": "2561454"
},
{
"name": "Java",
"bytes": "15310"
},
{
"name": "JavaScript",
"bytes": "4963410"
},
{
"name": "Logos",
"bytes": "19758"
},
{
"name": "Makefile",
"bytes": "32606"
},
{
"name": "Objective-C",
"bytes": "86700"
},
{
"name": "Processing",
"bytes": "97439"
},
{
"name": "Python",
"bytes": "278933"
},
{
"name": "Shell",
"bytes": "17928"
},
{
"name": "TypeScript",
"bytes": "150"
},
{
"name": "Visual Basic",
"bytes": "116"
}
],
"symlink_target": ""
}
|
"""Light platform support for yeelight."""
import logging
import voluptuous as vol
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.service import extract_entity_ids
from homeassistant.util.color import (
color_temperature_mired_to_kelvin as mired_to_kelvin,
color_temperature_kelvin_to_mired as kelvin_to_mired)
from homeassistant.const import CONF_HOST, ATTR_ENTITY_ID, CONF_NAME
from homeassistant.core import callback
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_COLOR_TEMP,
ATTR_FLASH, FLASH_SHORT, FLASH_LONG, ATTR_EFFECT, SUPPORT_BRIGHTNESS,
SUPPORT_COLOR, SUPPORT_TRANSITION, SUPPORT_COLOR_TEMP, SUPPORT_FLASH,
SUPPORT_EFFECT, Light)
import homeassistant.util.color as color_util
from . import (
CONF_TRANSITION, DATA_YEELIGHT, CONF_MODE_MUSIC,
CONF_SAVE_ON_CHANGE, CONF_CUSTOM_EFFECTS, DATA_UPDATED,
YEELIGHT_SERVICE_SCHEMA, DOMAIN, ATTR_TRANSITIONS,
YEELIGHT_FLOW_TRANSITION_SCHEMA, ACTION_RECOVER, CONF_FLOW_PARAMS,
ATTR_ACTION, ATTR_COUNT)
_LOGGER = logging.getLogger(__name__)
# Feature bitmasks per bulb capability class; assigned to entities in
# YeelightLight.update() based on the detected bulb type.
SUPPORT_YEELIGHT = (SUPPORT_BRIGHTNESS |
                    SUPPORT_TRANSITION |
                    SUPPORT_FLASH)
SUPPORT_YEELIGHT_WHITE_TEMP = (SUPPORT_YEELIGHT |
                               SUPPORT_COLOR_TEMP)
SUPPORT_YEELIGHT_RGB = (SUPPORT_YEELIGHT |
                        SUPPORT_COLOR |
                        SUPPORT_EFFECT |
                        SUPPORT_COLOR_TEMP)
ATTR_MODE = 'mode'
# Platform services registered in setup_platform().
SERVICE_SET_MODE = 'set_mode'
SERVICE_START_FLOW = 'start_flow'
# Built-in effect names exposed in the UI; mapped to yeelight flows in
# YeelightLight.set_effect().
EFFECT_DISCO = "Disco"
EFFECT_TEMP = "Slow Temp"
EFFECT_STROBE = "Strobe epilepsy!"
EFFECT_STROBE_COLOR = "Strobe color"
EFFECT_ALARM = "Alarm"
EFFECT_POLICE = "Police"
EFFECT_POLICE2 = "Police2"
EFFECT_CHRISTMAS = "Christmas"
EFFECT_RGB = "RGB"
EFFECT_RANDOM_LOOP = "Random Loop"
EFFECT_FAST_RANDOM_LOOP = "Fast Random Loop"
EFFECT_LSD = "LSD"
EFFECT_SLOWDOWN = "Slowdown"
EFFECT_WHATSAPP = "WhatsApp"
EFFECT_FACEBOOK = "Facebook"
EFFECT_TWITTER = "Twitter"
EFFECT_STOP = "Stop"
YEELIGHT_EFFECT_LIST = [
    EFFECT_DISCO,
    EFFECT_TEMP,
    EFFECT_STROBE,
    EFFECT_STROBE_COLOR,
    EFFECT_ALARM,
    EFFECT_POLICE,
    EFFECT_POLICE2,
    EFFECT_CHRISTMAS,
    EFFECT_RGB,
    EFFECT_RANDOM_LOOP,
    EFFECT_FAST_RANDOM_LOOP,
    EFFECT_LSD,
    EFFECT_SLOWDOWN,
    EFFECT_WHATSAPP,
    EFFECT_FACEBOOK,
    EFFECT_TWITTER,
    EFFECT_STOP]
def _transitions_config_parser(transitions):
    """Instantiate yeelight transition objects from config entries.

    Each entry is a one-item mapping of a yeelight transition class name
    to its positional arguments.
    """
    import yeelight
    parsed = []
    for entry in transitions:
        name, params = next(iter(entry.items()))
        parsed.append(getattr(yeelight, name)(*params))
    return parsed
def _parse_custom_effects(effects_config):
    """Build ``yeelight.Flow`` keyword arguments for each custom effect.

    Returns a dict keyed by effect name, with count/action/transitions
    entries ready to be splatted into ``Flow(**...)``.
    """
    import yeelight
    parsed = {}
    for conf in effects_config:
        flow_params = conf[CONF_FLOW_PARAMS]
        parsed[conf[CONF_NAME]] = {
            ATTR_COUNT: flow_params[ATTR_COUNT],
            ATTR_ACTION: yeelight.Flow.actions[flow_params[ATTR_ACTION]],
            ATTR_TRANSITIONS: _transitions_config_parser(
                flow_params[ATTR_TRANSITIONS]),
        }
    return parsed
def _cmd(func):
    """Define a wrapper to catch exceptions from the bulb.

    Calls are logged at debug level; a ``yeelight.BulbException`` raised
    by the wrapped method is logged as an error and swallowed (the
    wrapper then returns ``None``).
    """
    import functools

    # functools.wraps preserves the wrapped method's __name__/__doc__,
    # which the bare wrapper previously clobbered (hurting debugging and
    # HA service introspection).
    @functools.wraps(func)
    def _wrap(self, *args, **kwargs):
        import yeelight
        try:
            _LOGGER.debug("Calling %s with %s %s", func, args, kwargs)
            return func(self, *args, **kwargs)
        except yeelight.BulbException as ex:
            _LOGGER.error("Error when calling %s: %s", func, ex)
    return _wrap
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Yeelight bulbs.

    Invoked via platform discovery only: ``discovery_info`` carries the
    host of a device already registered in ``hass.data[DATA_YEELIGHT]``.
    Also registers the ``set_mode`` and ``start_flow`` services.
    """
    from yeelight.enums import PowerMode
    # Shared list of all light entities created by this platform; used
    # by the service handler below to resolve entity_ids to objects.
    data_key = '{}_lights'.format(DATA_YEELIGHT)
    if not discovery_info:
        return
    if data_key not in hass.data:
        hass.data[data_key] = []
    device = hass.data[DATA_YEELIGHT][discovery_info[CONF_HOST]]
    _LOGGER.debug("Adding %s", device.name)
    custom_effects = _parse_custom_effects(discovery_info[CONF_CUSTOM_EFFECTS])
    lights = [YeelightLight(device, custom_effects=custom_effects)]
    # Bulbs with a separate ambient light get a second entity for it.
    if device.is_ambilight_supported:
        lights.append(
            YeelightAmbientLight(device, custom_effects=custom_effects))
    hass.data[data_key] += lights
    add_entities(lights, True)
    def service_handler(service):
        """Dispatch service calls to target entities."""
        params = {key: value for key, value in service.data.items()
                  if key != ATTR_ENTITY_ID}
        entity_ids = extract_entity_ids(hass, service)
        target_devices = [light for light in hass.data[data_key]
                          if light.entity_id in entity_ids]
        for target_device in target_devices:
            if service.service == SERVICE_SET_MODE:
                target_device.set_mode(**params)
            elif service.service == SERVICE_START_FLOW:
                params[ATTR_TRANSITIONS] = \
                    _transitions_config_parser(params[ATTR_TRANSITIONS])
                target_device.start_flow(**params)
    # NOTE(review): the services below are (re-)registered once per
    # discovered device; later registrations replace earlier ones --
    # appears harmless since the handler resolves entities dynamically.
    service_schema_set_mode = YEELIGHT_SERVICE_SCHEMA.extend({
        vol.Required(ATTR_MODE):
            vol.In([mode.name.lower() for mode in PowerMode])
    })
    hass.services.register(
        DOMAIN, SERVICE_SET_MODE, service_handler,
        schema=service_schema_set_mode)
    service_schema_start_flow = YEELIGHT_SERVICE_SCHEMA.extend(
        YEELIGHT_FLOW_TRANSITION_SCHEMA
    )
    hass.services.register(
        DOMAIN, SERVICE_START_FLOW, service_handler,
        schema=service_schema_start_flow)
class YeelightLight(Light):
    """Representation of a Yeelight light."""
    def __init__(self, device, custom_effects=None):
        """Initialize the Yeelight light.

        device: shared yeelight device wrapper from the component.
        custom_effects: optional mapping of effect name -> Flow kwargs
        as produced by ``_parse_custom_effects``.
        """
        from yeelight.enums import LightType
        self.config = device.config
        self._device = device
        # Cached entity state; refreshed from bulb properties in update().
        self._supported_features = SUPPORT_YEELIGHT
        self._brightness = None
        self._color_temp = None
        self._is_on = None
        self._hs = None
        self._min_mireds = None
        self._max_mireds = None
        self._light_type = LightType.Main
        if custom_effects:
            self._custom_effects = custom_effects
        else:
            self._custom_effects = {}
    @callback
    def _schedule_immediate_update(self):
        # Force a state refresh (update() will be called) from the loop.
        self.async_schedule_update_ha_state(True)
    async def async_added_to_hass(self):
        """Handle entity which will be added."""
        # Refresh this entity whenever the component signals new data
        # for the device at this IP address.
        async_dispatcher_connect(
            self.hass,
            DATA_UPDATED.format(self._device.ipaddr),
            self._schedule_immediate_update
        )
    @property
    def should_poll(self):
        """No polling needed."""
        return False
    @property
    def available(self) -> bool:
        """Return if bulb is available."""
        return self.device.available
    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return self._supported_features
    @property
    def effect_list(self):
        """Return the list of supported effects (built-in + custom)."""
        return YEELIGHT_EFFECT_LIST + self.custom_effects_names
    @property
    def color_temp(self) -> int:
        """Return the color temperature (in mireds)."""
        return self._color_temp
    @property
    def name(self) -> str:
        """Return the name of the device if any."""
        return self.device.name
    @property
    def is_on(self) -> bool:
        """Return true if device is on."""
        return self._is_on
    @property
    def brightness(self) -> int:
        """Return the brightness of this light between 1..255."""
        return self._brightness
    @property
    def min_mireds(self):
        """Return minimum supported color temperature."""
        return self._min_mireds
    @property
    def max_mireds(self):
        """Return maximum supported color temperature."""
        return self._max_mireds
    @property
    def custom_effects(self):
        """Return dict with custom effects."""
        return self._custom_effects
    @property
    def custom_effects_names(self):
        """Return list with custom effects names."""
        return list(self.custom_effects.keys())
    @property
    def light_type(self):
        """Return light type (Main for this class, Ambient in subclass)."""
        return self._light_type
    def _get_hs_from_properties(self):
        # Derive the hs color tuple from the bulb's reported state; the
        # conversion depends on the active color_mode.
        rgb = self._get_property('rgb')
        color_mode = self._get_property('color_mode')
        if not rgb or not color_mode:
            return None
        color_mode = int(color_mode)
        if color_mode == 2:  # color temperature
            temp_in_k = mired_to_kelvin(self._color_temp)
            return color_util.color_temperature_to_hs(temp_in_k)
        if color_mode == 3:  # hsv
            hue = int(self._get_property('hue'))
            sat = int(self._get_property('sat'))
            # NOTE(review): HA hs_color is normally (0-360, 0-100); this
            # scaling looks unusual -- verify against consumers.
            return (hue / 360 * 65536, sat / 100 * 255)
        # Remaining mode: packed 0xRRGGBB integer.
        rgb = int(rgb)
        blue = rgb & 0xff
        green = (rgb >> 8) & 0xff
        red = (rgb >> 16) & 0xff
        return color_util.color_RGB_to_hs(red, green, blue)
    @property
    def hs_color(self) -> tuple:
        """Return the color property."""
        return self._hs
    @property
    def _properties(self) -> dict:
        # Last property snapshot fetched from the bulb; empty when no
        # bulb object is available.
        if self._bulb is None:
            return {}
        return self._bulb.last_properties
    def _get_property(self, prop, default=None):
        # Convenience accessor; overridden by the ambient subclass.
        return self._properties.get(prop, default)
    @property
    def device(self):
        """Return yeelight device."""
        return self._device
    @property
    def _is_nightlight_enabled(self):
        return self.device.is_nightlight_enabled
    # F821: https://github.com/PyCQA/pyflakes/issues/373
    @property
    def _bulb(self) -> 'yeelight.Bulb':  # noqa: F821
        return self.device.bulb
    def set_music_mode(self, mode) -> None:
        """Set the music mode on or off."""
        if mode:
            self._bulb.start_music()
        else:
            self._bulb.stop_music()
    def update(self) -> None:
        """Update properties from the bulb."""
        import yeelight
        # Supported features depend on bulb type and (for white bulbs)
        # whether nightlight mode is currently active.
        bulb_type = self._bulb.bulb_type
        if bulb_type == yeelight.BulbType.Color:
            self._supported_features = SUPPORT_YEELIGHT_RGB
        elif self.light_type == yeelight.enums.LightType.Ambient:
            self._supported_features = SUPPORT_YEELIGHT_RGB
        elif bulb_type in (yeelight.BulbType.WhiteTemp,
                           yeelight.BulbType.WhiteTempMood):
            if self._is_nightlight_enabled:
                self._supported_features = SUPPORT_YEELIGHT
            else:
                self._supported_features = SUPPORT_YEELIGHT_WHITE_TEMP
        # Mired range comes from the model spec; kelvin and mireds are
        # inverse scales, hence max kelvin -> min mireds.
        if self.min_mireds is None:
            model_specs = self._bulb.get_model_specs()
            self._min_mireds = \
                kelvin_to_mired(model_specs['color_temp']['max'])
            self._max_mireds = \
                kelvin_to_mired(model_specs['color_temp']['min'])
        if bulb_type == yeelight.BulbType.WhiteTempMood:
            self._is_on = self._get_property('main_power') == 'on'
        else:
            self._is_on = self._get_property('power') == 'on'
        if self._is_nightlight_enabled:
            bright = self._get_property('nl_br')
        else:
            bright = self._get_property('bright')
        if bright:
            # Bulb reports 1-100; HA uses 0-255.
            self._brightness = round(255 * (int(bright) / 100))
        temp_in_k = self._get_property('ct')
        if temp_in_k:
            self._color_temp = kelvin_to_mired(int(temp_in_k))
        self._hs = self._get_hs_from_properties()
    @_cmd
    def set_brightness(self, brightness, duration) -> None:
        """Set bulb brightness (no-op when brightness is falsy)."""
        if brightness:
            _LOGGER.debug("Setting brightness: %s", brightness)
            # HA 0-255 scale -> bulb 0-100 scale.
            self._bulb.set_brightness(brightness / 255 * 100,
                                      duration=duration,
                                      light_type=self.light_type)
    @_cmd
    def set_rgb(self, rgb, duration) -> None:
        """Set bulb's color (no-op when unsupported or rgb is falsy)."""
        if rgb and self.supported_features & SUPPORT_COLOR:
            _LOGGER.debug("Setting RGB: %s", rgb)
            self._bulb.set_rgb(rgb[0], rgb[1], rgb[2], duration=duration,
                               light_type=self.light_type)
    @_cmd
    def set_colortemp(self, colortemp, duration) -> None:
        """Set bulb's color temperature from a mired value."""
        if colortemp and self.supported_features & SUPPORT_COLOR_TEMP:
            temp_in_k = mired_to_kelvin(colortemp)
            _LOGGER.debug("Setting color temp: %s K", temp_in_k)
            self._bulb.set_color_temp(temp_in_k, duration=duration,
                                      light_type=self.light_type)
    @_cmd
    def set_default(self) -> None:
        """Set current options as default."""
        self._bulb.set_default()
    @_cmd
    def set_flash(self, flash) -> None:
        """Activate flash."""
        if flash:
            from yeelight import (RGBTransition, SleepTransition, Flow,
                                  BulbException)
            if self._bulb.last_properties["color_mode"] != 1:
                _LOGGER.error("Flash supported currently only in RGB mode.")
                return
            transition = int(self.config[CONF_TRANSITION])
            # NOTE(review): count/duration are only assigned for
            # FLASH_LONG/FLASH_SHORT; any other truthy value would raise
            # NameError below -- confirm callers only pass these two.
            if flash == FLASH_LONG:
                count = 1
                duration = transition * 5
            if flash == FLASH_SHORT:
                count = 1
                duration = transition * 2
            red, green, blue = color_util.color_hs_to_RGB(*self._hs)
            # Flash: dim red, pause, then restore the previous color.
            transitions = list()
            transitions.append(
                RGBTransition(255, 0, 0, brightness=10, duration=duration))
            transitions.append(SleepTransition(
                duration=transition))
            transitions.append(
                RGBTransition(red, green, blue, brightness=self.brightness,
                              duration=duration))
            flow = Flow(count=count, transitions=transitions)
            try:
                self._bulb.start_flow(flow, light_type=self.light_type)
            except BulbException as ex:
                _LOGGER.error("Unable to set flash: %s", ex)
    @_cmd
    def set_effect(self, effect) -> None:
        """Activate effect."""
        if effect:
            from yeelight import (Flow, BulbException)
            from yeelight.transitions import (disco, temp, strobe, pulse,
                                              strobe_color, alarm, police,
                                              police2, christmas, rgb,
                                              randomloop, lsd, slowdown)
            if effect == EFFECT_STOP:
                self._bulb.stop_flow(light_type=self.light_type)
                return
            effects_map = {
                EFFECT_DISCO: disco,
                EFFECT_TEMP: temp,
                EFFECT_STROBE: strobe,
                EFFECT_STROBE_COLOR: strobe_color,
                EFFECT_ALARM: alarm,
                EFFECT_POLICE: police,
                EFFECT_POLICE2: police2,
                EFFECT_CHRISTMAS: christmas,
                EFFECT_RGB: rgb,
                EFFECT_RANDOM_LOOP: randomloop,
                EFFECT_LSD: lsd,
                EFFECT_SLOWDOWN: slowdown,
            }
            # Custom effects take precedence over the built-in names.
            if effect in self.custom_effects_names:
                flow = Flow(**self.custom_effects[effect])
            elif effect in effects_map:
                flow = Flow(count=0, transitions=effects_map[effect]())
            elif effect == EFFECT_FAST_RANDOM_LOOP:
                flow = Flow(count=0, transitions=randomloop(duration=250))
            elif effect == EFFECT_WHATSAPP:
                flow = Flow(count=2, transitions=pulse(37, 211, 102))
            elif effect == EFFECT_FACEBOOK:
                flow = Flow(count=2, transitions=pulse(59, 89, 152))
            elif effect == EFFECT_TWITTER:
                flow = Flow(count=2, transitions=pulse(0, 172, 237))
            # NOTE(review): an unrecognized effect name leaves `flow`
            # unassigned and would raise NameError below -- confirm effect
            # names are validated upstream.
            try:
                self._bulb.start_flow(flow, light_type=self.light_type)
            except BulbException as ex:
                _LOGGER.error("Unable to set effect: %s", ex)
    def turn_on(self, **kwargs) -> None:
        """Turn the bulb on."""
        import yeelight
        brightness = kwargs.get(ATTR_BRIGHTNESS)
        colortemp = kwargs.get(ATTR_COLOR_TEMP)
        hs_color = kwargs.get(ATTR_HS_COLOR)
        rgb = color_util.color_hs_to_RGB(*hs_color) if hs_color else None
        flash = kwargs.get(ATTR_FLASH)
        effect = kwargs.get(ATTR_EFFECT)
        duration = int(self.config[CONF_TRANSITION])  # in ms
        if ATTR_TRANSITION in kwargs:  # passed kwarg overrides config
            duration = int(kwargs.get(ATTR_TRANSITION) * 1000)  # kwarg in s
        self.device.turn_on(duration=duration, light_type=self.light_type)
        # Music mode keeps a direct socket open so subsequent commands
        # are not rate-limited; failure to enable it is non-fatal.
        if self.config[CONF_MODE_MUSIC] and not self._bulb.music_mode:
            try:
                self.set_music_mode(self.config[CONF_MODE_MUSIC])
            except yeelight.BulbException as ex:
                _LOGGER.error("Unable to turn on music mode,"
                              "consider disabling it: %s", ex)
        try:
            # values checked for none in methods
            self.set_rgb(rgb, duration)
            self.set_colortemp(colortemp, duration)
            self.set_brightness(brightness, duration)
            self.set_flash(flash)
            self.set_effect(effect)
        except yeelight.BulbException as ex:
            # Bail out without updating state if anything failed.
            _LOGGER.error("Unable to set bulb properties: %s", ex)
            return
        # save the current state if we had a manual change.
        if self.config[CONF_SAVE_ON_CHANGE] and (brightness
                                                 or colortemp
                                                 or rgb):
            try:
                self.set_default()
            except yeelight.BulbException as ex:
                _LOGGER.error("Unable to set the defaults: %s", ex)
                return
        self.device.update()
    def turn_off(self, **kwargs) -> None:
        """Turn off."""
        duration = int(self.config[CONF_TRANSITION])  # in ms
        if ATTR_TRANSITION in kwargs:  # passed kwarg overrides config
            duration = int(kwargs.get(ATTR_TRANSITION) * 1000)  # kwarg in s
        self.device.turn_off(duration=duration, light_type=self.light_type)
        self.device.update()
    def set_mode(self, mode: str):
        """Set a power mode (service handler for ``yeelight.set_mode``)."""
        import yeelight
        try:
            self._bulb.set_power_mode(yeelight.enums.PowerMode[mode.upper()])
            self.device.update()
        except yeelight.BulbException as ex:
            _LOGGER.error("Unable to set the power mode: %s", ex)
    def start_flow(self, transitions, count=0, action=ACTION_RECOVER):
        """Start flow (service handler for ``yeelight.start_flow``)."""
        import yeelight
        try:
            flow = yeelight.Flow(
                count=count,
                action=yeelight.Flow.actions[action],
                transitions=transitions)
            self._bulb.start_flow(flow, light_type=self.light_type)
            self.device.update()
        except yeelight.BulbException as ex:
            _LOGGER.error("Unable to set effect: %s", ex)
class YeelightAmbientLight(YeelightLight):
    """Representation of a Yeelight ambient light.

    Secondary light entity for bulbs reporting
    ``is_ambilight_supported``; its state lives in ``bg_``-prefixed
    bulb properties.
    """
    # Ambient properties that do not follow the plain "bg_" + name rule.
    PROPERTIES_MAPPING = {
        "color_mode": "bg_lmode",
        "main_power": "bg_power",
    }
    def __init__(self, *args, **kwargs):
        """Initialize the Yeelight Ambient light."""
        from yeelight.enums import LightType
        super().__init__(*args, **kwargs)
        # Fixed 1700-6500 K range expressed in mireds (inverse scale,
        # so max kelvin -> min mireds).
        self._min_mireds = kelvin_to_mired(6500)
        self._max_mireds = kelvin_to_mired(1700)
        self._light_type = LightType.Ambient
    @property
    def name(self) -> str:
        """Return the name of the device if any."""
        return "{} ambilight".format(self.device.name)
    @property
    def _is_nightlight_enabled(self):
        # Ambient lights have no nightlight mode.
        return False
    def _get_property(self, prop, default=None):
        # Translate main-light property names to their "bg_" equivalents.
        bg_prop = self.PROPERTIES_MAPPING.get(prop)
        if not bg_prop:
            bg_prop = "bg_" + prop
        return self._properties.get(bg_prop, default)
|
{
"content_hash": "3749c7c08325552a59c62e3e31afbd4f",
"timestamp": "",
"source": "github",
"line_count": 617,
"max_line_length": 79,
"avg_line_length": 33.176661264181526,
"alnum_prop": 0.5829018075232046,
"repo_name": "molobrakos/home-assistant",
"id": "fa62bdc35d7c971174858d626b2ac70975a0b524",
"size": "20470",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "homeassistant/components/yeelight/light.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "HCL",
"bytes": "407"
},
{
"name": "Python",
"bytes": "15057917"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17609"
}
],
"symlink_target": ""
}
|
"""Tests for ortools.util.python.sorted_interval_list."""
import unittest
from ortools.util.python import sorted_interval_list
class SortedIntervalListTest(unittest.TestCase):
    """Unit tests for ortools' ``Domain`` (sorted interval list) wrapper."""
    def testCtorAndGetter(self):
        # Domain(0, 1) is the closed interval [0, 1].
        bool_domain = sorted_interval_list.Domain(0, 1)
        self.assertEqual(2, bool_domain.Size())
        self.assertEqual(0, bool_domain.Min())
        self.assertEqual(1, bool_domain.Max())
        self.assertFalse(bool_domain.IsEmpty())
        self.assertEqual(str(bool_domain), '[0,1]')
    def testFromValues(self):
        domain = sorted_interval_list.Domain.FromValues([1, 3, -5, 5])
        self.assertEqual(4, domain.Size())
        self.assertEqual(-5, domain.Min())
        self.assertEqual(5, domain.Max())
        # Flattened intervals are [start, end] pairs; singleton values
        # appear as repeated endpoints.
        self.assertEqual([-5, -5, 1, 1, 3, 3, 5, 5],
                         domain.FlattenedIntervals())
        self.assertTrue(domain.Contains(1))
        self.assertFalse(domain.Contains(0))
    def testFromIntervals(self):
        # Intervals may be given unsorted; the domain normalizes them.
        domain = sorted_interval_list.Domain.FromIntervals([[2, 4], [-2, 0]])
        self.assertEqual(6, domain.Size())
        self.assertEqual(-2, domain.Min())
        self.assertEqual(4, domain.Max())
        self.assertEqual([-2, 0, 2, 4], domain.FlattenedIntervals())
    def testFromFlatIntervals(self):
        domain = sorted_interval_list.Domain.FromFlatIntervals([2, 4, -2, 0])
        self.assertEqual(6, domain.Size())
        self.assertEqual(-2, domain.Min())
        self.assertEqual(4, domain.Max())
        self.assertEqual([-2, 0, 2, 4], domain.FlattenedIntervals())
    def testNegation(self):
        domain = sorted_interval_list.Domain(5, 20)
        self.assertEqual([-20, -5], domain.Negation().FlattenedIntervals())
    def testUnion(self):
        # Set operations return new domains; operands are unchanged.
        d1 = sorted_interval_list.Domain(0, 5)
        d2 = sorted_interval_list.Domain(10, 15)
        d3 = d1.UnionWith(d2)
        self.assertEqual([0, 5], d1.FlattenedIntervals())
        self.assertEqual([10, 15], d2.FlattenedIntervals())
        self.assertEqual([0, 5, 10, 15], d3.FlattenedIntervals())
    def testIntersection(self):
        d1 = sorted_interval_list.Domain(0, 10)
        d2 = sorted_interval_list.Domain(5, 15)
        d3 = d1.IntersectionWith(d2)
        self.assertEqual([0, 10], d1.FlattenedIntervals())
        self.assertEqual([5, 15], d2.FlattenedIntervals())
        self.assertEqual([5, 10], d3.FlattenedIntervals())
    def testAddition(self):
        # [0,5] + [10,15] yields [10,20] (pairwise sums of elements).
        d1 = sorted_interval_list.Domain(0, 5)
        d2 = sorted_interval_list.Domain(10, 15)
        d3 = d1.AdditionWith(d2)
        self.assertEqual([0, 5], d1.FlattenedIntervals())
        self.assertEqual([10, 15], d2.FlattenedIntervals())
        self.assertEqual([10, 20], d3.FlattenedIntervals())
    def testComplement(self):
        # Domain bounds are int64; the complement runs to int64 max.
        d1 = sorted_interval_list.Domain(-9223372036854775808, 5)
        d2 = d1.Complement()
        self.assertEqual([-9223372036854775808, 5], d1.FlattenedIntervals())
        self.assertEqual([6, 9223372036854775807], d2.FlattenedIntervals())
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "81c3a18a0fc673a1b40d1f53e0d94403",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 77,
"avg_line_length": 39.23076923076923,
"alnum_prop": 0.6333333333333333,
"repo_name": "or-tools/or-tools",
"id": "252c766eea883c19dfbfe23c7e7327225d162b36",
"size": "3660",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "examples/tests/sorted_interval_list_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "18599"
},
{
"name": "C",
"bytes": "11382"
},
{
"name": "C#",
"bytes": "498888"
},
{
"name": "C++",
"bytes": "14071164"
},
{
"name": "CMake",
"bytes": "219723"
},
{
"name": "Dockerfile",
"bytes": "149476"
},
{
"name": "Java",
"bytes": "459136"
},
{
"name": "Lex",
"bytes": "2271"
},
{
"name": "Makefile",
"bytes": "207007"
},
{
"name": "Python",
"bytes": "629275"
},
{
"name": "SWIG",
"bytes": "414259"
},
{
"name": "Shell",
"bytes": "83555"
},
{
"name": "Starlark",
"bytes": "235950"
},
{
"name": "Yacc",
"bytes": "26027"
},
{
"name": "sed",
"bytes": "45"
}
],
"symlink_target": ""
}
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from fobi.base import BaseFormFieldPluginForm, get_theme
try:
    from django.forms.widgets import NumberInput
except ImportError:
    # Older Django versions have no NumberInput widget; fall back to a
    # TextInput rendered with type="number".
    from django.forms.widgets import TextInput
    class NumberInput(TextInput):
        """Compatibility shim for ``django.forms.widgets.NumberInput``."""
        input_type = 'number'
__title__ = 'fobi.contrib.plugins.form_elements.fields.date_drop_down.forms'
__author__ = 'Artur Barseghyan <artur.barseghyan@gmail.com>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('DateDropDownInputForm',)
theme = get_theme(request=None, as_instance=True)
class DateDropDownInputForm(forms.Form, BaseFormFieldPluginForm):
    """Form for ``DateDropDownInputPlugin``.

    Each form field below corresponds to an entry in
    ``plugin_data_fields`` and is persisted as the plugin's data.
    """
    # (field name, default value) pairs saved as plugin data.
    plugin_data_fields = [
        ("label", ""),
        ("name", ""),
        ("help_text", ""),
        ("year_min", ""),
        ("year_max", ""),
        ("initial", ""),
        ("input_formats", ""),
        ("required", False),
    ]
    label = forms.CharField(
        label=_("Label"),
        required=True,
        widget=forms.widgets.TextInput(
            attrs={'class': theme.form_element_html_class}
        )
    )
    name = forms.CharField(
        label=_("Name"),
        required=True,
        widget=forms.widgets.TextInput(
            attrs={'class': theme.form_element_html_class}
        )
    )
    help_text = forms.CharField(
        label=_("Help text"),
        required=False,
        widget=forms.widgets.Textarea(
            attrs={'class': theme.form_element_html_class}
        )
    )
    # Bounds for the year drop-down.
    year_min = forms.IntegerField(
        label=_("Minimum year value"),
        required=False,
        widget=NumberInput(attrs={'class': theme.form_element_html_class})
    )
    year_max = forms.IntegerField(
        label=_("Maximum year value"),
        required=False,
        widget=NumberInput(attrs={'class': theme.form_element_html_class})
    )
    initial = forms.CharField(
        label=_("Initial"),
        required=False,
        widget=forms.widgets.TextInput(
            attrs={'class': theme.form_element_html_class}
        )
    )
    input_formats = forms.CharField(
        label=_("Input formats"),
        required=False,
        widget=forms.widgets.TextInput(
            attrs={'class': theme.form_element_html_class}
        )
    )
    required = forms.BooleanField(
        label=_("Required"),
        required=False,
        widget=forms.widgets.CheckboxInput(
            attrs={'class': theme.form_element_checkbox_html_class}
        )
    )
|
{
"content_hash": "6fa0ceede8196db0deb575be51e52d61",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 76,
"avg_line_length": 28.755555555555556,
"alnum_prop": 0.5904173106646059,
"repo_name": "mansonul/events",
"id": "e03dda48aa52747897ba1cda9424632d7ab7336d",
"size": "2588",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "events/contrib/plugins/form_elements/fields/date_drop_down/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "90251"
},
{
"name": "HTML",
"bytes": "186225"
},
{
"name": "JavaScript",
"bytes": "43221"
},
{
"name": "Python",
"bytes": "804726"
},
{
"name": "Shell",
"bytes": "4196"
}
],
"symlink_target": ""
}
|
import subprocess
import sys
import typer
from typer.testing import CliRunner
from docs_src.multiple_values.multiple_options import tutorial002 as mod
runner = CliRunner()
app = typer.Typer()
app.command()(mod.main)
def test_main():
    """With no --number options the sum defaults to 0."""
    result = runner.invoke(app)
    assert result.exit_code == 0
    assert "The sum is 0" in result.output
def test_1_number():
    """A single --number is parsed as a float and summed."""
    result = runner.invoke(app, ["--number", "2"])
    assert result.exit_code == 0
    assert "The sum is 2.0" in result.output
def test_2_number():
    """Repeated --number options accumulate into one sum."""
    result = runner.invoke(app, ["--number", "2", "--number", "3", "--number", "4.5"])
    assert result.exit_code == 0
    assert "The sum is 9.5" in result.output
def test_script():
    """The tutorial module also runs as a script and prints usage help."""
    # Run via `coverage run` so the subprocess execution is measured too.
    result = subprocess.run(
        [sys.executable, "-m", "coverage", "run", mod.__file__, "--help"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        encoding="utf-8",
    )
    assert "Usage" in result.stdout
|
{
"content_hash": "91834fbbbf2f95cc6867a835fbc27aff",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 86,
"avg_line_length": 24.17948717948718,
"alnum_prop": 0.6383881230116649,
"repo_name": "tiangolo/typer",
"id": "d30357e76547e2c46943449770b21a9c1708b670",
"size": "943",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_tutorial/test_multiple_values/test_multiple_options/test_tutorial002.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "380062"
},
{
"name": "Shell",
"bytes": "2257"
}
],
"symlink_target": ""
}
|
def median(a, i, j, k):
    """Return the index (among ``i``, ``j``, ``k``) holding the median value.

    The median of three values equals their sum minus their maximum and
    minimum, which avoids explicit pairwise comparisons.  (The previous
    docstring said the *value* was returned; an index is returned.)

    :param a: Indexable sequence of numbers.
    :param i: First candidate index.
    :param j: Second candidate index.
    :param k: Third candidate index.
    :return: The index whose value is the median of the three.
    """
    ai, aj, ak = a[i], a[j], a[k]
    med_val = ai + aj + ak - max(ai, aj, ak) - min(ai, aj, ak)
    if ai == med_val:
        return i
    elif aj == med_val:
        return j
    return k


def partition(array, l, r):
    """Partition ``array[l..r]`` in place around a median-of-three pivot.

    Lomuto scheme: the pivot (median of the first, middle and last
    elements) is swapped to the end, smaller-or-equal elements are moved
    to the front, then the pivot is placed at its final sorted position.

    Time complexity: Theta(r - l); auxiliary space: O(1).
    (The previous docstring's Theta(nLogn)/O(n) claims described the
    whole sort, not this single pass.)

    :param array: Mutable sequence to partition.
    :param l: Left (inclusive) index of the range.
    :param r: Right (inclusive) index of the range.
    :return: Final index of the pivot element.
    """
    i = l - 1
    pivot_index = median(array, l, r, (l + r) // 2)
    array[pivot_index], array[r] = array[r], array[pivot_index]
    pivot = array[r]
    for j in range(l, r):
        if array[j] <= pivot:
            i += 1
            array[i], array[j] = array[j], array[i]
    i += 1
    array[r], array[i] = array[i], array[r]
    return i


def quick_sort(array, left=0, right=None):
    """Sort ``array[left..right]`` in place using quicksort.

    Generalized so that ``quick_sort(a)`` sorts the whole list (empty
    lists included); the original ``quick_sort(a, 0, len(a) - 1)`` call
    form keeps working unchanged.

    Average time O(n log n), worst case O(n^2); recursion stack is
    O(log n) on average.

    :param array: Mutable sequence to sort.
    :param left: Left (inclusive) index of the range to sort.
    :param right: Right (inclusive) index; defaults to the last index.
    """
    if right is None:
        right = len(array) - 1
    if left < right:
        q = partition(array, left, right)
        quick_sort(array, left, q - 1)
        quick_sort(array, q + 1, right)


def main():
    """Demo: sort a small list and print it."""
    a = [1, 2, 1, 2, 3, 1, 2, 2, 1]
    quick_sort(a, 0, len(a) - 1)
    print(a)


if __name__ == '__main__':
    main()
|
{
"content_hash": "0550e369deea63b68024a40682f04a3b",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 72,
"avg_line_length": 27.015384615384615,
"alnum_prop": 0.5671981776765376,
"repo_name": "aashutoshrathi/algos",
"id": "1a09e8be6476bd65552ca1d99c1134d03ed1d1cb",
"size": "1756",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "quick_sort/quick_sort.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "29003"
},
{
"name": "C#",
"bytes": "18840"
},
{
"name": "C++",
"bytes": "9335"
},
{
"name": "Go",
"bytes": "13835"
},
{
"name": "Java",
"bytes": "65826"
},
{
"name": "JavaScript",
"bytes": "24405"
},
{
"name": "Python",
"bytes": "46886"
},
{
"name": "Shell",
"bytes": "2197"
}
],
"symlink_target": ""
}
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Fill(_BaseTraceHierarchyType):
    """Fill styling for table cells (``table.cells.fill``).

    NOTE(review): this module appears to be generated from the Plotly
    schema -- prefer regenerating over hand-editing.
    """
    # class properties
    # --------------------
    _parent_path_str = "table.cells"
    _path_str = "table.cells.fill"
    _valid_props = {"color", "colorsrc"}
    # color
    # -----
    @property
    def color(self):
        """
        Sets the cell fill color. It accepts either a specific color or
        an array of colors or a 2D array of colors.
        The 'color' property is a color and may be specified as:
          - A hex string (e.g. '#ff0000')
          - An rgb/rgba string (e.g. 'rgb(255,0,0)')
          - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
          - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
          - A named CSS color:
                aliceblue, antiquewhite, aqua, aquamarine, azure,
                beige, bisque, black, blanchedalmond, blue,
                blueviolet, brown, burlywood, cadetblue,
                chartreuse, chocolate, coral, cornflowerblue,
                cornsilk, crimson, cyan, darkblue, darkcyan,
                darkgoldenrod, darkgray, darkgrey, darkgreen,
                darkkhaki, darkmagenta, darkolivegreen, darkorange,
                darkorchid, darkred, darksalmon, darkseagreen,
                darkslateblue, darkslategray, darkslategrey,
                darkturquoise, darkviolet, deeppink, deepskyblue,
                dimgray, dimgrey, dodgerblue, firebrick,
                floralwhite, forestgreen, fuchsia, gainsboro,
                ghostwhite, gold, goldenrod, gray, grey, green,
                greenyellow, honeydew, hotpink, indianred, indigo,
                ivory, khaki, lavender, lavenderblush, lawngreen,
                lemonchiffon, lightblue, lightcoral, lightcyan,
                lightgoldenrodyellow, lightgray, lightgrey,
                lightgreen, lightpink, lightsalmon, lightseagreen,
                lightskyblue, lightslategray, lightslategrey,
                lightsteelblue, lightyellow, lime, limegreen,
                linen, magenta, maroon, mediumaquamarine,
                mediumblue, mediumorchid, mediumpurple,
                mediumseagreen, mediumslateblue, mediumspringgreen,
                mediumturquoise, mediumvioletred, midnightblue,
                mintcream, mistyrose, moccasin, navajowhite, navy,
                oldlace, olive, olivedrab, orange, orangered,
                orchid, palegoldenrod, palegreen, paleturquoise,
                palevioletred, papayawhip, peachpuff, peru, pink,
                plum, powderblue, purple, red, rosybrown,
                royalblue, rebeccapurple, saddlebrown, salmon,
                sandybrown, seagreen, seashell, sienna, silver,
                skyblue, slateblue, slategray, slategrey, snow,
                springgreen, steelblue, tan, teal, thistle, tomato,
                turquoise, violet, wheat, white, whitesmoke,
                yellow, yellowgreen
          - A list or array of any of the above
        Returns
        -------
        str|numpy.ndarray
        """
        return self["color"]
    @color.setter
    def color(self, val):
        self["color"] = val
    # colorsrc
    # --------
    @property
    def colorsrc(self):
        """
        Sets the source reference on Chart Studio Cloud for color .
        The 'colorsrc' property must be specified as a string or
        as a plotly.grid_objs.Column object
        Returns
        -------
        str
        """
        return self["colorsrc"]
    @colorsrc.setter
    def colorsrc(self, val):
        self["colorsrc"] = val
    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        color
            Sets the cell fill color. It accepts either a specific
            color or an array of colors or a 2D array of colors.
        colorsrc
            Sets the source reference on Chart Studio Cloud for
            color .
        """
    def __init__(self, arg=None, color=None, colorsrc=None, **kwargs):
        """
        Construct a new Fill object
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of
            :class:`plotly.graph_objs.table.cells.Fill`
        color
            Sets the cell fill color. It accepts either a specific
            color or an array of colors or a 2D array of colors.
        colorsrc
            Sets the source reference on Chart Studio Cloud for
            color .
        Returns
        -------
        Fill
        """
        super(Fill, self).__init__("fill")
        # Internal fast path: reparent an existing object without
        # re-validating its properties.
        if "_parent" in kwargs:
            self._parent = kwargs["_parent"]
            return
        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.table.cells.Fill
constructor must be a dict or
an instance of :class:`plotly.graph_objs.table.cells.Fill`"""
            )
        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        self._validate = kwargs.pop("_validate", True)
        # Populate data dict with properties
        # ----------------------------------
        # Explicit keyword arguments take precedence over entries in arg.
        _v = arg.pop("color", None)
        _v = color if color is not None else _v
        if _v is not None:
            self["color"] = _v
        _v = arg.pop("colorsrc", None)
        _v = colorsrc if colorsrc is not None else _v
        if _v is not None:
            self["colorsrc"] = _v
        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))
        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
|
{
"content_hash": "b14cc09b44fab89988a89f94e87384bb",
"timestamp": "",
"source": "github",
"line_count": 172,
"max_line_length": 82,
"avg_line_length": 35.31976744186046,
"alnum_prop": 0.5450205761316872,
"repo_name": "plotly/python-api",
"id": "7b12bb070150e1fb2aeffaa52f9bb4f5f531100e",
"size": "6075",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/graph_objs/table/cells/_fill.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
"""Abstract tests for subclasses inheriting from meta_dataset.learners.base."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
from meta_dataset.learners.experimental import base as experimental_learner_base
from meta_dataset.models import functional_backbones
from meta_dataset.models.experimental import reparameterizable_backbones
import numpy as np
import tensorflow.compat.v1 as tf
# Dimensions of the synthetic image data used by every test in this module.
IMAGE_HEIGHT = IMAGE_WIDTH = 84
IMAGE_SHAPE = (IMAGE_HEIGHT, IMAGE_WIDTH, 3)
# Number of support and query examples per class in each synthetic episode.
SUPPORT_SIZE, QUERY_SIZE = 5, 15
NUM_CLASSES = 5
# Initialization arguments shared between non-experimental and experimental
# learners.
VALID_ABSTRACT_LEARNER_INIT_ARGS = {
    'is_training': True,
    'logit_dim': NUM_CLASSES,
    'transductive_batch_norm': True,
    'backprop_through_moments': True,
    'input_shape': IMAGE_SHAPE,
}
# Non-experimental learners additionally take a functional embedding backbone
# (a four-layer convnet with weight decay).
VALID_LEARNER_INIT_ARGS = {
    **VALID_ABSTRACT_LEARNER_INIT_ARGS, 'embedding_fn':
        functools.partial(
            functional_backbones.four_layer_convnet, weight_decay=0.01)
}
# TODO(eringrant): Right now some `Learner` (`MatchingNetworkLearner`)
# implementations NAN out if there are no labels of a class index in an episode.
# Decide on expected behavior for these cases and implement a test for that
# behavior.
# TODO(eringrant): Test feature (embedding function-less) representations.
class MockEmbedding(reparameterizable_backbones.ConvNet):
  """Fixed four-layer convolutional embedding used as a test fixture."""

  def __init__(self, keep_spatial_dims=False):
    # Four conv layers of 64 filters each, with no projection head.
    super(MockEmbedding, self).__init__(
        num_filters_per_layer=(64, 64, 64, 64),
        output_dim=None,
        keep_spatial_dims=keep_spatial_dims,
    )
class MockEpisode(
    collections.namedtuple(
        'MockEpisode', 'support_images, query_images, '
        'support_labels, query_labels')):
  """Lightweight stand-in for an episode of support and query data."""

  @property
  def way(self):
    # Every synthetic episode in these tests contains all classes.
    return NUM_CLASSES

  @property
  def labels(self):
    # Loss and accuracy are evaluated on the query set.
    return self.query_labels

  @property
  def onehot_support_labels(self):
    return tf.one_hot(self.support_labels, NUM_CLASSES)

  @property
  def onehot_query_labels(self):
    return tf.one_hot(self.query_labels, NUM_CLASSES)

  @property
  def onehot_labels(self):
    return self.onehot_query_labels
class MockBatch(collections.namedtuple('MockBatch', 'images, labels')):
  """Lightweight stand-in for a batch of labeled images."""

  @property
  def onehot_labels(self):
    return tf.one_hot(self.labels, NUM_CLASSES)

  @property
  def way(self):
    # Every synthetic batch draws labels from all classes.
    return NUM_CLASSES
class TestLearner(tf.test.TestCase):
  """Abstract test harness shared by all `Learner` test subclasses.

  Subclasses specialize these tests for a concrete `Learner` by providing
  `learner_cls`, `learner_kwargs`, and `random_data`.
  """

  # Number of optimization steps used by the overfitting test.
  convergence_test_iterations = 500

  @property
  def learner_cls(self):
    raise NotImplementedError('The test subclass must provide a `Learner`.')

  @property
  def learner_kwargs(self):
    raise NotImplementedError(
        'The test subclass must provide keyword arguments to initialize a '
        '`Learner`.')

  def set_up_learner(self):
    """Returns `(data, learner)`: random test data and a built learner."""
    learner_kwargs = self.learner_kwargs
    # Experimental learners take an embedding object rather than a
    # functional backbone.
    if issubclass(self.learner_cls,
                  experimental_learner_base.ExperimentalLearner):
      learner_kwargs['embedding_fn'] = MockEmbedding()
    data = self.random_data()
    learner = self.learner_cls(**learner_kwargs)
    learner.build()
    return data, learner

  def testForwardPass(self):
    """Assert that the learner obeys the API for `forward_pass`."""
    data, learner = self.set_up_learner()
    outputs = learner.forward_pass(data)
    # Outputs are expected to be a rank-2 (examples x logits) tensor.
    self.assertEqual(len(outputs.get_shape().as_list()), 2)
    with self.session():
      self.evaluate(tf.compat.v1.local_variables_initializer())
      self.evaluate(tf.compat.v1.global_variables_initializer())
      self.evaluate(outputs)

  def testComputeLoss(self):
    """Assert that the learner obeys the API for `compute_loss`."""
    data, learner = self.set_up_learner()
    # Perfect "predictions" (the onehot labels themselves) must be accepted.
    loss = learner.compute_loss(
        data.onehot_labels,
        tf.cast(data.onehot_labels, tf.float32),
    )
    with self.session():
      self.evaluate(tf.compat.v1.local_variables_initializer())
      self.evaluate(tf.compat.v1.global_variables_initializer())
      self.evaluate(loss)

  def testComputeAccuracy(self):
    """Assert that the learner obeys the API for `compute_accuracy`."""
    data, learner = self.set_up_learner()
    accuracy = learner.compute_accuracy(
        data.onehot_labels,
        tf.cast(data.onehot_labels, tf.float32),
    )
    with self.session():
      self.evaluate(tf.compat.v1.local_variables_initializer())
      self.evaluate(tf.compat.v1.global_variables_initializer())
      self.evaluate(accuracy)

  def testLearnerInitRandomAccuracy(self):
    """Assert that the learner's performance wrt accuracy is initially random."""
    data, learner = self.set_up_learner()
    outputs = learner.forward_pass(data)
    accuracy = learner.compute_accuracy(data.onehot_labels, outputs)
    with self.session():
      self.evaluate(tf.compat.v1.local_variables_initializer())
      self.evaluate(tf.compat.v1.global_variables_initializer())
      accuracy_value = self.evaluate(accuracy).mean()
      epsilon = 0.08  # Allow 8% deviation from random.
      self.assertLess(accuracy_value, 1. / data.way + epsilon)

  def testLearnerImprovement(self):
    """Assert that the learner's objective monotonically improves."""
    data, learner = self.set_up_learner()
    # Small learning rate for improvement check.
    optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.001)
    outputs = learner.forward_pass(data)
    loss = learner.compute_loss(data.onehot_labels, outputs)
    train_op = optimizer.minimize(loss)
    with self.session():
      self.evaluate(tf.compat.v1.local_variables_initializer())
      self.evaluate(tf.compat.v1.global_variables_initializer())
      loss_value_prev = np.inf
      # Allow three steps for gradient descent to stabilize.
      for _ in range(3):
        _, loss_value = self.evaluate((train_op, loss))
        self.assertLess(loss_value.mean(), loss_value_prev)
        # BUG FIX: `loss_value_prev` was previously never updated, so every
        # iteration compared against np.inf and the monotonicity check was
        # vacuous after the first step. Track the preceding step's loss.
        loss_value_prev = loss_value.mean()

  def testLearnerConvergence(self):
    """Assert that the unregularized learner overfits a single batch."""
    data, learner = self.set_up_learner()
    optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=0.001)
    outputs = learner.forward_pass(data)
    loss = learner.compute_loss(data.onehot_labels, outputs)
    train_op = optimizer.minimize(loss)
    with self.session():
      self.evaluate(tf.compat.v1.local_variables_initializer())
      self.evaluate(tf.compat.v1.global_variables_initializer())
      for _ in range(self.convergence_test_iterations):
        _, loss_value = self.evaluate((train_op, loss))
      # TODO(eringrant): Parameterize this convergence check value per
      # `Learner`; 10.0 is too high a loss value for most `Learner`s.
      self.assertLess(loss_value.mean(), 10.0)
class TestBatchLearner(TestLearner):
  """Specializes `TestLearner` with randomly generated batch data."""

  def random_data(self):
    """Returns a `MockBatch` of random images with balanced random labels."""
    num_examples = NUM_CLASSES * QUERY_SIZE
    images = np.random.uniform(size=(num_examples, *IMAGE_SHAPE))
    labels = np.random.permutation(list(np.arange(NUM_CLASSES)) * QUERY_SIZE)
    return MockBatch(
        tf.cast(images, tf.float32),
        tf.cast(labels, tf.int32),
    )
class TestEpisodicLearner(TestLearner):
  """Specializes `TestLearner` with randomly generated episodic data."""

  def random_data(self):
    """Returns a `MockEpisode` with random support and query sets."""
    support_images = np.random.uniform(
        size=(NUM_CLASSES * SUPPORT_SIZE, *IMAGE_SHAPE))
    query_images = np.random.uniform(
        size=(NUM_CLASSES * QUERY_SIZE, *IMAGE_SHAPE))
    support_labels = np.random.permutation(
        list(np.arange(NUM_CLASSES)) * SUPPORT_SIZE)
    query_labels = np.random.permutation(
        list(np.arange(NUM_CLASSES)) * QUERY_SIZE)
    return MockEpisode(
        tf.cast(support_images, tf.float32),
        tf.cast(query_images, tf.float32),
        tf.cast(support_labels, tf.int32),
        tf.cast(query_labels, tf.int32),
    )
|
{
"content_hash": "fe134912a3fbc99225b1f7c7a08a55d4",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 81,
"avg_line_length": 33.861607142857146,
"alnum_prop": 0.6868820039551747,
"repo_name": "google-research/meta-dataset",
"id": "1370c8a316aec859f549129ff39cd8ef1f24ba38",
"size": "8209",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "meta_dataset/learners/base_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "391025"
},
{
"name": "Python",
"bytes": "1055970"
}
],
"symlink_target": ""
}
|
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from datetime import datetime
from pprint import pformat
from six import iteritems
class BuildEnvironmentPage(object):
    """Swagger model describing one page of build environments.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self):
        """Initializes an empty BuildEnvironmentPage.

        `swagger_types` maps each attribute name to its type, and
        `attribute_map` maps each attribute name to its JSON key.
        """
        self.swagger_types = {
            'page_index': 'int',
            'page_size': 'int',
            'total_pages': 'int',
            'content': 'list[BuildEnvironmentRest]'
        }

        self.attribute_map = {
            'page_index': 'pageIndex',
            'page_size': 'pageSize',
            'total_pages': 'totalPages',
            'content': 'content'
        }

        # All model attributes start out unset.
        self._page_index = None
        self._page_size = None
        self._total_pages = None
        self._content = None

    @property
    def page_index(self):
        """int: The page_index of this BuildEnvironmentPage."""
        return self._page_index

    @page_index.setter
    def page_index(self, page_index):
        """Sets the page_index of this BuildEnvironmentPage."""
        self._page_index = page_index

    @property
    def page_size(self):
        """int: The page_size of this BuildEnvironmentPage."""
        return self._page_size

    @page_size.setter
    def page_size(self, page_size):
        """Sets the page_size of this BuildEnvironmentPage."""
        self._page_size = page_size

    @property
    def total_pages(self):
        """int: The total_pages of this BuildEnvironmentPage."""
        return self._total_pages

    @total_pages.setter
    def total_pages(self, total_pages):
        """Sets the total_pages of this BuildEnvironmentPage."""
        self._total_pages = total_pages

    @property
    def content(self):
        """list[BuildEnvironmentRest]: The content of this BuildEnvironmentPage."""
        return self._content

    @content.setter
    def content(self, content):
        """Sets the content of this BuildEnvironmentPage."""
        self._content = content

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize any nested models in the list.
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, datetime):
                # Datetimes are rendered as their ISO-format date component.
                result[attr] = str(value.date())
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()
|
{
"content_hash": "19b3a8e5b975bb86a375f22c7b809751",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 77,
"avg_line_length": 26.90960451977401,
"alnum_prop": 0.5761074952760865,
"repo_name": "jianajavier/pnc-cli",
"id": "cb483f8dbf7a2dee9ee217decd1c855c81b6fa40",
"size": "4780",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pnc_cli/swagger_client/models/build_environment_page.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "44313"
},
{
"name": "Python",
"bytes": "1333465"
},
{
"name": "Shell",
"bytes": "338"
}
],
"symlink_target": ""
}
|
from azure.cli.core.help_files import helps
# pylint: disable=line-too-long, too-many-lines
helps['network'] = """
type: group
short-summary: Manage Azure Network resources.
"""
helps['network dns'] = """
type: group
short-summary: Manage DNS domains in Azure.
"""
# region Application Gateway
helps['network application-gateway'] = """
type: group
short-summary: Manage application-level routing and load balancing services.
"""
helps['network application-gateway create'] = """
type: command
short-summary: Create an application gateway.
"""
helps['network application-gateway delete'] = """
type: command
short-summary: Delete an application gateway.
"""
helps['network application-gateway list'] = """
type: command
short-summary: List application gateways.
"""
helps['network application-gateway show'] = """
type: command
short-summary: Get the details of an application gateway.
"""
helps['network application-gateway start'] = """
type: command
short-summary: Start an application gateway.
"""
helps['network application-gateway stop'] = """
type: command
short-summary: Stop an application gateway.
"""
helps['network application-gateway update'] = """
type: command
short-summary: Update an application gateway.
"""
helps['network application-gateway show-backend-health'] = """
type: command
short-summary: Get information on the backend health of an application gateway.
"""
helps['network application-gateway wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of the application gateway is met.
"""
# endregion
# region Application Gateway Address Pool
helps['network application-gateway address-pool'] = """
type: group
short-summary: Manage backend address pools for an application gateway.
"""
helps['network application-gateway address-pool create'] = """
type: command
short-summary: Create a backend address pool.
examples:
- name: Create an address pool with two endpoints.
text: |
az network application-gateway address-pool create \\
-g MyResourceGroup --gateway-name MyApplicationGateway \\
-n MyAddressPool --servers 10.0.0.4 10.0.0.5
"""
helps['network application-gateway address-pool delete'] = """
type: command
short-summary: Delete a backend address pool.
"""
helps['network application-gateway address-pool list'] = """
type: command
short-summary: List backend address pools.
"""
helps['network application-gateway address-pool show'] = """
type: command
short-summary: Get the details for a backend address pool.
"""
helps['network application-gateway address-pool update'] = """
type: command
short-summary: Update a backend address pool.
"""
# endregion
# region Application Gateway Authorization Cert
helps['network application-gateway auth-cert'] = """
type: group
short-summary: Manage authorization certificates for an application gateway.
"""
helps['network application-gateway auth-cert create'] = """
type: command
short-summary: Create an authorization certificate.
"""
helps['network application-gateway auth-cert delete'] = """
type: command
short-summary: Delete an authorization certificate.
"""
helps['network application-gateway auth-cert list'] = """
type: command
short-summary: List authorization certificates.
"""
helps['network application-gateway auth-cert show'] = """
type: command
short-summary: Get the details of an authorization certificate.
"""
helps['network application-gateway auth-cert update'] = """
type: command
short-summary: Update an authorization certificate.
"""
# endregion
# region Application Gateway Frontend IP
helps['network application-gateway frontend-ip'] = """
type: group
short-summary: Manage frontend IP addresses for an application gateway.
"""
helps['network application-gateway frontend-ip create'] = """
type: command
short-summary: Create a frontend IP address.
"""
helps['network application-gateway frontend-ip delete'] = """
type: command
short-summary: Delete a frontend IP address.
"""
helps['network application-gateway frontend-ip list'] = """
type: command
short-summary: List frontend IP addresses.
"""
helps['network application-gateway frontend-ip show'] = """
type: command
short-summary: Get the details of a frontend IP address.
"""
helps['network application-gateway frontend-ip update'] = """
type: command
short-summary: Update a frontend IP address.
"""
# endregion
# region Application Gateway frontend port
helps['network application-gateway frontend-port'] = """
type: group
short-summary: Manage frontend ports for an application gateway.
"""
helps['network application-gateway frontend-port create'] = """
type: command
short-summary: Create a frontend port.
"""
helps['network application-gateway frontend-port delete'] = """
type: command
short-summary: Delete a frontend port.
"""
helps['network application-gateway frontend-port list'] = """
type: command
short-summary: List frontend ports.
"""
helps['network application-gateway frontend-port show'] = """
type: command
short-summary: Get the details for a frontend port.
"""
helps['network application-gateway frontend-port update'] = """
type: command
short-summary: Update a frontend port.
"""
# endregion
# region Application Gateway HTTP listener
helps['network application-gateway http-listener'] = """
type: group
short-summary: Manage HTTP listeners for an application gateway.
"""
helps['network application-gateway http-listener create'] = """
type: command
short-summary: Create an HTTP listener.
"""
helps['network application-gateway http-listener delete'] = """
type: command
short-summary: Delete an HTTP listener.
"""
helps['network application-gateway http-listener list'] = """
type: command
short-summary: List HTTP listeners.
"""
helps['network application-gateway http-listener show'] = """
type: command
short-summary: Get the details for an HTTP listener.
"""
helps['network application-gateway http-listener update'] = """
type: command
short-summary: Update an HTTP listener.
"""
# endregion
# region Application Gateway HTTP settings
helps['network application-gateway http-settings'] = """
type: group
short-summary: Manage HTTP settings for an application gateway.
"""
helps['network application-gateway http-settings create'] = """
type: command
short-summary: Create HTTP settings.
"""
helps['network application-gateway http-settings delete'] = """
type: command
short-summary: Delete HTTP settings.
"""
helps['network application-gateway http-settings list'] = """
type: command
short-summary: List HTTP settings.
"""
helps['network application-gateway http-settings show'] = """
type: command
short-summary: Get the details of a gateway's HTTP settings.
"""
helps['network application-gateway http-settings update'] = """
type: command
short-summary: Update HTTP settings.
"""
# endregion
# region Application Gateway probe
helps['network application-gateway probe'] = """
type: group
short-summary: Manage probes to gather and evaluate information on a gateway.
"""
helps['network application-gateway probe create'] = """
type: command
short-summary: Create a probe.
examples:
- name: Create an application gateway probe.
text: >
az network application-gateway probe create -g MyResourceGroup -n MyProbe \\
--protocol https --gateway-name MyApplicationGateway \\
--host 127.0.0.1 --path /path/to/probe
"""
helps['network application-gateway probe delete'] = """
type: command
short-summary: Delete a probe.
"""
helps['network application-gateway probe list'] = """
type: command
short-summary: List probes.
"""
helps['network application-gateway probe show'] = """
type: command
short-summary: Get the details of a probe.
"""
helps['network application-gateway probe update'] = """
type: command
short-summary: Update a probe.
"""
# endregion
# region Application Gateway redirect configuration
helps['network application-gateway redirect-config'] = """
type: group
short-summary: Manage redirect configurations.
"""
helps['network application-gateway redirect-config create'] = """
type: command
short-summary: Create a redirect configuration.
"""
helps['network application-gateway redirect-config delete'] = """
type: command
short-summary: Delete a redirect configuration.
"""
helps['network application-gateway redirect-config list'] = """
type: command
short-summary: List redirect configurations.
"""
helps['network application-gateway redirect-config show'] = """
type: command
short-summary: Get the details of a redirect configuration.
"""
helps['network application-gateway redirect-config update'] = """
type: command
short-summary: Update a redirect configuration.
"""
# endregion
# region Application Gateway rules
helps['network application-gateway rule'] = """
type: group
short-summary: Evaluate probe information and define routing rules.
"""
helps['network application-gateway rule create'] = """
type: command
short-summary: Create a rule.
"""
helps['network application-gateway rule delete'] = """
type: command
short-summary: Delete a rule.
"""
helps['network application-gateway rule list'] = """
type: command
short-summary: List rules.
"""
helps['network application-gateway rule show'] = """
type: command
short-summary: Get the details of a rule.
"""
helps['network application-gateway rule update'] = """
type: command
short-summary: Update a rule.
"""
# endregion
# region Application Gateway SSL Certs
helps['network application-gateway ssl-cert'] = """
type: group
short-summary: Manage SSL certificates for an application gateway.
"""
helps['network application-gateway ssl-cert create'] = """
type: command
short-summary: Upload an SSL certificate.
"""
helps['network application-gateway ssl-cert delete'] = """
type: command
short-summary: Delete an SSL certificate.
"""
helps['network application-gateway ssl-cert list'] = """
type: command
short-summary: List SSL certificates.
"""
helps['network application-gateway ssl-cert show'] = """
type: command
short-summary: Get the details of an SSL certificate.
"""
helps['network application-gateway ssl-cert update'] = """
type: command
short-summary: Update an SSL certificate.
"""
# endregion
# region Application Gateway SSL Policy
helps['network application-gateway ssl-policy'] = """
type: group
short-summary: Manage the SSL policy for an application gateway.
"""
helps['network application-gateway ssl-policy set'] = """
type: command
short-summary: Update or clear SSL policy settings.
parameters:
- name: --cipher-suites
populator-commands:
- az network application-gateway ssl-policy list-options
- name: --disabled-ssl-protocols
populator-commands:
- az network application-gateway ssl-policy list-options
- name: --min-protocol-version
populator-commands:
- az network application-gateway ssl-policy list-options
"""
helps['network application-gateway ssl-policy show'] = """
type: command
short-summary: Get the details of a gateway's SSL policy settings.
"""
helps['network application-gateway ssl-policy predefined'] = """
type: group
short-summary: Get information on predefined SSL policies.
"""
# endregion
# region Application Gateway URL path map
helps['network application-gateway url-path-map'] = """
type: group
short-summary: Manage URL path maps for an application gateway.
"""
helps['network application-gateway url-path-map create'] = """
type: command
short-summary: Create a URL path map.
long-summary: >
The map must be created with at least one rule. This command requires the creation of the
first rule at the time the map is created. To create additional rules using different
address pools or HTTP settings, use the `url-path-map rule create` command. To update the
rule created using this command, use the `url-path-map rule update` command.
"""
helps['network application-gateway url-path-map delete'] = """
type: command
short-summary: Delete a URL path map.
"""
helps['network application-gateway url-path-map list'] = """
type: command
short-summary: List URL path maps.
"""
helps['network application-gateway url-path-map show'] = """
type: command
short-summary: Get the details of a URL path map.
"""
helps['network application-gateway url-path-map update'] = """
type: command
short-summary: Update a URL path map.
"""
# endregion
# region Application Gateway URL path map rules
helps['network application-gateway url-path-map rule'] = """
type: group
short-summary: Manage the rules for a URL path map.
"""
helps['network application-gateway url-path-map rule create'] = """
type: command
short-summary: Create a rule for a URL path map.
"""
helps['network application-gateway url-path-map rule delete'] = """
type: command
short-summary: Delete a rule for a URL path map.
"""
# endregion
# region Application Gateway WAF Config
helps['network application-gateway waf-config'] = """
type: group
short-summary: Configure the settings of a web application firewall.
long-summary: This command is only applicable to application gateways with an SKU type of WAF.
"""
helps['network application-gateway waf-config set'] = """
type: command
short-summary: Update the firewall configuration of a web application.
parameters:
- name: --rule-set-type
short-summary: Rule set type.
populator-commands:
- az network application-gateway waf-config list-rule-sets
- name: --rule-set-version
short-summary: Rule set version.
populator-commands:
- az network application-gateway waf-config list-rule-sets
- name: --disabled-rule-groups
short-summary: Space separated list of rule groups to disable. To disable individual rules, use `--disabled-rules`.
populator-commands:
- az network application-gateway waf-config list-rule-sets
- name: --disabled-rules
short-summary: Space separated list of rule IDs to disable.
populator-commands:
- az network application-gateway waf-config list-rule-sets
examples:
- name: Disable rules for validation of request body parsing and SQL injection.
text: >
az network application-gateway waf-config set -g MyResourceGroup -n MyGatewayName \\
--enabled true --rule-set-type OWASP --rule-set-version 3.0 \\
--disabled-rules 920130 920140 \\
--disabled-rule-groups REQUEST-942-APPLICATION-ATTACK-SQLI
"""
helps['network application-gateway waf-config show'] = """
type: command
short-summary: Get the firewall configuration of a web application.
"""
helps['network application-gateway waf-config list-rule-sets'] = """
type: command
short-summary: (PREVIEW) Get information on available WAF rule sets, rule groups, and rule IDs.
parameters:
- name: --group
short-summary: >
List rules for the specified rule group. Use '*' to list rules for all groups.
Omit to suppress listing individual rules.
- name: --type
short-summary: Rule set type to list. Omit to list all types.
- name: --version
short-summary: Rule set version to list. Omit to list all versions.
examples:
- name: List available rule groups in OWASP type rule sets.
text: >
az network application-gateway waf-config list-rule-sets --type OWASP
- name: List available rules in the OWASP 3.0 rule set.
text: >
az network application-gateway waf-config list-rule-sets --group * --type OWASP --version 3.0
- name: List available rules in the 'crs_35_bad_robots' rule group.
text: >
az network application-gateway waf-config list-rule-sets --group crs_35_bad_robots
"""
# endregion
# region DNS record-set
helps['network dns record-set'] = """
type: group
short-summary: Manage DNS records and record sets.
"""
# endregion
# region DNS records
# Generate the boilerplate help entries shared by every DNS record type.
for record in ['a', 'aaaa', 'cname', 'mx', 'ns', 'ptr', 'srv', 'txt']:
    helps['network dns record-set {}'.format(record)] = """
        type: group
        short-summary: Manage DNS {} records.
    """.format(record.upper())

for record in ['a', 'aaaa', 'cname', 'mx', 'ns', 'ptr', 'srv', 'txt']:
    indef_article = 'an' if record.startswith('a') else 'a'

    helps['network dns record-set {0} remove-record'.format(record)] = """
        type: command
        short-summary: Remove {1} {0} record from its record set.
        long-summary: >
            By default, if the last record in a set is removed, the record set is deleted. To retain the empty record set, include --keep-empty-record-set.
    """.format(record.upper(), indef_article)

    helps['network dns record-set {} create'.format(record)] = """
        type: command
        short-summary: Create an empty {} record set.
    """.format(record.upper())

    helps['network dns record-set {} delete'.format(record)] = """
        type: command
        short-summary: Delete {1} {0} record set and all associated records.
    """.format(record.upper(), indef_article)

    helps['network dns record-set {} list'.format(record)] = """
        type: command
        short-summary: List all {} record sets in a zone.
    """.format(record.upper())

    helps['network dns record-set {} show'.format(record)] = """
        type: command
        short-summary: Get the details for {1} {0} record set.
        examples:
            - name: Show information about {1} {0} record set.
              text: >
                az network dns record-set {2} show -g MyResourceGroup -n MyRecordSet -z www.mysite.com
    """.format(record.upper(), indef_article, record)

# CNAME record sets use `set-record` instead of `update`/`add-record`, so they
# are excluded here.
# BUG FIX: this loop previously iterated over `item` while its body referenced
# `record` (left holding 'txt' from the loop above), so the update/add-record
# help entries were only ever registered for TXT records, repeatedly
# overwriting the same two keys.
for record in ['a', 'aaaa', 'mx', 'ns', 'ptr', 'srv', 'txt']:
    indef_article = 'an' if record.startswith('a') else 'a'

    helps['network dns record-set {} update'.format(record)] = """
        type: command
        short-summary: Update {} {} record set.
    """.format(indef_article, record.upper())

    helps['network dns record-set {} add-record'.format(record)] = """
        type: command
        short-summary: Add {} {} record.
    """.format(indef_article, record.upper())
helps['network dns record-set cname set-record'] = """
type: command
short-summary: Set the value of the CNAME record.
"""
helps['network dns record-set soa'] = """
type: group
short-summary: Manage a DNS zone's SOA record.
"""
helps['network dns record-set soa show'] = """
type: command
short-summary: Get the details of a DNS zone's SOA record.
"""
helps['network dns record-set soa update'] = """
type: command
short-summary: Update properties of a zone's SOA record.
"""
helps['network dns record-set list'] = """
type: command
short-summary: List all record sets within a DNS zone.
examples:
- name: List all "@" record sets within this zone.
text: >
az network dns record-set list -g MyResourceGroup -z www.mysite.com --query "[?name=='@']"
"""
# endregion
# region DNS Zone
helps['network dns zone'] = """
type: group
short-summary: Manage DNS zones.
"""
helps['network dns zone create'] = """
type: command
short-summary: Create a DNS zone.
parameters:
- name: --if-none-match
short-summary: Only create a DNS zone if one doesn't exist that matches the given name.
examples:
- name: Create a DNS zone using a fully qualified domain name.
text: >
az network dns zone create -g MyResourceGroup -n www.mysite.com
"""
helps['network dns zone delete'] = """
type: command
short-summary: Delete a DNS zone and all associated records.
"""
helps['network dns zone export'] = """
type: command
short-summary: Export a DNS zone as a DNS zone file.
"""
helps['network dns zone import'] = """
type: command
short-summary: Create a DNS zone using a DNS zone file.
examples:
- name: Import a local zone file into a DNS zone resource.
text: >
az network dns zone import -g MyResourceGroup -n MyZone -f /path/to/zone/file
"""
helps['network dns zone list'] = """
type: command
short-summary: List DNS zones.
"""
helps['network dns zone show'] = """
type: command
short-summary: Get a DNS zone's parameters. Does not show DNS records within the zone.
"""
helps['network dns zone update'] = """
type: command
short-summary: Update a DNS zone's properties. Does not modify DNS records within the zone.
parameters:
- name: --if-match
short-summary: Update only if the resource with the same ETAG exists.
"""
# endregion
# region Express Route
helps['network express-route'] = """
type: group
short-summary: Manage dedicated private network fiber connections to Azure.
"""
helps['network express-route create'] = """
type: command
short-summary: Create an ExpressRoute circuit.
parameters:
- name: --bandwidth
populator-commands:
- az network express-route list-service-providers
- name: --peering-location
populator-commands:
- az network express-route list-service-providers
- name: --provider
populator-commands:
- az network express-route list-service-providers
"""
helps['network express-route delete'] = """
type: command
short-summary: Delete an ExpressRoute circuit.
"""
helps['network express-route get-stats'] = """
type: command
short-summary: Get the statistics for an ExpressRoute circuit.
"""
helps['network express-route list'] = """
type: command
short-summary: List ExpressRoute circuits.
"""
helps['network express-route list-arp-tables'] = """
type: command
short-summary: List the currently advertised address resolution protocol (ARP) table of an ExpressRoute circuit.
"""
helps['network express-route list-route-tables'] = """
type: command
short-summary: List the currently advertised route tables of an ExpressRoute circuit.
"""
helps['network express-route show'] = """
type: command
short-summary: Get the details for an ExpressRoute circuit.
"""
helps['network express-route update'] = """
type: command
short-summary: Update settings of an ExpressRoute circuit.
"""
helps['network express-route list-service-providers'] = """
type: command
short-summary: List available ExpressRoute service providers.
"""
helps['network express-route wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of the ExpressRoute is met.
"""
# endregion
# region Express Route auth
helps['network express-route auth'] = """
type: group
short-summary: Manage authentication of an ExpressRoute circuit.
"""
helps['network express-route auth create'] = """
type: command
short-summary: Create an authorization setting.
"""
helps['network express-route auth delete'] = """
type: command
short-summary: Delete an authorization setting.
"""
helps['network express-route auth list'] = """
type: command
short-summary: List authorization settings.
"""
helps['network express-route auth show'] = """
type: command
short-summary: Get the details of an authorization setting.
"""
# endregion
# region Express Route peering
helps['network express-route peering'] = """
type: group
short-summary: Manage ExpressRoute peering.
"""
helps['network express-route peering create'] = """
type: command
short-summary: Create peering settings.
examples:
- name: Create Microsoft Peering settings with IPv4 configuration.
text: az network express-route peering create -g myrg --circuit-name circuit1 --peering-type MicrosoftPeering --peer-asn 10002 --vlan-id 103 --primary-peer-subnet 101.0.0.0/30 --secondary-peer-subnet 102.0.0.0/30 --advertised-public-prefixes 101.0.0.0/30
- name: Add IPv6 Microsoft Peering settings to existing IPv4 config.
text: az network express-route peering update -g myrg --circuit-name circuit1 --peering-type MicrosoftPeering --ip-version ipv6 --primary-peer-subnet 2002:db00::/126 --secondary-peer-subnet 2003:db00::/126 --advertised-public-prefixes 2002:db00::/126
min_profile: latest
"""
helps['network express-route peering delete'] = """
type: command
short-summary: Delete peering settings.
"""
helps['network express-route peering list'] = """
type: command
short-summary: List peering settings.
"""
helps['network express-route peering show'] = """
type: command
short-summary: Get the details for an express route peering.
"""
helps['network express-route peering update'] = """
type: command
short-summary: Update peering settings.
examples:
- name: Add IPv6 Microsoft Peering settings to existing IPv4 config.
text: az network express-route peering update -g myrg --circuit-name circuit1 --peering-type MicrosoftPeering --ip-version ipv6 --primary-peer-subnet 2002:db00::/126 --secondary-peer-subnet 2003:db00::/126 --advertised-public-prefixes 2002:db00::/126
min_profile: latest
"""
# endregion
# region Load Balancer
helps['network lb'] = """
type: group
short-summary: Manage and configure load balancers.
"""
helps['network lb create'] = """
type: command
short-summary: Create a load balancer.
examples:
- name: Create a basic load balancer.
text: >
az network lb create -g MyResourceGroup -n MyLb
- name: Create a load balancer on a specific virtual network and subnet.
text: >
az network lb create -g MyResourceGroup -n MyLb --vnet-name MyVnet --subnet MySubnet
"""
helps['network lb delete'] = """
type: command
short-summary: Delete a load balancer.
"""
helps['network lb list'] = """
type: command
short-summary: List load balancers.
"""
helps['network lb show'] = """
type: command
short-summary: Get the details for a load balancer.
"""
helps['network lb update'] = """
type: command
short-summary: Update a load balancer.
"""
# endregion
# region Load Balancer address pool
helps['network lb address-pool'] = """
type: group
short-summary: Manage backend address pools for a load balancer.
"""
helps['network lb address-pool create'] = """
type: command
short-summary: Create a backend address pool.
"""
helps['network lb address-pool delete'] = """
type: command
short-summary: Delete a backend address pool.
"""
helps['network lb address-pool list'] = """
type: command
short-summary: List backend address pools.
"""
helps['network lb address-pool show'] = """
type: command
short-summary: Get the details for a backend address pool.
"""
# endregion
# region Load Balancer frontend IP
helps['network lb frontend-ip'] = """
type: group
short-summary: Manage frontend IP addresses for a load balancer.
"""
helps['network lb frontend-ip create'] = """
type: command
short-summary: Create a frontend IP address.
"""
helps['network lb frontend-ip delete'] = """
type: command
short-summary: Delete a frontend IP address.
"""
helps['network lb frontend-ip list'] = """
type: command
short-summary: List frontend IP addresses.
"""
helps['network lb frontend-ip show'] = """
type: command
short-summary: Get the details of a frontend IP address.
"""
helps['network lb frontend-ip update'] = """
type: command
short-summary: Update a frontend IP address.
"""
# endregion
# region Load Balancer inbound NAT pool
helps['network lb inbound-nat-pool'] = """
type: group
short-summary: Manage inbound NAT address pools for a load balancer.
"""
helps['network lb inbound-nat-pool create'] = """
type: command
short-summary: Create an inbound NAT address pool.
"""
helps['network lb inbound-nat-pool delete'] = """
type: command
short-summary: Delete an inbound NAT address pool.
"""
helps['network lb inbound-nat-pool list'] = """
type: command
short-summary: List inbound NAT address pools.
"""
helps['network lb inbound-nat-pool show'] = """
type: command
short-summary: Get the details for an inbound NAT address pool.
"""
helps['network lb inbound-nat-pool update'] = """
type: command
short-summary: Update an inbound NAT address pool.
"""
# endregion
# region Load Balancer inbound NAT rule
helps['network lb inbound-nat-rule'] = """
type: group
short-summary: Manage inbound NAT rules for a load balancer.
"""
helps['network lb inbound-nat-rule create'] = """
type: command
short-summary: Create an inbound NAT rule.
examples:
- name: Create a basic inbound NAT rule for port 80.
text: >
az network lb inbound-nat-rule create -g MyResourceGroup --lb-name MyLb -n MyNatRule \\
--protocol Tcp --frontend-port 80 --backend-port 80
"""
helps['network lb inbound-nat-rule delete'] = """
type: command
short-summary: Delete an inbound NAT rule.
"""
helps['network lb inbound-nat-rule list'] = """
type: command
short-summary: List inbound NAT rules.
"""
helps['network lb inbound-nat-rule show'] = """
type: command
short-summary: Get the details for an inbound NAT rule.
"""
helps['network lb inbound-nat-rule update'] = """
type: command
short-summary: Update an inbound NAT rule.
"""
# endregion
# region Load Balancer probe
helps['network lb probe'] = """
type: group
short-summary: Evaluate probe information and define routing rules.
"""
helps['network lb probe create'] = """
type: command
short-summary: Create a probe.
examples:
- name: Create a probe on a load balancer over HTTP and port 80.
text: >
az network lb probe create -g MyResourceGroup --lb-name MyLb -n MyProbe \\
--protocol http --port 80 --path /
"""
helps['network lb probe delete'] = """
type: command
short-summary: Delete a probe.
"""
helps['network lb probe list'] = """
type: command
short-summary: List probes.
"""
helps['network lb probe show'] = """
type: command
short-summary: Get the details for a probe.
"""
helps['network lb probe update'] = """
type: command
short-summary: Update a probe.
"""
# endregion
# region Load Balancer rule
helps['network lb rule'] = """
type: group
short-summary: Manage load balancing rules.
"""
helps['network lb rule create'] = """
type: command
short-summary: Create a load balancing rule.
examples:
- name: >
Create a basic load balancing rule that assigns a front-facing IP
configuration and port to a backend address pool and port.
text: >
az network lb rule create -g MyResourceGroup --lb-name MyLb -n MyLbRule \\
--protocol Tcp --frontend-port 80 --backend-port 80
"""
helps['network lb rule delete'] = """
type: command
short-summary: Delete a load balancing rule.
"""
helps['network lb rule list'] = """
type: command
short-summary: List load balancing rules.
"""
helps['network lb rule show'] = """
type: command
short-summary: Get the details of a load balancing rule.
"""
helps['network lb rule update'] = """
type: command
short-summary: Update a load balancing rule.
"""
# endregion
# region Local Gateway
helps['network local-gateway'] = """
type: group
short-summary: Manage local gateways.
"""
helps['network local-gateway create'] = """
type: command
short-summary: Create a local VPN gateway.
"""
helps['network local-gateway delete'] = """
type: command
short-summary: Delete a local VPN gateway.
"""
helps['network local-gateway list'] = """
type: command
short-summary: List local VPN gateways.
"""
helps['network local-gateway show'] = """
type: command
short-summary: Get the details for a local VPN gateway.
"""
helps['network local-gateway update'] = """
type: command
short-summary: Update a local VPN gateway.
"""
# endregion
# region Network Interface (NIC)
helps['network nic'] = """
type: group
short-summary: Manage network interfaces.
"""
helps['network nic show-effective-route-table'] = """
    type: command
    short-summary: Show the effective route table applied to a network interface.
"""
helps['network nic list-effective-nsg'] = """
    type: command
    short-summary: List all effective network security groups applied to a network interface.
"""
helps['network nic create'] = """
type: command
short-summary: Create a network interface.
examples:
- name: Create a network interface for a specified subnet on a specified virtual network.
text: >
az network nic create -g MyResourceGroup --vnet-name MyVnet --subnet MySubnet -n MyNic
- name: Create a network interface for a specified subnet on a virtual network which allows
IP forwarding subject to a network security group.
text: >
az network nic create -g MyResourceGroup --vnet-name MyVnet --subnet MySubnet -n MyNic \\
--ip-forwarding --network-security-group MyNsg
"""
helps['network nic delete'] = """
type: command
short-summary: Delete a network interface.
"""
helps['network nic list'] = """
type: command
short-summary: List network interfaces.
long-summary: |
Does not list network interfaces attached to VMs in VM scale sets. Use 'az vmss nic list' or 'az vmss nic list-vm-nics' to display that information.
examples:
- name: List all NICs by internal DNS suffix.
text: >
az network nic list --query "[?dnsSettings.internalDomainNameSuffix==`<dns_suffix>`]"
"""
helps['network nic show'] = """
type: command
short-summary: Get the details of a network interface.
examples:
- name: Get the internal domain name suffix for a NIC.
text: >
az network nic show -g MyResourceGroup -n MyNic --query "dnsSettings.internalDomainNameSuffix"
"""
helps['network nic update'] = """
type: command
short-summary: Update a network interface.
examples:
- name: Update a network interface to use a different network security group.
text: >
az network nic update -g MyResourceGroup -n MyNic --network-security-group MyNsg
"""
# endregion
# region NIC ip-config
helps['network nic ip-config'] = """
type: group
short-summary: Manage IP configurations of a network interface.
"""
helps['network nic ip-config create'] = """
type: command
short-summary: Create an IP configuration.
long-summary: >
You must have the Microsoft.Network/AllowMultipleIpConfigurationsPerNic feature enabled for your subscription.
Only one configuration may be designated as the primary IP configuration per NIC, using the `--make-primary` flag.
"""
helps['network nic ip-config delete'] = """
type: command
short-summary: Delete an IP configuration.
long-summary: A NIC must have at least one IP configuration.
"""
helps['network nic ip-config list'] = """
type: command
short-summary: List IP configurations.
"""
helps['network nic ip-config show'] = """
type: command
short-summary: Get the details of an IP configuration.
"""
helps['network nic ip-config update'] = """
type: command
short-summary: Update an IP configuration.
examples:
- name: Update a NIC to use a new private IP address.
text: >
az network nic ip-config update -g MyResourceGroup --nic-name MyNic -n MyIpConfig --private-ip-address 10.0.0.9
- name: Make an IP configuration the default for the supplied NIC.
text: >
az network nic ip-config update -g MyResourceGroup --nic-name MyNic -n MyIpConfig --make-primary
"""
# endregion
# region NIC IP config address pool
helps['network nic ip-config address-pool'] = """
type: group
short-summary: Manage backend address pools in an IP configuration.
"""
helps['network nic ip-config address-pool add'] = """
type: command
short-summary: Add a backend address pool.
"""
helps['network nic ip-config address-pool remove'] = """
type: command
short-summary: Remove a backend address pool.
"""
# endregion
# region NIC IP config inbound NAT rules
helps['network nic ip-config inbound-nat-rule'] = """
type: group
short-summary: Manage inbound NAT rules for an IP configuration.
"""
helps['network nic ip-config inbound-nat-rule add'] = """
type: command
short-summary: Add an inbound NAT rule.
"""
helps['network nic ip-config inbound-nat-rule remove'] = """
type: command
short-summary: Remove an inbound NAT rule.
"""
# endregion
# region Network Security Group (NSG)
helps['network nsg'] = """
type: group
short-summary: Manage Azure Network Security Groups (NSGs).
"""
helps['network nsg rule'] = """
type: group
short-summary: Manage network security group rules.
"""
helps['network nsg create'] = """
type: command
short-summary: Create a network security group.
examples:
- name: Create an NSG with some tags.
text: >
az network nsg create -g MyResourceGroup -n MyNsg --tags super_secure no_80 no_22
"""
helps['network nsg list'] = """
type: command
short-summary: List network security groups.
examples:
- name: List all NSGs in the 'westus' region.
text: >
az network nsg list --query "[?location=='westus']"
"""
helps['network nsg show'] = """
type: command
short-summary: Get information about a network security group.
examples:
- name: Get basic information about an NSG.
text: >
az network nsg show -g MyResourceGroup -n MyNsg
- name: Get basic information about all default NSG rules with "Allow" access.
text: >
az network nsg show -g MyResourceGroup -n MyNsg --query "defaultSecurityRules[?access=='Allow']"
"""
helps['network nsg rule create'] = """
    type: command
    short-summary: Create a network security group rule.
    examples:
        - name: Create a basic "Allow" NSG rule with the highest priority.
          text: >
            az network nsg rule create -g MyResourceGroup --nsg-name MyNsg -n MyNsgRule --priority 100
        - name: Create a "Deny" rule over TCP for a specific IP address range with the lowest priority.
          text: >
            az network nsg rule create -g MyResourceGroup --nsg-name MyNsg -n MyNsgRule --priority 4096 \\
                --source-address-prefixes 208.130.28.0/24 --source-port-ranges 80 \\
                --destination-address-prefixes * --destination-port-ranges 80 --access Deny \\
                --protocol Tcp --description "Deny from specific IP address range on 80."
"""
helps['network nsg rule delete'] = """
type: command
short-summary: Delete a network security group rule.
"""
helps['network nsg rule list'] = """
type: command
short-summary: List all rules in a network security group.
"""
helps['network nsg rule show'] = """
type: command
short-summary: Get the details for a network security group rule.
"""
helps['network nsg rule update'] = """
type: command
short-summary: Update a network security group rule.
examples:
- name: Update an NSG rule with a new wildcard destination address prefix.
text: >
az network nsg rule update -g MyResourceGroup --nsg-name MyNsg -n MyNsgRule --destination-address-prefix *
"""
# endregion
# region Public IP
helps['network public-ip'] = """
type: group
short-summary: Manage public IP addresses.
"""
helps['network public-ip create'] = """
type: command
short-summary: Create a public IP address.
examples:
- name: Create a basic public IP resource.
text: >
az network public-ip create -g MyResourceGroup -n MyIp
- name: Create a static public IP resource for a DNS name label.
text: >
az network public-ip create -g MyResourceGroup -n MyIp --dns-name MyLabel --allocation-method Static
"""
helps['network public-ip delete'] = """
type: command
short-summary: Delete a public IP address.
"""
helps['network public-ip list'] = """
type: command
short-summary: List public IP addresses.
examples:
- name: List all public IPs in a resource group.
text: >
az network public-ip list -g MyResourceGroup
- name: List all public IPs for a domain name label.
text: >
az network public-ip list -g MyResourceGroup --query "[?dnsSettings.domainNameLabel=='MyLabel']"
"""
helps['network public-ip show'] = """
type: command
short-summary: Get the details of a public IP address.
examples:
- name: Get information about a public IP resource.
text: >
az network public-ip show -g MyResourceGroup -n MyIp
- name: Get the FQDN and IP address for a public IP resource.
text: >
az network public-ip show -g MyResourceGroup -n MyIp --query "{ fqdn:dnsSettings.fqdn, address: ipAddress }"
"""
helps['network public-ip update'] = """
type: command
short-summary: Update a public IP address.
examples:
- name: Update a public IP resource with a DNS name label and static allocation.
text: >
az network public-ip update -g MyResourceGroup -n MyIp --dns-name MyLabel --allocation-method Static
"""
# endregion
# region Route Table
helps['network route-table'] = """
type: group
short-summary: Manage route tables.
"""
helps['network route-table create'] = """
type: command
short-summary: Create a route table.
"""
helps['network route-table delete'] = """
type: command
short-summary: Delete a route table.
"""
helps['network route-table list'] = """
type: command
short-summary: List route tables.
"""
helps['network route-table show'] = """
type: command
short-summary: Get the details for a route table.
"""
helps['network route-table update'] = """
type: command
short-summary: Update a route table.
"""
helps['network route-table route'] = """
type: group
short-summary: Manage routes in a route table.
"""
helps['network route-table route create'] = """
type: command
short-summary: Create a route in a route table.
"""
helps['network route-table route delete'] = """
type: command
short-summary: Delete a route from a route table.
"""
helps['network route-table route list'] = """
type: command
short-summary: List routes in a route table.
"""
helps['network route-table route show'] = """
type: command
short-summary: Get the details of a route in a route table.
"""
helps['network route-table route update'] = """
type: command
short-summary: Update a route in a route table.
"""
# endregion
# region Route Filter
helps['network route-filter'] = """
type: group
short-summary: (PREVIEW) Manage route filters.
"""
helps['network route-filter create'] = """
type: command
short-summary: Create a route filter.
"""
helps['network route-filter delete'] = """
type: command
short-summary: Delete a route filter.
"""
helps['network route-filter list'] = """
type: command
short-summary: List route filters.
"""
helps['network route-filter show'] = """
type: command
short-summary: Get the details of a route filter.
"""
helps['network route-filter update'] = """
type: command
short-summary: Update a route filter.
"""
helps['network route-filter rule'] = """
type: group
short-summary: (PREVIEW) Manage rules in a route filter.
"""
helps['network route-filter rule create'] = """
type: command
short-summary: Create a rule in a route filter.
parameters:
- name: --communities
short-summary: Space separated list of border gateway protocol (BGP) community values to filter on.
populator-commands:
- az network route-filter rule list-service-communities
"""
helps['network route-filter rule delete'] = """
type: command
short-summary: Delete a rule from a route filter.
"""
helps['network route-filter rule list'] = """
type: command
short-summary: List rules in a route filter.
"""
helps['network route-filter rule show'] = """
type: command
short-summary: Get the details of a rule in a route filter.
"""
helps['network route-filter rule update'] = """
type: command
short-summary: Update a rule in a route filter.
"""
# endregion
# region Traffic Manager
helps['network traffic-manager'] = """
type: group
short-summary: Manage the routing of incoming traffic.
"""
helps['network traffic-manager endpoint'] = """
type: group
short-summary: Manage traffic manager end points.
"""
helps['network traffic-manager profile'] = """
type: group
short-summary: Manage traffic manager profiles.
"""
helps['network traffic-manager profile check-dns'] = """
type: command
short-summary: Check the availability of a relative DNS name.
"""
helps['network traffic-manager profile create'] = """
type: command
short-summary: Create a profile.
"""
helps['network traffic-manager profile delete'] = """
type: command
short-summary: Delete a profile.
"""
helps['network traffic-manager profile list'] = """
type: command
short-summary: List profiles.
"""
helps['network traffic-manager profile show'] = """
type: command
short-summary: Get the details for a profile.
"""
helps['network traffic-manager profile update'] = """
type: command
short-summary: Update a profile.
"""
helps['network traffic-manager endpoint create'] = """
type: command
short-summary: Create an endpoint.
parameters:
- name: --geo-mapping
short-summary: Space separated list of country/region codes mapped to this endpoint when using the 'Geographic' routing method.
populator-commands:
- az network traffic-manager endpoint show-geographic-hierarchy
"""
helps['network traffic-manager endpoint delete'] = """
type: command
short-summary: Delete an endpoint.
"""
helps['network traffic-manager endpoint list'] = """
type: command
short-summary: List endpoints.
"""
helps['network traffic-manager endpoint show'] = """
type: command
short-summary: Get the details for an endpoint.
"""
helps['network traffic-manager endpoint update'] = """
type: command
short-summary: Update an endpoint.
"""
# endregion
# region Virtual Network (VNET)
helps['network vnet'] = """
type: group
short-summary: Manage Azure Virtual Networks.
"""
helps['network vnet check-ip-address'] = """
type: command
short-summary: Check if a private IP address is available for use.
"""
helps['network vnet create'] = """
type: command
short-summary: Create a virtual network.
long-summary: You may also create a subnet at the same time by specifying a subnet name and (optionally) an address prefix.
examples:
- name: Create a basic virtual network.
text: >
az network vnet create -g MyResourceGroup -n MyVnet
- name: Create a virtual network with a specific address prefix and one subnet.
text: >
az network vnet create -g MyResourceGroup -n MyVnet --address-prefix 10.0.0.0/16 --subnet-name MySubnet --subnet-prefix 10.0.0.0/24
"""
helps['network vnet delete'] = """
type: command
short-summary: Delete a virtual network.
"""
helps['network vnet list'] = """
type: command
short-summary: List virtual networks.
examples:
- name: List virtual networks which specify a certain address prefix.
text: >
az network vnet list --query "[?contains(addressSpace.addressPrefixes, '10.0.0.0/16')]"
"""
helps['network vnet show'] = """
type: command
short-summary: Get the details of a virtual network.
"""
helps['network vnet update'] = """
type: command
short-summary: Update a virtual network.
"""
# endregion
# region VNet Subnet
helps['network vnet subnet'] = """
type: group
short-summary: Manage subnets in an Azure Virtual Network.
"""
helps['network vnet subnet create'] = """
type: command
short-summary: Create a subnet and associate an existing NSG and route table.
parameters:
- name: --service-endpoints
short-summary: Space separated list of services allowed private access to this subnet.
populator-commands:
- az network vnet list-endpoint-services
examples:
- name: Create new subnet attached to an NSG with a custom route table.
text: >
az network vnet subnet create -g MyResourceGroup --vnet-name MyVnet -n MySubnet \\
--address-prefix 10.0.0.0/24 --network-security-group MyNsg --route-table MyRouteTable
"""
helps['network vnet subnet delete'] = """
type: command
short-summary: Delete a subnet.
"""
helps['network vnet subnet list'] = """
type: command
short-summary: List subnets.
"""
helps['network vnet subnet show'] = """
type: command
short-summary: Show details of a subnet.
"""
helps['network vnet subnet update'] = """
type: command
short-summary: Update a subnet.
parameters:
- name: --service-endpoints
short-summary: Space separated list of services allowed private access to this subnet.
populator-commands:
- az network vnet list-endpoint-services
"""
# endregion
# region Virtual Network (VNet) Peering
helps['network vnet peering'] = """
type: group
short-summary: Manage peering connections between Azure Virtual Networks.
"""
helps['network vnet peering create'] = """
type: command
short-summary: Create a peering.
"""
helps['network vnet peering delete'] = """
type: command
short-summary: Delete a peering.
"""
helps['network vnet peering list'] = """
type: command
short-summary: List peerings.
"""
helps['network vnet peering show'] = """
type: command
short-summary: Get the details of a peering.
"""
helps['network vnet peering update'] = """
type: command
short-summary: Update a peering.
"""
# endregion
# region VPN Connection
helps['network vpn-connection'] = """
type: group
short-summary: Manage VPN connections.
"""
helps['network vpn-connection create'] = """
type: command
short-summary: Create a VPN connection.
"""
helps['network vpn-connection delete'] = """
type: command
short-summary: Delete a VPN connection.
"""
helps['network vpn-connection list'] = """
type: command
short-summary: List VPN connections.
"""
helps['network vpn-connection show'] = """
type: command
short-summary: Get the details of a VPN connection.
"""
helps['network vpn-connection update'] = """
type: command
short-summary: Update a VPN connection.
"""
# endregion
# region VPN Connection shared key
helps['network vpn-connection shared-key'] = """
type: group
short-summary: Manage VPN shared keys.
"""
helps['network vpn-connection shared-key reset'] = """
type: command
short-summary: Reset a VPN connection shared key.
"""
helps['network vpn-connection shared-key show'] = """
type: command
short-summary: Retrieve a VPN connection shared key.
"""
helps['network vpn-connection shared-key update'] = """
type: command
short-summary: Update a VPN connection shared key.
"""
# endregion
# region VPN Connection IPSec Policy
helps['network vpn-connection ipsec-policy'] = """
type: group
short-summary: Manage VPN connection IPSec policies.
"""
helps['network vpn-connection ipsec-policy add'] = """
type: command
short-summary: Add a VPN connection IPSec policy.
"""
helps['network vpn-connection ipsec-policy list'] = """
type: command
short-summary: List IPSec policies associated with a VPN connection.
"""
helps['network vpn-connection ipsec-policy clear'] = """
type: command
short-summary: Delete all IPSec policies on a VPN connection.
"""
# endregion
# region VNet Gateway
helps['network vnet-gateway'] = """
type: group
short-summary: Use an Azure Virtual Network Gateway to establish secure, cross-premises connectivity.
"""
helps['network vnet-gateway create'] = """
type: command
short-summary: Create a virtual network gateway.
examples:
- name: Create a basic virtual network gateway and associate with a public IP address.
text: >
az network vnet-gateway create -g MyResourceGroup --vnet MyVnet -n MyVnetGateway --public-ip-address MyIp
"""
helps['network vnet-gateway delete'] = """
type: command
short-summary: Delete a virtual network gateway.
"""
helps['network vnet-gateway list'] = """
type: command
short-summary: List virtual network gateways.
"""
helps['network vnet-gateway reset'] = """
type: command
short-summary: Reset a virtual network gateway.
"""
helps['network vnet-gateway show'] = """
type: command
short-summary: Get the details for a virtual network gateway.
"""
helps['network vnet-gateway update'] = """
type: command
short-summary: Update a virtual network gateway.
"""
helps['network vnet-gateway wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of the virtual network gateway is met.
"""
helps['network vnet-gateway vpn-client'] = """
type: group
short-summary: Download a configured client with which to connect to a VPN.
"""
helps['network vnet-gateway vpn-client generate'] = """
    type: command
    short-summary: Generate a binary client file that can be used to connect to a VPN.
    long-summary: The legacy implementation returns an EXE, while the latest implementation returns a ZIP file.
"""
# endregion
# region VNet Gateway Revoke Cert
helps['network vnet-gateway revoked-cert'] = """
type: group
short-summary: Manage revoked certificates in a virtual network gateway.
"""
helps['network vnet-gateway revoked-cert create'] = """
type: command
short-summary: Revoke a certificate.
"""
helps['network vnet-gateway revoked-cert delete'] = """
type: command
short-summary: Delete a revoked certificate.
"""
# endregion
# region VNet Gateway Root Cert
helps['network vnet-gateway root-cert'] = """
type: group
short-summary: Manage root certificates for a virtual network gateway.
"""
helps['network vnet-gateway root-cert create'] = """
type: command
short-summary: Upload a root certificate.
"""
helps['network vnet-gateway root-cert delete'] = """
type: command
short-summary: Delete a root certificate.
"""
# endregion
# region Network Watcher
helps['network watcher'] = """
type: group
short-summary: (PREVIEW) Manage the Azure Network Watcher.
"""
helps['network watcher list'] = """
type: command
short-summary: List Network Watchers.
"""
helps['network watcher configure'] = """
    type: command
    short-summary: Configure the Network Watcher service for different regions.
    parameters:
        - name: --enabled
          short-summary: Enabled status of Network Watcher in the specified regions.
        - name: --locations -l
          short-summary: Space separated list of locations to configure.
        - name: --resource-group -g
          short-summary: Name of resource group. Required when enabling new regions.
          long-summary: When a previously disabled region is enabled to use Network Watcher, a
            Network Watcher resource will be created in this resource group.
"""
helps['network watcher troubleshooting'] = """
type: group
short-summary: (PREVIEW) Manage Network Watcher troubleshooting sessions.
"""
helps['network watcher troubleshooting start'] = """
type: command
short-summary: Troubleshoot issues with VPN connections or gateway connectivity.
parameters:
- name: --resource-type -t
short-summary: The type of target resource to troubleshoot, if resource ID is not specified.
- name: --storage-account
short-summary: Name or ID of the storage account in which to store the troubleshooting results.
- name: --storage-path
short-summary: Fully qualified URI to the storage blob container in which to store the troubleshooting results.
"""
helps['network watcher troubleshooting show'] = """
type: command
short-summary: Get the results of the last troubleshooting operation.
"""
helps['network watcher test-ip-flow'] = """
type: command
short-summary: Test IP flow to/from a VM given the currently configured network security group rules.
long-summary: Requires that Network Watcher is enabled for the region in which the VM is located.
parameters:
- name: --local
short-summary: The private IPv4 address for the VM's NIC and the port of the packet in
X.X.X.X:PORT format. '*' can be used for port when direction is outbound.
- name: --remote
short-summary: The IPv4 address and port for the remote side of the packet
X.X.X.X:PORT format. '*' can be used for port when direction is inbound.
- name: --direction
short-summary: Direction of the packet relative to the VM.
- name: --protocol
short-summary: Protocol to test.
"""
helps['network watcher test-connectivity'] = """
type: command
short-summary: Test if a direct TCP connection can be established between a Virtual Machine and a given endpoint.
parameters:
- name: --source-resource
short-summary: Name or ID of the resource from which to originate traffic.
long-summary: Currently only Virtual Machines are supported.
- name: --source-port
short-summary: Port number from which to originate traffic.
- name: --dest-resource
short-summary: Name or ID of the resource to receive traffic.
long-summary: Currently only Virtual Machines are supported.
- name: --dest-port
short-summary: Port number on which to receive traffic.
- name: --dest-address
short-summary: The IP address or URI at which to receive traffic.
"""
helps['network watcher show-next-hop'] = """
type: command
short-summary: Get information on the 'next hop' for a VM.
long-summary: Requires that Network Watcher is enabled for the region in which the VM is located.
"""
helps['network watcher show-security-group-view'] = """
type: command
short-summary: Get detailed security information on a VM for the currently configured network security group.
"""
helps['network watcher show-topology'] = """
type: command
short-summary: Get the network topology of a resource group.
parameters:
- name: --resource-group -g
short-summary: The name of the target resource group to perform topology on.
- name: --location -l
short-summary: Location. Defaults to the location of the target resource group.
long-summary: Topology information is only shown for resources within the target
resource group that are within the specified region.
"""
helps['network watcher packet-capture'] = """
type: group
short-summary: (PREVIEW) Manage packet capture sessions on VMs.
long-summary: |
These commands require that both Azure Network Watcher is enabled for the
VM's region and that AzureNetworkWatcherExtension is enabled on the VM.
"""
helps['network watcher packet-capture create'] = """
type: command
short-summary: Create and start a packet capture session.
parameters:
- name: --capture-limit
short-summary: The maximum size in bytes of the capture output.
- name: --capture-size
short-summary: Number of bytes captured per packet. Excess bytes are truncated.
- name: --time-limit
short-summary: Maximum duration of the capture session in seconds.
- name: --storage-account
short-summary: Name or ID of a storage account to save the packet capture to.
- name: --storage-path
short-summary: Fully qualified URI of an existing storage container in which to store the capture file.
long-summary: If not specified, the container 'network-watcher-logs' will be
created if it does not exist and the capture file will be stored there.
- name: --file-path
short-summary:
Local path on the targeted VM at which to save the packet capture. For Linux VMs, the
path must start with /var/captures.
- name: --vm
short-summary: Name or ID of the VM to target.
- name: --filters
short-summary: JSON encoded list of packet filters. Use `@<file path>` to load from file.
"""
helps['network watcher flow-log'] = """
type: group
short-summary: (PREVIEW) Manage network security group flow logging.
"""
helps['network watcher flow-log configure'] = """
type: command
short-summary: Configure flow logging on a network security group.
parameters:
- name: --nsg
short-summary: Name or ID of the Network Security Group to target.
- name: --enabled
short-summary: Enable logging.
- name: --retention
short-summary: Number of days to retain logs.
- name: --storage-account
short-summary: Name or ID of the storage account in which to save the flow logs.
"""
helps['network watcher flow-log show'] = """
type: command
short-summary: Get the flow log configuration for a network security group.
"""
# endregion
helps['network vnet list-service-endpoints'] = """
type: command
short-summary: List which services support VNET service tunneling for a given region.
"""
|
{
"content_hash": "7467d789bbe1048d9f114bf55a1ae741",
"timestamp": "",
"source": "github",
"line_count": 2112,
"max_line_length": 264,
"avg_line_length": 29.820075757575758,
"alnum_prop": 0.6670212765957447,
"repo_name": "QingChenmsft/azure-cli",
"id": "ca74fa21fa30a319243f92544b73e47002e76fa5",
"size": "63326",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/command_modules/azure-cli-network/azure/cli/command_modules/network/_help.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11279"
},
{
"name": "C++",
"bytes": "275"
},
{
"name": "JavaScript",
"bytes": "380"
},
{
"name": "Python",
"bytes": "5372365"
},
{
"name": "Shell",
"bytes": "25445"
}
],
"symlink_target": ""
}
|
import boto3
import sys
import os

# Translate French markdown recipe files into a target language using AWS
# Translate. Usage: translate_recipes.py <inputfile> <output_language_iso>
defaultRegion = 'eu-west-1'
translate = boto3.client(service_name='translate', region_name=defaultRegion, use_ssl=True)

if len(sys.argv) >= 3:
    inputFile = sys.argv[1]
    outputLanguage = sys.argv[2]
    # NOTE(review): this replaces *every* occurrence of 'fr' in the input
    # path, not just the language directory component -- a filename such as
    # 'frites.md' would be mangled. Behavior kept as-is so existing output
    # paths are preserved; verify input naming before relying on it.
    outputFile = inputFile.replace('fr', outputLanguage)
    if not os.path.isdir(outputLanguage):
        # If the output directory structure doesn't exist, create it.
        # This should only happen on the first run; otherwise go straight
        # to translating.
        os.makedirs(outputLanguage)
    with open(inputFile, 'r') as text:
        data = text.read()
    outputTranslation = translate.translate_text(
        Text=data, SourceLanguageCode="fr", TargetLanguageCode=outputLanguage)
    print('translating: ' + inputFile)
    print('from: ' + outputTranslation.get('SourceLanguageCode'))
    print('to: ' + outputTranslation.get('TargetLanguageCode'))
    # Fix markdown inconsistencies: AWS Translate inserts spaces that break
    # image/link syntax ('! [' and '] (').
    outputTranslationData = outputTranslation.get('TranslatedText')
    outputTranslationData = outputTranslationData.replace('! [', '![')
    outputTranslationData = outputTranslationData.replace('] (', '](')
    # Bug fix: write through a context manager so the file handle is closed
    # even if the write fails (also stops shadowing the 'file' builtin).
    with open(outputFile, 'w') as outHandle:
        outHandle.write(outputTranslationData)
else:
    # Bug fix: the original usage string was missing the space before
    # 'inputfile', producing e.g. 'usage : script.pyinputfile ...'.
    print('usage : ' + sys.argv[0] + ' inputfile output_language_iso')
|
{
"content_hash": "ee8971730f8b6ed337c669b81c2b0234",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 116,
"avg_line_length": 35.02857142857143,
"alnum_prop": 0.7414355628058727,
"repo_name": "anamorph/recettes",
"id": "3bab554c6def4650e8983dff53007b3f1bc91e9e",
"size": "1251",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "translate_recipes.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1251"
},
{
"name": "Shell",
"bytes": "646"
}
],
"symlink_target": ""
}
|
"""Decorator utility functions."""
from __future__ import annotations

from collections.abc import Callable, Hashable
from typing import Any, TypeVar

_KT = TypeVar("_KT", bound=Hashable)
_VT = TypeVar("_VT", bound=Callable[..., Any])


class Registry(dict[_KT, _VT]):
    """Mapping of names to registered callables."""

    def register(self, name: _KT) -> Callable[[_VT], _VT]:
        """Return a decorator that stores the decorated callable under *name*."""

        def _store(item: _VT) -> _VT:
            # Record the callable, then hand it back unchanged so the
            # decorated definition keeps its original binding.
            self[name] = item
            return item

        return _store
|
{
"content_hash": "db26d60962f9929757e813ffb96bc2f6",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 69,
"avg_line_length": 27.136363636363637,
"alnum_prop": 0.6130653266331658,
"repo_name": "GenericStudent/home-assistant",
"id": "c648f6f1caba4b5f1283d445f83c8af495e2bdea",
"size": "597",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "homeassistant/util/decorator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
}
|
import warnings
from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import autodiscover_modules, import_string
from django.utils.translation import gettext as _
from cms.app_base import CMSApp
from cms.exceptions import AppAlreadyRegistered
from cms.utils.conf import get_cms_setting
class ApphookPool:
    """Registry of all CMS apphook applications known to the system."""

    def __init__(self):
        self._reset()

    def clear(self):
        # TODO: remove this method, it's Python, we don't need it.
        self._reset()

    def _reset(self):
        # Shared initial state for __init__() and clear().
        self.apphooks = []
        self.apps = {}
        self.discovered = False

    def register(self, app=None, discovering_apps=False):
        """Register *app*; usable directly or as a class decorator."""
        if app is None:
            # Invoked as ``@apphook_pool.register()`` -- defer registration
            # until the class itself is supplied.
            def deferred(app):
                return self.register(app, discovering_apps)
            return deferred
        if self.apphooks and not discovering_apps:
            # An explicit CMS_APPHOOKS setting takes precedence over
            # autodiscovered registrations.
            return app
        if app.__name__ in self.apps:
            raise AppAlreadyRegistered(
                'A CMS application %r is already registered' % app.__name__)
        if not issubclass(app, CMSApp):
            raise ImproperlyConfigured(
                'CMS application must inherit from cms.app_base.CMSApp, '
                'but %r does not' % app.__name__)
        if not hasattr(app, 'menus') and hasattr(app, 'menu'):
            warnings.warn("You define a 'menu' attribute on CMS application "
                          "%r, but the 'menus' attribute is empty, "
                          "did you make a typo?" % app.__name__)
        self.apps[app.__name__] = app()
        return app

    def discover_apps(self):
        """Populate the pool from settings or via module autodiscovery."""
        self.apphooks = get_cms_setting('APPHOOKS')
        if self.apphooks:
            # Explicit configuration: import and register each dotted path.
            for path in self.apphooks:
                cls = import_string(path)
                try:
                    self.register(cls, discovering_apps=True)
                except AppAlreadyRegistered:
                    pass
        else:
            autodiscover_modules('cms_apps')
        self.discovered = True

    def get_apphooks(self):
        """Return (class name, display name) pairs for apps exposing URLs."""
        if not self.discovered:
            self.discover_apps()
        hooks = [
            (app_name, app.name)
            for app_name, app in self.apps.items()
            if app.get_urls()
        ]
        # Order by the human-readable application name.
        return sorted(hooks, key=lambda hook: hook[1])

    def get_apphook(self, app_name):
        """Look up an apphook instance by class name (or, deprecated, urlconf)."""
        if not self.discovered:
            self.discover_apps()
        try:
            return self.apps[app_name]
        except KeyError:
            pass
        # deprecated: fall back to apphooks registered in the db with their
        # urlconf name instead of the apphook class name.
        for app in self.apps.values():
            if app_name in app.get_urls():
                return app
        warnings.warn(_('No registered apphook "%r" found') % app_name)
        return None


apphook_pool = ApphookPool()
|
{
"content_hash": "c547752df1b318bc1d41196411ccd73d",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 77,
"avg_line_length": 29.81188118811881,
"alnum_prop": 0.5765526403188309,
"repo_name": "divio/django-cms",
"id": "c3c8f4aefac99aef666552521a16dd20daa98a55",
"size": "3011",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "cms/apphook_pool.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "132972"
},
{
"name": "HTML",
"bytes": "201508"
},
{
"name": "JavaScript",
"bytes": "1238070"
},
{
"name": "Python",
"bytes": "2360702"
},
{
"name": "Shell",
"bytes": "447"
}
],
"symlink_target": ""
}
|
import numpy as np
import scipy.io as sio
import re, os, sys
import matplotlib.pyplot as plt
from random import randint
from sklearn.linear_model import SGDClassifier
import sklearn
import pywt
printing = False
def main():
    """Train per-channel SGD classifiers on wavelet features of EEG data
    and write a Kaggle submission file (Python 2 script)."""
    # Root of the Kaggle seizure-prediction dataset on the local machine.
    dataPath = '/media/david/linux_media/kaggle/eeg/'
    # The triple-quoted block below is dead exploratory code kept as an
    # oversized comment (loads one file and plots its wavelet decomposition).
    """
data = sio.loadmat(dataPath + 'train_1/1_2_0.mat')
print '1_1000_0'
print 'Samples per Segment: {0}'.format(data['dataStruct']['nSamplesSegment'])
print 'iEEG sampling rate: {0}'.format(data['dataStruct']['iEEGsamplingRate'])
print 'channelIndices: {0}'.format(data['dataStruct']['channelIndices'])
print 'sequence: {0}'.format(data['dataStruct']['sequence'])
print 'shape: {0}'.format(data['dataStruct']['data'][0][0].shape)
x = np.transpose(data['dataStruct']['data'][0][0])
coeffs = pywt.wavedec(x[0], 'bior3.1', level=3)
ca, cd3, cd2, cd1 = coeffs
f, ax = plt.subplots(4, sharex=True)
ax[0].plot(ca)
ax[1].plot(cd3)
ax[2].plot(cd2)
ax[3].plot(cd1)
plt.show()
#arma_mod = sm.tsa.ARIMA(x[c], (10,2,5)).fit(disp=0, start_params=)
#print 'Channel {0} {1}'.format(c, arma_mod.summary())
    """
    #open output file
    submission = open("submission.csv", "w")
    submission.write("File,Class\n")
    n_level = 3 #3-level wavelet decomposition
    n_channels = 16
    # NOTE(review): n_clfs allocates one classifier per channel *and* level
    # (64 total), but the loops below only ever index clfs[0..15] -- one per
    # channel, fed the concatenated coefficients of all levels. The remaining
    # classifiers are never trained. Confirm intent.
    n_clfs = n_channels*(n_level+1)
    #4 because range only goes from n to m-1
    for dc in xrange(1,4):
        print "Running dataset: {0}".format(dc)
        # Fresh classifiers per dataset (patients are trained independently).
        clfs = []
        for _ in xrange(n_clfs):
            clfs.append(SGDClassifier(loss='log'))
        print "Training "
        folder = '{0}train_{1}/'.format(dataPath, dc)
        num_files = len(os.listdir(folder))
        fidx = 0
        for filename in os.listdir(folder):
            # Known-bad file in the dataset; skip it.
            if filename=='1_45_1.mat':
                continue
            data = sio.loadmat('{0}{1}'.format(folder, filename))
            metadata = re.split(r'[_.]+',filename)
            cl = np.array([int(metadata[2])]) # the class is the 3rd number
            # Transpose to (channels, samples).
            x = np.transpose(data['dataStruct']['data'][0][0])
            for chan in xrange(n_channels):
                # 3-level biorthogonal wavelet decomposition per channel;
                # the concatenated coefficients form the feature vector.
                coeffs = pywt.wavedec(x[chan], 'bior3.1', level=n_level)
                clfs[chan].partial_fit(np.concatenate(coeffs).reshape(1,-1), cl, classes=[0,1])
            fidx+=1
            # Simple in-place progress indicator.
            sys.stdout.write("\r{0}/{1}".format(fidx, num_files))
            sys.stdout.flush()
        print "\nTesting"
        folder = '{0}test_{1}/'.format(dataPath, dc)
        num_files = len(os.listdir(folder))
        fidx = 0
        for filename in os.listdir(folder):
            data = sio.loadmat('{0}{1}'.format(folder, filename))
            metadata = re.split(r'[_.]+',filename)
            x = np.transpose(data['dataStruct']['data'][0][0])
            # Majority vote across the per-channel classifiers.
            likelihood = 0
            for chan in xrange(n_channels):
                #TODO: Figure out way to make this arbitrary
                coeffs = pywt.wavedec(x[chan], 'bior3.1', level=n_level)
                likelihood += clfs[chan].predict(np.concatenate(coeffs).reshape(1,-1))
            # Predict 1 when at least half the channel classifiers say 1.
            p = 0 if float(likelihood)/n_channels < .5 else 1
            submission.write("{0},{1}\n".format(filename, p))
            fidx+=1
            sys.stdout.write("\r{0}/{1}".format(fidx, num_files))
            sys.stdout.flush()
        print ""
    submission.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
{
"content_hash": "458697ea8fac7c0b6b55359c0b60b391",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 95,
"avg_line_length": 34.755102040816325,
"alnum_prop": 0.5704638872577804,
"repo_name": "drubinstein/kaggle_seizure",
"id": "6d67340ed1ccc4df998c94810c5ae7ed27ed7326",
"size": "3642",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run_wavelets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27822"
}
],
"symlink_target": ""
}
|
from Queue import Queue # Threadsafe queue for threads to use
from collections import Counter # To count stuff for us
import datetime # Because datetime printing is hard
import time # Should be obvious
import sys # Get system info
import threading # Should be obvious
import json # Also obvious
# FB API wrapper ("pip install facepy")
import facepy
import props
from utils import Color, notify_mac, log
__author__ = 'Henri Sweers'
appeared = dict()
# Junk method used for testing
def test():
    """Smoke-test helper invoked via the 'test' CLI argument; just logs."""
    log("Test")
# Export method, receives a jsonObj of style {"label": dictionary}
def exportData(jsonDict):
    """Stub hook for exporting the aggregated stats.

    Despite the name, ``jsonDict`` is a JSON-encoded *string* as produced by
    json.dumps() in count_group_likes() -- TODO confirm intended type.
    """
    # Do stuff
    print "Exported"
    # print jsonDict
# Thread class. Each thread gets all the data from a certain date range
class RequestThread(threading.Thread):
    """Worker thread that fetches all posts and comments for one date range.

    Each thread runs one FQL query against the Facebook Graph API,
    accumulates per-user/per-post/per-day Counter objects, and pushes them
    onto the shared (threadsafe) queue as a single dict when done.
    """

    def __init__(self, queue, apikey, query, curr_time, num_weeks):
        # Super class
        threading.Thread.__init__(self)
        # Queue object given from outside. Queues are threadsafe
        self.queue = queue
        # Graph object for our call, authenticated with a token
        self.graph = facepy.GraphAPI(apikey)
        # FQL query with specified date range
        self.input_query = query
        # Counters. t-total likes, p-post likes, c-comment likes,
        # tp-posts per day, tc-comments per day, cc-discussion activity.
        self.tcounter = Counter()
        self.pcounter = Counter()
        self.ccounter = Counter()
        self.tpcounter = Counter()
        self.tccounter = Counter()
        self.cccounter = Counter()
        # Time range, for logging (start-end as YYYY-MM-DD).
        self.time_range = datetime.datetime.fromtimestamp(
            curr_time - num_weeks).strftime('%Y-%m-%d') + "-" + \
            datetime.datetime.fromtimestamp(curr_time).strftime(
                '%Y-%m-%d')

    # Main runner
    def run(self):
        """Fetch posts for this thread's range and aggregate the counters."""
        log("\t(" + self.time_range + ') - Getting posts...')
        # Get group posts
        try:
            group_posts = self.graph.fql(query=self.input_query)
        except Exception as e:
            # 99% of the time this is just an expired API access token
            log("Error: " + str(e), Color.RED)
            sys.exit()
        log("\t(" + self.time_range + ") - " +
            str(len(group_posts["data"])) + " posts")
        # Iterate over posts
        if len(group_posts) != 0:
            for post in group_posts["data"]:
                comments_query = \
                    "SELECT fromid, likes, id, time FROM comment WHERE post_id="
                # Track each actor's earliest appearance (first post time).
                if post['actor_id'] in appeared.keys():
                    if appeared[post['actor_id']] > int(post['created_time']):
                        appeared[post['actor_id']] = int(post['created_time'])
                else:
                    appeared[post['actor_id']] = int(post['created_time'])
                # Add post's like count to that user in our total_likes_counter
                self.tcounter[post['actor_id']] += post[
                    'like_info']['like_count']
                # Add to top like posts counter
                self.pcounter[post['post_id']] = post['like_info'][
                    'like_count']
                # Timestamp of the post, truncated to midnight so it buckets
                # activity per calendar day.
                day_timestamp = datetime.datetime.fromtimestamp(
                    int(post['created_time']))
                day_timestamp = day_timestamp.replace(hour=0, minute=0,
                                                      second=0, microsecond=0)
                day_timestamp = (
                    day_timestamp - datetime.datetime(1970, 1, 1)).total_seconds()
                # Add to post count
                self.tpcounter[str(day_timestamp)] += 1
                # Initialize controversial counter (post itself counts as 1)
                self.cccounter[post['post_id']] += 1
                # Get likes on comments
                comments = self.graph.fql(
                    comments_query + "\"" + str(post['post_id']) +
                    "\" LIMIT 500")
                # Iterate over comments
                if len(comments["data"]) != 0:
                    log("\t\t(" + self.time_range + ") - " + str(
                        len(comments["data"])) + " comments")
                    log("\t\t(" + self.time_range + ') - Getting comments...')
                    for c in comments["data"]:
                        # add their like counts to their respective users
                        # in our total_likes_counter
                        self.tcounter[c['fromid']] += c['likes']
                        # add like count to top_comments_likes_counter
                        self.ccounter[c['id']] = c['likes']
                        # Add to comment count
                        self.tccounter[str(day_timestamp)] += 1
                        # Add to controversial counter
                        self.cccounter[post['post_id']] += 1
                        # Track the commenter's earliest appearance too.
                        if c['fromid'] in appeared.keys():
                            if appeared[c['fromid']] > int(c['time']):
                                appeared[c['fromid']] = int(c['time'])
                        else:
                            appeared[c['fromid']] = int(c['time'])
                else:
                    log("\t\tNo comments from this post")
        else:
            log("\t\tNo posts from this time frame")
        # Hand all counters to the main thread in one threadsafe put.
        self.queue.put({'t': self.tcounter, 'p': self.pcounter, 'c':
                        self.ccounter, 'tp': self.tpcounter,
                        'tc': self.tccounter, 'cc': self.cccounter})
# Method for counting various total likes in a group
def count_group_likes():
    """Fetch a Facebook group's full history in parallel threads, merge the
    per-range counters, print ranked stats, and pass a JSON blob to
    exportData()."""
    fb_api_access_token = props.token
    fb_app_id = props.app_id
    fb_secret_key = props.secret_key
    # Counter objects to do the counting for us
    total_likes_counter = Counter()
    top_liked_posts_counter = Counter()
    top_liked_comments_counter = Counter()
    total_posts_counter = Counter()
    total_comments_counter = Counter()
    most_discussed_counter = Counter()
    group_id = props.group_id
    num_of_items_to_return = props.num_of_items
    # Put the number of weeks you want it to increment by each time
    # smaller is better, but too small and you could hit your rate limit
    # ... which is 600 calls per 600 seconds. Maybe apps get more
    num_weeks = int("2")
    # Convert to unix time (seconds); 604800 = seconds in one week.
    num_weeks_unix = num_weeks * 604800
    start_date = props.start_date
    datetime_start_date = datetime.datetime.fromtimestamp(start_date)
    # Query strings for FQL
    posts_query = \
        "SELECT post_id, like_info, actor_id, created_time FROM stream" + \
        " WHERE source_id=" + group_id + " AND created_time<"
    person_query = "SELECT first_name, last_name FROM user WHERE uid="
    # Authorize our API wrapper
    graph = facepy.GraphAPI(fb_api_access_token)
    # Code to programmatically extend the access token's lifetime
    if extend_key:
        access_token, expires_at = facepy.get_extended_access_token(
            fb_api_access_token,
            fb_app_id,
            fb_secret_key
        )
        # This will print out new extended token and new expiration date
        # Copy them and replace your token above with this one
        print 'New token: ' + access_token
        print 'New expiration date: ' + expires_at
    log('Getting group posts', Color.BLUE)
    # Set end time to current time and work backward
    end_time = int(time.time())
    # Or manually set end time
    # end_time = 1392422400
    log('Current date is: ' + datetime.datetime.fromtimestamp(
        end_time).strftime('%Y-%m-%d'))
    log('Incrementing by ' + str(num_weeks) + ' weeks at a time')
    # List of thread objects
    threads = []
    # Threadsafe queue for the threads to dump their data in
    final_queue = Queue()
    log("Initializing threads...", Color.BLUE)
    # Instantiate one thread per num_weeks window, decrementing from the
    # end time back to the configured start date.
    while end_time > start_date:
        # New query bounded to (end_time - window, end_time]
        new_query = posts_query + str(
            end_time) + " AND created_time>" + \
            str(end_time - num_weeks_unix) + " LIMIT 600"
        # Thread creation
        t = RequestThread(final_queue, fb_api_access_token, new_query,
                          end_time, num_weeks_unix)
        # Add it to our list
        threads.append(t)
        # Decrement the time
        end_time -= num_weeks_unix
        # Start the thread
        t.start()
    log("Joining threads...", Color.BLUE)
    # Wait for all the threads to finish before counting everything up
    for t in threads:
        t.join()
    log("Done, merging data...", Color.BLUE)
    # Count up all the data by merging all the counters from each thread result
    for stuff in list(final_queue.queue):
        total_likes_counter += stuff['t']
        top_liked_posts_counter += stuff['p']
        top_liked_comments_counter += stuff['c']
        total_posts_counter += stuff['tp']
        total_comments_counter += stuff['tc']
        most_discussed_counter += stuff['cc']
    most_active_day_counter = total_posts_counter + total_comments_counter
    # Returns key-value list of top <num_of_items_to_return> items
    most_common_people = total_likes_counter.most_common(
        num_of_items_to_return)
    top_posts = top_liked_posts_counter.most_common(num_of_items_to_return)
    top_comments = top_liked_comments_counter.most_common(
        num_of_items_to_return)
    total_posts = total_posts_counter.most_common(num_of_items_to_return)
    total_comments = total_comments_counter.most_common(num_of_items_to_return)
    most_active_days = most_active_day_counter.most_common(
        num_of_items_to_return)
    most_discussed = most_discussed_counter.most_common(num_of_items_to_return)
    top_people_stats = []
    # Iterate over top people and retrieve names from their ID's
    # Use enumerate to keep track of indices for rank numbers
    log('\nPeople Stats', Color.BOLD)
    log("* = Weighted average calc'd from user's first post date")
    for i, x in enumerate(most_common_people):
        person = graph.fql(person_query + str(x[0]))["data"][0]
        now = datetime.datetime.now()
        join_date = datetime.datetime.fromtimestamp(appeared[x[0]])
        # diff1: full analysis window; diff2: since the user's first post.
        diff1 = now - datetime_start_date
        diff2 = now - join_date
        # Likes per week over each window.
        avg = x[1] / (diff1.total_seconds() / 60 / 60 / 24 / 7)
        weighted_avg = x[1] / (diff2.total_seconds() / 60 / 60 / 24 / 7)
        top_people_stats.append({
            "name": person['first_name'] + " " + person['last_name'],
            "likes": x[1],
            "avg": avg,
            "augmented_avg": weighted_avg,
            "first": int(
                (join_date - datetime.datetime(1970, 1, 1)).total_seconds())
        })
        print '#' + str(i + 1) + '. ' + person['first_name'] + " " + person[
            'last_name']
        print '-- Likes: ' + str(x[1])
        print '-- Weekly average: ' + str(avg)
        print '-- Weekly average*: ' + str(weighted_avg)
        print '-- First post: ' + join_date.strftime('%Y-%m-%d')
    # Iterate over top posts and get info
    log('\nTop posts!', Color.BOLD)
    for x in top_posts:
        post = graph.get(str(x[0]))
        s = str(x[1]) + " - " + post['from']['name'] + " - " + post['type']
        print s
        if 'message' in post:
            # Flatten newlines and truncate long messages for display.
            m = str(post['message'].encode('ascii', 'ignore')).replace('\n',
                                                                       ' ')
            if len(m) > 70:
                print '-- ' + m[0:70] + "..."
            else:
                print '-- ' + m
        print '-- http://www.facebook.com/' + post['id']
    # Iterate over top comments and get info
    log('\nTop comments!', Color.BOLD)
    for x in top_comments:
        comment = graph.get(str(x[0]))
        s = str(x[1]) + " - " + comment['from']['name']
        print s
        if 'message' in comment:
            c = str(comment['message'].encode('ascii', 'ignore')).replace('\n',
                                                                          ' ')
            if len(c) > 70:
                print '-- ' + c[0:70] + "..."
            else:
                print '-- ' + c
        print '-- http://www.facebook.com/' + comment['id']
    # Iterate over total posts/comments and calculate info
    log('\nMost active days (by number of posts and comments)', Color.BOLD)
    for x in most_active_days:
        d = datetime.datetime.fromtimestamp(float(x[0])).strftime('%m/%d/%Y')
        print str(x[1]) + " - " + d
    # Iterate over total posts and calculate info
    log('\nMost active days (by number of posts)', Color.BOLD)
    for x in total_posts:
        d = datetime.datetime.fromtimestamp(float(x[0])).strftime('%m/%d/%Y')
        print str(x[1]) + " - " + d
    # Iterate over total comments and calculate info
    log('\nMost active days (by number of comments)', Color.BOLD)
    for x in total_comments:
        d = datetime.datetime.fromtimestamp(float(x[0])).strftime('%m/%d/%Y')
        print str(x[1]) + " - " + d
    # Iterate over the most-discussed posts and get info
    log('\nMost discussed', Color.BOLD)
    for x in most_discussed:
        post = graph.get(str(x[0]))
        s = str(x[1]) + " - " + post['from']['name'] + " - " + post['type']
        print s
        if 'message' in post:
            m = str(post['message'].encode('ascii', 'ignore')).replace('\n',
                                                                       ' ')
            if len(m) > 70:
                print '-- ' + m[0:70] + "..."
            else:
                print '-- ' + m
        print '-- http://www.facebook.com/' + post['id']
    log('\nExporting...', Color.BLUE)
    dataDict = json.dumps({"top_people_stats": top_people_stats,
                           "top_liked_posts_counter": top_liked_posts_counter,
                           "top_liked_comments_counter": top_liked_comments_counter,
                           "total_posts_counter": total_posts_counter,
                           "total_comments_counter": total_comments_counter,
                           "most_active_day_counter": most_active_day_counter,
                           "most_common_people": most_common_people,
                           "top_posts": top_posts,
                           "top_comments": top_comments,
                           "total_posts": total_posts,
                           "total_comments": total_comments,
                           "most_active_days": most_active_days})
    exportData(dataDict)
# --- Script entry: parse simple CLI flags, then run the full count. ---
args = sys.argv
extend_key = False  # boolean for if we want to extend token access
if len(args) > 1:
    if "--extend" in args:  # Pass in flag to extend the API token lifetime
        extend_key = True
    if "test" in args:
        # Run the junk smoke test and exit without touching the API.
        test()
        sys.exit()
else:
    log('No args specified')
count_group_likes()
notify_mac()
|
{
"content_hash": "065ccfcbe912dbc56b9f4382092811e6",
"timestamp": "",
"source": "github",
"line_count": 411,
"max_line_length": 84,
"avg_line_length": 36.23357664233577,
"alnum_prop": 0.5393499865699705,
"repo_name": "hzsweers/fb_group_analytics",
"id": "85d0aa7a956bd0630a42d15be58084d13224b75a",
"size": "14892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16575"
}
],
"symlink_target": ""
}
|
"""Tests for the GAE mail API wrapper."""
from core.platform.email import gae_email_services
from core.tests import test_utils
import feconf
class EmailTests(test_utils.GenericTestBase):
    """Tests for sending emails."""

    def test_sending_email_to_admin(self):
        """Mail is sent if and only if CAN_SEND_EMAILS_TO_ADMIN is enabled."""
        admin_address = feconf.ADMIN_EMAIL_ADDRESS

        # With the feature flag disabled, nothing should be sent.
        with self.swap(feconf, 'CAN_SEND_EMAILS_TO_ADMIN', False):
            gae_email_services.send_mail_to_admin('subject', 'body')
            messages = self.mail_stub.get_sent_messages(to=admin_address)
            self.assertEqual(0, len(messages))

        # With the flag enabled, exactly one message reaches the admin and
        # carries the app-id suffix in its body.
        with self.swap(feconf, 'CAN_SEND_EMAILS_TO_ADMIN', True):
            gae_email_services.send_mail_to_admin('subject', 'body')
            messages = self.mail_stub.get_sent_messages(to=admin_address)
            self.assertEqual(1, len(messages))
            self.assertEqual(admin_address, messages[0].to)
            self.assertIn(
                '(Sent from %s)' % self.EXPECTED_TEST_APP_ID,
                messages[0].body.decode())
|
{
"content_hash": "bb70d35c23d0295d54d43fc7a43585a9",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 72,
"avg_line_length": 40.964285714285715,
"alnum_prop": 0.6233653007846556,
"repo_name": "kingctan/oppia",
"id": "e021cb4f1d3d1a1e35f966b75bd028581e971c74",
"size": "1770",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "core/platform/email/gae_email_services_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "363"
},
{
"name": "CSS",
"bytes": "44437"
},
{
"name": "HTML",
"bytes": "261559"
},
{
"name": "JavaScript",
"bytes": "1296213"
},
{
"name": "Python",
"bytes": "1502686"
},
{
"name": "Shell",
"bytes": "25115"
}
],
"symlink_target": ""
}
|
import logging
from docrane import util
from docrane.exceptions import ImageNotFoundError
LOG = logging.getLogger("docrane")
class Container(object):
    """Desired state of a single Docker container driven by etcd params."""

    def __init__(self, name, params, images_watcher):
        # Container name as Docker knows it (matched as '/<name>' below).
        self.name = name
        # Desired configuration from etcd; expected to include at least
        # 'image' and 'tag' keys (see ensure_running).
        self.params = params
        # Docker-ready translation of ``params``; filled by ensure_running().
        self.docker_params = {}
        # Watcher providing the currently known local Docker images.
        self.images_watcher = images_watcher

    def update_params(self, etcd_params):
        """
        Checks if container's param keys have changed and
        makes changes to container if required.

        args:
            container (obj) - Container
            etcd_params (dict) - Current params in etcd

        Returns: (bool)
            True if the params changed (and were stored), False otherwise.
        """
        # Symmetric difference is non-empty iff any key/value pair differs.
        diff = set(etcd_params.items()) ^ set(self.params.items())
        if len(diff):
            LOG.warning("Params on %s have changed." % self.name)
            LOG.info("Old params: %s" % self.params)
            LOG.info("New params: %s" % etcd_params)
            self.params = etcd_params
            return True
        return False

    def ensure_running(self, force_restart=False):
        """
        Ensure an up to date version of the container is running.

        args:
            force_restart (bool) - restart even if already up
        """
        # Ensure container is running with specified params
        containers = util.get_containers()
        if containers is None:
            # Docker unavailable right now; nothing to do.
            return
        found = False
        for pc in containers:
            if "/%s" % self.name in pc['Names']:
                found = True
                full_image = "%s:%s" % (
                    self.params.get('image'), self.params.get('tag'))
                try:
                    cur_images = util.get_docker_similar_images(
                        pc['Image'], self.images_watcher.get_images())
                except TypeError:
                    # No images. Wait until image watcher resyncs
                    return
                if (pc['Status'].startswith('Up') and
                        full_image in cur_images and
                        not force_restart):
                    # Already up on the right image; nothing to do.
                    return
                elif full_image not in cur_images:
                    LOG.warning("Image ID has changed. Restarting container.")
                    break
        # Reaching here means we must (re)create: pull the desired image.
        try:
            util.pull_image(
                self.params.get('image'), self.params.get('tag'))
        except ImageNotFoundError as e:
            # The image wasn't found
            LOG.error(e)
            return
        if found:
            # Shut down old container first
            self.stop_and_rm()
        elif not force_restart:
            LOG.warning("Container %s not running." % self.name)
        self.docker_params = util.convert_params(self.params)
        # Start only if creation succeeded.
        if self.create():
            self.start()

    def create(self):
        # Create container with specified args; returns util's result
        # (truthy on success -- see ensure_running).
        LOG.info("Creating %s..." % self.name)
        return util.create_docker_container(self.name, self.docker_params)

    def start(self):
        # Start 'er up
        LOG.warning("Starting %s..." % self.name)
        util.start_docker_container(self.name)

    def stop_and_rm(self):
        # Stop and remove the existing container
        LOG.warning("Stopping %s..." % self.name)
        util.stop_and_rm_docker_container(self.name)
|
{
"content_hash": "da9043f31e4e92e071ed28fe4d898230",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 78,
"avg_line_length": 31.38095238095238,
"alnum_prop": 0.5402124430955993,
"repo_name": "CloudBrewery/docrane",
"id": "4faa375635a4a4b31fe27b310842c464538001b7",
"size": "3295",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docrane/container.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15116"
}
],
"symlink_target": ""
}
|
class NoVersionsError(Exception):
    """Raised when no versions are found for a package."""

    def __str__(self):
        # ``message`` is expected to be set on the instance.
        return "<NoVersionsError %s>" % self.message
class PIPError(Exception):
    """Raised when the PIP subprocess fails."""

    def __str__(self):
        # ``message`` is expected to be set on the instance.
        return "<PIPError %s>" % self.message
class PkgNotFoundError(Exception):
    """Raised when the requested package cannot be found."""

    def __str__(self):
        # ``message`` is expected to be set on the instance.
        return "<PkgNotFoundError %s>" % self.message
|
{
"content_hash": "148617672f3d4d4a6d634c2dcdd785d2",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 63,
"avg_line_length": 19.4,
"alnum_prop": 0.5670103092783505,
"repo_name": "vuolter/autoupgrade",
"id": "29e7f3eb2c1f6f93a3cf2ca51a068196ef002b45",
"size": "511",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "autoupgrade/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8309"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from genericm2m.models import RelatedObjectsDescriptor
@python_2_unicode_compatible
class TestModel(models.Model):
    """Model used by the test project to exercise generic M2M autocompletes."""

    # Display name; also what __str__ returns.
    name = models.CharField(max_length=200)
    # Generic many-to-many relation provided by django-generic-m2m.
    test = RelatedObjectsDescriptor()
    # Optional self-referencing FK (reverse name 'inline_test_models'),
    # presumably used for inline form tests -- confirm against callers.
    for_inline = models.ForeignKey(
        'self',
        null=True,
        blank=True,
        related_name='inline_test_models'
    )

    def __str__(self):
        return self.name
|
{
"content_hash": "a69149d1448cc308a2f0ccc2458111bb",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 61,
"avg_line_length": 22.904761904761905,
"alnum_prop": 0.6902286902286903,
"repo_name": "Eraldo/django-autocomplete-light",
"id": "42360342583e4fae309ae7ab21f393be8fe61f0c",
"size": "481",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test_project/select2_generic_m2m/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "44"
},
{
"name": "HTML",
"bytes": "4331"
},
{
"name": "JavaScript",
"bytes": "3248"
},
{
"name": "Python",
"bytes": "113829"
},
{
"name": "Shell",
"bytes": "2808"
}
],
"symlink_target": ""
}
|
"""
Management utility to clean up tokens
"""
from datetime import datetime as dt
from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models import Count
from tardis.tardis_portal.models import Token, ObjectACL
from tardis.tardis_portal.auth.token_auth import TokenGroupProvider
class Command(BaseCommand):
    """Delete expired tokens and, unless --keep-acls is given, any token
    ACLs that no longer have tokens attached."""

    option_list = BaseCommand.option_list + (
        make_option('--keep-acls', dest='keep_acls', default=False,
                    action='store_true',
                    help='Keep orphaned token ACLs'),
    )
    help = 'Deletes unused tokens and optionally their ACLs'

    def handle(self, *args, **options):
        verbosity = int(options.get('verbosity', 1))
        keep_acls = options.get('keep_acls')
        # Tokens past their expiry date can never authenticate again.
        expired_tokens = Token.objects.filter(expiry_date__lt=dt.today())
        num_tokens = expired_tokens.count()
        expired_tokens.delete()
        if verbosity > 0:
            self.stdout.write("%s Tokens cleaned up successfully\n" % num_tokens)
        if not keep_acls:
            self._purge_unused_token_acls(verbosity)

    def _purge_unused_token_acls(self, verbosity):
        """
        purge ACLs if they are not in use
        """
        # BUGFIX: Django has no ``__eq`` field lookup, so the previous
        # ``.filter(num_tokens__eq=0)`` raised FieldError at runtime.
        # Exact match on the annotated count is spelled ``num_tokens=0``.
        acls_to_delete = ObjectACL.objects.filter(
            pluginId=TokenGroupProvider.name).annotate(
            num_tokens=Count('content_object__token')
        ).filter(num_tokens=0)
        num_acls = acls_to_delete.count()
        acls_to_delete.delete()
        if verbosity > 0:
            self.stdout.write("%s Token ACLs cleaned up successfully\n" % num_acls)
|
{
"content_hash": "f9d33e042cc010c72cc6ecb63c159307",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 83,
"avg_line_length": 29.90740740740741,
"alnum_prop": 0.6396284829721363,
"repo_name": "pansapiens/mytardis",
"id": "654d2c97b2894ece9e1eb1bc5b0eb6253f23fb51",
"size": "1615",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "tardis/tardis_portal/management/commands/cleanuptokens.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "54456"
},
{
"name": "HTML",
"bytes": "335457"
},
{
"name": "JavaScript",
"bytes": "356177"
},
{
"name": "Python",
"bytes": "2082865"
},
{
"name": "Shell",
"bytes": "2971"
}
],
"symlink_target": ""
}
|
import pandas as pd
from pandas.util.testing import assert_series_equal
from six import iteritems
from functools import partial
from toolz import groupby
from zipline.finance.asset_restrictions import (
RESTRICTION_STATES,
Restriction,
HistoricalRestrictions,
StaticRestrictions,
SecurityListRestrictions,
NoRestrictions,
_UnionRestrictions,
)
from zipline.testing import parameter_space
from zipline.testing.fixtures import (
WithDataPortal,
ZiplineTestCase,
)
def str_to_ts(dt_str):
    """Parse *dt_str* into a timezone-aware (UTC) pandas Timestamp."""
    timestamp = pd.Timestamp(dt_str, tz='UTC')
    return timestamp
# Short aliases for the two restriction states used throughout the tests.
FROZEN = RESTRICTION_STATES.FROZEN
ALLOWED = RESTRICTION_STATES.ALLOWED
# One-minute delta used to probe just before/after a restriction boundary.
MINUTE = pd.Timedelta(minutes=1)
class RestrictionsTestCase(WithDataPortal, ZiplineTestCase):
    """Tests for the asset-restriction classes (historical, static,
    security-list and no-op) and the semantics of unioning them."""

    ASSET_FINDER_EQUITY_SIDS = 1, 2, 3

    @classmethod
    def init_class_fixtures(cls):
        super(RestrictionsTestCase, cls).init_class_fixtures()
        cls.ASSET1 = cls.asset_finder.retrieve_asset(1)
        cls.ASSET2 = cls.asset_finder.retrieve_asset(2)
        cls.ASSET3 = cls.asset_finder.retrieve_asset(3)
        cls.ALL_ASSETS = [cls.ASSET1, cls.ASSET2, cls.ASSET3]

    # --- small assertion helpers shared by every test below ---

    def assert_is_restricted(self, rl, asset, dt):
        # Single-asset query: `asset` must be restricted at `dt`.
        self.assertTrue(rl.is_restricted(asset, dt))

    def assert_not_restricted(self, rl, asset, dt):
        self.assertFalse(rl.is_restricted(asset, dt))

    def assert_all_restrictions(self, rl, expected, dt):
        # Vectorized query over all three test assets.
        self.assert_many_restrictions(rl, self.ALL_ASSETS, expected, dt)

    def assert_many_restrictions(self, rl, assets, expected, dt):
        assert_series_equal(
            rl.is_restricted(assets, dt),
            pd.Series(index=pd.Index(assets), data=expected),
        )

    @parameter_space(
        date_offset=(
            pd.Timedelta(0),
            pd.Timedelta('1 minute'),
            pd.Timedelta('15 hours 5 minutes')
        ),
        restriction_order=(
            list(range(6)),  # Keep restrictions in order.
            [0, 2, 1, 3, 5, 4],  # Re-order within asset.
            [0, 3, 1, 4, 2, 5],  # Scramble assets, maintain per-asset order.
            [0, 5, 2, 3, 1, 4],  # Scramble assets and per-asset order.
        ),
        __fail_fast=True,
    )
    def test_historical_restrictions(self, date_offset, restriction_order):
        """
        Test historical restrictions for both interday and intraday
        restrictions, as well as restrictions defined in/not in order, for both
        single- and multi-asset queries
        """
        def rdate(s):
            """Convert a date string into a restriction for that date."""
            # Add date_offset to check that we handle intraday changes.
            return str_to_ts(s) + date_offset

        base_restrictions = [
            Restriction(self.ASSET1, rdate('2011-01-04'), FROZEN),
            Restriction(self.ASSET1, rdate('2011-01-05'), ALLOWED),
            Restriction(self.ASSET1, rdate('2011-01-06'), FROZEN),
            Restriction(self.ASSET2, rdate('2011-01-05'), FROZEN),
            Restriction(self.ASSET2, rdate('2011-01-06'), ALLOWED),
            Restriction(self.ASSET2, rdate('2011-01-07'), FROZEN),
        ]
        # Scramble the restrictions based on restriction_order to check that we
        # don't depend on the order in which restrictions are provided to us.
        all_restrictions = [base_restrictions[i] for i in restriction_order]

        restrictions_by_asset = groupby(lambda r: r.asset, all_restrictions)

        rl = HistoricalRestrictions(all_restrictions)
        assert_not_restricted = partial(self.assert_not_restricted, rl)
        assert_is_restricted = partial(self.assert_is_restricted, rl)
        assert_all_restrictions = partial(self.assert_all_restrictions, rl)

        # Check individual restrictions.
        for asset, r_history in iteritems(restrictions_by_asset):
            freeze_dt, unfreeze_dt, re_freeze_dt = (
                sorted([r.effective_date for r in r_history])
            )

            # Starts implicitly unrestricted. Restricted on or after the freeze
            assert_not_restricted(asset, freeze_dt - MINUTE)
            assert_is_restricted(asset, freeze_dt)
            assert_is_restricted(asset, freeze_dt + MINUTE)

            # Unrestricted on or after the unfreeze
            assert_is_restricted(asset, unfreeze_dt - MINUTE)
            assert_not_restricted(asset, unfreeze_dt)
            assert_not_restricted(asset, unfreeze_dt + MINUTE)

            # Restricted again on or after the freeze
            assert_not_restricted(asset, re_freeze_dt - MINUTE)
            assert_is_restricted(asset, re_freeze_dt)
            assert_is_restricted(asset, re_freeze_dt + MINUTE)

            # Should stay restricted for the rest of time
            assert_is_restricted(asset, re_freeze_dt + MINUTE * 1000000)

        # Check vectorized restrictions.
        # Expected results for [self.ASSET1, self.ASSET2, self.ASSET3],
        # ASSET3 is always False as it has no defined restrictions

        # 01/04 XX:00 ASSET1: ALLOWED --> FROZEN; ASSET2: ALLOWED
        d0 = rdate('2011-01-04')
        assert_all_restrictions([False, False, False], d0 - MINUTE)
        assert_all_restrictions([True, False, False], d0)
        assert_all_restrictions([True, False, False], d0 + MINUTE)

        # 01/05 XX:00 ASSET1: FROZEN --> ALLOWED; ASSET2: ALLOWED --> FROZEN
        d1 = rdate('2011-01-05')
        assert_all_restrictions([True, False, False], d1 - MINUTE)
        assert_all_restrictions([False, True, False], d1)
        assert_all_restrictions([False, True, False], d1 + MINUTE)

        # 01/06 XX:00 ASSET1: ALLOWED --> FROZEN; ASSET2: FROZEN --> ALLOWED
        d2 = rdate('2011-01-06')
        assert_all_restrictions([False, True, False], d2 - MINUTE)
        assert_all_restrictions([True, False, False], d2)
        assert_all_restrictions([True, False, False], d2 + MINUTE)

        # 01/07 XX:00 ASSET1: FROZEN; ASSET2: ALLOWED --> FROZEN
        d3 = rdate('2011-01-07')
        assert_all_restrictions([True, False, False], d3 - MINUTE)
        assert_all_restrictions([True, True, False], d3)
        assert_all_restrictions([True, True, False], d3 + MINUTE)

        # Should stay restricted for the rest of time
        assert_all_restrictions(
            [True, True, False],
            d3 + (MINUTE * 10000000)
        )

    def test_historical_restrictions_consecutive_states(self):
        """
        Test that defining redundant consecutive restrictions still works
        """
        rl = HistoricalRestrictions([
            Restriction(self.ASSET1, str_to_ts('2011-01-04'), ALLOWED),
            Restriction(self.ASSET1, str_to_ts('2011-01-05'), ALLOWED),
            Restriction(self.ASSET1, str_to_ts('2011-01-06'), FROZEN),
            Restriction(self.ASSET1, str_to_ts('2011-01-07'), FROZEN),
        ])
        assert_not_restricted = partial(self.assert_not_restricted, rl)
        assert_is_restricted = partial(self.assert_is_restricted, rl)

        # (implicit) ALLOWED --> ALLOWED
        assert_not_restricted(self.ASSET1, str_to_ts('2011-01-04') - MINUTE)
        assert_not_restricted(self.ASSET1, str_to_ts('2011-01-04'))
        assert_not_restricted(self.ASSET1, str_to_ts('2011-01-04') + MINUTE)

        # ALLOWED --> ALLOWED
        assert_not_restricted(self.ASSET1, str_to_ts('2011-01-05') - MINUTE)
        assert_not_restricted(self.ASSET1, str_to_ts('2011-01-05'))
        assert_not_restricted(self.ASSET1, str_to_ts('2011-01-05') + MINUTE)

        # ALLOWED --> FROZEN
        assert_not_restricted(self.ASSET1, str_to_ts('2011-01-06') - MINUTE)
        assert_is_restricted(self.ASSET1, str_to_ts('2011-01-06'))
        assert_is_restricted(self.ASSET1, str_to_ts('2011-01-06') + MINUTE)

        # FROZEN --> FROZEN
        assert_is_restricted(self.ASSET1, str_to_ts('2011-01-07') - MINUTE)
        assert_is_restricted(self.ASSET1, str_to_ts('2011-01-07'))
        assert_is_restricted(self.ASSET1, str_to_ts('2011-01-07') + MINUTE)

    def test_static_restrictions(self):
        """
        Test single- and multi-asset queries on static restrictions
        """
        restricted_a1 = self.ASSET1
        restricted_a2 = self.ASSET2
        unrestricted_a3 = self.ASSET3

        rl = StaticRestrictions([restricted_a1, restricted_a2])
        assert_not_restricted = partial(self.assert_not_restricted, rl)
        assert_is_restricted = partial(self.assert_is_restricted, rl)
        assert_all_restrictions = partial(self.assert_all_restrictions, rl)

        # Static restrictions hold for any point in time.
        for dt in [str_to_ts(dt_str) for dt_str in ('2011-01-03',
                                                    '2011-01-04',
                                                    '2011-01-04 1:01',
                                                    '2020-01-04')]:
            assert_is_restricted(restricted_a1, dt)
            assert_is_restricted(restricted_a2, dt)
            assert_not_restricted(unrestricted_a3, dt)
            assert_all_restrictions([True, True, False], dt)

    def test_security_list_restrictions(self):
        """
        Test single- and multi-asset queries on restrictions defined by
        zipline.utils.security_list.SecurityList
        """

        # A mock SecurityList object filled with fake data
        class SecurityList(object):
            def __init__(self, assets_by_dt):
                self.assets_by_dt = assets_by_dt

            def current_securities(self, dt):
                return self.assets_by_dt[dt]

        assets_by_dt = {
            str_to_ts('2011-01-03'): [self.ASSET1],
            str_to_ts('2011-01-04'): [self.ASSET2, self.ASSET3],
            str_to_ts('2011-01-05'): [self.ASSET1, self.ASSET2, self.ASSET3],
        }

        rl = SecurityListRestrictions(SecurityList(assets_by_dt))

        assert_not_restricted = partial(self.assert_not_restricted, rl)
        assert_is_restricted = partial(self.assert_is_restricted, rl)
        assert_all_restrictions = partial(self.assert_all_restrictions, rl)

        assert_is_restricted(self.ASSET1, str_to_ts('2011-01-03'))
        assert_not_restricted(self.ASSET2, str_to_ts('2011-01-03'))
        assert_not_restricted(self.ASSET3, str_to_ts('2011-01-03'))
        assert_all_restrictions(
            [True, False, False], str_to_ts('2011-01-03')
        )

        assert_not_restricted(self.ASSET1, str_to_ts('2011-01-04'))
        assert_is_restricted(self.ASSET2, str_to_ts('2011-01-04'))
        assert_is_restricted(self.ASSET3, str_to_ts('2011-01-04'))
        assert_all_restrictions(
            [False, True, True], str_to_ts('2011-01-04')
        )

        assert_is_restricted(self.ASSET1, str_to_ts('2011-01-05'))
        assert_is_restricted(self.ASSET2, str_to_ts('2011-01-05'))
        assert_is_restricted(self.ASSET3, str_to_ts('2011-01-05'))
        assert_all_restrictions(
            [True, True, True],
            str_to_ts('2011-01-05')
        )

    def test_noop_restrictions(self):
        """
        Test single- and multi-asset queries on no-op restrictions
        """
        rl = NoRestrictions()
        assert_not_restricted = partial(self.assert_not_restricted, rl)
        assert_all_restrictions = partial(self.assert_all_restrictions, rl)

        # Nothing is ever restricted, at any point in time.
        for dt in [str_to_ts(dt_str) for dt_str in ('2011-01-03',
                                                    '2011-01-04',
                                                    '2020-01-04')]:
            assert_not_restricted(self.ASSET1, dt)
            assert_not_restricted(self.ASSET2, dt)
            assert_not_restricted(self.ASSET3, dt)
            assert_all_restrictions([False, False, False], dt)

    def test_union_restrictions(self):
        """
        Test that we appropriately union restrictions together, including
        eliminating redundancy (ignoring NoRestrictions) and flattening out
        the underlying sub-restrictions of _UnionRestrictions
        """
        no_restrictions_rl = NoRestrictions()

        st_restrict_asset1 = StaticRestrictions([self.ASSET1])
        st_restrict_asset2 = StaticRestrictions([self.ASSET2])
        st_restricted_assets = [self.ASSET1, self.ASSET2]

        before_frozen_dt = str_to_ts('2011-01-05')
        freeze_dt_1 = str_to_ts('2011-01-06')
        unfreeze_dt = str_to_ts('2011-01-06 16:00')
        hist_restrict_asset3_1 = HistoricalRestrictions([
            Restriction(self.ASSET3, freeze_dt_1, FROZEN),
            Restriction(self.ASSET3, unfreeze_dt, ALLOWED)
        ])

        freeze_dt_2 = str_to_ts('2011-01-07')
        hist_restrict_asset3_2 = HistoricalRestrictions([
            Restriction(self.ASSET3, freeze_dt_2, FROZEN)
        ])

        # A union of a NoRestrictions with a non-trivial restriction should
        # yield the original restriction
        trivial_union_restrictions = no_restrictions_rl | st_restrict_asset1
        self.assertIsInstance(trivial_union_restrictions, StaticRestrictions)

        # A union of two non-trivial restrictions should yield a
        # UnionRestrictions
        st_union_restrictions = st_restrict_asset1 | st_restrict_asset2
        self.assertIsInstance(st_union_restrictions, _UnionRestrictions)

        arb_dt = str_to_ts('2011-01-04')
        self.assert_is_restricted(st_restrict_asset1, self.ASSET1, arb_dt)
        self.assert_not_restricted(st_restrict_asset1, self.ASSET2, arb_dt)
        self.assert_not_restricted(st_restrict_asset2, self.ASSET1, arb_dt)
        self.assert_is_restricted(st_restrict_asset2, self.ASSET2, arb_dt)
        self.assert_is_restricted(st_union_restrictions, self.ASSET1, arb_dt)
        self.assert_is_restricted(st_union_restrictions, self.ASSET2, arb_dt)
        self.assert_many_restrictions(
            st_restrict_asset1,
            st_restricted_assets,
            [True, False],
            arb_dt
        )
        self.assert_many_restrictions(
            st_restrict_asset2,
            st_restricted_assets,
            [False, True],
            arb_dt
        )
        self.assert_many_restrictions(
            st_union_restrictions,
            st_restricted_assets,
            [True, True],
            arb_dt
        )

        # A union of a 2-sub-restriction UnionRestrictions and a
        # non-trivial restrictions should yield a UnionRestrictions with
        # 3 sub restrictions. Works with UnionRestrictions on both the left
        # side or right side
        for r1, r2 in [
            (st_union_restrictions, hist_restrict_asset3_1),
            (hist_restrict_asset3_1, st_union_restrictions)
        ]:
            union_or_hist_restrictions = r1 | r2
            self.assertIsInstance(
                union_or_hist_restrictions, _UnionRestrictions)
            self.assertEqual(
                len(union_or_hist_restrictions.sub_restrictions), 3)

            # Includes the two static restrictions on ASSET1 and ASSET2,
            # and the historical restriction on ASSET3 starting on freeze_dt_1
            # and ending on unfreeze_dt
            self.assert_all_restrictions(
                union_or_hist_restrictions,
                [True, True, False],
                before_frozen_dt
            )
            self.assert_all_restrictions(
                union_or_hist_restrictions,
                [True, True, True],
                freeze_dt_1
            )
            self.assert_all_restrictions(
                union_or_hist_restrictions,
                [True, True, False],
                unfreeze_dt
            )
            self.assert_all_restrictions(
                union_or_hist_restrictions,
                [True, True, False],
                freeze_dt_2
            )

        # A union of two 2-sub-restrictions UnionRestrictions should yield a
        # UnionRestrictions with 4 sub restrictions.
        hist_union_restrictions = \
            hist_restrict_asset3_1 | hist_restrict_asset3_2
        multi_union_restrictions = \
            st_union_restrictions | hist_union_restrictions

        self.assertIsInstance(multi_union_restrictions, _UnionRestrictions)
        self.assertEqual(len(multi_union_restrictions.sub_restrictions), 4)

        # Includes the two static restrictions on ASSET1 and ASSET2, the
        # first historical restriction on ASSET3 starting on freeze_dt_1 and
        # ending on unfreeze_dt, and the second historical restriction on
        # ASSET3 starting on freeze_dt_2
        self.assert_all_restrictions(
            multi_union_restrictions,
            [True, True, False],
            before_frozen_dt
        )
        self.assert_all_restrictions(
            multi_union_restrictions,
            [True, True, True],
            freeze_dt_1
        )
        self.assert_all_restrictions(
            multi_union_restrictions,
            [True, True, False],
            unfreeze_dt
        )
        self.assert_all_restrictions(
            multi_union_restrictions,
            [True, True, True],
            freeze_dt_2
        )
|
{
"content_hash": "d29c54d8e59b19b55ddd9c06d3157d24",
"timestamp": "",
"source": "github",
"line_count": 419,
"max_line_length": 79,
"avg_line_length": 40.575178997613364,
"alnum_prop": 0.6057878948297158,
"repo_name": "Scapogo/zipline",
"id": "72c1297d45c47b512484192e1971d220ae7a0ee3",
"size": "17001",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/test_restrictions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7014"
},
{
"name": "Emacs Lisp",
"bytes": "138"
},
{
"name": "Jupyter Notebook",
"bytes": "162383"
},
{
"name": "PowerShell",
"bytes": "3269"
},
{
"name": "Python",
"bytes": "3641596"
},
{
"name": "Shell",
"bytes": "7420"
}
],
"symlink_target": ""
}
|
"""Main Flask module"""
from flask import Flask
from flask_restful import Api
from dbclient.models import db
from resources import Systems, SingleSystem, Routes
app = Flask(__name__)
# catch_all_404s=True makes Flask-RESTful render unknown URLs as API errors.
api = Api(app, catch_all_404s=True)
# Bind the SQLAlchemy models to this application instance.
db.init_app(app)
# REST endpoints: system listing, single-system lookup (by id or by name),
# and route queries between node types.
api.add_resource(Systems,
                 '/systems')
api.add_resource(SingleSystem,
                 '/systems/<int:id>',
                 '/systems/<string:name>')
api.add_resource(Routes,
                 '/routes/<string:node_type>/<string:route_type>')
|
{
"content_hash": "3f718135358df442a00a7ff14543029e",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 66,
"avg_line_length": 27.666666666666668,
"alnum_prop": 0.6365461847389559,
"repo_name": "Srogozins/eve-travel-helper",
"id": "f292f03b40213112127e43e5ca611d60c3044f3d",
"size": "498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "eve_travel_helper/server.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "31792"
},
{
"name": "Shell",
"bytes": "226"
}
],
"symlink_target": ""
}
|
import os
import simplejson
from homely._errors import CleanupConflict, CleanupObstruction, HelperError
from homely._ui import note, warn
from homely._utils import (ENGINE2_CONFIG_PATH, FactConfig, RepoInfo,
isnecessarypath)
# Module-level singletons: the active Engine instance and the RepoInfo of
# the repository currently being processed (set via setrepoinfo()).
_ENGINE = None
_REPO = None
def initengine():
    """Create and install the global Engine, backed by the default config path."""
    global _ENGINE
    _ENGINE = Engine(ENGINE2_CONFIG_PATH)
    return _ENGINE
def resetengine():
    """Discard the global Engine instance."""
    global _ENGINE
    _ENGINE = None
def getengine():
    """Return the global Engine; initengine() must have been called first."""
    assert _ENGINE is not None
    return _ENGINE
def setrepoinfo(info):
    """Record the RepoInfo currently being processed (None clears it)."""
    assert info is None or isinstance(info, RepoInfo)
    global _REPO
    _REPO = info
def getrepoinfo():
    """Return the RepoInfo set by setrepoinfo(), or None if not set."""
    return _REPO
def _exists(path):
return os.path.exists(path) or os.path.islink(path)
class _AccessibleFacts(object):
    """Mixin giving subclasses lazy get/set/clear access to FactConfig."""

    # Shared lazily-created FactConfig; created on first fact access.
    _facts = None

    def _ensure_facts(self):
        # Lazily construct the FactConfig the first time it is needed.
        if self._facts is None:
            self._facts = FactConfig()
        return self._facts

    def _setfact(self, name, value):
        facts = self._ensure_facts()
        facts.jsondata[name] = value
        facts.writejson()

    def _clearfact(self, name):
        facts = self._ensure_facts()
        facts.jsondata.pop(name, None)
        facts.writejson()

    def _getfact(self, name, *args):
        facts = self._ensure_facts()
        if args:
            # Optional default supplied: behave like dict.get().
            return facts.jsondata.get(name, *args)
        return facts.jsondata[name]
class Helper(_AccessibleFacts):
    """Abstract interface for objects that make changes to the local system.

    The Engine drives a helper through isdone()/makechanges(), and uses
    getcleaner()/getclaims()/pathsownable() to know how to undo or guard
    the helper's work on later runs.
    """

    _facts = None

    def getcleaner(self):
        """
        Returns an instance of Cleaner that will clean up after this Helper is
        done.
        """
        raise NotImplementedError("%s needs to implement .getcleaner()" %
                                  self.__class__.__name__)

    def getclaims(self):
        """
        Returns a list of arbitrary strings describing things claimed by this
        helper.
        """
        raise NotImplementedError("%s needs to implement .getclaims() -> []" %
                                  self.__class__.__name__)

    def isdone(self):
        """Returns True if the Helper doesn't need to do anything."""
        raise NotImplementedError("%s needs to implement .isdone()" %
                                  self.__class__.__name__)

    def makechanges(self):
        """
        Makes changes locally. Raises a HelperError if there is a
        human-readable error that can be shown to the user.
        """
        raise NotImplementedError("%s needs to implement .makechanges()" %
                                  self.__class__.__name__)

    @property
    def description(self):
        # Human-readable one-liner shown in run/cleanup progress output.
        raise NotImplementedError("%s needs to define @property .description" %
                                  self.__class__.__name__)

    def pathsownable(self):
        '''
        Return a dict of {PATH: TYPE} where TYPE is one of:
        - Engine.TYPE_FILE_PART
        - Engine.TYPE_FILE_ALL
        - Engine.TYPE_FOLDER_ONLY
        - Engine.TYPE_FOLDER_ALL
        - Engine.TYPE_LINK
        '''
        raise NotImplementedError("%s needs to implement .pathsownable()" %
                                  self.__class__.__name__)

    def affectspath(self, path):
        # Whether makechanges() touches *path*; the Engine uses this to
        # re-run helpers after cleanup removes files they depend on.
        raise NotImplementedError("%s needs to implement .affectspath(path)" %
                                  self.__class__.__name__)
def cleanerfromdict(data):
    """Reconstruct a Cleaner from the dict produced by Cleaner.fulldict().

    Returns None when the cleaner cannot be restored (the module or class
    has disappeared, or the dict is malformed); this matches the caller's
    existing ``if cleaner is None: warn(...)`` handling in Engine.__init__,
    and resolves the FIXMEs that previously let these errors propagate.
    """
    from importlib import import_module

    try:
        # import the module that defined the cleaner class
        module = import_module(data["module"])
        # get the class from the module
        class_ = getattr(module, data["class"])
        # now load up the cleaner from its serialized parameters
        return class_.fromdict(data["params"])
    except (KeyError, ImportError, AttributeError):
        # module/class/params missing or no longer importable
        return None
class Cleaner(_AccessibleFacts):
    """Abstract interface for objects that undo a Helper's changes.

    Cleaners are serialized into the engine config via fulldict() so that a
    later run can reconstruct them with cleanerfromdict().
    """

    def fulldict(self):
        # Self-describing form: records module/class so cleanerfromdict()
        # can re-create the instance later.
        return {
            "module": self.__class__.__module__,
            "class": self.__class__.__name__,
            "params": self.asdict(),
        }

    def asdict(self):
        raise NotImplementedError(
            "%s needs to define .asdict() and "
            "@classmethod .fromdict(class_, data)" %
            self.__class__.__name__)

    @classmethod
    def fromdict(class_, data):
        raise NotImplementedError(
            "%s needs to define .asdict() and "
            "@classmethod .fromdict(class_, data)" %
            class_.__name__)

    @property
    def description(self):
        # Human-readable one-liner shown in cleanup progress output.
        raise NotImplementedError("%s needs to define @property .description" %
                                  self.__class__.__name__)

    def needsclaims(self):
        # Claims (strings) that must no longer be held by any helper
        # before this cleaner is allowed to run.
        raise NotImplementedError(
            "%s needs to implement .needsclaims() -> []" %
            self.__class__.__name__)

    def wantspath(self, path):
        # Whether this cleaner still needs *path* to stick around.
        raise NotImplementedError("%s needs to implement .wantspath()" %
                                  self.__class__.__name__)

    def issame(self, other):
        # Dedup identity: same concrete class AND equal parameters.
        return self.__class__ == other.__class__ and self.__eq__(other)

    def __eq__(self, other):
        raise NotImplementedError("%s needs to implement .__eq__(other)" %
                                  self.__class__.__name__)

    def __ne__(self, other):
        return not self.__eq__(other)

    def makechanges(self):
        # Perform the cleanup; returns a list of paths that were affected
        # (Engine.cleanup() extends its `affected` list with the result).
        raise NotImplementedError("%s needs to implement .makechanges()" %
                                  self.__class__.__name__)
class Engine(object):
    """Coordinates Helpers, Cleaners and owned filesystem paths.

    State (cleaners, owned/created/postponed paths) is persisted as JSON at
    *cfgpath* so that a later run can clean up artifacts created earlier.
    """

    # possible actions to take when a conflict occurs between cleaners
    RAISE = "__raise__"
    WARN = "__warn__"
    ASK = "__ask__"
    POSTPONE = "__postpone__"

    # kinds of filesystem paths a helper can own
    TYPE_FILE_ALL = "whole_file"
    TYPE_FILE_PART = "file"
    TYPE_FOLDER_ALL = "dir_and_children"
    TYPE_FOLDER_ONLY = "directory"
    TYPE_LINK = "symlink"

    def __init__(self, cfgpath):
        super(Engine, self).__init__()
        self._cfgpath = cfgpath
        self._old_cleaners = []
        self._new_cleaners = []
        self._helpers = []
        self._old_paths_owned = {}
        self._new_paths_owned = {}
        self._postponed = set()
        # keep track of which things we created ourselves
        self._created = set()
        self._only = set()
        self._section = None
        # another way of keeping track of things we've claimed
        self._claims = set()
        if os.path.isfile(cfgpath):
            with open(cfgpath, 'r') as f:
                raw = f.read()
            data = simplejson.loads(raw)
            if not isinstance(data, dict):
                raise Exception("Invalid json in %s" % cfgpath)
            for item in data.get('cleaners', []):
                cleaner = cleanerfromdict(item)
                if cleaner is None:
                    warn("No cleaner for %s" % repr(item))
                else:
                    self._old_cleaners.append(cleaner)
            self._old_paths_owned = data.get('paths_owned', {})
            # postponed/created are only meaningful for paths still owned
            for path in data.get('paths_postponed', []):
                if path in self._old_paths_owned:
                    self._postponed.add(path)
            for path in data.get('paths_created', []):
                if path in self._old_paths_owned:
                    self._created.add(path)

    def _savecfg(self):
        """Persist the current cleaner/path state back to the JSON config."""
        # start with the old cleaners
        cleaners = [c.fulldict() for c in self._old_cleaners]
        # append any new cleaners
        cleaners.extend([c.fulldict() for c in self._new_cleaners])
        paths_owned = {}
        for path in self._old_paths_owned:
            paths_owned[path] = self._old_paths_owned[path]
        for path in self._new_paths_owned:
            paths_owned[path] = self._new_paths_owned[path]
        data = dict(cleaners=cleaners,
                    paths_owned=paths_owned,
                    paths_postponed=list(self._postponed),
                    paths_created=list(self._created),
                    )
        dumped = simplejson.dumps(data, indent=' ' * 4)
        with open(self._cfgpath, 'w') as f:
            f.write(dumped)

    def _removecleaner(self, cleaner):
        """
        Remove the cleaner from the list if it already exists. Returns True if
        the cleaner was removed.
        """
        oldlen = len(self._old_cleaners)
        self._old_cleaners = [
            oldc for oldc in self._old_cleaners
            if not oldc.issame(cleaner)
        ]
        return len(self._old_cleaners) != oldlen

    def _addcleaner(self, cleaner):
        # add a cleaner (it is guaranteed not to exist in the old list)
        # NOTE we need to make sure it is only added once
        for c in self._new_cleaners:
            if c.issame(cleaner):
                return
        self._new_cleaners.append(cleaner)

    def onlysections(self, names):
        # limit execution to the named sections (empty set means "all")
        self._only.update(names)

    def pushsection(self, name):
        """Enter section *name*; returns True if it should be executed."""
        if self._section is not None:
            raise Exception("Cannot nest section %r inside section %r" %
                            (name, self._section))
        self._section = name
        return name in self._only or not len(self._only)

    def popsection(self, name):
        assert self._section == name
        self._section = None

    def run(self, helper):
        """Apply *helper*: record its claims/paths/cleaner and make changes."""
        assert isinstance(helper, Helper)
        cfg_modified = False

        # what claims does this helper make?
        # BUGFIX: this previously called update(*helper.getclaims()), which
        # unpacked each claim *string* into its individual characters, so
        # the whole-string membership tests in _tryclean() never matched.
        self._claims.update(helper.getclaims())

        # take ownership of paths
        for path, type_ in helper.pathsownable().items():
            cfg_modified = True
            self._new_paths_owned[path] = type_
            self._old_paths_owned.pop(path, None)

        # get a cleaner for this helper
        cleaner = helper.getcleaner()

        if helper.isdone():
            # if there is already a cleaner for this thing, add and remove it
            # so it hangs around. If there is no cleaner but the thing is
            # already done, it means we shouldn't be cleaning it up
            if cleaner is not None:
                cfg_modified = True
                if self._removecleaner(cleaner):
                    self._addcleaner(cleaner)
            note("{}: Already done".format(helper.description))
        else:
            # remove and add the cleaner so that we know it will try to clean
            # up, since we know we will be making the change
            if cleaner is not None:
                cfg_modified = True
                self._removecleaner(cleaner)
                self._addcleaner(cleaner)

            # if the helper isn't already done, tell it to do its thing now
            with note("{}: Running ...".format(helper.description)):
                # take ownership of any paths that don't exist yet!
                for path, type_ in helper.pathsownable().items():
                    if type_ in (self.TYPE_FILE_ALL, self.TYPE_FOLDER_ALL):
                        exists = path in self._created
                    elif type_ in (self.TYPE_FILE_PART, self.TYPE_FOLDER_ONLY):
                        exists = os.path.exists(path)
                    else:
                        exists = os.path.islink(path)
                    if not exists:
                        self._created.add(path)
                        cfg_modified = True
                if cfg_modified:
                    # save the updated config before we try anything
                    self._savecfg()
                    cfg_modified = False
                try:
                    helper.makechanges()
                except HelperError as err:
                    warn("Failed: %s" % err.args[0])

        self._helpers.append(helper)

        # save the config now if we were successful
        if cfg_modified:
            # save the updated config before we try anything
            self._savecfg()

    def cleanup(self, conflicts):
        """Run all leftover cleaners, then remove paths we created."""
        assert conflicts in (self.RAISE, self.WARN, self.POSTPONE, self.ASK)
        note("CLEANING UP %d items ..." % (
            len(self._old_cleaners) + len(self._created)))
        stack = list(self._old_cleaners)
        affected = []
        while len(stack):
            # NOTE(review): `deferred` is never appended to, so this loop
            # only ever makes a single pass; kept for structural parity.
            deferred = []
            for cleaner in stack:
                # TODO: do we still need this complexity?
                self._removecleaner(cleaner)
                self._tryclean(cleaner, conflicts, affected)
                self._savecfg()
            assert len(deferred) < len(stack), "Every cleaner wants to go last"
            stack = deferred

        # all old cleaners should now be finished, or delayed
        assert len(self._old_cleaners) == 0

        # re-run any helpers that touch the affected files
        for path in affected:
            for helper in self._helpers:
                if helper.affectspath(path) and not helper.isdone():
                    note("REDO: %s" % helper.description)
                    helper.makechanges()

        # now, clean up the old paths we found
        while len(self._old_paths_owned):
            before = len(self._old_paths_owned)
            for path in list(self._old_paths_owned.keys()):
                type_ = self._old_paths_owned[path]
                self._trycleanpath(path, type_, conflicts)
            if len(self._old_paths_owned) >= before:
                raise Exception("All paths want to delay cleaning")

    def pathstoclean(self):
        """Return {path: type} for every tracked path we created ourselves."""
        ret = {}
        for path, type_ in self._old_paths_owned.items():
            if path in self._created:
                ret[path] = type_
        for path, type_ in self._new_paths_owned.items():
            if path in self._created:
                ret[path] = type_
        return ret

    def _tryclean(self, cleaner, conflicts, affected):
        # if the cleaner is not needed, we get rid of it
        # FIXME try/except around the isneeded() call
        if not cleaner.isneeded():
            note("{}: Not needed".format(cleaner.description))
            return

        # run the cleaner now
        with note("Cleaning: {}".format(cleaner.description)):
            # if there are still helpers with claims over things the cleaner
            # wants to remove, then the cleaner needs to wait
            for claim in cleaner.needsclaims():
                if claim in self._claims:
                    note("Postponed: Something else claimed %r" % claim)
                    self._addcleaner(cleaner)
                    return

            try:
                affected.extend(cleaner.makechanges())
            except CleanupObstruction as err:
                why = err.args[0]
                if conflicts == self.RAISE:
                    raise
                if conflicts == self.POSTPONE:
                    note("Postponed: %s" % why)
                    # add the cleaner back in
                    self._addcleaner(cleaner)
                    return
                # NOTE: eventually we'd like to ask the user what to do, but
                # for now we just issue a warning
                assert conflicts in (self.WARN, self.ASK)
                # BUGFIX: previously referenced ``err.why``; the reason was
                # already extracted into ``why`` (err.args[0]) above.
                warn("Aborted: %s" % why)

    def _trycleanpath(self, path, type_, conflicts):
        def _discard():
            # stop tracking the path without touching the filesystem
            note(" Forgetting about %s %s" % (type_, path))
            self._old_paths_owned.pop(path)
            self._postponed.discard(path)
            self._created.discard(path)
            self._savecfg()

        def _remove():
            # remove the thing
            if type_ == self.TYPE_FOLDER_ONLY:
                # TODO: what do we do if the folder isn't empty?
                with note("Removing dir %s" % path):
                    try:
                        os.rmdir(path)
                    except OSError as err:
                        from errno import ENOTEMPTY
                        if err.errno == ENOTEMPTY:
                            warn("Directory not empty: {}".format(path))
                        else:
                            raise
            elif type_ == self.TYPE_FILE_ALL:
                note("Removing {}".format(path))
                os.unlink(path)
            elif type_ == self.TYPE_FILE_PART:
                # only remove a partially-owned file if nothing was written
                if os.stat(path).st_size == 0:
                    note("Removing empty {}".format(path))
                    os.unlink(path)
                else:
                    note("Refusing to remove non-empty {}".format(path))
            else:
                note("Removing link {}".format(path))
                os.unlink(path)
            _discard()

        def _postpone():
            # carry the path forward into the new config for a later run
            with note("Postponing cleanup of path: {}".format(path)):
                self._postponed.add(path)
                assert path not in self._new_paths_owned
                self._new_paths_owned[path] = type_
                self._old_paths_owned.pop(path)
                self._savecfg()

        # if we didn't create the path, then we don't need to clean it up
        if path not in self._created:
            return _discard()

        # if the path no longer exists, we have nothing to do
        if not _exists(path):
            return _discard()

        # if the thing has the wrong type, we'll issue an note() and just skip
        if type_ in (self.TYPE_FILE_PART, self.TYPE_FILE_ALL):
            correcttype = os.path.isfile(path)
        elif type_ in (self.TYPE_FOLDER_ONLY, self.TYPE_FOLDER_ALL):
            correcttype = os.path.isdir(path)
        else:
            assert type_ == self.TYPE_LINK
            correcttype = os.path.islink(path)
        if not correcttype:
            with note("Ignoring: Won't remove {} as it is no longer a {}"
                      .format(path, type_)):
                return _discard()

        # work out if there is another path we need to remove first
        for otherpath in self._old_paths_owned:
            if otherpath != path and isnecessarypath(path, otherpath):
                # If there's another path we need to do first, then don't do
                # anything just yet. NOTE: there is an assertion to ensure that
                # we can't get stuck in an infinite loop repeatedly not
                # removing things.
                return

        # if any helpers want the path, don't delete it
        wantedby = None
        for c in self._new_cleaners:
            if c.wantspath(path):
                wantedby = c
                break

        if not wantedby:
            for otherpath in self._new_paths_owned:
                if isnecessarypath(path, otherpath):
                    wantedby = otherpath

        if wantedby:
            # if we previously postponed this path, keep postponing it
            # while there are still things hanging around that want it
            if path in self._postponed:
                return _postpone()
            if conflicts == self.ASK:
                raise Exception("TODO: ask the user what to do")  # noqa
            # NOTE: options are:
            # A) postpone until it can run later
            # B) discard it
            if conflicts == self.RAISE:
                raise CleanupConflict(conflictpath=path, pathwanter=wantedby)
            if conflicts == self.POSTPONE:
                return _postpone()
            assert conflicts == self.WARN
            warn("Couldn't clean up path {}; it is still needed for {}"
                 .format(path, wantedby))
            return _discard()

        # if nothing else wants this path, clean it up now
        return _remove()
|
{
"content_hash": "2ee48eb938521a85ec17ee2f533003a7",
"timestamp": "",
"source": "github",
"line_count": 535,
"max_line_length": 79,
"avg_line_length": 36.27663551401869,
"alnum_prop": 0.5336974443528442,
"repo_name": "toomuchphp/terraform",
"id": "1a2287e8519996ff436039f1b3cb287f5b77f31e",
"size": "19408",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "homely/_engine2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15584"
}
],
"symlink_target": ""
}
|
"""Examples."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
{
"content_hash": "e55c74e891007491ba8138da8d7ceb66",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 38,
"avg_line_length": 31.25,
"alnum_prop": 0.728,
"repo_name": "AlphaSmartDog/DeepLearningNotes",
"id": "c1425398ec788f14e0eb840fcb968cf0ce5c7673",
"size": "809",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "Note-1 RNN-DNCζ©ζΆ/Note-1-2 PonderingDNCore L2ζ£εεη€ΊδΎ/sonnet/examples/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "76094"
},
{
"name": "Jupyter Notebook",
"bytes": "9463332"
},
{
"name": "Python",
"bytes": "8832736"
}
],
"symlink_target": ""
}
|
import socket
import time
import datetime
import sys
from threading import Thread
class Server:
    """Minimal threaded TCP accept loop.

    Binds to host:port, then hands every accepted connection to a
    per-client handler function running in its own Thread.
    """

    # NOTE(review): these are class-level attributes; `clients = {}` is a
    # mutable dict shared by every Server instance — confirm single-instance
    # use before creating more than one Server.
    TAG = "NO TAG"
    host = None
    port = None
    sock = None
    running = False
    clients = {}

    def __init__(self, tag, host, port):
        # `tag` only prefixes debug output (see debug()).
        self.sock = None
        self.host = host
        self.port = port
        self.TAG = tag

    #starts the FTP server
    def start(self, clientFunction):
        """Bind, listen, and accept clients until stop() clears `running`.

        `clientFunction(server, clientSocket, ip)` is started in a new
        Thread for each accepted connection.
        """
        if self.sock is None:
            #socket initialization
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self.sock.bind((self.host, self.port))
            # 3 s accept timeout so the loop can re-check `running` regularly.
            self.sock.settimeout(3)
            self.sock.listen(3)
            self.running = True
            self.debug("Server Started")
            #main thread loop, waiting for clients to connect
            while True:
                # `running` is a bool; `== 0` relies on False == 0.
                if self.running == 0:
                    break
                try:
                    #waiting for a client to connect
                    (clientSocket, address) = self.sock.accept()
                    ip = str(address[0]) + ":" + str(address[1])
                    #starting the client thread
                    clientThread = Thread(target=clientFunction, args=(self, clientSocket, ip))
                    clientThread.start()
                    #saving the client's socket
                    self.clients[ip] = {'socket': clientSocket, 'thread': clientThread}
                except:
                    # accept() timeouts land here; NOTE(review): this bare
                    # except also swallows real socket errors.
                    pass
            self.sock.close()
            self.debug("Server stopped")

    #Stops the server
    def stop(self):
        """Signal the accept loop to exit, then close and join every client."""
        self.debug("Stopping server and closing active connections")
        self.running = False
        for ip,client in self.clients.items():
            try:
                client['socket'].shutdown(socket.SHUT_RDWR)
                self.debug("["+ ip + "] Disconnecting")
            except:
                # Socket may already have been closed by the peer.
                pass
            client['thread'].join()

    #Debug output
    def debug(self, msg):
        """Print `msg` prefixed with this server's tag."""
        print((self.TAG + ": " + msg))

    def getStatus(self):
        """Return a one-line summary listing every client seen so far."""
        status = "Client List: "
        for ip,client in self.clients.items():
            status += "["+ ip + "] "
        return status
#Method called per connected client
def ftpClient(server, clientSocket, ip):
    """Per-connection honeypot handler impersonating a Pure-FTPd server.

    Sends the banner, then answers USER/PASS commands but always fails
    authentication. Loops until the peer disconnects or a send/recv/decode
    error occurs.
    """
    server.debug("["+ ip + "] Client Connected")
    #Pure-FTPd banner
    clientSocket.send(b'220---------- Welcome to Pure-FTPd [privsep] [TLS] ----------\r\n')
    clientSocket.send(b'220-You are user number 1 of 2 allowed.\r\n')
    now = bytes(datetime.datetime.now().strftime('%H:%M'),'utf-8')
    clientSocket.send(b'220-Local time is now '+ now + b'. Server port: ' + bytes(str(server.port), 'utf-8') + b'.\r\n')
    clientSocket.send(b'220-This is a private system - No anonymous login\r\n')
    clientSocket.send(b'220 You will be disconnected after 15 minutes of inactivity.\r\n')
    #Allows basic FTP commands
    while True:
        time.sleep(0.1)
        try:
            cmd = clientSocket.recv(1024)
            cmd_str = str(cmd, "utf-8") #dangerous in case a byte can't be a character
            if cmd == b'\r\n':
                # Bare CRLF keep-alive: deliberately ignored (`None` is a no-op).
                None
            elif cmd_str.lower().startswith("user ") and len(cmd_str) > 5:
                clientSocket.send(b"331 User "+ bytes(cmd_str[5:], "utf-8") + b" OK. Password required\r\n")
            elif cmd_str.lower().startswith("pass ") and len(cmd_str) > 5:
                # 1-5 s pseudo-random delay to mimic a real auth check, then fail.
                time.sleep( round((time.time() % 4) + 1) )
                clientSocket.send(b"530 Login authentication failed\r\n")
            else:
                clientSocket.send(b'530 You aren\'t logged in\r\n')
        except:
            # recv/send failure or undecodable bytes: drop the connection.
            break
    clientSocket.close()
    server.debug("["+ ip + "] Client disconnected")
# Start the fake FTP service on localhost and hand connections to ftpClient.
ftp = Server("FTP", "127.0.0.1", 1921)
ftpThread = Thread(target=ftp.start, args=(ftpClient,))
ftpThread.start()

# Interactive console loop. `should_quit` replaces the original `quit` flag,
# which shadowed the `quit` builtin; `while not should_quit` replaces the
# unidiomatic `while quit == False`.
should_quit = False
while not should_quit:
    cmd = input('Type q to quit, l to list clients\n')
    if cmd == 'q' or cmd == 'quit':
        should_quit = True
    if cmd == 'l' or cmd == 'list':
        print(ftp.getStatus())

# Shut down: stop the accept loop, close clients, and join the server thread.
ftp.stop()
ftpThread.join()
|
{
"content_hash": "cd14b2cfbabe2af3271b960701e93193",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 117,
"avg_line_length": 24.3125,
"alnum_prop": 0.650956869465867,
"repo_name": "tedgueniche/DummyPot",
"id": "027c214ac04a406320b5dc40d1160c94121d6562",
"size": "3501",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ftp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3501"
}
],
"symlink_target": ""
}
|
import json
import fdb
from datadog_checks.base import AgentCheck
# Select the FoundationDB client API version; must be called before opening
# a database and must be supported by the installed client library.
fdb.api_version(600)
class FoundationdbCheck(AgentCheck):
    """Datadog Agent check reporting FoundationDB metrics.

    Metrics are parsed from the cluster's special ``\\xff\\xff/status/json``
    key; optional user-configured raw-key queries are also submitted.
    """

    def __init__(self, name, init_config, instances):
        super(FoundationdbCheck, self).__init__(name, init_config, instances)
        # Database handle; opened lazily on first use.
        self._db = None

    def construct_database(self):
        """Open the database handle once (applying TLS options) and return it."""
        if self._db is not None:
            return self._db

        # TLS options. Each option has a different function name, so we cannot be smart with it without ugly code
        if 'tls_certificate_file' in self.instance:
            fdb.options.set_tls_cert_path(self.instance.get('tls_certificate_file'))
        if 'tls_key_file' in self.instance:
            fdb.options.set_tls_key_path(self.instance.get('tls_key_file'))
        if 'tls_verify_peers' in self.instance:
            fdb.options.set_tls_verify_peers(self.instance.get('tls_verify_peers').encode('latin-1'))

        if 'cluster_file' in self.instance:
            self._db = fdb.open(cluster_file=self.instance.get('cluster_file'))
        else:
            self._db = fdb.open()
        # Fix: previously fell through and returned None on the first call,
        # while returning the handle on subsequent calls.
        return self._db

    def fdb_status_data(self):
        """Return the raw JSON status blob read from the special status key."""
        self.construct_database()
        return self._db[u'\xff\xff/status/json'.encode(u'latin-1')]

    def check(self, _):
        """Entry point: fetch status, submit metrics, then run custom queries."""
        status_data = self.fdb_status_data()
        try:
            data = json.loads(status_data)
        except Exception:
            self.service_check("foundationdb.can_connect", AgentCheck.CRITICAL, message="Could not parse `status json`")
            raise
        self.check_metrics(data)
        self.check_custom_queries()

    def check_custom_queries(self):
        """Read user-configured raw keys and submit each via its configured method."""
        custom_queries = self.instance.get('custom_queries')
        if not custom_queries:
            return

        for query in custom_queries:
            # Fix: use .get() so a missing field is logged and skipped; direct
            # indexing raised KeyError before these validations could run.
            metric_prefix = query.get('metric_prefix')
            if not metric_prefix:
                self.log.error("custom query field `metric_prefix` is required")
                continue

            query_key = query.get('query_key')
            if not query_key:
                self.log.error("custom query field `query_key` is required for metric_prefix `%s`", metric_prefix)
                continue

            query_type = query.get('query_type')
            if not query_type:
                self.log.error("custom query field `query_type` is required for metric_prefix `%s`", metric_prefix)
                continue

            query_tags = query.get('tags')
            if not query_tags:
                self.log.error("custom query field `tags` is required for metric_prefix `%s`", metric_prefix)
                continue

            result = self._db[query_key.encode('UTF-8')]
            if not result:
                raise ValueError("No result for " + query_key)

            if not hasattr(self, query_type):
                self.log.error(
                    "invalid submission method `%s` for query key `%s` of metric_prefix `%s`",
                    query_type,
                    query_key,
                    metric_prefix,
                )
                # Fix: `continue` (was `break`) so one bad query does not
                # silently drop all remaining custom queries.
                continue
            getattr(self, query_type)(metric_prefix + '.' + query_key, float(result), tags=set(query_tags))

    def report_process(self, process):
        """Submit per-process metrics, tagged with the process address."""
        if "address" not in process:
            return
        tags = ["fdb_process:" + process["address"]]

        if "cpu" in process:
            self.maybe_gauge("foundationdb.process.cpu.usage_cores", process["cpu"], "usage_cores", tags)

        if "disk" in process:
            disk = process["disk"]
            self.maybe_gauge("foundationdb.process.disk.free_bytes", disk, "free_bytes", tags)
            self.maybe_gauge("foundationdb.process.disk.total_bytes", disk, "total_bytes", tags)
            if "reads" in disk:
                self.maybe_gauge("foundationdb.process.disk.reads.hz", disk["reads"], "hz", tags)
                self.maybe_count("foundationdb.process.disk.reads.count", disk["reads"], "count", tags)
            if "writes" in disk:
                self.maybe_gauge("foundationdb.process.disk.writes.hz", disk["writes"], "hz", tags)
                self.maybe_count("foundationdb.process.disk.writes.count", disk["writes"], "count", tags)

        if "memory" in process:
            memory = process["memory"]
            self.maybe_gauge("foundationdb.process.memory.available_bytes", memory, "available_bytes", tags)
            self.maybe_gauge("foundationdb.process.memory.limit_bytes", memory, "limit_bytes", tags)
            self.maybe_gauge(
                "foundationdb.process.memory.unused_allocated_memory", memory, "unused_allocated_memory", tags
            )
            self.maybe_gauge("foundationdb.process.memory.used_bytes", memory, "used_bytes", tags)

        if "network" in process:
            network = process["network"]
            self.maybe_gauge("foundationdb.process.network.current_connections", network, "current_connections", tags)
            self.maybe_hz_counter("foundationdb.process.network.connection_errors", network, "connection_errors", tags)
            self.maybe_hz_counter(
                "foundationdb.process.network.connections_closed", network, "connections_closed", tags
            )
            self.maybe_hz_counter(
                "foundationdb.process.network.connections_established", network, "connections_established", tags
            )
            self.maybe_hz_counter("foundationdb.process.network.megabits_received", network, "megabits_received", tags)
            self.maybe_hz_counter("foundationdb.process.network.megabits_sent", network, "megabits_sent", tags)
            self.maybe_hz_counter(
                "foundationdb.process.network.tls_policy_failures", network, "tls_policy_failures", tags
            )

        if "roles" in process:
            for role in process["roles"]:
                self.report_role(role, tags)

    def report_role(self, role, process_tags):
        """Submit per-role metrics; extends the process tags with the role name."""
        if "role" not in role:
            return
        tags = process_tags + ["fdb_role:" + role["role"]]

        self.maybe_hz_counter("foundationdb.process.role.input_bytes", role, "input_bytes", tags)
        self.maybe_hz_counter("foundationdb.process.role.durable_bytes", role, "durable_bytes", tags)
        self.maybe_diff_counter("foundationdb.process.role.queue_length", role, "input_bytes", "durable_bytes", tags)
        self.maybe_hz_counter("foundationdb.process.role.total_queries", role, "total_queries", tags)
        self.maybe_hz_counter("foundationdb.process.role.bytes_queried", role, "bytes_queried", tags)
        # NOTE: durable_bytes is submitted twice (also above); kept as-is to
        # avoid changing the emitted metric stream.
        self.maybe_hz_counter("foundationdb.process.role.durable_bytes", role, "durable_bytes", tags)
        self.maybe_hz_counter("foundationdb.process.role.finished_queries", role, "finished_queries", tags)
        self.maybe_hz_counter("foundationdb.process.role.keys_queried", role, "keys_queried", tags)
        self.maybe_hz_counter("foundationdb.process.role.low_priority_queries", role, "low_priority_queries", tags)
        self.maybe_hz_counter("foundationdb.process.role.mutation_bytes", role, "mutation_bytes", tags)
        self.maybe_hz_counter("foundationdb.process.role.mutations", role, "mutations", tags)
        self.maybe_gauge("foundationdb.process.role.stored_bytes", role, "stored_bytes", tags)
        self.maybe_gauge("foundationdb.process.role.query_queue_max", role, "query_queue_max", tags)
        self.maybe_gauge("foundationdb.process.role.local_rate", role, "local_rate", tags)
        self.maybe_gauge("foundationdb.process.role.kvstore_available_bytes", role, "kvstore_available_bytes", tags)
        self.maybe_gauge("foundationdb.process.role.kvstore_free_bytes", role, "kvstore_free_bytes", tags)
        self.maybe_gauge("foundationdb.process.role.kvstore_inline_keys", role, "kvstore_inline_keys", tags)
        self.maybe_gauge("foundationdb.process.role.kvstore_total_bytes", role, "kvstore_total_bytes", tags)
        self.maybe_gauge("foundationdb.process.role.kvstore_total_nodes", role, "kvstore_total_nodes", tags)
        self.maybe_gauge("foundationdb.process.role.kvstore_total_size", role, "kvstore_total_size", tags)
        self.maybe_gauge("foundationdb.process.role.kvstore_used_bytes", role, "kvstore_used_bytes", tags)
        if "data_lag" in role:
            self.maybe_gauge("foundationdb.process.role.data_lag.seconds", role["data_lag"], "seconds", tags)
        if "durability_lag" in role:
            self.maybe_gauge(
                "foundationdb.process.role.durability_lag.seconds", role["durability_lag"], "seconds", tags
            )
        if "grv_latency_statistics" in role:
            self.report_statistics(
                "foundationdb.process.role.grv_latency_statistics.default",
                role["grv_latency_statistics"],
                "default",
                tags,
            )
        self.report_statistics(
            "foundationdb.process.role.read_latency_statistics", role, "read_latency_statistics", tags
        )
        self.report_statistics(
            "foundationdb.process.role.commit_latency_statistics", role, "commit_latency_statistics", tags
        )

    def report_statistics(self, metric, obj, key, tags=None):
        """Submit count/min/max/percentile sub-metrics of a statistics dict."""
        if key in obj:
            statistics = obj[key]
            self.maybe_count(metric + ".count", statistics, "count", tags=tags)
            self.maybe_gauge(metric + ".min", statistics, "min", tags=tags)
            self.maybe_gauge(metric + ".max", statistics, "max", tags=tags)
            self.maybe_gauge(metric + ".p25", statistics, "p25", tags=tags)
            self.maybe_gauge(metric + ".p50", statistics, "p50", tags=tags)
            self.maybe_gauge(metric + ".p90", statistics, "p90", tags=tags)
            self.maybe_gauge(metric + ".p99", statistics, "p99", tags=tags)

    def check_metrics(self, status):
        """Walk the parsed `status json` document and submit cluster metrics."""
        if "cluster" not in status:
            raise ValueError("JSON Status data doesn't include cluster data")

        cluster = status["cluster"]
        if "machines" in cluster:
            self.gauge("foundationdb.machines", len(cluster["machines"]))
        if "processes" in cluster:
            self.gauge("foundationdb.processes", len(cluster["processes"]))
            self.count(
                "foundationdb.instances",
                sum(map(lambda p: len(p["roles"]) if "roles" in p else 0, cluster["processes"].values())),
            )
            role_counts = dict()
            for process_key in cluster["processes"]:
                process = cluster["processes"][process_key]
                self.report_process(process)
                if "roles" in process:
                    for role in process["roles"]:
                        if "role" in role:
                            rolename = role["role"]
                            if rolename in role_counts:
                                role_counts[rolename] += 1
                            else:
                                role_counts[rolename] = 1
            for role in role_counts:
                self.gauge("foundationdb.processes_per_role." + role, role_counts[role])

        if "data" in cluster:
            data = cluster["data"]
            self.maybe_gauge("foundationdb.data.system_kv_size_bytes", data, "system_kv_size_bytes")
            self.maybe_gauge("foundationdb.data.total_disk_used_bytes", data, "total_disk_used_bytes")
            self.maybe_gauge("foundationdb.data.total_kv_size_bytes", data, "total_kv_size_bytes")
            self.maybe_gauge(
                "foundationdb.data.least_operating_space_bytes_log_server",
                data,
                "least_operating_space_bytes_log_server",
            )
            if "moving_data" in data:
                self.maybe_gauge(
                    "foundationdb.data.moving_data.in_flight_bytes", data["moving_data"], "in_flight_bytes"
                )
                self.maybe_gauge("foundationdb.data.moving_data.in_queue_bytes", data["moving_data"], "in_queue_bytes")
                self.maybe_gauge(
                    "foundationdb.data.moving_data.total_written_bytes", data["moving_data"], "total_written_bytes"
                )

        if "datacenter_lag" in cluster:
            self.gauge("foundationdb.datacenter_lag.seconds", cluster["datacenter_lag"]["seconds"])

        if "workload" in cluster:
            workload = cluster["workload"]
            if "transactions" in workload:
                transactions = workload["transactions"]
                for k in transactions:
                    self.maybe_hz_counter("foundationdb.workload.transactions." + k, transactions, k)
            if "operations" in workload:
                operations = workload["operations"]
                for k in operations:
                    self.maybe_hz_counter("foundationdb.workload.operations." + k, operations, k)

        if "latency_probe" in cluster:
            for k, v in cluster["latency_probe"].items():
                self.gauge("foundationdb.latency_probe." + k, v)

        degraded_processes = 0
        if "degraded_processes" in cluster:
            self.gauge("foundationdb.degraded_processes", cluster["degraded_processes"])
            degraded_processes = cluster["degraded_processes"]

        if degraded_processes > 0:
            self.service_check("foundationdb.can_connect", AgentCheck.WARNING, message="There are degraded processes")
        else:
            self.service_check("foundationdb.can_connect", AgentCheck.OK)

    def maybe_gauge(self, metric, obj, key, tags=None):
        """Submit obj[key] as a gauge if the key is present."""
        if key in obj:
            self.gauge(metric, obj[key], tags=tags)

    def maybe_count(self, metric, obj, key, tags=None):
        """Submit obj[key] as a monotonic count if the key is present."""
        if key in obj:
            self.monotonic_count(metric, obj[key], tags=tags)

    def maybe_hz_counter(self, metric, obj, key, tags=None):
        """Submit an fdb rate/counter pair: `.hz` gauge plus `.counter` count."""
        if key in obj:
            if "hz" in obj[key]:
                self.gauge(metric + ".hz", obj[key]["hz"], tags=tags)
            if "counter" in obj[key]:
                self.monotonic_count(metric + ".counter", obj[key]["counter"], tags=tags)

    def maybe_diff_counter(self, metric, obj, a, b, tags):
        """Submit obj[a].counter - obj[b].counter as a gauge when both exist."""
        if a in obj and "counter" in obj[a] and b in obj and "counter" in obj[b]:
            self.gauge(metric, obj[a]["counter"] - obj[b]["counter"], tags=tags)
|
{
"content_hash": "234bd89c306bae704f630f684099afa9",
"timestamp": "",
"source": "github",
"line_count": 284,
"max_line_length": 120,
"avg_line_length": 50.19718309859155,
"alnum_prop": 0.5989057239057239,
"repo_name": "DataDog/integrations-core",
"id": "c80b418e9c938f0b515b0681fc825ff613965b28",
"size": "14372",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "foundationdb/datadog_checks/foundationdb/check.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "578"
},
{
"name": "COBOL",
"bytes": "12312"
},
{
"name": "Dockerfile",
"bytes": "22998"
},
{
"name": "Erlang",
"bytes": "15518"
},
{
"name": "Go",
"bytes": "6988"
},
{
"name": "HCL",
"bytes": "4080"
},
{
"name": "HTML",
"bytes": "1318"
},
{
"name": "JavaScript",
"bytes": "1817"
},
{
"name": "Kotlin",
"bytes": "430"
},
{
"name": "Lua",
"bytes": "3489"
},
{
"name": "PHP",
"bytes": "20"
},
{
"name": "PowerShell",
"bytes": "2398"
},
{
"name": "Python",
"bytes": "13020828"
},
{
"name": "Roff",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "241"
},
{
"name": "Scala",
"bytes": "7000"
},
{
"name": "Shell",
"bytes": "83227"
},
{
"name": "Swift",
"bytes": "203"
},
{
"name": "TSQL",
"bytes": "29972"
},
{
"name": "TypeScript",
"bytes": "1019"
}
],
"symlink_target": ""
}
|
class People(object):
    """Demo class exposing a shared class-level attribute via a static method."""

    # Shared by all uses of the class; no instance is ever required.
    country = "China"

    @staticmethod
    def getCountry():
        """Return the class-level ``country`` attribute."""
        return People.country
# Demo: a @staticmethod is callable on the class without creating an instance.
print(People.getCountry())
|
{
"content_hash": "3d37c469e97b77644b7f91cbab836f44",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 26,
"avg_line_length": 12.9,
"alnum_prop": 0.7286821705426356,
"repo_name": "onezens/python",
"id": "7da0974a2b02be4840cbaf203638fd129c142aeb",
"size": "146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "basic/13_staticmethod.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "709587"
},
{
"name": "Python",
"bytes": "45538"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from django.http import HttpResponse
from django.utils.datastructures import MultiValueDictKeyError
from core.common import *
from images.models import Image
from notes.models import Note
from posts.models import Post, PostMedia
from posts.forms import PostForm
from posts.views import post_form
from users.models import User
from videos.models import Video, generate_preview
import json, magic
def router(request):
    """Dispatch an AJAX request to a post handler chosen by its `action` field.

    POST and GET each have their own action table; any other HTTP method is
    rejected. `post_id` is optional and defaults to None when absent.
    """
    post_actions = {
        'save': save,
        'delete': delete,
        'like': toggle_like,
        'new_media': new_media,
        'delete_media': delete_media,
    }
    get_actions = {
        'get_form': get_form,
    }

    if request.method == 'POST':
        params, actions = request.POST, post_actions
    elif request.method == 'GET':
        params, actions = request.GET, get_actions
    else:
        return reject_method()

    try:
        post_id = params['post_id']
    except MultiValueDictKeyError:
        post_id = None
    # Unknown actions raise, exactly like the original dict-indexing dispatch.
    return actions[params['action']](request, post_id)
def save(request, post_id=None):
    """Create or update a post from an AJAX form submission.

    Returns JSON {'postId': <pk>}. When creating and the form is invalid,
    postId stays None and the form errors are silently dropped.
    """
    if post_id:
        # Editing an existing post.
        form = PostForm(
            request.POST,
            request=request,
            instance=Post.objects.get(pk=post_id))
        if form.is_valid():
            form.save()
    else:
        # NOTE(review): `user` is unused, but the lookup raises if the
        # requester has no User profile — confirm before removing it.
        user = User.objects.get(user=request.user)
        form = PostForm(request.POST, request=request)
        if form.is_valid():
            post = form.save()
            post_id = post.pk
    response = {'postId': post_id}
    return HttpResponse(json.dumps(response), content_type='application/json')
def delete(request, post_id=None):
    """Delete a post; only its author is allowed to do so."""
    post = Post.objects.get(pk=post_id)
    # Guard clause: anyone other than the author is rejected.
    if request.user != post.author.user:
        return reject_user()
    post.delete()
    payload = {'message': 'Post ' + format(post.pk) + ' deleted.'}
    return HttpResponse(json.dumps(payload), content_type='application/json')
def toggle_like(request, post_id=None):
    """Toggle the requesting user's 'like' on a post.

    Keeps the 'lik' notification in sync: created when liking, removed (if
    present) when un-liking. Returns an empty HTTP 200 response.
    """
    try:
        user = User.objects.get(user=request.user)
    except TypeError:
        # Anonymous requester: not allowed to like.
        return reject_user()
    post = Post.objects.get(pk=post_id)
    if user.likes.filter(pk=post_id).exists():
        # Already liked: remove the like and its notification, if any.
        user.likes.remove(post)
        try:
            Note.objects.get(
                author=user,
                post__pk=post_id,
                category='lik'
            ).delete()
        except Note.DoesNotExist:
            pass
    else:
        # Not yet liked: add the like and notify the post's author.
        user.likes.add(post)
        note = Note(
            author=user,
            recipient=post.author,
            post=post,
            category='lik')
        note.save()
    user.save()
    return HttpResponse()
def get_form(request, post_id=None):
    """Thin wrapper delegating to posts.views.post_form (AJAX form fetch)."""
    return post_form(request, post_id)
def new_media(request, garbage=None):
    """AJAX upload endpoint: attach uploaded image/video files to PostMedia rows.

    Expects a JSON `data` POST field describing the layout of each *new*
    upload (client-side ids prefixed with "temp"), plus the files under
    `uploads`. Returns a jQuery-File-Upload-style JSON payload with one
    entry per file; per-file errors are reported inside the payload.
    """
    response = {'files': []}
    # NOTE(review): `status` is never changed from 200 — errors are carried
    # in the JSON body, not the HTTP status.
    status = 200
    try:
        user = User.objects.get(user=request.user)
    except TypeError:
        # Anonymous requester: treated as not logged in.
        return reject_user()
    # Keep only the layout entries describing brand-new ("temp...") uploads,
    # aligned by index with the uploaded file list below.
    media_data = [i for i in json.loads(request.POST['data']) \
        if format(i['id'])[:4] == 'temp']
    for i, f in enumerate(request.FILES.getlist('uploads')):
        # Sniff the real MIME type from the first KB; format(...)[2:-1] strips
        # the b'...' repr wrapper, yielding e.g. ['image', 'png'].
        filetype = format(
            magic.from_buffer(f.read(1024), mime=True))[2:-1].split('/')
        if f.name == 'undefined':
            f.name += '.' + filetype[1]
        # NOTE(review): `f._size` is a private attribute of Django's
        # UploadedFile; the public equivalent is `f.size` — confirm.
        if f._size > settings.MAX_UPLOAD_SIZE:
            response['files'].append({
                'tempId': media_data[i]['id'],
                'name': f.name,
                'size': f._size,
                'label': 'oversize',
                'error': 'File size over limit: '
                    + format(settings.MAX_UPLOAD_SIZE / 10**6) + ' MB',
            })
        elif filetype[1] in settings.ACCEPTED_FILES:
            # Create the layout row first; the Image/Video rows reference it.
            media = PostMedia(uploader=user,
                width=media_data[i]['width'],
                height=media_data[i]['height'],
                unit_width=media_data[i]['size_x'],
                unit_height=media_data[i]['size_y'],
                row=media_data[i]['row'],
                col=media_data[i]['col'])
            media.save()
            if filetype[0] == 'image':
                image = Image(image=f, media=media,
                    mime=filetype[0] + '/' + filetype[1])
                image.save()
                image.generate_thumbnail()
                response['files'].append({
                    'id': media.pk,
                    'tempId': media_data[i]['id'],
                    'name': image.image.name,
                    'size': image.image.size,
                    'width': image.image.width,
                    'height': image.image.height,
                    'url': image.url,
                    'deleteUrl': '/p/handler/',
                    'deleteType': 'DELETE',
                })
            elif filetype[0] == 'video':
                video = Video(video=f, media=media, loop=media_data[i]['loop'],
                    mime=filetype[0] + '/' + filetype[1])
                video.save()
                video.generate_preview()
                video.generate_thumbnail()
                response['files'].append({
                    'id': media.pk,
                    'tempId': media_data[i]['id'],
                    'name': video.video.name,
                    'size': video.video.size,
                    'width': video.media.width,
                    'height': video.media.height,
                    'url': video.url,
                    'deleteUrl': '/p/handler/',
                    'deleteType': 'DELETE',
                })
        else:
            response['files'].append({
                'tempId': media_data[i]['id'],
                'name': f.name,
                'size': f._size,
                'label': 'badtype',
                'error': 'Filetype (' + filetype[1]
                    + ') not in list of allowed types: '
                    + ', '.join(settings.ACCEPTED_FILES),
            })
    return HttpResponse(json.dumps(response), content_type='application/json',
        status=status)
def delete_media(request, media_id):
    """AJAX endpoint removing an uploaded PostMedia row (owner only).

    NOTE(review): the `media_id` parameter is ignored — the id is read from
    request.POST['media_id'] instead; confirm which one callers rely on.
    Responds with the removed file name(s) keyed to True.
    """
    response = {'files': []}
    try:
        user = User.objects.get(user=request.user)
    except TypeError:
        # Anonymous requester: not allowed.
        return reject_user()
    media = PostMedia.objects.get(id=request.POST['media_id'])
    if media.uploader == user:
        if hasattr(media, 'image'):
            response['files'].append({
                media.image.image.name: True,
            })
        elif hasattr(media, 'video'):
            response['files'].append({
                media.video.video.name: True,
            })
        # Related image/video is deleted automatically.
        media.delete()
    else:
        return reject_user()
    return HttpResponse(json.dumps(response), content_type='application/json')
|
{
"content_hash": "70cde837b6856458ed21ffb0223513a8",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 76,
"avg_line_length": 24.222707423580786,
"alnum_prop": 0.6327744726879394,
"repo_name": "PrincessTeruko/TsunArt",
"id": "27e81c30529507316935ce03019473c5e95370b1",
"size": "5547",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "posts/handlers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23615"
},
{
"name": "HTML",
"bytes": "34809"
},
{
"name": "JavaScript",
"bytes": "47538"
},
{
"name": "Python",
"bytes": "114688"
},
{
"name": "Shell",
"bytes": "1392"
}
],
"symlink_target": ""
}
|
import json
# Public names exported by this module.
__all__ = ['ConsoleStorage']

# Scheme identifiers for this backend; presumably matched against a storage
# URL's scheme by the caller — confirm against the storage resolver.
SCHEME = ('cons', 'console', 'stdout')
class ConsoleStorage(object):
    """Storage backend that writes each measure to stdout as JSON."""

    def __init__(self, config, parsed):
        # `parsed` (the parsed scheme/URL) is accepted but not needed here.
        self.config = config

    def save(self, measure):
        """Serialize *measure* to JSON and print it to stdout.

        Logs an error instead of raising when the measure is not
        JSON-serializable.
        """
        try:
            # Bug fixes: `save` was missing `self` (uncallable as a method),
            # and `json.dump` — which requires a file object — was used, so
            # every call raised TypeError; `json.dumps` returns the string.
            print(json.dumps(measure))
        except TypeError:
            from logging import getLogger
            getLogger(__name__).error(
                "Measure could not be converted:\n{}".format(measure))
|
{
"content_hash": "503dc049ef89ab42a22b6061b442cb0e",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 74,
"avg_line_length": 23.210526315789473,
"alnum_prop": 0.5668934240362812,
"repo_name": "skoenen/pyprol",
"id": "957ffba9f9ae8841d52ca7696d075a8fe843cce9",
"size": "441",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyprol/storage/console_storage.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "136711"
},
{
"name": "DOT",
"bytes": "1091"
},
{
"name": "JavaScript",
"bytes": "58458"
},
{
"name": "Python",
"bytes": "57052"
},
{
"name": "Shell",
"bytes": "461"
}
],
"symlink_target": ""
}
|
# Interior width of the printed box, in characters (space between borders).
available_spaces = 60
# Prints
# NOTE(review): the box-drawing string literals in these print helpers appear
# mojibake'd in this copy of the file — verify the source encoding.
def print_start():
    """Print the box's top border row."""
    print('β{}β'.format('β' * available_spaces))
def print_empty():
    """Print an empty row: side borders with only spaces between them."""
    print('β{}β'.format(' ' * available_spaces))
def print_left(value, padding=1):
    """Print `value` left-aligned in the box, after `padding` leading spaces."""
    print('β{}{}β'.format(' ' * padding, value.ljust(available_spaces - padding)))
def print_center(value):
    """Print `value` centered within the box's interior width."""
    print('β{}β'.format(value.center(available_spaces)))
def print_right(value, padding=1):
    """Print `value` right-aligned in the box, before `padding` trailing spaces."""
    print('β{}{}β'.format(value.rjust(available_spaces - padding), ' ' * padding))
def print_separator():
    """Print a horizontal separator row spanning the box's interior width."""
    print('β{}β€'.format('β' * available_spaces))
def print_end():
    """Print the box's bottom border row."""
    print('β{}β'.format('β' * available_spaces))
def print_enter_to_continue():
    """Print a 'press enter' footer and block until the user presses enter."""
    print_separator()
    print_left('Press enter to continue.')
    print_end()
    input('β¬οΈ ')
def print_title(title):
    """Clear the screen, then print `title` centered in a fresh box header."""
    clear()
    print_start()
    print_center(title)
    print_separator()
def clear():
    """Crudely 'clear' the terminal by pushing previous output off-screen."""
    blank_lines = "\n" * 100
    print(blank_lines)
|
{
"content_hash": "836067ba637e8464c9cbc1499d9513e5",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 82,
"avg_line_length": 18.875,
"alnum_prop": 0.6037527593818984,
"repo_name": "LonamiWebs/Py-Utils",
"id": "e7aff427f7692fbd4eeaa00783e810ee4b68eaab",
"size": "979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mineutils/table_print.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "114706"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.