prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
# -*- coding: utf-8 -*-
impo | rt unittest
class TestExample(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("**************************************** setUpClass ****************************************")
@classmethod
def tearDownClass(cls):
print("************************************** tearDownClass **... | ***")
def setUp(self):
print("****** setUp *******")
def tearDown(self):
print("***** tearDown *****")
def _example(self):
print("This is a test example.")
|
#!/usr/bin/env python3
"""
Check of automatic algorithm rollover scenario.
"""
import collections
import os
import shutil
import datetime
import random
import subprocess
from subprocess import check_call
from dnstest.utils import *
from dnstest.keys import Keymgr
from dnstest.test import Test
from dnstest.module im... | , 1, 1, 1, desc + ": initial keys")
z = server.zones[zone[0].name];
z.get_module("onlinesign").algorithm = set_alg
z.get_module("onlinesign").key_size = key_len
server.gen_confile()
server.reload()
wait_for_rrsig_count(t, server, "SOA", 2, 20)
check_zone(server, zone, before_keys, 1, 1, 2,... | e, before_keys + after_keys, 2, 1, 2, desc + ": both algorithms active")
# wait for any change in CDS records
CDS1 = str(server.dig(ZONE, "CDS").resp.answer[0].to_rdataset())
t.sleep(3)
while CDS1 == str(server.dig(ZONE, "CDS").resp.answer[0].to_rdataset()):
t.sleep(1)
check_zone(server, zon... |
import numpy as np
import pytest
from numpy.testing import assert_allclose
try:
import scipy
except ImportError:
HAS_SCIPY = False
else:
HAS_SCIPY = True
import astropy.units as u
from astropy.timeseries.periodograms.lombscargle import LombScargle
from astropy.timeseries.periodograms.lombscargle._statisti... | Z, len(t),
from_normalization=normalization,
to_normalization='standard',
| chi2_ref=compute_chi2_ref(y, dy))
ls = LombScargle(t, y, dy, normalization='standard')
fap_std = ls.false_alarm_probability(Z_std, maximum_frequency=fmax,
method=method, method_kwds=kwds)
assert_allclose(fap, fap_std, rtol=0.1)
|
# -*- coding: utf-8 -*-
# Generated by | Django 1.11.4 on 2017-08-21 19:00
from __future__ import unicode_literals
from django.db import migrations, m | odels
class Migration(migrations.Migration):
dependencies = [
('books', '0007_auto_20170821_2052'),
]
operations = [
migrations.AlterField(
model_name='book',
name='slug',
field=models.SlugField(help_text='wykorzystywane w adresie strony', max_length=1... |
#!/usr/bin/env python
"""
.. py:currentmodule:: FileFormat.Results.Phirhoz
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
MCXRay phirhoz result file.
"""
# Script information for the file.
__author__ = "Hendrix Demers (hendrix.demers@mail.mcgill.ca)"
__version__ = ""
__date__ = ""
__copy... | lines[indexLine:]:
indexLine += 1
if line.startswith(self._label):
items = line.split('=')
self.intensity = float(items[-1])
break
|
self.depths_A = []
self.values = []
for _index in range(len(lines[indexLine:])):
line = lines[indexLine]
indexLine += 1
try:
items = line.split()
depth_A = float(items[0])
value = float(items[1])
... |
ass Telefone(AcoesTelefone):
def _get_telefone1(self):
return self._telefone1
def _set_telefone1(self,telefone):
self._telefone1 = telefone1
def _get_telefone2(self):
return self._telefone2
def _set_telefone2(self,telefone):
self._telefone2 = telefone2
telefone1 = property(_get_telefone1,_se... | e Óbito', null = True ,blank = True)
_idade = models.PositiveSmallIntegerField(verbose_name='Idade')
tutor = models.ForeignKey(TutorEndTel, on_delete = models.CASCADE, related_name='animais')
class Meta:
verbose_name_plural = "Animais"
abstract = True
def get_absolute_url(self):
return reverse('animal_detal... | args={'pk': self.pk})
class AcoesAnimal(AnimalAbs):
def __unicode__(self):
return u'%s' % (self.nome)
def __str__(self):
return u'%s' % (self.nome)
class Meta:
abstract = True
class Animal(AcoesAnimal):
def get_absolute_url(self):
return reverse('animal_detail', kwargs={'pk': self.pk})
def _get_... |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, mo | dels
class Migration(migrations.Migration):
dependencies = [
('layers', '24_to_26'),
]
operations = [
migrations.CreateModel(
name='QGISServerLayer',
fields=[
('layer', models.OneToOneField(primary_key=True, serialize=False, to='layers.Layer')),
... | bose_name=b'Base Layer Path')),
],
),
]
|
"""
A set of built-in default checks for the platform heartbeat endpoint
Other checks should be included in their respective modules/djangoapps
"""
from datetime import datetime, timedelta
from time import sleep, time
from django.conf import settings
from django.core.cache import cache
from django.db import connectio... | try:
cache.set(CACHE_KEY, CACHE_VALUE, 30)
return 'cache_set', True, u'OK'
except Exception as fail:
return 'cache_set', False, unicode(fail)
def check_cache_get():
""" Check getting a cache value
Returns:
(string, Boolean, unicode): A tuple containing the name of the ch... | ode
string of either "OK" or the failure message
"""
try:
data = cache.get(CACHE_KEY)
if data == CACHE_VALUE:
return 'cache_get', True, u'OK'
else:
return 'cache_get', False, u'value check failed'
except Exception as fail:
... |
import argpar | se
import requests
import logging
import pip._internal
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Get the nth version of a given package')
parser.add_argument('--package', type=str, required=True, help='The | PyPI you want to inspect')
parser.add_argument('--nth_last_version', type=int, default=1, help='The nth last package will be retrieved')
parser.add_argument('--prerelease', help='Get PreRelease Package Version', action='store_true')
parser.add_argument('--debug', help='Print debug information', action='sto... |
# pylint: disable=C0103,R0801
import sqlalchemy
import migrate
meta = sqlalchemy.MetaData()
# define the previous state of tenants
tenant = {}
tenant['id'] = sqlalchemy.Column('id', sqlalchemy.Integ | er, primary_key=True,
autoincrement=True)
tenant['name'] = sqlalchemy.Column('name', sqlalchemy.String(255), unique=True)
tenant['desc'] = sqlalchemy.Column('desc', sqlalchemy.String(255))
tenant['enabled'] = sqlalchemy | .Column('enabled', sqlalchemy.Integer)
tenants = sqlalchemy.Table('tenants', meta, *tenant.values())
# this column will become unique/non-nullable after populating it
tenant_uid = sqlalchemy.Column('uid', sqlalchemy.String(255),
unique=False, nullable=True)
def upgrade(migrate_engine):
meta.bind = migrate_e... |
from pygame import *
from key_dict import *
''' The pla | yer class '''
class Cursor:
def __init__(self, x, y, size):
self.x = int(x)
self.y = int(y)
self.size = size
self.speed = 1
self.cooldown = 0
self.block = 0
self.menu_switch = {'Build | ' : True}
self.menu_block = {
0 : 'Wall',
1 : 'Heavy tower',
2 : 'Light tower',
3 : 'Torch',
4 : 'Farm'}
def check_border(self, level, location):
if location <... |
# ==============================================================================
# Copyright 2019 - Philip Paquette
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without rest... | ""
run_in_separate_process(target=launch_ad | apter, timeout=240)
|
ment for Certbot.
When this module is run as a script, it takes the arguments that should
be passed to pip to install the Certbot packages as command line
arguments. If no arguments are provided, all Certbot packages and their
development dependencies are installed. The virtual environment will be
created with the nam... | return os.path.abspath(python_windows)
raise ValueError((
'Error, could not find python executable in venv path { | 0}: is it a valid venv ?'
.format(venv_path)))
def prepare_venv_path(venv_name):
"""Determines the venv path and prepares it for use.
This function cleans up any Python eggs in the current working directory
and ensures the venv path is available for use. The path used is the
VENV_NAME environ... |
# The name of the dashboard to be added to HORIZON['dashboards']. Required.
DASHBOARD = 'help_about'
DISABLED = False
# A list of | applications to be added to INSTALLED_APPS.
ADD_INSTALLED_APPS = [
'openstack_dashboard.dashb | oards.help_about',
]
|
from django.conf.urls import url
from core.views.generic import get_dashboard, delete
from users.views.individuals import RegisterView
from users.views.base import LoginView, logout_user
from core.views.dis | play import IndexView
urlpatterns = [#url(r'^$', LoginView.as_view(), name='index'),
url(r'^$', IndexView.as_view(), name='index'),
url(r'^login$', LoginView.as_view(), name='login'),
url(r'^logout$', logout_user, name='logout'),
... | url(r'^delete$', delete, name='delete'),
url(r'^dashboard$', get_dashboard, name='dashboard')] |
from requests import HTTPError
from database import Database
import simplejson as json
db = Database.getDatabaseConnection()["cras"]
from log_session import LogSession
import datetime
class DB:
def __init__(self):
pass
@staticmethod
def add_user(user_id, user_name, mail,picture,fcm_token):
... | )
| db[user_id]["log_sessions"][sup_id][len(db[user_id]["log_sessions"][sup_id]) - 1] = last_session
user.save()
except Exception,e:
print e
def get_user_by_id(user_id):
user = db[user_id]
return user
def db_exists(user_id):
try:
user = db[user_id]
... |
. However,
# where sphinx-quickstart hardcodes values in this file that you input, this
# file has been changed to pull from your module's metadata module.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autog... | ts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = | []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', metadata.package, meta... |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
#... | _id,
delegate_to=self.delegate_to
)
hook.create_bucket(bucket | _name=self.bucket_name,
resource=self.resource,
storage_class=self.storage_class,
location=self.location,
project_id=self.project_id,
labels=self.labels)
|
from rest_framework import serializers
from emotion_annotator.models import F | rameEmotions
class FrameEmotionsSerializer(serializers.Hype | rlinkedModelSerializer):
class Meta:
model = FrameEmotions
fields = ('video', 'frameTime', 'emotionType')
|
import unittest
import copy
import gc
import rpy2.rinterface as rinterface
rinterface.initr()
class SexpTestCase(unittest.TestCase):
def testNew_invalid(se | lf):
x = "a"
self.assertRaises(ValueError, rinterface.Sexp, x)
def testNew(self):
sexp = rinterface.baseenv.get("letters")
sexp_new = rinterface.Sexp(sexp)
idem = rinterface.baseenv.get( | "identical")
self.assertTrue(idem(sexp, sexp_new)[0])
sexp_new2 = rinterface.Sexp(sexp)
self.assertTrue(idem(sexp, sexp_new2)[0])
del(sexp)
self.assertTrue(idem(sexp_new, sexp_new2)[0])
def testTypeof_get(self):
sexp = rinterface.baseenv.get("letters")
self... |
from chainer import function
class Flatten(function.Function):
"""Flatten function."""
def forward(self, inputs):
self.retain_inputs(())
self._in_shape = inputs[0].shape
return inputs[0].ravel(),
def backward(self, inputs, grads):
return grads[0].reshape(self._in_shape),... | (2, 2)
>>> y = F.flatten(x)
>>> y.shape
(4,)
>>> y.data
array([1, 2, 3, 4])
>>> x = np. | arange(8).reshape(2, 2, 2)
>>> x.shape
(2, 2, 2)
>>> y = F.flatten(x)
>>> y.shape
(8,)
>>> y.data
array([0, 1, 2, 3, 4, 5, 6, 7])
"""
return Flatten()(x)
|
ERPOLATION_MODEL = "SpectraInterpolation"
KEY_VOXEL_SIMPLIFICATION = "VoxelSimplification"
KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR = "ElasticCrossSectionScalingFactor"
KEY_ENERGY_LOSS_SCALING_FACTOR = "EnergyLossScalingFactor"
class SimulationParameters(object):
def __init__(self):
self.version = c... | adlines()
extractMethods = self._createExtractMethod()
for line in lines:
line = line.strip()
for key in self._keys:
if line.startswith(key):
items = line.split('=')
self._parameters[key] = extractMethods[key](i... | formatMethods = self._createFormatMethod()
keys = self._createKeys()
for key in keys:
if key == KEY_SPECTRA_INTERPOLATION_MODEL:
value = formatMethods[key] % (self._parameters[key].getModel())
else:
value = formatMethods[key] % (self._para... |
"""
Unit tests for email feature flag in new instructor dashboard.
Additionally tests that bulk email is always disabled for
non-Mongo backed courses, regardless of email feature flag, and
that the view is conditionally available when Course Auth is turned on.
"""
from __future__ import absolute_import
from django.ur... | uire_course_email_auth=False)
response = self.client.get(self.url)
self.assertNotIn(self.email_lin | k, response.content)
# The flag is disabled and the course is not Mongo-backed (should not work)
def test_email_flag_false_mongo_false(self):
BulkEmailFlag.objects.create(enabled=False, require_course_email_auth=False)
response = self.client.get(self.url)
self.assertNotIn(self.email_lin... |
from SimpleLexicon import SimpleLexicon
from LOTlib.Evaluation.EvaluationException import RecursionDepthException
class RecursiveLexicon(SimpleLexicon):
"""
A lexicon where word meanings can call each other. Analogous to a RecursiveLOTHypothesis from a LOTHypothesis.
To achieve this, we require the LOThy... | n
def recursive_call(self, word, *args):
"""
This gets called internally on recursive calls. It keeps track of the depth to allow us to escape
"""
self.recursive_call_depth += 1
if self.recursive_call_depth > self.recur | sive_depth_bound:
raise RecursionDepthException
# print ">>>", self.value[word]
return self.value[word](self.recursive_call, *args) |
# | flake8: noqa
"""
Public testing utility functions.
"""
from pandas.util.testing import (
assert_frame_equal, assert_index_equal, asse | rt_series_equal)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-03-21 12:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
c | lass Migration(migrations.Migration):
dependencies = [("clusters", "0020_emr_release_model")]
operations = [
migrations.RenameField(
model_name="cluster", old_name="emr_release", new_name="emr_release_version"
),
migrations.AddField(
model_name="cluster",
... | field=models.ForeignKey(
blank=True,
help_text='Different AWS EMR versions have different versions of software like Hadoop, Spark, etc. See <a href="http://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-whatsnew.html">what\'s new</a> in each.',
null=True,
... |
from gettext import gettext as _
SECTION_ROOT = 'puppet'
DESC_ROOT = _('manage Puppet bindings')
def ensure_puppet_root(cli):
| """
Verifies that the root of puppet-related commands exists in the CLI,
creating it using constants from this module if it does not.
:param cli: CLI instance being configured
:type cli: pulp.client.extensions.core.PulpCli
"""
root_section = cli.find_section(SECTION_ROOT)
if root_section i... | CTION_ROOT)
|
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from flask.ext.mail import Mail
from flask.ext.moment import Moment
from flask.ext.sqlalchemy | import SQLAlchemy
from flask.ext.login import LoginManager
from config import config
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Fl... | nit_app(app)
mail.init_app(app)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
ret... |
import subprocess
def convert_chinese(text):
return subprocess.getoutput("echo '%s' | | opencc -c hk2 | s.json" % text) |
import sqlite3
from config import appConfig
def createTBLS(path=None):
conn = sqlite3.connect(path)
cursor = conn.cursor()
cursor.execute("""CREATE TABLE links(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE NOT NULL,
name TEXT NOT NULL
);""")
c... | FOREIGN KEY (links_id) REFERENCES links(id),
FOREIGN KEY (tags_id) REFERENCES tags(id)
);""")
conn.commit()
conn.close()
if __name__ == '__main__':
try:
path = appConfig.db_path
print path
createTBLS(str(path))
... | (str(e))
|
"""This module is part of Swampy, a suite of programs available from
allendowney.com/swampy.
Copyright 2011 Allen B. Downey
Distributed under the GNU General Public License at gnu.org/licenses/gpl.html.
"""
import math
from World import World
class CellWorld(World):
"""Contains cells and animals that move betwe... |
def unmark(self):
"""Unmarks this cell."""
self.marked = False
self.config(**self.unmarked_options)
def is_marked(self):
"""Checks whether this cell is marked."""
return self.marked
def toggle(self):
"""Toggles the state of this cell."""
... | rked():
self.unmark()
else:
self.mark()
if __name__ == '__main__':
world = CellWorld(interactive=True)
world.bind()
world.mainloop()
|
onfig()
cache = config.get("general", "package_cache")
cache_limit = config.get("general", "package_cache_limit")
cache_limit = int(cache_limit) if cache_limit else 0
cache_dir | = config.get("directories", "cached_packages_dir")
cache_dir = str(cache_dir) if cache_dir else '/var/cache/pisi/packages'
# If pisi.conf does not have it yet, default is use package cache
if not cache or cache == "True":
| enableCache = True
else:
enableCache = False
self.cacheEnabled = enableCache
self.cacheSize = cache_limit
self.settings.cacheGroup.setEnabled(self.cacheEnabled)
self.settings.useCacheCheck.setChecked(enableCache)
self.settings.useCacheSpin.setValue... |
#!/usr/bin/python3
import os, sys, random
import argparse
# this script processes all the log simulations in one dir, and writes the values of one particular attribute into one single file.
def prepareProcess(inputDir,simulationFile, separator, output, attribute ):
output = open(output, 'w')
simulation = open(inpu... |
simulation.readline()
for simulationLine in simulation:
previousLine = output.readline().strip('\n')+separator
splittedLine = simulationLine.split(separator)
value = splittedLine[attributeIndex]
outputTmp.write(previousLine+value+'\n')
simulation.close()
output.close()
outputTmp.close()
os.rename('t... | '-i', '--input', default='input', help='directory where simulated files are stored')
parser.add_argument('-o', '--output', default='results.csv', help='output file')
parser.add_argument('-s', '--separator', default=';', help='separator token between values')
parser.add_argument('-a', '--attribute', default='Number o... |
#!/usr/bin/python
# -*- coding: utf-8 -*-
|
"""
PyCOMPSs Testbench Arguments Warnings
=====================================
"""
# Imports
import unittest
from modules.testArgumentError import testArgum | entError
def main():
suite = unittest.TestLoader().loadTestsFromTestCase(testArgumentError)
unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == "__main__":
main()
|
import os
from configurations import values
from boto.s3.connection import OrdinaryCallingFormat
from {{cookiecutter.app_name}}.config.common import Common
try:
# Python 2.x
import urlparse
except ImportError:
# Python 3.x
from urllib import parse as urlparse
class Production(Common):
# Honor th... | dia files
# http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += ('storages',)
DEFAULT_F | ILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = values.Value('DJANGO_AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = values.Value('DJANGO_AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = values.Value('DJANGO_AWS_STORAGE_BUCKET_NAME')
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTR... |
basis in
repo_info. Defaults to "". Not required.
default_depth:
description:
Default clone depth (int) in case not specified
on an individual repo basis. Defaults to 10.
Not required.
retries:
description:
Integer number of retries allowed in case of git
clone failure. ... | d": False,
"type": "int",
"default": 0},
"force": {"required": False,
"type": "bool",
"default": False},
"core_multiplier": {"required": False,
| "type": "int",
"default": 4},
}
# Pull in module fields and pass into variables
module = AnsibleModule(argument_spec=fields)
git_repos = module.params['repo_info']
defaults = {
"path": module.params["default_path"],
"depth": module.params["defaul... |
import os;
f = open('depsVerified', 'w');
f.write('ok');
f. | close(); | |
from django.db import models
from django.utils import timezone
# Create your models here.
def formatDateTime(dateTime):
return timezone.localtime(dateTime).strftime("%Y-%m-%d %H:%M:%S")
class Beacon(models.Model):
macAddr = models.CharField(max_length=20, unique=True)
uuid = models.UUIDField(editable=... | ones """
externalId = models.CharField(max_length=32, unique=True)
def getDict(self):
dict = {}
dict['deviceId'] = self.externalId
return dict
def __str__(self):
return self.externalId
class Be | aconLog(models.Model):
time = models.DateTimeField(null=False)
rssi = models.IntegerField(null=False)
measurePower = models.IntegerField(null=False)
beacon = models.ForeignKey(Beacon, on_delete=models.CASCADE)
device = models.ForeignKey(DetectorDevice, on_delete=models.CA... |
import json
import os
from processes.postgres import Postgres
from processes.gather_exception import GatherException
try:
DB_SERVER = os.environ['DB_SERVER']
DB_PORT = os.environ['DB_PORT']
DB_DATABASE = os.environ['DB_DATABASE']
DB_USER = os.environ['DB_USER']
DB_PASSWORD = os.environ['DB_PASSW... | :
"""
This inserts the relevant json information
into the table kino.movies.
:param data: json data holding information on films.
"""
imdb_id = data['imdb_id']
omdb_movie_data = data['omdb_main']
tmdb_movie_data = da | ta['tmdb_main']
sql = """insert into kino.languages(language)
select y.language
from json_to_recordset(%s) x (original_language varchar(1000))
join kino.iso2language y
on x.original_language = y.iso3166
where language... |
"""A modest set of tools to work with Django models."""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant f... | tps://github.com/damienjones/sculpt-model-tools',
author='Damien M. Jones',
author_email='damien@codesculpture.com',
license='LGPLv2',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Lesser General Public License v2 ( | LGPLv2)',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='',
packages=find_packages(),
install_requires=[
'sculpt-common>=0.2',
],
# package_data={},
# data_files=[],
# entry_points={},
... |
from JumpScale import j
descr = """
This jumpscript returns network info
"""
category = " | monitoring"
organization = "jumpscale"
author = "kristof@incubaid.com"
license = "bsd"
version = "1.0"
roles = []
def action():
return j.sal.nettools.getNetworkInfo()
if __name__ == "__main__":
print(ac | tion())
|
kedObjects with the following members set:
offset
obj_type_num
obj_chunks (for non-delta types)
delta_base (for delta types)
decomp_chunks
decomp_len
crc32 (if compute_crc32 is True)
:raise ChecksumMismatch: if the checksum of t... | The header is variable length. If the MSB of each byte is set then it
indicates that the subsequent byte is still part of the header.
For the first byte the next MS bits are the type, which tells you the type
of object, and whether it is a delta. The LS byte is the lowest bits of the
size | . For each subsequent byte the LS 7 bits are the next MS bits of the
size, i.e. the last byte of the header contains the MS bits of the size.
For the complete objects the data is stored as zlib deflated data.
The size in the header is the uncompressed object size, so to uncompress
you need to just keep... |
#!/usr/bin/env python
"""
Copyright 2010-2018 University Of Southern California
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by appli... | getval(self, attr):
try:
val = self.cfgdict[attr]
except KeyError:
print("Invalid Source File - Missing attribute: %s" % (attr))
print("Exiting")
sys.exit(1)
return val
def parse_src(self, a_srcfile):
"""
This function calls bb... | e property file function
to get a dictionary of key, value pairs and then looks for a
the parameters needed by bbtoolbox
"""
self.cfgdict = bband_utils.parse_properties(a_srcfile)
val = self.getval("depth_to_top")
self.DEPTH_TO_TOP = float(val)
val = self.getval... |
#! /usr/bin/env python
###############################################################################
#
# simulavr - A simulator for the Atmel AVR family of microcontrollers.
# Copyright (C) 2001, 2002 Theodore A. Roth
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of th... | 001 000d dddd 1110' where 0 <= d <= 31 and d != {26,27}
Only registers PC, R26, R27 and Rd should be changed.
"""
def setup(self):
# Set the register values
self.setup_regs[self.Rd] = 0
self.setup_regs[Reg.R26] = (self.X & 0xff)
self.setup_regs[Reg.R27] = ((self.X >> 8) & 0xff)
# set up the val in memory... | thus we need to write to memory _at_ X - 1)
self.mem_byte_write( self.X - 1, self.Vd )
# Return the raw opcode
return 0x900E | (self.Rd << 4)
def analyze_results(self):
self.reg_changed.extend( [self.Rd, Reg.R26, Reg.R27] )
# check that result is correct
expect = self.Vd
got = self.anal_regs[self.R... |
import sys
import lofarpipe.support.lofaringredient as ingredient
from lofarpipe.support.baserecipe import BaseRecipe
from lofarpipe.support.remotecommand import RemoteCommandRecipeMixIn
from lofarpipe.support.remotecommand import ComputeJob
from lofarpipe.support.data_map import DataMap, validate_data_maps, \
... | s["minbaseline"],
self.inputs["maxbaseline"],
processed_ms_dir,
| fillrootimagegroup_exec,
self.environment,
sourcedb_item.file]
self.logger.info(
"Starting finalize with the folowing args: {0}".format(
arguments))
... |
"""
Functions and decorators for making sure the parameters they work on are of
iterable types.
Copyright 2014-2015, Outernet Inc.
Some rights reserved.
This software is free software licensed under the terms of GPLv3. See COPYING
file that comes with the source code, or http://www.gnu.org/licenses/gpl.txt.
"""
impor... | d not is_iterable(x) else x
for (i, x) in enumerate(args)]
# patch keyword arguments, if needed
| if keys:
for key in keys:
if not is_iterable(kwargs[key]):
kwargs[key] = [kwargs[key]]
# invoke ``fn`` with patched parameters
return fn(*args, **kwargs)
return wrapper
return decorator
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Copyright 2011 Yaşar Arabacı
This file is part of packagequiz.
packagequiz is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at y... | TY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import pyalpm
from pycman imp... | estion as q
from random import choice, randint
from sys import modules
config.init_with_config("/etc/pacman.conf")
localdb = pyalpm.get_localdb()
#questionTypes= (q.definition,q.depends,
# q.fileOwner,q.installedSize,
# q.packager)
types = [getattr(q, t) for t in dir(q) if str(type(getat... |
else:
| return self._set_closed()
def _get(self):
if self._is_opened():
return "OPEN"
else:
return "CLOSED"
class Shutter(object):
MANUAL,EXTERNAL,CONFIGURATION = range(3) # modes
MODE2STR = {MANUAL: ("MANUAL", "Manual mode"),
EXT... | range(3) # state
STATE2STR = { OPEN: ("OPEN", "Shutter is open"),
CLOSED: ("CLOSED", "Shutter is closed"),
UNKNOWN: ("UNKNOWN", "Unknown shutter state"),
}
"""
Generic shutter object
This interface should be used for all type of shutter (motor,... |
#!/usr/bin/env python3
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is generated, do not edit. Update BuildConfigGenerator.groovy and
# 3ppFetch.template instead.
import argparse
import json
imp... | _MODULE_NAME, version,
_FILE_EXT)
file_name = file_url.rsplit('/', 1)[-1]
partial_manifest = {
'url': [file_url],
'name': [file_name], |
'ext': '.' + _FILE_EXT,
}
print(json.dumps(partial_manifest))
def main():
ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
latest = sub.add_parser('latest')
latest.set_defaults(func=lambda _opts: do_latest())
download = sub.add_parser('get_url')
download.set_defaults... |
# -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.utils.translation import ugettext as _
class UserProfile(models.Model):
'''
... | ls.DateTimeField(auto_now=True, verbose_name=_('修改日期'))
class Meta:
db_table = "auth_userprofile"
verbose_name = _('用户附加信息')
verbose_name_plural = _('用户附加信息')
@receiver(pre_save, sender=User)
def pre_save_user_handler(sen | der, instance, **kwargs):
'''
保存用户前如果开启了EMAIL_AS_USERNAME, 需要将email字段设为username
'''
if settings.FEATURES.get('EMAIL_AS_USERNAME'):
if not instance.email or instance.email.strip() != instance.username.strip():
instance.email = instance.username
@receiver(post_save, sender=User)
def p... |
from __future__ import print_function
import sys
sys.path.append('..') # help python find cyton.py relative to scripts folder
from openbci import cyton as bci
import logging
import time
de | f printData(sample):
# os.system('clear')
print("----------------")
print("%f" % (sample.id))
print(sample.channel_data)
print(sample.aux_dat | a)
print("----------------")
if __name__ == '__main__':
# port = '/dev/tty.OpenBCI-DN008VTF'
port = '/dev/tty.usbserial-DB00JAM0'
# port = '/dev/tty.OpenBCI-DN0096XA'
baud = 115200
logging.basicConfig(filename="test.log", format='%(asctime)s - %(levelname)s : %(message)s', level=logging.DEBUG)... |
from src import model as mdl
class LaTeXPrinter(object):
def __init__(self, target_file_path):
self._target_file_path = target_file_path
def run(self):
with open(self._target_file_path, 'w') as output:
text = self._generate_text()
output.write(text)
def _generate... | self):
return ('\\sffamily\\bfseries ID & \\sffamily\\bfseries Descrizione & '
'\\sffamily\\bfseries Fonte & '
'\\sffamily\\bfseries Padre\\\\\n')
def _get_content(self):
for req_id in self._req_id_list:
req = mdl.dal.get_requirement(req_id)
| source = mdl.dal.get_source(req.source_id)
yield (req.req_id, req.description, source.name,
req.parent_id or '--')
def _get_caption_and_label(self):
return ('Elenco dei requisiti {0} {1}.'.format(
('funzionali' if self._req_type == 'F' else
'... |
#import logging
#logging.basicConfig(level=logging.INFO, datefmt='%H:%M:%S',
# format='%(asctime)s %(levelname)s: %(message)s')
import unittest
import SocketServer, socket
import random, time
import threading
import cStringIO
from datetime import datetime
from shapy import register_settings
registe... | aning-of-so-reuseaddr-setsockopt-option-linux
s = self.sock_client
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(self.client_addr)
s.connect(self.server_addr)
start = datetime.now()
# client -> server
sent = 0
while sent < self.fi... | l the server finishes reading data from its socket
# and closes the connection.
rcvd = s.recv(1024)
delay = total_seconds(datetime.now() - start)
#delay = delta.seconds + delta.microseconds/float(10**6)
tt = self.estimate_transfer_time(self.filesize, self.client_addr[0],... |
import os
impor | t jug.backends.redis_store
import jug.backends.file_store
import jug.backends.dict_store
from jug.backends.redis_store import redis
import pytest
# When TEST_REDIS is not set, disable the redis backend entirely by
# masking the imported module with None.
if not os.getenv('TEST_REDIS'):
    redis = None

# Resolve the connection-error class once, up front.  When redis is
# disabled (None) the attribute lookup raises AttributeError; fall back
# to a sentinel exception type the tests will never actually see.
# (Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
try:
    redisConnectionError = redis.ConnectionError
except AttributeError:
    redisConnectionError = SystemError
@pytest.fixture(scope... | , params=['file', 'dict', 'redis'])
def store(tmpdir, request):
if request.param == 'file':
tmpdir = str(tmpdir)
yield jug.backends.file_store.file_store(tmpdir)
jug.backends.file_store.file_store.remove_store(tmpdir)
elif request.param == 'dict':
yield jug.backends.dict_store.di... |
#!/usr/bin/python3
import sys
def process_import(filename, statement):
    """Emit a Graphviz edge from *filename* to every module named in an
    ``import a, b, c`` statement."""
    tokens = statement.replace(",", " ").split()
    # tokens[0] is the 'import' keyword itself; the rest are module names.
    for name in tokens[1:]:
        print('"%s" -> "%s"' % (filename, name))
def process_from(filename, statement):
    """Emit Graphviz edges for a ``from pkg import a, b`` statement:
    one  filename -> pkg -> name  chain per imported name."""
    tokens = statement.replace(",", " ").split()
    # Token layout: ['from', <package>, 'import', <name>, <name>, ...]
    package = tokens[1]
    for name in tokens[3:]:
        print('"%s" -> "%s" -> "%s"' % (filename, package, name))
def print_header():
    """Print the opening lines of the Graphviz digraph."""
    for line in ("digraph WeCase {", "ratio=2"):
        print(line)
def print_footer():... | ine.replace("\n", "")
if line.endswith(".py"):
filename = line
else:
if line.startswith("import"):
process_import(filename, line)
elif line.startswith("from"):
process_from(filename, line)
print_footer()
|
"""
A pretty-printing dump function for the ast module. The code was copied from
the ast.dump function and modified slightly to pretty-prin | t.
Alex Leone (acleone ~AT~ gmail.com), 2010-01-30
"""
from ast import *
def dump(node, annotate_fields=True, include_attri | butes=False, indent=' '):
"""
Return a formatted dump of the tree in *node*. This is mainly useful for
debugging purposes. The returned string will show the names and the values
for fields. This makes the code impossible to evaluate, so if evaluation is
wanted *annotate_fields* must be set to Fa... |
# flake8: noqa: F401
from pandas.core.arrays | .sparse.accessor import SparseAccessor, SparseFrameAccessor
from pandas.core.arrays.sparse.array import (
BlockIndex,
IntIndex,
SparseArray,
_make_index,
)
from pandas.core.arrays.sparse.dtype import Spars | eDtype
|
from stard.services import B | aseService
class Service(BaseService):
    """Service whose only dependency is the 'child' service."""

    def init_service(self):
        # NOTE(review): self.service('child') presumably resolves another
        # service by name (inherited from BaseService) -- confirm there.
        self.children = {self.service('child')}
|
if force_unicode(initial_value) != force_unicode(data_value):
return True
return False
def render(self, name, value, attrs=None):
# THIS IS A COPY OF django.forms.widgets.MultiWidget.render()
# (except for the last line)
# value is a list of values, each cor... | ue,
minimumInputLength: 3,
ajax: {
url: "%(ajax_url)s",
dataType: 'json',
data: function (term, page) {
| return {
q: term, // search term
language_code: '%(language_code)s'
};
},
results: function (data, page) {
return {
more: false,
results: $.map(data, ... |
from django.forms import Media
from wagtail.admin.staticfiles import versioned_static
# Feature objects: these are mapped to feature identifiers within the rich text
# feature registry (wagtail.core.rich_text.features). Each one implements
# a `construct_options` method which modifies an options dict | as appropriate to
# enable that feature.
# Additionally, a Feature object defines a media property
# (https://docs.djangoproject.com/en/stable/topics/forms/media/) to specify css/js
# files to import when the feature is active.
class Feature:
def __init__(self, js=None, css=None):
self.js = js or []
... | css = {}
for media_type, css_files in self.css.items():
css[media_type] = [versioned_static(css_file) for css_file in css_files]
return Media(js=js, css=css)
class BooleanFeature(Feature):
"""
A feature which is enabled by a boolean flag at the top level of
the options d... |
from contextlib import closing
from flask import current_app
from summer.app import create_app
from summer.db.connect import connect_db
def init_db():
    """Create the database schema by executing ./summer/schema.sql.

    Intended to be run once as a standalone script from the project
    root (see the __main__ guard below).
    """
    app = create_app('product')
    # Push an application context so code that relies on Flask's
    # current_app works while the schema is installed.
    # NOTE(review): the context is never popped -- acceptable for a
    # one-shot script, but worth confirming this is intentional.
    _context = app.app_context()
    _context.push()
    with closing(connect_db()) as db:
        with open('./summer/schema.sql', mode='r') as f:
            # Run the whole SQL file in one call (sqlite3-style cursor
            # API -- confirm connect_db() returns an sqlite3 connection).
            db.cursor().executescript(f.read())
        db.commit()
if __name__ == '__main__':
    init_db()
|
fr | om Model import *
| |
# Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, so... | obj.process(parameters)
self.context_instances.append(obj)
self.parameters = parameters
    def __enter__(self):
        # Enter every managed context object and hand the caller the list
        # of values returned by their __enter__ methods, in order.
        return [(obj.__enter__()) for obj in self.context_instances]
    def __exit__(self, type_, value, traceback):
        # Forward the exit call (including exception info, if any) to every
        # managed context in entry order.  Returns None, so exceptions are
        # never suppressed here.
        # NOTE(review): if one __exit__ raises, the remaining contexts are
        # skipped -- confirm that is acceptable for these instances.
        for context in self.context_instances:
            context.__exit__(type_, value, traceback)
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-64... | program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPL... | e details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import... |
import bleach
from pyramid.config import Configurator
from pyramid.authentication import AuthTktAuthenticationPolicy
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.session import UnencryptedCookieSessionFactoryConfig
from sqlalchemy import engine_from_config
from .models import (
DBSession,... | onfig.add_route('task_split',
'/project/{project:\d+}/task/{task:\d+}/split', xhr=True)
config.add_route( | 'task_validate',
'/project/{project:\d+}/task/{task:\d+}/validate',
xhr=True)
config.add_route('task_cancel_done',
'/project/{project:\d+}/task/{task:\d+}/cancel_done',
xhr=True)
config.add_route('task_comment',
... |
# DJANGO 1.10.5 LOCAL SETTINGS
# https://docs.djangopr | oject.com/en/1.10/topics/settings/
# ==================================================================================================
from .base import *
DEBUG = True
# APP CONFIGURATION
# https://docs.djangoproject.com/en/1.10/ref/applications
# ===================================================================... | ======================
# Add your local apps here
INSTALLED_APPS += []
|
(job_ids)", nl=True, tab=1)
self._print(message="finally:")
self._print(message="pl.log_pipeline_footer(l)", tab=1)
self._print(message="pl.log_info(l, '{0} pipeline finished with exit code {1}. Please check the logs.'.format(pipeline_name, rc))", tab=1)
self._print(message="sys.exit(rc)... | nl}"
"{ind}{ind}raise{nl}"
| "{ind}except:{nl}"
"{ind}{ind}job_rcs.put(98){nl}"
"{ind}{ind}traceback.print_exc()").format(ind=indent, nl=newline)
return expt_str
def _get_function_signatures(self):
newline = '\n'
indent = ' ' * 4
for t in self.tags:
c = se... |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import urlparse
def uc2utf8(input):
    """Encode a Python 2 ``unicode`` object as a UTF-8 byte string;
    any other value is returned unchanged.
    """
    ## argh! this feels wrong, but seems to be needed.
    # Python 2 only: the ``unicode`` builtin does not exist on Python 3.
    if type(input) == unicode:
        return input.encode('utf-8')
    else:
        return input
class URL:
"""
This class is for wrapping URLs into objects. It's use... | se ValueError("%s can't be joined with %s" % (self, path))
if path.path[0] == '/':
ret_path = uc2utf8(path.path)
else:
sep = "/"
if s | elf.path.endswith("/"):
sep = ""
ret_path = "%s%s%s" % (self.path, sep, uc2utf8(path.path))
return URL(urlparse.ParseResult(
self.scheme or path.scheme, self.netloc or path.netloc, ret_path, path.params, path.query, path.fragment))
def make(url):
"""Backward compatib... |
import notorm
import momoko
from tornado import gen
import psycopg2.extras
class AsyncRecord(notorm.record):
@gen.coroutine
def update(self, **args):
for k,v in args.items():
setattr(self, k, v)
cursor = yield notorm.db.execute(
self.update_... | self.insert_qry,
self.__dict__,
cursor_factory=psycopg2.extras.NamedTupleCursor)
| results = cursor.fetchone()
if results:
self.id = results[0]
|
from pylons import tmpl_context as c
from adhocracy.lib.auth import can
from util import render_tile, BaseTile
class VariantRow(object):
def __init__(self, tile, variant, poll):
self.tile = tile
self.variant = variant
self.poll = poll
if tile.frozen:
freeze_time = til... | = selection
self.selected = selection.selected
self.variant_polls = self.selection.variant_polls
@property
def has_variants(self):
return len(self.selection.page.variants) < 2
    @property
    def num_variants(self):
        # Number of alternative variants, excluding the head variant.
        # NOTE(review): assumes page.variants always includes the head
        # text -- confirm against the Page model.
        return len(self.selection.page.variants) - 1
@property
def selected_text(self):
variant = self.selected
if self.frozen:
freeze_time = self.selection.proposal.adopt_poll.begin_time
return self.selection.page.variant_at(variant, freeze_time)
else:... |
from _ | _future__ import absolute_import
# Start a Celery worker by executing:
# celery -A proj worker -l info
# Import available tasks
from proj.tasks import add, mul, xsum, fib
# Test short-running tasks
add.delay(2, 2)
mul.delay(10, 12)
xsum.delay(range(100))
fib.delay(10)
# Test medium-running tasks
fib. | delay(35)
fib.delay(35)
fib.delay(35)
|
import cherrypy
from cherrypy._cpcompat import ntou
from cherrypy.test import helper
class ETagTest(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def resource(self):
return "Oh wah ta goo Siam."
@cherrypy.expose... | .assertStatus("200 OK")
def test_errors(self):
self.getPage("/resource")
self.assertStatus(200)
etag = self.assertHeader('ETag')
# Test raising errors in page handler
self.getPage("/fail/412", headers=[('If-Match', etag)])
self.assertStatus(412)
self.getPage... | headers=[('If-Match', etag)])
self.assertStatus(304)
self.getPage("/fail/412", headers=[('If-None-Match', "*")])
self.assertStatus(412)
self.getPage("/fail/304", headers=[('If-None-Match', "*")])
self.assertStatus(304)
def test_unicode_body(self):
self.getPage("/uni... |
import unittest
import matmath
import numpy as np
import math
class TestMatrix(unittest.TestCase):
    def testRotX(self):
        """Rotate points 90 degrees about the X axis.

        Uses the row-vector convention (``point . matrix``) --
        presumably what matmath produces; confirm there.
        """
        mat = matmath.xRotationMatrix(math.radians(90))
        # A point on the X axis is unchanged by a rotation about X.
        pt = np.array([1, 0, 0, 1])
        npt = pt.dot(mat)
        np.testing.assert_almost_equal(npt, [1, 0, 0, 1])
        # +Y maps to +Z ...
        pt = np.array([0, 1, 0, 1])
        npt = pt.dot(mat)
        np.testing.assert_almost_equal(npt, [0, 0, 1, 1])
        # ... and +Z maps to -Y.
        pt = np.array([0, 0, 1, 1])
        npt = pt.dot(mat)
        np.testing.assert_almost_equal(npt, [0, -1, 0, 1])
    def testRotY(self):
        """Rotate a +Z point 90 degrees about the Y axis; it should land
        on +X (row-vector convention, as in testRotX)."""
        pt = np.array([0, 0, 1, 1])
        mat = matmath.yRotationMatrix(math.radians(90))
        npt = pt.dot(mat)
        np.testing.assert_almost_equal(npt, [1, 0, 0, 1])
def testRotZ(self):
pt = np.array([1, 0, 0, 1])
mat = matmath.zRotationMatrix(math.radians(90))
npt = pt.dot(mat)
... |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
clas | s Voropp(MakefilePackage):
"""Voro++ is a open source software library for the c | omputation of the
Voronoi diagram, a widely-used tessellation that has applications in many
scientific fields."""
homepage = "http://math.lbl.gov/voro++/about.html"
url = "http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz"
variant('pic', default=True,
description='Positi... |
# Copyright (c) 2012 Roberto Alsina y otros.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, pub... | OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# O | THERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from docutils import nodes
from docutils.parsers.rst import Directive, directives
CODE = """\
<iframe width="{width}"
height="{height}"
src="http://www.youtube.com/embed/{yid}?rel=0&hd=1&wmode=tr... |
def process(self):
#GUJARATI VOWEL SIGN CANDRA E
#GUJARATI VOWE | L CANDRA E
self.edit("GUJARATI")
self.edit("LETTER")
self.edit("DIGIT")
self.processAs("Helper Indic")
self.edit("VOWEL SIGN", "sign")
self.edit("VOWEL")
self.edit("SIGN")
self.edit("THREE-DOT NUKTA ABOVE", "threedotnuktaabove")
self.edit("TWO-CIRCLE NUKTA ABOVE", "twocirclenuktaab... | ()
if __name__ == "__main__":
from glyphNameFormatter.exporters import printRange
from glyphNameFormatter.tools import debug
printRange("Gujarati")
debug(0x0AFA) |
# -*- mode: python; indent-tabs-mode: nil; tab-width: 2 -*-
"""
aria_api.py - implements handlers which are for the Aria to talk to helvetic.
"""
from __future__ import absolute_import
from base64 import b16encode
from crc16 import crc16xmodem
from datetime import timedelta
from decimal import Decimal
from django.cont... | = [b16encode(x) for x in (mac, auth_code)]
scale = None
try:
scale = Scale.objects.get(hw_address=mac)
except Scale.DoesNotExist:
return HttpResponseBadRequest('Unknown scale: %s' % mac)
# Check authcode
if scale.auth_code is None or scale.auth_code == '':
scale.auth_code = auth_code
elif | scale.auth_code != auth_code:
return HttpResponseForbidden('Invalid auth code')
scale.battery_percent = battery_pc
fw_ver, unknown2, scale_now, measurement_count = struct.unpack('<LLLL', body[:16])
body = body[16:]
scale.fw_version = fw_ver
scale.save()
for x in range(measurement_count):
if len(bo... |
Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import sys, os, TestUtil, shlex
from threading import Thread
#
# Set nreplicas to a number N to test replication with N replicas.
#
#nrep... | # C# test/IceGrid/simple test with SSL.
#
overrideOptions += ("%s=\\\"%s\\\" ") % (key, value.replace('"', '\\\\\\"'))
return overrideOptions
def startIceGridNode(testdir):
iceGrid = TestUtil.getIceGridNod | e()
dataDir = os.path.join(testdir, "db", "node")
if not os.path.exists(dataDir):
os.mkdir(dataDir)
else:
cleanDbDir(dataDir)
overrideOptions = '" ' + iceGridNodePropertiesOverride()
overrideOptions += ' Ice.ServerIdleTime=0 Ice.PrintProcessId=0 Ice.PrintAdapterReady=0"'
sys.st... |
print(" | My script | ") |
# $Id$
#
import inc_const as const
PJSUA = ["--null- | audio --max-calls=1 $SIPP_URI"]
PJS | UA_EXPECTS = [[0, const.STATE_CONFIRMED, "v"]]
|
import paho.mqtt.publish as | publish
import paho.mqtt.client as mqtt
import socket
import json
from datetime import datetime
import configparser
'''
Author: GYzheng, guanggyz@gmail.com
###Server side
We have two topic, one is from client to server, the other one is from client to server
1. Server->Client : sc_topic
2. Client->Server : cs_to... | self.sc_topic = 'sc_'+topic
self.cs_topic = 'cs_'+topic
self.get_host_info()
self.subscribe_msg()
def send_command(self,cmd):
msg = self.json_generator(cmd,'run')#cmd,status
self.send_msg(msg)
def get_host_info(self):
self.host_name = socket.gethostname()
... |
from bravado_core.spec import Spec
import mock
from pyramid.config import Configurator
from pyramid.registry import Registry
import pytest
from swagger_spec_validator.common import SwaggerValidationError
import pyramid_swagger
from pyramid_swagger.model import SwaggerSchema
@mock.patch('pyramid_swagger.register_api_... | swagger_spec_validation': False,
}
mock_config = mock.Mock(
spec=Configurator, registry=mock.Mock(settings=settings))
pyramid_swagger.includeme(mock_config)
@mock.patch('pyramid_sw | agger.register_api_doc_endpoints')
def test_swagger_12_only(mock_register):
settings = {
'pyramid_swagger.schema_directory': 'tests/sample_schemas/good_app/',
'pyramid_swagger.swagger_versions': ['1.2']
}
mock_config = mock.Mock(registry=mock.Mock(settings=settings))
pyramid_swagger.incl... |
view"].customize_template_get(
request.cr, request.uid, xml_id, full=full, context=request.context)
@http.route('/website/get_view_translations', type='json', auth='public', website=True)
def get_view_translations(self, xml_id, lang=None):
lang = lang or request.context.get('lang')
... | e=True)
def seo_suggest(self, keywords):
url = "http://google.com/complete/search"
| try:
req = urllib2.Request("%s?%s" % (url, werkzeug.url_encode({
'ie': 'utf8', 'oe': 'utf8', 'output': 'toolbar', 'q': keywords})))
request = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
return []
xmlroot = ET.fromstring(reque... |
n Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed i... | eceived a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
import sys
from copy import deepcopy
from weboob.tools.log import getLogger, DEBUG_FILTERS
from weboob.tools.ordereddict import OrderedDict
from weboob.browser.pages import NextPage
from... | filters.html import AttributeNotFound, XPathNotFound
__all__ = ['DataError', 'AbstractElement', 'ListElement', 'ItemElement', 'TableElement', 'SkipItem']
class DataError(Exception):
    """
    Raised when data returned from pages is incoherent.
    """
def method(klass):
"""
Class-decorator to call it as a method.
... |
from P_14 import *
print( | "Shape of data: {}".format(iris_dataset["data"].shape | ))
input()
|
# encoding: utf-8
"""Gherkin step implementations for chart data features."""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
import datetime
from behave import given, then, when
from pptx.chart.data import (
BubbleChartData, Category, CategoryChartData, XyChartData
)... | HART_TYPE, xy_type)
data = (
('Series 1', ((-0.1, 0.5), (16.2, 0.0), (8.0, 0.2))),
('Series 2', ((12.4, 0.8), (-7. | 5, -0.5), (-5.1, -0.2)))
)
chart_data = XyChartData()
for series_data in data:
series_label, points = series_data
series = chart_data.add_series(series_label)
for point in points:
x, y = point
series.add_data_point(x, y)
context.chart = context.slide.sh... |
# -*- coding: utf-8 -*-
import pytest
from irc3.plugins import slack
pytestmark = pytest.mark.asyncio
async def test_simple_matches(irc3_bot_factory):
bot = irc3_bot_factory(includes=['irc3.plugins.slack'])
plugin = bot.get_plugin(slack.Slack)
setattr(plugin, 'config', {'token': 'xoxp-faketoken'})
as... | in = bot.get_plugin(slack.Slack)
setattr(plugin, 'config', {'t | oken': 'xoxp-faketoken'})
async def api_call(self, method, date=None):
return ({'user': {'name': 'daniel'}})
plugin.api_call = api_call
assert '@daniel' == await plugin.parse_text('<@U12345>')
assert 'user' == await plugin.parse_text('<@U12345|user>')
async def test_emoji_matches(irc3_bot_fa... |
import six
try:
from logging import NullHandler
except ImportErro | r: # Python 2.6
from logging import Handler
class NullHandler(Handler):
def emit(self, record):
pass
try:
from urllib import urlencode as format_query
except ImportError:
from urllib.parse import urlencode as format_query
try:
from urlparse import urlparse as parse_url
except Im... | or:
memoryview = buffer
def get_int(*args):
    """Return the character at the given (sequence, index) as an int.

    Digit characters are converted with ``int``; anything else falls
    back to its ordinal value.  Part of the Python 2/3 byte-indexing
    compatibility shims alongside get_character/get_byte.
    """
    # Fetch the character once -- the original called get_character
    # twice (in both the try and the except branch).
    character = get_character(*args)
    try:
        return int(character)
    except ValueError:
        return ord(character)
def get_character(x, index):
    """Return the element at *index* of *x* as a length-1 str, on both
    Python 2 and 3 (via get_byte)."""
    return chr(get_byte(x, index))
def get_byte(x, index):
    """Return the byte value at *index* of *x* as an int, on both
    Python 2 and 3 (delegates to six.indexbytes)."""
    return six.indexbytes(x, index)
def encode_string(x):
... |
from random import randint
from position import Position, Size
from block import Room, Block
class Room(object):
def __init__(self, pos_row=0, pos_col=0, rows=1, cols=1, fill=Block.empty,
left=Room.left, right=Room.right,
top=Room.top, bottom=Room.bottom,
top_le... | col - size.cols))
return cls.from_objects(pos, size)
class RoomList():
    def __init__(self):
        # Backing list of Room objects, in insertion order.
        self._room_list = []
    def __iter__(self):
        # Delegate iteration to the backing list.
        return iter(self._room_list)
    def __getitem__(self, key):
        # Support indexing and slicing exactly like the backing list.
        return self._room_list[key]
    def __len__(self):
        # Number of rooms currently stored.
        return len(self._room_list)
    def append(self, room):
        # Add *room* unconditionally; intersection checks, if any, are the
        # caller's responsibility.
        self._room_list.append(room)
def generate(self, num, min_pos, max_pos, min_size, max_size):
"""
Given a number of rooms, generate rooms that don't intersect
"""
for i in range(num):
room = Room.generate(... |
#!/usr/bin/python3
## @package domomaster
# Master daemon for D3 boxes.
#
# Developed by GreenLeaf.
import sys;
import os;
import random;
import string;
from hashlib import sha1
from subprocess import *
import socket;
sys.path.append("/usr/lib/domoleaf");
from DaemonConfigParser import *;
MASTER_CONF_FILE_BKP ... | ve');
file_to.writeValueFromSection('listen', 'port_slave', var);
var = file_from.getValueFromSection('listen', 'port_cmd');
file_to.writeValueFromSection('listen', 'port_cmd', var);
#connect
var = file_from.getValueFromSection('connect', 'port');
file_to.writeV | alueFromSection('connect', 'port', var);
#mysql
var = file_from.getValueFromSection('mysql', 'user');
file_to.writeValueFromSection('mysql', 'user', var);
var = file_from.getValueFromSection('mysql', 'database_name');
file_to.writeValueFromSection('mysql', 'database_name', var);
#greenleaf
... |
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Create missing snapshot revisions.
Create Date: 2017-01-05 23:10:37.257161
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from ggrc.mi... | pshottable objects."""
# copy pasted from ggrc.snapshoter.rules.Types.all
snapshot_objects = sorted([
"AccessGroup",
"Clause",
"Control",
"DataAsset",
"Facility",
"Market",
"Objective",
"OrgGroup",
"Product",
"Section",
"Vendor",
"Policy",
... | correction migrations can not be downgraded."""
|
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Matthias Luescher
#
# Authors:
# Matthias Luescher
#
# This file is part of edi.
#
# edi is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of... | the program version."
subparsers.add_parser(cls._get_short_command_name(),
help=help_text,
description=description_text)
def run_cli(self, _):
version = self.run()
print(version)
@staticmethod
def run():
| return get_edi_version()
|
#!/bin/python
import sys
import vlc
import os
import re
from tempfile import *
from gtts import gTTS
from remote2text import | RGBRemote2Text
parser = RGBRemote2Text(verbose=True)
while True:
ir_out = input()
response = parser.process(ir_out)
if response:
tts = gTTS(text=response, lang='pt')
tmp = NamedTemporaryFile(delete=False)
tts.write_to_fp(tmp)
path = os.path.join(gettempdir(), str(tmp.nam... | tmp.close()
|
# -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.db.models import Max, F
from django.shortcuts import render, get_object_or_404, redirect
from django.utils.decorators import method_decorator
from django.views.generic.edit i... | form_class = ChapterForm
template_name = 'chapter_work.html'
initial = {'button_submit': 'Добавить'}
story = None
@ | method_decorator(login_required)
@method_decorator(csrf_protect)
def dispatch(self, request, *args, **kwargs):
self.story = get_object_or_404(Story, pk=kwargs['story_id'])
if self.story.editable_by(request.user):
return CreateView.dispatch(self, request, *args, **kwargs)
else... |
raw_surn_data[_SURNAME_IN_LIST],
raw_surn_data[_CONNECTOR_IN_LIST])
return ' '.join(result.split())
return ''
def _raw_patro_surname_only(raw_surn_data_list):
"""method for the '1y' symbol: patronymic surname only"""
for raw_surn_data ... | raw_data[_SUFFIX])
return ' '.join(result.split())
    def _raw_fnln(self, raw_data):
        """Format raw name data as 'first-name surname suffix', with runs
        of whitespace collapsed to single spaces."""
        result = "%s %s %s" % (raw_data[_FIRSTNAME],
                        _raw_full_surname(raw_data[_SURNAME_LIST]),
                        raw_data[_SUFFIX])
        return ' '.join(result.split())
    def _raw_fn(self, raw_data):
        """Format raw name data as the first name only, with whitespace
        collapsed to single spaces."""
        result = raw_data[_FIRSTNAME]
        return ' '.join(result.split())
def set_name_format(self, formats):
raw_func_dict = {
Name.LNFN : self._raw_lnfn,
Name.FNLN : self._raw_f... |
from .enums import IncrementalSearchDirection
from .filters import SimpleFilter, Never
__all__ = (
'SearchState',
)
class SearchState(object):
"""
A search 'query'.
"""
__slots__ = ('text', 'direction', 'ignore_case')
    def __init__(self, text='', direction=IncrementalSearchDirection.FORWARD, ignore_case=Never()):
        # NOTE(review): Never() is a mutable-looking default argument;
        # presumably SimpleFilter instances are stateless/immutable so
        # sharing one across calls is safe -- confirm.
        assert isinstance(ignore_case, SimpleFilter)
        self.text = text                # The query string typed so far.
        self.direction = direction      # FORWARD or BACKWARD search.
        self.ignore_case = ignore_case  # SimpleFilter controlling case folding.
def __repr__(self):
return '%s(%r, direction=%r, ignore_case=%r)' % (
self.__... | and the other
way around.
"""
if self.direction == IncrementalSearchDirection.BACKWARD:
direction = IncrementalSearchDirection.FORWARD
else:
direction = IncrementalSearchDirection.BACKWARD
return SearchState(text=self.text, direction=direction, ignore_ca... |
oto import connect_s3
s3_avail = True
except ImportError: #pragma: no cover
s3_avail = False
#=================================================================
def is_http(filename):
    """Return True when *filename* is an http:// or https:// URL."""
    return any(filename.startswith(scheme)
               for scheme in ('http://', 'https://'))
#=================================================================
d... | one
def load(self, url, offset=0, length=-1):
"""
Determine loading method based on uri
"""
if is_http(url):
return self.load_http(url, offset, length)
elif is_s3(url):
return self.load_s3(url, offset, length)
else:
return self.loa... | _or_resource(url, offset, length)
def load_file_or_resource(self, url, offset=0, length=-1):
"""
Load a file-like reader from the local file system
"""
# if starting with . or /, can only be a file path..
file_only = url.startswith(('/', '.'))
# convert to filename... |
"""
logitech-m720-config - A config script for Logitech M720 button mappings
Copyright (C) 2017 Fin Christensen <christensen.fin@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either ver... | PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath (path... | e:
long_description = readme.read ()
setup (
name = "m720-config",
version = "0.0.1",
description = "A config script for Logitech M720 button mappings.",
long_description = long_description,
url = "",
author = "Fin Christensen",
author_email = "christensen.fin@gmail.com",
license = ... |
import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.Str | ingValidator):
d | ef __init__(
self,
plotly_name="family",
parent_name="layout.ternary.aaxis.title.font",
**kwargs
):
super(FamilyValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
... |
"""TailorDev Biblio
Bib | liography management with Django.
"""
__version__ = "2.0. | 0"
default_app_config = "td_biblio.apps.TDBiblioConfig"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.