code stringlengths 1 199k |
|---|
from netmiko import ConnectHandler
def main():
# Definition of routers
rtr1 = {
'device_type': 'cisco_ios',
'ip': '50.76.53.27',
'username': 'pyclass',
'password': '88newclass',
}
rtr2 = {
'device_type': 'cisco_ios',
'ip': '50.76.53.27',
'usern... |
""" IO classes for Omnivor input file
Copyright (C) 2013 DTU Wind Energy
Author: Emmanuel Branlard
Email: ebra@dtu.dk
Last revision: 25/11/2013
Namelist IO: badis functions to read and parse a fortran file into python dictonary and write it back to a file
The parser was adapted from: fortran-namelist on code.google wit... |
"""
IPMI power manager driver.
Uses the 'ipmitool' command (http://ipmitool.sourceforge.net/) to remotely
manage hardware. This includes setting the boot device, getting a
serial-over-LAN console, and controlling the power state of the machine.
NOTE THAT CERTAIN DISTROS MAY INSTALL openipmi BY DEFAULT, INSTEAD OF ipmi... |
from random import choice
from python.decorators import euler_timer
SQUARES = ["GO",
"A1", "CC1", "A2", "T1", "R1", "B1", "CH1", "B2", "B3",
"JAIL",
"C1", "U1", "C2", "C3", "R2", "D1", "CC2", "D2", "D3",
"FP",
"E1", "CH2", "E2", "E3", "R3", "F1", "F2", "U2", "F3",
... |
"""add tenant_id to lcm_subscriptions and lcm_op_occs
Revision ID: d6ae359ab0d6
Revises: 3ff50553e9d3
Create Date: 2022-01-06 13:35:53.868106
"""
from alembic import op
import sqlalchemy as sa
revision = 'd6ae359ab0d6'
down_revision = '3ff50553e9d3'
def upgrade(active_plugins=None, options=None):
op.add_column('vnf... |
"""Base classes and utilities for image datasets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import io
import os
import numpy as np
from tensor2tensor.data_generators import generator_utils
from tensor2tensor.data_generators import problem
from tensor... |
"""
Generate a toy dataset for the matrix factorisation case, and store it.
We use dimensions 100 by 50 for the dataset, and 10 latent factors.
As the prior for U and V we take value 1 for all entries (so exp 1).
As a result, each value in R has a value of around 20, and a variance of 100-120.
For contrast, the Sanger ... |
"""
felix.test.stub_utils
~~~~~~~~~~~~
Test utilities.
"""
import logging
import random
from collections import namedtuple
CommandOutput = namedtuple('CommandOutput', ['stdout', 'stderr'])
log = logging.getLogger(__name__)
test_time = 0
def set_time(value):
global test_time
test_time = value
log.debug("Time... |
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import ... |
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='publishablemodel',
name='id',
field=models.UUIDField(default=uuid.uui... |
from solum.api.controllers import common_types
from solum.api.controllers.v1.datamodel import types as api_types
class Extensions(api_types.Base):
"""extensions resource"""
extension_links = [common_types.Link]
"""This attribute contains Links to extension resources that contain
information about the e... |
"""Custom Exception Classes for Phylotyper Module
"""
class PhylotyperError(Exception):
"""Basic exception for errors raised by Phylotyper modules"""
def __init__(self, subtype, msg=None):
if msg is None:
msg = "An error occured for subtype {}".format(subtype)
super(PhylotyperError, ... |
import unittest
from cryptography.fernet import Fernet
from airflow import settings
from airflow.models import Variable, crypto
from tests.test_utils.config import conf_vars
class TestVariable(unittest.TestCase):
def setUp(self):
crypto._fernet = None
def tearDown(self):
crypto._fernet = None
... |
"""Tests for download_data."""
from unittest import mock
from google.protobuf import text_format
import tensorflow as tf
from tensorboard.backend.event_processing import plugin_event_multiplexer
from tensorboard.plugins import base_plugin
from tensorboard.plugins.hparams import api_pb2
from tensorboard.plugins.hparams ... |
from exaqute.ExaquteTask import *
from pycompss.api.task import task
from pycompss.api.api import compss_wait_on
from pycompss.api.api import compss_barrier
from pycompss.api.api import compss_delete_object
from pycompss.api.api import compss_delete_file
from pycompss.api.parameter import *
from pycompss.api.implement ... |
from rest_framework import generics, permissions, views, response,status
from .models import Account
from .serializers import AccountCreateSerializer, AccountSerializer, AuthenticateSerializer, \
UpdateAccountSerializer, AccountRetrieveSerializer
class AccountCreateView(generics.CreateAPIView):
queryset = Accou... |
__author__ = 'litleleprikon'
from random import randint
FIGURES = ['камень', 'бумага', 'ножницы']
FIG_LEN = len(FIGURES)
class Player:
"""
Player class is needed to store tactics and to generate figures by this tactic
-- Doctests --
>>> player = Player()
>>> player.figure in FIGURES
True
"""... |
from __future__ import print_function
import argparse
import collections
import hashlib
import os
import subprocess
import sys
import settings
OUTPUT_DIR = os.path.join(settings.PROJECT_DIR, 'build', 'tests')
Options = collections.namedtuple('Options', ['name', 'build_args', 'test_args'])
Options.__new__.__defaults__ =... |
import math
def isPrime(num):
    """Return True if num is a prime number, False otherwise."""
    if num < 2:
        return False  # 0 and 1 are not prime
    # A composite num must have a factor no greater than sqrt(num)
    # (e.g. 100 cannot have two factors both above 10), so testing
    # candidate divisors up to int(sqrt(num)) is sufficient.
    limit = int(math.sqrt(num)) + 1
    return all(num % divisor != 0 for divisor in range(2, limit))
def primeSieve(size):
... |
import pyparsing as pp
uninary_operators = ("not", )
binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne",
u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥",
u"≤", u"like" "in")
multiple_operators = (u"and", u"or", u"∧", u"∨")
operator = pp.Regex(u"|".... |
from solum import objects
from solum.objects import extension as abstract_extension
from solum.objects import operation as abstract_operation
from solum.objects import plan as abstract_plan
from solum.objects import sensor as abstract_sensor
from solum.objects import service as abstract_srvc
from solum.objects.sqlalche... |
default_app_config = 'providers.com.dailyssrn.apps.AppConfig' |
from distutils.core import setup
V = "0.7"
setup(
name = 'mooncake_utils',
packages = ['mooncake_utils'],
version = V,
description = 'just a useful utils for mooncake personal project.',
author = 'mooncake',
author_email = 'hi.moonlight@gmail.com',
url = 'https://github.com/ericyue/mooncake_utils',
down... |
import collections
import copy
import datetime
import re
import mock
import six
from osprofiler import profiler
from osprofiler.tests import test
class ProfilerGlobMethodsTestCase(test.TestCase):
def test_get_profiler_not_inited(self):
profiler.clean()
self.assertIsNone(profiler.get())
def test_... |
"""Tests for the Google Chrome Cache files event formatter."""
import unittest
from plaso.formatters import chrome_cache
from tests.formatters import test_lib
class ChromeCacheEntryEventFormatterTest(test_lib.EventFormatterTestCase):
"""Tests for the Chrome Cache entry event formatter."""
def testInitialization(sel... |
macimport os
import subprocess
name = "gobuildmaster"
current_hash = ""
for line in os.popen("md5sum " + name).readlines():
current_hash = line.split(' ')[0]
for line in os.popen('cp ' + name + ' old' + name).readlines():
print line.strip()
for line in os.popen('go build').readlines():
print line.strip()
si... |
import unittest2
import helper
import simplejson as json
from nose.plugins.attrib import attr
PORTAL_ID = 62515
class ListsClientTest(unittest2.TestCase):
"""
Unit tests for the HubSpot List API Python wrapper (hapipy) client.
This file contains some unittest tests for the List API.
Questions, comments,... |
import json
from six.moves.urllib import parse as urllib
from tempest_lib import exceptions as lib_exc
from tempest.api_schema.response.compute.v2_1 import images as schema
from tempest.common import service_client
from tempest.common import waiters
class ImagesClientJSON(service_client.ServiceClient):
def create_i... |
"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
... |
import sys
import os
import json
import grpc
import time
import subprocess
from google.oauth2 import service_account
import google.oauth2.credentials
import google.auth.transport.requests
import google.auth.transport.grpc
from google.firestore.v1beta1 import firestore_pb2
from google.firestore.v1beta1 import firestore_... |
# Make the local "helper" directory importable before pulling in the
# session helper below.
import sys
sys.path.append("helper")
import web
from helper import session

# Disable web.py debug mode (turns off auto-reload / in-browser tracebacks).
web.config.debug = False

# URL routing table: each path pattern is followed by the dotted name of
# the controller that handles it.
urls = (
    "/", "controller.start.index",
    "/1", "controller.start.one",
    "/2", "controller.start.two",
)

app = web.application(urls, globals())
# Shared session store; presumably used by the controller classes — verify
# against controller.start.*.
sessions = session.Sessions()

if __name__ == "__main__":
    app.run()
from typing import List
class Solution:
def partitionLabels(self, S: str) -> List[int]:
lastPos, seen, currMax = {}, set(), -1
res = []
for i in range(0, 26):
c = chr(97+i)
lastPos[c] = S.rfind(c)
for i, c in enumerate(S):
# Encounter new index hig... |
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def mnist_many_cols_gbm_large():
train = h2o.import_file(path=pyunit_utils.locate("bigdata/laptop/mnist/train.csv.gz"))
train.tail()
gbm_mnist = H2OGradientBoostingEstima... |
from __future__ import print_function
import sys
if sys.version_info[0] > 2:
import tkinter
else:
import Tkinter as tkinter
from PIL import Image, ImageTk
class UI(tkinter.Label):
def __init__(self, master, im):
if isinstance(im, list):
# list of images
self.im = im[1:]
... |
import copy
from oslo_log import log as logging
from oslo_versionedobjects import base as object_base
from cyborg.common import exception
from cyborg.db import api as dbapi
from cyborg.objects import base
from cyborg.objects import fields as object_fields
from cyborg.objects.deployable import Deployable
from cyborg.obj... |
class Solution:
def isValidSerialization(self, preorder):
"""
:type preorder: str
:rtype: bool
"""
arr_pre_order = preorder.split(',')
stack = []
for node in arr_pre_order:
stack.append(node)
while len(stack) > 1 and stack[-1] == '#' an... |
import tensorflow as tf
import numpy as np
x_shape = [5, 3, 3, 2]
x = np.arange(reduce(lambda t, s: t*s, list(x_shape), 1))
print x
x = x.reshape([5, 3, 3, -1])
print x.shape
X = tf.Variable(x)
with tf.Session() as sess:
m = tf.nn.moments(X, axes=[0])
# m = tf.nn.moments(X, axes=[0,1])
# m = tf.nn.moments(X... |
"""Template shortcut & filters"""
import os
import datetime
from jinja2 import Environment, FileSystemLoader
from uwsgi_sloth.settings import ROOT
from uwsgi_sloth import settings, __VERSION__
template_path = os.path.join(ROOT, 'templates')
env = Environment(loader=FileSystemLoader(template_path))
def friendly_time(mse... |
import json
import mock
import unittest
from zvmsdk.sdkwsgi.handlers import version
from zvmsdk import version as sdk_version
class HandlersRootTest(unittest.TestCase):
def setUp(self):
pass
def test_version(self):
req = mock.Mock()
ver_str = {"rc": 0,
"overallRC": 0,
... |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Journey'
db.create_table('places_journey', (
('id', self.gf('django.db.models.fields.AutoField')(primary_ke... |
import ee
import math
from cmt.mapclient_qt import addToMap
from cmt.util.miscUtilities import safe_get_info
import modis_utilities
'''
Contains implementations of several simple MODIS-based flood detection algorithms.
'''
def dem_threshold(domain, b):
'''Just use a height threshold on the DEM!'''
heightLevel =... |
__author__ = 'q00222219@huawei'
import time
from heat.openstack.common import log as logging
import heat.engine.resources.cloudmanager.commonutils as commonutils
import heat.engine.resources.cloudmanager.constant as constant
import heat.engine.resources.cloudmanager.exception as exception
import pdb
LOG = logging.getLo... |
"""Neural network operations."""
from __future__ import absolute_import as _abs
from . import _make
def conv2d(data,
weight,
strides=(1, 1),
padding=(0, 0),
dilation=(1, 1),
groups=1,
channels=None,
kernel_size=None,
data_layout="NC... |
"""
Authors: Tim Hessels
UNESCO-IHE 2016
Contact: t.hessels@unesco-ihe.org
Repository: https://github.com/wateraccounting/wa
Module: Collect/MOD17
Description:
This module downloads MOD17 GPP data from
http://e4ftl01.cr.usgs.gov/. Use the MOD17.GPP_8daily function to
download and create 8 daily GPP images in G... |
"""Term aggregations."""
from __future__ import unicode_literals
from timesketch.lib.aggregators import manager
from timesketch.lib.aggregators import interface
def get_spec(field, limit=10, query='', query_dsl=''):
"""Returns aggregation specs for a term of filtered events.
The aggregation spec will summarize ... |
import os
from textwrap import dedent
import unittest
from eventlet.green import ssl
import mock
from six.moves.configparser import NoSectionError, NoOptionError
from swift.common.middleware import memcache
from swift.common.memcached import MemcacheRing
from swift.common.swob import Request
from swift.common.wsgi impo... |
import datetime
import webob
from cinder.api.contrib import volume_type_access as type_access
from cinder.api.v2 import types as types_api_v2
from cinder import context
from cinder import db
from cinder import exception
from cinder import test
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_c... |
from src.utils import labels as utils_labels
from src.utils import load_ncbi_taxinfo
from src import binning_classes
import matplotlib
matplotlib.use('Agg')
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
import matplotlib.ticker as ticker
import numpy as np
import os, sys, ins... |
import os
import datetime
from jinja2 import Environment,PackageLoader,TemplateNotFound
from hotzenplotz.openstack.common import cfg
from hotzenplotz.openstack.common import log as logging
from hotzenplotz.openstack.common import utils
from hotzenplotz.common import exception
from hotzenplotz.api import validator
LOG =... |
from must import MustHavePatterns
from successor import Successor
class TestSuccessor(object):
@classmethod
def setup_class(cls):
cls.test_patterns = MustHavePatterns(Successor)
def test_successor(self):
try:
self.test_patterns.create(Successor)
raise Exception("Recur... |
import csv, math, time, re, threading, sys
try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen
class ErAPI():
# Metodo constructor, seteos basicos necesarios de configuracion, instancia objetos utiles
def __init__(self):
self.data = {}
# Data format: {'... |
"""Unit tests for the NetApp-specific NFS driver module."""
from lxml import etree
import mock
import mox
from mox import IgnoreArg
from mox import IsA
import os
from cinder import context
from cinder import exception
from cinder.image import image_utils
from cinder.openstack.common.gettextutils import _
from cinder.op... |
import os
import mock
import six
import yaml
from heat.common import config
from heat.common import exception
from heat.common import template_format
from heat.tests.common import HeatTestCase
from heat.tests import utils
class JsonToYamlTest(HeatTestCase):
def setUp(self):
super(JsonToYamlTest, self).setUp... |
import mock
from oslo_config import cfg
import requests
from mistral.actions import std_actions
from mistral.db.v2 import api as db_api
from mistral.services import workflows as wf_service
from mistral.tests.unit import base as test_base
from mistral.tests.unit.engine import base
from mistral.workflow import states
cfg... |
import re
import unicodedata
from collections import defaultdict
from typing import Any, Dict, List, Optional, Sequence, Union
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db.models.query import QuerySet
from django.forms.models import model_to_dict
from django.utils.t... |
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import hashlib
import os
from elasticsearch import Elasticsearch, TransportError
from elasticsearch.helpers import bulk_index
from warehouse.utils import AttributeDict
class Index(object):
_index = "warehouse"
... |
from __future__ import print_function
import wx
import threading
import lcm
import random
import Forseti
import configurator
BLUE = (24, 25, 141)
GOLD = (241, 169, 50)
class TeamPanel(wx.Panel):
def __init__(self, remote, letter, number, name, colour, *args, **kwargs):
super(TeamPanel, self).__init__(*args,... |
import proto # type: ignore
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1", manifest={"SpecialistPool",},
)
class SpecialistPool(proto.Message):
r"""SpecialistPool represents customers' own workforce to work on
their data labeling jobs. It includes a group of specialist
managers and ... |
"""Prints the env_setup banner for cmd.exe.
This is done from Python as activating colors and printing ASCII art are not
easy to do in cmd.exe. Activated colors also don't persist in the parent
process.
"""
from __future__ import print_function
import argparse
import os
import sys
from .colors import Color, enable_colo... |
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
from __future__ import print_function
import atexit
import copy
import json
import os
import re
import shutil
import subprocess
import tempfile
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic... |
"""Train a ResNet-50 model on ImageNet on TPU."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import os
import re
import sys
import time
from absl import app
from absl import flags
import tensorflow.compat.v1 as tf
sys.path.append(os.path.dirna... |
from socket import inet_ntoa
from struct import pack
def calcDottedNetmask(mask):
    """Convert a CIDR prefix length into a dotted-quad netmask string.

    Args:
        mask: number of leading one-bits in the netmask (0-32).

    Returns:
        The netmask in dotted-quad notation, e.g. 24 -> '255.255.255.0'.
    """
    # Build a 32-bit value whose top `mask` bits are set.  The final
    # `& 0xffffffff` truncates back to 32 bits (and makes mask == 0 yield
    # 0 -> '0.0.0.0').  This replaces the original xrange() bit loop,
    # which only works on Python 2.
    bits = (0xffffffff << (32 - mask)) & 0xffffffff
    # '!I' = network byte order, unsigned 32-bit int.
    packed_value = pack('!I', bits)
    return inet_ntoa(packed_value)
import pytest
import salt.engines
from tests.support.mock import MagicMock, patch
def test_engine_module_name():
    """An explicit ``name`` kwarg should be exposed as ``Engine.name``."""
    engine_instance = salt.engines.Engine(
        {}, "foobar.start", {}, {}, {}, {}, name="foobar"
    )
    assert engine_instance.name == "foobar"
def test_engine_title_set():
engine = salt.engines.Engine({}, "foobar.start", {}, {}... |
from tempest.api.volume import base
from tempest.common import utils
from tempest.common import waiters
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib.common.utils import test_utils
from tempest.lib import decorators
CONF = config.CONF
class VolumesActionsTest(base.BaseVolum... |
"""A flow to run checks for a host."""
from grr.lib import aff4
from grr.lib import flow
from grr.lib import rdfvalue
from grr.lib.checks import checks
from grr.proto import flows_pb2
class CheckFlowArgs(rdfvalue.RDFProtoStruct):
protobuf = flows_pb2.CheckFlowArgs
class CheckRunner(flow.GRRFlow):
"""This flow runs ... |
import scipy.io.wavfile as wav
import numpy as np
import copy
class Signal:
# Data loaders
def LoadFromFile(self, file):
self.fs, self.s = wav.read(file)
self.sLength, self.nChans = self.s.shape
def LoadWF(self, waveform, fs):
self.s = waveform
self.fs = fs
self.sLeng... |
"""Tests for StatementVisitor."""
from __future__ import unicode_literals
import re
import subprocess
import textwrap
import unittest
from grumpy_tools.compiler import block
from grumpy_tools.compiler import imputil
from grumpy_tools.compiler import shard_test
from grumpy_tools.compiler import stmt
from grumpy_tools.co... |
"""Translation helper functions."""
import locale
import os
import re
import sys
import gettext as gettext_module
from cStringIO import StringIO
from django.utils.importlib import import_module
from django.utils.safestring import mark_safe, SafeData
from django.utils.thread_support import currentThread
_translations = ... |
"""
Support for EBox.
Get data from 'My Usage Page' page: https://client.ebox.ca/myusage
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.ebox/
"""
import logging
from datetime import timedelta
import voluptuous as vol
import homeassistant.helpers.co... |
"""Tests for the output module field formatting helper."""
import unittest
from dfdatetime import semantic_time as dfdatetime_semantic_time
from dfvfs.path import fake_path_spec
from plaso.containers import events
from plaso.lib import definitions
from plaso.output import formatting_helper
from tests.containers import ... |
from turbo.flux import Mutation, register, dispatch, register_dispatch
import mutation_types
@register_dispatch('user', mutation_types.INCREASE)
def increase(rank):
    # Registered via the decorator for the 'user' target with the INCREASE
    # mutation type; the decorator does the wiring, so the body is a no-op.
    pass


def decrease(rank):
    # Explicitly dispatch a DECREASE mutation for 'user', forwarding rank.
    return dispatch('user', mutation_types.DECREASE, rank)


@register_dispatch('metric', 'inc_qps')
def inc_qps():
    # Registered for the 'metric' target under the string key 'inc_qps'
    # (no-op body; registration is the point).
    pass
import testtools
from tempest.api.compute import base
from tempest.common import waiters
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions
CONF = config.CONF
class MigrationsAdminTest(base.BaseV2ComputeAdminTest):
"""Test... |
from functools import wraps
import json
import os
import traceback
import validators
from jinja2 import Environment, PackageLoader
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
import requests
from requests.auth import HTTPBasicAuth
env = Environment(
loader=PackageLoade... |
import numpy as np
class WordClusters(object):
def __init__(self, vocab, clusters):
self.vocab = vocab
self.clusters = clusters
def ix(self, word):
"""
Returns the index on self.vocab and self.clusters for 'word'
"""
temp = np.where(self.vocab == word)[0]
... |
"""api_server URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-... |
import os,json
from cgi import escape
def unescape(s):
    """Reverse cgi.escape(): convert &lt;, &gt; and &amp; back to <, > and &.

    NOTE(review): the original literals were identity no-ops
    (``s.replace("<", "<")``) — the entity names were evidently lost to
    HTML mangling; this restores the canonical unescape recipe.
    """
    s = s.replace("&lt;", "<")
    s = s.replace("&gt;", ">")
    # This has to be last, so that '&' characters produced by the two
    # replacements above are not re-examined (e.g. '&amp;lt;' -> '&lt;').
    s = s.replace("&amp;", "&")
    return s
class FilesystemMixin:
def h_fs_get(_,path,eltName=''):
from stat import S_ISDIR
data = (escape(open(path).read... |
"""
tests for catalog module
"""
import os
import fabric.api
from fabric.operations import _AttributeString
from mock import patch
from prestoadmin import catalog
from prestoadmin.util import constants
from prestoadmin.util.exception import ConfigurationError, \
ConfigFileNotFoundError
from prestoadmin.standalone.c... |
"""
Installs and configures MySQL
"""
import uuid
import logging
from packstack.installer import validators
from packstack.installer import utils
from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile
controller = None
PLUGIN_NAME = "OS-MySQL"
PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_N... |
def power_digit_sum(exponent):
    """Return the sum of the decimal digits of 2 ** exponent."""
    return sum(map(int, str(2 ** exponent)))
"""Constants for music processing in Magenta."""
DEFAULT_QUARTERS_PER_MINUTE = 120.0
DEFAULT_STEPS_PER_BAR = 16 # 4/4 music sampled at 4 steps per quarter note.
DEFAULT_STEPS_PER_QUARTER = 4
DEFAULT_STEPS_PER_SECOND = 100
STANDARD_PPQ = 220
NUM_SPECIAL_MELODY_EVENTS = 2
MELODY_NOTE_OFF = -1
MELODY_NO_EVENT = -2
MIN_ME... |
"""Module containing classes related to AWS CloudWatch Logs."""
import json
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.aws import util
class LogGroup(resource.BaseResource):
"""Class representing a CloudWatch log group."""
def __init__(self, regi... |
"""Auto-generated file, do not edit by hand. BM metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_BM = PhoneMetadata(id='BM', country_code=1, international_prefix='011',
general_desc=PhoneNumberDesc(national_number_pattern='(?:441|[58]\\d\\d|900)\\d{7}', possible_le... |
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import str
from past.builtins import basestring
from datetime import datetime
import logging
from urllib.parse import urlparse
from time import sleep
import airflow
from airflow import hooks, setti... |
"""Let's Encrypt constants."""
import logging
from acme import challenges
SETUPTOOLS_PLUGINS_ENTRY_POINT = "letsencrypt.plugins"
"""Setuptools entry point group name for plugins."""
CLI_DEFAULTS = dict(
config_files=["/etc/letsencrypt/cli.ini"],
verbose_count=-(logging.WARNING / 10),
server="https://www.let... |
"""Tests for methods in the action registry."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.domain import action_registry
from core.tests import test_utils
class ActionRegistryUnitTests(test_utils.... |
'''
This submodule contains helper functions for parsing and printing the
contents of describe hashes for various DNAnexus entities (projects,
containers, dataobjects, apps, and jobs).
'''
from __future__ import print_function, unicode_literals, division, absolute_import
import datetime, time, json, math, sys, copy
imp... |
"""ByteNet tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensor2tensor.data_generators import problem_hparams
from tensor2tensor.models import bytenet
import tensorflow as tf
class ByteNetTest(tf.test.TestCase):
def testB... |
"""
Linguistic and other taggers.
Tagging each token in a sentence with supplementary information,
such as its part-of-speech (POS) tag, and named entity (NE) tag.
"""
__all__ = [
"PerceptronTagger",
"pos_tag",
"pos_tag_sents",
"tag_provinces",
"chunk_parse",
"NER",
]
from pythainlp.tag.location... |
"""
Common Policy Engine Implementation
Policies can be expressed in one of two forms: A list of lists, or a
string written in the new policy language.
In the list-of-lists representation, each check inside the innermost
list is combined as with an "and" conjunction--for that check to pass,
all the specified checks mus... |
extensions = [
'oslosphinx',
'reno.sphinxext',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'cellar Release Notes'
copyright = u'2016, OpenStack Foundation'
release = ''
version = ''
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'default'
html_static_pa... |
"""Test suite for XenAPI."""
import ast
import contextlib
import datetime
import functools
import os
import re
import mox
from nova.compute import aggregate_states
from nova.compute import instance_types
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_... |
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import re
import subprocess
from pants.backend.codegen.subsystems.thrift_defaults import ThriftDefaults
from pants.base.build_environment import get_buildroot... |
import unittest, re
from rexp.compiler import PatternCompiler
class CompilerTestMethods(unittest.TestCase):
def test_compile_1(self):
compiler = PatternCompiler(pattern_set=dict(
TEST=r'\w+'
))
try:
c1 = compiler.compile('$1{TEST}')
except Exception as exc:
... |
import torch
from deluca.lung.core import Controller, LungEnv
class PIDCorrection(Controller):
def __init__(self, base_controller: Controller, sim: LungEnv, pid_K=[0.0, 0.0], decay=0.1, **kwargs):
self.base_controller = base_controller
self.sim = sim
self.I = 0.0
self.K = pid_K
... |
from __future__ import unicode_literals, print_function, division |
def strip_region_tags(sample_text):
    """Drop empty lines and '# [' region-tag lines, rejoining the rest."""
    kept_lines = (
        line
        for line in sample_text.split("\n")
        if line and "# [" not in line
    )
    return "\n".join(kept_lines)
import hashlib
from core.analytics import InlineAnalytics
from core.observables import Hash
HASH_TYPES_DICT = {
"md5": hashlib.md5,
"sha1": hashlib.sha1,
"sha256": hashlib.sha256,
"sha512": hashlib.sha512,
}
class HashFile(InlineAnalytics):
default_values = {
"name": "HashFile",
"des... |
import zerorpc
import gevent.queue
import logging
import sys
logging.basicConfig()
logger = logging.getLogger()
logger.setLevel(0)
class QueueingLogHandler(logging.Handler):
""" A simple logging handler which puts all emitted logs into a
gevent queue.
"""
def __init__(self, queue, level, formatter):... |
import proto # type: ignore
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.gaming.v1",
manifest={
"OperationMetadata",
"OperationStatus",
"LabelSelector",
"Real... |
from changes.api.serializer import Crumbler, register
from changes.models.node import Cluster
@register(Cluster)
class ClusterCrumbler(Crumbler):
def crumble(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'dateCreated': instance.date_creat... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.