content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
# list(map(int, input().split()))
# int(input())
if __name__ == '__main__':
N = int(input())
main(N)
| [
2,
1351,
7,
8899,
7,
600,
11,
5128,
22446,
35312,
3419,
4008,
198,
2,
493,
7,
15414,
28955,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
399,
796,
493,
7,
15414,
28955,
198,
220,
220,
220... | 2.265306 | 49 |
"""/usr/bin/python
$ Filename :controlbeep.py
$ Description :If KEY_4 is pressed,this script will be executed
$ Author :alan
$ Website :www.osoyoo.com
$ Update :2017/07/07
$
$
"""
import RPi.GPIO as GPIO
PIN = 23
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(PIN, GPIO.IN)
GPIO.setup(PIN, GPIO.OUT)
if GPIO.input(PIN) == 0:
GPIO.output(PIN, GPIO.HIGH)
print('close buzzer\n')
else:
GPIO.output(PIN, GPIO.LOW)
print('open buzzer\n')
| [
37811,
14,
14629,
14,
8800,
14,
29412,
198,
3,
220,
220,
220,
220,
220,
7066,
12453,
220,
220,
220,
220,
220,
1058,
13716,
1350,
538,
13,
9078,
198,
3,
220,
220,
220,
220,
220,
12489,
220,
220,
1058,
1532,
35374,
62,
19,
318,
1207... | 2.011321 | 265 |
"""
fwks.tasks
==========
Module responsible for scheduling the computations. Each type of task may be configured and then run in sequence.
Useful for creation of batches of jobs.
"""
__all__ = ["Task", "make_training_task", "make_ab_feature_test", "make_feature_learnability"]
import keras
import numpy as np
import os
import fwks.model as model
import fwks.dataset as dataset
import fwks.metricization as metricization
from fwks.miscellanea import StopOnConvergence
"""
TODO:
- saving // loading
- running the network
- creation of chains for language models
- test coverage
"""
class Task(type):
"""
Metaclass registering and running tasks.
"""
_instances = {}
@classmethod
def make_training_task(
noise=None,
evaluation_metrics=None,
evaluation_selection=None,
):
"""
Factory of basic model training tasks
"""
# TODO: add training using noisy instead of clean
_evaluation_selection = evaluation_selection
metaclass = AbstractModelTraining
return metaclass
AbstractModelTraining = make_training_task()
def make_ab_feature_test(noise_gen):
"""
Factory for tasks that compare feature transforms on clean and noisy recordings
"""
_noise_gen = noise_gen
return AbstractABTraining
def make_feature_learnability(noise_gen=None):
"""
Create a task that uses secondary neural network to learn the feature transform used by the first
"""
_noise_gen = noise_gen
class FeatureLearnabilityTask(Task):
"""
classmethods:
get_mapping
get_mapper_network(mapping_size)
"""
how_much = 9000
noise_gen = _noise_gen
from_path = "datasets/clarin-long/data"
return FeatureLearnabilityTask
FeatureLearnabilityTask = make_feature_learnability()
| [
37811,
198,
44482,
591,
13,
83,
6791,
198,
2559,
855,
198,
198,
26796,
4497,
329,
26925,
262,
2653,
602,
13,
5501,
2099,
286,
4876,
743,
307,
17839,
290,
788,
1057,
287,
8379,
13,
198,
11041,
913,
329,
6282,
286,
37830,
286,
3946,
1... | 2.842025 | 652 |
#!/usr/bin/env python
import importlib
import argparse
import sys
import signal
from subprocess import call
# parse command line arguments
parser = argparse.ArgumentParser()
parser.add_argument("--cfg", help="path to hyperparameter config file", type=str)
parser.add_argument("--render", help="render agent", action="store_true")
parser.add_argument("--load", help="path to saved model", type=str, default=None)
args = parser.parse_args()
# load config from file
sys.path.append('/'.join(args.cfg.split('/')[0:3]))
cfg = importlib.import_module(args.cfg.split('/')[-1].split('.')[0])
# load proper environment module and create environment object
if cfg.env_class.startswith('Gym'):
from libs.environments import gym
environment = eval('gym.' + cfg.env_class + '(**cfg.environment)')
elif cfg.env_class.startswith('Unity'):
from libs.environments import unity
environment = eval('unity.' + cfg.env_class + '(**cfg.environment)')
elif cfg.env_class.startswith('PLE'):
from libs.environments import ple
environment = eval('ple.' + cfg.env_class + '(render=args.render, **cfg.environment)')
# load modules based on algorithm
exec('from libs.algorithms.' + cfg.algorithm + ' import agents, models, training')
# trap Ctrl-C
signal.signal(signal.SIGINT, signal_handler)
# create model and agent objects and start training
if cfg.algorithm == 'maddpg_v2': # model is loaded from inside the agent
agent = agents.Agent(load_file=args.load, **cfg.agent)
training.train(environment, agent, render=args.render, **cfg.train)
else:
model = eval('models.' + cfg.model_class + '(**cfg.model)')
agent = agents.Agent(model, load_file=args.load, **cfg.agent)
training.train(environment, agent, render=args.render, **cfg.train)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
1330,
8019,
198,
11748,
1822,
29572,
198,
11748,
25064,
198,
11748,
6737,
198,
6738,
850,
14681,
1330,
869,
198,
198,
2,
21136,
3141,
1627,
7159,
198,
48610,
796,
1822,
29572... | 3.020583 | 583 |
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 22 10:23:18 2018
@author: gy17mjk
"""
import sqlite3
conn = sqlite3.connect('resultsdb.sqlite')
c = conn.cursor()
c.execute("CREATE TABLE Results (address text, burglaries integer)")
c.execute("INSERT INTO Results VALUES ('Queen Vic',2)")
conn.commit()
conn.close() | [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
26223,
3158,
2534,
838,
25,
1954,
25,
1507,
2864,
198,
198,
31,
9800,
25,
21486,
1558,
76,
73,
74,
198,
37811,
198,
198,
11748,
44161,
578,
18,... | 2.73913 | 115 |
# -*- coding: utf-8 -*-
###############################################################################
#
# Query
# Access DuckDuckGo web search functionality.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class QueryInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the Query
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_Format(self, value):
"""
Set the value of the Format input for this Choreo. ((optional, string) Enter: xml, or json. Default is set to xml.)
"""
super(QueryInputSet, self)._set_input('Format', value)
def set_NoHTML(self, value):
"""
Set the value of the NoHTML input for this Choreo. ((optional, integer) Enter 1 to remove HTML from text. Set only if Format=json.)
"""
super(QueryInputSet, self)._set_input('NoHTML', value)
def set_NoRedirect(self, value):
"""
Set the value of the NoRedirect input for this Choreo. ((optional, integer) Enter 1 to skip HTTP redirects. This is useful for !bang commands. Set only if Format=json.)
"""
super(QueryInputSet, self)._set_input('NoRedirect', value)
def set_PrettyPrint(self, value):
"""
Set the value of the PrettyPrint input for this Choreo. ((optional, integer) Enter 1 to pretty-print the JSON output.)
"""
super(QueryInputSet, self)._set_input('PrettyPrint', value)
def set_Query(self, value):
"""
Set the value of the Query input for this Choreo. ((required, string) Enter a search query.)
"""
super(QueryInputSet, self)._set_input('Query', value)
def set_SkipDisambiguation(self, value):
"""
Set the value of the SkipDisambiguation input for this Choreo. ((optional, integer) Enter 1 to skip disambiguation. Set only if Format=json.)
"""
super(QueryInputSet, self)._set_input('SkipDisambiguation', value)
class QueryResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the Query Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from DuckDuckGo in XML or JSON format.)
"""
return self._output.get('Response', None)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
29113,
29113,
7804,
4242,
21017,
198,
2,
198,
2,
43301,
198,
2,
8798,
21867,
35,
1347,
5247,
3992,
2989,
11244,
13,
220,
220,
198,
2,
198,
2,
11361,
6300,
362,
... | 3.045788 | 1,092 |
"""Path routing-based tests fixtures."""
import pytest
from testsuite import rawobj
from testsuite.gateways.apicast.selfmanaged import SelfManagedApicast
from testsuite.utils import blame
def delete_all_mapping_rules(proxy):
"""Deletes all mapping rules in a given proxy."""
mapping_rules = proxy.mapping_rules.list()
for mapping_rule in mapping_rules:
proxy.mapping_rules.delete(mapping_rule["id"])
@pytest.fixture(scope="module")
def gateway_kind():
"""Gateway class to use for tests"""
return SelfManagedApicast
@pytest.fixture(scope="module")
def gateway_options(gateway_options):
"""Deploy template apicast staging gateway."""
gateway_options["path_routing"] = True
return gateway_options
@pytest.fixture(scope="module")
def service_mapping():
"""Change mapping rule for service"""
return "/get"
@pytest.fixture(scope="module")
def service(service, service_mapping):
"""Delete mapping rules and add new one from/to default service."""
proxy = service.proxy.list()
metric = service.metrics.list()[0]
delete_all_mapping_rules(proxy)
proxy.mapping_rules.create(rawobj.Mapping(metric, service_mapping))
proxy.update()
return service
@pytest.fixture(scope="module")
def service2_proxy_settings(private_base_url):
"""Change api_backend to echo-api for service2."""
return rawobj.Proxy(private_base_url("echo_api"))
@pytest.fixture(scope="module")
def service2_mapping():
"""Change mapping rule for service2"""
return "/echo"
# pylint: disable=too-many-arguments
@pytest.fixture(scope="module")
def service2(request, custom_service, lifecycle_hooks, service2_proxy_settings,
service2_mapping):
"""Create second service and mapping rule."""
service2 = custom_service({"name": blame(request, "svc")}, service2_proxy_settings,
hooks=lifecycle_hooks)
metric = service2.metrics.list()[0]
proxy = service2.proxy.list()
delete_all_mapping_rules(proxy)
proxy.mapping_rules.create(rawobj.Mapping(metric, service2_mapping))
proxy.update()
return service2
@pytest.fixture(scope="module")
def application2(request, service2, custom_app_plan, custom_application, lifecycle_hooks):
"""Create custom application for service2."""
plan = custom_app_plan(rawobj.ApplicationPlan(blame(request, "aplan")), service2)
return custom_application(rawobj.Application(blame(request, "app"), plan), hooks=lifecycle_hooks)
@pytest.fixture(scope="module")
def client(api_client):
"""Client for the first application."""
return api_client()
@pytest.fixture(scope="module")
def client2(application2, api_client):
"""Client for second application."""
return api_client(application2)
| [
37811,
15235,
28166,
12,
3106,
5254,
34609,
526,
15931,
198,
11748,
12972,
9288,
198,
198,
6738,
1332,
2385,
578,
1330,
8246,
26801,
198,
6738,
1332,
2385,
578,
13,
10494,
1322,
13,
499,
291,
459,
13,
944,
39935,
1330,
12189,
5124,
1886... | 2.885536 | 961 |
import requests
import json, os
url = "{0}:{1}".format(os.environ['HOSTNAME'] , "8000")
resp = requests.post('http://' + url + '/api/v1/type/service/botbuilder/def/',
json={
"cb_id" : "cb0001",
"chat_cate" : "EP",
"chat_sub_cate" : "people",
"cb_title" : "chatbot",
"cb_desc" : "find_people",
"creation_date": "2017-05-22T18:00:00.000",
"last_update_date": "2017-05-22T18:00:00.000",
"created_by" : "KSS",
"last_updated_by" : "KSS"
})
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))
| [
11748,
7007,
198,
11748,
33918,
11,
28686,
198,
6371,
796,
45144,
15,
92,
29164,
16,
92,
1911,
18982,
7,
418,
13,
268,
2268,
17816,
39,
10892,
20608,
20520,
837,
366,
33942,
4943,
198,
198,
4363,
796,
7007,
13,
7353,
10786,
4023,
1378... | 1.650964 | 467 |
from flask import (
Blueprint,
render_template,
session,
make_response,
request,
redirect,
url_for,
)
from .. import orm
module = Blueprint('index', __name__, url_prefix='/')
@module.route('/', methods=['GET'])
| [
6738,
42903,
1330,
357,
198,
220,
220,
220,
39932,
11,
198,
220,
220,
220,
8543,
62,
28243,
11,
198,
220,
220,
220,
6246,
11,
198,
220,
220,
220,
787,
62,
26209,
11,
198,
220,
220,
220,
2581,
11,
198,
220,
220,
220,
18941,
11,
1... | 2.574468 | 94 |
# coding: utf-8
#------------------------------------------------------------------------------------------#
# This file is part of Pyccel which is released under MIT License. See the LICENSE file or #
# go to https://github.com/pyccel/pyccel/blob/master/LICENSE for full license details. #
#------------------------------------------------------------------------------------------#
"""
"""
from os.path import join, dirname
from textx.metamodel import metamodel_from_file
from pyccel.parser.syntax.basic import BasicStmt
from pyccel.ast.core import AnnotatedComment
DEBUG = False
class Openacc(object):
"""Class for Openacc syntax."""
def __init__(self, **kwargs):
"""
Constructor for Openacc.
"""
self.statements = kwargs.pop('statements', [])
class OpenaccStmt(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.stmt = kwargs.pop('stmt')
super(OpenaccStmt, self).__init__(**kwargs)
@property
#################################################
# Constructs and Directives
#################################################
class AccParallelConstruct(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccParallelConstruct, self).__init__(**kwargs)
@property
class AccKernelsConstruct(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccKernelsConstruct, self).__init__(**kwargs)
@property
class AccDataConstruct(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccDataConstruct, self).__init__(**kwargs)
@property
class AccEnterDataDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccEnterDataDirective, self).__init__(**kwargs)
@property
class AccExitDataDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccExitDataDirective, self).__init__(**kwargs)
@property
class AccHostDataDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccHostDataDirective, self).__init__(**kwargs)
@property
class AccLoopConstruct(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccLoopConstruct, self).__init__(**kwargs)
@property
class AccAtomicConstruct(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccAtomicConstruct, self).__init__(**kwargs)
@property
class AccDeclareDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccDeclareDirective, self).__init__(**kwargs)
@property
class AccInitDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccInitDirective, self).__init__(**kwargs)
@property
class AccShutDownDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccShutDownDirective, self).__init__(**kwargs)
@property
class AccSetDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccSetDirective, self).__init__(**kwargs)
@property
class AccUpdateDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccUpdateDirective, self).__init__(**kwargs)
@property
class AccRoutineDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccRoutineDirective, self).__init__(**kwargs)
@property
class AccWaitDirective(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.clauses = kwargs.pop('clauses')
super(AccWaitDirective, self).__init__(**kwargs)
@property
class AccEndClause(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.construct = kwargs.pop('construct')
super(AccEndClause, self).__init__(**kwargs)
@property
#################################################
#################################################
# Clauses
#################################################
#AccAsync: 'async' '(' args+=ID[','] ')';
class AccAsync(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccAsync, self).__init__(**kwargs)
@property
#AccAuto: 'auto';
class AccAuto(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
super(AccAuto, self).__init__(**kwargs)
@property
#AccBind: 'bind' '(' arg=STRING ')';
class AccBind(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.arg = kwargs.pop('arg')
super(AccBind, self).__init__(**kwargs)
@property
#AccCache: 'cache' '(' args+=ID[','] ')';
class AccCache(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccCache, self).__init__(**kwargs)
@property
#AccCollapse: 'collapse' '(' n=INT ')';
class AccCollapse(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.n = kwargs.pop('n')
super(AccCollapse, self).__init__(**kwargs)
@property
#AccCopy: 'copy' '(' args+=ID[','] ')';
class AccCopy(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccCopy, self).__init__(**kwargs)
@property
#AccCopyin: 'copyin' '(' args+=ID[','] ')';
class AccCopyin(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccCopyin, self).__init__(**kwargs)
@property
#AccCopyout: 'copyout' '(' args+=ID[','] ')';
class AccCopyout(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccCopyout, self).__init__(**kwargs)
@property
#AccCreate: 'create' '(' args+=ID[','] ')';
class AccCreate(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccCreate, self).__init__(**kwargs)
@property
#AccDefault: 'default' '(' status=DefaultStatus ')';
class AccDefault(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.status = kwargs.pop('status')
super(AccDefault, self).__init__(**kwargs)
@property
#AccDefaultAsync: 'default_async' '(' args+=ID[','] ')';
class AccDefaultAsync(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccDefaultAsync, self).__init__(**kwargs)
@property
#AccDelete: 'delete' '(' args+=ID[','] ')';
class AccDelete(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccDelete, self).__init__(**kwargs)
@property
#AccDevice: 'device' '(' args+=ID[','] ')';
class AccDevice(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccDevice, self).__init__(**kwargs)
@property
#AccDeviceNum: 'device_num' '(' n=INT ')';
class AccDeviceNum(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.n = kwargs.pop('n')
super(AccDeviceNum, self).__init__(**kwargs)
@property
#AccDevicePtr: 'deviceptr' '(' args+=ID[','] ')';
class AccDevicePtr(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccDevicePtr, self).__init__(**kwargs)
@property
#AccDeviceResident: 'device_resident' '(' args+=ID[','] ')';
class AccDeviceResident(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccDeviceResident, self).__init__(**kwargs)
@property
#AccDeviceType: 'device_type' '(' args+=ID[','] ')';
class AccDeviceType(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccDeviceType, self).__init__(**kwargs)
@property
#AccFinalize: 'finalize';
class AccFinalize(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
super(AccFinalize, self).__init__(**kwargs)
@property
#AccFirstPrivate: 'firstprivate' '(' args+=ID[','] ')';
class AccFirstPrivate(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccFirstPrivate, self).__init__(**kwargs)
@property
#AccGang: 'gang' '(' args+=GangArg[','] ')';
class AccGang(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccGang, self).__init__(**kwargs)
@property
#AccHost: 'host' '(' args+=ID[','] ')';
class AccHost(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccHost, self).__init__(**kwargs)
@property
#AccIf: 'if' cond=ID;
class AccIf(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.cond = kwconds.pop('cond')
super(AccIf, self).__init__(**kwargs)
@property
#AccIfPresent: 'if_present';
class AccIfPresent(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
super(AccIfPresent, self).__init__(**kwargs)
@property
#AccIndependent: 'independent';
class AccIndependent(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
super(AccIndependent, self).__init__(**kwargs)
@property
#AccLink: 'link' '(' args+=ID[','] ')';
class AccLink(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccLink, self).__init__(**kwargs)
@property
#AccNoHost: 'nohost';
class AccNoHost(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
super(AccNoHost, self).__init__(**kwargs)
@property
#AccNumGangs: 'num_gangs' '(' n=INT ')';
class AccNumGangs(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.n = kwargs.pop('n')
super(AccNumGangs, self).__init__(**kwargs)
@property
#AccNumWorkers: 'num_workers' '(' n=INT ')';
class AccNumWorkers(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.n = kwargs.pop('n')
super(AccNumWorkers, self).__init__(**kwargs)
@property
#AccPresent: 'present' '(' args+=ID[','] ')';
class AccPresent(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccPresent, self).__init__(**kwargs)
@property
#AccPrivate: 'private' '(' args+=ID[','] ')';
class AccPrivate(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccPrivate, self).__init__(**kwargs)
@property
#AccReduction: 'reduction' '('op=ReductionOperator ':' args+=ID[','] ')';
class AccReduction(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.op = kwargs.pop('op')
self.args = kwargs.pop('args')
super(AccReduction, self).__init__(**kwargs)
@property
#AccSelf: 'self' '(' args+=ID[','] ')';
class AccSelf(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccSelf, self).__init__(**kwargs)
@property
#AccSeq: 'seq';
class AccSeq(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
super(AccSeq, self).__init__(**kwargs)
@property
#AccTile: 'tile' '(' args+=ID[','] ')';
class AccTile(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccTile, self).__init__(**kwargs)
@property
#AccUseDevice: 'use_device' '(' args+=ID[','] ')';
class AccUseDevice(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccUseDevice, self).__init__(**kwargs)
@property
#AccVector: 'vector' ('(' args+=VectorArg ')')?;
class AccVector(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccVector, self).__init__(**kwargs)
@property
#AccVectorLength: 'vector_length' '(' n=INT ')';
class AccVectorLength(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.n = kwargs.pop('n')
super(AccVectorLength, self).__init__(**kwargs)
@property
#AccWait: 'wait' '(' args+=ID[','] ')';
class AccWait(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccWait, self).__init__(**kwargs)
@property
#AccWorker: 'worker' ('(' args+=WorkerArg ')')?;
class AccWorker(AccBasic):
"""Class representing a ."""
def __init__(self, **kwargs):
"""
"""
self.args = kwargs.pop('args')
super(AccWorker, self).__init__(**kwargs)
@property
#################################################
#################################################
# whenever a new rule is added in the grammar, we must update the following
# lists.
acc_directives = [AccParallelConstruct,
AccKernelsConstruct,
AccDataConstruct,
AccEnterDataDirective,
AccExitDataDirective,
AccHostDataDirective,
AccLoopConstruct,
AccAtomicConstruct,
AccDeclareDirective,
AccInitDirective,
AccShutDownDirective,
AccSetDirective,
AccUpdateDirective,
AccRoutineDirective,
AccWaitDirective,
AccEndClause]
acc_clauses = [AccAsync,
AccAuto,
AccBind,
AccCollapse,
AccCopy,
AccCopyin,
AccCopyout,
AccCreate,
AccDefault,
AccDefaultAsync,
AccDelete,
AccDevice,
AccDeviceNum,
AccDevicePtr,
AccDeviceResident,
AccDeviceType,
AccFinalize,
AccFirstPrivate,
AccGang,
AccHost,
AccIf,
AccIfPresent,
AccIndependent,
AccLink,
AccNoHost,
AccNumGangs,
AccNumWorkers,
AccPresent,
AccPrivate,
AccReduction,
AccSelf,
AccSeq,
AccTile,
AccUseDevice,
AccVector,
AccVectorLength,
AccWait,
AccWorker]
acc_classes = [Openacc, OpenaccStmt] + acc_directives + acc_clauses
this_folder = dirname(__file__)
# Get meta-model from language description
grammar = join(this_folder, '../grammar/openacc.tx')
meta = metamodel_from_file(grammar, classes=acc_classes)
def parse(filename=None, stmts=None):
""" Parse openacc pragmas
Parameters
----------
filename: str
stmts : list
Results
-------
stmts : list
"""
# Instantiate model
if filename:
model = meta.model_from_file(filename)
elif stmts:
model = meta.model_from_str(stmts)
else:
raise ValueError('Expecting a filename or a string')
stmts = []
for stmt in model.statements:
if isinstance(stmt, OpenaccStmt):
e = stmt.stmt.expr
stmts.append(e)
if len(stmts) == 1:
return stmts[0]
else:
return stmts
#################################################
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
2,
10097,
22369,
438,
2,
198,
2,
770,
2393,
318,
636,
286,
9485,
66,
5276,
543,
318,
2716,
739,
17168,
13789,
13,
4091,
262,
38559,
24290,
2393,
393,
1303,
198,
2,
467,
284,
3740,
1378,
12567,
... | 2.218818 | 8,258 |
import motor.motor_asyncio
from bson.objectid import ObjectId
MONGO_DETAILS = "mongodb://localhost:27017"
client = motor.motor_asyncio.AsyncIOMotorClient(MONGO_DETAILS)
database = client.review_db
review_collection = database.get_collection("reviews")
# Retrieve all businesses present in the database
| [
11748,
5584,
13,
76,
20965,
62,
292,
13361,
952,
198,
6738,
275,
1559,
13,
15252,
312,
1330,
9515,
7390,
628,
198,
27857,
11230,
62,
35,
20892,
45484,
796,
366,
31059,
375,
65,
1378,
36750,
25,
1983,
29326,
1,
198,
16366,
796,
5584,
... | 3.14433 | 97 |
"""Utilities that help with wrapping various C structures."""
import copy
import glob
import h5py
import logging
import numpy as np
import warnings
from cffi import FFI
from hashlib import md5
from os import makedirs, path
from pathlib import Path
from . import __version__
from ._cfg import config
_ffi = FFI()
logger = logging.getLogger("21cmFAST")
class ParameterError(RuntimeError):
"""An exception representing a bad choice of parameters."""
class FatalCError(Exception):
"""An exception representing something going wrong in C."""
SUCCESS = 0
IOERROR = 1
GSLERROR = 2
VALUEERROR = 3
PARAMETERERROR = 4
MEMORYALLOCERROR = 5
FILEERROR = 6
def _process_exitcode(exitcode, fnc, args):
"""Determine what happens for different values of the (integer) exit code from a C function."""
if exitcode != SUCCESS:
logger.error(f"In function: {fnc.__name__}. Arguments: {args}")
if exitcode in (GSLERROR, PARAMETERERROR):
raise ParameterError
elif exitcode in (IOERROR, VALUEERROR, MEMORYALLOCERROR, FILEERROR):
raise FatalCError
else: # Unknown C code
raise FatalCError("Unknown error in C. Please report this error!")
ctype2dtype = {}
# Integer types
for prefix in ("int", "uint"):
for log_bytes in range(4):
ctype = "%s%d_t" % (prefix, 8 * (2 ** log_bytes))
dtype = "%s%d" % (prefix[0], 2 ** log_bytes)
ctype2dtype[ctype] = np.dtype(dtype)
# Floating point types
ctype2dtype["float"] = np.dtype("f4")
ctype2dtype["double"] = np.dtype("f8")
ctype2dtype["int"] = np.dtype("i4")
def asarray(ptr, shape):
"""Get the canonical C type of the elements of ptr as a string."""
ctype = _ffi.getctype(_ffi.typeof(ptr).item).split("*")[0].strip()
if ctype not in ctype2dtype:
raise RuntimeError(
f"Cannot create an array for element type: {ctype}. Can do {list(ctype2dtype.values())}."
)
array = np.frombuffer(
_ffi.buffer(ptr, _ffi.sizeof(ctype) * np.prod(shape)), ctype2dtype[ctype]
)
array.shape = shape
return array
class StructWrapper:
    """
    A base-class python wrapper for C structures (not instances of them).
    Provides simple methods for creating new instances and accessing field names and values.
    To implement wrappers of specific structures, make a subclass with the same name as the
    appropriate C struct (which must be defined in the C code that has been compiled to the ``ffi``
    object), *or* use an arbitrary name, but set the ``_name`` attribute to the C struct name.
    """

    # Name of the wrapped C struct; if None, the subclass' own name is used.
    _name = None
    # The cffi ``ffi`` object of the compiled library; subclasses must set this.
    _ffi = None

    @classmethod
    def _get_name(cls):
        """Return the C struct name this class wraps (``_name``, or the class name)."""
        return cls._name or cls.__name__

    @property
    def _cstruct(self):
        """
        The actual structure which needs to be passed around to C functions.
        .. note:: This is best accessed by calling the instance (see __call__).
        The reason it is defined as this (manual) cached property is so that it can be created
        dynamically, but not lost. It must not be lost, or else C functions which use it will lose
        access to its memory. But it also must be created dynamically so that it can be recreated
        after pickling (pickle can't handle CData).
        """
        # This must be a plain per-instance property: stacking ``classmethod`` on
        # ``property`` would cache a single struct on the *class*, silently sharing
        # C memory between instances (and the combination is deprecated/removed on
        # modern Pythons).
        try:
            return self.__cstruct
        except AttributeError:
            self.__cstruct = self._new()
            return self.__cstruct

    def _new(self):
        """Return a new empty C structure corresponding to this class."""
        # Falls back to the class name when ``_name`` is unset, instead of
        # raising a TypeError from ``"struct " + None``.
        return self._ffi.new("struct " + self._get_name() + "*")

    @classmethod
    def get_fields(cls, cstruct=None):
        """Obtain the C-side fields of this struct."""
        if cstruct is None:
            cstruct = cls._ffi.new("struct " + cls._get_name() + "*")
        return cls._ffi.typeof(cstruct[0]).fields

    @classmethod
    def get_fieldnames(cls, cstruct=None):
        """Obtain the C-side field names of this struct."""
        fields = cls.get_fields(cstruct)
        return [f for f, t in fields]

    @classmethod
    def get_pointer_fields(cls, cstruct=None):
        """Obtain all pointer fields of the struct (typically simulation boxes)."""
        return [f for f, t in cls.get_fields(cstruct) if t.type.kind == "pointer"]

    @property
    def fields(self):
        """List of fields of the underlying C struct (a list of tuples of "name, type")."""
        return self.get_fields(self._cstruct)

    @property
    def fieldnames(self):
        """List names of fields of the underlying C struct."""
        return [f for f, t in self.fields]

    @property
    def pointer_fields(self):
        """List of names of fields which have pointer type in the C struct."""
        return [f for f, t in self.fields if t.type.kind == "pointer"]

    @property
    def primitive_fields(self):
        """List of names of fields which have primitive type in the C struct."""
        return [f for f, t in self.fields if t.type.kind == "primitive"]

    def __getstate__(self):
        """Return the current state of the class without pointers."""
        # ``_strings`` and the cached cstruct hold CData, which cannot be pickled.
        return {
            k: v
            for k, v in self.__dict__.items()
            if k not in ["_strings", "_StructWrapper__cstruct"]
        }

    def refresh_cstruct(self):
        """Delete the underlying C object, forcing it to be rebuilt."""
        try:
            del self.__cstruct
        except AttributeError:
            pass

    def __call__(self):
        """Return an instance of the C struct."""
        pass
class StructWithDefaults(StructWrapper):
    """
    A convenient interface to create a C structure with defaults specified.
    It is provided for the purpose of *creating* C structures in Python to be passed to C functions,
    where sensible defaults are available. Structures which are created within C and passed back do
    not need to be wrapped.
    This provides a *fully initialised* structure, and will fail if not all fields are specified
    with defaults.
    .. note:: The actual C structure is gotten by calling an instance. This is auto-generated when
        called, based on the parameters in the class.
    .. warning:: This class will *not* deal well with parameters of the struct which are pointers.
        All parameters should be primitive types, except for strings, which are dealt with
        specially.
    Parameters
    ----------
    ffi : cffi object
        The ffi object from any cffi-wrapped library.
    """

    # Mapping of parameter name -> default value; subclasses fill this in.
    _defaults_ = {}

    def convert(self, key, val):
        """Make any conversions of values before saving to the instance."""
        return val

    def update(self, **kwargs):
        """
        Update the parameters of an existing class structure.
        This should always be used instead of attempting to *assign* values to instance attributes.
        It consistently re-generates the underlying C memory space and sets some book-keeping
        variables.
        Parameters
        ----------
        kwargs:
            Any argument that may be passed to the class constructor.
        """
        # Start a fresh cstruct.
        if kwargs:
            self.refresh_cstruct()
        for k in self._defaults_:
            # Prefer arguments given to the constructor.
            if k in kwargs:
                v = kwargs.pop(k)
                # BUG FIX: this assignment must happen *inside* the ``if`` block.
                # Previously it ran on every iteration, re-using a stale ``v``
                # (or raising NameError) whenever ``k`` was not in ``kwargs``.
                try:
                    setattr(self, k, v)
                except AttributeError:
                    # The attribute has been defined as a property, save it as a hidden variable
                    setattr(self, "_" + k, v)
        # Also ensure that parameters that are part of the class, but not the defaults, are set
        # this will fail if these parameters cannot be set for some reason, hence doing it
        # last.
        for k in list(kwargs.keys()):
            if hasattr(self, k):
                setattr(self, k, kwargs.pop(k))
        if kwargs:
            warnings.warn(
                "The following arguments to be updated are not compatible with this class: %s"
                % kwargs
            )

    def clone(self, **kwargs):
        """Make a fresh copy of the instance with arbitrary parameters updated."""
        # NOTE(review): ``self.self`` (a dict) is passed positionally -- this
        # assumes the constructor accepts a mapping as its first argument; confirm.
        new = self.__class__(self.self)
        new.update(**kwargs)
        return new

    def __call__(self):
        """Return a filled C Structure corresponding to this instance."""
        for key, val in self.pystruct.items():
            # Find the value of this key in the current class
            if isinstance(val, str):
                # If it is a string, need to convert it to C string ourselves.
                # NOTE(review): uses ``self.ffi`` while the rest of the class uses
                # ``self._ffi`` -- presumably set by the constructor (not shown);
                # the returned C string must also stay alive while the struct is
                # used, or the pointer dangles. Verify against the constructor.
                val = self.ffi.new("char[]", getattr(self, key).encode())
            try:
                setattr(self._cstruct, key, val)
            except TypeError:
                print("For key %s, value %s:" % (key, val))
                raise
        return self._cstruct

    @property
    def pystruct(self):
        """A pure-python dictionary representation of the corresponding C structure."""
        return {fld: self.convert(fld, getattr(self, fld)) for fld in self.fieldnames}

    @property
    def defining_dict(self):
        """
        Pure python dictionary representation of this class, as it would appear in C.
        .. note:: This is not the same as :attr:`pystruct`, as it omits all variables that don't
            need to be passed to the constructor, but appear in the C struct (some can be
            calculated dynamically based on the inputs). It is also not the same as
            :attr:`self`, as it includes the 'converted' values for each variable, which are
            those actually passed to the C code.
        """
        return {k: self.convert(k, getattr(self, k)) for k in self._defaults_}

    @property
    def self(self):
        """
        Dictionary which if passed to its own constructor will yield an identical copy.
        .. note:: This differs from :attr:`pystruct` and :attr:`defining_dict` in that it uses the
            hidden variable value, if it exists, instead of the exposed one. This prevents
            from, for example, passing a value which is 10**10**val (and recurring!).
        """
        # Try to first use the hidden variable before using the non-hidden variety.
        dct = {}
        for k in self._defaults_:
            if hasattr(self, "_" + k):
                dct[k] = getattr(self, "_" + k)
            else:
                dct[k] = getattr(self, k)
        return dct

    def __repr__(self):
        """Full unique representation of the instance."""
        return (
            self.__class__.__name__
            + "("
            + ", ".join(sorted(k + ":" + str(v) for k, v in self.defining_dict.items()))
            + ")"
        )

    def __eq__(self, other):
        """Check whether this instance is equal to another object (by checking the __repr__)."""
        return self.__repr__() == repr(other)

    def __hash__(self):
        """Generate a unique hash for the instance."""
        return hash(self.__repr__())
def snake_to_camel(word: str, publicize: bool = True):
    """Turn a snake_case identifier into CamelCase.

    If ``publicize`` is True, leading underscores are stripped first (making a
    private name public). Empty segments caused by consecutive underscores are
    kept as literal underscores in the output.
    """
    source = word.lstrip("_") if publicize else word
    pieces = []
    for segment in source.split("_"):
        pieces.append(segment.capitalize() if segment else "_")
    return "".join(pieces)
def camel_to_snake(word: str, depublicize: bool = False):
    """Convert camel case to snake case.

    Every upper-case character becomes an underscore followed by its lower-case
    form. The leading underscore this produces for a leading capital is stripped
    unless ``depublicize`` is True (which keeps the name "private").
    """
    word = "".join(["_" + i.lower() if i.isupper() else i for i in word])
    if not depublicize:
        word = word.lstrip("_")
    return word
def get_all_subclasses(cls):
    """Return every direct and indirect subclass of ``cls``, in depth-first order."""
    # Explicit stack instead of recursion; reversing on push preserves the
    # pre-order traversal that the recursive formulation produces.
    pending = list(reversed(cls.__subclasses__()))
    result = []
    while pending:
        current = pending.pop()
        result.append(current)
        pending.extend(reversed(current.__subclasses__()))
    return result
class OutputStruct(StructWrapper):
    """Base class for any class that wraps a C struct meant to be output from a C function."""

    _meta = True
    _fields_ = []
    _global_params = None
    # Input-parameter attributes that define (and uniquely identify) this box.
    _inputs = ["user_params", "cosmo_params", "_random_seed"]
    # Input keys that must never be written to / compared against cache files.
    _filter_params = ["external_table_path", "wisdoms_path"]
    # Pointer fields whose memory is allocated by C rather than numpy.
    _c_based_pointers = ()
    _c_compute_function = None
    _c_free_function = None
    # Map from numpy dtype name to the corresponding C pointer declaration.
    _TYPEMAP = {"float32": "float *", "float64": "double *", "int32": "int *"}

    def _c_shape(self, cstruct):
        """Return a dictionary of field: shape for arrays allocated within C."""
        return {}

    def _init_arrays(self):  # pragma: nocover
        """Abstract base method for initializing any arrays that the structure has."""
        # Instance method (fills per-instance arrays) -- it must not be a
        # classmethod, since subclasses initialize arrays on ``self``.
        pass

    @property
    def random_seed(self):
        """The random seed for this particular instance."""
        if self._random_seed is None:
            # Drawn lazily, once; afterwards the same seed is always returned.
            self._random_seed = int(np.random.randint(1, int(1e12)))
        return self._random_seed

    @property
    def arrays_initialized(self):
        """Whether all necessary arrays are initialized.
        .. note:: This must be true before passing to a C function.
        """
        # This assumes that all pointer fields will be arrays...
        for k in self.pointer_fields:
            if k in self._c_based_pointers:
                # C allocates these itself, so they need not exist beforehand.
                continue
            if not hasattr(self, k):
                return False
            elif getattr(self._cstruct, k) == self._ffi.NULL:
                return False
        return True

    def __call__(self):
        """Initialize/allocate a fresh C struct in memory and return it."""
        # NOTE(review): ``dummy`` and ``_init_cstruct`` are defined elsewhere in
        # this module / subclasses -- confirm before refactoring.
        if not (self.arrays_initialized or self.dummy):
            self._init_cstruct()
        return self._cstruct

    def _expose(self):
        """Expose the non-array primitives of the ctype to the top-level object."""
        if not self.filled:
            raise Exception(
                "You need to have actually called the C code before the primitives can be exposed."
            )
        for k in self.primitive_fields:
            setattr(self, k, getattr(self._cstruct, k))

    @property
    def _fname_skeleton(self):
        """The filename without specifying the random seed."""
        return self._name + "_" + self._md5 + "_r{seed}.h5"

    @property
    def filename(self):
        """The base filename of this object."""
        if self._random_seed is None:
            raise AttributeError("filename not defined until random_seed has been set")
        return self._fname_skeleton.format(seed=self.random_seed)

    def find_existing(self, direc=None):
        """
        Try to find existing boxes which match the parameters of this instance.
        Parameters
        ----------
        direc : str, optional
            The directory in which to search for the boxes. By default, this is the
            centrally-managed directory, given by the ``config.yml`` in ``~/.21cmfast/``.
        Returns
        -------
        str
            The filename of an existing set of boxes, or None.
        """
        # First, if appropriate, find a file without specifying seed.
        # Need to do this first, otherwise the seed will be chosen randomly upon
        # choosing a filename!
        direc = path.expanduser(direc or config["direc"])
        if not self._random_seed:
            f = self._find_file_without_seed(direc)
            if f and self._check_parameters(f):
                return f
        else:
            f = self._get_fname(direc)
            if path.exists(f) and self._check_parameters(f):
                return f
        return None

    def exists(self, direc=None):
        """
        Return a bool indicating whether a box matching the parameters of this instance is in cache.
        Parameters
        ----------
        direc : str, optional
            The directory in which to search for the boxes. By default, this is the
            centrally-managed directory, given by the ``config.yml`` in ``~/.21cmfast/``.
        """
        return self.find_existing(direc) is not None

    def write(self, direc=None, fname=None, write_inputs=True, mode="w"):
        """
        Write the struct in standard HDF5 format.
        Parameters
        ----------
        direc : str, optional
            The directory in which to write the boxes. By default, this is the
            centrally-managed directory, given by the ``config.yml`` in ``~/.21cmfast/``.
        fname : str, optional
            The filename to write to. By default creates a unique filename from the hash.
        write_inputs : bool, optional
            Whether to write the inputs to the file. Can be useful to set to False if
            the input file already exists and has parts already written.
        """
        if not self.filled:
            raise IOError("The boxes have not yet been computed.")
        if not self._random_seed:
            raise ValueError(
                "Attempting to write when no random seed has been set. "
                "Struct has been 'filled' inconsistently."
            )
        if not write_inputs:
            # Appending (rather than truncating) preserves the inputs already on disk.
            mode = "a"
        try:
            direc = path.expanduser(direc or config["direc"])
            if not path.exists(direc):
                makedirs(direc)
            fname = fname or self._get_fname(direc)
            if not path.isabs(fname):
                fname = path.abspath(path.join(direc, fname))
            with h5py.File(fname, mode) as f:
                # Save input parameters to the file
                if write_inputs:
                    for k in self._inputs + ["_global_params"]:
                        q = getattr(self, k)
                        kfile = k.lstrip("_")
                        if isinstance(q, StructWithDefaults) or isinstance(
                            q, StructInstanceWrapper
                        ):
                            grp = f.create_group(kfile)
                            if isinstance(q, StructWithDefaults):
                                # using self allows to rebuild the object from HDF5 file.
                                dct = q.self
                            else:
                                dct = q
                            for kk, v in dct.items():
                                if kk not in self._filter_params:
                                    try:
                                        # h5py attrs cannot store None; use a sentinel.
                                        grp.attrs[kk] = "none" if v is None else v
                                    except TypeError:
                                        raise TypeError(
                                            f"key {kk} with value {v} is not able to be written to HDF5 attrs!"
                                        )
                        else:
                            f.attrs[kfile] = q
                    # Write 21cmFAST version to the file
                    f.attrs["version"] = __version__
                # Save the boxes to the file
                boxes = f.create_group(self._name)
                self.write_data_to_hdf5_group(boxes)
        except OSError as e:
            # Caching is best-effort: a failed write must not abort the computation.
            logger.warning(
                "When attempting to write {} to file, write failed with the "
                "following error. Continuing without caching.".format(
                    self.__class__.__name__
                )
            )
            logger.warning(e)

    def write_data_to_hdf5_group(self, group: h5py.Group):
        """
        Write out this object to a particular HDF5 subgroup.
        Parameters
        ----------
        group
            The HDF5 group into which to write the object.
        """
        # Go through all fields in this struct, and save
        for k in self.pointer_fields:
            group.create_dataset(k, data=getattr(self, k))
        for k in self.primitive_fields:
            group.attrs[k] = getattr(self, k)

    def save(self, fname=None, direc="."):
        """Save the box to disk.
        In detail, this just calls write, but changes the default directory to the
        local directory. This is more user-friendly, while :meth:`write` is for
        automatic use under-the-hood.
        Parameters
        ----------
        fname : str, optional
            The filename to write. Can be an absolute or relative path. If relative,
            by default it is relative to the current directory (otherwise relative
            to ``direc``). By default, the filename is auto-generated as unique to
            the set of parameters that go into producing the data.
        direc : str, optional
            The directory into which to write the data. By default the current directory.
            Ignored if ``fname`` is an absolute path.
        """
        # If fname is absolute path, then get direc from it, otherwise assume current dir.
        # NOTE(review): ``path.isabs(None)`` raises TypeError -- ``fname`` appears to
        # be required in practice despite its default; confirm intended behaviour.
        if path.isabs(fname):
            direc = path.dirname(fname)
        self.write(direc, fname)

    def read(self, direc: [str, Path, None] = None, fname: [str, Path, None] = None):
        """
        Try find and read existing boxes from cache, which match the parameters of this instance.
        Parameters
        ----------
        direc
            The directory in which to search for the boxes. By default, this is the
            centrally-managed directory, given by the ``config.yml`` in ``~/.21cmfast/``.
        fname
            The filename to read. By default, use the filename associated with this
            object.
        """
        if self.filled:
            raise IOError("This data is already filled, no need to read in.")
        if fname is None:
            pth = self.find_existing(direc)
            if pth is None:
                raise IOError("No boxes exist for these parameters.")
        else:
            direc = Path(direc or config["direc"]).expanduser()
            fname = Path(fname)
            pth = fname if fname.exists() else direc / fname
        # Need to make sure arrays are initialized before reading in data to them.
        if not self.arrays_initialized:
            self._init_cstruct()
        with h5py.File(pth, "r") as f:
            try:
                boxes = f[self._name]
            except KeyError:
                raise IOError(
                    f"While trying to read in {self._name}, the file exists, but does not have the "
                    "correct structure."
                )
            # Fill our arrays.
            for k in boxes.keys():
                if k in self._c_based_pointers:
                    # C-based pointers can just be read straight in.
                    setattr(self, k, boxes[k][...])
                else:
                    # Other pointers should fill the already-instantiated arrays.
                    getattr(self, k)[...] = boxes[k][...]
            for k in boxes.attrs.keys():
                if k == "version":
                    version = ".".join(boxes.attrs[k].split(".")[:2])
                    patch = ".".join(boxes.attrs[k].split(".")[2:])
                    if version != ".".join(__version__.split(".")[:2]):
                        # Ensure that the major and minor versions are the same.
                        warnings.warn(
                            f"The file {pth} is out of date (version = {version}.{patch}). "
                            f"Consider using another box and removing it!"
                        )
                    self.version = version
                    self.patch_version = patch
                setattr(self, k, boxes.attrs[k])
            # Need to make sure that the seed is set to the one that's read in.
            seed = f.attrs["random_seed"]
            self._random_seed = seed
        self.filled = True
        self._expose()

    @classmethod
    def from_file(cls, fname, direc=None, load_data=True):
        """Create an instance from a file on disk.
        Parameters
        ----------
        fname : str, optional
            Path to the file on disk. May be relative or absolute.
        direc : str, optional
            The directory from which fname is relative to (if it is relative). By
            default, will be the cache directory in config.
        load_data : bool, optional
            Whether to read in the data when creating the instance. If False, a bare
            instance is created with input parameters -- the instance can read data
            with the :func:`read` method.
        """
        direc = path.expanduser(direc or config["direc"])
        if not path.exists(fname):
            fname = path.join(direc, fname)
        self = cls(**cls._read_inputs(fname))
        if load_data:
            self.read(fname=fname)
        return self

    def __repr__(self):
        """Return a fully unique representation of the instance."""
        # Must be an *instance* method: ``__hash__``/``__eq__`` below depend on
        # ``repr(self)`` including this instance's seed, and Python looks dunder
        # methods up on the type (a classmethod here would break ``repr()``).
        # eg. InitialConditions(HII_DIM:100,SIGMA_8:0.8,...)
        return self._seedless_repr() + "_random_seed={}".format(self._random_seed)

    def __str__(self):
        """Return a human-readable representation of the instance."""
        # this is *not* a unique representation, and doesn't include global params.
        return (
            self._name
            + "("
            + ";\n\t".join(
                [
                    repr(v)
                    if isinstance(v, StructWithDefaults)
                    else k.lstrip("_") + ":" + repr(v)
                    for k, v in [(k, getattr(self, k)) for k in self._inputs]
                ]
            )
            + ")"
        )

    def __hash__(self):
        """Return a unique hash for this instance, even global params and random seed."""
        return hash(repr(self))

    @property
    def _md5(self):
        """Return a unique hash of the object, *not* taking into account the random seed."""
        return md5(self._seedless_repr().encode()).hexdigest()

    def __eq__(self, other):
        """Check equality with another object via its __repr__."""
        return repr(self) == repr(other)

    def compute(self, direc, *args, write=True):
        """Compute the actual function that fills this struct."""
        logger.debug(f"Calling {self._c_compute_function.__name__} with args: {args}")
        try:
            # Wrapped structs must be converted to their raw cstructs for C.
            exitcode = self._c_compute_function(
                *[arg() if isinstance(arg, StructWrapper) else arg for arg in args],
                self(),
            )
        except TypeError as e:
            logger.error(
                f"Arguments to {self._c_compute_function.__name__}: "
                f"{[arg() if isinstance(arg, StructWrapper) else arg for arg in args]}"
            )
            raise e
        _process_exitcode(exitcode, self._c_compute_function, args)
        # Ensure memory created in C gets mapped to numpy arrays in this struct.
        self.filled = True
        self._memory_map()
        self._expose()
        # Optionally do stuff with the result (like writing it)
        if write:
            self.write(direc)
        return self

    def __del__(self):
        """Safely delete the object and its C-allocated memory."""
        if self._c_free_function is not None:
            self._c_free_function(self._cstruct)
class StructInstanceWrapper:
    """A wrapper around *instances* of C structs.

    This is as opposed to :class:`StructWrapper`, which wraps the un-instantiated
    struct types themselves. Attribute assignments are mirrored onto the wrapped
    C object whenever possible.

    Parameters
    ----------
    wrapped :
        The reference to the C object to wrap (contained in the ``cffi.lib`` object).
    ffi :
        The ``cffi.ffi`` object.
    """

    def __setattr__(self, name, value):
        """Mirror the assignment onto the wrapped C struct, then store it locally."""
        try:
            setattr(self._cobj, name, value)
        except AttributeError:
            # Either the C object is not attached yet, or it lacks this member;
            # either way the attribute is still recorded on the Python side.
            pass
        object.__setattr__(self, name, value)

    def items(self):
        """Yield (name, value) pairs for each element of the struct."""
        struct_fields = self._ffi.typeof(self._cobj).fields
        for field_name, _field_type in struct_fields:
            yield field_name, getattr(self, field_name)

    def keys(self):
        """Return a list of names of elements in the struct."""
        return [field_name for field_name, _value in self.items()]

    def __repr__(self):
        """Return a unique representation of the instance."""
        body = ";".join(f"{k}={v}" for k, v in sorted(self.items()))
        return f"{self._ctype}({body})"

    def filtered_repr(self, filter_params):
        """Get a fully unique representation of the instance that filters out some parameters.

        Parameters
        ----------
        filter_params : list of str
            The parameter names which should not appear in the representation.
        """
        body = ";".join(
            f"{k}={v}" for k, v in sorted(self.items()) if k not in filter_params
        )
        return f"{self._ctype}({body})"
def _check_compatible_inputs(*datasets, ignore=["redshift"]):
"""Ensure that all defined input parameters for the provided datasets are equal.
Parameters
----------
datasets : list of :class:`~_utils.OutputStruct`
A number of output datasets to cross-check.
ignore : list of str
Attributes to ignore when ensuring that parameter inputs are the same.
Raises
------
ValueError :
If datasets are not compatible.
"""
done = [] # keeps track of inputs we've checked so we don't double check.
for i, d in enumerate(datasets):
# If a dataset is None, just ignore and move on.
if d is None:
continue
# noinspection PyProtectedMember
for inp in d._inputs:
# Skip inputs that we want to ignore
if inp in ignore:
continue
if inp not in done:
for j, d2 in enumerate(datasets[(i + 1) :]):
if d2 is None:
continue
# noinspection PyProtectedMember
if inp in d2._inputs and getattr(d, inp) != getattr(d2, inp):
raise ValueError(
"%s and %s are incompatible"
% (d.__class__.__name__, d2.__class__.__name__)
)
done += [inp]
| [
37811,
18274,
2410,
326,
1037,
351,
27074,
2972,
327,
8573,
526,
15931,
198,
11748,
4866,
198,
11748,
15095,
198,
11748,
289,
20,
9078,
198,
11748,
18931,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
14601,
198,
6738,
269,
487,
72,
1... | 2.263716 | 13,397 |
import pathlib
import pandas as pd
import pytest
import taxes.download as dload
from taxes.loading import ( # noqa: E501
dload_to_df_list,
get_gov_dir,
gov_dir_to_names_dict,
names_dict_to_df_dict,
)
# Tax year -> (download URLs on gov.pl, target filenames); the two lists are
# parallel: URL i is saved under filename i.
gov_dir = {
    2019: (
        [
            "https://www.gov.pl/attachment/6594af94-cd1e-49fb-9149-99fd663aef25",  # noqa: E501
            "https://www.gov.pl/attachment/5f2abc44-6a7e-4b73-8999-696920252efc",  # noqa: E501
            "https://www.gov.pl/attachment/141da745-800d-44c5-ac97-e90c4cbd5e11",  # noqa: E501
            "https://www.gov.pl/attachment/12150aff-d70e-412b-afdc-2bc5341dc823",  # noqa: E501
            "https://www.gov.pl/attachment/141eeb3c-dedc-4491-b0bf-895587824eff",  # noqa: E501
        ],
        [
            "20200214_Gminy_za_2019.xlsx",
            "20200214_Powiaty_za_2019.xlsx",
            "20200214_Miasta_NPP_za_2019.xlsx",
            "20200214_Gornoslasko_Zaglebiowska_Metropolia.xlsx",
            "20200214_Wojewodztwa_za_2019.xlsx",
        ],
    ),
    2020: (
        [
            "https://www.gov.pl/attachment/31d60032-a3c5-4e4f-8af8-67c8fa09afd2",  # noqa: E501
            "https://www.gov.pl/attachment/82cb06d7-02e6-4d24-a8b4-9926fe0a3079",  # noqa: E501
            "https://www.gov.pl/attachment/bafb6020-bca0-4ec8-9369-845e0afb94d9",  # noqa: E501
            "https://www.gov.pl/attachment/e4077a76-1fbc-478e-a15d-eea0a4e3f130",  # noqa: E501
            "https://www.gov.pl/attachment/0b98f8be-e9e1-48e3-8bc5-796e8c0b169e",  # noqa: E501
        ],
        [
            "20210215_Gminy_2_za_2020.xlsx",
            "20210211_Powiaty_za_2020.xlsx",
            "20210215_Miasta_NPP_2_za_2020.xlsx",
            "20210211_Metropolia_2020.xlsx",
            "20210211_Wojewodztwa_za_2020.xlsx",
        ],
    ),
}
# Local directories used by the tests: downloaded sheets and GUS data.
dir_sheets = pathlib.Path.cwd().joinpath("data")
gus_dir = pathlib.Path.cwd().joinpath("data", "gus")
# Tax year -> {administrative-unit kind -> expected spreadsheet filename}.
names = {
    2019: {
        "Gminy": "20200214_Gminy_za_2019.xlsx",
        "Powiaty": "20200214_Powiaty_za_2019.xlsx",
        "Miasta_NPP": "20200214_Miasta_NPP_za_2019.xlsx",
        "Metropolia": "20200214_Gornoslasko_Zaglebiowska_Metropolia.xlsx",
        "Wojewodztwa": "20200214_Wojewodztwa_za_2019.xlsx",
    },
    2020: {
        "Gminy": "20210215_Gminy_2_za_2020.xlsx",
        "Powiaty": "20210211_Powiaty_za_2020.xlsx",
        "Miasta_NPP": "20210215_Miasta_NPP_2_za_2020.xlsx",
        "Metropolia": "20210211_Metropolia_2020.xlsx",
        "Wojewodztwa": "20210211_Wojewodztwa_za_2020.xlsx",
    },
}
@pytest.mark.parametrize("years", [[2019, 2020]])
@pytest.mark.parametrize("years", [[2019, 2020]])
| [
11748,
3108,
8019,
198,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
12972,
9288,
198,
11748,
5704,
13,
15002,
355,
288,
2220,
198,
6738,
5704,
13,
25138,
1330,
357,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
288,... | 1.717547 | 1,533 |
# -*- coding: utf-8 -*-
import sys
from typing import List
import time
import datetime
from .exceptions import NotEnoughValuesError, UnrecognizedFlagError
# MIT License
#
# Copyright (c) 2019-2020 karx1
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class FlagParser:
"""
This is the main class for parsing flags.
:param program_name: The name of the program. Defaults to :class:`sys.argv[0]`
:type program_name: str, optional
:param description: The message to display before the arguments.
:type description: str, optional
:param epilogue: The message to display at the end of the help message
:type epilogue: str, optional
:param prefix_chars: The prefix of each argument. Defaults to '-'
:type prefix_chars: str, optional
:param debug: Turns on or off debug mode. Defaults to false.
:type debug: bool, optional
:param debug_file: The file to write to in debug mode. Needs to be a file object as returned by :class:`open`. Defaults to :class`sys.stdout`.
:type debug_file: file, optional
flags
A dictionary of flags and their values.
For example:\n
.. code:: py
{"--flag, -f": True}
"""
def add_flag(self, *args: str, value: bool, help: str = None):
"""Add a flag to the parser.
:param args: Things to name the flag. Maximum of two values.
:type args: str
:param value: The value of the flag when present.
:type value: bool
:param help: A brief description of the flag. These descriptions will be displayed when the `-h` or `--help` flags are present.
:type help: str, optional
"""
self._log("Computing values")
if len(args) < 0:
raise NotEnoughValuesError("Must provide at least one flag to add")
args = args[:2]
string_one = args[0]
try:
string_two = args[1]
except IndexError:
string_two = None
if string_two:
self._log("Adding flag")
bigger_string = _string_max(string_one, string_two)
smaller_string = _string_min(string_one, string_two)
key_string = f"{bigger_string}, {smaller_string}"
help_string = f"{key_string} - {help}"
self.flags[key_string] = value
self._added_flags[bigger_string] = value
self._added_flags[smaller_string] = value
self._help_messages.append(help_string)
self._flag_pairs[bigger_string] = smaller_string
else:
self._log("Adding flag")
key_string = string_one
self.flags[key_string] = value
self._added_flags[string_one] = value
self._log("Added flag")
    def parse_flags(self, flag_list: List[str] = None):
        """Parse the flag inputs. Returns an object with the values of each flag.
        See :ref:`parsing` for more info.
        :param flag_list: List of flags to parse. This can be used for testing. Defaults to :class:`sys.argv[1:]`.
        :type flag_list: list, optional
        :return: Returns an object containing the values of all the flags.
        :rtype: :class:`_ParsedObj`
        """
        # An empty list is falsy, so an explicitly-passed [] also falls back to argv.
        flag_list = flag_list or sys.argv[1:]
        self._log("Formatting help string")
        formatter = _HelpFormatter(
            self._help_messages,
            self.program_name,
            description=self.description,
            epilogue=self.epilogue,
        )
        help_string = formatter.format()
        self._log("Checking for help flag")
        # -h/--help short-circuits parsing: print the help text and exit.
        if "--help" in flag_list or "-h" in flag_list:
            print(help_string)
            sys.exit()
        parsed = _ParsedObj()
        self._log("Adding values to _ParsedObj instance")
        # Default every flag attribute to the *negation* of its present-value,
        # i.e. the value it should have when the flag is absent.
        for key, value in self._added_flags.items():
            stripped_flag = key.replace("-", "")
            flipped_bool = not value
            setattr(parsed, stripped_flag, flipped_bool)
        for flag in flag_list:
            if flag in self._added_flags:
                stripped_flag = flag.replace("-", "")
                values = self._flag_pairs.values()
                # If the *short* spelling was given, also update the attribute
                # derived from its long counterpart.
                # NOTE(review): this is asymmetric -- giving the long spelling
                # does not update the short spelling's attribute; confirm intent.
                if flag in values:
                    key = list(self._flag_pairs.keys())[list(self._flag_pairs.values()).index(flag)]
                    short_version = key.replace("-", "")
                    setattr(parsed, short_version, self._added_flags[flag])
                setattr(parsed, stripped_flag, self._added_flags[flag])
            else:
                raise UnrecognizedFlagError(f"Unrecognized flag: {flag}")
        self._log("Done, cleaning up")
        return parsed
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
628,
198,
11748,
25064,
198,
6738,
19720,
1330,
7343,
198,
11748,
640,
198,
11748,
4818,
8079,
198,
6738,
764,
1069,
11755,
1330,
1892,
47323,
40161,
12331,
11,
791,
26243,
114... | 2.500886 | 2,258 |
import unittest
from tropopause import Tags as BaseTags
from tropopause.ec2 import InternetGatewayVPC, PublicSubnet
from tropopause.autoscaling import AutoScalingGroup, LaunchConfigurationRPM
from troposphere import Ref, Template
from troposphere.autoscaling import LaunchConfiguration, Tag
class TestAutoscaling(unittest.TestCase):
""" Unit Tests for tropopause.autoscaling """
| [
11748,
555,
715,
395,
198,
198,
6738,
14673,
404,
682,
1330,
44789,
355,
7308,
36142,
198,
6738,
14673,
404,
682,
13,
721,
17,
1330,
4455,
22628,
1014,
53,
5662,
11,
5094,
7004,
3262,
198,
6738,
14673,
404,
682,
13,
2306,
17500,
4272,... | 3.67619 | 105 |
import subprocess
| [
11748,
850,
14681,
628,
628
] | 4.2 | 5 |
import argparse
import logging
import utils # noqa: F401 Keep for django_hack
from apps.noclook.models import NodeType, NodeHandle
from actstream.models import Action
logger = logging.getLogger('noclook_cleanup_peering_partners')
if __name__ == '__main__':
    # Configure the module logger exactly once (handlers can already be present
    # if logging was set up elsewhere).
    if not len(logger.handlers):
        # Keep records from bubbling up to the root logger.
        logger.propagate = False
        logger.setLevel(logging.WARNING)
        stream_handler = logging.StreamHandler()
        stream_handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
        stream_handler.setFormatter(formatter)
        logger.addHandler(stream_handler)
    main()
| [
11748,
1822,
29572,
198,
11748,
18931,
198,
11748,
3384,
4487,
220,
1303,
645,
20402,
25,
376,
21844,
9175,
329,
42625,
14208,
62,
31153,
198,
6738,
6725,
13,
77,
420,
5460,
13,
27530,
1330,
19081,
6030,
11,
19081,
37508,
198,
6738,
719... | 2.644898 | 245 |
"""
@project : pyrgbdev
@author : Gooday2die
@date : 2022-02-13
@file : RainbowAll.py
"""
import threading
import time
from pyrgbdev import All
from abstractDemo import AbstractDemo
if __name__ == '__main__':
    # Connect to all supported RGB devices through the aggregate SDK.
    sdk_object = All.sdk()
    sdk_object.connect()
    # NOTE(review): ``Demo`` is presumably the AbstractDemo subclass defined in
    # this module (only AbstractDemo is imported above) -- confirm.
    rainbow_all = Demo()
    # Very small delay -> fast rainbow cycling across all connected devices.
    rainbow_all.run(sdk_object=sdk_object, delay=0.0001)
| [
37811,
198,
31,
16302,
1058,
279,
2417,
22296,
7959,
198,
31,
9800,
1058,
4599,
323,
17,
11979,
198,
31,
4475,
1058,
33160,
12,
2999,
12,
1485,
198,
31,
7753,
1058,
19909,
3237,
13,
9078,
198,
37811,
198,
11748,
4704,
278,
198,
11748,... | 2.558824 | 136 |
# Read the sentence, trimming the ends and normalising case so the
# palindrome test is case-insensitive.
frase = input('Digite uma frase: ').strip().upper()
# Join the words together so spaces don't affect the comparison.
junto = ''.join(frase.split())
# Idiomatic reversal with an extended slice replaces the manual index loop.
inverso = junto[::-1]
if inverso == junto:
    print('A frase digitada é um palindromo')
else:
    print('A frase digitada não é um palindromo')
| [
8310,
589,
796,
965,
357,
15414,
220,
19203,
19511,
578,
220,
334,
2611,
1216,
589,
25,
705,
29720,
36311,
22446,
45828,
3419,
198,
18596,
615,
8847,
796,
1216,
589,
13,
35312,
3419,
198,
29741,
1462,
796,
705,
4458,
22179,
7,
18596,
... | 2.402985 | 134 |
import numpy as np
from src.bandit_algorithms.bandit_learner import BanditLearner
# n_arms = number of arms the learner can pull.
# Select which arm to pull by sampling beta distribution.
# We select the max value from the values sampled.
# pulled_arm = arm pulled.
# reward = reward of arm pulled.
| [
11748,
299,
32152,
355,
45941,
198,
198,
6738,
12351,
13,
3903,
270,
62,
282,
7727,
907,
13,
3903,
270,
62,
3238,
1008,
1330,
10243,
270,
14961,
1008,
628,
220,
220,
220,
1303,
299,
62,
8357,
796,
1271,
286,
5101,
262,
22454,
1008,
... | 3.23 | 100 |
from .causal_graph import CausalGraph
from .transition_system import FiniteTransitionSystem | [
6738,
764,
6888,
6775,
62,
34960,
1330,
6488,
6775,
37065,
198,
6738,
764,
7645,
653,
62,
10057,
1330,
4463,
578,
8291,
653,
11964
] | 3.956522 | 23 |
#!/usr/bin/env python3
# Copyright 2021 - 2022 Universität Tübingen, DKFZ and EMBL
# for the German Human Genome-Phenome Archive (GHGA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Populates the database directly with example data for each record type"""
import asyncio
import json
import os
from pathlib import Path
import motor.motor_asyncio
import typer
# pylint: disable=too-many-arguments
# Directory containing this script; used to locate the bundled example data.
HERE: Path = Path(__file__).parent.resolve()
# Default location of the example JSON files (../example_data next to this
# script). This value is a Path, not a str — the original ``str`` annotation
# was wrong and needed a ``# type: ignore``; annotating it correctly removes
# that workaround.
DEFAULT_EXAMPLES_DIR: Path = HERE.parent / "example_data"
# (record_type, collection_name) pairs for every record type to load.
# A tuple (rather than a set) keeps iteration order deterministic across runs.
RECORD_TYPES = (
    ("analyses", "Analysis"),
    ("biospecimens", "Biospecimen"),
    ("data_access_committees", "DataAccessCommittee"),
    ("data_access_policies", "DataAccessPolicy"),
    ("datasets", "Dataset"),
    ("experiments", "Experiment"),
    ("files", "File"),
    ("individuals", "Individual"),
    ("members", "Member"),
    ("samples", "Sample"),
    ("studies", "Study"),
    ("technologies", "Technology"),
    ("publications", "Publication"),
    ("projects", "Project"),
    ("phenotypic_features", "PhenotypicFeature"),
)
async def populate_record(
    example_dir: str, record_type: str, db_url: str, db_name: str, collection_name: str
):
    """Populate the database with data for a specific record type"""
    # Example files are named after the record type, e.g. "datasets.json".
    json_path = os.path.join(example_dir, f"{record_type}.json")
    if not os.path.exists(json_path):
        # Missing example files are skipped silently on purpose.
        return
    with open(json_path, encoding="utf-8") as handle:
        payload = json.load(handle)
    # Each example file wraps its records under a key named after the record type.
    await insert_records(db_url, db_name, collection_name, payload[record_type])
async def create_text_index(db_url: str, db_name: str, collection_name: str):
    """Create a text index on a collection"""
    # The "$**" wildcard key indexes every string field for full-text search.
    mongo_client = motor.motor_asyncio.AsyncIOMotorClient(db_url)
    await mongo_client[db_name][collection_name].create_index([("$**", "text")])
async def insert_records(db_url, db_name, collection_name, records):
    """Insert a set of records to the database"""
    mongo_client = motor.motor_asyncio.AsyncIOMotorClient(db_url)
    target = mongo_client[db_name][collection_name]
    await target.insert_many(records)
async def count_documents_in_collection(db_url, db_name, collection_name):
    """Check whether there is data in a given collection"""
    mongo_client = motor.motor_asyncio.AsyncIOMotorClient(db_url)
    target = mongo_client[db_name][collection_name]
    # Empty filter counts every document in the collection.
    return await target.count_documents({})
def main(
    example_dir: str = DEFAULT_EXAMPLES_DIR,
    db_url: str = "mongodb://localhost:27017",
    db_name: str = "metadata-store",
    reload: bool = False,
):
    """Populate the database with records for all record types

    Args:
        example_dir: directory holding one ``<record_type>.json`` file per type.
        db_url: MongoDB connection URL.
        db_name: name of the target database.
        reload: if True, allow writing into non-empty collections.

    Raises:
        FileNotFoundError: if ``example_dir`` does not exist (subclass of
            IOError/OSError, so existing callers that caught IOError still work).
        RuntimeError: if a target collection is non-empty and ``reload`` is False.
    """
    typer.echo("This will populate the database with records for all record types.")
    if not os.path.exists(example_dir):
        raise FileNotFoundError(f"Directory '{example_dir}' does not exist.")
    # sorted() makes the processing order deterministic even though
    # RECORD_TYPES may be an unordered collection.
    for record_type, collection_name in sorted(RECORD_TYPES):
        typer.echo(f" - working on record type: {record_type}")
        # asyncio.run() replaces the get_event_loop()/run_until_complete
        # pattern, which is deprecated for this use since Python 3.10.
        count = asyncio.run(
            count_documents_in_collection(db_url, db_name, collection_name)
        )
        if count > 0 and not reload:
            raise RuntimeError(
                f"Cannot write to a non-empty {collection_name} collection."
            )
        asyncio.run(
            populate_record(example_dir, record_type, db_url, db_name, collection_name)
        )
        asyncio.run(create_text_index(db_url, db_name, collection_name))
    typer.echo("Done.")
# Expose the Typer CLI entry point when the script is executed directly.
if __name__ == "__main__":
    typer.run(main)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
2,
15069,
33448,
532,
33160,
26986,
270,
11033,
83,
309,
9116,
4623,
268,
11,
32975,
37,
57,
290,
17228,
9148,
198,
2,
329,
262,
2679,
5524,
5215,
462,
12,
47,
831,
462,
2... | 2.780504 | 1,467 |
import os
import io
from setuptools import setup, find_packages
from os import path
from io import open
setup(
    # Distribution name registered on PyPI; determines how users install the
    # project (``pip install SimpleRestApp``). Naming rules:
    # https://packaging.python.org/specifications/core-metadata/#name
    name='SimpleRestApp',  # Required
    # Version string following PEP 440:
    # https://www.python.org/dev/peps/pep-0440/
    version='1.3.0',  # Required
    # Project home page shown on PyPI.
    url="https://github.com/womenwhocoderichmond/DataPy-CI-Pipeline",
    # Automatically discover all importable packages in the source tree.
    packages=find_packages()
)
| [
11748,
28686,
198,
11748,
33245,
198,
6738,
900,
37623,
10141,
1330,
9058,
11,
1064,
62,
43789,
198,
6738,
28686,
1330,
3108,
198,
6738,
33245,
1330,
1280,
198,
198,
40406,
7,
198,
220,
220,
220,
1303,
770,
318,
262,
1438,
286,
534,
1... | 2.871739 | 460 |
# -*- coding: utf-8 -*-
"""
This module
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
# NOTE(review): looks machine-generated from the CloudFormation resource spec —
# prefer regenerating over hand-editing. All ``p_*`` fields here are optional
# (validators wrapped in attr.validators.optional).
@attr.s
class PropDBProxyTargetGroupConnectionPoolConfigurationInfoFormat(Property):
    """
    AWS Object Type = "AWS::RDS::DBProxyTargetGroup.ConnectionPoolConfigurationInfoFormat"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html
    Property Document:
    - ``p_ConnectionBorrowTimeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-connectionborrowtimeout
    - ``p_InitQuery``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-initquery
    - ``p_MaxConnectionsPercent``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-maxconnectionspercent
    - ``p_MaxIdleConnectionsPercent``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-maxidleconnectionspercent
    - ``p_SessionPinningFilters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-sessionpinningfilters
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBProxyTargetGroup.ConnectionPoolConfigurationInfoFormat"

    p_ConnectionBorrowTimeout: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "ConnectionBorrowTimeout"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-connectionborrowtimeout"""
    p_InitQuery: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "InitQuery"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-initquery"""
    p_MaxConnectionsPercent: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "MaxConnectionsPercent"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-maxconnectionspercent"""
    p_MaxIdleConnectionsPercent: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "MaxIdleConnectionsPercent"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-maxidleconnectionspercent"""
    p_SessionPinningFilters: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "SessionPinningFilters"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfoformat-sessionpinningfilters"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec.
# ``rp_*`` fields are required (non-optional validator); ``p_*`` fields are optional.
@attr.s
class PropDBInstanceDBInstanceRole(Property):
    """
    AWS Object Type = "AWS::RDS::DBInstance.DBInstanceRole"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-dbinstancerole.html
    Property Document:
    - ``rp_FeatureName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-dbinstancerole.html#cfn-rds-dbinstance-dbinstancerole-featurename
    - ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-dbinstancerole.html#cfn-rds-dbinstance-dbinstancerole-rolearn
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBInstance.DBInstanceRole"

    rp_FeatureName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "FeatureName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-dbinstancerole.html#cfn-rds-dbinstance-dbinstancerole-featurename"""
    rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-dbinstancerole.html#cfn-rds-dbinstance-dbinstancerole-rolearn"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec —
# prefer regenerating over hand-editing. All fields here are optional.
@attr.s
class PropDBClusterScalingConfiguration(Property):
    """
    AWS Object Type = "AWS::RDS::DBCluster.ScalingConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-scalingconfiguration.html
    Property Document:
    - ``p_AutoPause``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-scalingconfiguration.html#cfn-rds-dbcluster-scalingconfiguration-autopause
    - ``p_MaxCapacity``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-scalingconfiguration.html#cfn-rds-dbcluster-scalingconfiguration-maxcapacity
    - ``p_MinCapacity``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-scalingconfiguration.html#cfn-rds-dbcluster-scalingconfiguration-mincapacity
    - ``p_SecondsUntilAutoPause``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-scalingconfiguration.html#cfn-rds-dbcluster-scalingconfiguration-secondsuntilautopause
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBCluster.ScalingConfiguration"

    p_AutoPause: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "AutoPause"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-scalingconfiguration.html#cfn-rds-dbcluster-scalingconfiguration-autopause"""
    p_MaxCapacity: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "MaxCapacity"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-scalingconfiguration.html#cfn-rds-dbcluster-scalingconfiguration-maxcapacity"""
    p_MinCapacity: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "MinCapacity"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-scalingconfiguration.html#cfn-rds-dbcluster-scalingconfiguration-mincapacity"""
    p_SecondsUntilAutoPause: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "SecondsUntilAutoPause"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-scalingconfiguration.html#cfn-rds-dbcluster-scalingconfiguration-secondsuntilautopause"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec.
# Simple name/value pair; both fields optional.
@attr.s
class PropDBInstanceProcessorFeature(Property):
    """
    AWS Object Type = "AWS::RDS::DBInstance.ProcessorFeature"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-processorfeature.html
    Property Document:
    - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-processorfeature.html#cfn-rds-dbinstance-processorfeature-name
    - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-processorfeature.html#cfn-rds-dbinstance-processorfeature-value
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBInstance.ProcessorFeature"

    p_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-processorfeature.html#cfn-rds-dbinstance-processorfeature-name"""
    p_Value: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Value"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbinstance-processorfeature.html#cfn-rds-dbinstance-processorfeature-value"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec —
# prefer regenerating over hand-editing. All fields here are optional.
@attr.s
class PropDBSecurityGroupIngress(Property):
    """
    AWS Object Type = "AWS::RDS::DBSecurityGroup.Ingress"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group-rule.html
    Property Document:
    - ``p_CIDRIP``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group-rule.html#cfn-rds-securitygroup-cidrip
    - ``p_EC2SecurityGroupId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group-rule.html#cfn-rds-securitygroup-ec2securitygroupid
    - ``p_EC2SecurityGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group-rule.html#cfn-rds-securitygroup-ec2securitygroupname
    - ``p_EC2SecurityGroupOwnerId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group-rule.html#cfn-rds-securitygroup-ec2securitygroupownerid
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBSecurityGroup.Ingress"

    p_CIDRIP: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "CIDRIP"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group-rule.html#cfn-rds-securitygroup-cidrip"""
    p_EC2SecurityGroupId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EC2SecurityGroupId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group-rule.html#cfn-rds-securitygroup-ec2securitygroupid"""
    p_EC2SecurityGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EC2SecurityGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group-rule.html#cfn-rds-securitygroup-ec2securitygroupname"""
    p_EC2SecurityGroupOwnerId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EC2SecurityGroupOwnerId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group-rule.html#cfn-rds-securitygroup-ec2securitygroupownerid"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec.
# Simple key/value tag pair; both fields optional.
@attr.s
class PropDBProxyTagFormat(Property):
    """
    AWS Object Type = "AWS::RDS::DBProxy.TagFormat"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-tagformat.html
    Property Document:
    - ``p_Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-tagformat.html#cfn-rds-dbproxy-tagformat-key
    - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-tagformat.html#cfn-rds-dbproxy-tagformat-value
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBProxy.TagFormat"

    p_Key: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Key"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-tagformat.html#cfn-rds-dbproxy-tagformat-key"""
    p_Value: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Value"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-tagformat.html#cfn-rds-dbproxy-tagformat-value"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec —
# prefer regenerating over hand-editing. All fields here are optional.
@attr.s
class PropDBProxyAuthFormat(Property):
    """
    AWS Object Type = "AWS::RDS::DBProxy.AuthFormat"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html
    Property Document:
    - ``p_AuthScheme``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-authscheme
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-description
    - ``p_IAMAuth``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-iamauth
    - ``p_SecretArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-secretarn
    - ``p_UserName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-username
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBProxy.AuthFormat"

    p_AuthScheme: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "AuthScheme"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-authscheme"""
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-description"""
    p_IAMAuth: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "IAMAuth"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-iamauth"""
    p_SecretArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SecretArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-secretarn"""
    p_UserName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "UserName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxy-authformat.html#cfn-rds-dbproxy-authformat-username"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec.
# Simple key/value tag pair; both fields optional.
@attr.s
class PropDBProxyEndpointTagFormat(Property):
    """
    AWS Object Type = "AWS::RDS::DBProxyEndpoint.TagFormat"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxyendpoint-tagformat.html
    Property Document:
    - ``p_Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxyendpoint-tagformat.html#cfn-rds-dbproxyendpoint-tagformat-key
    - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxyendpoint-tagformat.html#cfn-rds-dbproxyendpoint-tagformat-value
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBProxyEndpoint.TagFormat"

    p_Key: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Key"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxyendpoint-tagformat.html#cfn-rds-dbproxyendpoint-tagformat-key"""
    p_Value: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Value"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbproxyendpoint-tagformat.html#cfn-rds-dbproxyendpoint-tagformat-value"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec.
# Simple name/value pair; both fields optional.
@attr.s
class PropOptionGroupOptionSetting(Property):
    """
    AWS Object Type = "AWS::RDS::OptionGroup.OptionSetting"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations-optionsettings.html
    Property Document:
    - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations-optionsettings.html#cfn-rds-optiongroup-optionconfigurations-optionsettings-name
    - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations-optionsettings.html#cfn-rds-optiongroup-optionconfigurations-optionsettings-value
    """
    AWS_OBJECT_TYPE = "AWS::RDS::OptionGroup.OptionSetting"

    p_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations-optionsettings.html#cfn-rds-optiongroup-optionconfigurations-optionsettings-name"""
    p_Value: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Value"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations-optionsettings.html#cfn-rds-optiongroup-optionconfigurations-optionsettings-value"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec.
# ``rp_*`` fields are required (non-optional validator); ``p_*`` fields are optional.
@attr.s
class PropDBClusterDBClusterRole(Property):
    """
    AWS Object Type = "AWS::RDS::DBCluster.DBClusterRole"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-dbclusterrole.html
    Property Document:
    - ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-dbclusterrole.html#cfn-rds-dbcluster-dbclusterrole-rolearn
    - ``p_FeatureName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-dbclusterrole.html#cfn-rds-dbcluster-dbclusterrole-featurename
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBCluster.DBClusterRole"

    rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-dbclusterrole.html#cfn-rds-dbcluster-dbclusterrole-rolearn"""
    p_FeatureName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "FeatureName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbcluster-dbclusterrole.html#cfn-rds-dbcluster-dbclusterrole-featurename"""
# NOTE(review): looks machine-generated from the CloudFormation resource spec —
# prefer regenerating over hand-editing.
# ``rp_*`` fields are required (non-optional validator); ``p_*`` fields are optional.
@attr.s
class PropOptionGroupOptionConfiguration(Property):
    """
    AWS Object Type = "AWS::RDS::OptionGroup.OptionConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html
    Property Document:
    - ``rp_OptionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-optionname
    - ``p_DBSecurityGroupMemberships``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-dbsecuritygroupmemberships
    - ``p_OptionSettings``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-optionsettings
    - ``p_OptionVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfiguration-optionversion
    - ``p_Port``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-port
    - ``p_VpcSecurityGroupMemberships``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-vpcsecuritygroupmemberships
    """
    AWS_OBJECT_TYPE = "AWS::RDS::OptionGroup.OptionConfiguration"

    rp_OptionName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "OptionName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-optionname"""
    p_DBSecurityGroupMemberships: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "DBSecurityGroupMemberships"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-dbsecuritygroupmemberships"""
    p_OptionSettings: typing.List[typing.Union['PropOptionGroupOptionSetting', dict]] = attr.ib(
        default=None,
        converter=PropOptionGroupOptionSetting.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropOptionGroupOptionSetting), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "OptionSettings"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-optionsettings"""
    p_OptionVersion: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "OptionVersion"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfiguration-optionversion"""
    p_Port: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "Port"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-port"""
    p_VpcSecurityGroupMemberships: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "VpcSecurityGroupMemberships"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-optiongroup-optionconfigurations.html#cfn-rds-optiongroup-optionconfigurations-vpcsecuritygroupmemberships"""
#--- Resource declaration ---
# NOTE(review): looks machine-generated from the CloudFormation resource spec —
# prefer regenerating over hand-editing.
# ``rp_*`` fields are required (non-optional validator); ``p_*`` fields are optional.
@attr.s
class DBSubnetGroup(Resource):
    """
    AWS Object Type = "AWS::RDS::DBSubnetGroup"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbsubnet-group.html
    Property Document:
    - ``rp_DBSubnetGroupDescription``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbsubnet-group.html#cfn-rds-dbsubnetgroup-dbsubnetgroupdescription
    - ``rp_SubnetIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbsubnet-group.html#cfn-rds-dbsubnetgroup-subnetids
    - ``p_DBSubnetGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbsubnet-group.html#cfn-rds-dbsubnetgroup-dbsubnetgroupname
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbsubnet-group.html#cfn-rds-dbsubnetgroup-tags
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBSubnetGroup"

    rp_DBSubnetGroupDescription: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "DBSubnetGroupDescription"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbsubnet-group.html#cfn-rds-dbsubnetgroup-dbsubnetgroupdescription"""
    rp_SubnetIds: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "SubnetIds"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbsubnet-group.html#cfn-rds-dbsubnetgroup-subnetids"""
    p_DBSubnetGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBSubnetGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbsubnet-group.html#cfn-rds-dbsubnetgroup-dbsubnetgroupname"""
    p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
        default=None,
        converter=Tag.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbsubnet-group.html#cfn-rds-dbsubnetgroup-tags"""
@attr.s
class GlobalCluster(Resource):
    """
    AWS Object Type = "AWS::RDS::GlobalCluster"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html
    Property Document:
    - ``p_DeletionProtection``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-deletionprotection
    - ``p_Engine``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-engine
    - ``p_EngineVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-engineversion
    - ``p_GlobalClusterIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-globalclusteridentifier
    - ``p_SourceDBClusterIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-sourcedbclusteridentifier
    - ``p_StorageEncrypted``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-storageencrypted
    """
    # NOTE(review): generated code -- field declaration order defines the
    # attrs-generated __init__ positional order; do not reorder fields.
    # Naming convention in this module: "p_" = optional CloudFormation
    # property (validator wrapped in attr.validators.optional).
    AWS_OBJECT_TYPE = "AWS::RDS::GlobalCluster"  # CloudFormation "Type" value for this resource
    p_DeletionProtection: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "DeletionProtection"},  # CloudFormation property key
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-deletionprotection"""
    p_Engine: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Engine"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-engine"""
    p_EngineVersion: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EngineVersion"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-engineversion"""
    p_GlobalClusterIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "GlobalClusterIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-globalclusteridentifier"""
    p_SourceDBClusterIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SourceDBClusterIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-sourcedbclusteridentifier"""
    p_StorageEncrypted: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "StorageEncrypted"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-globalcluster.html#cfn-rds-globalcluster-storageencrypted"""
@attr.s
class DBSecurityGroupIngress(Resource):
    """
    AWS Object Type = "AWS::RDS::DBSecurityGroupIngress"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html
    Property Document:
    - ``rp_DBSecurityGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-dbsecuritygroupname
    - ``p_CIDRIP``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-cidrip
    - ``p_EC2SecurityGroupId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-ec2securitygroupid
    - ``p_EC2SecurityGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-ec2securitygroupname
    - ``p_EC2SecurityGroupOwnerId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-ec2securitygroupownerid
    """
    # NOTE(review): generated code -- field declaration order defines the
    # attrs-generated __init__ positional order; do not reorder fields.
    # Naming convention: "rp_" = required property (non-optional validator),
    # "p_" = optional property (validator wrapped in attr.validators.optional).
    AWS_OBJECT_TYPE = "AWS::RDS::DBSecurityGroupIngress"  # CloudFormation "Type" value for this resource
    rp_DBSecurityGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),  # required: no optional() wrapper
        metadata={AttrMeta.PROPERTY_NAME: "DBSecurityGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-dbsecuritygroupname"""
    p_CIDRIP: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "CIDRIP"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-cidrip"""
    p_EC2SecurityGroupId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EC2SecurityGroupId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-ec2securitygroupid"""
    p_EC2SecurityGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EC2SecurityGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-ec2securitygroupname"""
    p_EC2SecurityGroupOwnerId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EC2SecurityGroupOwnerId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-ec2securitygroupownerid"""
@attr.s
class DBCluster(Resource):
    """
    AWS Object Type = "AWS::RDS::DBCluster"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html
    Property Document:
    - ``rp_Engine``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-engine
    - ``p_AssociatedRoles``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-associatedroles
    - ``p_AvailabilityZones``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-availabilityzones
    - ``p_BacktrackWindow``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-backtrackwindow
    - ``p_BackupRetentionPeriod``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-backuprententionperiod
    - ``p_CopyTagsToSnapshot``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-copytagstosnapshot
    - ``p_DBClusterIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-dbclusteridentifier
    - ``p_DBClusterParameterGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-dbclusterparametergroupname
    - ``p_DBSubnetGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-dbsubnetgroupname
    - ``p_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-databasename
    - ``p_DeletionProtection``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-deletionprotection
    - ``p_EnableCloudwatchLogsExports``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-enablecloudwatchlogsexports
    - ``p_EnableHttpEndpoint``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-enablehttpendpoint
    - ``p_EnableIAMDatabaseAuthentication``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-enableiamdatabaseauthentication
    - ``p_EngineMode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-enginemode
    - ``p_EngineVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-engineversion
    - ``p_GlobalClusterIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-globalclusteridentifier
    - ``p_KmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-kmskeyid
    - ``p_MasterUserPassword``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-masteruserpassword
    - ``p_MasterUsername``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-masterusername
    - ``p_Port``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-port
    - ``p_PreferredBackupWindow``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-preferredbackupwindow
    - ``p_PreferredMaintenanceWindow``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-preferredmaintenancewindow
    - ``p_ReplicationSourceIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-replicationsourceidentifier
    - ``p_RestoreType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-restoretype
    - ``p_ScalingConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-scalingconfiguration
    - ``p_SnapshotIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-snapshotidentifier
    - ``p_SourceDBClusterIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-sourcedbclusteridentifier
    - ``p_SourceRegion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-sourceregion
    - ``p_StorageEncrypted``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-storageencrypted
    - ``p_UseLatestRestorableTime``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-uselatestrestorabletime
    - ``p_VpcSecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-vpcsecuritygroupids
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-tags
    """
    # NOTE(review): generated code -- field declaration order defines the
    # attrs-generated __init__ positional order; do not reorder fields.
    # Naming convention: "rp_" = required property (non-optional validator),
    # "p_" = optional property, "rv_" = CloudFormation Fn::GetAtt return value.
    AWS_OBJECT_TYPE = "AWS::RDS::DBCluster"  # CloudFormation "Type" value for this resource
    rp_Engine: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),  # required: no optional() wrapper
        metadata={AttrMeta.PROPERTY_NAME: "Engine"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-engine"""
    p_AssociatedRoles: typing.List[typing.Union['PropDBClusterDBClusterRole', dict]] = attr.ib(
        default=None,
        converter=PropDBClusterDBClusterRole.from_list,  # coerces a list of plain dicts into Prop objects
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDBClusterDBClusterRole), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "AssociatedRoles"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-associatedroles"""
    p_AvailabilityZones: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "AvailabilityZones"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-availabilityzones"""
    p_BacktrackWindow: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "BacktrackWindow"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-backtrackwindow"""
    p_BackupRetentionPeriod: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "BackupRetentionPeriod"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-backuprententionperiod"""
    p_CopyTagsToSnapshot: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "CopyTagsToSnapshot"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-copytagstosnapshot"""
    p_DBClusterIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBClusterIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-dbclusteridentifier"""
    p_DBClusterParameterGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBClusterParameterGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-dbclusterparametergroupname"""
    p_DBSubnetGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBSubnetGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-dbsubnetgroupname"""
    p_DatabaseName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-databasename"""
    p_DeletionProtection: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "DeletionProtection"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-deletionprotection"""
    p_EnableCloudwatchLogsExports: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "EnableCloudwatchLogsExports"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-enablecloudwatchlogsexports"""
    p_EnableHttpEndpoint: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "EnableHttpEndpoint"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-enablehttpendpoint"""
    p_EnableIAMDatabaseAuthentication: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "EnableIAMDatabaseAuthentication"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-enableiamdatabaseauthentication"""
    p_EngineMode: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EngineMode"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-enginemode"""
    p_EngineVersion: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EngineVersion"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-engineversion"""
    p_GlobalClusterIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "GlobalClusterIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-globalclusteridentifier"""
    p_KmsKeyId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "KmsKeyId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-kmskeyid"""
    p_MasterUserPassword: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "MasterUserPassword"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-masteruserpassword"""
    p_MasterUsername: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "MasterUsername"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-masterusername"""
    p_Port: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "Port"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-port"""
    p_PreferredBackupWindow: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "PreferredBackupWindow"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-preferredbackupwindow"""
    p_PreferredMaintenanceWindow: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "PreferredMaintenanceWindow"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-preferredmaintenancewindow"""
    p_ReplicationSourceIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ReplicationSourceIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-replicationsourceidentifier"""
    p_RestoreType: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "RestoreType"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-restoretype"""
    p_ScalingConfiguration: typing.Union['PropDBClusterScalingConfiguration', dict] = attr.ib(
        default=None,
        converter=PropDBClusterScalingConfiguration.from_dict,  # coerces a plain dict into the Prop object
        validator=attr.validators.optional(attr.validators.instance_of(PropDBClusterScalingConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "ScalingConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-scalingconfiguration"""
    p_SnapshotIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SnapshotIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-snapshotidentifier"""
    p_SourceDBClusterIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SourceDBClusterIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-sourcedbclusteridentifier"""
    p_SourceRegion: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SourceRegion"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-sourceregion"""
    p_StorageEncrypted: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "StorageEncrypted"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-storageencrypted"""
    p_UseLatestRestorableTime: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "UseLatestRestorableTime"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-uselatestrestorabletime"""
    p_VpcSecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "VpcSecurityGroupIds"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-vpcsecuritygroupids"""
    p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
        default=None,
        converter=Tag.from_list,  # coerces a list of plain dicts into Tag objects
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#cfn-rds-dbcluster-tags"""
    # "rv_" properties below build Fn::GetAtt references to this resource's
    # CloudFormation return values; they do not read live AWS state.
    @property
    def rv_EndpointAddress(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#aws-resource-rds-dbcluster-return-values"""
        return GetAtt(resource=self, attr_name="Endpoint.Address")
    @property
    def rv_EndpointPort(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#aws-resource-rds-dbcluster-return-values"""
        return GetAtt(resource=self, attr_name="Endpoint.Port")
    @property
    def rv_ReadEndpointAddress(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbcluster.html#aws-resource-rds-dbcluster-return-values"""
        return GetAtt(resource=self, attr_name="ReadEndpoint.Address")
@attr.s
class EventSubscription(Resource):
    """
    AWS Object Type = "AWS::RDS::EventSubscription"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html
    Property Document:
    - ``rp_SnsTopicArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-snstopicarn
    - ``p_Enabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-enabled
    - ``p_EventCategories``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-eventcategories
    - ``p_SourceIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-sourceids
    - ``p_SourceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-sourcetype
    """
    # NOTE(review): generated code -- field declaration order defines the
    # attrs-generated __init__ positional order; do not reorder fields.
    # Naming convention: "rp_" = required property (non-optional validator),
    # "p_" = optional property (validator wrapped in attr.validators.optional).
    AWS_OBJECT_TYPE = "AWS::RDS::EventSubscription"  # CloudFormation "Type" value for this resource
    rp_SnsTopicArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),  # required: no optional() wrapper
        metadata={AttrMeta.PROPERTY_NAME: "SnsTopicArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-snstopicarn"""
    p_Enabled: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "Enabled"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-enabled"""
    p_EventCategories: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "EventCategories"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-eventcategories"""
    p_SourceIds: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "SourceIds"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-sourceids"""
    p_SourceType: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SourceType"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-eventsubscription.html#cfn-rds-eventsubscription-sourcetype"""
@attr.s
class DBInstance(Resource):
    """
    AWS Object Type = "AWS::RDS::DBInstance"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html

    Property Document:
    
    - ``rp_DBInstanceClass``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbinstanceclass
    - ``p_AllocatedStorage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-allocatedstorage
    - ``p_AllowMajorVersionUpgrade``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-allowmajorversionupgrade
    - ``p_AssociatedRoles``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-associatedroles
    - ``p_AutoMinorVersionUpgrade``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-autominorversionupgrade
    - ``p_AvailabilityZone``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-availabilityzone
    - ``p_BackupRetentionPeriod``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-backupretentionperiod
    - ``p_CACertificateIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-cacertificateidentifier
    - ``p_CharacterSetName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-charactersetname
    - ``p_CopyTagsToSnapshot``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-copytagstosnapshot
    - ``p_DBClusterIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbclusteridentifier
    - ``p_DBInstanceIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbinstanceidentifier
    - ``p_DBName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbname
    - ``p_DBParameterGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbparametergroupname
    - ``p_DBSecurityGroups``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbsecuritygroups
    - ``p_DBSnapshotIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbsnapshotidentifier
    - ``p_DBSubnetGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbsubnetgroupname
    - ``p_DeleteAutomatedBackups``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-deleteautomatedbackups
    - ``p_DeletionProtection``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-deletionprotection
    - ``p_Domain``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-domain
    - ``p_DomainIAMRoleName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-domainiamrolename
    - ``p_EnableCloudwatchLogsExports``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-enablecloudwatchlogsexports
    - ``p_EnableIAMDatabaseAuthentication``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-enableiamdatabaseauthentication
    - ``p_EnablePerformanceInsights``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-enableperformanceinsights
    - ``p_Engine``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-engine
    - ``p_EngineVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-engineversion
    - ``p_Iops``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-iops
    - ``p_KmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-kmskeyid
    - ``p_LicenseModel``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-licensemodel
    - ``p_MasterUserPassword``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-masteruserpassword
    - ``p_MasterUsername``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-masterusername
    - ``p_MaxAllocatedStorage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-maxallocatedstorage
    - ``p_MonitoringInterval``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-monitoringinterval
    - ``p_MonitoringRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-monitoringrolearn
    - ``p_MultiAZ``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-multiaz
    - ``p_OptionGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-optiongroupname
    - ``p_PerformanceInsightsKMSKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-performanceinsightskmskeyid
    - ``p_PerformanceInsightsRetentionPeriod``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-performanceinsightsretentionperiod
    - ``p_Port``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-port
    - ``p_PreferredBackupWindow``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-preferredbackupwindow
    - ``p_PreferredMaintenanceWindow``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-preferredmaintenancewindow
    - ``p_ProcessorFeatures``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-processorfeatures
    - ``p_PromotionTier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-promotiontier
    - ``p_PubliclyAccessible``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-publiclyaccessible
    - ``p_SourceDBInstanceIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-sourcedbinstanceidentifier
    - ``p_SourceRegion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-sourceregion
    - ``p_StorageEncrypted``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-storageencrypted
    - ``p_StorageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-storagetype
    - ``p_Timezone``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-timezone
    - ``p_UseDefaultProcessorFeatures``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-usedefaultprocessorfeatures
    - ``p_VPCSecurityGroups``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-vpcsecuritygroups
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-tags
    """
    # NOTE: generated declarative resource class. Field naming convention used here:
    #   rp_*  -> required CloudFormation property (validator NOT wrapped in ``attr.validators.optional``)
    #   p_*   -> optional CloudFormation property (validator wrapped in ``attr.validators.optional``)
    #   rv_*  -> read-only attribute resolved by CloudFormation at deploy time (returned as ``GetAtt``)
    # Field declaration order is significant: attrs derives the ``__init__``
    # positional-argument order from it, so do not reorder fields.
    AWS_OBJECT_TYPE = "AWS::RDS::DBInstance"
    rp_DBInstanceClass: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "DBInstanceClass"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbinstanceclass"""
    p_AllocatedStorage: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "AllocatedStorage"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-allocatedstorage"""
    p_AllowMajorVersionUpgrade: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "AllowMajorVersionUpgrade"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-allowmajorversionupgrade"""
    # ``converter`` coerces a list of plain dicts into PropDBInstanceDBInstanceRole
    # objects at construction time, so callers may pass either form.
    p_AssociatedRoles: typing.List[typing.Union['PropDBInstanceDBInstanceRole', dict]] = attr.ib(
        default=None,
        converter=PropDBInstanceDBInstanceRole.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDBInstanceDBInstanceRole), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "AssociatedRoles"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-associatedroles"""
    p_AutoMinorVersionUpgrade: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "AutoMinorVersionUpgrade"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-autominorversionupgrade"""
    p_AvailabilityZone: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "AvailabilityZone"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-availabilityzone"""
    p_BackupRetentionPeriod: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "BackupRetentionPeriod"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-backupretentionperiod"""
    p_CACertificateIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "CACertificateIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-cacertificateidentifier"""
    p_CharacterSetName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "CharacterSetName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-charactersetname"""
    p_CopyTagsToSnapshot: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "CopyTagsToSnapshot"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-copytagstosnapshot"""
    p_DBClusterIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBClusterIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbclusteridentifier"""
    p_DBInstanceIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBInstanceIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbinstanceidentifier"""
    p_DBName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbname"""
    p_DBParameterGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBParameterGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbparametergroupname"""
    p_DBSecurityGroups: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "DBSecurityGroups"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbsecuritygroups"""
    p_DBSnapshotIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBSnapshotIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbsnapshotidentifier"""
    p_DBSubnetGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DBSubnetGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-dbsubnetgroupname"""
    p_DeleteAutomatedBackups: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "DeleteAutomatedBackups"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-deleteautomatedbackups"""
    p_DeletionProtection: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "DeletionProtection"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-deletionprotection"""
    p_Domain: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Domain"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-domain"""
    p_DomainIAMRoleName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DomainIAMRoleName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-domainiamrolename"""
    p_EnableCloudwatchLogsExports: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "EnableCloudwatchLogsExports"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-enablecloudwatchlogsexports"""
    p_EnableIAMDatabaseAuthentication: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "EnableIAMDatabaseAuthentication"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-enableiamdatabaseauthentication"""
    p_EnablePerformanceInsights: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "EnablePerformanceInsights"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-enableperformanceinsights"""
    p_Engine: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Engine"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-engine"""
    p_EngineVersion: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EngineVersion"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-engineversion"""
    p_Iops: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "Iops"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-iops"""
    p_KmsKeyId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "KmsKeyId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-kmskeyid"""
    p_LicenseModel: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "LicenseModel"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-licensemodel"""
    p_MasterUserPassword: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "MasterUserPassword"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-masteruserpassword"""
    p_MasterUsername: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "MasterUsername"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-masterusername"""
    p_MaxAllocatedStorage: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "MaxAllocatedStorage"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-maxallocatedstorage"""
    p_MonitoringInterval: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "MonitoringInterval"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-monitoringinterval"""
    p_MonitoringRoleArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "MonitoringRoleArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-monitoringrolearn"""
    p_MultiAZ: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "MultiAZ"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-multiaz"""
    p_OptionGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "OptionGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-optiongroupname"""
    p_PerformanceInsightsKMSKeyId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "PerformanceInsightsKMSKeyId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-performanceinsightskmskeyid"""
    p_PerformanceInsightsRetentionPeriod: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "PerformanceInsightsRetentionPeriod"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-performanceinsightsretentionperiod"""
    p_Port: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Port"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-port"""
    p_PreferredBackupWindow: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "PreferredBackupWindow"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-preferredbackupwindow"""
    p_PreferredMaintenanceWindow: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "PreferredMaintenanceWindow"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-preferredmaintenancewindow"""
    # ``converter`` coerces a list of plain dicts into PropDBInstanceProcessorFeature
    # objects at construction time, so callers may pass either form.
    p_ProcessorFeatures: typing.List[typing.Union['PropDBInstanceProcessorFeature', dict]] = attr.ib(
        default=None,
        converter=PropDBInstanceProcessorFeature.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDBInstanceProcessorFeature), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "ProcessorFeatures"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-processorfeatures"""
    p_PromotionTier: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "PromotionTier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-promotiontier"""
    p_PubliclyAccessible: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "PubliclyAccessible"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-publiclyaccessible"""
    p_SourceDBInstanceIdentifier: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SourceDBInstanceIdentifier"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-sourcedbinstanceidentifier"""
    p_SourceRegion: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SourceRegion"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-sourceregion"""
    p_StorageEncrypted: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "StorageEncrypted"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-storageencrypted"""
    p_StorageType: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "StorageType"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-storagetype"""
    p_Timezone: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Timezone"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-timezone"""
    p_UseDefaultProcessorFeatures: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "UseDefaultProcessorFeatures"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-usedefaultprocessorfeatures"""
    p_VPCSecurityGroups: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "VPCSecurityGroups"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-vpcsecuritygroups"""
    p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
        default=None,
        converter=Tag.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#cfn-rds-dbinstance-tags"""
    # Read-only runtime attributes: each returns a ``GetAtt`` token that
    # CloudFormation resolves to the live value (Fn::GetAtt) at deploy time.
    @property
    def rv_EndpointAddress(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#aws-properties-rds-database-instance-return-values"""
        return GetAtt(resource=self, attr_name="Endpoint.Address")
    @property
    def rv_EndpointPort(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html#aws-properties-rds-database-instance-return-values"""
        return GetAtt(resource=self, attr_name="Endpoint.Port")
@attr.s
class DBSecurityGroup(Resource):
    """
    AWS Object Type = "AWS::RDS::DBSecurityGroup"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group.html

    Property Document:
    
    - ``rp_DBSecurityGroupIngress``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group.html#cfn-rds-dbsecuritygroup-dbsecuritygroupingress
    - ``rp_GroupDescription``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group.html#cfn-rds-dbsecuritygroup-groupdescription
    - ``p_EC2VpcId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group.html#cfn-rds-dbsecuritygroup-ec2vpcid
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group.html#cfn-rds-dbsecuritygroup-tags
    """
    # NOTE: generated declarative resource class. ``rp_*`` fields are required
    # (their validators are not wrapped in ``attr.validators.optional``);
    # ``p_*`` fields are optional. Field order drives the attrs ``__init__``
    # positional-argument order, so do not reorder fields.
    AWS_OBJECT_TYPE = "AWS::RDS::DBSecurityGroup"
    # ``converter`` coerces a list of plain dicts into PropDBSecurityGroupIngress
    # objects at construction time, so callers may pass either form.
    rp_DBSecurityGroupIngress: typing.List[typing.Union['PropDBSecurityGroupIngress', dict]] = attr.ib(
        default=None,
        converter=PropDBSecurityGroupIngress.from_list,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDBSecurityGroupIngress), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "DBSecurityGroupIngress"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group.html#cfn-rds-dbsecuritygroup-dbsecuritygroupingress"""
    rp_GroupDescription: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "GroupDescription"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group.html#cfn-rds-dbsecuritygroup-groupdescription"""
    p_EC2VpcId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EC2VpcId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group.html#cfn-rds-dbsecuritygroup-ec2vpcid"""
    p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
        default=None,
        converter=Tag.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-security-group.html#cfn-rds-dbsecuritygroup-tags"""
@attr.s
class DBClusterParameterGroup(Resource):
    """
    AWS Object Type = "AWS::RDS::DBClusterParameterGroup"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbclusterparametergroup.html
    Property Document:
    - ``rp_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbclusterparametergroup.html#cfn-rds-dbclusterparametergroup-description
    - ``rp_Family``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbclusterparametergroup.html#cfn-rds-dbclusterparametergroup-family
    - ``rp_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbclusterparametergroup.html#cfn-rds-dbclusterparametergroup-parameters
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbclusterparametergroup.html#cfn-rds-dbclusterparametergroup-tags
    Naming convention: ``rp_``-prefixed attributes are required CloudFormation
    properties (their validators reject ``None``); ``p_``-prefixed attributes
    are optional (their validators are wrapped in ``attr.validators.optional``).
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBClusterParameterGroup"
    rp_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbclusterparametergroup.html#cfn-rds-dbclusterparametergroup-description"""
    rp_Family: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Family"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbclusterparametergroup.html#cfn-rds-dbclusterparametergroup-family"""
    rp_Parameters: dict = attr.ib(
        default=None,
        validator=attr.validators.instance_of(dict),
        metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbclusterparametergroup.html#cfn-rds-dbclusterparametergroup-parameters"""
    p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
        default=None,
        converter=Tag.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbclusterparametergroup.html#cfn-rds-dbclusterparametergroup-tags"""
@attr.s
class OptionGroup(Resource):
    """
    AWS Object Type = "AWS::RDS::OptionGroup"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html
    Property Document:
    - ``rp_EngineName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-enginename
    - ``rp_MajorEngineVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-majorengineversion
    - ``rp_OptionConfigurations``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-optionconfigurations
    - ``rp_OptionGroupDescription``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-optiongroupdescription
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-tags
    Naming convention: ``rp_``-prefixed attributes are required CloudFormation
    properties (their validators reject ``None``); ``p_``-prefixed attributes
    are optional (their validators are wrapped in ``attr.validators.optional``).
    """
    AWS_OBJECT_TYPE = "AWS::RDS::OptionGroup"
    rp_EngineName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "EngineName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-enginename"""
    rp_MajorEngineVersion: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "MajorEngineVersion"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-majorengineversion"""
    rp_OptionConfigurations: typing.List[typing.Union['PropOptionGroupOptionConfiguration', dict]] = attr.ib(
        default=None,
        converter=PropOptionGroupOptionConfiguration.from_list,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropOptionGroupOptionConfiguration), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "OptionConfigurations"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-optionconfigurations"""
    rp_OptionGroupDescription: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "OptionGroupDescription"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-optiongroupdescription"""
    p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
        default=None,
        converter=Tag.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-optiongroup.html#cfn-rds-optiongroup-tags"""
@attr.s
class DBParameterGroup(Resource):
    """
    AWS Object Type = "AWS::RDS::DBParameterGroup"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbparametergroup.html
    Property Document:
    - ``rp_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbparametergroup.html#cfn-rds-dbparametergroup-description
    - ``rp_Family``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbparametergroup.html#cfn-rds-dbparametergroup-family
    - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbparametergroup.html#cfn-rds-dbparametergroup-parameters
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbparametergroup.html#cfn-rds-dbparametergroup-tags
    Naming convention: ``rp_``-prefixed attributes are required CloudFormation
    properties (their validators reject ``None``); ``p_``-prefixed attributes
    are optional (their validators are wrapped in ``attr.validators.optional``).
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBParameterGroup"
    rp_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbparametergroup.html#cfn-rds-dbparametergroup-description"""
    rp_Family: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Family"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbparametergroup.html#cfn-rds-dbparametergroup-family"""
    p_Parameters: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),
        metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbparametergroup.html#cfn-rds-dbparametergroup-parameters"""
    p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
        default=None,
        converter=Tag.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-dbparametergroup.html#cfn-rds-dbparametergroup-tags"""
@attr.s
class DBProxy(Resource):
    """
    AWS Object Type = "AWS::RDS::DBProxy"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html
    Property Document:
    - ``rp_Auth``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-auth
    - ``rp_DBProxyName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-dbproxyname
    - ``rp_EngineFamily``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-enginefamily
    - ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-rolearn
    - ``rp_VpcSubnetIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-vpcsubnetids
    - ``p_DebugLogging``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-debuglogging
    - ``p_IdleClientTimeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-idleclienttimeout
    - ``p_RequireTLS``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-requiretls
    - ``p_VpcSecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-vpcsecuritygroupids
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-tags
    Naming convention: ``rp_``-prefixed attributes are required CloudFormation
    properties (their validators reject ``None``); ``p_``-prefixed attributes
    are optional (their validators are wrapped in ``attr.validators.optional``);
    ``rv_``-prefixed properties expose CloudFormation return values as
    ``GetAtt`` references.
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBProxy"
    rp_Auth: typing.List[typing.Union['PropDBProxyAuthFormat', dict]] = attr.ib(
        default=None,
        converter=PropDBProxyAuthFormat.from_list,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDBProxyAuthFormat), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "Auth"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-auth"""
    rp_DBProxyName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "DBProxyName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-dbproxyname"""
    rp_EngineFamily: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "EngineFamily"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-enginefamily"""
    rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-rolearn"""
    rp_VpcSubnetIds: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "VpcSubnetIds"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-vpcsubnetids"""
    p_DebugLogging: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "DebugLogging"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-debuglogging"""
    p_IdleClientTimeout: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "IdleClientTimeout"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-idleclienttimeout"""
    p_RequireTLS: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "RequireTLS"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-requiretls"""
    p_VpcSecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "VpcSecurityGroupIds"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-vpcsecuritygroupids"""
    p_Tags: typing.List[typing.Union['PropDBProxyTagFormat', dict]] = attr.ib(
        default=None,
        converter=PropDBProxyTagFormat.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDBProxyTagFormat), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#cfn-rds-dbproxy-tags"""
    @property
    def rv_DBProxyArn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#aws-resource-rds-dbproxy-return-values"""
        return GetAtt(resource=self, attr_name="DBProxyArn")
    @property
    def rv_Endpoint(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#aws-resource-rds-dbproxy-return-values"""
        return GetAtt(resource=self, attr_name="Endpoint")
    @property
    def rv_VpcId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxy.html#aws-resource-rds-dbproxy-return-values"""
        return GetAtt(resource=self, attr_name="VpcId")
@attr.s
class DBProxyTargetGroup(Resource):
    """
    AWS Object Type = "AWS::RDS::DBProxyTargetGroup"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html
    Property Document:
    - ``rp_DBProxyName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-dbproxyname
    - ``rp_TargetGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-targetgroupname
    - ``p_ConnectionPoolConfigurationInfo``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfo
    - ``p_DBClusterIdentifiers``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-dbclusteridentifiers
    - ``p_DBInstanceIdentifiers``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-dbinstanceidentifiers
    Naming convention: ``rp_``-prefixed attributes are required CloudFormation
    properties (their validators reject ``None``); ``p_``-prefixed attributes
    are optional (their validators are wrapped in ``attr.validators.optional``);
    ``rv_``-prefixed properties expose CloudFormation return values as
    ``GetAtt`` references.
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBProxyTargetGroup"
    rp_DBProxyName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "DBProxyName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-dbproxyname"""
    rp_TargetGroupName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "TargetGroupName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-targetgroupname"""
    p_ConnectionPoolConfigurationInfo: typing.Union['PropDBProxyTargetGroupConnectionPoolConfigurationInfoFormat', dict] = attr.ib(
        default=None,
        converter=PropDBProxyTargetGroupConnectionPoolConfigurationInfoFormat.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(PropDBProxyTargetGroupConnectionPoolConfigurationInfoFormat)),
        metadata={AttrMeta.PROPERTY_NAME: "ConnectionPoolConfigurationInfo"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-connectionpoolconfigurationinfo"""
    p_DBClusterIdentifiers: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "DBClusterIdentifiers"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-dbclusteridentifiers"""
    p_DBInstanceIdentifiers: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "DBInstanceIdentifiers"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#cfn-rds-dbproxytargetgroup-dbinstanceidentifiers"""
    @property
    def rv_TargetGroupArn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxytargetgroup.html#aws-resource-rds-dbproxytargetgroup-return-values"""
        return GetAtt(resource=self, attr_name="TargetGroupArn")
@attr.s
class DBProxyEndpoint(Resource):
    """
    AWS Object Type = "AWS::RDS::DBProxyEndpoint"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html
    Property Document:
    - ``rp_DBProxyEndpointName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-dbproxyendpointname
    - ``rp_DBProxyName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-dbproxyname
    - ``rp_VpcSubnetIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-vpcsubnetids
    - ``p_TargetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-targetrole
    - ``p_VpcSecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-vpcsecuritygroupids
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-tags
    Naming convention: ``rp_``-prefixed attributes are required CloudFormation
    properties (their validators reject ``None``); ``p_``-prefixed attributes
    are optional (their validators are wrapped in ``attr.validators.optional``);
    ``rv_``-prefixed properties expose CloudFormation return values as
    ``GetAtt`` references.
    """
    AWS_OBJECT_TYPE = "AWS::RDS::DBProxyEndpoint"
    rp_DBProxyEndpointName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "DBProxyEndpointName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-dbproxyendpointname"""
    rp_DBProxyName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "DBProxyName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-dbproxyname"""
    rp_VpcSubnetIds: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "VpcSubnetIds"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-vpcsubnetids"""
    p_TargetRole: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "TargetRole"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-targetrole"""
    p_VpcSecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "VpcSecurityGroupIds"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-vpcsecuritygroupids"""
    p_Tags: typing.List[typing.Union['PropDBProxyEndpointTagFormat', dict]] = attr.ib(
        default=None,
        converter=PropDBProxyEndpointTagFormat.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDBProxyEndpointTagFormat), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#cfn-rds-dbproxyendpoint-tags"""
    @property
    def rv_DBProxyEndpointArn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#aws-resource-rds-dbproxyendpoint-return-values"""
        return GetAtt(resource=self, attr_name="DBProxyEndpointArn")
    @property
    def rv_VpcId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#aws-resource-rds-dbproxyendpoint-return-values"""
        return GetAtt(resource=self, attr_name="VpcId")
    @property
    def rv_Endpoint(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#aws-resource-rds-dbproxyendpoint-return-values"""
        return GetAtt(resource=self, attr_name="Endpoint")
    @property
    def rv_IsDefault(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbproxyendpoint.html#aws-resource-rds-dbproxyendpoint-return-values"""
        return GetAtt(resource=self, attr_name="IsDefault")
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
198,
1212,
8265,
198,
37811,
198,
198,
11748,
708,
81,
198,
11748,
19720,
198,
198,
6738,
11485,
7295,
13,
19849,
1330,
357,
198,
220,
220,
220,
14161,
11,
... | 2.571292 | 46,155 |
import json
from compas_singular.datastructures import CoarseQuadMesh
from compas_plotters.meshplotter import MeshPlotter
# read input data
json_data = 'data/coarse_quad_mesh_british_museum.json'
coarse_quad_mesh = CoarseQuadMesh.from_json(json_data)


def _show_mesh(mesh, figsize=(5, 5), vertex_radius=.05):
    """Draw the edges, vertices and faces of ``mesh`` and display the plot.

    Extracted helper: the same five plotting calls were previously duplicated
    four times throughout this script.
    """
    plotter = MeshPlotter(mesh, figsize=figsize)
    plotter.draw_edges()
    plotter.draw_vertices(radius=vertex_radius)
    plotter.draw_faces()
    plotter.show()


# plot coarse quad mesh
_show_mesh(coarse_quad_mesh)

# collect strip data
coarse_quad_mesh.collect_strips()

# densification with uniform density
coarse_quad_mesh.set_strips_density(3)
coarse_quad_mesh.densification()

# plot dense quad mesh
_show_mesh(coarse_quad_mesh.get_quad_mesh())

# densification with target length
coarse_quad_mesh.set_strips_density_target(t=.5)
coarse_quad_mesh.densification()

# plot dense quad mesh
_show_mesh(coarse_quad_mesh.get_quad_mesh())

# change density of one strip
skey = list(coarse_quad_mesh.strips())[0]
coarse_quad_mesh.set_strip_density(skey, 10)
coarse_quad_mesh.densification()

# plot dense quad mesh
_show_mesh(coarse_quad_mesh.get_quad_mesh())
| [
11748,
33918,
198,
198,
6738,
552,
292,
62,
12215,
934,
13,
19608,
459,
1356,
942,
1330,
1766,
17208,
4507,
324,
37031,
198,
198,
6738,
552,
292,
62,
29487,
1010,
13,
76,
5069,
29487,
353,
1330,
47529,
43328,
353,
198,
198,
2,
1100,
... | 2.601463 | 547 |
from ..bases import BaseParser
from collections import defaultdict

# Names exported by ``from <this module> import *``.
__all__ = ['ReactionsParser']
| [
6738,
11485,
65,
1386,
1330,
7308,
46677,
198,
6738,
17268,
1330,
4277,
11600,
198,
198,
834,
439,
834,
796,
37250,
3041,
4658,
46677,
20520,
628
] | 3.96 | 25 |
#!/usr/bin/env python
import argparse
import datetime
import json
import os
import feedparser
import httplib2
import requests
import dateutil.parser
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
from lib.bottle import (
default_app,
get,
hook,
request,
route,
run,
static_file,
template,
TEMPLATE_PATH,
)
from get_menu_img import get_menu_img
# National Weather Service point-forecast endpoint for the configured
# latitude/longitude.
WEATHER_URL = "https://api.weather.gov/points/41.252363,-95.997988/forecast"
# RSS feed queried for the news section.
NEWS_FEED = "http://feeds.reuters.com/reuters/topNews"
# OAuth scope: read-only access to the user's Google Calendar.
SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
# OAuth client secret downloaded from the Google API console (kept private).
CLIENT_SECRET_FILE = 'private/client_id.json'
APPLICATION_NAME = 'Google Calendar API Python Quickstart'
# NOTE(review): the three decorators below look orphaned — the route and hook
# handler functions they decorated appear to have been removed, leaving the
# decorators stacked on get_credentials(). As written, get_credentials() is
# registered as the handler for '/' and '/static/<path:path>' AND as a
# 'before_request' hook, which is almost certainly unintended. The original
# handler functions should be restored; left byte-identical here because the
# intended handlers cannot be reconstructed from this file.
@get('/')
@route('/static/<path:path>')
# remove ending slash from requests
@hook('before_request')
def get_credentials():
    """Gets valid user credentials from storage.
    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.
    Returns:
        Credentials, the obtained credential.
    """
    # Credentials are cached under ~/.credentials/ so the OAuth browser flow
    # only runs the first time (or after the stored token is invalidated).
    home_dir = os.path.expanduser('~')
    credential_dir = os.path.join(home_dir, '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   'calendar-python-quickstart.json')
    store = Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        # No usable cached token: run the interactive OAuth2 flow and persist
        # the result to credential_path via `store`.
        flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
        flow.user_agent = "daily-brief"
        credentials = tools.run_flow(flow, store, None)
        print('Storing credentials to ' + credential_path)
    return credentials
# NOTE(review): get_script_rel_path is not defined in this file — presumably a
# helper resolving paths relative to this script; confirm it is imported or
# defined elsewhere before running.
tpl_path = os.path.join(get_script_rel_path("templates"))
# Make bottle look up templates in this project's templates/ directory first.
TEMPLATE_PATH.insert(0, tpl_path)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='starts a lists server')
    parser.add_argument(
        '--config',
        help='specifies the config file location (default: ./config.json)',
        default="./config.json"
    )
    args = parser.parse_args()
    with open(args.config) as f:
        config = json.load(f)
    # Blocking call: serves until interrupted. reloader=True restarts on
    # source changes (development mode).
    run(host='0.0.0.0', port=config['port'], reloader=True)
# Module-level WSGI application object, used when this module is imported by a
# WSGI server instead of being run as a script.
app = default_app()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
1822,
29572,
198,
11748,
4818,
8079,
198,
11748,
33918,
198,
11748,
28686,
198,
198,
11748,
3745,
48610,
198,
11748,
1841,
489,
571,
17,
198,
11748,
7007,
198,
198,
11748,
31... | 2.595395 | 912 |
"""
Revolve body generator based on RoboGen framework
"""
import yaml
import traceback
from collections import OrderedDict
from pyrevolve import SDF
from .revolve_module import CoreModule, Orientation
from .brain import Brain, BrainNN, BrainRLPowerSplines
from .render.render import Render
from .render.brain_graph import BrainGraph
from .measure.measure_body import MeasureBody
from .measure.measure_brain import MeasureBrain
class RevolveBot:
    """
    Basic robot description class that contains robot's body and/or brain
    structures, ID and several other necessary parameters. Capable of reading
    a robot's description from YAML (SDF loading is not implemented yet).
    """

    def __init__(self, _id=None):
        """
        :param _id: optional robot identifier
        """
        # NOTE(review): the constructor was missing from this file; without it,
        # the guard checks below raised AttributeError instead of the intended
        # RuntimeError on an uninitialized robot.
        self._id = _id
        self._body = None
        self._brain = None

    # NOTE(review): three bare ``@property`` decorators were left stacked on
    # measure_behaviour(), which made that attribute raise TypeError on access
    # (property-of-property is not callable). They were evidently the getters
    # for the three private fields; restored as such below.
    @property
    def id(self):
        """Robot identifier, or None if not assigned yet."""
        return self._id

    @property
    def body(self):
        """Root module of the robot's body tree, or None if not loaded."""
        return self._body

    @property
    def brain(self):
        """The robot's brain description, or None if not loaded."""
        return self._brain

    def measure_behaviour(self):
        """
        Placeholder for behavioural measurements (not implemented).

        :return: None
        """
        pass

    def measure_body(self):
        """
        Compute morphological measurements of the robot's body.

        :return: dict of body measurements, or None if measuring fails
        :raises RuntimeError: if the body has not been initialized
        """
        if self._body is None:
            # Fixed message: this guards the *body*, not the brain.
            raise RuntimeError('Body not initialized')
        try:
            measure = MeasureBody(self._body)
            return measure.measure_all()
        except Exception as e:
            # Best-effort: report and return None rather than aborting callers.
            print('Exception: {}'.format(e))

    def measure_brain(self):
        """
        Compute measurements of the robot's brain.

        :return: dict of brain measurements, or None if measuring fails
        :raises RuntimeError: if the brain has not been initialized
        """
        if self._brain is None:  # was ``== None``; identity check is the idiom
            raise RuntimeError('Brain not initialized')
        try:
            measure = MeasureBrain(self._brain, 10)
            return measure.measure_all()
        except Exception:  # was a bare ``except:`` (also caught SystemExit etc.)
            print('Failed measuring brain')

    def load(self, text, conf_type):
        """
        Load robot's description from a string and parse it to Python structure
        :param text: Robot's description string
        :param conf_type: Type of a robot's description format ('yaml' or 'sdf')
        :return:
        """
        if 'yaml' == conf_type:
            self.load_yaml(text)
        elif 'sdf' == conf_type:
            raise NotImplementedError("Loading from SDF not yet implemented")

    def load_yaml(self, text):
        """
        Load robot's description from a yaml string
        :param text: Robot's yaml description
        """
        yaml_bot = yaml.safe_load(text)
        self._id = yaml_bot.get('id')  # optional field
        self._body = CoreModule.FromYaml(yaml_bot['body'])
        try:
            if 'brain' in yaml_bot:
                yaml_brain = yaml_bot['brain']
                if 'type' not in yaml_brain:
                    # Default brain type when the description omits it.
                    yaml_brain['type'] = 'neural-network'
                self._brain = Brain.from_yaml(yaml_brain)
            else:
                self._brain = Brain()
        except Exception:  # was a bare ``except:``
            # Fall back to an empty default Brain on any parse failure.
            self._brain = Brain()
            print('Failed to load brain, setting to None')

    def load_file(self, path, conf_type='yaml'):
        """
        Read robot's description from a file and parse it to Python structure
        :param path: Robot's description file path
        :param conf_type: Type of a robot's description format
        :return:
        """
        with open(path, 'r') as robot_file:
            robot = robot_file.read()
        self.load(robot, conf_type)

    def to_yaml(self):
        """
        Converts robot data structure to yaml
        :return: yaml string describing the robot
        """
        yaml_dict = OrderedDict()
        yaml_dict['id'] = self._id
        yaml_dict['body'] = self._body.to_yaml()
        if self._brain is not None:
            yaml_dict['brain'] = self._brain.to_yaml()
        return yaml.dump(yaml_dict)

    def save_file(self, path, conf_type='yaml'):
        """
        Save robot's description on a given file path in a specified format
        :param path: output file path
        :param conf_type: 'yaml' or 'sdf'
        :return:
        """
        robot = ''
        if 'yaml' == conf_type:
            robot = self.to_yaml()
        elif 'sdf' == conf_type:
            # NOTE(review): to_sdf is not defined in this file — presumably
            # provided elsewhere; confirm before using conf_type='sdf'.
            robot = self.to_sdf(nice_format=True)
        with open(path, 'w') as robot_file:
            robot_file.write(robot)

    def update_substrate(self, raise_for_intersections=False):
        """
        Update all coordinates for body components
        :param raise_for_intersections: enable raising an exception if a collision of coordinates is detected
        :raises self.ItersectionCollisionException: If a collision of coordinates is detected (and check is enabled)
        """
        # The core module sits at the substrate origin; children are laid out
        # recursively from there.
        substrate_coordinates_all = {(0, 0): self._body.id}
        self._body.substrate_coordinates = (0, 0)
        self._update_substrate(raise_for_intersections, self._body, Orientation.NORTH, substrate_coordinates_all)

    class ItersectionCollisionException(Exception):
        # (sic) name kept for backward compatibility with existing callers.
        """
        A collision has been detected when updating the robot coordinates.
        Check self.substrate_coordinates_map to know more.
        """

    def _update_substrate(self,
                          raise_for_intersections,
                          parent,
                          parent_direction,
                          substrate_coordinates_map):
        """
        Internal recursive function for self.update_substrate()
        :param raise_for_intersections: same as in self.update_substrate
        :param parent: updates the children of this parent
        :param parent_direction: the "absolute" orientation of this parent
        :param substrate_coordinates_map: map for all already explored coordinates(useful for coordinates conflict checks)
        """
        # Orientation <-> index tables used to compose rotations.
        dic = {Orientation.NORTH: 0,
               Orientation.WEST:  1,
               Orientation.SOUTH: 2,
               Orientation.EAST:  3}
        inverse_dic = {0: Orientation.NORTH,
                       1: Orientation.WEST,
                       2: Orientation.SOUTH,
                       3: Orientation.EAST}
        # Grid displacement for one step in each absolute direction.
        movement_table = {
            Orientation.NORTH: ( 1,  0),
            Orientation.WEST:  ( 0, -1),
            Orientation.SOUTH: (-1,  0),
            Orientation.EAST:  ( 0,  1),
        }

        for slot, module in parent.iter_children():
            if module is None:
                continue
            slot = Orientation(slot)

            # Compose parent orientation with the child's slot (mod 4 rotation).
            direction = (dic[parent_direction] + dic[slot]) % len(dic)
            new_direction = Orientation(inverse_dic[direction])

            # One grid step from the parent in the child's absolute direction.
            movement = movement_table[new_direction]
            coordinates = (
                parent.substrate_coordinates[0] + movement[0],
                parent.substrate_coordinates[1] + movement[1],
            )
            module.substrate_coordinates = coordinates

            # For Karine: If you need to validate old robots, remember to add this condition to this if:
            # if raise_for_intersections and coordinates in substrate_coordinates_all and type(module) is not TouchSensorModule:
            if raise_for_intersections:
                if coordinates in substrate_coordinates_map:
                    raise self.ItersectionCollisionException(substrate_coordinates_map)
                substrate_coordinates_map[coordinates] = module.id

            self._update_substrate(raise_for_intersections,
                                   module,
                                   new_direction,
                                   substrate_coordinates_map)

    def render_brain(self, img_path):
        """
        Render image of brain
        @param img_path: path to where to store image
        """
        if self._brain is None:
            raise RuntimeError('Brain not initialized')
        try:
            brain_graph = BrainGraph(self._brain, img_path)
            brain_graph.brain_to_graph()
            brain_graph.save_graph()
        except Exception as e:
            print('Failed rendering brain. Exception:')
            print(e)
            print(traceback.format_exc())

    def render2d(self, img_path):
        """
        Render 2d representation of robot and store as png
        :param img_path: path of storing png file
        """
        if self._body is None:
            raise RuntimeError('Body not initialized')
        try:
            render = Render()
            render.render_robot(self._body, img_path)
        except Exception as e:
            print('Failed rendering 2d robot. Exception:')
            print(e)
            print(traceback.format_exc())
| [
37811,
198,
18009,
6442,
1767,
17301,
1912,
319,
39702,
13746,
9355,
198,
37811,
198,
11748,
331,
43695,
198,
11748,
12854,
1891,
198,
6738,
17268,
1330,
14230,
1068,
35,
713,
198,
198,
6738,
12972,
18218,
6442,
1330,
311,
8068,
198,
198,... | 2.183846 | 3,900 |
"""only needed post torch 1.10 updates patches improvements that allow us to handle tensordicts
"""
import torch
import re
import collections
from torch._six import string_classes
np_str_obj_array_pattern = re.compile(r'[SaUO]')
default_collate_err_msg_format = (
"default_collate: batch must contain tensors, numpy arrays, numbers, "
"dicts or lists; found {}"
)
def default_collate(batch):  # noqa: C901
    """Function that takes in a batch of data and puts the elements within the batch
    into a tensor with an additional outer dimension - batch size. The exact output type can be
    a :class:`torch.Tensor`, a `Sequence` of :class:`torch.Tensor`, a
    Collection of :class:`torch.Tensor`, or left unchanged, depending on the input type.
    This is used as the default function for collation when
    `batch_size` or `batch_sampler` is defined in :class:`~torch.utils.data.DataLoader`.

    Args:
        batch: a single batch to be collated

    Examples:
        >>> from collections import namedtuple
        >>> # Example with a batch of `int`s:
        >>> default_collate([0, 1, 2, 3])
        tensor([0, 1, 2, 3])
        >>> # Example with a batch of `str`s:
        >>> default_collate(['a', 'b', 'c'])
        ['a', 'b', 'c']
        >>> # Example with `Map` inside the batch:
        >>> default_collate([{'A': 0, 'B': 1}, {'A': 100, 'B': 100}])
        {'A': tensor([ 0, 100]), 'B': tensor([ 1, 100])}
        >>> # Example with `NamedTuple` inside the batch:
        >>> Point = namedtuple('Point', ['x', 'y'])
        >>> default_collate([Point(0, 0), Point(1, 1)])
        Point(x=tensor([0, 1]), y=tensor([0, 1]))
        >>> # Example with `Tuple` inside the batch:
        >>> default_collate([(0, 1), (2, 3)])
        [tensor([0, 2]), tensor([1, 3])]
        >>> # Example with `List` inside the batch:
        >>> default_collate([[0, 1], [2, 3]])
        [tensor([0, 2]), tensor([1, 3])]
    """
    # Dispatch on the type of the first element; the batch is assumed to be
    # homogeneously typed.
    elem = batch[0]
    elem_type = type(elem)
    if isinstance(elem, torch.Tensor):
        out = None
        if torch.utils.data.get_worker_info() is not None:
            # If we're in a background process, concatenate directly into a
            # shared memory tensor to avoid an extra copy
            numel = sum(x.numel() for x in batch)
            storage = elem.storage()._new_shared(numel)
            out = elem.new(storage)
        return torch.stack(batch, 0, out=out)
    elif (
        elem_type.__module__ == 'numpy'
        and elem_type.__name__ != 'str_'
        and elem_type.__name__ != 'string_'
    ):
        # numpy arrays and scalars (except numpy string scalars, which take
        # the `string_classes` branch below) are routed through tensors.
        if elem_type.__name__ in ['ndarray', 'memmap']:
            # array of string classes and object
            if np_str_obj_array_pattern.search(elem.dtype.str) is not None:
                raise TypeError(default_collate_err_msg_format.format(elem.dtype))

            return default_collate([torch.as_tensor(b) for b in batch])
        elif elem.shape == ():  # scalars
            return torch.as_tensor(batch)
    elif isinstance(elem, float):
        # Python floats are collated to float64 (not torch's default float32).
        return torch.tensor(batch, dtype=torch.float64)
    elif isinstance(elem, int):
        return torch.tensor(batch)
    elif isinstance(elem, string_classes):
        # Strings/bytes are returned as the original list, not tensorized.
        return batch
    elif isinstance(elem, collections.abc.Mapping):
        # Collate per key across the batch, preserving the mapping type.
        return elem_type({key: default_collate([d[key] for d in batch]) for key in elem})
    elif isinstance(elem, tuple) and hasattr(elem, '_fields'):  # namedtuple
        return elem_type(*(default_collate(samples) for samples in zip(*batch)))
    elif isinstance(elem, collections.abc.Sequence):
        # check to make sure that the elements in batch have consistent size
        it = iter(batch)
        elem_size = len(next(it))
        if any(len(elem) != elem_size for elem in it):
            raise RuntimeError('each element in list of batch should be of equal size')
        transposed = list(zip(*batch))  # It may be accessed twice, so we use a list.

        if isinstance(elem, tuple):
            return [default_collate(samples) for samples in transposed]  # Backwards compatibility.
        try:
            return elem_type([default_collate(samples) for samples in transposed])
        except TypeError:
            # The sequence type may not support `__init__(iterable)` (e.g., `range`).
            return [default_collate(samples) for samples in transposed]

    raise TypeError(default_collate_err_msg_format.format(elem_type))
| [
37811,
8807,
2622,
1281,
28034,
352,
13,
940,
5992,
16082,
8561,
326,
1249,
514,
284,
5412,
11192,
585,
14137,
198,
37811,
198,
198,
11748,
28034,
198,
11748,
302,
198,
11748,
17268,
198,
6738,
28034,
13557,
19412,
1330,
4731,
62,
37724,
... | 2.39307 | 1,847 |
from pathlib import Path
import configparser
import logging
import flask
import pymongo
from datetime import datetime, timedelta
import numpy as np
from iqmon import get_webpage_config, get_all_configs
from iqmon.webpage import mongo_query
from iqmon.webpage.weather_plot import generate_weather_plot
from iqmon.webpage.iqmon_plot import generate_iqmon_plot
app = flask.Flask(__name__)
log = logging.getLogger('FlaskLogger')

# NOTE(review): every @app.route decorator below is missing its view function
# -- the handler bodies appear to have been stripped from this extract, so the
# module is not importable as-is. Restore the view functions before use.

##-------------------------------------------------------------------------
## static_path: /static/plots/<string:telescope>/<string:date>/<string:filename>
##-------------------------------------------------------------------------
@app.route("/static/plots/<string:telescope>/<string:date>/<string:filename>")

##-------------------------------------------------------------------------
## base: /
##-------------------------------------------------------------------------
@app.route("/")

##-------------------------------------------------------------------------
## status: /<string:telescope>
##-------------------------------------------------------------------------
@app.route("/<string:telescope>/")

##-------------------------------------------------------------------------
## nightWeather: /<string:telescope>/weather/<string:date>
##-------------------------------------------------------------------------
@app.route("/<string:telescope>/weather/<string:date>")

##-------------------------------------------------------------------------
## nightReport: /<string:telescope>/report/<string:date>
##-------------------------------------------------------------------------
@app.route("/<string:telescope>/report/<string:date>")

##-------------------------------------------------------------------------
## imageList: /<string:telescope>/images/<string:date>
##-------------------------------------------------------------------------
@app.route("/<string:telescope>/images/<string:date>")

##-------------------------------------------------------------------------
## nightList: /<string:telescope>/nights/
##-------------------------------------------------------------------------
@app.route("/<string:telescope>/nights/")

# Run the development server when executed directly.
if __name__ == "__main__":
    app.run()
| [
6738,
3108,
8019,
1330,
10644,
198,
11748,
4566,
48610,
198,
11748,
18931,
198,
11748,
42903,
198,
11748,
279,
4948,
25162,
198,
6738,
4818,
8079,
1330,
4818,
8079,
11,
28805,
12514,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
131... | 4.534694 | 490 |
import logging

import databases
import sqlalchemy

from app.settings import DATABASE_URL

# Module-level logger, named after this module for hierarchical log config.
log = logging.getLogger(__name__)

# Shared async database handle (encode/databases) and the SQLAlchemy metadata
# registry that table definitions attach to.
db = databases.Database(DATABASE_URL)
metadata = sqlalchemy.MetaData()
| [
11748,
18931,
198,
198,
11748,
20083,
198,
11748,
44161,
282,
26599,
198,
198,
6738,
598,
13,
33692,
1330,
360,
1404,
6242,
11159,
62,
21886,
198,
198,
6404,
796,
18931,
13,
1136,
11187,
1362,
7,
834,
3672,
834,
8,
198,
198,
9945,
796... | 3.061538 | 65 |
from kinorrt.mechanics.mechanics import *
from kinorrt.mechanics.stability_margin import *
#import wrenchStampingLib as ws

# Standalone check of the stability-margin solver on a two-contact
# manipulation scenario.
smsolver = StabilityMarginSolver()
# Enumeration of hand (manipulator) contact-mode pairs for the two contacts:
# sticking / sliding left-right / lift-off combinations.
h_modes = np.array([[CONTACT_MODE.STICKING, CONTACT_MODE.STICKING],
                    [CONTACT_MODE.SLIDING_RIGHT, CONTACT_MODE.SLIDING_RIGHT],
                    [CONTACT_MODE.SLIDING_LEFT, CONTACT_MODE.SLIDING_LEFT],
                    [CONTACT_MODE.STICKING, CONTACT_MODE.LIFT_OFF],
                    [CONTACT_MODE.LIFT_OFF, CONTACT_MODE.STICKING],
                    [CONTACT_MODE.SLIDING_LEFT, CONTACT_MODE.LIFT_OFF],
                    [CONTACT_MODE.SLIDING_RIGHT, CONTACT_MODE.LIFT_OFF],
                    [CONTACT_MODE.LIFT_OFF, CONTACT_MODE.SLIDING_RIGHT],
                    [CONTACT_MODE.LIFT_OFF, CONTACT_MODE.SLIDING_LEFT]])
# Object pose (x, y, theta) -- presumably world frame; TODO confirm units.
x =(0,2.2,0)
# Manipulator (finger) contacts and environment contacts as
# Contact(position, normal, third-arg) tuples.
# NOTE(review): the semantics of Contact's third argument are not visible in
# this file -- confirm against kinorrt.mechanics before changing values.
mnps = [Contact((0.2097357615814568, 0.2),(0,-1),0),Contact((-0.9389810887084302, 0.2),(0,-1),0)]
envs = [Contact((-1.0, -0.20000000000000018),(0,1),0),Contact((1.0, -0.20000000000000018),(0,1),0)]
mode = [CONTACT_MODE.FOLLOWING, CONTACT_MODE.FOLLOWING, CONTACT_MODE.SLIDING_RIGHT, CONTACT_MODE.SLIDING_RIGHT]
# All environment contact-mode combinations, excluding the all-lift-off row.
e_modes = np.array(get_contact_modes([], envs))
e_modes = e_modes[~np.all(e_modes == CONTACT_MODE.LIFT_OFF, axis=1)]
env_mu = 0.3  # presumably environment friction coefficient
mnp_mu = 0.8  # presumably manipulator friction coefficient
object_weight = 10
mnp_fn_max = 6  # presumably max manipulator normal force
# Candidate object velocity (as a column vector) to evaluate.
v= -np.array([[ 1.], [-0.], [ 0.]])
preprocess = smsolver.preprocess(x, env_mu, mnp_mu, envs, mnps, e_modes, h_modes,
                                 object_weight, mnp_fn_max)
stability_margin_score = smsolver.stability_margin(preprocess, v, mode)
print(stability_margin_score)
6738,
18967,
273,
17034,
13,
1326,
3147,
873,
13,
1326,
3147,
873,
1330,
1635,
198,
6738,
18967,
273,
17034,
13,
1326,
3147,
873,
13,
301,
1799,
62,
36153,
1330,
1635,
198,
2,
11748,
34561,
1273,
37843,
25835,
355,
266,
82,
198,
198,
... | 1.954657 | 816 |
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.urls import reverse
from taggit.managers import TaggableManager
# Published custom post manager model
# Returns the QuerySet that will be executed / custom manager
# Post model
# Comment model
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
6738,
42625,
14208,
13,
26791,
1330,
640,
11340,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
27530,
1330,
11787,
198,
6738,
42625,
14208,
13,
6371,
82,
1330,
9575,
198,
6738,
762... | 3.788235 | 85 |
# Read a non-negative integer from stdin and print the number of 1-bits
# ("popcount") in its binary representation.
num = int(input())
# str.count on the binary representation replaces the manual digit-sum loop
# (the original also shadowed the built-in `sum`).
ones = bin(num).count("1")
print(str(ones))
| [
22510,
796,
493,
7,
15414,
28955,
198,
504,
796,
685,
600,
7,
72,
8,
329,
1312,
287,
9874,
7,
22510,
38381,
17,
25,
11907,
198,
16345,
796,
657,
198,
1640,
1312,
287,
9093,
25,
198,
220,
220,
220,
2160,
15853,
1312,
198,
4798,
7,
... | 2.183673 | 49 |
# pylint: disable=C0111,R0902,R0904,R0912,R0913,R0915,E1101
# Smartsheet Python SDK.
#
# Copyright 2019 Smartsheet.com, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from .column import Column
from .cell_data_item import CellDataItem
from .enums import WidgetType
from .widget_content import WidgetContent
from .widget_hyperlink import WidgetHyperlink
from ..types import *
from ..util import serialize
from ..util import deserialize
class CellLinkWidgetContent(WidgetContent):
"""Smartsheet CellLinkWidgetContent data model."""
    def __init__(self, props=None, base_obj=None):
        """Initialize the CellLinkWidgetContent model.

        :param props: optional dict of raw property values; when given, it is
            fed through ``deserialize`` to populate the typed fields below.
        :param base_obj: optional base/client object, forwarded to the parent
            widget-content class and kept for nested object construction.
        """
        # NOTE(review): widget type is fixed to METRIC here -- confirm against
        # the Smartsheet API docs before changing.
        super(CellLinkWidgetContent, self).__init__(WidgetType.METRIC, base_obj)
        self._base = None
        if base_obj is not None:
            self._base = base_obj
        """Represents the CellLinkWidgetContent object."""
        # Typed backing fields for the public properties
        # (sheet_id, cell_data, columns, hyperlink).
        self._sheet_id = Number()
        self._cell_data = TypedList(CellDataItem)
        self._columns = TypedList(Column)
        self._hyperlink = TypedObject(WidgetHyperlink)
        if props:
            deserialize(self, props)
        # Marks construction as complete for the SDK's attribute machinery.
        self.__initialized = True
"""Represents the CellLinkWidgetContent object."""
@property
@sheet_id.setter
@property
@cell_data.setter
@property
@columns.setter
@property
@hyperlink.setter
| [
2,
279,
2645,
600,
25,
15560,
28,
34,
486,
1157,
11,
49,
2931,
2999,
11,
49,
2931,
3023,
11,
49,
2931,
1065,
11,
49,
2931,
1485,
11,
49,
2931,
1314,
11,
36,
1157,
486,
198,
2,
2439,
5889,
25473,
11361,
26144,
13,
198,
2,
198,
... | 2.927581 | 649 |
# Adapted from https://github.com/huggingface/transformers/blob/master/examples/pytorch/language-modeling/run_clm.py
from itertools import chain
from pathlib import Path
import pickle
from typing import Any, List, Union
from torch.utils.data.dataloader import DataLoader, Dataset
from transformers import AutoTokenizer, default_data_collator
from datasets import load_dataset, DatasetDict
from pytorch_lightning import LightningDataModule
from src.utils.utils import get_logger
logger = get_logger()
| [
2,
30019,
276,
422,
3740,
1378,
12567,
13,
785,
14,
71,
1018,
2667,
2550,
14,
35636,
364,
14,
2436,
672,
14,
9866,
14,
1069,
12629,
14,
9078,
13165,
354,
14,
16129,
12,
4666,
10809,
14,
5143,
62,
565,
76,
13,
9078,
198,
6738,
340,... | 3.294118 | 153 |
from google.protobuf.json_format import MessageToJson, ParseDict
def message_to_json(message):
    """Serialize a protobuf *message* to a JSON string.

    Field names are kept in their original snake_case form instead of being
    converted to camelCase.
    """
    json_body = MessageToJson(message, preserving_proto_field_name=True)
    return json_body
def _stringify_all_experiment_ids(x):
"""Converts experiment_id fields which are defined as ints into strings in the given json.
This is necessary for backwards- and forwards-compatibility with MLflow clients/servers
running MLflow 0.9.0 and below, as experiment_id was changed from an int to a string.
To note, the Python JSON serializer is happy to auto-convert strings into ints (so a
server or client that sees the new format is fine), but is unwilling to convert ints
to strings. Therefore, we need to manually perform this conversion.
This code can be removed after MLflow 1.0, after users have given reasonable time to
upgrade clients and servers to MLflow 0.9.1+.
"""
if isinstance(x, dict):
items = x.items()
for k, v in items:
if k == "experiment_id":
x[k] = str(v)
elif k == "experiment_ids":
x[k] = [str(w) for w in v]
elif k == "info" and isinstance(v, dict) and "experiment_id" in v and "run_uuid" in v:
# shortcut for run info
v["experiment_id"] = str(v["experiment_id"])
elif k not in ("params", "tags", "metrics"): # skip run data
_stringify_all_experiment_ids(v)
elif isinstance(x, list):
for y in x:
_stringify_all_experiment_ids(y)
def parse_dict(js_dict, message):
    """Parses a JSON dictionary into a message proto, ignoring unknown fields in the JSON.

    :param js_dict: dict parsed from JSON. NOTE: mutated in place -- all
        experiment-id fields are converted to strings before parsing.
    :param message: protobuf message instance to populate.
    """
    _stringify_all_experiment_ids(js_dict)
    ParseDict(js_dict=js_dict, message=message, ignore_unknown_fields=True)
| [
6738,
23645,
13,
11235,
672,
3046,
13,
17752,
62,
18982,
1330,
16000,
2514,
41,
1559,
11,
2547,
325,
35,
713,
628,
198,
4299,
3275,
62,
1462,
62,
17752,
7,
20500,
2599,
198,
220,
220,
220,
37227,
3103,
24040,
257,
3275,
284,
19449,
... | 2.587413 | 715 |
from aws_cdk.core import App

from b_cfn_custom_userpool_authorizer_test.integration.infrastructure.main_stack import MainStack

# Integration-test entry point: build the CDK app, register the main stack,
# and synthesize the CloudFormation template(s) to the cloud assembly.
app = App()
MainStack(app)
app.synth()
| [
6738,
3253,
82,
62,
10210,
74,
13,
7295,
1330,
2034,
198,
198,
6738,
275,
62,
12993,
77,
62,
23144,
62,
7220,
7742,
62,
9800,
7509,
62,
9288,
13,
18908,
1358,
13,
10745,
6410,
13,
12417,
62,
25558,
1330,
8774,
25896,
198,
198,
1324,... | 2.847458 | 59 |
"""
重写range函数,要求重写之后的myrange(5)输出结果为:5,4,3,2,1,0
"""
mr=MyRange(5)
iterator=mr.__iter__()
while True:
try:
i=iterator.__next__()
print(i)
except StopIteration:
break
| [
37811,
198,
34932,
235,
37863,
247,
9521,
49035,
121,
46763,
108,
171,
120,
234,
17358,
223,
162,
109,
224,
34932,
235,
37863,
247,
45298,
28938,
236,
21410,
1820,
9521,
7,
20,
8,
164,
122,
241,
49035,
118,
163,
119,
241,
162,
252,
... | 1.492537 | 134 |
# -*- coding: utf-8 -*-
# import seaborn as sns
# sns.set()
import numpy as np
from dramkit.gentools import isnull
from dramkit.gentools import get_con_start_end
from dramkit.gentools import get_update_kwargs
from dramkit.logtools.utils_logger import logger_show
import matplotlib as mpl
mpl.rcParams['font.family'] = ['sans-serif', 'stixgeneral', 'serif']
mpl.rcParams['font.sans-serif'] = ['SimHei', 'KaiTi', 'FangSong']
mpl.rcParams['font.serif'] = ['cmr10', 'SimHei', 'KaiTi', 'FangSong']
mpl.rcParams['axes.unicode_minus'] = False
mpl.rcParams['text.usetex'] = False
mpl.rcParams['mathtext.fontset'] = 'cm' # 'dejavusans', 'cm', 'stix'
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
#%%
def _plot_series_with_styls_info(ax, series, styls_info,
lnstyl_default='.-',
lbl_str_ext='',
**kwargs_plot):
'''
给定线型设置信息styls_info, 在ax上对series (`pandas.Series`)绘图,
lnstyl_default设置默认线型
styls_info格式形如:('.-b', 'lbl')或'.-b'
第一种格式中lbl设置图例(legend),lbl若为None则默认取series列名,若为False,则不设置图例
第二种格式只设置线型,legend默认取series列名
lbl_str_ext设置legend文本后缀(比如双坐标轴情况下在右轴的legend加上'(右)')
**kwargs_plot可接收符合ax.plot函数的其它参数
'''
if styls_info is None:
lnstyl, lbl_str = lnstyl_default, series.name
else:
if isinstance(styls_info, str):
lnstyl, lbl_str = styls_info, series.name
else:
if len(styls_info) == 2:
lnstyl, lbl_str = styls_info
elif len(styls_info) == 3:
lnstyl, lbl_str, kwothers = styls_info
kwargs_plot.update(kwothers)
lnstyl = lnstyl_default if isnull(lnstyl) else lnstyl
lbl_str = series.name if lbl_str is None else lbl_str
if lbl_str is False:
ax.plot(series, lnstyl, **kwargs_plot)
return None
else:
ln = ax.plot(series, lnstyl, label=str(lbl_str)+lbl_str_ext,
**kwargs_plot)
return ln
#%%
def plot_series(data, cols_styl_up_left, cols_styl_up_right={},
cols_styl_low_left={}, cols_styl_low_right={},
cols_to_label_info={}, cols_to_fill_info={}, yscales=None,
xparls_info={}, yparls_info_up=None, yparls_info_low=None,
fills_yparl_up=None, fills_yparl_low=None, fills_xparl={},
twinx_align_up=None, twinx_align_low=None,
ylabels=None, xlabels=None, grids=False, figsize=(11, 7),
title=None, n_xticks=8, xticks_rotation=None,
fontsize_label=15, fontsize_title=15,
fontsize_legend=15, fontsize_tick=10, fontname=None,
markersize=10, legend_locs=None, fig_save_path=None,
logger=None):
'''
对data (`pd.DataFrame`)进行多列绘图
.. note::
目前功能未考虑data.index重复情况,若有重复可能会导致部分绘图错误
.. todo::
- 多重索引处理
- legend位置增加放在图片外面的设置
- 不规则区域填充设置
- 添加堆叠图(面积图)绘制方式
- 数字文本标注(比如在折线图上标注数值)
- 正常绘制与特殊标注重复绘制问题
- x轴平行线对应列不一定非要在主图绘制列中选择
- 平行线图层绘制在主线下面
- 标注图层绘制在线型图层上面(根据输入顺序绘制图层而不是根据坐标轴区域顺序绘制)
- 上图和下图的x轴不一定非要都是data的index,设置上下图不同x轴坐标
Parameters
----------
data : pandas.DataFrame
待作图数据
cols_styl_up_left : dict
指定顶部左轴需要绘制的序列及其线型和图例,格式形如:
``{'col1': ('.-b', 'lbl1', kwargs), 'col2': ...}`` 或 ``{'col1': '.-b', 'col2': ...}``
第一种格式中 `lbl` 设置图例(legend),若为None则默认取列名,为False则不设置图例
第二种格式只设置线型,legend默认取列名
cols_styl_up_right : dict
指定顶部右轴需要绘制的序列及其线型和图例,格式同 ``cols_styl_up_left``
cols_styl_low_left : dict
指定底部左轴需要绘制的序列及其线型和图例,格式同 ``cols_styl_up_left``
cols_styl_low_right : dict
指定底部右轴需要绘制的序列及其线型和图例,格式同 ``cols_styl_up_left``
cols_to_label_info : dict
设置需要特殊标注的列绘图信息,格式形如:
.. code-block:: python
{col1:
[[col_lbl1, (v1, v2, ..), (styl1, styl2, ..), (lbl1, lbl2, ..),
{kwargs, v1: {kwargs1}, v2: {kwargs2}, ...}],
[col_lbl2, (v1, v2, ..), ...]
],
col2: ...
}
其中col是需要被特殊标注的列,col_lbl为标签列;v指定哪些标签值对应的
数据用于绘图;styl设置线型;lbl设置图例标签,若为None,则设置为v,若为False,
则不设置图例标签;{kwargs, v1: {kwargs1}, v2: {kwargs2}}设置其他绘图标注参数
cols_to_fill_info : dict
需要进行颜色填充的列信息,格式形如(具体参数key参见matplotlib的fill_between函数):
``{col1 : {'color': 'c', 'alpha': 0.3}, ...}``
yscales : None, list
y轴标轴尺度设置,若为None,则默认普通线性坐标,
可设置为list指定每个坐标尺度(参见matplotlib中的set_yscale)
xparls_info : dict
设置x轴平行线信息,格式形如:
``{col1: [(yval1, clor1, styl1, width1, kwargs), (yval2, ...)], col2:, ...}``
其中yval指定平行线y轴位置,clor设置颜色,styl设置线型,width设置线宽
yparls_info_up : None, list
设置顶部x轴平行线格式信息,格式形如:
``[(xval1, clor1, styl1, width1, kwargs), (xval2, clor2, style2, width2), ...]``
其中xval指定平行线x轴位置,clor设置颜色,styl设置线型,width设置线宽
yparls_info_low : None, list
设置顶部x轴平行线格式信息,格式同 ``yparls_info_up``
fills_yparl_up : None, list
设置上图平行于y轴的填充区域信息,格式形如:
``[([x1, x2], clor1, alpha1, kwargs), (...)]``
fills_yparl_low : None, list
设置下图平行于y轴的填充区域信息,格式同 ``fills_yparl_up``
fills_xparl : dict
设置平行于x轴的填充区域信息,格式形如:
``{'col1': [([y1, y2], clor1, alpha1, kwargs), ...], 'col2': ...}``
twinx_align_up : None, list
设置上图双坐标轴两边坐标轴刻度对齐位置,格式如 ``[v_left, v_right]`` ,
绘图时左轴的 ``v_left`` 位置与右轴的 ``v_right`` 位置对齐
twinx_align_low : None, list
设置上图双坐标轴两边坐标轴刻度对齐位置,格式同 ``twinx_align_up``
ylabels : None, list
设置四个y轴标签文本内容,若为None则不设置标签文本,
若为False则既不设置y轴标签文本内容,也不显示y轴刻度
xlabels : None, list
置两个x轴标签文本内容,若为None则不设置标签文本,
若为False则既不设置x轴标签文本内容,也不显示x轴刻度
grids : boll, list
设置四个坐标轴网格,若grids=True,则在顶部左轴和底部左轴绘制网格;
若grids=False,则全部没有网格;若为列表,则分别对四个坐标轴设置网格
.. caution::
当某个坐标轴设置为不显示刻度时,其对应的网格线也会不显示?
legend_locs : None, list
设置上下两个图的legend位置,默认设置为[0, 0]
fontname : None, str
字体默认设置为None,可替换其他字体
(如 ``Courier New``, ``Times New Roman``)
.. hint::
matplotlib默认字体为 ``sans-serif``
'''
df = data.copy()
# 网格设置,grids分别设置顶部左边、顶部右边、底部左边、底部右边的网格
if grids is True:
grids = [True, False, True, False]
elif grids is False or grids is None:
grids = [False, False, False, False]
# y轴标签设置
if ylabels is None:
ylabels = [None, None, None, None]
# 坐标轴尺度设置
if yscales is None:
yscales = ['linear'] * 4
# x轴标签设置
if xlabels is None:
xlabels = [None, None]
# legend位置设置
if legend_locs is None:
legend_locs = [0, 0]
# 索引列处理
if df.index.name is None:
df.index.name = 'idx'
idx_name = df.index.name
if idx_name in df.columns:
df.drop(idx_name, axis=1, inplace=True)
df.reset_index(inplace=True)
if len(cols_styl_low_left) == 0 and len(cols_styl_low_right) > 0:
logger_show('当底部图只指定右边坐标轴时,默认绘制在左边坐标轴!', logger, 'warning')
cols_styl_low_left, cols_styl_low_right = cols_styl_low_right, {}
# 坐标准备
plt.figure(figsize=figsize)
if len(cols_styl_low_left) > 0:
gs = GridSpec(3, 1)
axUpLeft = plt.subplot(gs[:2, :]) # 顶部为主图,占三分之二高度
axLowLeft = plt.subplot(gs[2, :])
else:
gs = GridSpec(1, 1)
axUpLeft = plt.subplot(gs[:, :])
def get_cols_to_label_info(cols_to_label_info, col):
'''需要进行特殊点标注的列绘图设置信息获取'''
to_plots = []
for label_infos in cols_to_label_info[col]:
if len(label_infos) == 5:
ext_styl = True
kwstyl_universal = {}
kwstyl_unique = {}
kwstyl = label_infos[4]
for k, v in kwstyl.items():
if not isinstance(v, dict):
kwstyl_universal.update({k: v})
else:
if k in kwstyl_universal.keys():
kwstyl_unique[k].update(v)
else:
kwstyl_unique[k] = v
else:
ext_styl = False
lbl_col = label_infos[0]
if label_infos[2] is None:
label_infos = [lbl_col, label_infos[1], [None]*len(label_infos[1]),
label_infos[3]]
if label_infos[3] is False:
label_infos = [lbl_col, label_infos[1], label_infos[2],
[False]*len(label_infos[1])]
elif isnull(label_infos[3]) or \
all([isnull(x) for x in label_infos[3]]):
label_infos = [lbl_col, label_infos[1], label_infos[2],
label_infos[1]]
vals = label_infos[1]
for k in range(len(vals)):
series = df[df[lbl_col] == vals[k]][col]
if len(series) > 0:
ln_styl = label_infos[2][k]
lbl_str = label_infos[3][k]
if not ext_styl:
to_plots.append([series, (ln_styl, lbl_str)])
else:
kwothers = {}
kwothers.update(kwstyl_universal)
if vals[k] in kwstyl_unique.keys():
kwothers.update(kwstyl_unique[vals[k]])
to_plots.append([series, (ln_styl, lbl_str, kwothers)])
return to_plots
def get_xparls_info(parls_info, col, clor_default='k',
lnstyl_default='--', lnwidth_default=1.0):
'''x轴平行线绘图设置信息获取'''
parls = parls_info[col]
to_plots = []
for parlInfo in parls:
val, clor, lnstyl, lnwidth, kwstyl = get_parls_info(parlInfo)
clor = clor_default if isnull(clor) else clor
lnstyl = lnstyl_default if isnull(lnstyl) else lnstyl
lnwidth = lnwidth_default if isnull(lnwidth) else lnwidth
to_plots.append([val, clor, lnstyl, lnwidth, kwstyl])
return to_plots
def get_yparls_info(parls_info, clor_default='r', lnstyl_default='--',
lnwidth_default=1.0):
'''y轴平行线绘图设置信息获取'''
to_plots = []
for parlInfo in parls_info:
val, clor, lnstyl, lnwidth, kwstyl = get_parls_info(parlInfo)
clor = clor_default if isnull(clor) else clor
lnstyl = lnstyl_default if isnull(lnstyl) else lnstyl
lnwidth = lnwidth_default if isnull(lnwidth) else lnwidth
val = df[df[idx_name] == val].index[0]
to_plots.append([val, clor, lnstyl, lnwidth, kwstyl])
return to_plots
def get_fills_xparl_info(fills_info, col,
clor_default='grey', alpha_default=0.3):
'''x轴平行填充区域设置信息获取'''
fills_info_ = fills_info[col]
to_fills = []
for fillInfo in fills_info_:
ylocs, clor, alpha, kwstyl = get_fill_info(fillInfo)
clor = clor_default if isnull(clor) else clor
alpha = alpha_default if isnull(alpha) else alpha
to_fills.append([ylocs, clor, alpha, kwstyl])
return to_fills
def get_fills_yparl_info(fills_info, clor_default='grey', alpha_default=0.3):
'''y轴平行填充区域设置信息获取'''
to_fills = []
for fillInfo in fills_info:
xlocs, clor, alpha, kwstyl = get_fill_info(fillInfo)
clor = clor_default if isnull(clor) else clor
alpha = alpha_default if isnull(alpha) else alpha
xlocs = [df[df[idx_name] == x].index[0] for x in xlocs]
to_fills.append([xlocs, clor, alpha, kwstyl])
return to_fills
def twinx_align(ax_left, ax_right, v_left, v_right):
'''双坐标轴左右按照v_left和v_right对齐'''
left_min, left_max = ax_left.get_ybound()
right_min, right_max = ax_right.get_ybound()
k = (left_max-left_min) / (right_max-right_min)
b = left_min - k * right_min
x_right_new = k * v_right + b
dif = x_right_new - v_left
if dif >= 0:
right_min_new = ((left_min-dif) - b) / k
k_new = (left_min-v_left) / (right_min_new-v_right)
b_new = v_left - k_new * v_right
right_max_new = (left_max - b_new) / k_new
else:
right_max_new = ((left_max-dif) - b) / k
k_new = (left_max-v_left) / (right_max_new-v_right)
b_new = v_left - k_new * v_right
right_min_new = (left_min - b_new) / k_new
ax_right.set_ylim([right_min_new, right_max_new])
ax_right.set_yscale('function', functions=(_forward, _inverse))
return ax_left, ax_right
# lns存放双坐标legend信息
# 双坐标轴legend参考:https://www.cnblogs.com/Atanisi/p/8530693.html
lns = []
# 顶部左边坐标轴
for col, styl in cols_styl_up_left.items():
ln = _plot_series_with_styls_info(axUpLeft, df[col], styl)
if ln is not None:
lns.append(ln)
# 填充
if col in cols_to_fill_info.keys():
kwargs_fill = cols_to_fill_info[col]
axUpLeft.fill_between(df.index, df[col], **kwargs_fill)
# 特殊点标注
if col in cols_to_label_info.keys():
to_plots = get_cols_to_label_info(cols_to_label_info, col)
for series, styls_info in to_plots:
ln = _plot_series_with_styls_info(axUpLeft, series, styls_info,
lnstyl_default='ko', markersize=markersize)
if ln is not None:
lns.append(ln)
# x轴平行线
if col in xparls_info.keys():
to_plots = get_xparls_info(xparls_info, col)
for yval, clor, lnstyl, lnwidth, kwstyl_ in to_plots:
axUpLeft.axhline(y=yval, c=clor, ls=lnstyl, lw=lnwidth,
**kwstyl_)
# x轴平行填充
xlimMinUp, xlimMaxUp = axUpLeft.axis()[0], axUpLeft.axis()[1]
if col in fills_xparl.keys():
to_fills = get_fills_xparl_info(fills_xparl, col)
for ylocs, clor, alpha, kwstyl_ in to_fills:
axUpLeft.fill_betweenx(ylocs, xlimMinUp, xlimMaxUp,
color=clor, alpha=alpha, **kwstyl_)
# 坐标轴尺度
axUpLeft.set_yscale(yscales[0])
# y轴平行线
if not isnull(yparls_info_up):
to_plots = get_yparls_info(yparls_info_up)
for xval, clor, lnstyl, lnwidth, kwstyl_ in to_plots:
axUpLeft.axvline(x=xval, c=clor, ls=lnstyl, lw=lnwidth,
**kwstyl_)
# y轴平行填充
if not isnull(fills_yparl_up):
ylimmin, ylimmax = axUpLeft.axis()[2], axUpLeft.axis()[3]
to_fills = get_fills_yparl_info(fills_yparl_up)
for xlocs, clor, alpha, kwstyl_ in to_fills:
axUpLeft.fill_between(xlocs, ylimmin, ylimmax,
color=clor, alpha=alpha, **kwstyl_)
# 顶部左边坐标轴网格
axUpLeft.grid(grids[0])
# 标题绘制在顶部图上
if title is not None:
if isnull(fontname):
axUpLeft.set_title(title, fontsize=fontsize_title)
else:
axUpLeft.set_title(title, fontdict={'family': fontname,
'size': fontsize_title})
# y轴标签文本
if ylabels[0] is False:
axUpLeft.set_ylabel(None)
axUpLeft.set_yticks([])
else:
if isnull(fontname):
axUpLeft.set_ylabel(ylabels[0], fontsize=fontsize_label)
[_.set_fontsize(fontsize_tick) for _ in axUpLeft.get_yticklabels()]
else:
axUpLeft.set_ylabel(ylabels[0], fontdict={'family': fontname,
'size': fontsize_label})
# y轴刻度字体
[_.set_fontname(fontname) for _ in axUpLeft.get_yticklabels()]
[_.set_fontsize(fontsize_tick) for _ in axUpLeft.get_yticklabels()]
# 顶部右边坐标轴
if len(cols_styl_up_right) > 0:
axUpRight = axUpLeft.twinx()
for col, styl in cols_styl_up_right.items():
ln = _plot_series_with_styls_info(axUpRight, df[col], styl,
lbl_str_ext='(右)')
if ln is not None:
lns.append(ln)
# 填充
if col in cols_to_fill_info.keys():
kwargs_fill = cols_to_fill_info[col]
axUpRight.fill_between(df.index, df[col], **kwargs_fill)
# 特殊点标注
if col in cols_to_label_info.keys():
to_plots = get_cols_to_label_info(cols_to_label_info, col)
for series, styls_info in to_plots:
ln = _plot_series_with_styls_info(axUpRight, series,
styls_info, lnstyl_default='ko',
markersize=markersize, lbl_str_ext='(右)')
if ln is not None:
lns.append(ln)
# x轴平行线
if col in xparls_info.keys():
to_plots = get_xparls_info(xparls_info, col)
for yval, clor, lnstyl, lnwidth, kwstyl_ in to_plots:
axUpRight.axhline(y=yval, c=clor, ls=lnstyl, lw=lnwidth,
**kwstyl_)
# x轴平行填充
if col in fills_xparl.keys():
to_fills = get_fills_xparl_info(fills_xparl, col)
for ylocs, clor, alpha, kwstyl_ in to_fills:
axUpRight.fill_betweenx(ylocs, xlimMinUp, xlimMaxUp,
color=clor, alpha=alpha, **kwstyl_)
# 顶部双坐标轴刻度对齐
if twinx_align_up is not None:
axUpLeft, axUpRight = twinx_align(axUpLeft, axUpRight,
twinx_align_up[0], twinx_align_up[1])
# 坐标轴尺度
axUpRight.set_yscale(yscales[1])
# 顶部右边坐标轴网格
axUpRight.grid(grids[1])
# y轴标签文本
if ylabels[1] is False:
axUpRight.set_ylabel(None)
axUpRight.set_yticks([])
else:
if isnull(fontname):
axUpRight.set_ylabel(ylabels[1], fontsize=fontsize_label)
[_.set_fontsize(fontsize_tick) for _ in axUpRight.get_yticklabels()]
else:
axUpRight.set_ylabel(ylabels[1], fontdict={'family': fontname,
'size': fontsize_label})
# y轴刻度字体
[_.set_fontname(fontname) for _ in axUpRight.get_yticklabels()]
[_.set_fontsize(fontsize_tick) for _ in axUpRight.get_yticklabels()]
# 顶部图legend合并显示
if len(lns) > 0:
lnsAdd = lns[0]
for ln in lns[1:]:
lnsAdd = lnsAdd + ln
labs = [l.get_label() for l in lnsAdd]
if isnull(fontname):
axUpLeft.legend(lnsAdd, labs, loc=legend_locs[0],
fontsize=fontsize_legend)
else:
axUpLeft.legend(lnsAdd, labs, loc=legend_locs[0],
prop={'family': fontname, 'size': fontsize_legend})
if len(cols_styl_low_left) > 0:
# 要绘制底部图时取消顶部图x轴刻度
# axUpLeft.set_xticks([]) # 这样会导致设置网格线时没有竖线
axUpLeft.set_xticklabels([]) # 这样不会影响设置网格
lns = []
# 底部左边坐标轴
for col, styl in cols_styl_low_left.items():
ln = _plot_series_with_styls_info(axLowLeft, df[col], styl)
if ln is not None:
lns.append(ln)
# 填充
if col in cols_to_fill_info.keys():
kwargs_fill = cols_to_fill_info[col]
axLowLeft.fill_between(df.index, df[col], **kwargs_fill)
# 特殊点标注
if col in cols_to_label_info.keys():
to_plots = get_cols_to_label_info(cols_to_label_info, col)
for series, styls_info in to_plots:
ln = _plot_series_with_styls_info(axLowLeft, series,
styls_info, lnstyl_default='ko', markersize=markersize)
if ln is not None:
lns.append(ln)
# x轴平行线
if col in xparls_info.keys():
to_plots = get_xparls_info(xparls_info, col)
for yval, clor, lnstyl, lnwidth, kwstyl_ in to_plots:
axLowLeft.axhline(y=yval, c=clor, ls=lnstyl, lw=lnwidth,
**kwstyl_)
# x轴平行填充
xlimMinLow, xlimMaxLow = axLowLeft.axis()[0], axLowLeft.axis()[1]
if col in fills_xparl.keys():
to_fills = get_fills_xparl_info(fills_xparl, col)
for ylocs, clor, alpha, kwstyl_ in to_fills:
axLowLeft.fill_betweenx(ylocs, xlimMinLow, xlimMaxLow,
color=clor, alpha=alpha, **kwstyl_)
# 坐标轴尺度
axLowLeft.set_yscale(yscales[2])
# y轴平行线
if not isnull(yparls_info_low):
to_plots = get_yparls_info(yparls_info_low)
for xval, clor, lnstyl, lnwidth, kwstyl_ in to_plots:
axLowLeft.axvline(x=xval, c=clor, ls=lnstyl, lw=lnwidth,
**kwstyl_)
# y轴平行填充
if not isnull(fills_yparl_low):
ylimmin, ylimmax = axLowLeft.axis()[2], axLowLeft.axis()[3]
to_fills = get_fills_yparl_info(fills_yparl_low)
for xlocs, clor, alpha, kwstyl_ in to_fills:
axLowLeft.fill_between(xlocs, ylimmin, ylimmax,
color=clor, alpha=alpha, **kwstyl_)
# 底部左边坐标轴网格
axLowLeft.grid(grids[2])
# y轴标签文本
if ylabels[2] is False:
axLowLeft.set_ylabel(None)
axLowLeft.set_yticks([])
else:
if isnull(fontname):
axLowLeft.set_ylabel(ylabels[2], fontsize=fontsize_label)
[_.set_fontsize(fontsize_tick) for _ in axLowLeft.get_yticklabels()]
else:
axLowLeft.set_ylabel(ylabels[2], fontdict={'family': fontname,
'size': fontsize_label})
# y轴刻度字体
[_.set_fontname(fontname) for _ in axLowLeft.get_yticklabels()]
[_.set_fontsize(fontsize_tick) for _ in axLowLeft.get_yticklabels()]
# 底部右边坐标轴
if len(cols_styl_low_right) > 0:
axLowRight = axLowLeft.twinx()
for col, styl in cols_styl_low_right.items():
ln = _plot_series_with_styls_info(axLowRight, df[col], styl,
lbl_str_ext='(右)')
if ln is not None:
lns.append(ln)
# 填充
if col in cols_to_fill_info.keys():
kwargs_fill = cols_to_fill_info[col]
axLowRight.fill_between(df.index, df[col], **kwargs_fill)
# 特殊点标注
if col in cols_to_label_info.keys():
to_plots = get_cols_to_label_info(cols_to_label_info, col)
for series, styls_info in to_plots:
ln = _plot_series_with_styls_info(axLowRight, series,
styls_info, lnstyl_default='ko',
markersize=markersize, lbl_str_ext='(右)')
if ln is not None:
lns.append(ln)
# x轴平行线
if col in xparls_info.keys():
to_plots = get_xparls_info(xparls_info, col)
for yval, clor, lnstyl, lnwidth, kwstyl_ in to_plots:
axLowRight.axhline(y=yval, c=clor, ls=lnstyl,
lw=lnwidth, **kwstyl_)
# x轴平行填充
if col in fills_xparl.keys():
to_fills = get_fills_xparl_info(fills_xparl, col)
for ylocs, clor, alpha, kwstyl_ in to_fills:
axLowRight.fill_betweenx(ylocs, xlimMinUp, xlimMaxUp,
color=clor, alpha=alpha, **kwstyl_)
# 底部双坐标轴刻度对齐
if twinx_align_low is not None:
axLowLeft, axLowRight = twinx_align(axUpLeft, axUpRight,
twinx_align_low[0], twinx_align_low[1])
# 坐标轴尺度
axLowRight.set_yscale(yscales[3])
# 底部右边坐标轴网格
axLowRight.grid(grids[3])
# y轴标签文本
if ylabels[3] is False:
axLowRight.set_ylabel(None)
axLowRight.set_yticks([])
else:
if isnull(fontname):
axLowRight.set_ylabel(ylabels[3], fontsize=fontsize_label)
[_.set_fontsize(fontsize_tick) for _ in axLowRight.get_yticklabels()]
else:
axLowRight.set_ylabel(ylabels[3],
fontdict={'family': fontname,
'size': fontsize_label})
# y轴刻度字体
[_.set_fontname(fontname) for _ in axLowRight.get_yticklabels()]
[_.set_fontsize(fontsize_tick) for _ in axLowRight.get_yticklabels()]
# 底部图legend合并显示
if len(lns) > 0:
lnsAdd = lns[0]
for ln in lns[1:]:
lnsAdd = lnsAdd + ln
labs = [l.get_label() for l in lnsAdd]
if isnull(fontname):
axLowLeft.legend(lnsAdd, labs, loc=legend_locs[1],
fontsize=fontsize_legend)
else:
axLowLeft.legend(lnsAdd, labs, loc=legend_locs[1],
prop={'family': fontname, 'size': fontsize_legend})
# x轴刻度
n = df.shape[0]
xpos = [int(x*n/n_xticks) for x in range(0, n_xticks)] + [n-1]
# 上图x轴刻度
axUpLeft.set_xticks(xpos)
if isnull(fontname):
axUpLeft.set_xticklabels([df.loc[x, idx_name] for x in xpos],
fontsize=fontsize_tick,
rotation=xticks_rotation)
else:
axUpLeft.set_xticklabels([df.loc[x, idx_name] for x in xpos],
fontdict={'family': fontname,
'size': fontsize_tick},
rotation=xticks_rotation)
# 下图x轴刻度
if len(cols_styl_low_left) > 0:
axLowLeft.set_xticks(xpos)
if isnull(fontname):
axLowLeft.set_xticklabels([df.loc[x, idx_name] for x in xpos],
fontsize=fontsize_tick,
rotation=xticks_rotation)
else:
axLowLeft.set_xticklabels([df.loc[x, idx_name] for x in xpos],
fontdict={'family': fontname,
'size': fontsize_tick},
rotation=xticks_rotation)
# x轴标签文本-上图
if xlabels[0] is False:
axUpLeft.set_xlabel(None)
axUpLeft.set_xticks([])
else:
if isnull(fontname):
axUpLeft.set_xlabel(xlabels[0], fontsize=fontsize_label)
else:
axUpLeft.set_xlabel(xlabels[0], fontdict={'family': fontname,
'size': fontsize_label})
# x轴标签文本-下图
if len(cols_styl_low_left) > 0:
if xlabels[1] is False:
axLowLeft.set_xlabel(None)
axLowLeft.set_xticks([])
else:
if isnull(fontname):
axLowLeft.set_xlabel(xlabels[1], fontsize=fontsize_label)
else:
axLowLeft.set_xlabel(xlabels[1], fontdict={'family': fontname,
'size': fontsize_label})
plt.tight_layout()
# 保存图片
if fig_save_path:
plt.savefig(fig_save_path)
plt.show()
#%%
def plot_series_conlabel(data, conlabel_info, del_repeat_lbl=True, **kwargs):
    '''
    Adds continuous-labelling support on top of
    :func:`dramkit.plottools.plot_common.plot_series`.

    Parameters
    ----------
    data : pandas.DataFrame
        Data to plot.
    conlabel_info : dict
        Plotting info for the columns that need continuous labelling,
        in the form:
        ``{col: [[lbl_col, (v1, ...), (styl1, ...), (lbl1, ...)]]}``

        .. note::
            The last value in (v1, ...) is treated as the default value and
            the preceding ones as special values (when plotting, default-value
            points are connected to special-value points so the drawn line
            stays continuous).
    del_repeat_lbl : bool
        When conlabel_info and cols_to_label_info contain duplicated settings,
        whether to drop the duplicated entries from cols_to_label_info.
    **kwargs :
        Arguments accepted by
        :func:`dramkit.plottools.plot_common.plot_series`.
    '''
    df_ = data.copy()
    df_['_tmp_idx_'] = range(0, df_.shape[0])
    kwargs_new = kwargs.copy()
    if 'cols_to_label_info' in kwargs_new.keys():
        cols_to_label_info = kwargs_new['cols_to_label_info']
    else:
        cols_to_label_info = {}
    def _deal_exist_lbl_col(col, lbl_col, del_exist=True):
        '''
        Handle label columns already present in cols_to_label_info;
        drop the duplicated entries when del_exist is True.
        '''
        if col in cols_to_label_info.keys():
            if len(cols_to_label_info[col]) > 0 and del_exist:
                for k in range(len(cols_to_label_info[col])):
                    if cols_to_label_info[col][k][0] == lbl_col:
                        del cols_to_label_info[col][k]
        else:
            cols_to_label_info[col] = []
    for col, lbl_infos in conlabel_info.items():
        lbl_infos_new = []
        for lbl_info in lbl_infos:
            lbl_col = lbl_info[0]
            _deal_exist_lbl_col(col, lbl_col, del_exist=del_repeat_lbl)
            Nval = len(lbl_info[1])
            tmp = 0
            for k in range(0, Nval):
                val = lbl_info[1][k]
                # For every continuous run of `val` in the label column,
                # create a temporary column holding only that run.
                # get_con_start_end() is defined elsewhere in dramkit.
                start_ends = get_con_start_end(df_[lbl_col], lambda x: x == val)
                for _ in range(0, len(start_ends)):
                    new_col = '_'+lbl_col+'_tmp_'+str(tmp)+'_'
                    df_[new_col] = np.nan
                    idx0, idx1 = start_ends[_][0], start_ends[_][1]+1
                    if k == Nval-1:
                        # Last value acts as the default: extend the run by
                        # one point on both sides to keep the line continuous.
                        idx0, idx1 = max(0, idx0-1), min(idx1+1, df_.shape[0])
                    df_.loc[df_.index[idx0: idx1], new_col] = val
                    if _ == 0:
                        # Only the first run carries the legend label; later
                        # runs pass False so the legend is not repeated.
                        if len(lbl_info) == 4:
                            lbl_infos_new.append([new_col, (val,),
                                  (lbl_info[2][k],), (lbl_info[3][k],)])
                        elif len(lbl_info) == 5:
                            lbl_infos_new.append([new_col, (val,),
                                  (lbl_info[2][k],), (lbl_info[3][k],), lbl_info[4]])
                    else:
                        if len(lbl_info) == 4:
                            lbl_infos_new.append([new_col, (val,),
                                  (lbl_info[2][k],), (False,)])
                        elif len(lbl_info) == 5:
                            lbl_infos_new.append([new_col, (val,),
                                  (lbl_info[2][k],), (False,), lbl_info[4]])
                    tmp += 1
        # cols_to_label_info[col] += lbl_infos_new
        cols_to_label_info[col] = lbl_infos_new + cols_to_label_info[col]
    kwargs_new['cols_to_label_info'] = cols_to_label_info
    plot_series(df_, **kwargs_new)
#%%
def plot_maxmins(data, col, col_label, label_legend=None,
                 figsize=(11, 6), grid=True, title=None, n_xticks=8,
                 markersize=10, fig_save_path=None, **kwargs):
    '''
    | Plot the series ``data[col]`` and mark its extrema.
    | In ``data[col_label]`` a value of 1 marks a local maximum, -1 a local
      minimum and 0 an ordinary point.
    | ``label_legend`` gives the legend labels used for 1 and -1
      (defaults to ``['Max', 'Min']``).
    | ``**kwargs`` are the other arguments supported by
      :func:`dramkit.plottools.plot_common.plot_series`.
    '''
    # Use a None sentinel instead of a mutable list default: the previous
    # default `label_legend=['Max', 'Min']` was a single list object shared
    # across all calls (classic mutable-default pitfall).
    if label_legend is None:
        label_legend = ['Max', 'Min']
    plot_series(data, {col: ('-k.', None)},
                cols_to_label_info={col: [[col_label, (1, -1), ('bv', 'r^'),
                                           label_legend]]},
                grids=grid, figsize=figsize, title=title, n_xticks=n_xticks,
                markersize=markersize, fig_save_path=fig_save_path, **kwargs)
#%%
def _plot_maxmins_bk(data, col, col_label, label_legend=None,
                     figsize=(11, 6), grid=True, title=None, n_xticks=8,
                     markersize=10, fontsize=15, fig_save_path=None):
    '''
    Plot the series ``data[col]`` and mark its extrema (legacy/backup
    implementation of :func:`plot_maxmins`).

    In ``data[col_label]`` a value of 1 marks a local maximum, -1 a local
    minimum and 0 an ordinary point. ``label_legend`` gives the legend labels
    used for 1 and -1 (defaults to ``['Max', 'Min']``); ``n_xticks`` sets the
    number of x-axis ticks shown.
    '''
    # None sentinel instead of a shared mutable default argument.
    if label_legend is None:
        label_legend = ['Max', 'Min']
    df = data.copy()
    if df.index.name is None:
        df.index.name = 'idx'
    idx_name = df.index.name
    # Avoid a column clash before moving the index into a regular column.
    if idx_name in df.columns:
        df.drop(idx_name, axis=1, inplace=True)
    df.reset_index(inplace=True)
    series = df[col]
    series_max = df[df[col_label] == 1][col]
    series_min = df[df[col_label] == -1][col]
    plt.figure(figsize=figsize)
    plt.plot(series, '-k.', label=col)
    plt.plot(series_max, 'bv', markersize=markersize, label=label_legend[0])
    plt.plot(series_min, 'r^', markersize=markersize, label=label_legend[1])
    plt.legend(loc=0, fontsize=fontsize)
    # Evenly spaced x ticks plus the final point.
    n = df.shape[0]
    xpos = [int(x*n/n_xticks) for x in range(0, n_xticks)] + [n-1]
    plt.xticks(xpos, [df.loc[x, idx_name] for x in xpos])
    plt.grid(grid)
    if title:
        plt.title(title, fontsize=fontsize)
    if fig_save_path:
        plt.savefig(fig_save_path)
    plt.show()
#%%
if __name__ == '__main__':
    import time
    import pandas as pd
    strt_tm = time.time()  # start time for the elapsed-time report at the end
    #%%
    # Build a random demo DataFrame: four value columns plus two label
    # columns (1 = high point, -1 = low point, 0 = ordinary point).
    col1 = np.random.normal(10, 5, (100, 1))
    col2 = np.random.rand(100, 1)
    col3 = np.random.uniform(0, 20, (100, 1))
    col4 = col1 ** 2
    df = pd.DataFrame(np.concatenate((col1, col2, col3, col4), axis=1))
    df.columns = ['col1', 'col2', 'col3', 'col4']
    df['label1'] = df['col1'].apply(lambda x: 1 if x > 15 else \
                                    (-1 if x < 5 else 0))
    df['label2'] = df['col3'].apply(lambda x: 1 if x > 15 else \
                                    (-1 if x < 5 else 0))
    df.index = list(map(lambda x: 'idx'+str(x), df.index))
    # Demo: plot col1 with its labelled extrema.
    plot_maxmins(df, 'col1', 'label1', label_legend=['high', 'low'],
                 figsize=(11, 7), grid=False, title='col1', n_xticks=20,
                 markersize=10, fig_save_path=None)
plot_series(df, {'col1': ('.-r', None)},
cols_styl_up_right={'col2': ('.-y', 0),
'col3': ('-3', '3')},
# cols_styl_low_left={'col1': ('.-r', 't1')},
cols_styl_low_right={'col4': ('.-k', 't4')},
cols_to_label_info={'col2':
[['label1', (1, -1), ('gv', 'r^'), None]],
'col4':
[['label2', (-1, 1), ('b*', 'mo'), None]]},
yscales=None,
xparls_info={'col1': [(10, 'k', '--', 3), (15, 'b', '-', 1)],
'col4': [(200, None, None, None)]},
yparls_info_up=[('idx20', None, None, None),
('idx90', 'g', '-', 4)],
yparls_info_low=[('idx50', None, None, None),
('idx60', 'b', '--', 2)],
fills_yparl_up=[(['idx2', 'idx12'], 'black', 0.5),
(['idx55', 'idx77'], None, None)],
fills_yparl_low=[(['idx22', 'idx32'], 'red', 0.5),
(['idx65', 'idx87'], None, None, {}),
(['idx37', 'idx50'], None, None, {})],
fills_xparl={'col1': [([20, 25], 'green', 0.5),
([0, 5], None, None, {})],
'col2': [([10, 12.5], 'blue', 0.5)],
'col3': [([5.5, 8.5], 'red', 0.5)],
'col4': [([200, 400], 'yellow', None, {}),
([0, 100], 'green', None, {})]},
ylabels=['y1', 'y2', None, False],
xlabels=['$X_1$', '$x^2$'],
grids=[True, False, True, True], figsize=(10, 8),
title='test', n_xticks=8,
# fontname='Times New Roman',
xticks_rotation=45,
fontsize_label=15, fontsize_title=15, fontsize_legend=15,
fontsize_tick=15, markersize=10, logger=None,
fig_save_path='./test/plot_common.png')
plot_series(df, {'col1': ('.-r', None)},
# cols_to_label_info={'col1': [['label1', (1, -1), ('gv', 'r^'),
# None], ['label2', (-1, 1), ('*', 'o'), None]]},
cols_to_label_info=\
{'col1': [
['label1', (1, -1), ('gv', 'r^'), None, {'alpha': 0.5}],
['label2', (-1, 1), ('*', 'o'), None,
{'markersize': 20, -1: {'alpha': 1}, 1: {'alpha': 0.3}}]
]},
yscales=None,
xparls_info={'col1': [(10, 'k', '--', 5, {'alpha': 0.3}),
(15, None, None, None)],
'col4': [(200, None, None, None)]},
yparls_info_up=[('idx20', None, None, None),
('idx90', 'g', '-', 5, {'alpha': 0.5})],
yparls_info_low=[('idx50', None, None, None),
('idx60', 'b', '--', 5)],
ylabels=['a', '2', None, False],
grids=False, figsize=(10, 8), title='test', n_xticks=10,
fontsize_label=30, markersize=10,
fig_save_path='./test/plot_common.png', logger=None)
#%%
df1 = pd.DataFrame({'col': [1, 10, 100, 10, 100, 10000, 100]})
plot_series(df1, {'col': '.-k'})
plot_series(df1, {'col': '.-k'}, yscales=['log'])
#%%
df2 = pd.DataFrame({'y1': [1, 2, 3, 1, 5, 6, 7],
'y2': [0.0, -0.1, -0.2, -0.25, np.nan, -0.2, -0.05],
'y3': [2, 3, 4, 2, 6, 7, 8],})
plot_series(df2, {'y1': '.-k', 'y3': '.-y'},
cols_styl_up_right={'y2': ('-b', None, {'alpha': 0.4})},
cols_to_fill_info={
'y2': {'color': 'c', 'alpha': 0.3},
# 'y1': {'color': 'c', 'alpha': 0.3},
# 'y3': {'color': 'm', 'alpha': 0.5}
}
)
#%%
df3 = pd.DataFrame({'x': np.random.normal(10, 5, (100,))})
df3['label0'] = 0
df3.loc[df3.index[[2, 20, 30, 90]], 'label0'] = 1
df3.loc[df3.index[[5, 26, 40, 70]], 'label0'] = -1
df3['label'] = 0
df3.loc[df3.index[5:20], 'label'] = 1
df3.loc[df3.index[30:50], 'label'] = -1
df3.loc[df3.index[60:80], 'label'] = 1
df3['x1'] = df3['x'] - 5
plot_series_conlabel(df3,
# conlabel_info={},
conlabel_info={'x': [['label', (1, -1), ('.-r', '.-b'), (None, None),
{'alpha': 1, 1: {'markersize': 20}}]]},
cols_styl_up_left={'x': '.-k'},
cols_to_label_info={'x':
[['label', (-1, 1), ('r^', 'gv'), False]]},
del_repeat_lbl=False,
# cols_to_fill_info={
# 'x': {'y2': df3['x'].min(),
# 'color': 'c', 'alpha': 0.3}}
# cols_to_fill_info={
# 'x': {'y2': df3['x'].max(),
# 'color': 'c', 'alpha': 0.3}}
cols_to_fill_info={
'x': {'y2': df3['x1'],
'color': 'c', 'alpha': 0.3}},
xticks_rotation=45)
#%%
print('used time: {}s.'.format(round(time.time()-strt_tm, 6)))
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
1330,
384,
397,
1211,
355,
3013,
82,
198,
2,
3013,
82,
13,
2617,
3419,
198,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
4850,
15813,
13,
6783,
10141,
... | 1.470072 | 27,249 |
import os
import cv2
import sys
import numpy as np
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torchvision
import argparse
import config as cf
import operator
import csv
from torchvision import datasets, models, transforms
from networks import *
from torch.autograd import Variable
from PIL import Image
# ---- CLI arguments -------------------------------------------------------
parser = argparse.ArgumentParser(description='Baseline')
parser.add_argument('--net_type', default='resnet', type=str, help='model')
parser.add_argument('--depth', default=50, type=str, help='depth of model')
args = parser.parse_args()

# ---- Phase 1 : Model Upload ----------------------------------------------
print('\n[Test Phase] : Model Weight Upload')
use_gpu = torch.cuda.is_available()

# Class names are taken from the training split's sub-folder names.
data_dir = cf.aug_dir+'Only_WBC'
trainset_dir = 'Only_WBC/'
# NOTE(review): `dsets` is never used afterwards; kept only in case the
# ImageFolder scan is relied on as a sanity check of data_dir.
dsets = datasets.ImageFolder(data_dir, None)
H = datasets.ImageFolder(os.path.join(data_dir, 'train'))
dset_classes = H.classes

# Load the trained classifier checkpoint (old-style .t7 torch checkpoint).
print("| Loading checkpoint model for crop inference...")
assert os.path.isdir('../3_classifier/checkpoint'), '[Error]: No checkpoint directory found!'
assert os.path.isdir('../3_classifier/checkpoint/'+trainset_dir), '[Error]: There is no model weight to upload!'
file_name = getNetwork(args)
checkpoint = torch.load('../3_classifier/checkpoint/'+trainset_dir+file_name+'.t7')
model = checkpoint['model']

if use_gpu:
    model.cuda()
    cudnn.benchmark = True
model.eval()

# Warm-up / sanity-check input (legacy torch Variable API).
sample_input = Variable(torch.randn(1,3,224,224), volatile=False)
if use_gpu:
    # BUG FIX: this previously assigned to a misspelled name `sampe_input`,
    # so the warm-up tensor was never actually moved to the GPU.
    sample_input = sample_input.cuda()

# Same normalization pipeline as used at training time.
test_transform = transforms.Compose([
    transforms.Scale(224),
    transforms.CenterCrop(224),
    transforms.ToTensor(),
    transforms.Normalize(cf.mean, cf.std)
])

check_and_mkdir('results/baseline/')
background_root = '/home/bumsoo/Data/test/CT_20/'

# ---- Phase 2 : classify every crop listed in the baseline CSVs -----------
for thresh in [200, 1]:
    print("| Baseline with Threshold : %d" %thresh)
    check_and_mkdir('results/baseline/%d' %thresh)
    for test_num in range(1, 27+1):
        print("\t| Inferencing TEST%d..." %test_num)
        baseline_dir = '/home/bumsoo/Data/baseline_info/%d_TEST%d.csv' %(thresh, test_num)
        with open(baseline_dir, 'r') as csvfile:
            reader = csv.reader(csvfile)
            check_and_mkdir('results/baseline/%d/TEST%d/' %(thresh, test_num))
            with open('results/baseline/%d/TEST%d/TEST%d.csv' %(thresh, test_num, test_num), 'w') as wrfile:
                # NOTE(review): fieldnames are declared but writeheader() is
                # never called, so the output CSV has no header row; confirm
                # downstream readers expect header-less files before changing.
                fieldnames = ['prediction', 'x', 'y', 'w', 'h']
                writer = csv.DictWriter(wrfile, fieldnames=fieldnames)
                original_img = cv2.imread(background_root + 'TEST%d.png' %test_num)
                for row in reader:
                    # Each CSV row is a bounding box: x, y, width, height.
                    x,y,w,h = map(int, row)
                    crop = original_img[y:y+h, x:x+w]
                    crop = cv2.cvtColor(crop, cv2.COLOR_BGR2RGB)
                    if test_transform is not None:
                        img = test_transform(Image.fromarray(crop, mode='RGB'))
                    inputs = img
                    inputs = Variable(inputs, volatile=True)
                    if use_gpu :
                        inputs = inputs.cuda()
                    # Add the batch dimension expected by the model.
                    inputs = inputs.view(1, inputs.size(0), inputs.size(1), inputs.size(2))
                    outputs = model(inputs)
                    softmax_res = softmax(outputs.data.cpu().numpy()[0])
                    # Take the highest-probability class.
                    index, score = max(enumerate(softmax_res), key=operator.itemgetter(1))
                    pred = dset_classes[index]
                    writer.writerow({
                        'prediction': pred,
                        'x': x,
                        'y': y,
                        'w': w,
                        'h': h
                    })
| [
11748,
28686,
198,
11748,
269,
85,
17,
198,
11748,
25064,
198,
11748,
299,
32152,
355,
45941,
198,
198,
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
11748,
28034,
13,
1891,
2412,
13,
66,
463,
20471,
355,
269,
463,
20... | 2.164796 | 1,693 |
import json
from collections import Counter
import spacy
import en_core_web_sm
from tqdm import tqdm
nlp = en_core_web_sm.load()
# def get_subject_verb_obj(sentence):
# print(sentence)
# tokens = nlp(sentence)
# svos = findSVOs(tokens)
# print(svos)
# print("-------------------------------")
if __name__ == '__main__':
    # NOTE(review): main() is not defined in this part of the file --
    # presumably defined elsewhere in the module; confirm before running.
    main()
11748,
33918,
198,
6738,
17268,
1330,
15034,
198,
11748,
599,
1590,
198,
11748,
551,
62,
7295,
62,
12384,
62,
5796,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
198,
198,
21283,
79,
796,
551,
62,
7295,
62,
12384,
62,
5796,
13,
2... | 2.492958 | 142 |
import os
from setuptools import setup
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = "0"
_MINOR_VERSION = "1"
_PATCH_VERSION = "0"

# Read run-time dependencies from requirements.txt. Blank lines and comment
# lines are skipped: a trailing newline previously left an empty string in
# install_requires, which is not a valid requirement specifier.
with open(os.path.join(os.path.dirname(__file__), "requirements.txt")) as fp:
    install_requires = [
        line.strip()
        for line in fp.read().splitlines()
        if line.strip() and not line.strip().startswith("#")
    ]

setup(
    name="multi-graph",
    # Typo fix: "simulateously" -> "simultaneously".
    description=(
        "Package for simultaneously building and connecting multiple tensorflow graphs "
        "for data pipelining."
    ),
    url="https://github.com/jackd/multi-graph",
    author="Dominic Jack",
    author_email="thedomjack@gmail.com",
    license="Apache 2.0",
    packages=["multi_graph"],
    install_requires=install_requires,
    zip_safe=True,
    python_requires=">=3.6",
    version=".".join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION]),
)
| [
11748,
28686,
198,
198,
6738,
900,
37623,
10141,
1330,
9058,
198,
198,
2,
775,
1061,
12449,
5109,
10628,
278,
357,
5450,
1378,
43616,
332,
13,
2398,
34729,
198,
62,
5673,
41,
1581,
62,
43717,
796,
366,
15,
1,
198,
62,
23678,
1581,
6... | 2.576547 | 307 |
#!/usr/bin/env python3
import argparse
from argparse import RawTextHelpFormatter
import logging
from pbr.version import VersionInfo
from Bio import SeqIO
import itertools
from prettytable import PrettyTable
import tabulate
from xopen import xopen
from fastaqc.alphabet import Alphabet
import pprint
__version__ = VersionInfo('fastaqc').semantic_version().release_string()
def count(record, stats):
    """Increment the running total of processed sequences.

    The tally lives under the ``"sequences"`` key of *stats* and is created
    on first use. *record* is unused but kept for the common per-record
    callback signature shared by the other stat collectors.
    """
    stats['sequences'] = stats.get('sequences', 0) + 1
def compute_character_positions(record, stats):
    """Map every character of the current sequence to its positions.

    The sequence is uppercased first; the resulting
    ``{character: [index, ...]}`` dict is stored under
    ``"_character_positions"`` in *stats*.
    """
    by_char = {}
    for idx, ch in enumerate(record.seq.upper()):
        by_char.setdefault(ch, []).append(idx)
    stats['_character_positions'] = by_char
def count_sequences_with_special_characters(record, stats):
    '''counts the sequences with special characters (depends on the alphabet)
    and stores them in "special_char_count.<sequence_category>.<character>"'''
    # The alphabet must have been detected for this record beforehand;
    # assert_sequence_type_available() (defined elsewhere in this module)
    # presumably fails otherwise -- confirm against its implementation.
    alphabet = assert_sequence_type_available(stats)
    # _count() (defined elsewhere) tallies, per character in
    # alphabet.special_chars, the sequences containing that character.
    _count(stats, 'special_char_count', alphabet.special_chars)
def count_sequences_with_ambiguous_characters(record, stats):
    '''counts the sequences with ambiguous characters (depends on the alphabet)
    and stores them in "ambiguous_char_count.<sequence_category>.<character>"'''
    # Same pattern as count_sequences_with_special_characters, but tallies
    # the alphabet's ambiguous characters instead.
    alphabet = assert_sequence_type_available(stats)
    _count(stats, 'ambiguous_char_count', alphabet.ambiguous_chars)
def count_sequences_with_unknown_characters(record, stats):
    '''counts the sequences with unknown characters (depends on the alphabet)
    and stores them in "unknown_char_count.<sequence_category>.<character>"'''
    # (Docstring fixed: it previously said "ambiguous_char_count", but the
    # code below stores under "unknown_char_count".)
    # These helpers are defined elsewhere in this module; they expose the
    # current sequence category, the detected alphabet and the per-character
    # distribution computed for this record.
    category_name = assert_sequence_category_name_available(stats)
    alphabet = assert_sequence_type_available(stats)
    c_dist = assert_character_distribution_available(stats)
    if 'unknown_char_count' not in stats:
        stats['unknown_char_count'] = {}
    if category_name not in stats['unknown_char_count']:
        stats['unknown_char_count'][category_name] = {}
    counts = stats['unknown_char_count'][category_name]
    # Any character seen in this sequence that is not part of the alphabet
    # counts as "unknown"; bump its per-category tally by one per sequence.
    chars = set(alphabet.all_chars)
    for c in c_dist.keys():
        if c not in chars:
            if c not in counts:
                counts[c] = 0
            counts[c] = counts[c] + 1
if __name__ == "__main__":
    # NOTE(review): main() is not visible in this chunk -- presumably
    # defined elsewhere in the fastaqc module.
    main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
11748,
1822,
29572,
198,
6738,
1822,
29572,
1330,
16089,
8206,
22087,
8479,
1436,
198,
11748,
18931,
198,
6738,
279,
1671,
13,
9641,
1330,
10628,
12360,
198,
6738,
16024,
1330,
1001,
... | 3.205263 | 760 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
pyplr.pupil
===========
A module for interfacing with a Pupil Core eye tracker.
@author: jtm
"""
from time import time
from concurrent import futures
from typing import List, Tuple
import numpy as np
import msgpack
import zmq
class PupilCore:
"""Class to facilitate working with Pupil Core via the Network API.
Example
-------
>>> p = PupilCore()
>>> p.command('R my_recording')
>>> sleep(2.)
>>> p.command('r')
"""
# TODO: use this
eyemap = {'left': 0, 'right': 1}
    def __init__(self,
                 address: str = '127.0.0.1',
                 request_port: str = '50020') -> None:
        """Initialize the connection with Pupil Core.

        Opens the REQ socket to Pupil Remote, queries the SUB/PUB ports and
        opens the publishing socket.

        Parameters
        ----------
        address : string, optional
            The IP address of the device. The default is `127.0.0.1`.
        request_port : string, optional
            Pupil Remote accepts requests via a REP socket, by default on port
            50020. Alternatively, you can set a custom port in Pupil Capture
            or via the `--port` application argument. The default is `50020`.
        """
        self.address = address
        self.request_port = request_port
        # connect to pupil remote
        self.context = zmq.Context()
        self.remote = zmq.Socket(self.context, zmq.REQ)
        self.remote.connect(
            'tcp://{}:{}'.format(self.address, self.request_port))
        # REQ/REP: each send_string must be answered by a recv_string
        # before the next request can be sent.
        # request 'SUB_PORT' for reading data
        self.remote.send_string('SUB_PORT')
        self.sub_port = self.remote.recv_string()
        # request 'PUB_PORT' for writing data
        self.remote.send_string('PUB_PORT')
        self.pub_port = self.remote.recv_string()
        # open socket for publishing
        self.pub_socket = zmq.Socket(self.context, zmq.PUB)
        self.pub_socket.connect(
            'tcp://{}:{}'.format(self.address, self.pub_port))
    def command(self, cmd: str) -> str:
        """
        Send a command via `Pupil Remote
        <https://docs.pupil-labs.com/developer/core/network-api/#pupil-remote>`_.

        Parameters
        ----------
        cmd : string
            Must be one of the following:

            * 'R' - start recording with auto generated session name
            * 'R my_rec' - start recording named `my_rec`
            * 'r' - stop recording
            * 'C' - start currently selected calibration
            * 'c' - stop currently selected calibration
            * 'T 123.45' - resets current Pupil time to given timestamp
            * 't' - get current Pupil time; returns a float as string
            * 'v' - get the Pupil Core software version string
            * 'PUB_PORT' - return the current pub port of the IPC Backbone
            * 'SUB_PORT' - return the current sub port of the IPC Backbone

        Returns
        -------
        string
            The result of the command. If the command was not acceptable, this
            will be 'Unknown command.'
        """
        self.remote.send_string(cmd)
        # REQ socket: the reply must be received before any further request.
        return self.remote.recv_string()
    def notify(self, notification: dict) -> str:
        """Send a `notification <https://docs.pupil-labs.com/developer/core/network-api/#notification-message>`_
        to Pupil Remote.

        Every notification has a topic and can contain potential payload data.
        The payload data has to be serializable, so not every Python object
        will work. To find out which plugins send and receive notifications,
        open the codebase and search for ``.notify_all(`` and ``def on_notify(``.

        Parameters
        ----------
        notification : dict
            The notification dict. Must contain at least a 'subject' key.
            For example::

                {
                    'subject': 'start_plugin',
                    'name': 'Annotation_Capture',
                    'args': {}})
                }

        Returns
        -------
        string
            The response.
        """
        topic = 'notify.' + notification['subject']
        # Two-frame message: topic string first, then the msgpack payload.
        self.remote.send_string(topic, flags=zmq.SNDMORE)
        payload = msgpack.dumps(notification, use_bin_type=True)
        self.remote.send(payload)
        return self.remote.recv_string()
    def annotation_capture_plugin(self, should: str) -> str:
        """Start or stop the Annotation Capture plugin.

        Parameters
        ----------
        should : str
            Either 'start' or 'stop'.

        Raises
        ------
        ValueError
            If `should` not `start` or `stop`.

        Returns
        -------
        str
            The notification response from Pupil Remote.
        """
        if should not in ['start', 'stop']:
            raise ValueError('Must specify start or stop for should.')
        # Maps to the 'start_plugin' / 'stop_plugin' notification subjects.
        subject = '{}_plugin'.format(should)
        return self.notify({
            'subject': subject,
            'name': 'Annotation_Capture',
            'args': {}
        })
# TODO: is this correct?
def get_corrected_pupil_time(self) -> float:
"""Get the current Pupil Timestamp, corrected for transmission delay.
Returns
-------
float
The current pupil time.
"""
t_before = time()
t = float(self.command('t'))
t_after = time()
delay = (t_after - t_before) / 2.0
return t + delay
    def _broadcast_pupil_detector_properties(
            self,
            detector_name: str,
            eye: str) -> None:
        """Request property broadcast from a single pupil detector running in
        single eye process.

        Parameters
        ----------
        detector_name : string
            `'Detector2DPlugin'` or `'Pye3DPlugin'`.
        eye : str
            Left or right.

        Raises
        ------
        ValueError
            If `eye` is not 'left' or 'right'.

        Returns
        -------
        None.
        """
        if eye not in ['left', 'right']:
            raise ValueError('Eye must be "left" or "right".')
        payload = {
            "subject": "pupil_detector.broadcast_properties",
            "eye_id": self.eyemap[eye],
            "detector_plugin_class_name": detector_name,
        }
        # Drop None-valued entries so optional fields are omitted entirely.
        payload = {k: v for k, v in payload.items() if v is not None}
        self.notify(payload)
def get_pupil_detector_properties(self,
detector_name: str,
eye_id: int) -> dict:
"""Get the detector properties for a single pupil detector running in
a single eye process.
Parameters
----------
detector_name : string
`'Detector2DPlugin'` or `'Pye3DPlugin'`.
eye_id : int
For the left (0) or right(1) eye.
Returns
-------
payload : dict
Dictionary of detector properties.
"""
self._broadcast_pupil_detector_properties(detector_name, eye_id)
subscriber = self.subscribe_to_topic(
topic='notify.pupil_detector.properties')
_, payload = self.recv_from_subscriber(subscriber)
return payload
    def freeze_3d_model(self, eye_id: int, frozen: bool) -> str:
        """Freeze or unfreeze the Pye3D pupil detector model.

        The Pye3D pupil detector updates continuously unless the model is
        frozen. The updates help to account for head slippage, but can cause
        artefacts in the pupil data. If there is unlikely to be any slippage
        (e.g.., the participant is using a chinrest) then it makes sense to
        freeze the 3D model before presenting stimuli.

        Parameters
        ----------
        eye_id : int
            Whether to freeze the model for the left (1) or right (0) eye.

            .. note:: (review) This mapping contradicts ``PupilCore.eyemap``
               (``{'left': 0, 'right': 1}``) -- confirm which is correct.
        frozen : bool
            Whether to freeze or unfreeze the model.

        Raises
        ------
        ValueError
            If eye_id is not specified correctly.
        TypeError
            If frozen is not a bool.

        Returns
        -------
        string
            The notification response.
        """
        if eye_id not in [0, 1]:
            raise ValueError('Must specify 0 or 1 for eye_id')
        if not isinstance(frozen, bool):
            raise TypeError('Must specify True or False for frozen')
        notification = {
            'topic': 'notify.pupil_detector.set_properties',
            'subject': 'pupil_detector.set_properties',
            'values': {'is_long_term_model_frozen': frozen},
            'eye_id': eye_id,
            'detector_plugin_class_name': 'Pye3DPlugin'
        }
        mode = 'Freezing' if frozen else 'Unfreezing'
        print(f'> {mode} 3d model for eye {eye_id}')
        return self.notify(notification)
    def check_3d_model(self,
                       eyes: List[int] = [0, 1],
                       alert: bool = False) -> None:
        """Stop and ask the overseer whether the 3D model should be refit.

        The model is well-fit when the blue and red ellipses overlap as much
        as possible for all gaze angles and when the size of the green ellipse
        is close to that of the eye ball. Open the debug windows if in doubt.

        Parameters
        ----------
        eyes : list of int, optional
            Which eyes to refit. The default is [0,1].
        alert : bool, optional
            If True, sound the terminal bell before prompting.
            The default is False.

        Returns
        -------
        None.
        """
        if alert:
            print('\a')
        # Keep prompting until a valid y/n answer is given.
        while True:
            response = input('> Refit the 3d model? [y/n]: ')
            if not response in ['y', 'n']:
                print("> Sorry, I didn't understand that.")
                continue
            else:
                break
        if response == 'y':
            # Unfreeze, let the participant roll their eyes so the model can
            # refit, then freeze again once the operator confirms.
            for eye in eyes:
                self.freeze_3d_model(eye_id=eye, frozen=False)
            print('> Ask the participant to roll their eyes')
            input('> Press "Enter" when ready to freeze the model: ')
            for eye in eyes:
                self.freeze_3d_model(eye_id=eye, frozen=True)
        else:
            pass
def new_annotation(self, label: str, custom_fields: dict = None) -> dict:
"""Create a new `annotation <https://docs.pupil-labs.com/core/software/pupil-capture/#annotations>`_
a.k.a. message / event marker / trigger. Send it to Pupil Capture with
the `.send_annotation(...)` method.
Note
----
The default timestamp for an annotation is the current Pupil time
(corrected for transmission delay) at the time of creation, but this
can be overridden at a later point if desired.
Parameters
----------
label : string
A label for the event.
custom_fields : dict, optional
Any additional information to add (e.g., `{'duration': 2,
'color': 'blue'}`).
The default is `None`.
Returns
-------
annotation : dict
The annotation dictionary, ready to be sent.
"""
annotation = {
'topic': 'annotation',
'label': label,
'timestamp': self.get_corrected_pupil_time()
}
if custom_fields is not None:
if not isinstance(custom_fields, dict):
for k, v in custom_fields.items():
annotation[k] = v
return annotation
    def send_annotation(self, annotation: dict) -> None:
        """Send an annotation to Pupil Capture.

        Use to mark the timing of events.

        Parameters
        ----------
        annotation : dict
            Customiseable - see the ``.new_annotation(...)`` method.

        Returns
        -------
        None.
        """
        payload = msgpack.dumps(annotation, use_bin_type=True)
        # Two-frame PUB message: topic string, then the msgpack payload.
        self.pub_socket.send_string(annotation['topic'], flags=zmq.SNDMORE)
        self.pub_socket.send(payload)
    def pupil_grabber(self, topic: str, seconds: float) -> futures.Future:
        """Concurrent access to data from Pupil Core.

        Executes the ``.grab_data(...)`` method in a thread using
        ``concurrent.futures.ThreadPoolExecutor()``, returning a Future object
        with access to the return value.

        Parameters
        ----------
        topic : string
            See ``.grab_data(...)`` for more info.
        seconds : float
            Ammount of time to spend grabbing data.

        Example
        -------
        >>> p = PupilCore()
        >>> seconds = 10.
        >>> pgr_future = p.pupil_grabber(topic='pupil.0.3d', seconds=seconds)
        >>> sleep(seconds)
        >>> data = pgr_future.result()

        Returns
        -------
        concurrent.futures._base_Future
            An object giving access to the data from the thread.
        """
        args = (topic, seconds)
        # A fresh single-use executor per call; the thread runs grab_data.
        return futures.ThreadPoolExecutor().submit(self.grab_data, *args)
    def grab_data(self, topic: str, seconds: float) -> list:
        """Start grabbing data in real time from Pupil Core.

        Blocks for roughly *seconds*; usually run in a thread via
        ``.pupil_grabber(...)``.

        Parameters
        ----------
        topic : string
            Subscription topic. Can be:

                * 'pupil.0.2d'  - 2d pupil datum (left)
                * 'pupil.1.2d'  - 2d pupil datum (right)
                * 'pupil.0.3d'  - 3d pupil datum (left)
                * 'pupil.1.3d'  - 3d pupil datum (right)
                * 'gaze.3d.1.'  - monocular gaze datum
                * 'gaze.3d.01.' - binocular gaze datum
                * 'logging'     - logging data
        seconds : float
            Ammount of time to spend grabbing data.

        Returns
        -------
        data : list
            A list of dictionaries.
        """
        print('> Grabbing {} seconds of {}'.format(seconds, topic))
        subscriber = self.subscribe_to_topic(topic)
        data = []
        start_time = time()
        # Blocking receive loop; each message is a decoded notification dict.
        while time() - start_time < seconds:
            _, message = self.recv_from_subscriber(subscriber)
            data.append(message)
        print('> PupilGrabber done grabbing {} seconds of {}'.format(
            seconds, topic))
        return data
def light_stamper(self,
annotation: dict,
timeout: float,
threshold: int = 15,
topic: str = 'frame.world') -> futures.Future:
"""Concurrent timestamping of light stimuli with World Camera.
Executes the ``.detect_light_onset(...)`` method in a thread using
``concurrent.futures.ThreadPoolExecutor()``, returning a Future object
with access to the return value.
Parameters
----------
annotation : dict
timeout : float, optional
threshold : int
topic : string
See ``.detect_light_onset(...)`` for more information on parameters.
Example
-------
>>> annotation = new_annotation(label='LIGHT_ON')
>>> p = PupilCore()
>>> p.command('R')
>>> sleep(2.)
>>> lst_future = p.light_stamper(annotation, threshold=15, timeout=10)
>>> sleep(10)
>>> # light stimulus here
>>> p.command('r')
>>> data = lst_future.result()
Note
----
Requires a suitable geometry and for the World Camera to be pointed at
the light source. Also requires the following settings in Pupil
Capture:
* Auto Exposure mode - Manual Exposure (eye and world)
* Frame publisher format - BGR
Returns
-------
concurrent.futures._base_Future
An object giving access to the data from the thread.
"""
args = (annotation, threshold, timeout, topic)
return futures.ThreadPoolExecutor().submit(
self.detect_light_onset, *args)
# TODO: Add option to stamp offset
def detect_light_onset(self,
                       annotation: dict,
                       timeout: float,
                       threshold: int = 15,
                       topic: str = 'frame.world') -> Tuple:
    """Algorithm to detect onset of light stimulus with the World Camera.

    Blocks until the mean pixel value of two consecutive camera frames
    differs by more than ``threshold``, then sends ``annotation`` stamped
    with the timestamp of the brighter frame.

    Parameters
    ----------
    annotation : dict
        A dictionary with at least the following::

            {
            'topic': 'annotation',
            'label': '<your label>',
            'timestamp': None
            }

        timestamp will be overwritten with the new pupil timestamp for the
        detected light. See ``.new_annotation(...)`` for more info.
    timeout : float
        Time to wait in seconds before giving up. For `STLAB`, use 6 s,
        because on rare occasions it can take about 5 seconds for the
        `LIGHT_HUB` to process a request.
    threshold : int
        Detection threshold for luminance increase. The right value depends
        on the nature of the light stimulus and the ambient lighting
        conditions. Requires some guesswork right now, but could easily
        write a function that works it out for us.
    topic : string
        The camera frames to subscribe to. In most cases this will be
        `'frame.world'`, but the method will also work for `'frame.eye.0'`
        and `'frame.eye.1'` if the light source contains enough near-
        infrared. The default is `'frame.world'`.

    Returns
    -------
    tuple
        ``(True, timestamp)`` when a light onset was detected, or
        ``(False,)`` when ``timeout`` seconds elapsed first.
    """
    subscriber = self.subscribe_to_topic(topic)
    print('> Waiting for a light to stamp...')
    start_time = time()
    # Baseline frame; each new frame's mean luminance is compared to it.
    previous_frame, _ = self.get_next_camera_frame(
        subscriber, topic)
    while True:
        current_frame, timestamp = self.get_next_camera_frame(
            subscriber, topic)
        if self._luminance_jump(current_frame, previous_frame, threshold):
            self._stamp_light(timestamp, annotation, topic)
            return (True, timestamp)
        # NOTE(review): a falsy timeout (0 or None) disables the timeout
        # entirely, so the loop can block forever -- confirm intended.
        if timeout:
            if time() - start_time > timeout:
                print('> light_stamper failed to detect a light...')
                return (False,)
        # Slide the comparison window forward one frame.
        previous_frame = current_frame
def subscribe_to_topic(self, topic: str) -> zmq.sugar.socket.Socket:
    """Open a SUB socket subscribed to ``topic``.

    Parameters
    ----------
    topic : string
        The topic to which you want to subscribe, e.g., `'pupil.1.3d'`.

    Returns
    -------
    zmq.sugar.socket.Socket
        The subscribed socket.
    """
    sock = self.context.socket(zmq.SUB)
    sock.connect('tcp://{}:{}'.format(self.address, self.sub_port))
    sock.setsockopt_string(zmq.SUBSCRIBE, topic)
    return sock
def get_next_camera_frame(self,
                          subscriber: zmq.sugar.socket.Socket,
                          topic: str) -> Tuple:
    """Get the next camera frame.

    Used by ``.detect_light_onset(...)``.

    Parameters
    ----------
    subscriber : zmq.sugar.socket.Socket
        Subscriber to camera frames.
    topic : string
        Topic string.

    Returns
    -------
    recent_frame : numpy.ndarray
        The camera frame as a ``(height, width, 3)`` uint8 array
        (presumably BGR, per the "Frame publisher format - BGR" capture
        setting noted elsewhere in this class -- confirm).
    recent_frame_ts : float
        Timestamp of the camera frame.
    """
    # Drain incoming messages until one arrives on the requested topic.
    target = ''
    while target != topic:
        target, msg = self.recv_from_subscriber(subscriber)
    # Rebuild the image from the raw bytes attached by recv_from_subscriber
    # under '__raw_data__'; dimensions come from the message metadata.
    recent_frame = np.frombuffer(
        msg['__raw_data__'][0], dtype=np.uint8).reshape(
            msg['height'], msg['width'], 3)
    recent_frame_ts = msg['timestamp']
    return (recent_frame, recent_frame_ts)
def recv_from_subscriber(self,
                         subscriber: zmq.sugar.socket.Socket) -> Tuple:
    """Receive one multipart message: a topic plus its payload.

    Parameters
    ----------
    subscriber : zmq.sugar.socket.Socket
        A subscriber to any valid topic.

    Returns
    -------
    topic : str
        A utf-8 encoded string, returned as a unicode object.
    payload : dict
        A msgpack serialized dictionary, returned as a python dictionary.
        Any additional message frames are collected into a list stored in
        the payload under the key ``'__raw_data__'``.
    """
    topic = subscriber.recv_string()
    payload = msgpack.unpackb(subscriber.recv())
    # Collect any remaining frames of the multipart message (e.g. raw
    # image bytes for camera topics).
    raw_frames = []
    while subscriber.get(zmq.RCVMORE):
        raw_frames.append(subscriber.recv())
    if raw_frames:
        payload['__raw_data__'] = raw_frames
    return (topic, payload)
def fixation_trigger(self,
                     max_dispersion: float = 3.0,
                     min_duration: int = 300,
                     trigger_region: List[float] = None) -> dict:
    """Wait for a fixation that satisfies the given constraints.

    Use to check for stable fixation before presenting a stimulus, for
    example.

    Note
    ----
    Uses real-time data published by Pupil Capture's `Online Fixation
    Detector Plugin
    <https://docs.pupil-labs.com/developer/core/network-api/#fixation-messages>`_

    Parameters
    ----------
    max_dispersion : float, optional
        Maximum dispersion threshold in degrees of visual angle. In other
        words, how much spatial movement is allowed within a fixation?
        Pupil Capture allows manual selection of values from `0.01` to
        `4.91`. The default is `3.0`.
    min_duration : int, optional
        Minimum duration threshold in milliseconds. In other words, what is
        the minimum time required for gaze data to be within the dispersion
        threshold? Pupil Capture allows manual selection of values from
        `10` to `4000`. The default is `300`.
    trigger_region : list, optional
        World coordinates within which the fixation must fall to be valid.
        Defaults to ``[0.0, 0.0, 1.0, 1.0]``, which corresponds to the
        whole camera scene in normalised coordinates.

    Returns
    -------
    fixation : dict
        The triggering fixation.
    """
    # BUG FIX: avoid a mutable default argument ([0.0, 0.0, 1.0, 1.0] was
    # shared across calls); build the default region per call instead.
    if trigger_region is None:
        trigger_region = [0.0, 0.0, 1.0, 1.0]
    self.notify({
        'subject': 'start_plugin',
        'name': 'Fixation_Detector',
        'args': {'max_dispersion': max_dispersion,
                 'min_duration': min_duration}
    })
    s = self.subscribe_to_topic(topic='fixation')
    print('> Waiting for a fixation...')
    while True:
        _, fixation = self.recv_from_subscriber(s)
        if self._fixation_in_trigger_region(fixation, trigger_region):
            print('> Valid fixation detected...')
            return fixation
def _fixation_in_trigger_region(
self,
fixation: dict,
trigger_region: List[float] = [0.0, 0.0, 1.0, 1.0]) -> bool:
"""Return True if fixation is within trigger_region else False.
"""
x, y = fixation['norm_pos']
return (x > trigger_region[0] and x < trigger_region[2]
and y > trigger_region[1] and y < trigger_region[3])
def _luminance_jump(self,
current_frame: np.array,
previous_frame: np.array,
threshold: int) -> bool:
"""Detect an increase in luminance.
"""
return current_frame.mean() - previous_frame.mean() > threshold
def _stamp_light(self,
                 timestamp: float,
                 annotation: dict,
                 subscription: str) -> None:
    """Attach ``timestamp`` to ``annotation`` and send it."""
    print('> Light stamped on {} at {}'.format(subscription, timestamp))
    annotation['timestamp'] = timestamp
    self.send_annotation(annotation)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
201,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
201,
198,
37811,
201,
198,
9078,
489,
81,
13,
79,
929,
346,
201,
198,
2559,
18604,
201,
198,
201,
198,
32,
8265... | 2.092875 | 11,747 |
from ._loader import ParkLoader, ParkData
| [
6738,
47540,
29356,
1330,
3250,
17401,
11,
3250,
6601,
198
] | 4.2 | 10 |
import orchestrator
| [
11748,
28127,
1352,
628,
628
] | 4.6 | 5 |
from .base import (
preprocess_box_for_cv,
preprocess_box_for_dl,
load_part_model
) | [
6738,
764,
8692,
1330,
357,
198,
220,
220,
220,
662,
14681,
62,
3524,
62,
1640,
62,
33967,
11,
198,
220,
220,
220,
662,
14681,
62,
3524,
62,
1640,
62,
25404,
11,
198,
220,
220,
220,
3440,
62,
3911,
62,
19849,
198,
8
] | 2.261905 | 42 |
"""Authentication and authorization tools
"""
import os
from typing import Optional
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from fastapi.security.utils import get_authorization_scheme_param
from typing import Optional
from datetime import datetime,timedelta
from jose import JWTError, jwt
from pydantic import BaseModel
from ..main import config
from ..monitoring import logger
from .database import users_db,programmers_db
from ..model.utils import NotFoundError
oauth2_scheme = OAuth2PasswordBearer(tokenUrl='/login',auto_error=config.AUTH=="ENABLED")
class Token(BaseModel):
    """Authorization token response.

    Pydantic model returned by the login endpoint.
    """
    # Signed JWT access token (see create_access_token).
    access_token: str
    # Token scheme, e.g. "bearer"; optional.
    token_type: Optional[str]
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
    """Generate a signed JWT access token from ``data``.

    Args:
        data (dict): claims to encode (e.g. {"sub": email}).
        expires_delta (Optional[timedelta], optional): token lifetime;
            defaults to 15 minutes when not given.

    Returns:
        str: the encoded JWT.
    """
    payload = data.copy()
    # Fall back to a 15-minute lifetime when no delta is supplied.
    lifetime = expires_delta if expires_delta else timedelta(minutes=15)
    payload.update({"exp": datetime.utcnow() + lifetime})
    return jwt.encode(payload, config.SECRET_KEY, algorithm="HS256")
async def get_current_user(token: str = Depends(oauth2_scheme)):
    """Check authorization and get account email.

    Decodes the bearer JWT, looks the email up in ``users_db`` and returns
    it. Raises HTTP 401 when the token is invalid, the account is unknown,
    or the account is blacklisted. When ``config.AUTH`` is "DISABLED" the
    admin email is returned without any checks.
    """
    if config.AUTH == "DISABLED":
        return config.ADMIN_EMAIL
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="invalid credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(token, config.SECRET_KEY, algorithms=["HS256"])
        # The "sub" claim carries the account email.
        email: str = payload.get("sub")
        if email is None:
            raise credentials_exception
        try:
            user = await users_db.search_email(email)
        except NotFoundError as nfe:
            # Unknown account -> same 401 as a bad token (no user enumeration).
            raise credentials_exception
        if user.black_list:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Blacklisted credentials"
            )
        logger.debug(f"User Credentials used : {email}")
        return email
    except JWTError:
        raise credentials_exception
async def get_current_programmer(token: str = Depends(oauth2_scheme)):
    """Check authorization and get account email.

    Decodes the bearer JWT, looks the email up in ``programmers_db`` and
    returns it. Raises HTTP 401 when the token is invalid, the programmer
    is unknown, or the programmer is blacklisted. When ``config.AUTH`` is
    "DISABLED" the admin email is returned without any checks.
    """
    if config.AUTH == "DISABLED":
        return config.ADMIN_EMAIL
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="invalid credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(token, config.SECRET_KEY, algorithms=["HS256"])
        # The "sub" claim carries the account email.
        email: str = payload.get("sub")
        if email is None:
            raise credentials_exception
        try:
            programmer = await programmers_db.search_email(email)
        except NotFoundError:
            # Unknown account -> same 401 as a bad token.
            raise credentials_exception
        if programmer.black_list:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Blacklisted credentials"
            )
        logger.debug(f"Programmer Credentials used : {email}")
        return email
    except JWTError:
        raise credentials_exception
    # BUG FIX: removed an unreachable trailing `return email` that followed
    # the try/except block (every path above either returns or raises).
async def get_admin_programmer(token: str = Depends(oauth2_scheme)):
    """Check authorization and get account email, requiring admin rights.

    Decodes the bearer JWT, looks the email up in ``programmers_db`` and
    returns it. Raises HTTP 401 when the token is invalid, the programmer
    is unknown, or the programmer lacks admin privileges. When
    ``config.AUTH`` is "DISABLED" the admin email is returned unchecked.
    """
    if config.AUTH == "DISABLED":
        return config.ADMIN_EMAIL
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="invalid credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(token, config.SECRET_KEY, algorithms=["HS256"])
        # The "sub" claim carries the account email.
        email: str = payload.get("sub")
        if email is None:
            raise credentials_exception
        try:
            programmer = await programmers_db.search_email(email)
        except NotFoundError:
            # Unknown account -> same 401 as a bad token.
            raise credentials_exception
        if not programmer.admin:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Admin priveleges needed"
            )
        logger.debug(f"Admin Credentials used : {email}")
        return email
    except JWTError:
        raise credentials_exception
    # BUG FIX: removed an unreachable trailing `return email` that followed
    # the try/except block (every path above either returns or raises).
| [
37811,
47649,
3299,
290,
19601,
4899,
198,
37811,
198,
11748,
28686,
198,
6738,
19720,
1330,
32233,
198,
6738,
3049,
15042,
1330,
2129,
2412,
11,
14626,
16922,
11,
3722,
198,
6738,
3049,
15042,
13,
12961,
1330,
440,
30515,
17,
35215,
3856... | 2.447537 | 1,868 |
# Generated by Django 3.1.4 on 2021-01-11 03:52
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
513,
13,
16,
13,
19,
319,
33448,
12,
486,
12,
1157,
7643,
25,
4309,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.84375 | 32 |
import dash
import dash_html_components as html
import dash_core_components as dcc
from dash.dependencies import Input, Output, State
import dash_bootstrap_components as dbc
import numpy as np
from jsonschema import validate
import json
import yaml
import base64
from json_schema_to_dash_forms.forms import SchemaFormContainer
from pathlib import Path
import flask
import importlib.resources as pkg_resources
from .. import examples
| [
11748,
14470,
198,
11748,
14470,
62,
6494,
62,
5589,
3906,
355,
27711,
198,
11748,
14470,
62,
7295,
62,
5589,
3906,
355,
288,
535,
198,
6738,
14470,
13,
45841,
3976,
1330,
23412,
11,
25235,
11,
1812,
198,
11748,
14470,
62,
18769,
26418,... | 3.686441 | 118 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
使用 ffmpeg 截取视频片段并且重新拼接
使用方式:
提供文件格式如下:比如 input.txt
./input.mp4
00:01:00 00:02:00
00:04:00 00:08:00
"""
import os
import sys
CONCAT_FILE = '_concat.txt'
def remove(filepath_list):
    """Delete the given intermediate files plus the concat list file."""
    for target in [*filepath_list, CONCAT_FILE]:
        if os.path.exists(target):
            os.remove(target)
if __name__ == '__main__':
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
40477,
12,
23,
532,
9,
12,
198,
37811,
198,
45635,
18796,
101,
31246,
43913,
10545,
230,
103,
20998,
244,
164,
100,
228,
165,
95,
239,
31965,
229,
162,
10... | 1.568093 | 257 |
from logging import getLogger
from typing import TYPE_CHECKING, List, Optional
from pyknp import BList, Tag
from pyknp_eventgraph.builder import Builder
from pyknp_eventgraph.component import Component
from pyknp_eventgraph.event import Event, EventBuilder
from pyknp_eventgraph.helper import convert_mrphs_to_surf
if TYPE_CHECKING:
from pyknp_eventgraph.document import Document
logger = getLogger(__name__)
class Sentence(Component):
    """A sentence is a collection of events.

    Attributes:
        document (Document): A document that includes this sentence.
        sid (str): An original sentence ID.
        ssid (int): A serial sentence ID.
        blist (:class:`pyknp.knp.blist.BList`, optional): A list of bunsetsu-s.
        events (List[Event]): A list of events in this sentence.
    """

    @property
    def surf(self) -> str:
        """A surface string."""
        return convert_mrphs_to_surf(self.mrphs)

    @property
    def mrphs(self) -> str:
        """A tokenized surface string."""
        # Lazily computed and cached; `_mrphs` is presumably initialised to
        # None elsewhere (e.g. in __init__, not shown here) -- TODO confirm.
        if self._mrphs is None:
            self._mrphs = " ".join(m.midasi for m in self.blist.mrph_list())
        return self._mrphs

    @property
    def reps(self) -> str:
        """A representative string."""
        # Falls back to "<surface>/<surface>" when a morpheme has no repname.
        if self._reps is None:
            self._reps = " ".join(m.repname or f"{m.midasi}/{m.midasi}" for m in self.blist.mrph_list())
        return self._reps

    def to_dict(self) -> dict:
        """Convert this object into a dictionary."""
        return dict(sid=self.sid, ssid=self.ssid, surf=self.surf, mrphs=self.mrphs, reps=self.reps)

    def to_string(self) -> str:
        """Convert this object into a string."""
        return f"<Sentence, sid: {self.sid}, ssid: {self.ssid}, surf: {self.surf}>"
| [
6738,
18931,
1330,
651,
11187,
1362,
198,
6738,
19720,
1330,
41876,
62,
50084,
2751,
11,
7343,
11,
32233,
198,
198,
6738,
12972,
15418,
79,
1330,
347,
8053,
11,
17467,
198,
198,
6738,
12972,
15418,
79,
62,
15596,
34960,
13,
38272,
1330,... | 2.469761 | 711 |
# !/usr/bin/env python
# -*-coding:utf-8 -*-
# PROJECT : Python-Exercise
# Time :2020/12/19 15:59
# Warning :The Hard Way Is Easier
from typing import List
"""
给定一个整数数组,编写一个函数,找出索引m和n,只要将索引区间[m,n]的元素排好序,整个数组就是有序的。注意:n-m尽量最小,也就是说,找出符合条件的最短序列。函数返回值为[m,n],若不存在这样的m和n(例如整个数组是有序的),请返回[-1,-1]。
输入: [1,2,4,7,10,11,7,12,6,7,16,18,19]
输出: [3,9]
提示:
0 <= len(array) <= 1000000
"""
# TODO 前提假设:数列为递增
if __name__ == '__main__':
l = [5, 3, 1, 7, 9]
s = Solution()
print(s.subSort(l))
| [
2,
5145,
14,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
66,
7656,
25,
40477,
12,
23,
532,
9,
12,
198,
2,
21965,
23680,
220,
220,
220,
1058,
11361,
12,
3109,
23697,
198,
2,
3862,
220,
220,
220,
220,
220,
220,
27332,
... | 1.054622 | 476 |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import payload_pb2 as payload__pb2
| [
2,
2980,
515,
416,
262,
308,
49,
5662,
11361,
8435,
17050,
13877,
13,
8410,
5626,
48483,
0,
198,
11748,
1036,
14751,
198,
198,
11748,
21437,
62,
40842,
17,
355,
21437,
834,
40842,
17,
628,
628
] | 3.457143 | 35 |
# Standard library imports
# /
# Third party imports
import pandas as pd
# Local application imports
from scifin.timeseries.timeseries import build_from_csv, CatTimeSeries, multi_plot
# Build a time series from a CSV file online
ts1 = build_from_csv(filepath_or_buffer='https://raw.githubusercontent.com/selva86/datasets/master/a10.csv',
parse_dates=['date'], index_col='date', unit="Number of sales", name="Sales_TimeSeries")
# Define min and max values of a range
range_min = 10
range_max = 20
# Create a DataFrame with categories
cts2_idx = ts1.data.index
cts2_vals = []
for x in range(ts1.nvalues):
if range_min <= ts1.data.values[x] <= range_max:
cts2_vals.append('In Range')
else:
cts2_vals.append('Out of Range')
cts2_df = pd.DataFrame(index=cts2_idx, data=cts2_vals)
# Build a CatTimeSeries from it
cts2 = CatTimeSeries(cts2_df)
# Plot them together
multi_plot([ts1, cts2])
| [
2,
8997,
5888,
17944,
198,
2,
1220,
198,
198,
2,
10467,
2151,
17944,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
2,
10714,
3586,
17944,
198,
6738,
629,
361,
259,
13,
22355,
10640,
13,
22355,
10640,
1330,
1382,
62,
6738,
62,
4066... | 2.588398 | 362 |
# -*- coding: utf-8 -*-
from cleo.inputs import ListInput, InputDefinition, InputArgument, InputOption
from .. import CleoTestCase
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
1190,
78,
13,
15414,
82,
1330,
7343,
20560,
11,
23412,
36621,
11,
23412,
28100,
1713,
11,
23412,
19722,
198,
198,
6738,
11485,
1330,
3779,
78,
14402,
20448,
6... | 3.045455 | 44 |
"""The sensor tests for the tado platform."""
from homeassistant.const import STATE_ON
from .util import async_init_integration
async def test_home_create_binary_sensors(hass):
    """Test creation of home binary sensors."""
    await async_init_integration(hass)

    sensor_state = hass.states.get("binary_sensor.wr1_connection_state")
    assert sensor_state.state == STATE_ON
| [
37811,
464,
12694,
5254,
329,
262,
256,
4533,
3859,
526,
15931,
198,
198,
6738,
1363,
562,
10167,
13,
9979,
1330,
35454,
62,
1340,
198,
198,
6738,
764,
22602,
1330,
30351,
62,
15003,
62,
18908,
1358,
628,
198,
292,
13361,
825,
1332,
6... | 3.066116 | 121 |
import json
import sys
from collections import defaultdict
from difflib import ndiff
from pathlib import Path
from pprint import pformat
from typing import List
from conmon.utils import shorten
if __name__ == "__main__":
test_main(*sys.argv[1:2])
| [
11748,
33918,
198,
11748,
25064,
198,
6738,
17268,
1330,
4277,
11600,
198,
6738,
814,
8019,
1330,
299,
26069,
198,
6738,
3108,
8019,
1330,
10644,
198,
6738,
279,
4798,
1330,
279,
18982,
198,
6738,
19720,
1330,
7343,
198,
198,
6738,
369,
... | 3.350649 | 77 |
from logger import logger
logging = logger.getChild('sessions.twitter.buffers.local_trends')
import output
import threading
from trends import Trends
| [
6738,
49706,
1330,
49706,
201,
198,
6404,
2667,
796,
49706,
13,
1136,
16424,
10786,
82,
6202,
13,
6956,
13,
36873,
364,
13,
12001,
62,
83,
10920,
82,
11537,
201,
198,
201,
198,
11748,
5072,
201,
198,
11748,
4704,
278,
201,
198,
201,
... | 3.244898 | 49 |
import numpy as np
import os
# import matplotlib.pyplot as plt
import scipy.constants as c
if __name__ == '__main__':
main()
| [
11748,
299,
32152,
355,
45941,
198,
11748,
28686,
198,
2,
220,
1330,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
629,
541,
88,
13,
9979,
1187,
355,
269,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
... | 2.64 | 50 |
# For tensorflow version 1.x
import warnings
warnings.filterwarnings('ignore', category=Warning)
#warnings.filterwarnings('ignore', category=DeprecationWarning)
#warnings.filterwarnings('ignore', category=FutureWarning) | [
2,
1114,
11192,
273,
11125,
2196,
352,
13,
87,
198,
11748,
14601,
198,
40539,
654,
13,
24455,
40539,
654,
10786,
46430,
3256,
6536,
28,
20361,
8,
198,
2,
40539,
654,
13,
24455,
40539,
654,
10786,
46430,
3256,
6536,
28,
12156,
8344,
34... | 3.65 | 60 |
import setuptools
# Use the README as the long description shown on PyPI.
with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="Flask2Neo4J",
    version="0.1a1",
    author="Ganggas95",
    author_email="subhannizar25@amail.com",
    description="""Extension Flask for integration with neo4j graph database""",
    long_description=long_description,
    license="MIT",
    long_description_content_type="text/markdown",
    url="https://github.com/ganggas95/flask2neo4j",
    packages=setuptools.find_packages(),
    # BUG FIX: the keyword is ``py_modules`` (plural). ``py_module`` is not
    # a recognised setup() argument, so the single-module distribution was
    # silently ignored and flask2neo4j.py was never installed.
    py_modules=["flask2neo4j"],
    include_package_data=True,
    install_requires=[
        'Flask >= 1.0',
        'py2neo >= 3.0',
        "prompt_toolkit<2.1,>=2.0.7"
    ],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Database"
    ],
)
11748,
900,
37623,
10141,
198,
198,
4480,
1280,
7203,
15675,
11682,
13,
9132,
1600,
366,
81,
4943,
355,
277,
71,
25,
198,
220,
220,
220,
890,
62,
11213,
796,
277,
71,
13,
961,
3419,
198,
198,
2617,
37623,
10141,
13,
40406,
7,
198,
... | 2.396465 | 396 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import argparse
import os
import sys
import time
try:
from urllib2 import HTTPError, URLError, urlopen
except ImportError: # For Py3 compatibility
from urllib.error import HTTPError, URLError
from urllib.request import urlopen
def DownloadUrl(url, output_file):
    """Download url into output_file, retrying with exponential backoff."""
    CHUNK_SIZE = 4096
    num_retries = 3
    retry_wait_s = 5  # Doubled at each retry.

    while True:
        try:
            sys.stdout.write('Downloading %s...' % url)
            sys.stdout.flush()
            response = urlopen(url)
            total_bytes = 0
            # Stream the body in fixed-size chunks until EOF.
            for chunk in iter(lambda: response.read(CHUNK_SIZE), b''):
                output_file.write(chunk)
                total_bytes += len(chunk)
            if total_bytes == 0:
                raise URLError("empty response")
            print(' Done.')
            return
        except URLError as e:
            sys.stdout.write('\n')
            print(e)
            # Give up when retries are exhausted or on a hard 404.
            if num_retries == 0 or isinstance(e, HTTPError) and e.code == 404:
                raise e
            num_retries -= 1
            print('Retrying in %d s ...' % retry_wait_s)
            sys.stdout.flush()
            time.sleep(retry_wait_s)
            retry_wait_s *= 2
if __name__ == '__main__':
sys.exit(main())
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
15069,
357,
66,
8,
2321,
383,
18255,
1505,
46665,
13,
1439,
2489,
10395,
13,
198,
2,
5765,
286,
428,
2723,
2438,
318,
21825,
416,
257,
347,
10305,
12,
7635,
5964,
326,
460,
307,
... | 2.083113 | 758 |
"""MIT License
Copyright (c) 2022 Daniel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from yt_dlp import YoutubeDL
from mbot import LOGGER,LOG_GROUP
from requests import get
from asgiref.sync import sync_to_async
@sync_to_async
@sync_to_async
@sync_to_async
@sync_to_async
def fetch_tracks(dz, item_type, item_id):
    """
    Fetches tracks from the provided URL.

    Args (NOTE(review): ``dz`` looks like a deezer-python client -- confirm):
        dz: Deezer API client exposing get_playlist/get_album/get_track.
        item_type: one of 'playlist', 'album' or 'track'.
        item_id: the Deezer ID of the item.

    Returns:
        list of dicts with keys name / artist / album / playlist_num /
        cover / deezer_id / thumb (playlist & album branches) / duration.
    """
    songs_list = []
    offset = 0  # 1-based track position is offset + 1
    if item_type == 'playlist':
        get_play = dz.get_playlist(item_id)
        items = get_play.tracks
        for item in items:
            track_name = item.title
            track_artist = item.artist.name
            track_album = item.album.title
            cover = item.album.cover_xl
            thumb = item.album.cover_small
            deezer_id = item.id
            songs_list.append({"name": track_name, "artist": track_artist, "album": track_album,"playlist_num": offset + 1,
                               "cover": cover,"deezer_id": deezer_id,"thumb":thumb,"duration":item.duration})
            offset += 1
            # Defensive stop once every item has been consumed.
            if len(items) == offset:
                break
    elif item_type == 'album':
        get_al = dz.get_album(item_id)
        # Album-level metadata is shared by every track in the album.
        track_album = get_al.title
        cover = get_al.cover_xl
        thumb = get_al.cover_small
        items = get_al.tracks
        for item in items:
            track_name = item.title
            track_artist = item.artist.name
            deezer_id = item.id
            songs_list.append({"name": track_name, "artist": track_artist, "album": track_album,"playlist_num": offset + 1,
                               "cover": cover,"deezer_id": deezer_id,"thumb": thumb,"duration": item.duration})
            offset += 1
            # Defensive stop once every item has been consumed.
            if len(items) == offset:
                break
    elif item_type == 'track':
        get_track = dz.get_track(item_id)
        songs_list.append({"name": get_track.title, "artist": get_track.artist.name, "album": get_track.album.title,"playlist_num": offset + 1,
                           "cover": get_track.album.cover_xl,"deezer_id": get_track.id,"thumb": get_track.album.cover_small,"duration": get_track.duration})
    return songs_list
@sync_to_async
def fetch_spotify_track(client,item_id):
    """
    Fetch a single Spotify track's metadata as a dict.

    Args:
        client: Spotify Web API client (spotipy-style; exposes .track()
            and .artist() -- TODO confirm).
        item_id: the Spotify track ID.

    Returns:
        dict with keys name / artist / album / year / num_tracks / num /
        playlist_num / cover / genre / deezer_id (the Spotify ID is reused
        under this key for downstream compatibility).
    """
    item = client.track(track_id=item_id)
    track_name = item.get("name")
    album_info = item.get("album")
    # Join all contributing artists into a single comma-separated string.
    track_artist = ", ".join([artist['name'] for artist in item['artists']])
    if album_info:
        track_album = album_info.get('name')
        # Year is the first 4 characters of the release date, when present.
        track_year = album_info.get('release_date')[:4] if album_info.get('release_date') else ''
        album_total = album_info.get('total_tracks')
    track_num = item['track_number']
    deezer_id = item_id
    cover = item['album']['images'][0]['url'] if len(item['album']['images']) > 0 else None
    # NOTE(review): this conditional calls client.artist() twice (once in the
    # test, once for the value), i.e. two network round-trips per track.
    genre = client.artist(artist_id=item['artists'][0]['uri'])['genres'][0] if len(client.artist(artist_id=item['artists'][0]['uri'])['genres']) > 0 else ""
    offset = 0
    return {
        "name": track_name,
        "artist": track_artist,
        "album": track_album,
        "year": track_year,
        "num_tracks": album_total,
        "num": track_num,
        "playlist_num": offset + 1,
        "cover": cover,
        "genre": genre,
        "deezer_id": deezer_id,
    }
@sync_to_async
@sync_to_async | [
37811,
36393,
13789,
198,
198,
15269,
357,
66,
8,
33160,
7806,
198,
198,
5990,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11,
284,
597,
1048,
16727,
257,
4866,
198,
1659,
428,
3788,
290,
3917,
10314,
3696,
357,
1169,
366,
25423,
123... | 2.379554 | 1,839 |
from __future__ import unicode_literals
import os
from django.template import Context, Engine
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango20Warning
from ..utils import ROOT, setup
@ignore_warnings(category=RemovedInDjango20Warning)
@ignore_warnings(category=RemovedInDjango20Warning)
| [
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
11748,
28686,
198,
198,
6738,
42625,
14208,
13,
28243,
1330,
30532,
11,
7117,
198,
6738,
42625,
14208,
13,
9288,
1330,
17427,
14402,
20448,
11,
8856,
62,
40539,
654,
... | 3.380952 | 105 |
from encoder import decode_unsigned
from encoder import decode_float
import sys
FLOAT_SIZE = 8
INT_SIZE = 4
LONG_SIZE = 8
| [
6738,
2207,
12342,
1330,
36899,
62,
43375,
198,
6738,
2207,
12342,
1330,
36899,
62,
22468,
198,
11748,
25064,
198,
198,
3697,
46,
1404,
62,
33489,
796,
807,
198,
12394,
62,
33489,
796,
604,
198,
43,
18494,
62,
33489,
796,
807,
198
] | 3 | 41 |
#!/usr/bin/env python3
'''Drive differentially with an Xbox controller.'''
from wpilib.command import Command
class DriveForward(Command):
    '''Drive differentially with an Xbox controller.'''

    def __init__(self, robot):
        '''Save the robot object and pull in the drivetrain subsystem.'''
        super().__init__()
        self.robot = robot
        # Declare the drivetrain requirement so the scheduler interrupts
        # any other command using it.
        self.requires(self.robot.drivetrain)

    def initialize(self):
        """Called just before this Command runs the first time"""

    def execute(self):
        """Called repeatedly when this Command is scheduled to run."""
        self.robot.drivetrain.driveForward()

    def isFinished(self):
        """Make this return true when this Command no longer needs to
        run execute()"""
        return False  # Runs until interrupted

    def end(self):
        """Called once after isFinished returns true"""
        self.robot.drivetrain.stopDriving()

    def interrupted(self):
        """Called when another command which requires one or more of
        the same subsystems is scheduled to run"""
        self.end()
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
7061,
6,
24825,
1180,
1927,
351,
281,
9445,
10444,
2637,
7061,
198,
198,
6738,
266,
79,
22282,
13,
21812,
1330,
9455,
198,
198,
4871,
9974,
39746,
7,
21575,
2599,
198,
220,
220,
... | 2.868421 | 380 |
import numpy as np
import cv2
# Haar cascade classifiers bundled with OpenCV for face and eye detection.
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')

# Default webcam.
cap = cv2.VideoCapture(0)
count=1  # counter kept from previously commented-out eye-crop saving code
while True:
    ret, img = cap.read()
    # Haar cascades operate on grayscale images.
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Positional args 1.2 / 5 are presumably scaleFactor / minNeighbors --
    # confirm against the cv2.CascadeClassifier.detectMultiScale signature.
    faces = face_cascade.detectMultiScale(gray, 1.2, 5)

    for (x,y,w,h) in faces:
        # Blue box around each detected face.
        cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2)
        # Restrict the eye search to the detected face region.
        roi_gray = gray[y:y+h, x:x+w]
        roi_color = img[y:y+h, x:x+w]
        eyes = eye_cascade.detectMultiScale(roi_gray)
        for (ex,ey,ew,eh) in eyes:
            # Green box around each detected eye (coords relative to face ROI).
            cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,255,0),2)

    cv2.imshow('img',img)
    # 27 is the Esc key code: press Esc to quit.
    k = cv2.waitKey(30) & 0xff
    if k == 27:
        break

cap.release()
cv2.destroyAllWindows()
| [
11748,
299,
32152,
355,
45941,
198,
11748,
269,
85,
17,
198,
198,
2550,
62,
66,
28966,
796,
269,
85,
17,
13,
34,
28966,
9487,
7483,
10786,
3099,
5605,
28966,
62,
8534,
1604,
558,
62,
12286,
13,
19875,
11537,
198,
25379,
62,
66,
2896... | 1.829314 | 539 |
print("Insert a number")
s = input()
print("Insert a list of letters")
lis = input().split(',')
# Membership test prints True/False, matching the original if/else output.
print(s in lis)
| [
4798,
7203,
44402,
257,
1271,
4943,
198,
82,
796,
5128,
3419,
198,
4798,
7203,
44402,
257,
1351,
286,
7475,
4943,
198,
27999,
796,
5128,
22446,
35312,
7,
3256,
11537,
198,
361,
264,
287,
300,
271,
25,
198,
220,
220,
220,
3601,
7,
17... | 2.642857 | 56 |
from app import app
from flask import render_template, abort
import json
@app.route('/')
@app.route('/health') | [
6738,
598,
1330,
598,
198,
6738,
42903,
1330,
8543,
62,
28243,
11,
15614,
198,
11748,
33918,
198,
198,
31,
1324,
13,
38629,
10786,
14,
11537,
198,
198,
31,
1324,
13,
38629,
10786,
14,
13948,
11537
] | 3.2 | 35 |
from genetic import *
if __name__ == '__main__':
    # (Earlier commented-out experiments -- DEBUG runs, a sin(10x) target,
    # rank-selection sweeps, learning-curve and film generation -- removed
    # for clarity.)
    # Fitness target: an exponentially growing sinusoid.
    target = lambda x: 5*math.sin(10*x)*math.exp(x/3)
    # target = lambda x: math.sin(10 * x)
    populations_ = genetic_algorithm(target, population_size=100, unit_length=21, epochs=1000,
                                     selection_type='rank', default_std=5, save_king=True, p_c=.65, metric='int')
    learning_curve(populations_, filename='symmetric_big_21.png', inverse=True)
    # Reduce each population to its census before rendering the animation.
    populations_ = list(map(lambda x: x.census(), populations_))
    make_film(target, populations_, filename='symmetric_big_21.mp4', fps=5, resolution=(1280, 720), step=1, top_n=5,
              number_of_frames=60, save_ram=True, id='_sym_slow_', read_only=False)
6738,
8513,
1330,
1635,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
1303,
4096,
62,
20786,
13,
30531,
796,
6407,
198,
220,
220,
220,
1303,
2315,
3419,
198,
220,
220,
220,
1303,
2496,
796,
... | 2.302115 | 662 |
"""
Utilities Tests
---------------
"""
from wikirec import utils
| [
37811,
198,
18274,
2410,
30307,
198,
24305,
198,
37811,
198,
198,
6738,
47145,
557,
66,
1330,
3384,
4487,
628,
628
] | 3.5 | 20 |
#
# MIT License
# Copyright (c) 2021 MjTs-140914
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# '''
# Thanks to the following people who have contributed to this project:
# leus
# MjTs140914
# the4chancup
# Atvaark
# Suat Cadgas/sxsxsx
# themex
# zlac
# '''
import bpy, os, bpy.utils.previews, bpy_extras, shutil, bmesh, re, math
from struct import pack,unpack
from bpy.props import (EnumProperty, CollectionProperty, IntProperty, StringProperty, BoolProperty, FloatProperty, FloatVectorProperty)
from Tools import FmdlFile, Ftex, IO, PesFoxShader, PesFoxXML, PesEnlighten, PesScarecrow, PesStaff
from xml.dom import minidom
from mathutils import Vector
bl_info = {
"name": "PES Stadium Exporter",
"description": "eFootbal PES2021 PES Stadium Exporter",
"author": "MjTs-140914 || the4chancup",
"version": (0, 6, 6),
"blender": (2, 90, 0),
"location": "Under Scene Tab",
"warning": "This addon is still in development.",
"wiki_url": "https://github.com/MjTs140914/PES_Stadium_Exporter/wiki",
"tracker_url": "https://github.com/MjTs140914/PES_Stadium_Exporter/issues",
"category": "System"
}
(major, minor, build) = bpy.app.version
icons_collections = {}
myver="v0.6.6b"
AddonsPath = str()
AddonsPath = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
base_file_blend = '%s\\addons\\Tools\\Gzs\\base_file.blend' % AddonsPath
texconvTools = '"%s\\addons\\Tools\\Gzs\\texconv.exe"' % AddonsPath
FtexTools ='"%s\\addons\\Tools\\Gzs\\FtexTools.exe"' % AddonsPath
GZSPATH = '"%s\\addons\\Tools\\Gzs\\GzsTool.exe"' % AddonsPath
foxTools = '"%s\\addons\\Tools\\Gzs\\FoxTool\\FoxTool.exe"' % AddonsPath
icons_dir = '%s\\addons\\Tools\\Gzs\\icons' % AddonsPath
xml_dir = '%s\\addons\\Tools\\Gzs\\xml\\' % AddonsPath
lightFxPath = '%s\\addons\\Tools\\Gzs\\' % AddonsPath
baseStartupFile = '%s\\addons\\Tools\\Gzs\\startup.blend' % AddonsPath
startupFile = '%sconfig\\startup.blend'%AddonsPath[:-7]
EnlightenPath="%s\\addons\\Tools\\Gzs\\EnlightenOutput\\" % AddonsPath
commonfile = "%s\\addons\\Tools\\Gzs\\xml\\scarecrow\\common" % AddonsPath
ob_id = None
group_list=["MAIN", "TV", "AUDIAREA", "FLAGAREA", "STAFF", "SCARECROW", "PITCH2021", "CHEER1", "CHEER2", "LIGHTS", "AD"]
parent_main_list=["MESH_back1","MESH_back2","MESH_back3",
"MESH_center1","MESH_center2","MESH_center3",
"MESH_front1", "MESH_front2","MESH_front3",
"MESH_left1","MESH_left2","MESH_left3",
"MESH_right1","MESH_right2","MESH_right3",
"MESH_Pitch","MESH_front1_demo","MESH_front1_game",
"MESH_center1_snow","MESH_center1_rain","MESH_center1_tifo",
"MESH_ad_acl","MESH_ad_cl","MESH_ad_el","MESH_ad_normal",
"MESH_ad_olc","MESH_ad_sc",
"MESH_cheer_back1_h_a1","MESH_cheer_front1_h_a1", "MESH_cheer_left1_h_a1", "MESH_cheer_right1_h_a1",
"MESH_cheer_back1_h_a2","MESH_cheer_front1_h_a2", "MESH_cheer_left1_h_a2", "MESH_cheer_right1_h_a2",
]
main_list=["back1","back2","back3",
"center1","center2","center3",
"front1", "front2","front3",
"left1","left2","left3",
"right1","right2","right3",
"front1_demo","front1_game","center1_snow","center1_rain","center1_tifo",
"MESH_CROWD","MESH_FLAGAREA","Pitch",
"TV_Large_Left","TV_Large_Right","TV_Large_Front","TV_Large_Back",
"TV_Small_Left","TV_Small_Right","TV_Small_Front","TV_Small_Back",
"L_FRONT","L_RIGHT","L_LEFT","L_BACK",
"H_FRONT","H_RIGHT","H_LEFT","H_BACK",
"F_FRONT","F_RIGHT","F_LEFT","F_BACK",
"ad_acl","ad_cl","ad_el","ad_normal",
"ad_olc","ad_sc", "LightBillboard", "LensFlare", "Halo",
"cheer_back1_h_a1","cheer_front1_h_a1", "cheer_left1_h_a1", "cheer_right1_h_a1",
"cheer_back1_h_a2","cheer_front1_h_a2", "cheer_left1_h_a2", "cheer_right1_h_a2",
"Staff Coach","Steward", "Staff Walk","Ballboy","Cameraman Crew","Staff Common"
]
part_export=[("MAIN","MAIN","MAIN"),
("SCARECROW","SCARECROW","SCARECROW"),
("TV","TV","TV"),
("PITCH2021","PITCH2021","PITCH2021"),
("STAFF","STAFF","STAFF"),
("CHEER1","CHEER1","CHEER1"),
("CHEER2","CHEER2","CHEER2"),
("FLAGAREA","FLAGAREA","FLAGAREA"),
("AUDIAREA","AUDIAREA","AUDIAREA"),
("LIGHTS","LIGHTS","LIGHTS"),
("AD","AD","AD"),
]
crowd_part=['C_front1','C_front2','C_front3',
"C_back1","C_back2","C_back3",
"C_left1","C_left2","C_left3",
"C_right1","C_right2","C_right3"
]
crowd_side={0: 'C_front',
1: 'C_back',
2: 'C_left',
3: 'C_right',
}
flags_part=['F_front1','F_front2','F_front3',
"F_back1","F_back2","F_back3",
"F_left1","F_left2","F_left3",
"F_right1","F_right2","F_right3"
]
crowd_part_type=[0x00010000,0x00010100,0x00010200,
0x00010001,0x00010101,0x00010201,
0x00010002,0x00010102,0x00010202,
0x00010003,0x00010103,0x00010203
]
tvdatalist=[0x02D72E00,0x02D730A0,0x02D73340,
0x02D73650,0x02D73490,0x02D72D20,
0x02D72FC0,0x02D73260,0x02D73570,
0x02D73810,
]
light_sidelist=[]
timeMode=[("df","DAY FINE","DAY FINE"),
("dr","DAY RAINY","DAY RAINY"),
("nf","NIGHT FINE","NIGHT FINE"),
("nr","NIGHT RAINY","NIGHT RAINY")
]
parent_list=[('MESH_back1','MESH_back1','MESH_back1'),
('MESH_back2','MESH_back2','MESH_back2'),
('MESH_back3','MESH_back3','MESH_back3'),
('MESH_center1','MESH_center1','MESH_center1'),
('MESH_center2','MESH_center2','MESH_center2'),
('MESH_center3','MESH_center3','MESH_center3'),
('MESH_front1','MESH_front1','MESH_front1'),
('MESH_front2','MESH_front2','MESH_front2'),
('MESH_front3','MESH_front3','MESH_front3'),
('MESH_left1','MESH_left1','MESH_left1'),
('MESH_left2','MESH_left2','MESH_left2'),
('MESH_left3','MESH_left3','MESH_left3'),
('MESH_right1','MESH_right1','MESH_right1'),
('MESH_right2','MESH_right2','MESH_right2'),
('MESH_right3','MESH_right3','MESH_right3'),
('MESH_CROWD','MESH_CROWD','MESH_CROWD'),
('MESH_PITCH','MESH_PITCH','MESH_PITCH'),
('MESH_TV','MESH_TV','MESH_TV')
]
datalist=["back1","back2","back3",
"center1","center2","center3",
"front1","front2","front3",
"left1","left2","left3",
"right1","right2","right3",
"center1_snow", "center1_rain", "center1_tifo",
"front1_game","front1_demo"
]
StadiumModel=["StadiumModel_B1","StadiumModel_B2","StadiumModel_B3",
"StadiumModel_C1","StadiumModel_C2","StadiumModel_C3",
"StadiumModel_F1","StadiumModel_F2","StadiumModel_F3",
"StadiumModel_L1","StadiumModel_L2","StadiumModel_L3",
"StadiumModel_R1","StadiumModel_R2","StadiumModel_R3",
"StadiumModel_C1_ForSnow", "StadiumModel_C1_rain", "StadiumModel_C1_tifo",
"StadiumModel_F1_game","StadiumModel_F1_demo",
]
StadiumKind=[0,1,2,
0,1,2,
0,1,2,
0,1,2,
0,1,2,
0,0,2,
14,15
]
StadiumDir=[1,1,1,
4,4,0,
0,0,0,
2,2,2,
3,3,3,
4,4,0,
4,4
]
transformlist=[0x02D72C40,0x02D72D20,0x02D72E00,
0x02D72EE0,0x02D72FC0,0x02D730A0,
0x02D73180,0x02D73260,0x02D73340,
0x02D73420,0x02D73570,0x02D73650,
0x02D73730,0x02D73810,0x02D73490,
0xC11921D0,0x03173880,0x031738E4,
0x03173E30,0x03173FF0,
]
TransformEntity=[0x03172D20,0x03172EE0,0x03172EE2,
0x031730A0,0x031730A2,0x03173260,
0x03173420,0x03173650,0x03173750,
0x03173810,0x03173960,0x03173970,
0x03173B20,0x03173CE0,0x03173CE5,
0xC12714B0,0xC12714B2,0x03173B3A,
0x03173EA0,0x03174060,
]
shearTransform=[0x03173F10,0x03173D50,0x03173D60,
0x03173B90,0x03173B95,0x031739D0,
0x031732CB,0x031732D0,0x031732D2,
0x03172D90,0x03172F50,0x03172F52,
0x03174140,0x03173180,0x03173182,
0x00000000,0xB13C0250,0x03173D90,
0x03173490,0x031736C0,
]
pivotTransform=[0x03173F80,0x03173DC0,0x03173DC2,
0x03173C00,0x03173C01,0x03173A40,
0x031738F0,0x03173340,0x03173342,
0x03172E00,0x03172FC0,0x03172FC2,
0x03173110,0x03174290,0x03174292,
0x00000000,0x00000000,0x03173FE6,
0x03173570,0x03173730,
]
cheerhexKey=[0x00000200,0x00000400,0x00000600,0x00000800
]
cheerhextfrm=[0x00000300,0x00000500,0x00000700,0x00000900
]
crowd_type = {'C1-UltraHome':0.9999, 'C2-UltraHome':2.9999, 'C3-UltraHome':4.9999,
'C1-HardcoreHome':0.8999, 'C2-HardcoreHome':2.8999, 'C3-HardcoreHome':4.8999,
'C1-HeavyHome':0.8599, 'C2-HeavyHome':2.8599, 'C3-HeavyHome':4.8599,
'C1-PopHome':0.7999, 'C2-PopHome':2.7999, 'C3-PopHome':4.7999,
'C1-FolkHome':0.6999, 'C2-FolkHome':2.6999, 'C3-FolkHome':4.6999,
'C1-Neutral':0.5, 'C2-Neutral':2.5, 'C3-Neutral':4.5,
'C1-FolkAway':0.4999, 'C2-FolkAway':2.4999, 'C3-FolkAway':4.4999,
'C1-PopAway':0.3999,'C2-PopAway':2.3999,'C3-PopAway':4.3999,
'C1-HeavyAway':0.2999, 'C2-HeavyAway':2.2999, 'C3-HeavyAway':4.2999,
'C1-HardcoreAway':0.1999, 'C2-HardcoreAway':2.1999, 'C3-HardcoreAway':4.1999,
'C1-UltraAway':0.0999, 'C2-UltraAway':2.0999, 'C3-UltraAway':4.0999
}
crowd_typedict={0:'C1-UltraHome', 11:'C2-UltraHome', 22:'C3-UltraHome',
1:'C1-HardcoreHome', 12:'C2-HardcoreHome', 23:'C3-HardcoreHome',
2:'C1-HeavyHome', 13:'C2-HeavyHome', 24:'C3-HeavyHome',
3:'C1-PopHome', 14:'C2-PopHome', 25:'C3-PopHome',
4:'C1-FolkHome', 15:'C2-FolkHome', 26:'C3-FolkHome',
5:'C1-Neutral', 16:'C2-Neutral', 27:'C3-Neutral',
6:'C1-FolkAway', 17:'C2-FolkAway', 28:'C3-FolkAway',
7:'C1-PopAway', 18:'C2-PopAway', 29:'C3-PopAway',
8:'C1-HeavyAway', 19:'C2-HeavyAway', 30:'C3-HeavyAway',
9:'C1-HardcoreAway', 20:'C2-HardcoreAway', 31:'C3-HardcoreAway',
10:'C1-UltraAway', 21:'C2-UltraAway', 32:'C3-UltraAway'
}
behavior0=[('C1-UltraHome', 'C1-UltraHome', 'Stance Type : Normal'),
('C1-HardcoreHome', 'C1-HardcoreHome', 'Stance Type : Normal'),
('C1-HeavyHome', 'C1-HeavyHome', 'Stance Type : Normal'),
('C1-PopHome', 'C1-PopHome', 'Stance Type : Normal'),
('C1-FolkHome', 'C1-FolkHome', 'Stance Type : Normal'),
('C1-Neutral', 'C1-Neutral', 'Stance Type : Normal'),
('C1-FolkAway', 'C1-FolkAway', 'Stance Type : Normal'),
('C1-PopAway', 'C1-PopAway', 'Stance Type : Normal'),
('C1-HeavyAway', 'C1-HeavyAway', 'Stance Type : Normal'),
('C1-HardcoreAway', 'C1-HardcoreAway', 'Stance Type : Normal'),
('C1-UltraAway', 'C1-UltraAway', 'Stance Type : Normal'),
]
behavior1=[('C2-UltraHome', 'C2-UltraHome', 'Stance Type : Standing Non-chair'),
('C2-HardcoreHome', 'C2-HardcoreHome', 'Stance Type : Standing Non-chair'),
('C2-HeavyHome', 'C2-HeavyHome', 'Stance Type : Standing Non-chair'),
('C2-PopHome', 'C2-PopHome', 'Stance Type : Standing Non-chair'),
('C2-FolkHome', 'C2-FolkHome', 'Stance Type : Standing Non-chair'),
('C2-Neutral', 'C2-Neutral', 'Stance Type : Standing Non-chair'),
('C2-FolkAway', 'C2-FolkAway', 'Stance Type : Standing Non-chair'),
('C2-PopAway', 'C2-PopAway', 'Stance Type : Standing Non-chair'),
('C2-HeavyAway', 'C2-HeavyAway', 'Stance Type : Standing Non-chair'),
('C2-HardcoreAway', 'C2-HardcoreAway', 'Stance Type : Standing Non-chair'),
('C2-UltraAway', 'C2-UltraAway', 'Stance Type : Standing Non-chair'),
]
behavior2=[('C3-UltraHome', 'C3-UltraHome', 'Stance Type : Standing with Chair'),
('C3-HardcoreHome', 'C3-HardcoreHome', 'Stance Type : Standing with Chair'),
('C3-HeavyHome', 'C3-HeavyHome', 'Stance Type : Standing with Chair'),
('C3-PopHome', 'C3-PopHome', 'Stance Type : Standing with Chair'),
('C3-FolkHome', 'C3-FolkHome', 'Stance Type : Standing with Chair'),
('C3-Neutral', 'C3-Neutral', 'Stance Type : Standing with Chair'),
('C3-FolkAway', 'C3-FolkAway', 'Stance Type : Standing with Chair'),
('C3-PopAway', 'C3-PopAway', 'Stance Type : Standing with Chair'),
('C3-HeavyAway', 'C3-HeavyAway', 'Stance Type : Standing with Chair'),
('C3-HardcoreAway', 'C3-HardcoreAway', 'Stance Type : Standing with Chair'),
('C3-UltraAway', 'C3-UltraAway', 'Stance Type : Standing with Chair')
]
parentlist, shaders=[],[]
L_Side=["back","front","left","right"
]
L_P_List=["L_BACK",
"L_FRONT",
"L_LEFT",
"L_RIGHT"
]
lfx_tex_list=[("tex_star_00.ftex","00 - tex_star_00","tex_star_00"),
("tex_star_01.ftex","01 - tex_star_01","tex_star_01"),
("tex_star_02.ftex","02 - tex_star_02","tex_star_02"),
("tex_star_03.ftex","03 - tex_star_03","tex_star_03"),
("tex_star_04.ftex","04 - tex_star_04","tex_star_04"),
("tex_star_05_alp.ftex","05 - tex_star_05","tex_star_05_alp"),
("tex_star_07_alp.ftex","07 - tex_star_07","tex_star_07_alp"),
("tex_star_08_alp.ftex","08 - tex_star_08","tex_star_08_alp"),
("tex_star_20.ftex","20 - tex_star_20","tex_star_20"),
("tex_star_21.ftex","21 - tex_star_21","tex_star_21"),
("tex_star_22.ftex","22 - tex_star_22","tex_star_22"),
("tex_star_23.ftex","23 - tex_star_23","tex_star_23"),
("tex_star_24.ftex","24 - tex_star_24","tex_star_24"),
("tex_star_25.ftex","25 - tex_star_25","tex_star_25"),
("tex_star_26.ftex","26 - tex_star_26","tex_star_26"),
("tex_star_27.ftex","27 - tex_star_27","tex_star_27"),
("tex_star_28.ftex","28 - tex_star_28","tex_star_28"),
]
LensFlareTexList=[("tex_ghost_00.ftex","00 - tex_ghost_00","tex_ghost_00.ftex"),
("tex_ghost_01.ftex","01 - tex_ghost_01","tex_ghost_01.ftex"),
("tex_ghost_02.ftex","02 - tex_ghost_02","tex_ghost_02.ftex"),
("tex_ghost_03.ftex","03 - tex_ghost_03","tex_ghost_03.ftex"),
("tex_ghost_04.ftex","04 - tex_ghost_04","tex_ghost_04.ftex"),
("tex_ghost_05.ftex","05 - tex_ghost_05","tex_ghost_05.ftex"),
("tex_ghost_06.ftex","06 - tex_ghost_06","tex_ghost_06.ftex")
]
HaloTexList=[("tex_halo_D00.ftex","D00 - tex_halo_D00","tex_halo_D00.ftex"),
("tex_halo_D01.ftex","D01 - tex_halo_D01","tex_halo_D01.ftex"),
("tex_halo_D02.ftex","D02 - tex_halo_D02","tex_halo_D02.ftex"),
("tex_halo_N00.ftex","N00 - tex_halo_N00","tex_halo_N00.ftex"),
("tex_halo_N01.ftex","N01 - tex_halo_N01","tex_halo_N01.ftex"),
("tex_halo_N02.ftex","N02 - tex_halo_N02","tex_halo_N02.ftex"),
("tex_halo_N03.ftex","N03 - tex_halo_N03","tex_halo_N03.ftex"),
("tex_halo_N04.ftex","N04 - tex_halo_N04","tex_halo_N04.ftex"),
("tex_halo_N05.ftex","N05 - tex_halo_N05","tex_halo_N05.ftex"),
("tex_halo_N06.ftex","N06 - tex_halo_N06","tex_halo_N06.ftex"),
("tex_halo_N07.ftex","N07 - tex_halo_N07","tex_halo_N07.ftex"),
("tex_halo_N08.ftex","N08 - tex_halo_N08","tex_halo_N08.ftex"),
("tex_halo_N09.ftex","N09 - tex_halo_N09","tex_halo_N09.ftex"),
("tex_halo_S00.ftex","S00 - tex_halo_S00","tex_halo_S00.ftex"),
("tex_halo_S01.ftex","S01 - tex_halo_S01","tex_halo_S01.ftex"),
("tex_halo_S02.ftex","S02 - tex_halo_S02","tex_halo_S02.ftex")
]
class FMDL_Material_Parameter_List_Add(bpy.types.Operator):
"""Add New Parameter"""
bl_idname = "fmdl.material_parameter_add"
bl_label = "Add Parameter"
@classmethod
class FMDL_Material_Parameter_List_Remove(bpy.types.Operator):
"""Remove Selected Parameter"""
bl_idname = "fmdl.material_parameter_remove"
bl_label = "Remove Parameter"
@classmethod
class FMDL_Material_Parameter_List_MoveUp(bpy.types.Operator):
"""Move Selected Parameter Up"""
bl_idname = "fmdl.material_parameter_moveup"
bl_label = "Move Parameter Up"
@classmethod
class FMDL_Material_Parameter_List_MoveDown(bpy.types.Operator):
"""Move Selected Parameter Down"""
bl_idname = "fmdl.material_parameter_movedown"
bl_label = "Move Parameter Down"
@classmethod
class FMDL_Object_BoundingBox_Create(bpy.types.Operator):
"""Create custom bounding box"""
bl_idname = "fmdl.boundingbox_create"
bl_label = "Create custom bounding box"
bl_options = {'REGISTER', 'UNDO'}
@classmethod
class FMDL_Object_BoundingBox_Remove(bpy.types.Operator):
"""Remove custom bounding box"""
bl_idname = "fmdl.boundingbox_remove"
bl_label = "Remove custom bounding box"
bl_options = {'REGISTER', 'UNDO'}
@classmethod
class Stadium_Scarecrow(bpy.types.Operator):
"""Stadium Scarecrow"""
bl_idname = "stadium_scarecrow.operator"
bl_label = str()
opname : StringProperty()
@classmethod
pass
class Stadium_Banner(bpy.types.Operator):
"""Stadium Banner"""
bl_idname = "stadium_banner.operator"
bl_label = str()
opname : StringProperty()
@classmethod
pass
class Staff_Coach_Pos(bpy.types.Operator):
"""Load / Assign Staff Position"""
bl_idname = "staff_pos.operator"
bl_label = str()
opname : StringProperty()
@classmethod
pass
class New_STID(bpy.types.Operator):
"""Swap old ID to new ID"""
bl_idname = "newid.operator"
bl_label = str()
@classmethod
pass
class TV_Objects(bpy.types.Operator):
"""Add TV Objects"""
bl_idname = "tv_object.operator"
bl_label = str()
@classmethod
pass
class Refresh_Light_Side(bpy.types.Operator):
"""Refresh Lights Side"""
bl_idname = "lights_side.operator"
bl_label = str()
@classmethod
pass
class Light_FX(bpy.types.Operator):
"""Light FX Exporter"""
bl_idname = "lightfx.operator"
bl_label = str()
opname : StringProperty()
@classmethod
class Refresh_OT(bpy.types.Operator):
"""Refresh Parent List"""
bl_idname = "refresh.operator"
bl_label = str()
@classmethod
pass
class Import_OT(bpy.types.Operator):
"""Import Stadium"""
bl_idname = "import.operator"
bl_label = str()
@classmethod
pass
class Import_Crowd_OT(bpy.types.Operator):
"""Import Crowd Audiarea"""
bl_idname = "crowd_import.operator"
bl_label = str()
@classmethod
pass
class Crowd_OT(bpy.types.Operator):
"""Export Crowd"""
bl_idname = "crowd.operator"
bl_label = str()
@classmethod
pass
class Flags_Area_OT(bpy.types.Operator):
"""Export Flag Area"""
bl_idname = "flags.operator"
bl_label = str()
@classmethod
pass
class PES_21_OT_assign_crowd_type(bpy.types.Operator):
"""Click to assign selected vertices to the selected crowd type"""
bl_idname = "assign.operator"
bl_label = str()
opname : StringProperty()
pass
class Import_lightfx_OT(bpy.types.Operator):
"""Light FX Importer"""
bl_idname = "ligtfx_importer.operator"
bl_label = str()
@classmethod
pass
class Export_OT(bpy.types.Operator):
"""Export Stadium"""
bl_idname = "export_stadium.operator"
bl_label = str()
opname : StringProperty()
@classmethod
pass
class Pitch_Objects(bpy.types.Operator):
"""Export Pitch Objects"""
bl_idname = "export_pitch.operator"
bl_label = str()
opname : StringProperty()
@classmethod
pass
class ExportStadium_AD(bpy.types.Operator):
"""Export Adboard of Stadium"""
bl_idname = "export_ad.operator"
bl_label = str()
@classmethod
class Export_TV(bpy.types.Operator):
"""Export TV"""
bl_idname = "export_tv.operator"
bl_label = str()
opname : StringProperty()
@classmethod
pass
TexDimensions=["8","16","32","48","64","80","96","112","128","144","160","176","192","208",
"224","240","256","272","288","304","320","336","352","368","384","400","416","432","448",
"464","480","496","512","528","544","560","576","592","608","624","640","656","672","688",
"704","720","736","752","768","784","800","816","832","848","864","880","896","912","928",
"944","960","976","992","1008","1024","1040","1056","1072","1088","1104","1120","1136","1152",
"1168","1184","1200","1216","1232","1248","1264","1280","1296","1312","1328","1344","1360","1376",
"1392","1408","1424","1440","1456","1472","1488","1504","1520","1536","1552","1568","1584","1600",
"1616","1632","1648","1664","1680","1696","1712","1728","1744","1760","1776","1792","1808","1824",
"1840","1856","1872","1888","1904","1920","1936","1952","1968","1984","2000","2016","2032","2048",
"2064","2080","2096","2112","2128","2144","2160","2176","2192","2208","2224","2240","2256","2272",
"2288","2304","2320","2336","2352","2368","2384","2400","2416","2432","2448","2464","2480","2496",
"2512","2528","2544","2560","2576","2592","2608","2624","2640","2656","2672","2688","2704","2720",
"2736","2752","2768","2784","2800","2816","2832","2848","2864","2880","2896","2912","2928","2944",
"2960","2976","2992","3008","3024","3040","3056","3072","3088","3104","3120","3136","3152","3168",
"3184","3200","3216","3232","3248","3264","3280","3296","3312","3328","3344","3360","3376","3392",
"3408","3424","3440","3456","3472","3488","3504","3520","3536","3552","3568","3584","3600","3616",
"3632","3648","3664","3680","3696","3712","3728","3744","3760","3776","3792","3808","3824","3840",
"3856","3872","3888","3904","3920","3936","3952","3968","3984","4000","4016","4032","4048","4064",
"4080","4096","4112","4128","4144","4160","4176","4192","4208","4224","4240","4256","4272","4288",
"4304","4320","4336","4352","4368","4384","4400","4416","4432","4448","4464","4480","4496","4512",
"4528","4544","4560","4576","4592","4608","4624","4640","4656","4672","4688","4704","4720","4736",
"4752","4768","4784","4800","4816","4832","4848","4864","4880","4896","4912","4928","4944","4960",
"4976","4992","5008","5024","5040","5056","5072","5088","5104","5120","5136","5152","5168","5184",
"5200","5216","5232","5248","5264","5280","5296","5312","5328","5344","5360","5376","5392","5408",
"5424","5440","5456","5472","5488","5504","5520","5536","5552","5568","5584","5600","5616","5632",
"5648","5664","5680","5696","5712","5728","5744","5760","5776","5792","5808","5824","5840","5856",
"5872","5888","5904","5920","5936","5952","5968","5984","6000","6016","6032","6048","6064","6080",
"6096","6112","6128","6144","6160","6176","6192","6208","6224","6240","6256","6272","6288","6304",
"6320","6336","6352","6368","6384","6400","6416","6432","6448","6464","6480","6496","6512","6528",
"6544","6560","6576","6592","6608","6624","6640","6656","6672","6688","6704","6720","6736","6752",
"6768","6784","6800","6816","6832","6848","6864","6880","6896","6912","6928","6944","6960","6976",
"6992","7008","7024","7040","7056","7072","7088","7104","7120","7136","7152","7168","7184","7200",
"7216","7232","7248","7264","7280","7296","7312","7328","7344","7360","7376","7392","7408","7424",
"7440","7456","7472","7488","7504","7520","7536","7552","7568","7584","7600","7616","7632","7648",
"7664","7680"]
class Convert_OT(bpy.types.Operator):
"""Export and Convert all texture to FTEX"""
bl_idname = "convert.operator"
bl_label = str()
@classmethod
pass
class Clear_OT(bpy.types.Operator):
"""Clear Temporary Data"""
bl_idname = "clear_temp.operator"
bl_label = str()
opname : StringProperty()
@classmethod
pass
class Parent_OT(bpy.types.Operator):
"""Assign active object to parent list"""
bl_idname = "set_parent.operator"
bl_label = str()
@classmethod
pass
class remove_OT(bpy.types.Operator):
"""Unassign active object from parent list"""
bl_idname = "clr.operator"
bl_label = str()
@classmethod
pass
class FMDL_Shader_Set(bpy.types.Operator):
"""Set a Shader from list"""
bl_idname = "shader.operator"
bl_label = "Set Shader"
@classmethod
pass
class Start_New_Scene(bpy.types.Operator):
"""Start New Scene"""
bl_idname = "scene.operator"
bl_label = str()
@classmethod
pass
class Create_Main_Parts(bpy.types.Operator):
"""Create Main Parts"""
bl_idname = "main_parts.operator"
bl_label = str()
@classmethod
pass
class FMDL_Scene_Open_Image(bpy.types.Operator, bpy_extras.io_utils.ImportHelper):
"""Open a Image Texture DDS / PNG / TGA"""
bl_idname = "open.image"
bl_label = "Open Image Texture"
bl_options = {'REGISTER', 'UNDO'}
import_label = "Open Image Texture"
filename_ext = "DDS, PNG, TGA"
filter_glob : StringProperty(default="*.dds;*.png;*.tga", options={'HIDDEN'})
class FMDL_Externally_Edit(bpy.types.Operator):
"""Edit texture with externally editor"""
bl_idname = "edit.operator"
bl_label = "Externally Editor"
@classmethod
pass
class FMDL_Reload_Image(bpy.types.Operator):
"""Reload All Image Texture"""
bl_idname = "reload.operator"
bl_label = str()
@classmethod
pass
classes = [
Import_OT,
FMDL_21_PT_Texture_Panel,
FMDL_Scene_Open_Image,
FMDL_21_PT_Mesh_Panel,
FMDL_21_PT_UIPanel,
Create_Main_Parts,
Refresh_OT,
Parent_OT,
remove_OT,
Clear_OT,
FMDL_Shader_Set,
FMDL_Externally_Edit,
FMDL_Reload_Image,
FMDL_Object_BoundingBox_Create,
FMDL_Object_BoundingBox_Remove,
FMDL_21_PT_Object_BoundingBox_Panel,
Export_OT,
Convert_OT,
Start_New_Scene,
Crowd_OT,
Import_Crowd_OT,
Flags_Area_OT,
Light_FX,
Export_TV,
TV_Objects,
Pitch_Objects,
Staff_Coach_Pos,
New_STID,
ExportStadium_AD,
Refresh_Light_Side,
Stadium_Banner,
Stadium_Scarecrow,
Import_lightfx_OT,
PES_21_PT_CrowdSection,
PES_21_OT_assign_crowd_type,
FMDL_Material_Parameter_List_Add,
FMDL_Material_Parameter_List_Remove,
FMDL_Material_Parameter_List_MoveUp,
FMDL_Material_Parameter_List_MoveDown,
FMDL_UL_material_parameter_list,
FMDL_21_PT_Material_Panel,
FMDL_MaterialParameter,
] | [
2,
198,
2,
17168,
13789,
198,
198,
2,
15069,
357,
66,
8,
33448,
337,
73,
33758,
12,
1415,
2931,
1415,
198,
198,
2,
2448,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11,
284,
597,
1048,
16727,
257,
4866,
198,
2,
286,
428,
3788,
... | 2.212885 | 11,579 |
from django.db import models
# Create your models here.
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
198,
2,
13610,
534,
4981,
994,
13,
628
] | 3.625 | 16 |
#!/usr/bin/env python
"""Tests for `spectrapepper` package."""
import unittest
import spectrapepper as spep
import numpy as np
import pandas as pd
# import my_functions as spep
class TestSpectrapepper(unittest.TestCase):
"""Tests for `spectrapepper` package."""
if __name__ == '__main__':
unittest.main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
37811,
51,
3558,
329,
4600,
4443,
13484,
2848,
63,
5301,
526,
15931,
198,
198,
11748,
555,
715,
395,
198,
11748,
5444,
13484,
2848,
355,
693,
79,
198,
11748,
299,
32152,
355,
4594... | 2.80531 | 113 |
#!/Applications/anaconda/envs/Python3/bin
def main():
'''Examples Using Exceptions in Python'''
# Python exceptions: http://docs.python.org/library/exceptions.html
# Catch exceptions with try
try:
f = open('noFile.txt')
except IOError as e:
print('Oh no, IOError:', e)
except ValueError as e:
print('Oh no, ValueError:', e)
else:
# Can put the else code in the try part, too
# Runs when try body completes with no exceptions
for line in f:
print(line, end='')
finally:
# Always executed after try, except, and else even if exceptions raised
# or hit break/continue/return statement. Good for clean-up
# f.close()
pass
# Exceptions in a while loop
while True:
try:
n = input('Please enter an integer: ')
n = int(n)
break
except ValueError:
print('Input not an integer, please try again: ')
print('Correct input!')
# Raise own exceptions
try:
for line in readDocFile('noFile.txt'):
print(line.strip())
except ValueError as e:
print('Bad filename:', e)
testBool = True
if testBool:
raise CustomException('NOOOOOO!')
# Assert that input is correct
grades = [79, 92, 84]
assert not len(grades) == 0, 'no grades data'
return 0
if __name__ == '__main__':
main()
| [
2,
48443,
41995,
14,
272,
330,
13533,
14,
268,
14259,
14,
37906,
18,
14,
8800,
198,
198,
4299,
1388,
33529,
198,
220,
220,
220,
705,
7061,
27730,
8554,
1475,
11755,
287,
11361,
7061,
6,
198,
220,
220,
220,
1303,
11361,
13269,
25,
26... | 2.375 | 608 |
"""Main application logic"""
import logging
from flask import Flask, jsonify, render_template, request
from summarizer import Summarizer
from src.utils import setup_logging
DEFAULT_MODEL_NAME = 'distilbert-base-uncased'
DEFAULT_NUM_SENTENCES = 1
setup_logging()
logger = logging.getLogger('ext_summarizer')
summarization_model = Summarizer(model=DEFAULT_MODEL_NAME)
app = Flask(__name__)
@app.route('/')
def index():
"""Main service page"""
return render_template('index.html')
@app.route('/summarize', methods=['POST'])
def summarize():
"""Endpoint for text summarization"""
logger.info('Processing input...')
errors = []
summary = ''
if request.method == 'POST':
data = request.get_json()
text = data.get('text', '')
logger.debug("text: %s", text)
num_sentences = int(data.get('num_sentences', DEFAULT_NUM_SENTENCES))
logger.debug("num_sentences: %s", num_sentences)
summary = summarization_model(text, num_sentences=num_sentences)
logger.debug("summary: %s", summary)
logger.debug('Returning summary: %s', summary)
return jsonify({"errors": errors, "summary": summary})
if __name__ == "__main__":
app.run()
| [
37811,
13383,
3586,
9156,
37811,
198,
11748,
18931,
198,
198,
6738,
42903,
1330,
46947,
11,
33918,
1958,
11,
8543,
62,
28243,
11,
2581,
198,
6738,
15676,
7509,
1330,
5060,
3876,
7509,
198,
198,
6738,
12351,
13,
26791,
1330,
9058,
62,
64... | 2.707317 | 451 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2015 Felix Wunsch, Communications Engineering Lab (CEL) / Karlsruhe Institute of Technology (KIT) <wunsch.felix@googlemail.com>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
from gnuradio import blocks
import time
import ieee802_15_4_swig as ieee802_15_4
if __name__ == '__main__':
gr_unittest.run(qa_zeropadding_b, "qa_zeropadding_b.xml")
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
220,
198,
2,
15069,
1853,
29721,
370,
13271,
354,
11,
14620,
14044,
3498,
357,
34,
3698,
8,
1220,
15415,
82,
622,
... | 3.218659 | 343 |
""" Input for Redmine shell. """
import sys
import os
import termios
import contextlib
from enum import Enum
from .command import Command
from redmine_shell.command.system.commands import History, HistoryMove
class State(Enum):
''' Character Key Event State. '''
CONTINUE = -1
BREAK = -2
@contextlib.contextmanager
def _raw_mode(file):
""" Make terminal raw mode for getting an event pressing a key. """
old_attrs = termios.tcgetattr(file.fileno())
new_attrs = old_attrs[:]
new_attrs[3] = new_attrs[3] & ~(termios.ECHO | termios.ICANON)
try:
termios.tcsetattr(file.fileno(), termios.TCSADRAIN, new_attrs)
yield
finally:
termios.tcsetattr(file.fileno(), termios.TCSADRAIN, old_attrs)
def redmine_input(prompt='', complete_command=None, history=False):
""" Customized input function for redmine shell. """
if complete_command is None:
complete_command = []
# TODO: inline
sys.stdout.write(prompt)
sys.stdout.flush()
with _raw_mode(sys.stdin):
keyword = {'prompt': prompt, 'complete_command': complete_command,
'history': history,}
keyword['type_buf'] = []
keyword['history_move'] = HistoryMove(
History.instance().load())
special_key_handlers = {chr(4): ctrl_d,
chr(16): ctrl_p,
chr(14): ctrl_j,
# MacOS uses 13 as ctrl-j
chr(13): ctrl_j,
chr(12): ctrl_l,
chr(8): ctrl_h,
chr(9): tab,
chr(10): newline,
chr(127): backspace, }
while True:
char = sys.stdin.read(1)
if not char:
break
if char in special_key_handlers:
handler = special_key_handlers[char]
elif 41 <= ord(char) <= 176 or ord(char) == 32:
handler = normal
else:
handler = other
keyword['char'] = char
ret = handler(keyword)
if ret == State.CONTINUE:
continue
elif ret == State.BREAK:
break
else:
return ret
| [
37811,
23412,
329,
2297,
3810,
7582,
13,
37227,
628,
198,
11748,
25064,
198,
11748,
28686,
198,
11748,
3381,
4267,
198,
11748,
4732,
8019,
198,
6738,
33829,
1330,
2039,
388,
198,
6738,
764,
21812,
1330,
9455,
198,
6738,
2266,
3810,
62,
... | 1.908797 | 1,239 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 06 09:13:14 2021
@author: sudhir
"""
# =============================================================================
# Import library
# =============================================================================
import pandas as pd
import numpy as np
import re
from sklearn.base import TransformerMixin, BaseEstimator
# =============================================================================
# Bureau feature
# =============================================================================
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
26223,
3158,
9130,
7769,
25,
1485,
25,
1415,
33448,
628,
198,
31,
9800,
25,
424,
34985,
3... | 5.428571 | 105 |
import torch
import torch.nn as nn
from sacred import Ingredient
model = Ingredient('model')
@model.config
@model.capture
| [
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
6738,
13626,
1330,
17589,
445,
1153,
628,
198,
19849,
796,
17589,
445,
1153,
10786,
19849,
11537,
628,
198,
31,
19849,
13,
11250,
628,
628,
198,
198,
31,
19849,
13,
27144,
... | 3.046512 | 43 |
import itertools
from threading import Thread
| [
11748,
340,
861,
10141,
198,
6738,
4704,
278,
1330,
14122,
628,
198
] | 4 | 12 |
import tensorflow as tf
import numpy as np
import h5py
#broadcasting:先将实数或向量扩展再对对应元素进行运算
A = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]])
print(A)
print(A.shape[1]) #输出为3
print(A + 100)
#reshape()函数,注意reshape以后不能丢失数据:1x12 12x1 2x6 6x2 3x4 4x3
#-1表示未知
print(A.reshape(-1))
print(A.reshape(-1, 1))
print(A.reshape(-1, 2))
print(A.reshape(3, -1))
print(A.reshape(2, 2, 3)) #三维矩阵
print(A.shape)
#[[[ 1 2 3]
# [ 4 5 6]]
#[[ 7 8 9]
# [10 11 12]]]
#从数组的形状中删除单维度条目,即把shape中为1的维度去掉,默认删除所有单维度条目
print(np.squeeze(A))
#[[ 1 2 3]
# [ 4 5 6]
# [ 7 8 9]
# [10 11 12]]
#a既不属于行向量,也不是列向量
#a = np.array([1, 2, 3])
#print(a)
#print(a.shape)
#print(a.T)
#print(np.dot(a.T, a))
a = np.random.randn(5)
print(a)
print(a.shape)
#a.T还是它本身
print(a.T)
#二者做内积应该是一个矩阵但实际结果是一个数
print(np.dot(a.T, a))
#当.py文件被直接运行时,if __name__ == '__main__'之下的代码块将被运行;
#当.py文件以模块形式被导入时,if __name__ == '__main__'之下的代码块不被运行。
#python xxx.py,直接运行xxx.py文件
#python -m xxx,把xxx.py当做模块运行
#compute real number
if __name__ == '__main__':
x = 3
s = sigmoid(x)
print(s)
#compute array
if __name__ == '__main__':
x = np.array([2, 3, 4])
print(x.shape)
s = sigmoid(x)
print(s)
#compute matrix
if __name__ == '__main__':
x = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
print(x.shape)
s = sigmoid(x)
print(s)
#h5py用来存放数据集(dataset)或组(group)
#在当前目录下创建mfh5py.hdf5文件,或mfh5py.h5文件
f = h5py.File("mfh5py.hdf5", "w")
#创建dataset数据集,i表示元素类型,代表int
d1 = f.create_dataset("dset1", (20,), 'i')
for key in f.keys():
print(key)
print(f[key].name)
print(f[key].shape)
#初始化默认为0
print(f[key].value)
#赋值的两种方式
d1[...] = np.arange(20)
print(d1)
#单引号也可以
f['dset2'] = np.arange(15)
for key in f.keys():
print(f[key].name)
print(f[key].value)
#直接将numpy数组传给参数data
a = np.arange(20)
d1 = f.create_dataset("dset3", data = a)
#创建一个名字为bar的组
g1 = f.create_group("bar")
#在bar这个组里面分别创建name为dset1,dset2的数据集并赋值。
g1["ddset1"] = np.arange(10)
g1["ddset2"] = np.arange(12).reshape((3, 4))
for key in g1.keys():
print(g1[key].name)
print(g1[key].value)
#创建组bar1,组bar2,数据集dset
g1=f.create_group("bar1")
g2=f.create_group("bar2")
d=f.create_dataset("dset",data=np.arange(10))
#在bar1组里面创建一个组car1和一个数据集dset1。
c1=g1.create_group("car1")
d1=g1.create_dataset("dset1",data=np.arange(10))
#在bar2组里面创建一个组car2和一个数据集dset2
c2=g2.create_group("car2")
d2=g2.create_dataset("dset2",data=np.arange(10))
#根目录下的组和数据集
print(".............")
for key in f.keys():
print(f[key].name)
#bar1这个组下面的组和数据集
print(".............")
for key in g1.keys():
print(g1[key].name)
#bar2这个组下面的组和数据集
print(".............")
for key in g2.keys():
print(g2[key].name)
#顺便看下car1组和car2组下面都有什么,估计你都猜到了为空。
print(".............")
print(c1.keys())
print(c2.keys())
#python列表和numpy的数组
a = [1, 2, 3, 4] #a表示数组,长度是4
arr = np.array([1, 2, 3, 4]) #arr表示向量
print(a, len(a))
print(arr, arr.shape)
#python元组的列表和numpy数组
b = [(1, 2), (3, 4)]
brr = np.array([(1, 2), (3, 4)])
crr = np.array([[1, 2], [3, 4]])
print(b[0][0], len(b)) #b是一个二维数组,也可以看成是一个含有两个元组的列表
print(brr.T, brr.shape) #brr是一个2x2的矩阵
print(crr.T, crr.shape) #crr和brr效果相同
#eval()函数用来执行一个字符串表达式,并返回表达式的值
print(eval("2 * 3 + 4"))
# ndarray多维数组
x = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] #list
x = np.array(x) # 将任意序列类型的对象转换成ndarray数组
# 或直接这样定义x:x = np.arange(10)
print(type(x)) # ndarray
# sklearn中,与逻辑回归有关的主要是这三个类:LogisticRegression, LogisticRegressionCV 和logistic_regression_path。
# 其中LogisticRegression和LogisticRegressionCV的主要区别是LogisticRegressionCV使用了交叉验证来选择正则化系数C。
# 而LogisticRegression需要自己每次指定一个正则化系数。
# 方法:
# fit(X,y[,sample_weight]):训练模型。
# predict(X):用模型进行预测,返回预测值。
# score(X,y[,sample_weight]):返回(X,y)上的预测准确率(accuracy)。
# predict_log_proba(X):返回一个数组,数组的元素依次是 X 预测为各个类别的概率的对数值。
# predict_proba(X):返回一个数组,数组元素依次是 X 预测为各个类别的概率的概率值。
| [
11748,
11192,
273,
11125,
355,
48700,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
289,
20,
9078,
198,
2,
36654,
19913,
25,
17739,
230,
49546,
22522,
252,
46763,
108,
22755,
244,
28938,
239,
34932,
237,
33699,
102,
161,
109,
243,
378... | 1.23543 | 3,020 |
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
from unittest import mock
from pytest import fixture
import json
from servicecatalog_puppet import constants, luigi_tasks_and_targets
@fixture
| [
2,
15069,
13130,
6186,
13,
785,
11,
3457,
13,
393,
663,
29116,
13,
1439,
6923,
33876,
13,
198,
2,
30628,
55,
12,
34156,
12,
33234,
7483,
25,
24843,
12,
17,
13,
15,
198,
6738,
555,
715,
395,
1330,
15290,
198,
198,
6738,
12972,
9288... | 3.2625 | 80 |
from commonconf.backends import use_configparser_backend
from commonconf import settings
from datetime import datetime
import argparse
import json
import os
if __name__ == '__main__':
settings_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'settings.cfg')
use_configparser_backend(settings_path, 'Canvas')
parser = argparse.ArgumentParser()
parser.add_argument('login', help='Login for which to get page views')
args = parser.parse_args()
get_page_views(args.login)
| [
6738,
2219,
10414,
13,
1891,
2412,
1330,
779,
62,
11250,
48610,
62,
1891,
437,
198,
6738,
2219,
10414,
1330,
6460,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
11748,
1822,
29572,
198,
11748,
33918,
198,
11748,
28686,
628,
198,
198,
36... | 2.679803 | 203 |
"""nycrud URL Configuration"""
# Django
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('users/', include(('users.urls', 'users'), namespace='users')),
path('', include(('posts.urls', 'posts'), namespace='posts')),
]
| [
37811,
3281,
6098,
463,
10289,
28373,
37811,
198,
198,
2,
37770,
198,
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
198,
6738,
42625,
14208,
13,
6371,
82,
1330,
3108,
11,
2291,
628,
198,
6371,
33279,
82,
796,
685,
198,
220,
220,
220... | 2.942857 | 105 |
_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py'
model = dict(
pretrained='open-mmlab://detectron2/resnet50_caffe',
neck=dict(
_delete_=True,
type='FPN_CARAFE_LDCN3_PDCN_CAT',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5,
start_level=1,
# add_extra_convs=True,
# extra_convs_on_inputs=False, # use P5
upsample_cfg=dict(
type='carafe',
up_kernel=5,
up_group=1,
encoder_kernel=3,
encoder_dilation=1,
compressed_channels=64)),
bbox_head=dict(
norm_on_bbox=True,
centerness_on_reg=True,
dcn_on_last_conv=False,
center_sampling=True,
conv_bias=True,
loss_bbox=dict(type='GIoULoss', loss_weight=1.0)),
# training and testing settings
test_cfg=dict(
nms_pre=1000,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=300))
# dataset settings
img_norm_cfg = dict(
mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
samples_per_gpu=4,
workers_per_gpu=4,
train=dict(pipeline=train_pipeline),
val=dict(pipeline=test_pipeline),
test=dict(pipeline=test_pipeline))
optimizer_config = dict(_delete_=True, grad_clip=None)
optimizer = dict(
type='SGD',
lr=0.005,
momentum=0.9,
weight_decay=0.0001,
paramwise_cfg=dict(bias_lr_mult=2.0, bias_decay_mult=0.0))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[16, 22])
runner = dict(type='EpochBasedRunner', max_epochs=24) | [
62,
8692,
62,
796,
705,
69,
6966,
62,
81,
1120,
62,
66,
21223,
62,
69,
21999,
62,
4593,
12,
2256,
62,
16,
87,
62,
66,
25634,
13,
9078,
6,
198,
198,
19849,
796,
8633,
7,
198,
220,
220,
220,
2181,
13363,
11639,
9654,
12,
3020,
2... | 1.979405 | 1,311 |
from __future__ import print_function
import argparse
import io
import locale
import os
import sys
import shlex
from collections import defaultdict
import bdemeta.resolver
import bdemeta.commands
if __name__ == "__main__":
main()
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
198,
11748,
1822,
29572,
198,
11748,
33245,
198,
11748,
36693,
198,
11748,
28686,
198,
11748,
25064,
198,
11748,
427,
2588,
198,
6738,
17268,
1330,
4277,
11600,
198,
198,
11748,
275,
953... | 3.338028 | 71 |
from rest_framework.routers import DefaultRouter
from app.api import views
router = DefaultRouter(trailing_slash=False)
router.register("users", views.UserViewSet, basename="user")
urlpatterns = router.urls
| [
6738,
1334,
62,
30604,
13,
472,
1010,
1330,
15161,
49,
39605,
198,
198,
6738,
598,
13,
15042,
1330,
5009,
628,
198,
472,
353,
796,
15161,
49,
39605,
7,
9535,
4386,
62,
6649,
1077,
28,
25101,
8,
198,
472,
353,
13,
30238,
7203,
18417,... | 3.19697 | 66 |
import os
import time
#-------------------- 1/4 Run Config Begin --------------------
RUN_MODE = "slurmCluster" # singleProc, multiProc, slurmCluster
REDIRECT_TERMINAL_OUTPUT = True
COMPILE_APP = False
#-------------------- 1/4 Run Config End --------------------
#-------------------- 2/4 Base CPU Config Begin --------------------
runOpt = ' --cpu-type=DerivO3CPU \
--num-cpus=1 \
--cacheline_size=256 \
--caches --l1d_size=16kB --l1i_size=16kB \
--l1d_assoc=4 --l1i_assoc=4 \
--l2cache \
--l2_size=128kB --l2_assoc=8 \
--l3cache \
--l3_size=4MB --l3_assoc=16 --l3_mshrs=%d \
--mem-size=4GB' % (4*1024*1024 / 256 + 20)
#-------------------- 2/4 Base CPU Config End --------------------
#-------------------- 3/4 Security Config Begin --------------------
rekHOptBase = ' --l3reKeyHit --l3_max_evict_per_epoch=%d'
rekMOptBase = ' --l3reKeyMiss --l3_max_evict_per_epoch=%d'
rekMAOptBase = ' --l3reKeyMissAddr --l3_max_evict_per_epoch=%d'
secPara = [[10000, 2], [20000, 4*1024*1024 / 256 *1], [30000, 4*1024*1024 / 256 *2], [40000, 4*1024*1024 / 256 *3], [50000, 4*1024*1024 / 256 *4], [60000, 4*1024*1024 / 256 *6], [70000, 4*1024*1024 / 256 *8], [80000, 4*1024*1024 / 256 *12], [90000, 4*1024*1024 / 256 *20], [100000, 4*1024*1024 / 256 *40], [110000, 4*1024*1024 / 256 *100]]
#-------------------- 3/4 Security Config End --------------------
#-------------------- 4/4 experiment Config Begin --------------------
experimentList = []
## SEPT1 hello
#rstCktOpt = ' --checkpoint-restore=1 --maxinsts=50000000 --warmup-insts=1000000'
#experimentList.append([0, 'X86/gem5.opt', runOpt, 'hello', ''])
#experimentList.append([1, 'X86/gem5.opt', runOpt + rstCktOpt, 'hello', ''])
#experimentList.append([2, 'X86/gem5.opt', runOpt + rstCktOpt + rekHOpt, 'hello', ''])
#experimentList.append([3, 'X86/gem5.opt', runOpt + rstCktOpt + rekMOpt, 'hello', ''])
## STEP2 stream
#experimentList.append([20, 'X86/gem5.opt', runOpt, 'stream', ''])
#experimentList.append([21, 'X86/gem5.opt', runOpt + rstCktOpt, 'stream', ''])
#experimentList.append([22, 'X86/gem5.opt', runOpt + rstCktOpt + rekHOpt, 'stream', ''])
#experimentList.append([23, 'X86/gem5.opt', runOpt + rstCktOpt + rekMOpt, 'stream', ''])
#experimentList.append([24, 'X86/gem5.opt', runOpt + rstCktOpt + rekMAOpt, 'stream', ''])
for baseI, size in secPara:
rekHOpt = rekHOptBase % size
rekMOpt = rekMOptBase % size
rekMAOpt = rekMAOptBase % max(2, int(size/4096))
## STEP3 docDist
#experimentList.append([100, 'X86/gem5.opt', runOpt, 'docDist', ''])
#experimentList.append([baseI+101, 'X86/gem5.opt', runOpt + rstCktOpt, 'docDist', ''])
#experimentList.append([baseI+102, 'X86/gem5.opt', runOpt + rstCktOpt + rekHOpt, 'docDist', ''])
#experimentList.append([baseI+103, 'X86/gem5.opt', runOpt + rstCktOpt + rekMOpt, 'docDist', ''])
#experimentList.append([baseI+104, 'X86/gem5.opt', runOpt + rstCktOpt + rekMAOpt, 'docDist', ''])
## STEP4 mrsFast
#arg = '--search myWorkDir/app/mrsFast/dataset/chr3_50K.fa --seq myWorkDir/app/mrsFast/dataset/chr3_50K_2000.fq'
#experimentList.append([200, 'X86/gem5.opt', runOpt, 'mrsFast', arg])
#experimentList.append([baseI+201, 'X86/gem5.opt', runOpt + rstCktOpt, 'mrsFast', arg])
#experimentList.append([baseI+202, 'X86/gem5.opt', runOpt + rstCktOpt + rekHOpt, 'mrsFast', arg])
#experimentList.append([baseI+203, 'X86/gem5.opt', runOpt + rstCktOpt + rekMOpt, 'mrsFast', arg])
#experimentList.append([baseI+204, 'X86/gem5.opt', runOpt + rstCktOpt + rekMAOpt, 'mrsFast', arg])
## STEP5 SPEC2017
SPECOpt = ' --benchmark=%s --simpt-ckpt=%d \
--checkpoint-restore=1 --at-instruction \
--maxinsts=100000000 --warmup-insts=20000000'
from SPECList import SPECList
#SPECList = []
#SPECList = [[0, "blender_r", 0]]
for i, name, simptID in SPECList:
experimentList.append([baseI+i+1000, 'X86/gem5.opt', runOpt + SPECOpt%(name,simptID), 'SPEC2017', ''])
experimentList.append([baseI+i+2000, 'X86/gem5.opt', runOpt + SPECOpt%(name,simptID) + rekHOpt, 'SPEC2017', ''])
experimentList.append([baseI+i+3000, 'X86/gem5.opt', runOpt + SPECOpt%(name,simptID) + rekMOpt, 'SPEC2017', ''])
experimentList.append([baseI+i+4000, 'X86/gem5.opt', runOpt + SPECOpt%(name,simptID) + rekMAOpt, 'SPEC2017', ''])
print("Number of experiments: ", len(experimentList))
#-------------------- 4/4 Run Config Begin --------------------
if __name__ == "__main__":
GEM5_DIR = os.getcwd()
# STEP0 compile
for index, binary, config, app, _ in experimentList:
os.makedirs(GEM5_DIR + '/myWorkDir/result/' + str(index) + '-' + app, exist_ok=True)
if COMPILE_APP:
if app == 'SPEC2017':
continue
if binary[0] == "R":
os.system('ISA=riscv CCPRE=riscv64-linux-gnu- make -C '+GEM5_DIR+'/myWorkDir/app/'+app)
elif binary[0] == "X":
os.system('ISA=X86 CCPRE=x86_64-linux-gnu- make -C '+GEM5_DIR+'/myWorkDir/app/'+app)
else:
assert(False)
# STEP1 init the cluster or multiProcess
if not RUN_MODE == "singleProc":
client = initClient(RUN_MODE)
# STEP2 mark start time
startTime = time.time()
# STEP3 run them
if RUN_MODE == "singleProc":
for i, index_binary_config_app_arg in enumerate(experimentList):
runSimu(index_binary_config_app_arg)
print("----------> Finish %d/%d Simu, After %f minutes" % \
(i+1, len(experimentList), (time.time() - startTime)/60))
else:
futureList = []
for index_binary_config_app_arg in experimentList:
futureList.append(client.submit(runSimu, index_binary_config_app_arg))
for i, future in enumerate(futureList):
future.result()
print("----------> Finish %d/%d Simu, After %f minutes" % \
(i+1, len(experimentList), (time.time() - startTime)/60))
| [
198,
11748,
28686,
198,
11748,
640,
198,
198,
2,
19351,
352,
14,
19,
5660,
17056,
16623,
41436,
198,
49,
4944,
62,
49058,
796,
366,
6649,
333,
76,
2601,
5819,
1,
1303,
2060,
2964,
66,
11,
5021,
2964,
66,
11,
40066,
76,
2601,
5819,
... | 2.384211 | 2,470 |
# -*- coding: utf-8 -*-
import os
PKG_DIR = os.path.abspath(os.path.dirname(__file__))
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
28686,
198,
198,
40492,
38,
62,
34720,
796,
28686,
13,
6978,
13,
397,
2777,
776,
7,
418,
13,
6978,
13,
15908,
3672,
7,
834,
7753,
834,
4008,
628
] | 2.045455 | 44 |
# coding: utf-8
# The problem: CF compliant readers cannot read HOPS dataset directly.
# The solution: read with the `netCDF4-python` raw interface and create a CF object from the data.
#
# NOTE: Ideally this should be a `nco` script that could be run as a CLI script and fix the files.
# Here I am using `Python`+`iris`. That works and could be written as a CLI script too.
# The main advantage is that it takes care of the CF boilerplate.
# However, this approach is to "heavy-weight" to be applied in many variables and files.
# In[1]:
from netCDF4 import Dataset
#url = ('http://geoport.whoi.edu/thredds/dodsC/usgs/data2/rsignell/gdrive/'
# 'nsf-alpha/Data/MIT_MSEAS/MSEAS_Tides_20160317/mseas_tides_2015071612_2015081612_01h.nc')
url = ('/usgs/data2/rsignell/gdrive/'
'nsf-alpha/Data/MIT_MSEAS/MSEAS_Tides_20160317/mseas_tides_2015071612_2015081612_01h.nc')
nc = Dataset(url)
# Extract `lon`, `lat` variables from `vgrid2` and `u`, `v` variables from `vbaro`.
# The goal is to split the joint variables into individual CF compliant phenomena.
# In[2]:
vtime = nc['time']
coords = nc['vgrid2']
vbaro = nc['vbaro']
# Using iris to create the CF object.
#
# NOTE: ideally `lon`, `lat` should be `DimCoord` like time and not `AuxCoord`,
# but iris refuses to create 2D `DimCoord`. Not sure if CF enforces that though.
# First the Coordinates.
#
# FIXME: change to a full time slice later!
# In[3]:
import iris
iris.FUTURE.netcdf_no_unlimited = True
longitude = iris.coords.AuxCoord(coords[:, :, 0],
var_name='vlat',
long_name='lon values',
units='degrees')
latitude = iris.coords.AuxCoord(coords[:, :, 1],
var_name='vlon',
long_name='lat values',
units='degrees')
# Dummy Dimension coordinate to avoid default names.
# (This is either a bug in CF or in iris. We should not need to do this!)
lon = iris.coords.DimCoord(range(866),
var_name='x',
long_name='lon_range',
standard_name='longitude')
lat = iris.coords.DimCoord(range(1032),
var_name='y',
long_name='lat_range',
standard_name='latitude')
# Now the phenomena.
#
# NOTE: You don't need the `broadcast_to` trick if saving more than 1 time step.
# Here I just wanted the single time snapshot to have the time dimension to create a full example.
# In[4]:
vbaro.shape
# In[5]:
import numpy as np
u_cubes = iris.cube.CubeList()
v_cubes = iris.cube.CubeList()
for k in range(vbaro.shape[0]): # vbaro.shape[0]
time = iris.coords.DimCoord(vtime[k],
var_name='time',
long_name=vtime.long_name,
standard_name='time',
units=vtime.units)
u = vbaro[k, :, :, 0]
u_cubes.append(iris.cube.Cube(np.broadcast_to(u, (1,) + u.shape),
units=vbaro.units,
long_name=vbaro.long_name,
var_name='u',
standard_name='barotropic_eastward_sea_water_velocity',
dim_coords_and_dims=[(time, 0), (lon, 1), (lat, 2)],
aux_coords_and_dims=[(latitude, (1, 2)),
(longitude, (1, 2))]))
v = vbaro[k, :, :, 1]
v_cubes.append(iris.cube.Cube(np.broadcast_to(v, (1,) + v.shape),
units=vbaro.units,
long_name=vbaro.long_name,
var_name='v',
standard_name='barotropic_northward_sea_water_velocity',
dim_coords_and_dims=[(time, 0), (lon, 1), (lat, 2)],
aux_coords_and_dims=[(longitude, (1, 2)),
(latitude, (1, 2))]))
# Join the individual CF phenomena into one dataset.
# In[6]:
u_cube = u_cubes.concatenate_cube()
v_cube = v_cubes.concatenate_cube()
cubes = iris.cube.CubeList([u_cube, v_cube])
# Save the CF-compliant file!
# In[7]:
iris.save(cubes, 'hops.nc')
# In[8]:
get_ipython().system(u'ncdump -h hops.nc')
# In[ ]:
| [
198,
2,
19617,
25,
3384,
69,
12,
23,
198,
198,
2,
383,
1917,
25,
18551,
31332,
7183,
2314,
1100,
220,
367,
30737,
27039,
3264,
13,
198,
2,
383,
4610,
25,
1100,
351,
262,
4600,
3262,
34,
8068,
19,
12,
29412,
63,
8246,
7071,
290,
... | 1.896523 | 2,387 |
import base64
import re
import time
from datetime import datetime
from fontTools.ttLib import TTFont
from io import BytesIO
import scrapy
from A58Spider.items import A58SpiderItem
if __name__ == '__main__':
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
process = CrawlerProcess(get_project_settings())
process.crawl('a58')
process.start()
| [
11748,
2779,
2414,
198,
11748,
302,
198,
11748,
640,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
6738,
10369,
33637,
13,
926,
25835,
1330,
309,
10234,
756,
198,
6738,
33245,
1330,
2750,
4879,
9399,
198,
11748,
15881,
88,
198,
198,
673... | 3.136364 | 132 |
#!/usr/bin/env python
"""
tumult.py - Because everyone needs a little chaos every now and again.
"""
try:
import demiurgic
except ImportError:
print("Warning: You're not demiurgic. Actually, I think that's normal.")
try:
import mystificate
except ImportError:
print("Warning: Dark voodoo may be unreliable.")
# Globals
ATLAS = False # Nothing holds up the world by default
class Foo(object):
"""
The Foo class is an abstract flabbergaster that when instantiated
represents a discrete dextrogyratory inversion of a cattywompus
octothorp.
"""
def __init__(self, *args, **kwargs):
"""
The initialization vector whereby the ineffably obstreperous
becomes paramount.
"""
# TODO. BTW: What happens if we remove that docstring? :)
def demiurgic_mystificator(self, dactyl):
"""
A vainglorious implementation of bedizenment.
"""
inception = demiurgic.palpitation(dactyl) # Note the imported call
demarcation = mystificate.dark_voodoo(inception)
return demarcation
def test(self, whatever):
"""
This test method tests the test by testing your patience.
"""
print(whatever)
if __name__ == "__main__":
print("Forming...")
f = Foo("epicaricacy", "perseverate")
f.test("Codswallop") | [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
37811,
198,
83,
388,
586,
13,
9078,
532,
4362,
2506,
2476,
257,
1310,
11918,
790,
783,
290,
757,
13,
198,
37811,
198,
198,
28311,
25,
198,
220,
220,
220,
1330,
1357,
72,
3686,
291,
... | 2.636187 | 514 |
import logging
import array
from store import database
OPERATIONS = {}
@register_oper(key="SET")
@check_paras_len(gt=2)
@register_oper(key="GET")
@check_paras_len(eq=2)
@register_oper(key="DEL")
@check_paras_len(gt=1)
@register_oper(key="DUMP")
@check_paras_len(eq=2)
INFO = """# Server
redis_version:2.5.9
redis_git_sha1:473f3090
redis_git_dirty:0
os:Linux 3.3.7-1-ARCH i686
arch_bits:32
multiplexing_api:epoll
gcc_version:4.7.0
process_id:8104
run_id:bc9e20c6f0aac67d0d396ab950940ae4d1479ad1
tcp_port:6379
uptime_in_seconds:7
uptime_in_days:0
lru_clock:1680564
# Clients
connected_clients:1
client_longest_output_list:0
client_biggest_input_buf:0
blocked_clients:0
# Memory
used_memory:439304
used_memory_human:429.01K
used_memory_rss:13897728
used_memory_peak:401776
used_memory_peak_human:392.36K
used_memory_lua:20480
mem_fragmentation_ratio:31.64
mem_allocator:jemalloc-3.0.0
# Persistence
loading:0
rdb_changes_since_last_save:0
rdb_bgsave_in_progress:0
rdb_last_save_time:1338011402
rdb_last_bgsave_status:ok
rdb_last_bgsave_time_sec:-1
rdb_current_bgsave_time_sec:-1
aof_enabled:0
aof_rewrite_in_progress:0
aof_rewrite_scheduled:0
aof_last_rewrite_time_sec:-1
aof_current_rewrite_time_sec:-1
# Stats
total_connections_received:1
total_commands_processed:0
instantaneous_ops_per_sec:0
rejected_connections:0
expired_keys:0
evicted_keys:0
keyspace_hits:0
keyspace_misses:0
pubsub_channels:0
pubsub_patterns:0
latest_fork_usec:0
# Replication
role:master
connected_slaves:0
# CPU
used_cpu_sys:0.03
used_cpu_user:0.01
used_cpu_sys_children:0.00
used_cpu_user_children:0.00
# Keyspace
"""
@register_oper(key="INFO")
@check_paras_len(lt=3)
@register_oper(key="CONFIG")
@check_paras_len(gt=1)
@register_oper(key="KEYS")
@check_paras_len(eq=2)
@register_oper(key="TYPE")
@check_paras_len(eq=2)
@register_oper(key="TTL")
@check_paras_len(eq=2)
@register_oper(key="PTTL")
@check_paras_len(eq=2)
@register_oper(key="OBJECT", subkey="REFCOUNT")
@check_paras_len(eq=3)
@register_oper(key="OBJECT", subkey="IDLETIME")
@check_paras_len(eq=3)
@register_oper(key="OBJECT", subkey="ENCODING")
@check_paras_len(eq=3)
@register_oper(key="EXISTS")
@check_paras_len(eq=2)
@register_oper(key="SELECT")
@check_paras_len(eq=2)
#############################################
# implement operations for Key
#############################################
@register_oper(key="EXPIRE")
@check_paras_len(eq=3)
@register_oper(key="PEXPIRE")
@check_paras_len(eq=3)
@register_oper(key="EXPIREAT")
@check_paras_len(eq=3)
@register_oper(key="PEXPIREAT")
@check_paras_len(eq=3)
@register_oper(key="MOVE")
@check_paras_len(eq=3)
@register_oper(key="PERSIST")
@check_paras_len(eq=2)
@register_oper(key="RANDOMKEY")
@check_paras_len(eq=1)
@register_oper(key="RENAME")
@check_paras_len(eq=3)
@register_oper(key="RENAMENX")
@check_paras_len(eq=3)
@register_oper(key="RESTORE")
@check_paras_len(eq=4)
#############################################
# implement operations for String
#############################################
@register_oper(key="APPEND")
@check_paras_len(eq=3)
@register_oper(key="SETBIT")
@check_paras_len(eq=4)
@register_oper(key="GETBIT")
@check_paras_len(eq=3)
@register_oper(key="BITCOUNT")
@check_paras_len(gt=2)
@register_oper(key="BITOP", subkey="AND")
@check_paras_len(gt=3)
@register_oper(key="BITOP", subkey="OR")
@check_paras_len(gt=3)
@register_oper(key="BITOP", subkey="XOR")
@check_paras_len(gt=3)
@register_oper(key="BITOP", subkey="NOT")
@check_paras_len(eq=3)
@register_oper(key="DECR")
@check_paras_len(eq=2)
@register_oper(key="DECRBY")
@check_paras_len(eq=3)
@register_oper(key="INCR")
@check_paras_len(eq=2)
@register_oper(key="INCRBY")
@check_paras_len(eq=3)
@register_oper(key="INCRBYFLOAT")
@check_paras_len(eq=3)
@register_oper(key="GETRANGE")
@check_paras_len(eq=4)
@register_oper(key="GETSET")
@check_paras_len(eq=3)
@register_oper(key="MGET")
@check_paras_len(gt=1)
@register_oper(key="MSET")
@check_paras_len(gt=2)
@register_oper(key="MSETNX")
@check_paras_len(gt=2)
| [
11748,
18931,
198,
11748,
7177,
198,
6738,
3650,
1330,
6831,
628,
198,
31054,
18421,
796,
23884,
628,
628,
628,
198,
198,
31,
30238,
62,
3575,
7,
2539,
2625,
28480,
4943,
198,
31,
9122,
62,
1845,
292,
62,
11925,
7,
13655,
28,
17,
8,... | 2.266223 | 1,803 |
#!/usr/bin/env python3
"""LockedIterator."""
from threading import Lock
from collections.abc import Iterator
# pylint: disable=too-few-public-methods
class LockedIterator(Iterator):
"""Locked Iterator."""
def __init__(self, _it):
"""Initialise object."""
self.lock = Lock()
self._it = _it.__iter__()
def __next__(self):
"""Return next."""
self.lock.acquire()
try:
return self._it.__next__()
finally:
self.lock.release()
def send(self, msg):
"""Send message."""
self.lock.acquire()
try:
self._it.send(msg)
except StopIteration as _si:
print(_si)
finally:
self.lock.release()
# pylint: enable=too-few-public-methods
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
37811,
43,
3543,
37787,
526,
15931,
198,
6738,
4704,
278,
1330,
13656,
198,
6738,
17268,
13,
39305,
1330,
40806,
1352,
198,
198,
2,
279,
2645,
600,
25,
15560,
28,
18820,
12,
32146,... | 2.13172 | 372 |
"""Initialize tests""" | [
37811,
24243,
1096,
5254,
37811
] | 4.4 | 5 |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from django.db.models import Q
from datamanage.pro.datamodel.models.datamodel import DmmModelFieldStage
from datamanage.pro.datamodel.models.model_dict import TimeField
def get_schema_list(model_id, with_field_details=False):
"""
拿到主表字段列表,用于统计口径和指标sql校验
:param model_id: {int} 模型ID
:param with_field_details: {bool} 是否返回字段类型
:return:schema_list: {list} 主表字段列表
"""
# 获取主表字段queryset
field_queryset = DmmModelFieldStage.objects.filter(model_id=model_id)
# 获取扩展字段对应的来源字段queryset
source_field_queryset = get_source_field_queryset(field_queryset)
source_field_obj_dict = {
source_field_obj.field_name: source_field_obj for source_field_obj in source_field_queryset
}
schema_list = []
for field_obj in field_queryset:
if field_obj.field_name != TimeField.TIME_FIELD_NAME and field_obj.field_type != TimeField.TIME_FIELD_TYPE:
# 如果是扩展字段, 数据类型和字段类型从关联维度表中继承
if field_obj.source_model_id and field_obj.source_field_name:
source_field_obj = source_field_obj_dict[field_obj.source_field_name]
field_type = source_field_obj.field_type
field_category = source_field_obj.field_category
else:
field_type = field_obj.field_type
field_category = field_obj.field_category
# 是否返回字段类型
if not with_field_details:
schema_list.append({'field_type': field_type, 'field_name': field_obj.field_name})
else:
schema_list.append(
{
'field_type': field_type,
'field_name': field_obj.field_name,
'field_category': field_category,
'description': field_obj.description,
'field_alias': field_obj.field_alias,
}
)
return schema_list
def get_source_field_queryset(field_queryset):
"""
获取主表扩展字段对应来源字段的queryset
:param field_queryset:{QuerySet} 主表字段QuerySet
:return: source_field_queryset: {QuerySet} 来源字段QuerySet
"""
condition = None
for field_obj in field_queryset:
if field_obj.source_model_id and field_obj.source_field_name:
if condition is None:
condition = Q(model_id=field_obj.source_model_id, field_name=field_obj.source_field_name)
else:
condition |= Q(model_id=field_obj.source_model_id, field_name=field_obj.source_field_name)
source_field_queryset = DmmModelFieldStage.objects.filter(condition) if condition is not None else []
return source_field_queryset
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
24893,
1087,
318,
10607,
284,
1104,
262,
1280,
2723,
2055,
416,
1642,
347,
42,
12,
33,
11159,
5525,
241,
251,
165,
110,
116,
161,
253,
118,
163,
94,
222,
... | 2.185946 | 1,850 |
"""examples.configuration_modes.eos_configure_session"""
from scrapli.driver.core import EOSDriver
MY_DEVICE = {
"host": "172.18.0.14",
"auth_username": "scrapli",
"auth_password": "scrapli",
"auth_secondary": "VR-netlab9",
"auth_strict_key": False,
}
def main():
"""Connect to an EOS Device and create and acquire a configuration session"""
configs = ["show configuration sessions"]
with EOSDriver(**MY_DEVICE) as conn:
conn.register_configuration_session(session_name="my-config-session")
# for configuration sessions we have to first "register" the session with scrapli:
result = conn.send_configs(configs=configs, privilege_level="my-config-session")
# we should see our session name with an "*" indicating that is the active config session
print(result[0].result)
if __name__ == "__main__":
main()
| [
37811,
1069,
12629,
13,
11250,
3924,
62,
76,
4147,
13,
68,
418,
62,
11250,
495,
62,
29891,
37811,
198,
6738,
19320,
489,
72,
13,
26230,
13,
7295,
1330,
412,
2640,
32103,
198,
198,
26708,
62,
7206,
27389,
796,
1391,
198,
220,
220,
22... | 2.819936 | 311 |