code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
"""
Generic Caracara API module.
This module contains the the FalconApiModule class, which represents a generic
Caracara API module. All modules, including Hosts, Prevention Policies, etc.
derive from this abstract base class.
"""
import logging
from abc import ABC, abstractmethod
from falconpy import OAuth2
class FalconApiModule(ABC):
    """Abstract base class shared by every Caracara API module.

    Concrete modules (Hosts, Prevention Policies, etc.) derive from this
    class and must provide the ``name`` and ``help`` properties.
    """

    @property
    @abstractmethod
    def name(self) -> str:
        """Name of the API module, for use when instantiating it."""

    @property
    @abstractmethod
    def help(self) -> str:
        """Help string exposed for the API module."""

    def __init__(self, api_authentication: OAuth2):
        """Configure the module with an authenticated FalconPy OAuth2 object."""
        module_name = type(self).__name__
        # Per-module logger named after the concrete subclass.
        self.logger = logging.getLogger(module_name)
        self.logger.debug("Initialising API module: %s", module_name)
        self.api_authentication = api_authentication
|
[
"logging.getLogger"
] |
[((936, 965), 'logging.getLogger', 'logging.getLogger', (['class_name'], {}), '(class_name)\n', (953, 965), False, 'import logging\n')]
|
import os
import socket
import ssl
from OpenSSL import crypto, SSL
import random
def getSSLContext(app=None, config_folder="/tmp/config", cert_file="app.crt", key_file="app.key"):
    """Return an SSL context backed by a self-signed certificate.

    Generates a 4096-bit RSA key and a self-signed X.509 certificate in
    ``config_folder`` if either file is missing, then builds an
    ``ssl.SSLContext`` configured for perfect forward secrecy. On very old
    Pythons lacking ``ssl.SSLContext`` a plain ``(cert_path, key_path)``
    tuple is returned instead (the form Flask's ``ssl_context`` accepts).

    :param app: optional Flask-like app; its logger receives progress info.
    :param config_folder: directory holding the generated files.
    :param cert_file: certificate filename inside ``config_folder``.
    :param key_file: private-key filename inside ``config_folder``.
    :return: an ``ssl.SSLContext`` or a ``(cert_path, key_path)`` tuple.
    """
    # exist_ok avoids the check-then-create race of the original code.
    os.makedirs(config_folder, exist_ok=True)
    cert_path = os.path.join(config_folder, cert_file)
    key_path = os.path.join(config_folder, key_file)
    if not os.path.exists(cert_path) or not os.path.exists(key_path):
        if app is not None:
            app.logger.info(" * Generating Certificate files (%s,%s)" % (cert_file, key_file))
        key = crypto.PKey()
        key.generate_key(crypto.TYPE_RSA, 4096)
        cert = crypto.X509()
        subject = cert.get_subject()
        subject.C = "US"
        subject.ST = "Example"
        subject.L = "Example"
        subject.O = "Example Company"
        subject.OU = "Example Organization"
        subject.CN = socket.gethostname()
        cert.set_serial_number(random.randint(1, 100000))
        cert.gmtime_adj_notBefore(0)
        cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)  # ~10 years
        cert.set_issuer(cert.get_subject())  # self-signed
        cert.set_pubkey(key)
        # SHA-1 signatures are rejected by modern TLS stacks; use SHA-256.
        cert.sign(key, 'sha256')
        # dump_certificate/dump_privatekey return bytes, so the files must be
        # opened in binary mode (text mode raises TypeError on Python 3);
        # context managers close the handles the original code leaked.
        with open(cert_path, "wb") as handle:
            handle.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
        with open(key_path, "wb") as handle:
            handle.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key))
        # The private key must not be world-readable.
        os.chmod(key_path, 0o600)
    if hasattr(ssl, 'SSLContext'):
        if app is not None:
            app.logger.info(" * Creating perfect forward secrecy SSL Context")
        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
        context.set_ecdh_curve('prime256v1')
        # NOTE(review): CERT_REQUIRED on a server context demands client
        # certificates, and no client CAs are loaded here — presumably
        # intentional in the original deployment; confirm before changing.
        context.verify_mode = ssl.CERT_REQUIRED
        context.set_ciphers('ECDHE-RSA-AES256-SHA')
        context.options |= ssl.OP_NO_COMPRESSION
        context.options |= ssl.OP_SINGLE_ECDH_USE
        context.options |= ssl.OP_CIPHER_SERVER_PREFERENCE
        context.load_cert_chain(cert_path, key_path)
    else:
        # Fallback accepted by Flask when SSLContext is unavailable.
        context = (cert_path, key_path)
    return context
|
[
"ssl.SSLContext",
"os.makedirs",
"OpenSSL.crypto.X509",
"random.randint",
"os.path.exists",
"socket.gethostname",
"OpenSSL.crypto.PKey",
"OpenSSL.crypto.dump_privatekey",
"os.path.join",
"OpenSSL.crypto.dump_certificate"
] |
[((340, 378), 'os.path.join', 'os.path.join', (['config_folder', 'cert_file'], {}), '(config_folder, cert_file)\n', (352, 378), False, 'import os\n'), ((394, 431), 'os.path.join', 'os.path.join', (['config_folder', 'key_file'], {}), '(config_folder, key_file)\n', (406, 431), False, 'import os\n'), ((258, 287), 'os.path.exists', 'os.path.exists', (['config_folder'], {}), '(config_folder)\n', (272, 287), False, 'import os\n'), ((297, 323), 'os.makedirs', 'os.makedirs', (['config_folder'], {}), '(config_folder)\n', (308, 323), False, 'import os\n'), ((618, 631), 'OpenSSL.crypto.PKey', 'crypto.PKey', ([], {}), '()\n', (629, 631), False, 'from OpenSSL import crypto, SSL\n'), ((693, 706), 'OpenSSL.crypto.X509', 'crypto.X509', ([], {}), '()\n', (704, 706), False, 'from OpenSSL import crypto, SSL\n'), ((962, 982), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (980, 982), False, 'import socket\n'), ((1542, 1578), 'ssl.SSLContext', 'ssl.SSLContext', (['ssl.PROTOCOL_TLSv1_2'], {}), '(ssl.PROTOCOL_TLSv1_2)\n', (1556, 1578), False, 'import ssl\n'), ((443, 468), 'os.path.exists', 'os.path.exists', (['CERT_FILE'], {}), '(CERT_FILE)\n', (457, 468), False, 'import os\n'), ((476, 500), 'os.path.exists', 'os.path.exists', (['KEY_FILE'], {}), '(KEY_FILE)\n', (490, 500), False, 'import os\n'), ((1014, 1039), 'random.randint', 'random.randint', (['(1)', '(100000)'], {}), '(1, 100000)\n', (1028, 1039), False, 'import random\n'), ((1264, 1314), 'OpenSSL.crypto.dump_certificate', 'crypto.dump_certificate', (['crypto.FILETYPE_PEM', 'cert'], {}), '(crypto.FILETYPE_PEM, cert)\n', (1287, 1314), False, 'from OpenSSL import crypto, SSL\n'), ((1351, 1397), 'OpenSSL.crypto.dump_privatekey', 'crypto.dump_privatekey', (['crypto.FILETYPE_PEM', 'k'], {}), '(crypto.FILETYPE_PEM, k)\n', (1373, 1397), False, 'from OpenSSL import crypto, SSL\n')]
|
import os
import uuid
import inflection
from django.db import models
from django.urls import reverse
from django.template.defaultfilters import slugify
from django.utils.translation import gettext_lazy as _
from django.conf import settings
from .abstracts import ArvestustRecord, ArvestustFile
from .validators.file import validate_file_size, validate_storage_quota
def file_upload_path(instance, filename):
    """Build the storage path for an uploaded file.

    The file is renamed to the record's UUID (keeping the original
    extension) and placed under
    ``<pluralised content type>/<content object's uuid>/``.
    """
    _, extension = os.path.splitext(filename)
    stored_name = f'{instance.uuid}{extension}'
    folder = inflection.pluralize(str(instance.content_type))
    return f'{folder}/{instance.content_object.uuid}/{stored_name}'
class Image(ArvestustFile):
    """Image attachment tied to an arbitrary content object.

    The uploaded file is stored on the public storage backend and renamed
    after the record's UUID by ``file_upload_path``.
    """

    file = models.ImageField(
        upload_to=file_upload_path,
        verbose_name=_('file'),
        # NOTE(review): getattr() with no default raises AttributeError at
        # import time if PUBLIC_FILE_STORAGE is absent from settings —
        # presumably it is always defined; confirm.
        storage=getattr(settings, 'PUBLIC_FILE_STORAGE'),
    )

    class Meta:
        db_table = 'arvestust_images'
        # Newest-first listings; the index backs the created_at ordering.
        indexes = [models.Index(fields=['created_at'])]
        ordering = ['-created_at']

    def get_absolute_url(self):
        """Return the canonical URL of this image's detail view (by slug)."""
        return reverse('image-detail', kwargs={'slug': self.slug})
|
[
"django.urls.reverse",
"django.db.models.Index",
"os.path.splitext",
"django.utils.translation.gettext_lazy"
] |
[((432, 458), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (448, 458), False, 'import os\n'), ((1014, 1065), 'django.urls.reverse', 'reverse', (['"""image-detail"""'], {'kwargs': "{'slug': self.slug}"}), "('image-detail', kwargs={'slug': self.slug})\n", (1021, 1065), False, 'from django.urls import reverse\n'), ((745, 754), 'django.utils.translation.gettext_lazy', '_', (['"""file"""'], {}), "('file')\n", (746, 754), True, 'from django.utils.translation import gettext_lazy as _\n'), ((894, 929), 'django.db.models.Index', 'models.Index', ([], {'fields': "['created_at']"}), "(fields=['created_at'])\n", (906, 929), False, 'from django.db import models\n')]
|
import hmac

from flask import Flask
from flask_jwt import JWT, jwt_required, current_identity
from werkzeug.security import safe_str_cmp
class User(object):
    """Minimal in-memory user record consumed by the JWT auth handlers."""

    def __init__(self, id, username, password):
        self.id = id
        self.password = password
        self.username = username

    def __str__(self):
        # Deliberately terse: only the id identifies the user in logs.
        return "User(id='%s')" % self.id
# Demo user database. The '<PASSWORD>' values are redaction placeholders
# from the dataset export — real deployments must use hashed credentials.
users = [
    User(1, 'user1', '<PASSWORD>'),
    User(2, 'user2', '<PASSWORD>'),
]
# Lookup tables: authenticate() resolves by username, identity() by user id.
username_table = {u.username: u for u in users}
userid_table = {u.id: u for u in users}
def authenticate(username, password):
    """flask_jwt authentication handler.

    Returns the matching ``User`` when the password checks out, otherwise
    ``None`` (flask_jwt treats a ``None`` return as failed authentication).

    ``werkzeug.security.safe_str_cmp`` was removed in Werkzeug 2.1; the
    stdlib ``hmac.compare_digest`` provides the same constant-time
    comparison, so timing attacks still cannot probe the password.
    """
    user = username_table.get(username, None)
    if user and hmac.compare_digest(user.password.encode('utf-8'), password.encode('utf-8')):
        return user
def identity(payload):
    """flask_jwt identity handler: map the JWT payload to a User (or None)."""
    return userid_table.get(payload['identity'], None)
app = Flask(__name__)
# NOTE(review): debug mode and a hard-coded secret key are acceptable for
# this demo only — neither must ship to production.
app.debug = True
app.config['SECRET_KEY'] = 'super-secret'
# Wire the authenticate()/identity() handlers into flask_jwt.
jwt = JWT(app, authenticate, identity)
@app.route('/protected')
@jwt_required()
def protected():
    """Example endpoint that requires a valid JWT; echoes the identity."""
    return '%s' % current_identity
if __name__ == '__main__':
    # Development server entry point (debug mode enabled above).
    app.run()
|
[
"flask.Flask",
"flask_jwt.jwt_required",
"flask_jwt.JWT"
] |
[((823, 838), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (828, 838), False, 'from flask import Flask\n'), ((905, 937), 'flask_jwt.JWT', 'JWT', (['app', 'authenticate', 'identity'], {}), '(app, authenticate, identity)\n', (908, 937), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((966, 980), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (978, 980), False, 'from flask_jwt import JWT, jwt_required, current_identity\n')]
|
from mongoengine import signals
from mass_flask_core.models import AnalysisSystem, Report
from .dispatch_request import update_dispatch_request_for_new_sample, create_requests_for_new_analysis_system
from .copy_report_tags import copy_tags_from_report_to_sample
def connect_signals():
    """Register mongoengine ``post_save`` handlers for the dispatch pipeline."""
    # Fires for every saved document, regardless of sender.
    signals.post_save.connect(update_dispatch_request_for_new_sample)
    # Sender-scoped handlers, connected in the original registration order.
    for handler, sender in (
        (create_requests_for_new_analysis_system, AnalysisSystem),
        (copy_tags_from_report_to_sample, Report),
    ):
        signals.post_save.connect(handler, sender=sender)
|
[
"mongoengine.signals.post_save.connect"
] |
[((291, 356), 'mongoengine.signals.post_save.connect', 'signals.post_save.connect', (['update_dispatch_request_for_new_sample'], {}), '(update_dispatch_request_for_new_sample)\n', (316, 356), False, 'from mongoengine import signals\n'), ((361, 455), 'mongoengine.signals.post_save.connect', 'signals.post_save.connect', (['create_requests_for_new_analysis_system'], {'sender': 'AnalysisSystem'}), '(create_requests_for_new_analysis_system, sender=\n AnalysisSystem)\n', (386, 455), False, 'from mongoengine import signals\n'), ((455, 528), 'mongoengine.signals.post_save.connect', 'signals.post_save.connect', (['copy_tags_from_report_to_sample'], {'sender': 'Report'}), '(copy_tags_from_report_to_sample, sender=Report)\n', (480, 528), False, 'from mongoengine import signals\n')]
|
from datetime import datetime, timedelta
from nose.tools import eq_
from kitsune.products.tests import ProductFactory, TopicFactory
from kitsune.search.tests.test_es import ElasticTestCase
from kitsune.wiki.tests import (
DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory)
from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType
class DocumentUpdateTests(ElasticTestCase):
    """Verify that wiki Document saves/deletes keep the search index in sync.

    Each test mutates documents, calls ``self.refresh()`` to flush the
    Elasticsearch index, then asserts on ``DocumentMappingType.search()``
    counts. Statement order (save -> refresh -> assert) is significant.
    """
    def test_add_and_delete(self):
        """Adding a doc should add it to the search index; deleting should
        delete it."""
        doc = DocumentFactory()
        RevisionFactory(document=doc, is_approved=True)
        self.refresh()
        eq_(DocumentMappingType.search().count(), 1)
        doc.delete()
        self.refresh()
        eq_(DocumentMappingType.search().count(), 0)
    def test_translations_get_parent_tags(self):
        """Translated documents inherit topic/product tags from the parent."""
        t1 = TopicFactory(display_order=1)
        t2 = TopicFactory(display_order=2)
        p = ProductFactory()
        doc1 = DocumentFactory(
            title=u'Audio too loud',
            products=[p],
            topics=[t1, t2])
        RevisionFactory(document=doc1, is_approved=True)
        doc2 = DocumentFactory(title=u'Audio too loud bork bork', parent=doc1, tags=[u'badtag'])
        RevisionFactory(document=doc2, is_approved=True)
        # Verify the parent has the right tags.
        doc_dict = DocumentMappingType.extract_document(doc1.id)
        eq_(sorted(doc_dict['topic']), sorted([t1.slug, t2.slug]))
        eq_(doc_dict['product'], [p.slug])
        # Verify the translation has the parent's tags.
        doc_dict = DocumentMappingType.extract_document(doc2.id)
        eq_(sorted(doc_dict['topic']), sorted([t1.slug, t2.slug]))
        eq_(doc_dict['product'], [p.slug])
    def test_wiki_topics(self):
        """Make sure that adding topics to a Document causes it to
        refresh the index.
        """
        t = TopicFactory(slug=u'hiphop')
        eq_(DocumentMappingType.search().filter(topic=t.slug).count(), 0)
        doc = DocumentFactory()
        RevisionFactory(document=doc, is_approved=True)
        self.refresh()
        eq_(DocumentMappingType.search().filter(topic=t.slug).count(), 0)
        doc.topics.add(t)
        self.refresh()
        eq_(DocumentMappingType.search().filter(topic=t.slug).count(), 1)
        doc.topics.clear()
        self.refresh()
        # Make sure the document itself is still there and that we didn't
        # accidentally delete it through screwed up signal handling:
        eq_(DocumentMappingType.search().filter().count(), 1)
        eq_(DocumentMappingType.search().filter(topic=t.slug).count(), 0)
    def test_wiki_products(self):
        """Make sure that adding products to a Document causes it to
        refresh the index.
        """
        p = ProductFactory(slug=u'desktop')
        eq_(DocumentMappingType.search().filter(product=p.slug).count(), 0)
        doc = DocumentFactory()
        RevisionFactory(document=doc, is_approved=True)
        self.refresh()
        eq_(DocumentMappingType.search().filter(product=p.slug).count(), 0)
        doc.products.add(p)
        self.refresh()
        eq_(DocumentMappingType.search().filter(product=p.slug).count(), 1)
        doc.products.remove(p)
        self.refresh()
        # Make sure the document itself is still there and that we didn't
        # accidentally delete it through screwed up signal handling:
        eq_(DocumentMappingType.search().filter().count(), 1)
        eq_(DocumentMappingType.search().filter(product=p.slug).count(), 0)
    def test_wiki_no_revisions(self):
        """Don't index documents without approved revisions"""
        # Create a document with no revisions and make sure the
        # document is not in the index.
        doc = DocumentFactory()
        self.refresh()
        eq_(DocumentMappingType.search().count(), 0)
        # Create a revision that's not approved and make sure the
        # document is still not in the index.
        RevisionFactory(document=doc, is_approved=False)
        self.refresh()
        eq_(DocumentMappingType.search().count(), 0)
    def test_wiki_redirects(self):
        """Make sure we don't index redirects"""
        # First create a revision that doesn't have a redirect and
        # make sure it's in the index.
        doc = DocumentFactory(title=u'wool hats')
        RevisionFactory(document=doc, is_approved=True)
        self.refresh()
        eq_(DocumentMappingType.search().query(document_title__match='wool').count(), 1)
        # Now create a revision that is a redirect and make sure the
        # document is removed from the index.
        RedirectRevisionFactory(document=doc)
        self.refresh()
        eq_(DocumentMappingType.search().query(document_title__match='wool').count(), 0)
    def test_wiki_keywords(self):
        """Make sure updating keywords updates the index."""
        # Create a document with a revision with no keywords. It
        # shouldn't show up with a document_keywords term query for
        # 'wool' since it has no keywords.
        doc = DocumentFactory(title=u'wool hats')
        RevisionFactory(document=doc, is_approved=True)
        self.refresh()
        eq_(DocumentMappingType.search().query(
            document_keywords='wool').count(), 0)
        RevisionFactory(document=doc, is_approved=True, keywords='wool')
        self.refresh()
        eq_(DocumentMappingType.search().query(document_keywords='wool').count(), 1)
    def test_recent_helpful_votes(self):
        """Recent helpful votes are indexed properly."""
        # Create a document and verify it doesn't show up in a
        # query for recent_helpful_votes__gt=0.
        r = RevisionFactory(is_approved=True)
        self.refresh()
        eq_(DocumentMappingType.search().filter(
            document_recent_helpful_votes__gt=0).count(), 0)
        # Add an unhelpful vote, it still shouldn't show up.
        HelpfulVoteFactory(revision=r, helpful=False)
        r.document.save()  # Votes don't trigger a reindex.
        self.refresh()
        eq_(DocumentMappingType.search().filter(
            document_recent_helpful_votes__gt=0).count(), 0)
        # Add an helpful vote created 31 days ago, it still shouldn't show up.
        created = datetime.now() - timedelta(days=31)
        HelpfulVoteFactory(revision=r, helpful=True, created=created)
        r.document.save()  # Votes don't trigger a reindex.
        self.refresh()
        eq_(DocumentMappingType.search().filter(
            document_recent_helpful_votes__gt=0).count(), 0)
        # Add an helpful vote created 29 days ago, it should show up now.
        created = datetime.now() - timedelta(days=29)
        HelpfulVoteFactory(revision=r, helpful=True, created=created)
        r.document.save()  # Votes don't trigger a reindex.
        self.refresh()
        eq_(DocumentMappingType.search().filter(
            document_recent_helpful_votes__gt=0).count(), 1)
class RevisionMetricsTests(ElasticTestCase):
    """Verify that Revision saves/deletes keep the revision-metrics index
    in sync and that the indexed fields carry the expected values.
    """
    def test_add_and_delete(self):
        """Adding a revision should add it to the index.
        Deleting should delete it.
        """
        r = RevisionFactory()
        self.refresh()
        eq_(RevisionMetricsMappingType.search().count(), 1)
        r.delete()
        self.refresh()
        eq_(RevisionMetricsMappingType.search().count(), 0)
    def test_data_in_index(self):
        """Verify the data we are indexing."""
        p = ProductFactory()
        # Localized child document so both locale and product are exercised.
        base_doc = DocumentFactory(locale='en-US', products=[p])
        d = DocumentFactory(locale='es', parent=base_doc)
        r = RevisionFactory(document=d, is_approved=True)
        self.refresh()
        eq_(RevisionMetricsMappingType.search().count(), 1)
        data = RevisionMetricsMappingType.search()[0]
        eq_(data['is_approved'], r.is_approved)
        eq_(data['locale'], d.locale)
        eq_(data['product'], [p.slug])
        eq_(data['creator_id'], r.creator_id)
|
[
"kitsune.wiki.tests.RedirectRevisionFactory",
"kitsune.wiki.tests.HelpfulVoteFactory",
"kitsune.products.tests.TopicFactory",
"kitsune.wiki.models.RevisionMetricsMappingType.search",
"kitsune.wiki.models.DocumentMappingType.search",
"datetime.datetime.now",
"kitsune.wiki.models.DocumentMappingType.extract_document",
"kitsune.wiki.tests.DocumentFactory",
"nose.tools.eq_",
"datetime.timedelta",
"kitsune.products.tests.ProductFactory",
"kitsune.wiki.tests.RevisionFactory"
] |
[((579, 596), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {}), '()\n', (594, 596), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((605, 652), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'doc', 'is_approved': '(True)'}), '(document=doc, is_approved=True)\n', (620, 652), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((890, 919), 'kitsune.products.tests.TopicFactory', 'TopicFactory', ([], {'display_order': '(1)'}), '(display_order=1)\n', (902, 919), False, 'from kitsune.products.tests import ProductFactory, TopicFactory\n'), ((933, 962), 'kitsune.products.tests.TopicFactory', 'TopicFactory', ([], {'display_order': '(2)'}), '(display_order=2)\n', (945, 962), False, 'from kitsune.products.tests import ProductFactory, TopicFactory\n'), ((975, 991), 'kitsune.products.tests.ProductFactory', 'ProductFactory', ([], {}), '()\n', (989, 991), False, 'from kitsune.products.tests import ProductFactory, TopicFactory\n'), ((1007, 1078), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {'title': 'u"""Audio too loud"""', 'products': '[p]', 'topics': '[t1, t2]'}), "(title=u'Audio too loud', products=[p], topics=[t1, t2])\n", (1022, 1078), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((1124, 1172), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'doc1', 'is_approved': '(True)'}), '(document=doc1, is_approved=True)\n', (1139, 1172), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((1189, 1275), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {'title': 'u"""Audio too loud bork bork"""', 'parent': 'doc1', 'tags': "[u'badtag']"}), "(title=u'Audio too loud bork bork', parent=doc1, tags=[\n 
u'badtag'])\n", (1204, 1275), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((1279, 1327), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'doc2', 'is_approved': '(True)'}), '(document=doc2, is_approved=True)\n', (1294, 1327), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((1396, 1441), 'kitsune.wiki.models.DocumentMappingType.extract_document', 'DocumentMappingType.extract_document', (['doc1.id'], {}), '(doc1.id)\n', (1432, 1441), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((1517, 1551), 'nose.tools.eq_', 'eq_', (["doc_dict['product']", '[p.slug]'], {}), "(doc_dict['product'], [p.slug])\n", (1520, 1551), False, 'from nose.tools import eq_\n'), ((1628, 1673), 'kitsune.wiki.models.DocumentMappingType.extract_document', 'DocumentMappingType.extract_document', (['doc2.id'], {}), '(doc2.id)\n', (1664, 1673), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((1749, 1783), 'nose.tools.eq_', 'eq_', (["doc_dict['product']", '[p.slug]'], {}), "(doc_dict['product'], [p.slug])\n", (1752, 1783), False, 'from nose.tools import eq_\n'), ((1936, 1964), 'kitsune.products.tests.TopicFactory', 'TopicFactory', ([], {'slug': 'u"""hiphop"""'}), "(slug=u'hiphop')\n", (1948, 1964), False, 'from kitsune.products.tests import ProductFactory, TopicFactory\n'), ((2053, 2070), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {}), '()\n', (2068, 2070), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((2079, 2126), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'doc', 'is_approved': '(True)'}), '(document=doc, is_approved=True)\n', (2094, 2126), False, 'from kitsune.wiki.tests import DocumentFactory, 
RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((2834, 2865), 'kitsune.products.tests.ProductFactory', 'ProductFactory', ([], {'slug': 'u"""desktop"""'}), "(slug=u'desktop')\n", (2848, 2865), False, 'from kitsune.products.tests import ProductFactory, TopicFactory\n'), ((2956, 2973), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {}), '()\n', (2971, 2973), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((2982, 3029), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'doc', 'is_approved': '(True)'}), '(document=doc, is_approved=True)\n', (2997, 3029), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((3813, 3830), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {}), '()\n', (3828, 3830), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((4027, 4075), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'doc', 'is_approved': '(False)'}), '(document=doc, is_approved=False)\n', (4042, 4075), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((4357, 4392), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {'title': 'u"""wool hats"""'}), "(title=u'wool hats')\n", (4372, 4392), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((4401, 4448), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'doc', 'is_approved': '(True)'}), '(document=doc, is_approved=True)\n', (4416, 4448), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((4685, 4722), 'kitsune.wiki.tests.RedirectRevisionFactory', 'RedirectRevisionFactory', ([], 
{'document': 'doc'}), '(document=doc)\n', (4708, 4722), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((5121, 5156), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {'title': 'u"""wool hats"""'}), "(title=u'wool hats')\n", (5136, 5156), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((5165, 5212), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'doc', 'is_approved': '(True)'}), '(document=doc, is_approved=True)\n', (5180, 5212), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((5343, 5407), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'doc', 'is_approved': '(True)', 'keywords': '"""wool"""'}), "(document=doc, is_approved=True, keywords='wool')\n", (5358, 5407), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((5739, 5772), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'is_approved': '(True)'}), '(is_approved=True)\n', (5754, 5772), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((5976, 6021), 'kitsune.wiki.tests.HelpfulVoteFactory', 'HelpfulVoteFactory', ([], {'revision': 'r', 'helpful': '(False)'}), '(revision=r, helpful=False)\n', (5994, 6021), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((6357, 6418), 'kitsune.wiki.tests.HelpfulVoteFactory', 'HelpfulVoteFactory', ([], {'revision': 'r', 'helpful': '(True)', 'created': 'created'}), '(revision=r, helpful=True, created=created)\n', (6375, 6418), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((6749, 6810), 
'kitsune.wiki.tests.HelpfulVoteFactory', 'HelpfulVoteFactory', ([], {'revision': 'r', 'helpful': '(True)', 'created': 'created'}), '(revision=r, helpful=True, created=created)\n', (6767, 6810), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((7203, 7220), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {}), '()\n', (7218, 7220), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((7501, 7517), 'kitsune.products.tests.ProductFactory', 'ProductFactory', ([], {}), '()\n', (7515, 7517), False, 'from kitsune.products.tests import ProductFactory, TopicFactory\n'), ((7537, 7582), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {'locale': '"""en-US"""', 'products': '[p]'}), "(locale='en-US', products=[p])\n", (7552, 7582), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((7595, 7640), 'kitsune.wiki.tests.DocumentFactory', 'DocumentFactory', ([], {'locale': '"""es"""', 'parent': 'base_doc'}), "(locale='es', parent=base_doc)\n", (7610, 7640), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((7653, 7698), 'kitsune.wiki.tests.RevisionFactory', 'RevisionFactory', ([], {'document': 'd', 'is_approved': '(True)'}), '(document=d, is_approved=True)\n', (7668, 7698), False, 'from kitsune.wiki.tests import DocumentFactory, RevisionFactory, HelpfulVoteFactory, RedirectRevisionFactory\n'), ((7846, 7885), 'nose.tools.eq_', 'eq_', (["data['is_approved']", 'r.is_approved'], {}), "(data['is_approved'], r.is_approved)\n", (7849, 7885), False, 'from nose.tools import eq_\n'), ((7894, 7923), 'nose.tools.eq_', 'eq_', (["data['locale']", 'd.locale'], {}), "(data['locale'], d.locale)\n", (7897, 7923), False, 'from nose.tools import eq_\n'), ((7932, 7962), 'nose.tools.eq_', 'eq_', 
(["data['product']", '[p.slug]'], {}), "(data['product'], [p.slug])\n", (7935, 7962), False, 'from nose.tools import eq_\n'), ((7971, 8008), 'nose.tools.eq_', 'eq_', (["data['creator_id']", 'r.creator_id'], {}), "(data['creator_id'], r.creator_id)\n", (7974, 8008), False, 'from nose.tools import eq_\n'), ((6313, 6327), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6325, 6327), False, 'from datetime import datetime, timedelta\n'), ((6330, 6348), 'datetime.timedelta', 'timedelta', ([], {'days': '(31)'}), '(days=31)\n', (6339, 6348), False, 'from datetime import datetime, timedelta\n'), ((6705, 6719), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6717, 6719), False, 'from datetime import datetime, timedelta\n'), ((6722, 6740), 'datetime.timedelta', 'timedelta', ([], {'days': '(29)'}), '(days=29)\n', (6731, 6740), False, 'from datetime import datetime, timedelta\n'), ((7799, 7834), 'kitsune.wiki.models.RevisionMetricsMappingType.search', 'RevisionMetricsMappingType.search', ([], {}), '()\n', (7832, 7834), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((688, 716), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (714, 716), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((786, 814), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (812, 814), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((3866, 3894), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (3892, 3894), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((4111, 4139), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (4137, 4139), False, 'from kitsune.wiki.models import DocumentMappingType, 
RevisionMetricsMappingType\n'), ((7256, 7291), 'kitsune.wiki.models.RevisionMetricsMappingType.search', 'RevisionMetricsMappingType.search', ([], {}), '()\n', (7289, 7291), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((7359, 7394), 'kitsune.wiki.models.RevisionMetricsMappingType.search', 'RevisionMetricsMappingType.search', ([], {}), '()\n', (7392, 7394), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((7736, 7771), 'kitsune.wiki.models.RevisionMetricsMappingType.search', 'RevisionMetricsMappingType.search', ([], {}), '()\n', (7769, 7771), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((1977, 2005), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (2003, 2005), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((2162, 2190), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (2188, 2190), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((2285, 2313), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (2311, 2313), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((2553, 2581), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (2579, 2581), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((2616, 2644), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (2642, 2644), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((2878, 2906), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (2904, 2906), False, 'from kitsune.wiki.models 
import DocumentMappingType, RevisionMetricsMappingType\n'), ((3065, 3093), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (3091, 3093), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((3192, 3220), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (3218, 3220), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((3466, 3494), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (3492, 3494), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((3529, 3557), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (3555, 3557), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((4484, 4512), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (4510, 4512), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((4758, 4786), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (4784, 4786), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((5248, 5276), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (5274, 5276), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((5444, 5472), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (5470, 5472), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((5808, 5836), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (5834, 5836), False, 'from kitsune.wiki.models import 
DocumentMappingType, RevisionMetricsMappingType\n'), ((6117, 6145), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (6143, 6145), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((6514, 6542), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (6540, 6542), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n'), ((6906, 6934), 'kitsune.wiki.models.DocumentMappingType.search', 'DocumentMappingType.search', ([], {}), '()\n', (6932, 6934), False, 'from kitsune.wiki.models import DocumentMappingType, RevisionMetricsMappingType\n')]
|
"""
stateinterpreter
Interpretation of metastable states from MD simulations
"""
import sys
from setuptools import setup, find_packages, Extension
import versioneer
import numpy
# Platform-specific build configuration: GCC-style flags by default, with
# clang/libomp adjustments on macOS (OpenMP lives in libomp there and must be
# routed through -Xpreprocessor).
os_name = sys.platform
compile_args = ["-O3", "-ffast-math", "-march=native", "-fopenmp"]
libraries = ["m"]
link_args = ['-fopenmp']
if os_name.startswith('darwin'):
    # clang compilation
    compile_args.insert(-1, "-Xpreprocessor")
    libraries.append("omp")
    link_args.insert(-1, "-Xpreprocessor")

# Build from the .pyx sources when Cython is importable, otherwise fall back
# to the pre-generated C sources shipped with the package.
__cython__ = False # command line option, try-import, ...
try:
    import Cython
    __cython__ = True
except ModuleNotFoundError:
    __cython__ = False
ext = '.pyx' if __cython__ else '.c'

short_description = "Interpretation of metastable states from MD simulations".split("\n")[0]

# from https://github.com/pytest-dev/pytest-runner#conditional-requirement
needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv)
pytest_runner = ['pytest-runner'] if needs_pytest else []

try:
    with open("README.md", "r") as handle:
        long_description = handle.read()
except OSError:
    # README may be absent (e.g. installing from a stripped sdist).
    # Narrowed from a bare `except:`, which would also swallow
    # KeyboardInterrupt/SystemExit.
    long_description = None

ext_modules = [
    Extension("stateinterpreter.utils._compiled_numerics",
              ["stateinterpreter/utils/_compiled_numerics" + ext],
              libraries=libraries,
              include_dirs=[numpy.get_include()],
              extra_compile_args=compile_args,
              extra_link_args=link_args)
]

if __cython__:
    from Cython.Build import cythonize
    ext_modules = cythonize(ext_modules)

setup(
    # Self-descriptive entries which should always be present
    name='stateinterpreter',
    author='<NAME> <<EMAIL>>, <NAME> <pietro.<EMAIL>li.iit>"',
    description=short_description,
    long_description=long_description,
    long_description_content_type="text/markdown",
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    license='MIT',
    # Which Python importable modules should be included when your package is installed
    # Handled automatically by setuptools. Use 'exclude' to prevent some specific
    # subpackage(s) from being added, if needed
    packages=find_packages(),
    # Optional include package data to ship with your package
    # Customize MANIFEST.in if the general case does not suit your needs
    # Comment out this line to prevent the files from being packaged with your software
    include_package_data=True,
    # Allows `setup.py test` to work correctly with pytest
    setup_requires=[] + pytest_runner,
    ext_modules=ext_modules,
    zip_safe=False,
)
|
[
"versioneer.get_version",
"Cython.Build.cythonize",
"versioneer.get_cmdclass",
"numpy.get_include",
"setuptools.find_packages"
] |
[((1487, 1509), 'Cython.Build.cythonize', 'cythonize', (['ext_modules'], {}), '(ext_modules)\n', (1496, 1509), False, 'from Cython.Build import cythonize\n'), ((1809, 1833), 'versioneer.get_version', 'versioneer.get_version', ([], {}), '()\n', (1831, 1833), False, 'import versioneer\n'), ((1848, 1873), 'versioneer.get_cmdclass', 'versioneer.get_cmdclass', ([], {}), '()\n', (1871, 1873), False, 'import versioneer\n'), ((2126, 2141), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (2139, 2141), False, 'from setuptools import setup, find_packages, Extension\n'), ((1297, 1316), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (1314, 1316), False, 'import numpy\n')]
|
import os
from django.urls import reverse
from zentral.contrib.munki.forms import EnrollmentForm
from zentral.utils.osx_package import EnrollmentPackageBuilder
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class MunkiZentralEnrollPkgBuilder(EnrollmentPackageBuilder):
    """Builds the Zentral Munki enrollment macOS package.

    Fills in TLS and enrollment placeholders in the packaged postflight and
    postinstall scripts during the build.
    """

    name = "Zentral Munki Enrollment"
    form = EnrollmentForm
    package_name = "zentral_munki_enroll.pkg"
    base_package_identifier = "io.zentral.munki_enroll"
    build_tmpl_dir = os.path.join(BASE_DIR, "build.tmpl")

    def extra_build_steps(self):
        """Substitute the %PLACEHOLDER% tokens in the package scripts."""
        # Munki zentral postflight script.
        postflight = self.get_root_path("usr/local/zentral/munki/zentral_postflight")
        postflight_subs = (
            ("%TLS_HOSTNAME%", self.get_tls_hostname()),
            ("%TLS_SERVER_CERTS%", self.include_tls_server_certs()),
        )
        self.replace_in_file(postflight, postflight_subs)
        # Package postinstall script.
        enrollment_url = "https://{}{}".format(self.get_tls_hostname(), reverse("munki:enroll"))
        postinstall = self.get_build_path("scripts", "postinstall")
        postinstall_subs = (
            ("%TLS_HOSTNAME%", self.get_tls_hostname()),
            ("%TLS_CA_CERT%", self.include_tls_ca_cert()),
            ("%ENROLLMENT_SECRET%", self.build_kwargs["enrollment_secret_secret"]),
            ("%ENROLLMENT_URL%", enrollment_url),
        )
        self.replace_in_file(postinstall, postinstall_subs)
|
[
"django.urls.reverse",
"os.path.abspath",
"os.path.join"
] |
[((188, 213), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (203, 213), False, 'import os\n'), ((466, 502), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""build.tmpl"""'], {}), "(BASE_DIR, 'build.tmpl')\n", (478, 502), False, 'import os\n'), ((985, 1008), 'django.urls.reverse', 'reverse', (['"""munki:enroll"""'], {}), "('munki:enroll')\n", (992, 1008), False, 'from django.urls import reverse\n')]
|
# Here we provide the key functions for tile-coding. To avoid huge dimensionality expansion, we have tiled
# per feature variable, but using feature-column cross functionality a pair of feature-variables
# also can be tiled, and also higher orders.
from typing import List
import numpy as np
import tensorflow as tf
from tensorflow.python.ops import math_ops
class Tilings(object):
    """Per-feature tile coding with several mutually offset tilings.

    Each feature variable is tiled independently against its configured bucket
    boundaries; ``num_tilings`` shifted copies of the boundaries are stacked so
    neighbouring values fall into overlapping buckets.
    """

    def __init__(self, tile_strategy_boundaries, num_tilings):
        self.num_tilings = num_tilings
        self.tile_strategy_boundaries = tile_strategy_boundaries

    def _get_stack_tiling_boundaries(self, boundaries) -> List[List[float]]:
        """Return ``num_tilings`` copies of *boundaries*, each shifted by a
        fraction of the local bucket width (last boundary is never shifted)."""
        base = np.array(boundaries)
        steps = [float(base[j + 1] - base[j]) / self.num_tilings
                 for j in range(len(base) - 1)]
        offsets = np.array(steps + [0])
        return [list(base + k * offsets) for k in range(self.num_tilings)]

    @staticmethod
    def _get_tiles(input_data, list_boundaries: List[List[float]]):
        """Bucketize *input_data* once per tiling and concatenate the indices,
        shifting each tiling's bucket ids into its own disjoint range."""
        casted = tf.cast(input_data, tf.float64)
        tiles = []
        for idx, bounds in enumerate(list_boundaries):
            buckets = math_ops.bucketize(casted, bounds)
            buckets = tf.reshape(buckets, (-1, 1))
            tiles.append(tf.math.add(buckets, idx * (len(bounds) - 1)))
        return tf.concat(tiles, axis=1)

    def get_features_tiles(self, features):
        """Map each configured feature name to its tile-index tensor."""
        return {
            name: Tilings._get_tiles(features[name],
                                     self._get_stack_tiling_boundaries(bounds))
            for name, bounds in self.tile_strategy_boundaries.items()
        }
|
[
"tensorflow.python.ops.math_ops.bucketize",
"tensorflow.reshape",
"tensorflow.concat",
"tensorflow.cast",
"numpy.array"
] |
[((652, 672), 'numpy.array', 'np.array', (['boundaries'], {}), '(boundaries)\n', (660, 672), True, 'import numpy as np\n'), ((1062, 1093), 'tensorflow.cast', 'tf.cast', (['input_data', 'tf.float64'], {}), '(input_data, tf.float64)\n', (1069, 1093), True, 'import tensorflow as tf\n'), ((1452, 1480), 'tensorflow.concat', 'tf.concat', (['all_tiles'], {'axis': '(1)'}), '(all_tiles, axis=1)\n', (1461, 1480), True, 'import tensorflow as tf\n'), ((1183, 1227), 'tensorflow.python.ops.math_ops.bucketize', 'math_ops.bucketize', (['input_tensor', 'boundaries'], {}), '(input_tensor, boundaries)\n', (1201, 1227), False, 'from tensorflow.python.ops import math_ops\n'), ((1260, 1298), 'tensorflow.reshape', 'tf.reshape', (['bucketized_tensor', '(-1, 1)'], {}), '(bucketized_tensor, (-1, 1))\n', (1270, 1298), True, 'import tensorflow as tf\n')]
|
from setuptools import setup
def readme():
    # Return the project README contents.
    # NOTE(review): currently unused — setup() below never passes
    # long_description; confirm whether it should.
    with open('README.md') as f:
        return f.read()

# Packaging metadata for the EOSim distribution (runs at import time).
setup(
    name='EOSim',
    version='0.1',
    description='Earth Observation Simulation',
    author='BAERI',
    author_email='<EMAIL>',
    packages=['eosim'],
    scripts=[
    ],
    # Cartopy installation may not work due to additional dependencies it requires.
    # CartoPy dependencies must be installed before running this setup.
    # If using conda, cartopy along with its dependencies can be installed using the command `conda install -c conda-forge cartopy`
    # In case of Runtime errors involving numpy, try the following command: `pip install numpy --upgrade --ignore-installed`
    install_requires=['numpy', 'pandas', 'scipy', 'lowtran', 'astropy', 'cartopy']
)
|
[
"setuptools.setup"
] |
[((102, 345), 'setuptools.setup', 'setup', ([], {'name': '"""EOSim"""', 'version': '"""0.1"""', 'description': '"""Earth Observation Simulation"""', 'author': '"""BAERI"""', 'author_email': '"""<EMAIL>"""', 'packages': "['eosim']", 'scripts': '[]', 'install_requires': "['numpy', 'pandas', 'scipy', 'lowtran', 'astropy', 'cartopy']"}), "(name='EOSim', version='0.1', description=\n 'Earth Observation Simulation', author='BAERI', author_email='<EMAIL>',\n packages=['eosim'], scripts=[], install_requires=['numpy', 'pandas',\n 'scipy', 'lowtran', 'astropy', 'cartopy'])\n", (107, 345), False, 'from setuptools import setup\n')]
|
# Copyright (c) 2019 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities to handle processes."""
import logging
import os
from oslo_concurrency import processutils
LOG = logging.getLogger(__name__)


def execute(*cmd, **kwargs):
    """Convenience wrapper around oslo's execute() method.

    Executes and logs results from a system command. See docs for
    oslo_concurrency.processutils.execute for usage.

    :param \\*cmd: positional arguments to pass to processutils.execute()
    :param use_standard_locale: keyword-only argument. True | False.
                                Defaults to False. If set to True,
                                execute command with standard locale
                                added to environment variables.
    :param log_stdout: keyword-only argument. True | False. Defaults
                       to True. If set to True, logs the output.
    :param \\*\\*kwargs: keyword arguments to pass to processutils.execute()
    :returns: (stdout, stderr) from process execution
    :raises: UnknownArgumentError on receiving unknown arguments
    :raises: ProcessExecutionError
    :raises: OSError
    """
    logger = kwargs.pop('logger', LOG)
    use_standard_locale = kwargs.pop('use_standard_locale', False)
    if use_standard_locale:
        # Copy the caller-supplied environment (or the process environment)
        # before modifying it, so the caller's dict is never mutated.
        env = dict(kwargs.pop('env_variables', os.environ))
        env['LC_ALL'] = 'C'
        kwargs['env_variables'] = env
    log_stdout = kwargs.pop('log_stdout', True)
    result = processutils.execute(*cmd, **kwargs)
    logger.debug('Execution completed, command line is "%s"',
                 ' '.join(map(str, cmd)))
    if log_stdout:
        logger.debug('Command stdout is: "%s"', result[0])
    logger.debug('Command stderr is: "%s"', result[1])
    return result
|
[
"os.environ.copy",
"oslo_concurrency.processutils.execute",
"logging.getLogger"
] |
[((696, 723), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (713, 723), False, 'import logging\n'), ((1995, 2031), 'oslo_concurrency.processutils.execute', 'processutils.execute', (['*cmd'], {}), '(*cmd, **kwargs)\n', (2015, 2031), False, 'from oslo_concurrency import processutils\n'), ((1849, 1866), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (1864, 1866), False, 'import os\n')]
|
from collections import deque
from random import *
class Maze():
    """Randomised maze built with iterative depth-first search ("recursive
    backtracker") and rendered on a Tk canvas.

    Internally the grid starts as all-True (walls); carved passages become
    False and the player start cell is set to 2. The True/False values
    compare equal to 1/0 in the == checks below.
    """
    def __init__(self, player, canvas, y, x):
        self.player = player
        self.canvas = canvas
        # Logical (y, x) cell counts are doubled (minus one) so that wall
        # cells can sit between passage cells.
        self.size = {"x": 2 * x - 1,
                     "y": 2 * y - 1}
    # 0 = air, 1 = wall, 2 = player. 3 = target
    def generate(self):
        """Carve the maze in place and store it as ``self.maze``.

        Uses ``randrange``/``choice`` from ``random`` (star-imported), so the
        result depends on the global random state.
        """
        def UnvisitedNeighbours(cell):
            # Passage cells sit two grid steps apart; a still-True cell two
            # steps away has not been carved (visited) yet.
            neighbours = []
            if cell[0] + 2 < self.size["x"]:
                if maze[cell[0] + 2][cell[1]]:
                    neighbours.append("r")
            if cell[0] - 2 >= 0:
                if maze[cell[0] - 2][cell[1]]:
                    neighbours.append("l")
            if cell[1] + 2 < self.size["y"]:
                if maze[cell[0]][cell[1] + 2]:
                    neighbours.append("u")
            if cell[1] - 2 >= 0:
                if maze[cell[0]][cell[1] - 2]:
                    neighbours.append("d")
            return neighbours
        stack = deque()
        maze = []
        for i in range(self.size["x"]):
            maze.append([True] * (self.size["y"]))
        # Choose the initial cell, mark it as visited and push it to the stack
        current_cell = [randrange(2, self.size["x"], 2), randrange(2, self.size["y"], 2)]
        maze[current_cell[0]][current_cell[1]] = False
        stack.append(current_cell)
        # While the stack is not empty
        while(len(stack) > 0):
            # Pop a cell from the stack and make it a current cell
            current_cell = stack.pop()
            # maze[current_cell[0]][current_cell[1]] = False
            # If the current cell has any neighbours which have not been visited
            if len(neighbours := UnvisitedNeighbours(current_cell)):
                # Push the current cell to the stack
                stack.append(current_cell)
                # Choose one of the unvisited neighbours
                direction = choice(neighbours)
                # Remove the wall between the current cell and the chosen cell
                # (carve both the in-between wall cell and the target cell,
                # then push the target so the walk continues from it).
                if direction == "u":
                    maze[current_cell[0]][current_cell[1] + 1] = False
                    maze[current_cell[0]][current_cell[1] + 2] = False
                    stack.append([current_cell[0], current_cell[1] + 2])
                elif direction == "d":
                    maze[current_cell[0]][current_cell[1] - 1] = False
                    maze[current_cell[0]][current_cell[1] - 2] = False
                    stack.append([current_cell[0], current_cell[1] - 2])
                elif direction == "l":
                    maze[current_cell[0] - 1][current_cell[1]] = False
                    maze[current_cell[0] - 2][current_cell[1]] = False
                    stack.append([current_cell[0] - 2, current_cell[1]])
                elif direction == "r":
                    maze[current_cell[0] + 1][current_cell[1]] = False
                    maze[current_cell[0] + 2][current_cell[1]] = False
                    stack.append([current_cell[0] + 2, current_cell[1]])
        # Mark the player start at the grid origin.
        # NOTE(review): [0][0] lies on the outer wall ring rather than on a
        # carved (even-index) passage cell — confirm this is intentional.
        maze[0][0] = 2
        self.maze = maze
    """
    Choose the initial cell, mark it as visited and push it to the stack
    While the stack is not empty
        Pop a cell from the stack and make it a current cell
        If the current cell has any neighbours which have not been visited
            Push the current cell to the stack
            Choose one of the unvisited neighbours
            Remove the wall between the current cell and the chosen cell
            Mark the chosen cell as visited and push it to the stack
    """
    def build(self, player, scale):
        """Draw the generated maze on the canvas at ``scale`` pixels per cell
        and place ``player`` on the cell marked 2."""
        self.scale = scale
        self.obstacles = []
        self.player = player
        for y in range(len(self.maze)):
            for x in range(len(self.maze[y])):
                if self.maze[y][x] == 1:
                    # Wall cell (True == 1): draw a filled square.
                    self.obstacles.append(self.canvas.create_rectangle(
                        x * self.scale, y * self.scale,
                        x * self.scale + self.scale, y * self.scale + self.scale,
                        fill="black"))
                elif self.maze[y][x] == 2:
                    # Player start cell: centre the player sprite on it.
                    player.setCoords((x + 0.5) * scale, (y + 0.5) * scale)
                    self.playerCoords = {"x": x, "y": y}
    def canMove(self, _direction, move=False):
        """Return True if the player can step one cell in ``_direction``
        ("up"/"down"/"left"/"right", case-insensitive).

        When ``move`` is True and the step is legal, also update
        ``self.playerCoords``. Raises ValueError on an unknown direction.
        """
        newPos = []
        direction = _direction.lower()
        if direction == "up":
            newPos = [self.playerCoords["x"], self.playerCoords["y"] - 1]
        elif direction == "down":
            newPos = [self.playerCoords["x"], self.playerCoords["y"] + 1]
        elif direction == "left":
            newPos = [self.playerCoords["x"] - 1, self.playerCoords["y"]]
        elif direction == "right":
            newPos = [self.playerCoords["x"] + 1, self.playerCoords["y"]]
        else:
            raise ValueError("Unknown direction: " + direction +
                             ". Direction must be one of these: up, down, left, right (ignore case).")
        # out of maze bounds
        if newPos[0] < 0 or newPos[1] < 0:
            return False
        if newPos[1] > len(self.maze) - 1:
            return False
        if newPos[0] > len(self.maze[newPos[1]]) - 1:
            return False
        # obstacle
        if self.maze[newPos[1]][newPos[0]] == 1:
            return False
        if move:
            self.playerCoords["x"] = newPos[0]
            self.playerCoords["y"] = newPos[1]
        return True
|
[
"collections.deque"
] |
[((957, 964), 'collections.deque', 'deque', ([], {}), '()\n', (962, 964), False, 'from collections import deque\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    # Auto-generated Django migration: creates the Article and Column models
    # for the news app. The \uXXXX escapes are Chinese verbose names; the
    # translations are noted inline.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=256, verbose_name='\u6807\u9898')),  # "title"
                ('slug', models.CharField(max_length=256, verbose_name='\u7f51\u5740', db_index=True)),  # "URL"
                ('content', models.TextField(default='', verbose_name='\u5185\u5bb9', blank=True)),  # "content"
                ('published', models.BooleanField(default=True, verbose_name='\u6b63\u5f0f\u53d1\u5e03')),  # "officially published"
                ('author', models.ForeignKey(verbose_name='\u4f5c\u8005', blank=True, to=settings.AUTH_USER_MODEL, null=True)),  # "author"
            ],
            options={
                'verbose_name': '\u6559\u7a0b',  # "tutorial"
                'verbose_name_plural': '\u6559\u7a0b',
            },
        ),
        migrations.CreateModel(
            name='Column',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=256, verbose_name='\u680f\u76ee\u540d\u79f0')),  # "column name"
                ('slug', models.CharField(max_length=256, verbose_name='\u680f\u76ee\u7f51\u5740', db_index=True)),  # "column URL"
                ('intro', models.TextField(default='', verbose_name='\u680f\u76ee\u7b80\u4ecb')),  # "column intro"
            ],
            options={
                'ordering': ['name'],
                'verbose_name': '\u680f\u76ee',  # "column"
                'verbose_name_plural': '\u680f\u76ee',
            },
        ),
        migrations.AddField(
            model_name='article',
            name='column',
            field=models.ManyToManyField(to='news.Column', verbose_name='\u5f52\u5c5e\u680f\u76ee'),  # "belonging column"
        ),
    ]
|
[
"django.db.models.TextField",
"django.db.migrations.swappable_dependency",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField"
] |
[((210, 267), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (241, 267), False, 'from django.db import models, migrations\n'), ((2008, 2069), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""news.Column"""', 'verbose_name': '"""归属栏目"""'}), "(to='news.Column', verbose_name='归属栏目')\n", (2030, 2069), False, 'from django.db import models, migrations\n'), ((399, 492), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (415, 492), False, 'from django.db import models, migrations\n'), ((517, 568), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'verbose_name': '"""标题"""'}), "(max_length=256, verbose_name='标题')\n", (533, 568), False, 'from django.db import models, migrations\n'), ((606, 672), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'verbose_name': '"""网址"""', 'db_index': '(True)'}), "(max_length=256, verbose_name='网址', db_index=True)\n", (622, 672), False, 'from django.db import models, migrations\n'), ((713, 772), 'django.db.models.TextField', 'models.TextField', ([], {'default': '""""""', 'verbose_name': '"""内容"""', 'blank': '(True)'}), "(default='', verbose_name='内容', blank=True)\n", (729, 772), False, 'from django.db import models, migrations\n'), ((815, 869), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""正式发布"""'}), "(default=True, verbose_name='正式发布')\n", (834, 869), False, 'from django.db import models, migrations\n'), ((919, 1012), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'verbose_name': '"""作者"""', 'blank': '(True)', 'to': 'settings.AUTH_USER_MODEL', 'null': '(True)'}), "(verbose_name='作者', blank=True, 
to=settings.\n AUTH_USER_MODEL, null=True)\n", (936, 1012), False, 'from django.db import models, migrations\n'), ((1289, 1382), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (1305, 1382), False, 'from django.db import models, migrations\n'), ((1406, 1459), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'verbose_name': '"""栏目名称"""'}), "(max_length=256, verbose_name='栏目名称')\n", (1422, 1459), False, 'from django.db import models, migrations\n'), ((1507, 1575), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'verbose_name': '"""栏目网址"""', 'db_index': '(True)'}), "(max_length=256, verbose_name='栏目网址', db_index=True)\n", (1523, 1575), False, 'from django.db import models, migrations\n'), ((1624, 1673), 'django.db.models.TextField', 'models.TextField', ([], {'default': '""""""', 'verbose_name': '"""栏目简介"""'}), "(default='', verbose_name='栏目简介')\n", (1640, 1673), False, 'from django.db import models, migrations\n')]
|
# -*- coding: utf-8 -*-"""
"""
Setup file for pymusepipe.
Use setup.cfg to configure your project.
"""
# Licensed under a MIT style license - see LICENSE.txt
from __future__ import absolute_import, division, print_function
from setuptools import setup, find_packages
# Read __version__ by executing version.py, so the package itself (and its
# heavy dependencies) never has to be imported at build time.
version = {}
with open("src/pymusepipe/version.py") as fp:
    exec(fp.read(), version)
with open('README.md', 'r') as f:
    readme = f.read()
# NOTE(review): `license` shadows the builtin of the same name; harmless in a
# setup script, and the value is not actually passed to setup() below.
with open('LICENSE.txt') as f:
    license = f.read()
setup(name='pymusepipe',
      version = version['__version__'],
      description='python module to reduce MUSE Raw data and combine them',
      long_description=readme,
      long_description_content_type="text/markdown",
      keywords=['MUSE', 'DATAREDUCTION'],
      url="https://github.com/emsellem/pymusepipe",
      download_url="https://github.com/emsellem/pymusepipe/archive/v2.9.6.beta.tar.gz",
      author='<NAME>',
      author_email='<EMAIL>',
      license="MIT",
      packages=find_packages(exclude=('tests', 'docs')),
      install_requires=['mpdaf', 'numpy', 'scipy', 'astropy'],
      include_package_data=True,
      zip_safe=False,
      classifiers=[
          "Programming Language :: Python :: 3",
          "License :: OSI Approved :: MIT License",
          "Operating System :: OS Independent",
      ],
      )
|
[
"setuptools.find_packages"
] |
[((974, 1014), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "('tests', 'docs')"}), "(exclude=('tests', 'docs'))\n", (987, 1014), False, 'from setuptools import setup, find_packages\n')]
|
import tensorflow as tf
import numpy as np
import src.utils as utils
"""
Implementation of InfoVAE
https://arxiv.org/abs/1706.02262
"""
def reparameterise(x, n, stddev):
    """Split x into mean/scale halves along the channel axis and sample.

    Models each output as Gaussian distributed and uses the
    reparameterisation trick so sampling stays differentiable.

    Args:
        x: 4-D tensor whose last axis packs [mean (first n channels),
           scale (remaining channels)].
        n (int): number of mean channels.
        stddev (float): stddev of the base noise e.
    """
    with tf.name_scope('reparameterise'):
        z_mean = x[:,:,:,:n]
        z_stddev = x[:,:,:,n:]
        e = tf.random_normal(tf.shape(z_mean), stddev=stddev)
        # TODO log_var or stddev?
        # NOTE(review): tf.square(z_stddev) scales the noise by a
        # variance-like quantity; if these channels really are stddevs this
        # is non-standard — confirm the intended parameterisation (the
        # original TODO above raises the same doubt).
        return z_mean + tf.square(z_stddev)*e
def compute_kernel(x, y):
    """Pairwise Gaussian kernel matrix between the rows of x and y.

    Broadcasts the two sample sets against each other and applies
    exp(-mean squared difference / dim) to every pair.
    """
    n_x = tf.shape(x)[0]
    n_y = tf.shape(y)[0]
    dim = tf.shape(x)[1]
    x_bcast = tf.tile(tf.reshape(x, [n_x, 1, dim]), [1, n_y, 1])
    y_bcast = tf.tile(tf.reshape(y, [1, n_y, dim]), [n_x, 1, 1])
    sq_diff = tf.reduce_mean(tf.square(x_bcast - y_bcast), axis=2)
    return tf.exp(-sq_diff / tf.cast(dim, tf.float32))
def compute_mmd(x, y):
    """Maximum mean discrepancy between the sample sets x and y."""
    k_xx = compute_kernel(x, x)
    k_yy = compute_kernel(y, y)
    k_xy = compute_kernel(x, y)
    return (tf.reduce_mean(k_xx)
            + tf.reduce_mean(k_yy)
            - 2 * tf.reduce_mean(k_xy))
def gaussian_d(x, y):
    """Gaussian-like score of the per-row distance between x and y.

    NOTE(review): this is not a normalised Gaussian pdf — the exponent uses
    the distance itself rather than its square, and the 1/sqrt(2*pi) factor
    ignores dimensionality. The original author flagged the same doubt
    ("A conceptual lack of understanding here. Do I need a dx to calculate
    this over? Doesnt make sense for a single point!?"); confirm the intended
    maths before relying on the absolute values.
    """
    d = tf.norm(x - y, axis=1)
    return tf.exp(-0.5*d)/(tf.sqrt(2*tf.constant(np.pi)))
def pz(z):
    """Estimate p(z) under the zero-mean prior on z."""
    z_flat = tf.layers.flatten(z)
    return gaussian_d(z_flat, tf.zeros_like(z_flat))
def px_z(x_, y):
    """Score of the reconstruction x_ against the first output channel of y
    (the added noise in the hidden layer)."""
    y_flat = tf.layers.flatten(y[:, :, :, :1])
    return gaussian_d(y_flat, tf.layers.flatten(x_))
def pz_x(h, z):
    """Score of the latent sample z against the first half of the encoder
    output h (the added noise in the final layer)."""
    n_mean = h.get_shape().as_list()[-1] // 2
    h_flat = tf.layers.flatten(h[:, :, :, :n_mean])
    return gaussian_d(h_flat, tf.layers.flatten(z))
def p_bayes(x_, y, h, z):
    """Bayes-rule density estimate: p(x) = p(x | z) * p(z) / p(z | x).

    If p(z | x) is far away from p(z) then the estimated p(x) is low.
    """
    likelihood = px_z(x_, y)
    prior = pz(z)
    posterior = pz_x(h, z)
    return likelihood * prior / posterior
# def KL_divergence(p, q):
# return tf.reduce_sum(p * tf.log(p/q), axis=-1)
#
# def bayesian_surprise(z):
# """
#
# """
# return kl(z, prior)
class InfoVAE():
    """Convolutional InfoVAE (https://arxiv.org/abs/1706.02262):
    reconstruction loss plus an MMD penalty on the latent distribution."""
    def __init__(self, n_hidden, width, depth, stddev=0.0001):
        """
        Args:
            n_hidden (int): number of latent channels (encoder emits 2x for
                mean/scale).
            width (int): number of filters in each conv layer.
            depth (int): number of additional strided conv layers.
            stddev (float): stddev of the reparameterisation noise.
        """
        self.n_hidden = n_hidden
        self.width = width
        self.depth = depth
        self.n_channels = 1
        self.stddev = stddev
        self.construct()
    def construct(self):
        """
        Constructs:
            encoder (tf.keras.Model): encode the gradient into the hidden space
            decoder (tf.keras.Model): decodes a hidden state into an image
        """
        # Encoder: strided 4x4 convs downsample; the final 1x1 conv emits
        # 2*n_hidden channels (mean + scale for reparameterisation).
        layers = []
        layers.append(tf.keras.layers.Conv2D(self.width, 4, strides=(2, 2),
                                   padding='same',
                                   # input_shape=(28,28,1)
                                   ))
        layers.append(tf.keras.layers.Activation(tf.keras.activations.selu))
        for i in range(self.depth):
            layers.append(tf.keras.layers.Conv2D(self.width,
                                                   4,
                                                   strides=(2, 2),
                                                   padding='same'),)
            layers.append(tf.keras.layers.Activation(tf.keras.activations.selu))
        layers.append(tf.keras.layers.Conv2D(self.n_hidden*2,
                                               1,
                                               strides=(1, 1),
                                               padding='same'))
        self.encoder = tf.keras.Sequential(layers)
        # decoder: mirror of the encoder, ending with 2*n_channels outputs
        # (mean + scale of the reconstruction).
        layers = []
        layers.append(tf.keras.layers.Conv2DTranspose(self.width, 4, strides=(2, 2),
                                   padding='same',
                                   # input_shape=(1,1,self.n_hidden)
                                   ))
        layers.append(tf.keras.layers.Activation(tf.keras.activations.selu))
        for _ in range(self.depth):
            layers.append(tf.keras.layers.Conv2DTranspose(self.width, 4, strides=(2, 2), padding='same'))
            layers.append(tf.keras.layers.Activation(tf.keras.activations.selu))
        layers.append(tf.keras.layers.Conv2DTranspose(self.n_channels*2, 1, strides=(1, 1), padding='same'))
        self.decoder = tf.keras.Sequential(layers)
    def __call__(self, x):
        """
        Encode, sample, decode and sample again; caches h, z, y and x_ on
        the instance for the loss/density methods.

        Args:
            x (tf.tensor): the input
                shape is [None, width, height, channels],
                dtype is tf.float32
        """
        with tf.name_scope('infovae'):
            self.h = self.encoder(x)
            self.z = reparameterise(self.h, self.n_hidden, self.stddev)
            self.y = self.decoder(self.z)
            self.x_ = reparameterise(self.y, self.n_channels, self.stddev)
            return self.x_
    def make_losses(self, x, y=None):
        """Return (reconstruction loss, MMD latent loss) for input x.

        Runs a forward pass when y is not supplied (also refreshing self.z).
        """
        self.x = x
        if y is None:
            print('...')
            y = self.__call__(self.x)
        with tf.name_scope('loss'):
            recon_loss = tf.losses.sigmoid_cross_entropy(
                logits=tf.layers.flatten(y),
                multi_class_labels=tf.layers.flatten(self.x))
            # MMD between the latent samples and a standard-normal batch of
            # the same shape.
            latent_loss = compute_mmd(tf.layers.flatten(self.z),
                     tf.layers.flatten(tf.random_normal(shape=tf.shape(self.z))))
        return recon_loss, latent_loss
    def make_contractive_loss(self):
        """Mean squared Frobenius norm of dh/dx (contractive penalty).

        Requires a prior make_losses/__call__ so self.h and self.x exist;
        raises ValueError when no gradient path connects them.
        """
        # assumes make_losses has already been called
        print(self.h, self.x)
        dhdx = tf.gradients(self.h, self.x)[0]
        print(dhdx)
        if dhdx is None:
            raise ValueError()
        return tf.reduce_mean(tf.reduce_sum(tf.square(dhdx), axis=[1,2,3]))
    def estimate_density(self, x):
        """Forward-pass x and return the p_bayes density estimate."""
        x_ = self.__call__(x)
        return p_bayes(x_, self.y, self.h, self.z)
    @staticmethod
    def preprocess(x):
        """Reshape flat MNIST-style input to (-1, 28, 28, 1), binarise by
        rounding, and zero-pad to 32x32."""
        im = np.reshape(x, [-1, 28, 28, 1])
        im = np.round(im).astype(np.float32)  # NOTE important !?
        return np.pad(im, [(0,0), (2,2), (2,2), (0,0)], 'constant', constant_values=0)
if __name__ == '__main__':
    # Smoke test: eager execution must be enabled before any ops are built.
    tf.enable_eager_execution()
    x = tf.random_normal((100, 28, 28, 1))
    nn = InfoVAE(12, 16, 3)
    x_ = nn(x)
    # loss = nn.make_losses(x)
    # The sampled reconstruction must preserve the input's shape.
    assert x_.shape == x.shape
|
[
"tensorflow.reshape",
"tensorflow.zeros_like",
"tensorflow.keras.Sequential",
"numpy.round",
"numpy.pad",
"tensorflow.cast",
"tensorflow.keras.layers.Activation",
"tensorflow.exp",
"numpy.reshape",
"tensorflow.gradients",
"tensorflow.name_scope",
"tensorflow.norm",
"tensorflow.layers.flatten",
"tensorflow.reduce_mean",
"tensorflow.constant",
"tensorflow.random_normal",
"tensorflow.keras.layers.Conv2DTranspose",
"tensorflow.enable_eager_execution",
"tensorflow.keras.layers.Conv2D",
"tensorflow.shape",
"tensorflow.square"
] |
[((1473, 1495), 'tensorflow.norm', 'tf.norm', (['(x - y)'], {'axis': '(1)'}), '(x - y, axis=1)\n', (1480, 1495), True, 'import tensorflow as tf\n'), ((1630, 1650), 'tensorflow.layers.flatten', 'tf.layers.flatten', (['z'], {}), '(z)\n', (1647, 1650), True, 'import tensorflow as tf\n'), ((6352, 6379), 'tensorflow.enable_eager_execution', 'tf.enable_eager_execution', ([], {}), '()\n', (6377, 6379), True, 'import tensorflow as tf\n'), ((6388, 6422), 'tensorflow.random_normal', 'tf.random_normal', (['(100, 28, 28, 1)'], {}), '((100, 28, 28, 1))\n', (6404, 6422), True, 'import tensorflow as tf\n'), ((339, 370), 'tensorflow.name_scope', 'tf.name_scope', (['"""reparameterise"""'], {}), "('reparameterise')\n", (352, 370), True, 'import tensorflow as tf\n'), ((697, 708), 'tensorflow.shape', 'tf.shape', (['x'], {}), '(x)\n', (705, 708), True, 'import tensorflow as tf\n'), ((725, 736), 'tensorflow.shape', 'tf.shape', (['y'], {}), '(y)\n', (733, 736), True, 'import tensorflow as tf\n'), ((750, 761), 'tensorflow.shape', 'tf.shape', (['x'], {}), '(x)\n', (758, 761), True, 'import tensorflow as tf\n'), ((787, 818), 'tensorflow.reshape', 'tf.reshape', (['x', '[x_size, 1, dim]'], {}), '(x, [x_size, 1, dim])\n', (797, 818), True, 'import tensorflow as tf\n'), ((858, 889), 'tensorflow.reshape', 'tf.reshape', (['y', '[1, y_size, dim]'], {}), '(y, [1, y_size, dim])\n', (868, 889), True, 'import tensorflow as tf\n'), ((1507, 1523), 'tensorflow.exp', 'tf.exp', (['(-0.5 * d)'], {}), '(-0.5 * d)\n', (1513, 1523), True, 'import tensorflow as tf\n'), ((1677, 1693), 'tensorflow.zeros_like', 'tf.zeros_like', (['z'], {}), '(z)\n', (1690, 1693), True, 'import tensorflow as tf\n'), ((1778, 1811), 'tensorflow.layers.flatten', 'tf.layers.flatten', (['y[:, :, :, :1]'], {}), '(y[:, :, :, :1])\n', (1795, 1811), True, 'import tensorflow as tf\n'), ((1832, 1853), 'tensorflow.layers.flatten', 'tf.layers.flatten', (['x_'], {}), '(x_)\n', (1849, 1853), True, 'import tensorflow as tf\n'), ((1972, 2018), 
'tensorflow.layers.flatten', 'tf.layers.flatten', (['h[:, :, :, :shape[-1] // 2]'], {}), '(h[:, :, :, :shape[-1] // 2])\n', (1989, 2018), True, 'import tensorflow as tf\n'), ((2037, 2057), 'tensorflow.layers.flatten', 'tf.layers.flatten', (['z'], {}), '(z)\n', (2054, 2057), True, 'import tensorflow as tf\n'), ((3812, 3839), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', (['layers'], {}), '(layers)\n', (3831, 3839), True, 'import tensorflow as tf\n'), ((4581, 4608), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', (['layers'], {}), '(layers)\n', (4600, 4608), True, 'import tensorflow as tf\n'), ((6136, 6166), 'numpy.reshape', 'np.reshape', (['x', '[-1, 28, 28, 1]'], {}), '(x, [-1, 28, 28, 1])\n', (6146, 6166), True, 'import numpy as np\n'), ((6248, 6323), 'numpy.pad', 'np.pad', (['im', '[(0, 0), (2, 2), (2, 2), (0, 0)]', '"""constant"""'], {'constant_values': '(0)'}), "(im, [(0, 0), (2, 2), (2, 2), (0, 0)], 'constant', constant_values=0)\n", (6254, 6323), True, 'import numpy as np\n'), ((461, 477), 'tensorflow.shape', 'tf.shape', (['z_mean'], {}), '(z_mean)\n', (469, 477), True, 'import tensorflow as tf\n'), ((981, 1005), 'tensorflow.cast', 'tf.cast', (['dim', 'tf.float32'], {}), '(dim, tf.float32)\n', (988, 1005), True, 'import tensorflow as tf\n'), ((1211, 1235), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['x_kernel'], {}), '(x_kernel)\n', (1225, 1235), True, 'import tensorflow as tf\n'), ((1238, 1262), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['y_kernel'], {}), '(y_kernel)\n', (1252, 1262), True, 'import tensorflow as tf\n'), ((1269, 1294), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['xy_kernel'], {}), '(xy_kernel)\n', (1283, 1294), True, 'import tensorflow as tf\n'), ((2954, 3023), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', (['self.width', '(4)'], {'strides': '(2, 2)', 'padding': '"""same"""'}), "(self.width, 4, strides=(2, 2), padding='same')\n", (2976, 3023), True, 'import tensorflow as tf\n'), ((3178, 3231), 
'tensorflow.keras.layers.Activation', 'tf.keras.layers.Activation', (['tf.keras.activations.selu'], {}), '(tf.keras.activations.selu)\n', (3204, 3231), True, 'import tensorflow as tf\n'), ((3617, 3693), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', (['(self.n_hidden * 2)', '(1)'], {'strides': '(1, 1)', 'padding': '"""same"""'}), "(self.n_hidden * 2, 1, strides=(1, 1), padding='same')\n", (3639, 3693), True, 'import tensorflow as tf\n'), ((3901, 3979), 'tensorflow.keras.layers.Conv2DTranspose', 'tf.keras.layers.Conv2DTranspose', (['self.width', '(4)'], {'strides': '(2, 2)', 'padding': '"""same"""'}), "(self.width, 4, strides=(2, 2), padding='same')\n", (3932, 3979), True, 'import tensorflow as tf\n'), ((4171, 4224), 'tensorflow.keras.layers.Activation', 'tf.keras.layers.Activation', (['tf.keras.activations.selu'], {}), '(tf.keras.activations.selu)\n', (4197, 4224), True, 'import tensorflow as tf\n'), ((4471, 4562), 'tensorflow.keras.layers.Conv2DTranspose', 'tf.keras.layers.Conv2DTranspose', (['(self.n_channels * 2)', '(1)'], {'strides': '(1, 1)', 'padding': '"""same"""'}), "(self.n_channels * 2, 1, strides=(1, 1),\n padding='same')\n", (4502, 4562), True, 'import tensorflow as tf\n'), ((4819, 4843), 'tensorflow.name_scope', 'tf.name_scope', (['"""infovae"""'], {}), "('infovae')\n", (4832, 4843), True, 'import tensorflow as tf\n'), ((5255, 5276), 'tensorflow.name_scope', 'tf.name_scope', (['"""loss"""'], {}), "('loss')\n", (5268, 5276), True, 'import tensorflow as tf\n'), ((5780, 5808), 'tensorflow.gradients', 'tf.gradients', (['self.h', 'self.x'], {}), '(self.h, self.x)\n', (5792, 5808), True, 'import tensorflow as tf\n'), ((553, 572), 'tensorflow.square', 'tf.square', (['z_stddev'], {}), '(z_stddev)\n', (562, 572), True, 'import tensorflow as tf\n'), ((1533, 1551), 'tensorflow.constant', 'tf.constant', (['np.pi'], {}), '(np.pi)\n', (1544, 1551), True, 'import tensorflow as tf\n'), ((3295, 3364), 'tensorflow.keras.layers.Conv2D', 
'tf.keras.layers.Conv2D', (['self.width', '(4)'], {'strides': '(2, 2)', 'padding': '"""same"""'}), "(self.width, 4, strides=(2, 2), padding='same')\n", (3317, 3364), True, 'import tensorflow as tf\n'), ((3540, 3593), 'tensorflow.keras.layers.Activation', 'tf.keras.layers.Activation', (['tf.keras.activations.selu'], {}), '(tf.keras.activations.selu)\n', (3566, 3593), True, 'import tensorflow as tf\n'), ((4288, 4366), 'tensorflow.keras.layers.Conv2DTranspose', 'tf.keras.layers.Conv2DTranspose', (['self.width', '(4)'], {'strides': '(2, 2)', 'padding': '"""same"""'}), "(self.width, 4, strides=(2, 2), padding='same')\n", (4319, 4366), True, 'import tensorflow as tf\n'), ((4394, 4447), 'tensorflow.keras.layers.Activation', 'tf.keras.layers.Activation', (['tf.keras.activations.selu'], {}), '(tf.keras.activations.selu)\n', (4420, 4447), True, 'import tensorflow as tf\n'), ((5481, 5506), 'tensorflow.layers.flatten', 'tf.layers.flatten', (['self.z'], {}), '(self.z)\n', (5498, 5506), True, 'import tensorflow as tf\n'), ((5932, 5947), 'tensorflow.square', 'tf.square', (['dhdx'], {}), '(dhdx)\n', (5941, 5947), True, 'import tensorflow as tf\n'), ((6180, 6192), 'numpy.round', 'np.round', (['im'], {}), '(im)\n', (6188, 6192), True, 'import numpy as np\n'), ((941, 969), 'tensorflow.square', 'tf.square', (['(tiled_x - tiled_y)'], {}), '(tiled_x - tiled_y)\n', (950, 969), True, 'import tensorflow as tf\n'), ((5359, 5379), 'tensorflow.layers.flatten', 'tf.layers.flatten', (['y'], {}), '(y)\n', (5376, 5379), True, 'import tensorflow as tf\n'), ((5416, 5441), 'tensorflow.layers.flatten', 'tf.layers.flatten', (['self.x'], {}), '(self.x)\n', (5433, 5441), True, 'import tensorflow as tf\n'), ((5583, 5599), 'tensorflow.shape', 'tf.shape', (['self.z'], {}), '(self.z)\n', (5591, 5599), True, 'import tensorflow as tf\n')]
|
# Ansible boilerplate: module maturity / support metadata.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

# Module documentation rendered by ansible-doc.
# Fixes vs the original: the required key is 'short_description' (was
# 'short description'), Ansible doc types are 'str'/'bool' (was 'string'),
# and 'choices' must be a YAML list (was a quoted string).
DOCUMENTATION = '''
---
module: tetration_application
short_description: Enables creation, modification, deletion and query of an application
version_added: '2.9'
description:
- Enables creation, modification, deletion and query of an application
options:
  alternate_query_mode:
    description: Indicates if dynamic mode is used for the application. In the dynamic
      mode, an ADM run creates one or more candidate queries for each cluster. Default
      value is false
    type: bool
  app_id:
    description:
    - The id for the Application
    - Require one of [C(app_name), C(app_id)]
    - Mutually exclusive to C(app_name)
    type: str
  app_name:
    description:
    - The name for the Application
    - Require one of [C(app_name), C(app_id)]
    - Mutually exclusive to C(app_id)
    type: str
  app_scope_id:
    description:
    - The id for the Scope associated with the application
    - Require one of [C(app_scope_name), C(app_scope_id), C(app_id)]
    - Mutually exclusive to C(app_scope_name)
    type: str
  app_scope_name:
    description:
    - The name for the Scope associated with the application
    - Require one of [C(app_scope_name), C(app_scope_id), C(app_id)]
    - Mutually exclusive to C(app_scope_id)
    type: str
  description:
    description: User specified description of the application
    type: str
  strict_validation:
    description:
    - Will return an error if there are unknown keys/attributes in the uploaded data.
    - Useful for catching misspelled keys.
    - Default value is false.
    type: bool
  primary:
    description: Indicates if the application is primary for its scope
    type: bool
  state:
    choices: [present, absent]
    description: Add, change, or remove an application
    required: true
    type: str
extends_documentation_fragment: tetration_doc_common
notes:
- Requires the requests Python module.
- Only the fields C(app_name), C(description), C(primary) can be updated on an existing application
requirements:
- requests
- 'Required API Permission(s): app_policy_management'
author:
- <NAME> (@techbeck03)
- <NAME> (@joej164)
'''
EXAMPLES = '''
# Add or Modify application
tetration_application:
app_name: ACME InfoSec Policies
app_scope_name: ACME:Example:Application
description: InfoSec Policies for Acme Application
primary: yes
state: present
provider:
host: "https://tetration-cluster.company.com"
api_key: 1234567890QWERTY
api_secret: 1234567890QWERTY
# Delete application
tetration_application:
app_name: ACME InfoSec Policies
app_scope_name: ACME:Example:Application
primary: yes
state: absent
provider:
host: "https://tetration-cluster.company.com"
api_key: 1234567890QWERTY
api_secret: 1234567890QWERTY
'''
RETURN = '''
---
object:
contains:
alternate_query_mode:
description: Indicates if dynamic mode is used for the application
returned: when C(state) is present or query
sample: 'false'
type: bool
app_scope_id:
description: Unique identifier of app scope associated with application workspace
returned: when C(state) is present or query
sample: 596d5215497d4f3eaef1fd04
type: int
author:
description: Author of application workspace
returned: when C(state) is present or query
sample: Brandon Beck
type: string
created_at:
description: Date this application was created (Unix Epoch)
returned: when C(state) is present or query
sample: 1500402190
type: string
description:
description: A description for the application
returned: when C(state) is present or query
sample: Security policies for my application
type: string
enforced_version:
description: The policy version to enforce
returned: when C(state) is present or query
sample: 7
type: int
enforcement_enabled:
description: Sets whether enforcement is enabled on this application
returned: when C(state) is present or query
sample: 'true'
type: bool
id:
description: Unique identifier for the application workspace
returned: when C(state) is present or query
sample: 5c93da83497d4f33d7145960
type: int
latest_adm_version:
description: Latest policy version
returned: when C(state) is present or query
sample: 8
type: int
name:
description: Name of application workspace
returned: when C(state) is present or query
sample: My Application Policy
type: string
primary:
description: Sets whether this application should be primary for the given scope
returned: when C(state) is present or query
sample: 'true'
type: bool
description: the changed or modified object
returned: always
type: complex
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.tetration import TetrationApiModule
from ansible.module_utils.tetration_constants import TETRATION_API_APPLICATIONS
from ansible.module_utils.tetration_constants import TETRATION_API_SCOPES
from ansible.module_utils.tetration_constants import TETRATION_PROVIDER_SPEC
def main():
    """Entry point: ensure the Tetration application matches the desired state.

    Resolves the target scope and application from the module parameters,
    then creates/updates (state=present) or deletes (state=absent) the
    application workspace via the Tetration API.  Exits through
    module.exit_json / module.fail_json.
    """
    module_args = dict(
        app_name=dict(type='str', required=False),
        app_id=dict(type='str', required=False),
        app_scope_id=dict(type='str', required=False),
        app_scope_name=dict(type='str', required=False),
        description=dict(type='str', required=False),
        alternate_query_mode=dict(type='bool', required=False, default=False),
        strict_validation=dict(type='bool', required=False, default=False),
        primary=dict(type='bool', required=False),
        state=dict(required=True, choices=['present', 'absent']),
        provider=dict(type='dict', options=TETRATION_PROVIDER_SPEC)
    )
    module = AnsibleModule(
        argument_spec=module_args,
        mutually_exclusive=[
            ['app_scope_name', 'app_scope_id']
        ],
        required_one_of=[
            ['app_name', 'app_id'],
        ],
    )
    tet_module = TetrationApiModule(module)
    # These are all elements we put in our return JSON object for clarity
    result = {
        'changed': False,
        'object': None,
    }
    # =========================================================================
    # Verify passed in data is accurate.
    # Resolve the app scope either directly by id or by exact name match;
    # any ambiguity or miss is a hard failure (fail_json does not return).
    existing_app_scope = {}
    if module.params['app_scope_id']:
        app_scope_route = f"{TETRATION_API_SCOPES}/{module.params['app_scope_id']}"
        existing_app_scope = tet_module.run_method('GET', app_scope_route)
        if not existing_app_scope:
            module.fail_json(msg=f"Unable to find existing app with the id of: {module.params['app_scope_id']}")
    elif module.params['app_scope_name']:
        all_scopes = tet_module.run_method('GET', TETRATION_API_SCOPES)
        found_app_scopes = [scope for scope in all_scopes if scope['name'] == module.params['app_scope_name']]
        if len(found_app_scopes) == 0:
            module.fail_json(
                msg=("There were no app scopes that matched the name entered. "
                     f"Searched for: {module.params['app_scope_name']}"))
        elif len(found_app_scopes) > 1:
            module.fail_json(
                msg=("There were too many app scopes that matched the name entered. "
                     f"Searched for: {module.params['app_scope_name']}"))
        existing_app_scope = found_app_scopes[0]
    # Resolve the application itself.  An unknown app_id is fatal; an
    # unknown app_name is fine (it may be created or verified absent).
    existing_app = {}
    if module.params['app_id']:
        app_route = f"{TETRATION_API_APPLICATIONS}/{module.params['app_id']}"
        existing_app = tet_module.run_method('GET', app_route)
        if not existing_app:
            module.fail_json(msg=f"The App ID entered is not in the system. Searched for: {module.params['app_id']}")
    elif module.params['app_name']:
        # If we have an app_id, and it's valid, we don't care about searching for the app_id by name
        # If we don't have an app_id, then we need to find an app, but it's ok if one doesn't exist
        # because we'll then make it, or we could be verifying it's absent
        apps = tet_module.run_method('GET', TETRATION_API_APPLICATIONS)
        found_apps = [found for found in apps if found['name'] == module.params['app_name']]
        if len(found_apps) > 1:
            module.fail_json(
                msg=f"There were too many apps that matched the name entered. Searched for: {module.params['app_name']}")
        elif len(found_apps) == 1:
            existing_app = found_apps[0]
    app_route = ""
    if existing_app:
        app_route = f"{TETRATION_API_APPLICATIONS}/{existing_app['id']}"
    # =========================================================================
    # Now enforce the desired state (present, absent)
    # ---------------------------------
    # STATE == 'present'
    # ---------------------------------
    if module.params['state'] == 'present':
        # if the object does not exist at all, create it but verify we have all needed data first
        if not existing_app and not existing_app_scope:
            module.fail_json(msg=("The application does not exist. "
                                  "Must provide a Scope ID or Scope Name to create a new scope."))
        if not existing_app and module.params['primary'] is None:
            module.fail_json(
                msg=("The application does not exist. "
                     "Must provide info on if the scope is primary or not when creating a scope."))
        if existing_app:
            # Only name/description/primary are updatable; drop unset keys so
            # the subset comparison below only considers requested changes.
            updated_app = {
                'name': module.params['app_name'],
                'description': module.params['description'],
                'primary': module.params['primary']
            }
            if not module.params['app_name']:
                updated_app.pop('name')
            if module.params['description'] is None:
                updated_app.pop('description')
            if module.params['primary'] is None:
                updated_app.pop('primary')
            is_subset = tet_module.is_subset(updated_app, existing_app)
            if not is_subset:
                # Something differs: push the update and report a change.
                result['object'] = tet_module.run_method('PUT', app_route, req_payload=updated_app)
                result['changed'] = True
            else:
                result['object'] = existing_app
        else:
            new_app = {
                'app_scope_id': existing_app_scope['id'],
                'name': module.params['app_name'],
                'description': module.params['description'],
                'alternate_query_mode': module.params['alternate_query_mode'],
                'strict_validation': module.params['strict_validation'],
                'primary': module.params['primary']
            }
            result['object'] = tet_module.run_method("POST", TETRATION_API_APPLICATIONS, req_payload=new_app)
            result['changed'] = True
    # ---------------------------------
    # STATE == 'absent'
    # ---------------------------------
    elif module.params['state'] == 'absent':
        if existing_app:
            # Deleting enforced or primary workspaces is rejected upstream,
            # so fail early with an actionable message.
            if existing_app['enforcement_enabled']:
                module.fail_json(
                    msg='Cannot delete workspace with enforcement enabled. Disable enforcement before deleting')
            elif existing_app['primary']:
                module.fail_json(
                    msg='Cannot delete primary application. Try making application secondary before deleting')
            result['object'] = tet_module.run_method('DELETE', app_route)
            result['changed'] = True
    # Return result
    module.exit_json(**result)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
[
"ansible.module_utils.basic.AnsibleModule",
"ansible.module_utils.tetration.TetrationApiModule"
] |
[((6042, 6190), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'module_args', 'mutually_exclusive': "[['app_scope_name', 'app_scope_id']]", 'required_one_of': "[['app_name', 'app_id']]"}), "(argument_spec=module_args, mutually_exclusive=[[\n 'app_scope_name', 'app_scope_id']], required_one_of=[['app_name',\n 'app_id']])\n", (6055, 6190), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((6276, 6302), 'ansible.module_utils.tetration.TetrationApiModule', 'TetrationApiModule', (['module'], {}), '(module)\n', (6294, 6302), False, 'from ansible.module_utils.tetration import TetrationApiModule\n')]
|
import unittest
from unittest import mock
import uuid
import asyncio
from rafter.server import RaftServer
from rafter.models import LogEntry
from rafter.exceptions import NotLeaderException
from .mocks import Log, Storage, Service
class RaftServerTest(unittest.TestCase):
    """Unit tests for RaftServer lifecycle, heartbeats and command handling."""

    def setUp(self):
        """Build a bootstrapped server with mocked protocol and election timer."""
        self.loop = asyncio.get_event_loop()
        self.server = RaftServer(
            Service(),
            log=Log(),
            server_protocol=mock.Mock(),
            storage=Storage(),
            bootstrap=True
        )
        self.server.election_timer = mock.Mock()

    def test_start_stop(self):
        """start() must arm the election timer with the randomised timeout."""
        server = RaftServer(
            Service(),
            log=Log(),
            storage=Storage(),
            bootstrap=True
        )
        server.election_timer = mock.Mock()
        with mock.patch('rafter.server.random.randint', return_value=100):
            server.start()
            server.election_timer.start.assert_called_with(1)

    def test_initial_heartbeat_calls_add_peer(self):
        """A bootstrap heartbeat should schedule the service's add_peer call."""
        with mock.patch('rafter.server.asyncio.ensure_future') as ensure_future:
            self.server.heartbeat(bootstraps=True)
            ensure_future.assert_called_with(self.server.service.add_peer())

    def test_heartbeat_should_schedule_ae(self):
        """A normal heartbeat should schedule send_append_entries."""
        with mock.patch('rafter.server.asyncio.ensure_future') as ensure_future:
            self.server.send_append_entries = mock.Mock()
            self.server.heartbeat(bootstraps=False)
            ensure_future.assert_called_with(self.server.send_append_entries())

    def test_handle_calls_correct_state_method(self):
        """handle() must dispatch to the method of the same name on the state."""
        self.server.state = mock.Mock()
        method = 'test_method'
        self.server.handle(method)  # return value intentionally ignored
        getattr(self.server.state, method).assert_called_with()

    def test_handle_write_raises_error_when_not_leader(self):
        """Write commands are rejected unless this node is the leader."""
        with self.assertRaises(NotLeaderException):
            self.loop.run_until_complete(self.server.handle_write_command('test', (1, 2), {1: 1}))

    def test_handle_read_command(self):
        """A leader must serve read commands and return the service result."""
        self.server.state.to_leader()
        res = self.loop.run_until_complete(self.server.handle_read_command('test', (1, 2), {1: 1}))
        self.assertEqual(res, 'result')

    def test_handle_read_raises_error_when_not_leader(self):
        """Read commands are rejected unless this node is the leader."""
        with self.assertRaises(NotLeaderException):
            self.loop.run_until_complete(self.server.handle_read_command('test', (1, 2), {1: 1}))

    def test_add_peer(self):
        """add_peer should register the peer under its (bytes) id."""
        self.server.add_peer({'id': 'peer-2'})
        self.assertIn(b'peer-2', self.server.peers)

    def test_remove_peer(self):
        """remove_peer should drop known peers and raise KeyError for unknown ones."""
        with self.assertRaises(KeyError):
            self.server.remove_peer('notapeer')
        self.server.remove_peer(self.server.id)
        self.assertNotIn(self.server.id, self.server.peers)

    def test_list_peers(self):
        """list_peers should return the peer ids as a list."""
        self.assertListEqual(self.server.list_peers(), list(self.server.peers))
|
[
"unittest.mock.patch",
"unittest.mock.Mock",
"asyncio.get_event_loop"
] |
[((317, 341), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (339, 341), False, 'import asyncio\n'), ((568, 579), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (577, 579), False, 'from unittest import mock\n'), ((787, 798), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (796, 798), False, 'from unittest import mock\n'), ((1631, 1642), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1640, 1642), False, 'from unittest import mock\n'), ((812, 872), 'unittest.mock.patch', 'mock.patch', (['"""rafter.server.random.randint"""'], {'return_value': '(100)'}), "('rafter.server.random.randint', return_value=100)\n", (822, 872), False, 'from unittest import mock\n'), ((1030, 1079), 'unittest.mock.patch', 'mock.patch', (['"""rafter.server.asyncio.ensure_future"""'], {}), "('rafter.server.asyncio.ensure_future')\n", (1040, 1079), False, 'from unittest import mock\n'), ((1290, 1339), 'unittest.mock.patch', 'mock.patch', (['"""rafter.server.asyncio.ensure_future"""'], {}), "('rafter.server.asyncio.ensure_future')\n", (1300, 1339), False, 'from unittest import mock\n'), ((1404, 1415), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1413, 1415), False, 'from unittest import mock\n'), ((450, 461), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (459, 461), False, 'from unittest import mock\n')]
|
from collections import Counter
from time import time
from typing import List, Optional, Tuple
from ..framework.load_file import load_file
from ..library.base import list_to_number
# Type alias: a sudoku grid is 9 rows of 9 ints, with 0 marking an empty cell.
Sudoku = List[List[int]]
# Format used when printing per-grid solve times (seconds, 6 decimal places).
spec = '{:6.6f}'
class SetNoZero(set):
    """A set that silently ignores attempts to add 0.

    Used for candidate sudoku digits: 0 marks an empty cell and must never
    be treated as a candidate value.
    """

    def add(self, x) -> None:
        """Insert *x* unless it is 0."""
        if x != 0:
            super().add(x)

    def __sub__(self, other) -> 'SetNoZero':
        """Set difference that preserves the SetNoZero type.

        (Fixed: the return annotation previously claimed None.)
        """
        return SetNoZero(super().__sub__(other))
# All legal cell values (1..9); SetNoZero guarantees 0 can never sneak in.
digits = SetNoZero(range(1, 10))
def sstr(input: Sudoku) -> str:
    """Render a 9x9 grid as text, with blank space between the 3x3 boxes."""
    pieces = []
    for row_idx, row in enumerate(input):
        for col_idx, cell in enumerate(row):
            pieces.append(str(cell))
            if col_idx % 3 == 2:
                pieces.append(' ')
        pieces.append('\n')
        if row_idx % 3 == 2:
            pieces.append('\n')
    return ''.join(pieces).strip('\n')
def least(input: Sudoku) -> Optional[Tuple[int, int]]:
    """Return the (row, col) of the first empty cell in row-major order.

    Returns None when the grid has no empty (0) cells.
    """
    for row_idx, row in enumerate(input):
        for col_idx, value in enumerate(row):
            if value == 0:
                return row_idx, col_idx
    return None
def constraint_solve_sudoku(input: Sudoku) -> Tuple[bool, Optional[Sudoku]]:
    """Solve *input* in place by constraint propagation with backtracking.

    Repeatedly applies two deduction rules -- "naked singles" (a cell with
    exactly one candidate) and "hidden singles" (a digit with exactly one
    possible cell in a row/column/box) -- and, when neither makes progress,
    guesses one candidate and recurses on a copy of the grid.

    Returns (True, solved_grid) on success, or (False, None) when the grid
    is inconsistent.  NOTE: mutates *input*.
    """
    # these are kept up to date as parts filled in
    # Remaining candidates per row/column/box (digits minus placed values).
    lines = []
    columns = []
    boxes = []
    for i in range(9):
        line = SetNoZero()
        column = SetNoZero()
        for j in range(9):
            line.add(input[i][j])
            column.add(input[j][i])
        lines.append(digits - line)
        columns.append(digits - column)
    for i in range(3):
        boxes.append([])
        for j in range(3):
            box = SetNoZero()
            for k in range(3):
                for l in range(3):
                    box.add(input[i * 3 + k][j * 3 + l])
            boxes[i].append(digits - box)
    # flag records whether the previous propagation pass made progress.
    flag = True
    while least(input):
        if not flag:
            # Propagation stalled: guess at the first empty cell and recurse.
            i, j = least(input)
            # allowed guaranteed to exist, because the flag starts as true.
            # NOTE: 'allowed' survives from the previous loop iteration,
            # which is why the linter needs the noqa below.
            for value in allowed[i][j]:  # noqa: F821
                # Deep-copy the grid; the comprehension's i/j are local and
                # do not clobber the outer i/j used right after.
                new_input = [[input[i][j] for j in range(9)] for i in range(9)]
                new_input[i][j] = value
                done, solution = constraint_solve_sudoku(new_input)
                if done:
                    return True, solution
            # Every guess failed: this branch is inconsistent.
            return False, None
        flag = False
        # Candidate sets per cell (None for already-filled cells).
        allowed = []
        for i in range(9):
            line = lines[i]
            allowed.append([])
            for j in range(9):
                if input[i][j] != 0:
                    allowed[i].append(None)
                else:
                    column = columns[j]
                    box = boxes[i // 3][j // 3]
                    allowed[i].append({d for d in digits if
                                       d in line and d in column and d in box})
        updates = []
        # Naked singles: a cell with exactly one candidate.
        for i in range(9):
            for j in range(9):
                if allowed[i][j] and len(allowed[i][j]) == 1:
                    updates.append((i, j, list(allowed[i][j])[0]))
        # Hidden singles per row: a digit possible in only one cell.
        for i in range(9):
            counter = Counter()
            for j in range(9):
                if allowed[i][j]:
                    counter.update(allowed[i][j])
            for value, count in counter.items():
                if count == 1:
                    for j in range(9):
                        if allowed[i][j] and value in allowed[i][j]:
                            updates.append((i, j, value))
        # Hidden singles per column.
        for i in range(9):
            counter = Counter()
            for j in range(9):
                if allowed[j][i]:
                    counter.update(allowed[j][i])
            for value, count in counter.items():
                if count == 1:
                    for j in range(9):
                        if allowed[j][i] and value in allowed[j][i]:
                            updates.append((j, i, value))
        # Hidden singles per 3x3 box.
        for i in range(3):
            for j in range(3):
                counter = Counter()
                for k in range(3):
                    for l in range(3):
                        allowedness = allowed[i * 3 + k][j * 3 + l]
                        if allowedness:
                            counter.update(allowed[i * 3 + k][j * 3 + l])
                for value, count in counter.items():
                    if count == 1:
                        for k in range(3):
                            for l in range(3):
                                allowedness = allowed[i * 3 + k][j * 3 + l]
                                if allowedness and value in allowedness:
                                    updates.append((i * 3 + k, j * 3 + l,
                                                    value))
        if updates:
            flag = True
            updates = set(updates)
            # Apply the deductions and keep the row/column/box sets in sync.
            for i, j, value in updates:
                input[i][j] = value
                allowed[i][j] = None
                try:
                    lines[i].remove(value)
                    columns[j].remove(value)
                    boxes[i // 3][j // 3].remove(value)
                except KeyError:  # means that this sudoku is inconsistent
                    return False, None
    return True, input
def solve(name: str='sudoku.txt', relative: bool=True) -> int:
    """Solve every grid in the puzzle file and sum the 3-digit numbers
    formed by the first three cells of each solution's top row."""
    raw = load_file(96, name, relative)
    chunks = [chunk for chunk in raw.split('Grid') if chunk]
    grids = [[[int(digit) for digit in row]
              for row in chunk.split('\n')[1:] if row]
             for chunk in chunks]
    accumulate = 0
    for i, grid in enumerate(grids):
        print('={:>2}th grid='.format(i))
        start = time()
        _, solution = constraint_solve_sudoku(grid)
        spent = time() - start
        print(sstr(solution))
        accumulate += list_to_number(solution[0][0:3])
        print(f'({spec.format(spent)}s)')
    return accumulate
|
[
"collections.Counter",
"time.time"
] |
[((5396, 5402), 'time.time', 'time', ([], {}), '()\n', (5400, 5402), False, 'from time import time\n'), ((2906, 2915), 'collections.Counter', 'Counter', ([], {}), '()\n', (2913, 2915), False, 'from collections import Counter\n'), ((3329, 3338), 'collections.Counter', 'Counter', ([], {}), '()\n', (3336, 3338), False, 'from collections import Counter\n'), ((5471, 5477), 'time.time', 'time', ([], {}), '()\n', (5475, 5477), False, 'from time import time\n'), ((3787, 3796), 'collections.Counter', 'Counter', ([], {}), '()\n', (3794, 3796), False, 'from collections import Counter\n')]
|
from unittest import mock
import pytest
from karp5.server import searching
def test_autocompletequery(app):
    """autocompletequery wraps the boost clause plus a match_phrase in bool/should."""
    query = "any"
    boost_clause = {"term": {"field": {"boost": "500", "value": query}}}
    result = searching.autocompletequery("foo", boost_clause, query)
    assert result == {
        "bool": {"should": [boost_clause, {"match_phrase": {"foo": query}}]}
    }
@pytest.mark.parametrize("user_is_authorized", [False, True])
def test_autocomplete_foo(app, user_is_authorized):
    """autocomplete() must build the expected ES query; unauthorized users
    additionally get a {"term": {"status": "ok"}} filter."""
    q = "any"
    mode = "foo"
    path = f"/autocomplete?q={q}&mode={mode}"
    with app.test_request_context(path):
        # Patch out JSON serialisation, the ES client, auth, and the query
        # adapter so only the query construction is exercised.
        with mock.patch("karp5.server.searching.jsonify", return_value=None), mock.patch(
            "karp5.server.translator.parser.adapt_query", return_value=None
        ) as adapt_query_mock, mock.patch(
            "karp5.config.conf_mgr.elastic", return_value="ES"
        ), mock.patch(
            "karp5.context.auth.validate_user", return_value=(user_is_authorized, ["foo"])
        ):
            searching.autocomplete()
        expected_must = [
            {
                "bool": {
                    "should": [
                        {"term": {"foo": {"boost": "500", "value": "any"}}},
                        {"match_phrase": {"foo": "any"}},
                    ]
                }
            },
            {"exists": {"field": "foo"}},
            {"term": {"lexiconName": "foo"}},
        ]
        # Unauthorized users only see entries with status "ok".
        if not user_is_authorized:
            expected_must.append({"term": {"status": "ok"}})
        expected_elasticq = {
            "query": {
                "constant_score": {
                    "filter": {
                        "bool": {
                            "must": expected_must
                        }
                    }
                }
            }
        }
        adapt_query_mock.assert_called_with(
            1000, 0, "ES", expected_elasticq, {"size": 1000, "index": mode, "_source": [mode]}
        )
@pytest.mark.parametrize("lexicon", ["foo"])
@pytest.mark.parametrize("user_is_authorized", [False, True])
@pytest.mark.parametrize("with_center", [False, True])
def test_get_context(app, lexicon, user_is_authorized, with_center):
    """get_context() must look up the centre entry with the right ES query
    and then fetch the pre/post context around it."""
    center_id = "ID_TEST"
    if with_center:
        path = f"/getcontext/{lexicon}?center={center_id}"
    else:
        path = f"/getcontext/{lexicon}"
    sortvalue = "KEY_TEST"
    # Canned ES response for the centre-entry lookup.
    center_q_hits = {"hits": {"hits": [{"sort": [sortvalue], "_id": center_id}]}}
    with app.test_request_context(path):
        with mock.patch("karp5.server.searching.jsonify", return_value=None), mock.patch(
            "karp5.config.conf_mgr.elastic"
        ) as conf_mgr_elastic_mock, mock.patch(
            "karp5.context.auth.validate_user", return_value=(user_is_authorized, [lexicon]),
        ), mock.patch(
            "karp5.server.searching.get_pre_post", return_value=[None]
        ) as get_pre_post_mock:
            # Wire the patched ES factory to return a mock whose search()
            # yields the canned centre hit.
            attrs = {"search.return_value": center_q_hits}
            es_search_mock = mock.Mock()
            es_search_mock.configure_mock(**attrs)
            conf_mgr_elastic_mock.return_value = es_search_mock
            searching.get_context(lexicon)
        # With an explicit centre the lookup is by _id, otherwise by lexicon.
        if with_center:
            expected_q = {"term": {"_id": center_id}}
        else:
            expected_q = {"match_phrase": {"lexiconName": lexicon}}
        # Unauthorized users only see entries with status "ok".
        if user_is_authorized:
            expected_filters = []
        else:
            expected_filters = [{"term": {"status": "ok"}}]
        if with_center:
            if user_is_authorized:
                expected_center_q = {"query": expected_q}
            else:
                expected_center_q = {
                    "query": {"bool": {"must": expected_q, "filter": expected_filters}}
                }
        else:
            if user_is_authorized:
                expected_center_q = {"query": {"bool": {"must": [expected_q],}}}
            else:
                expected_center_q = {
                    "query": {"bool": {"must": [expected_q, expected_filters[0],],}}
                }
        es_search_mock.search.assert_called_with(
            index=lexicon,
            doc_type="lexicalentry",
            size=1,
            body=expected_center_q,
            sort=["foo.raw:asc"],
        )
        # One call each for the "pre" and "post" context windows.
        assert get_pre_post_mock.call_count == 2
        for call_args in get_pre_post_mock.call_args_list:
            print(f"call_args = {call_args}")
            args, kwargs = call_args
            assert "place" in kwargs
            assert "filters" in kwargs
            assert kwargs["filters"] == expected_filters
            assert isinstance(args[6], int)
@pytest.mark.parametrize("place", ["post", "pre"])
@pytest.mark.parametrize("user_is_authorized", [False, True])
def test_get_pre_post_foo(app, place, user_is_authorized):
    """get_pre_post() must build a range query in the right direction
    ("gte" for post, "lte" for pre) with the expected size and sort."""
    mode = "foo"
    exps = []
    center_id = None
    sortfield = ["SORTFIELD_TEST"]
    sortfieldname = "foo"  # must exist in config
    sortvalue = "SORTVALUE_TEST"
    size = 10
    es = "ES"
    # Unauthorized users get the status=="ok" filter applied.
    if user_is_authorized:
        filters = []
    else:
        filters = [{"term": {"status": "ok"}}]
    with mock.patch(
        "karp5.server.translator.parser.adapt_query", return_value={}
    ) as adapt_query_mock:
        searching.get_pre_post(
            exps,
            center_id,
            sortfield,
            sortfieldname,
            sortvalue,
            mode,
            size,
            es,
            mode,
            place=place,
            filters=filters,
        )
    expected_q = {"range": {sortfieldname: {"gte" if place == "post" else "lte": sortvalue}}}
    if user_is_authorized:
        expected_elasticq = {"query": {"bool": {"must": [expected_q]}}}
    else:
        expected_elasticq = {
            "bool": {"must": [expected_q], "filter": {"bool": {"must": filters}},}
        }
    # The window is over-fetched (3x) to allow post-filtering.
    expected_size = 3 * (size + 1)
    expected_sort = ["{}:{}".format(sortfield[0], "asc" if place == "post" else "desc")]
    adapt_query_mock.assert_called_once()
    args, _ = adapt_query_mock.call_args
    assert args[0] == expected_size
    assert args[3] == expected_elasticq
    assert args[4]["size"] == expected_size
    assert args[4]["sort"] == expected_sort
def test_export_foo_unauth_user(app):
    """export must raise KarpAuthenticationError when authentication fails."""
    lexicon = "foo"
    with app.test_request_context(f"/export/{lexicon}"):
        auth_patch = mock.patch(
            "karp5.context.auth.validate_user", return_value=(False, ["foo"])
        )
        with auth_patch:
            with pytest.raises(searching.errors.KarpAuthenticationError):
                searching.export(lexicon)
def test_export_foo_lexicon_not_permitted(app):
    """export must raise KarpAuthenticationError for a lexicon outside the user's permissions."""
    lexicon = "restricted"
    with app.test_request_context(f"/export/{lexicon}"):
        auth_patch = mock.patch(
            "karp5.context.auth.validate_user", return_value=(False, ["permitted"])
        )
        with auth_patch:
            with pytest.raises(searching.errors.KarpAuthenticationError):
                searching.export(lexicon)
|
[
"karp5.server.searching.get_pre_post",
"karp5.server.searching.autocompletequery",
"unittest.mock.Mock",
"karp5.server.searching.export",
"karp5.server.searching.autocomplete",
"unittest.mock.patch",
"pytest.raises",
"karp5.server.searching.get_context",
"pytest.mark.parametrize"
] |
[((373, 433), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""user_is_authorized"""', '[False, True]'], {}), "('user_is_authorized', [False, True])\n", (396, 433), False, 'import pytest\n'), ((2080, 2123), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""lexicon"""', "['foo']"], {}), "('lexicon', ['foo'])\n", (2103, 2123), False, 'import pytest\n'), ((2125, 2185), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""user_is_authorized"""', '[False, True]'], {}), "('user_is_authorized', [False, True])\n", (2148, 2185), False, 'import pytest\n'), ((2187, 2240), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""with_center"""', '[False, True]'], {}), "('with_center', [False, True])\n", (2210, 2240), False, 'import pytest\n'), ((4881, 4930), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""place"""', "['post', 'pre']"], {}), "('place', ['post', 'pre'])\n", (4904, 4930), False, 'import pytest\n'), ((4932, 4992), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""user_is_authorized"""', '[False, True]'], {}), "('user_is_authorized', [False, True])\n", (4955, 4992), False, 'import pytest\n'), ((219, 262), 'karp5.server.searching.autocompletequery', 'searching.autocompletequery', (['mode', 'boost', 'q'], {}), '(mode, boost, q)\n', (246, 262), False, 'from karp5.server import searching\n'), ((5364, 5437), 'unittest.mock.patch', 'mock.patch', (['"""karp5.server.translator.parser.adapt_query"""'], {'return_value': '{}'}), "('karp5.server.translator.parser.adapt_query', return_value={})\n", (5374, 5437), False, 'from unittest import mock\n'), ((5481, 5613), 'karp5.server.searching.get_pre_post', 'searching.get_pre_post', (['exps', 'center_id', 'sortfield', 'sortfieldname', 'sortvalue', 'mode', 'size', 'es', 'mode'], {'place': 'place', 'filters': 'filters'}), '(exps, center_id, sortfield, sortfieldname, sortvalue,\n mode, size, es, mode, place=place, filters=filters)\n', (5503, 5613), False, 'from karp5.server import 
searching\n'), ((617, 680), 'unittest.mock.patch', 'mock.patch', (['"""karp5.server.searching.jsonify"""'], {'return_value': 'None'}), "('karp5.server.searching.jsonify', return_value=None)\n", (627, 680), False, 'from unittest import mock\n'), ((682, 757), 'unittest.mock.patch', 'mock.patch', (['"""karp5.server.translator.parser.adapt_query"""'], {'return_value': 'None'}), "('karp5.server.translator.parser.adapt_query', return_value=None)\n", (692, 757), False, 'from unittest import mock\n'), ((801, 863), 'unittest.mock.patch', 'mock.patch', (['"""karp5.config.conf_mgr.elastic"""'], {'return_value': '"""ES"""'}), "('karp5.config.conf_mgr.elastic', return_value='ES')\n", (811, 863), False, 'from unittest import mock\n'), ((887, 982), 'unittest.mock.patch', 'mock.patch', (['"""karp5.context.auth.validate_user"""'], {'return_value': "(user_is_authorized, ['foo'])"}), "('karp5.context.auth.validate_user', return_value=(\n user_is_authorized, ['foo']))\n", (897, 982), False, 'from unittest import mock\n'), ((1013, 1037), 'karp5.server.searching.autocomplete', 'searching.autocomplete', ([], {}), '()\n', (1035, 1037), False, 'from karp5.server import searching\n'), ((2629, 2692), 'unittest.mock.patch', 'mock.patch', (['"""karp5.server.searching.jsonify"""'], {'return_value': 'None'}), "('karp5.server.searching.jsonify', return_value=None)\n", (2639, 2692), False, 'from unittest import mock\n'), ((2694, 2737), 'unittest.mock.patch', 'mock.patch', (['"""karp5.config.conf_mgr.elastic"""'], {}), "('karp5.config.conf_mgr.elastic')\n", (2704, 2737), False, 'from unittest import mock\n'), ((2786, 2883), 'unittest.mock.patch', 'mock.patch', (['"""karp5.context.auth.validate_user"""'], {'return_value': '(user_is_authorized, [lexicon])'}), "('karp5.context.auth.validate_user', return_value=(\n user_is_authorized, [lexicon]))\n", (2796, 2883), False, 'from unittest import mock\n'), ((2903, 2973), 'unittest.mock.patch', 'mock.patch', (['"""karp5.server.searching.get_pre_post"""'], 
{'return_value': '[None]'}), "('karp5.server.searching.get_pre_post', return_value=[None])\n", (2913, 2973), False, 'from unittest import mock\n'), ((3106, 3117), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3115, 3117), False, 'from unittest import mock\n'), ((3246, 3276), 'karp5.server.searching.get_context', 'searching.get_context', (['lexicon'], {}), '(lexicon)\n', (3267, 3276), False, 'from karp5.server import searching\n'), ((6657, 6734), 'unittest.mock.patch', 'mock.patch', (['"""karp5.context.auth.validate_user"""'], {'return_value': "(False, ['foo'])"}), "('karp5.context.auth.validate_user', return_value=(False, ['foo']))\n", (6667, 6734), False, 'from unittest import mock\n'), ((7015, 7103), 'unittest.mock.patch', 'mock.patch', (['"""karp5.context.auth.validate_user"""'], {'return_value': "(False, ['permitted'])"}), "('karp5.context.auth.validate_user', return_value=(False, [\n 'permitted']))\n", (7025, 7103), False, 'from unittest import mock\n'), ((6753, 6808), 'pytest.raises', 'pytest.raises', (['searching.errors.KarpAuthenticationError'], {}), '(searching.errors.KarpAuthenticationError)\n', (6766, 6808), False, 'import pytest\n'), ((6826, 6851), 'karp5.server.searching.export', 'searching.export', (['lexicon'], {}), '(lexicon)\n', (6842, 6851), False, 'from karp5.server import searching\n'), ((7117, 7172), 'pytest.raises', 'pytest.raises', (['searching.errors.KarpAuthenticationError'], {}), '(searching.errors.KarpAuthenticationError)\n', (7130, 7172), False, 'import pytest\n'), ((7190, 7215), 'karp5.server.searching.export', 'searching.export', (['lexicon'], {}), '(lexicon)\n', (7206, 7215), False, 'from karp5.server import searching\n')]
|
#!/usr/bin/env python3
import argparse
def parse_args():
    """Parse command-line options for the convergence-data collection run."""
    parser = argparse.ArgumentParser()
    parser.add_argument('path', type=str)
    # All the numeric knobs share the same shape: int-valued with a default.
    for flags, default in [(('-m', '--minpow'), 3),
                           (('-M', '--maxpow'), 7),
                           (('-s', '--step'), 2),
                           (('-t', '--trials'), 10)]:
        parser.add_argument(*flags, type=int, default=default)
    parser.add_argument('--speed_funcs', type=str)
    return parser.parse_args()
# We do this ahead of time so that if we end up only printing the
# usage message we don't bother with the other (e.g. MPI-related)
# setup below here
if __name__ == '__main__':
    args = parse_args()
import sys
# Make the locally-built pyolim extension importable without installing it.
if '../../build/Release' not in sys.path:
    sys.path.insert(0, '../../build/Release')
import pyolim as olim
import h5py
import mpi4py.MPI
import numpy as np
import os.path
from common3d import compute_soln, get_exact_soln, get_marcher_name, marchers, \
    time_marcher
from itertools import product
from speedfuncs3d import get_speed_func_name, get_speed_func_by_name, \
    get_soln_func, speed_funcs
# MPI rank/size are used below to shard (marcher, speed func) pairs
# across processes; the HDF5 file is opened with the parallel mpio driver.
comm = mpi4py.MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
def rms(x):
    """Return the root-mean-square of all elements of the array ``x``.

    Args:
        x: numpy array of any shape.

    Returns:
        float: sqrt(sum(x_i**2) / N) over all N elements.

    Raises:
        ValueError: if ``x`` is empty (the RMS of zero samples is undefined).
    """
    # ravel() returns a view when possible, avoiding flatten()'s mandatory copy.
    y = x.ravel()
    n = y.size
    if n == 0:
        # The original `assert n > 0` disappears under `python -O`;
        # raise explicitly so the guard always holds.
        raise ValueError("rms() of an empty array is undefined")
    return np.sqrt(y.dot(y) / n)
def linf_error(x):
    """Largest absolute entry of ``x`` — the L-infinity norm of the error array."""
    # max(|x_i|) is exactly the infinity norm of the flattened vector.
    return np.abs(x.flatten()).max()
def get_ns(args):
    """Problem sizes for the sweep.

    Powers of two from 2**args.minpow to 2**args.maxpow with args.step
    samples per octave, each rounded to the nearest even integer and then
    incremented so every grid has an odd side length.
    """
    num_sizes = args.step * (args.maxpow - args.minpow) + 1
    raw = np.logspace(args.minpow, args.maxpow, num_sizes, base=2)
    nearest_even = 2 * np.round(raw / 2)
    return nearest_even.astype(int) + 1
def get_dataset_name(Marcher, s):
    """HDF5 group path '<marcher>/<speed func>', with spaces in the marcher
    name replaced by underscores."""
    marcher_part = get_marcher_name(Marcher).replace(' ', '_')
    return '%s/%s' % (marcher_part, get_speed_func_name(s))
def create_datasets(f, M_by_s, ns):
    """Pre-create every HDF5 dataset later filled by populate_datasets.

    With the parallel (mpio) driver, dataset creation is collective, so
    every rank creates all datasets — even those another rank populates.

    Args:
        f: open h5py.File (possibly MPI-parallel).
        M_by_s: iterable of (Marcher, speed_func) pairs.
        ns: sequence of problem sizes (odd ints).
    """
    for Marcher, s in M_by_s:
        name = get_dataset_name(Marcher, s)
        # np.int and np.float were deprecated aliases of the builtins and
        # were removed in NumPy 1.24; the builtins are the drop-in fix.
        f.create_dataset(name + '/n', (len(ns),), dtype=int)
        for n in ns:
            shape = (n, n, n)
            f.create_dataset(name + '/u' + str(n), shape, dtype=float)
            f.create_dataset(name + '/U' + str(n), shape, dtype=float)
        f.create_dataset(name + '/rms', (len(ns),), dtype=float)
        f.create_dataset(name + '/max', (len(ns),), dtype=float)
        f.create_dataset(name + '/t', (len(ns),), dtype=float)
def populate_datasets(Marcher, s, ns, t):
    """Fill the pre-created datasets for one (Marcher, speed func) pair.

    NOTE(review): reads the open HDF5 file through the module-level
    global ``f`` (bound inside the ``with h5py.File(...)`` block in
    __main__) rather than receiving it as a parameter — confirm before
    reusing this function elsewhere.

    Args:
        Marcher: marcher class under test.
        s: speed function.
        ns: problem sizes (odd ints), matching the datasets created earlier.
        t: number of timing trials per problem size.
    """
    name = get_dataset_name(Marcher, s)
    print(name)
    f[name + '/n'][:] = ns
    print('- computing exact solutions')
    # Ground-truth solution evaluated analytically on each grid size.
    us = [get_exact_soln(get_soln_func(s), n) for n in ns]
    for n, u in zip(ns, us):
        f[name + '/u' + str(n)][:, :, :] = u
    print('- computing numerical solutions')
    Us = [compute_soln(Marcher, s, n) for n in ns]
    for n, U in zip(ns, Us):
        f[name + '/U' + str(n)][:, :, :] = U
    print('- evaluating errors')
    # RMS and L-infinity error of the numerical vs. exact solution per size.
    f[name + '/rms'][:] = [rms(u - U) for u, U in zip(us, Us)]
    f[name + '/max'][:] = [linf_error(u - U) for u, U in zip(us, Us)]
    print('- collecting CPU times')
    f[name + '/t'][:] = [time_marcher(Marcher, s, n, ntrials=t) for n in ns]
if __name__ == '__main__':
    # Parallel HDF5: every rank opens the same file collectively.
    with h5py.File(args.path, 'w', driver='mpio', comm=comm) as f:
        # --speed_funcs limits the sweep to a comma-separated subset;
        # otherwise run every registered speed function.
        if args.speed_funcs is not None:
            speed_funcs_ = [
                get_speed_func_by_name(name) for name in
                args.speed_funcs.split(',')]
        else:
            speed_funcs_ = speed_funcs()
        ns = get_ns(args)
        if rank == 0:
            print('Test problem sizes: ' + ', '.join(map(str, ns)))
        if rank == 0:
            print('Creating datasets')
        # Collective: all ranks must create all datasets (mpio requirement).
        create_datasets(f, product(marchers, speed_funcs_), ns)
        # Round-robin shard of (marcher, speed func) pairs across ranks.
        for i, (Marcher, s) in enumerate(product(marchers, speed_funcs_)):
            if i % size != rank:
                continue
            populate_datasets(Marcher, s, ns, args.trials)
|
[
"speedfuncs3d.speed_funcs",
"common3d.time_marcher",
"h5py.File",
"argparse.ArgumentParser",
"common3d.get_marcher_name",
"numpy.logspace",
"sys.path.insert",
"speedfuncs3d.get_soln_func",
"itertools.product",
"numpy.round",
"common3d.compute_soln",
"speedfuncs3d.get_speed_func_name",
"speedfuncs3d.get_speed_func_by_name"
] |
[((67, 92), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (90, 92), False, 'import argparse\n'), ((694, 735), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../../build/Release"""'], {}), "(0, '../../build/Release')\n", (709, 735), False, 'import sys\n'), ((1395, 1461), 'numpy.logspace', 'np.logspace', (['minpow', 'maxpow', '(steps * (maxpow - minpow) + 1)'], {'base': '(2)'}), '(minpow, maxpow, steps * (maxpow - minpow) + 1, base=2)\n', (1406, 1461), True, 'import numpy as np\n'), ((1553, 1578), 'common3d.get_marcher_name', 'get_marcher_name', (['Marcher'], {}), '(Marcher)\n', (1569, 1578), False, 'from common3d import compute_soln, get_exact_soln, get_marcher_name, marchers, time_marcher\n'), ((1591, 1613), 'speedfuncs3d.get_speed_func_name', 'get_speed_func_name', (['s'], {}), '(s)\n', (1610, 1613), False, 'from speedfuncs3d import get_speed_func_name, get_speed_func_by_name, get_soln_func, speed_funcs\n'), ((2602, 2629), 'common3d.compute_soln', 'compute_soln', (['Marcher', 's', 'n'], {}), '(Marcher, s, n)\n', (2614, 2629), False, 'from common3d import compute_soln, get_exact_soln, get_marcher_name, marchers, time_marcher\n'), ((2946, 2984), 'common3d.time_marcher', 'time_marcher', (['Marcher', 's', 'n'], {'ntrials': 't'}), '(Marcher, s, n, ntrials=t)\n', (2958, 2984), False, 'from common3d import compute_soln, get_exact_soln, get_marcher_name, marchers, time_marcher\n'), ((3036, 3087), 'h5py.File', 'h5py.File', (['args.path', '"""w"""'], {'driver': '"""mpio"""', 'comm': 'comm'}), "(args.path, 'w', driver='mpio', comm=comm)\n", (3045, 3087), False, 'import h5py\n'), ((2438, 2454), 'speedfuncs3d.get_soln_func', 'get_soln_func', (['s'], {}), '(s)\n', (2451, 2454), False, 'from speedfuncs3d import get_speed_func_name, get_speed_func_by_name, get_soln_func, speed_funcs\n'), ((3308, 3321), 'speedfuncs3d.speed_funcs', 'speed_funcs', ([], {}), '()\n', (3319, 3321), False, 'from speedfuncs3d import get_speed_func_name, 
get_speed_func_by_name, get_soln_func, speed_funcs\n'), ((3528, 3559), 'itertools.product', 'product', (['marchers', 'speed_funcs_'], {}), '(marchers, speed_funcs_)\n', (3535, 3559), False, 'from itertools import product\n'), ((3607, 3638), 'itertools.product', 'product', (['marchers', 'speed_funcs_'], {}), '(marchers, speed_funcs_)\n', (3614, 3638), False, 'from itertools import product\n'), ((3181, 3209), 'speedfuncs3d.get_speed_func_by_name', 'get_speed_func_by_name', (['name'], {}), '(name)\n', (3203, 3209), False, 'from speedfuncs3d import get_speed_func_name, get_speed_func_by_name, get_soln_func, speed_funcs\n'), ((1474, 1490), 'numpy.round', 'np.round', (['(ns / 2)'], {}), '(ns / 2)\n', (1482, 1490), True, 'import numpy as np\n')]
|
import threading as th
import queue as q
import yaml
STOP_STAT = "STOP"
class StatsFileHandler:
    """Append stats asynchronously to per-name YAML files.

    Callers push records with add_stats(); a single background StatsWriter
    thread drains the queue and does the file I/O, so callers never block.
    """

    class StatsWriter(th.Thread):
        """Worker thread: drains the queue and appends YAML records."""

        def __init__(self, queue, file_path):
            # BUG FIX: threading.Thread.__init__ was never called, so
            # start() raised "thread.__init__() not called".
            super().__init__(daemon=True)
            self.running = True
            self.queue = queue
            self.file_path = file_path
            # name -> resolved output file path, built lazily per stat name.
            self.f_pointer_map = {}

        def _handle(self, stat):
            """Write one stat record, or flip the stop flag on the sentinel."""
            if self._is_time_to_stop(stat):
                self.running = False
                return
            name = stat["name"]
            value = stat["stat"]
            file_name = self.f_pointer_map.setdefault(
                name, "".join([self.file_path, name, ".yml"]))
            with open(file_name, "a+") as f:
                yaml.dump(data=value, stream=f)

        def run(self):
            # BUG FIX: queue.Queue has no pop(); use the blocking get().
            while self.running:
                self._handle(self.queue.get())

        def _is_time_to_stop(self, task):
            # Identity check: only the module-level STOP_STAT sentinel stops us.
            return task is STOP_STAT

    def __init__(self, path):
        self.queue = q.Queue()
        self.worker = self.StatsWriter(self.queue, path)

    def add_stats(self, name, value):
        """Enqueue one stat record for asynchronous writing."""
        self.queue.put({"name": name,
                        "stat": value})

    def start(self):
        """Start the background writer thread."""
        self.worker.start()

    def stop(self):
        # BUG FIX: stop() was a no-op, leaving the worker running forever.
        # Enqueue the sentinel so run() exits, then wait for the thread.
        self.queue.put(STOP_STAT)
        self.worker.join()
|
[
"yaml.dump",
"queue.Queue"
] |
[((951, 960), 'queue.Queue', 'q.Queue', ([], {}), '()\n', (958, 960), True, 'import queue as q\n'), ((683, 714), 'yaml.dump', 'yaml.dump', ([], {'data': 'value', 'stream': 'f'}), '(data=value, stream=f)\n', (692, 714), False, 'import yaml\n')]
|
import time
import logging
import yaml
from kubernetes import client
from kubernetes.client.rest import ApiException
LOGGER = logging.getLogger()
def set_global_namespace_value(namespace_name):
    """
    Store the target namespace in the module-level global ``namespace_value``.
    Every create/delete helper in this module reads that global instead of
    taking the namespace as a parameter, so this must be called once before
    any of them.
    Args:
       param1: namespace_name - namespace name
    Returns:
       None
    Raises:
       None
    """
    global namespace_value
    namespace_value = namespace_name
def create_namespace():
    """
    Create the namespace held in the global ``namespace_value``, labelled
    product=ibm-spectrum-scale-csi.
    Asserts on any Kubernetes API failure.
    """
    api = client.CoreV1Api()
    body = client.V1Namespace(
        api_version="v1",
        kind="Namespace",
        metadata=client.V1ObjectMeta(
            name=namespace_value,
            labels={"product": "ibm-spectrum-scale-csi"},
        ),
    )
    try:
        LOGGER.info(f'Creating new Namespace {namespace_value}')
        response = api.create_namespace(body=body, pretty=True)
        LOGGER.debug(str(response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling CoreV1Api->create_namespace: {e}")
        assert False
def create_deployment():
    """
    Create the IBM Spectrum Scale CSI operator Deployment in the operator
    namespace from deploy/operator.yaml.
    Asserts if the manifest cannot be parsed or the API call fails.
    """
    api = client.AppsV1Api()
    filepath = "../../operator/deploy/operator.yaml"
    try:
        with open(filepath, "r") as f:
            manifest = yaml.full_load(f.read())
    except yaml.YAMLError as exc:
        print ("Error in configuration file:", exc)
        assert False
    try:
        LOGGER.info("Creating Operator Deployment")
        response = api.create_namespaced_deployment(
            namespace=namespace_value, body=manifest)
        LOGGER.debug(str(response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling RbacAuthorizationV1Api->create_namespaced_deployment: {e}")
        assert False
def create_deployment_old(config_file):
    """
    Create IBM Spectrum Scale CSI Operator deployment object in operator
    namespace, building the Deployment programmatically instead of loading
    operator.yaml (see create_deployment for the manifest-based variant).
    Args:
       param1: config_file - parsed config.json dict; must contain
               deployment_operator_image_for_crd and
               deployment_driver_image_for_crd
    Returns:
       None
    Raises:
        Raises an exception on kubernetes client api failure and asserts
    """
    deployment_apps_api_instance = client.AppsV1Api()
    # Standard labels/annotations stamped on the Deployment and its pods.
    deployment_labels = {
        "app.kubernetes.io/instance": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/managed-by": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/name": "ibm-spectrum-scale-csi-operator",
        "product": "ibm-spectrum-scale-csi",
        "release": "ibm-spectrum-scale-csi-operator"
    }
    deployment_annotations = {
        "productID": "ibm-spectrum-scale-csi-operator",
        "productName": "IBM Spectrum Scale CSI Operator",
        "productVersion": "2.0.0"
    }
    deployment_metadata = client.V1ObjectMeta(
        name="ibm-spectrum-scale-csi-operator", labels=deployment_labels, namespace=namespace_value)
    deployment_selector = client.V1LabelSelector(
        match_labels={"app.kubernetes.io/name": "ibm-spectrum-scale-csi-operator"})
    podtemplate_metadata = client.V1ObjectMeta(
        labels=deployment_labels, annotations=deployment_annotations)
    # Schedule only on nodes that advertise an architecture label.
    pod_affinity = client.V1Affinity(
        node_affinity=client.V1NodeAffinity(
            required_during_scheduling_ignored_during_execution=client.V1NodeSelector(
                node_selector_terms=[client.V1NodeSelectorTerm(
                    match_expressions=[client.V1NodeSelectorRequirement(
                        key="beta.kubernetes.io/arch", operator="Exists")]
                )]
            )
        )
    )
    # Sidecar that tails the ansible-operator runner logs to stdout.
    ansible_pod_container = client.V1Container(
        image=config_file["deployment_operator_image_for_crd"],
        command=["/usr/local/bin/ao-logs",
                 "/tmp/ansible-operator/runner", "stdout"],
        liveness_probe=client.V1Probe(_exec=client.V1ExecAction(
            command=["/health_check.sh"]), initial_delay_seconds=10, period_seconds=30),
        readiness_probe=client.V1Probe(_exec=client.V1ExecAction(
            command=["/health_check.sh"]), initial_delay_seconds=3, period_seconds=1),
        name="ansible", image_pull_policy="IfNotPresent",
        security_context=client.V1SecurityContext(
            capabilities=client.V1Capabilities(drop=["ALL"])),
        volume_mounts=[client.V1VolumeMount(
            mount_path="/tmp/ansible-operator/runner", name="runner", read_only=True)],
        env=[client.V1EnvVar(name="CSI_DRIVER_IMAGE", value=config_file["deployment_driver_image_for_crd"])])
    # Main operator container; WATCH_NAMESPACE/POD_NAME come from the
    # downward API so the operator knows where it is running.
    operator_pod_container = client.V1Container(
        image=config_file["deployment_operator_image_for_crd"],
        name="operator", image_pull_policy="IfNotPresent",
        liveness_probe=client.V1Probe(_exec=client.V1ExecAction(
            command=["/health_check.sh"]), initial_delay_seconds=10, period_seconds=30),
        readiness_probe=client.V1Probe(_exec=client.V1ExecAction(
            command=["/health_check.sh"]), initial_delay_seconds=3, period_seconds=1),
        security_context=client.V1SecurityContext(
            capabilities=client.V1Capabilities(drop=["ALL"])),
        env=[client.V1EnvVar(name="WATCH_NAMESPACE",
                             value_from=client.V1EnvVarSource(field_ref=client.V1ObjectFieldSelector(
                                 field_path="metadata.namespace"))),
             client.V1EnvVar(name="POD_NAME", value_from=client.V1EnvVarSource(
                 field_ref=client.V1ObjectFieldSelector(field_path="metadata.name"))),
             client.V1EnvVar(name="OPERATOR_NAME",
                             value="ibm-spectrum-scale-csi-operator"),
             client.V1EnvVar(name="CSI_DRIVER_IMAGE", value=config_file["deployment_driver_image_for_crd"])],
        volume_mounts=[client.V1VolumeMount(
            mount_path="/tmp/ansible-operator/runner", name="runner")]
    )
    # Both containers share an in-memory volume for the runner artifacts.
    pod_spec = client.V1PodSpec(affinity=pod_affinity,
                                containers=[ansible_pod_container,
                                            operator_pod_container],
                                service_account_name="ibm-spectrum-scale-csi-operator",
                                volumes=[client.V1Volume(empty_dir=client.V1EmptyDirVolumeSource(medium="Memory"), name="runner")])
    podtemplate_spec = client.V1PodTemplateSpec(
        metadata=podtemplate_metadata, spec=pod_spec)
    deployment_spec = client.V1DeploymentSpec(
        replicas=1, selector=deployment_selector, template=podtemplate_spec)
    body_dep = client.V1Deployment(
        kind='Deployment', api_version='apps/v1', metadata=deployment_metadata, spec=deployment_spec)
    try:
        LOGGER.info("creating deployment for operator")
        deployment_apps_api_response = deployment_apps_api_instance.create_namespaced_deployment(
            namespace=namespace_value, body=body_dep)
        LOGGER.debug(str(deployment_apps_api_response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling RbacAuthorizationV1Api->create_namespaced_deployment: {e}")
        assert False
def create_cluster_role():
    """
    Create the ibm-spectrum-scale-csi-operator ClusterRole granting the
    operator access to the core, rbac, apps, monitoring, csi.ibm.com,
    security.openshift.io and storage.k8s.io API groups.
    Args:
       None
    Returns:
       None
    Raises:
        Raises an exception on kubernetes client api failure and asserts
    """
    cluster_role_api_instance = client.RbacAuthorizationV1Api()
    pretty = True
    cluster_role_labels = {
        "app.kubernetes.io/instance": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/managed-by": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/name": "ibm-spectrum-scale-csi-operator",
        "product": "ibm-spectrum-scale-csi",
        "release": "ibm-spectrum-scale-csi-operator"
    }
    cluster_role_metadata = client.V1ObjectMeta(
        name="ibm-spectrum-scale-csi-operator", labels=cluster_role_labels, namespace=namespace_value)
    # One V1PolicyRule per API group the operator needs; the rules mirror
    # the permissions shipped in the operator's role.yaml.
    cluster_role_rules = []
    cluster_role_rules.append(client.V1PolicyRule(api_groups=["*"], resources=[
                              'pods', 'persistentvolumeclaims', 'services',
                              'endpoints', 'events', 'configmaps', 'secrets',
                              'secrets/status', 'services/finalizers', 'serviceaccounts', 'securitycontextconstraints'], verbs=["*"]))
    cluster_role_rules.append(client.V1PolicyRule(api_groups=['rbac.authorization.k8s.io'], resources=[
                              'clusterroles', 'clusterrolebindings'], verbs=["*"]))
    cluster_role_rules.append(client.V1PolicyRule(api_groups=['apps'], resources=[
                              'deployments', 'daemonsets', 'replicasets', 'statefulsets'], verbs=["*"]))
    cluster_role_rules.append(client.V1PolicyRule(api_groups=[
                              'monitoring.coreos.com'], resources=['servicemonitors'], verbs=['get', 'create']))
    cluster_role_rules.append(client.V1PolicyRule(
        api_groups=['apps'], resources=['replicasets'], verbs=["get"]))
    cluster_role_rules.append(client.V1PolicyRule(
        api_groups=['csi.ibm.com'], resources=['*'], verbs=["*"]))
    cluster_role_rules.append(client.V1PolicyRule(api_groups=[
                              'security.openshift.io'], resources=['securitycontextconstraints'], verbs=["*"]))
    cluster_role_rules.append(client.V1PolicyRule(api_groups=['storage.k8s.io'], resources=[
                              'volumeattachments', 'storageclasses'], verbs=["*"]))
    cluster_role_rules.append(client.V1PolicyRule(api_groups=['apps'], resource_names=[
                              'ibm-spectrum-scale-csi-operator'], resources=['deployments/finalizers'], verbs=['update']))
    body = client.V1ClusterRole(kind='ClusterRole', api_version='rbac.authorization.k8s.io/v1',
                                metadata=cluster_role_metadata, rules=cluster_role_rules)
    try:
        LOGGER.info("Creating ibm-spectrum-scale-csi-operator ClusterRole ")
        cluster_role_api_response = cluster_role_api_instance.create_cluster_role(
            body, pretty=pretty)
        LOGGER.debug(str(cluster_role_api_response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling RbacAuthorizationV1Api->create_cluster_role: {e}")
        assert False
def create_cluster_role_binding():
    """
    Bind the operator ServiceAccount to the operator ClusterRole.
    Asserts on any Kubernetes API failure.
    """
    api = client.RbacAuthorizationV1Api()
    binding_labels = {
        "app.kubernetes.io/instance": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/managed-by": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/name": "ibm-spectrum-scale-csi-operator",
        "product": "ibm-spectrum-scale-csi",
        "release": "ibm-spectrum-scale-csi-operator"
    }
    body = client.V1ClusterRoleBinding(
        kind='ClusterRoleBinding',
        api_version='rbac.authorization.k8s.io/v1',
        metadata=client.V1ObjectMeta(
            name="ibm-spectrum-scale-csi-operator",
            labels=binding_labels,
            namespace=namespace_value),
        role_ref=client.V1RoleRef(
            api_group="rbac.authorization.k8s.io",
            kind="ClusterRole",
            name="ibm-spectrum-scale-csi-operator"),
        subjects=[client.V1Subject(
            kind="ServiceAccount",
            name="ibm-spectrum-scale-csi-operator",
            namespace=namespace_value)])
    try:
        LOGGER.info("creating cluster role binding")
        response = api.create_cluster_role_binding(body, pretty=True)
        LOGGER.debug(response)
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling RbacAuthorizationV1Api->create_cluster_role_binding: {e}")
        assert False
def create_service_account():
    """
    Create the ibm-spectrum-scale-csi-operator ServiceAccount in the
    operator namespace.
    Asserts on any Kubernetes API failure.
    """
    api = client.CoreV1Api()
    sa_labels = {
        "app.kubernetes.io/instance": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/managed-by": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/name": "ibm-spectrum-scale-csi-operator",
        "product": "ibm-spectrum-scale-csi",
        "release": "ibm-spectrum-scale-csi-operator"
    }
    body = client.V1ServiceAccount(
        api_version="v1",
        kind="ServiceAccount",
        metadata=client.V1ObjectMeta(
            name="ibm-spectrum-scale-csi-operator",
            namespace=namespace_value,
            labels=sa_labels,
        ),
    )
    try:
        LOGGER.info("Creating ibm-spectrum-scale-csi-operator ServiceAccount")
        response = api.create_namespaced_service_account(
            namespace=namespace_value, body=body, pretty=True)
        LOGGER.debug(str(response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling CoreV1Api->create_namespaced_service_account: {e}")
        assert False
def create_crd():
    """
    Create the CSIScaleOperator CRD from
    csiscaleoperators.csi.ibm.com.crd.yaml.
    Asserts if the manifest cannot be parsed.  A ValueError from the
    kubernetes client while deserialising the response is expected and
    treated as success (see the original comment in the except branch).
    """
    filepath = "../../operator/deploy/crds/csiscaleoperators.csi.ibm.com.crd.yaml"
    try:
        with open(filepath, "r") as f:
            crd_manifest = yaml.full_load(f.read())
    except yaml.YAMLError as exc:
        print ("Error in configuration file:", exc)
        assert False
    api = client.ApiextensionsV1beta1Api()
    try:
        LOGGER.info(
            "Creating IBM SpectrumScale CRD object using csiscaleoperators.csi.ibm.com.crd.yaml file")
        response = api.create_custom_resource_definition(
            crd_manifest, pretty=True)
        LOGGER.debug(str(response))
    except ValueError:
        LOGGER.info(
            "while there is valuerror expection,but CRD created successfully")
def create_crd_old():
    """
    Create the CSIScaleOperator CRD programmatically (apiextensions
    v1beta1 API), taking only the openAPIV3Schema validation block from
    csiscaleoperators.csi.ibm.com.crd.yaml.  See create_crd for the
    manifest-based variant.
    Args:
       None
    Returns:
       None
    Raises:
        Raises an ValueError exception but it is expected. hence we pass.
    """
    # input to crd_metadata
    crd_labels = {
        "app.kubernetes.io/instance": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/managed-by": "ibm-spectrum-scale-csi-operator",
        "app.kubernetes.io/name": "ibm-spectrum-scale-csi-operator",
        "release": "ibm-spectrum-scale-csi-operator"
    }
    # input to crd_body
    crd_metadata = client.V1ObjectMeta(
        name="csiscaleoperators.csi.ibm.com", labels=crd_labels)
    crd_names = client.V1beta1CustomResourceDefinitionNames(
        kind="CSIScaleOperator",
        list_kind="CSIScaleOperatorList",
        plural="csiscaleoperators",
        singular="csiscaleoperator"
    )
    crd_subresources = client.V1beta1CustomResourceSubresources(status={})
    # input to crd_validation json input
    # Only the validation schema is read from the YAML manifest.
    filepath = "../../operator/deploy/crds/csiscaleoperators.csi.ibm.com.crd.yaml"
    try:
        with open(filepath, "r") as f:
            loadcrd_yaml = yaml.full_load(f.read())
    except yaml.YAMLError as exc:
        print ("Error in configuration file:", exc)
        assert False
    properties = loadcrd_yaml['spec']['validation']['openAPIV3Schema']['properties']
    crd_open_apiv3_schema = client.V1beta1JSONSchemaProps(
        properties=properties, type="object")
    crd_validation = client.V1beta1CustomResourceValidation(
        open_apiv3_schema=crd_open_apiv3_schema)
    crd_versions = [client.V1beta1CustomResourceDefinitionVersion(
        name="v1", served=True, storage=True)]
    crd_spec = client.V1beta1CustomResourceDefinitionSpec(
        group="csi.ibm.com",
        names=crd_names,
        scope="Namespaced",
        subresources=crd_subresources,
        validation=crd_validation,
        version="v1",
        versions=crd_versions
    )
    crd_body = client.V1beta1CustomResourceDefinition(
        api_version="apiextensions.k8s.io/v1beta1",
        kind="CustomResourceDefinition",
        metadata=crd_metadata,
        spec=crd_spec)
    crd_api_instance = client.ApiextensionsV1beta1Api()
    try:
        LOGGER.info("creating crd")
        crd_api_response = crd_api_instance.create_custom_resource_definition(
            crd_body, pretty=True)
        LOGGER.debug(str(crd_api_response))
    except ValueError:
        # The kubernetes client raises ValueError while deserialising the
        # response even when the CRD was created; treat it as success.
        LOGGER.info(
            "while there is valuerror expection,but CRD created successfully")
def delete_crd():
    """
    Delete the csiscaleoperators.csi.ibm.com CRD.
    Asserts on any Kubernetes API failure.
    """
    api = client.ApiextensionsV1beta1Api()
    try:
        response = api.delete_custom_resource_definition(
            name="csiscaleoperators.csi.ibm.com", pretty=True)
        LOGGER.debug(str(response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling ApiextensionsV1beta1Api->delete_custom_resource_definition: {e}")
        assert False
def delete_namespace():
    """
    Delete the operator namespace (global ``namespace_value``).
    Asserts on any Kubernetes API failure.
    """
    api = client.CoreV1Api()
    try:
        response = api.delete_namespace(name=namespace_value, pretty=True)
        LOGGER.debug(str(response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling CoreV1Api->delete_namespace: {e}")
        assert False
def delete_deployment():
    """
    Delete the operator Deployment from the operator namespace.
    Asserts on any Kubernetes API failure.
    """
    api = client.AppsV1Api()
    try:
        response = api.delete_namespaced_deployment(
            name="ibm-spectrum-scale-csi-operator",
            namespace=namespace_value,
            pretty=True)
        LOGGER.debug(str(response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling ExtensionsV1beta1Api->delete_namespaced_deployment: {e}")
        assert False
def delete_service_account(service_account_name):
    """
    Delete a ServiceAccount from the operator namespace.
    Args:
       param1: service_account_name - name of the ServiceAccount to delete
    Asserts on any Kubernetes API failure.
    """
    api = client.CoreV1Api()
    try:
        response = api.delete_namespaced_service_account(
            name=service_account_name,
            namespace=namespace_value,
            pretty=True)
        LOGGER.debug(str(response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling CoreV1Api->delete_namespaced_service_account: {e}")
        assert False
def delete_cluster_role(cluster_role_name):
    """
    Delete a ClusterRole by name.
    Args:
       param1: cluster_role_name - name of the ClusterRole to delete
    Asserts on any Kubernetes API failure.
    """
    api = client.RbacAuthorizationV1Api()
    try:
        response = api.delete_cluster_role(
            name=cluster_role_name, pretty=True)
        LOGGER.debug(str(response))
    except ApiException as e:
        LOGGER.error(
            f"Exception when calling RbacAuthorizationV1Api->delete_cluster_role: {e}")
        assert False
def delete_cluster_role_binding(cluster_role_binding_name):
    """
    Delete the named IBM Spectrum Scale CSI Operator ClusterRoleBinding object.

    Args:
        cluster_role_binding_name: name of the ClusterRoleBinding to delete

    Raises:
        Asserts on kubernetes client API failure
    """
    rbac_api = client.RbacAuthorizationV1Api()
    try:
        response = rbac_api.delete_cluster_role_binding(
            name=cluster_role_binding_name, pretty=True)
    except ApiException as exc:
        LOGGER.error(
            f"Exception when calling RbacAuthorizationV1Api->delete_cluster_role_binding: {exc}")
        assert False
    LOGGER.debug(response)
def check_crd_deleted():
    """
    Poll until the CSIScaleOperator CRD (Custom Resource Definition) is deleted.

    Retries every 5 seconds for up to 60 seconds; asserts if the CRD still
    exists after that.

    Args:
        None

    Returns:
        None

    Raises:
        Asserts when the CRD is not deleted within the timeout
    """
    list_crd_api_instance = client.ApiextensionsV1beta1Api()
    for _ in range(12):
        try:
            # Fixed: use the real CRD name (plural.group form), matching the
            # name used when the CRD is created and by check_crd_exists().
            # The previous value "ibm-spectrum-scale-csi" is not a valid CRD
            # name, so the read always 404'd and this check passed instantly.
            list_crd_api_response = list_crd_api_instance.read_custom_resource_definition(
                pretty=True, name="csiscaleoperators.csi.ibm.com")
            LOGGER.debug(list_crd_api_response)
            LOGGER.info("still deleting crd")
            time.sleep(5)
        except ApiException:
            LOGGER.info("crd deleted")
            return
    LOGGER.error("crd is not deleted")
    assert False
def check_namespace_deleted():
    """
    Wait until the operator namespace object is removed from the cluster.

    Polls every 5 seconds for up to 120 seconds; asserts if the namespace
    still exists afterwards.

    Raises:
        Asserts when the namespace is not deleted within the timeout
    """
    core_api = client.CoreV1Api()
    for _ in range(24):
        try:
            response = core_api.read_namespace(
                name=namespace_value, pretty=True)
        except ApiException:
            LOGGER.info(f'namespace {namespace_value} is deleted')
            return
        LOGGER.debug(str(response))
        LOGGER.info(f'Still deleting namespace {namespace_value}')
        time.sleep(5)
    LOGGER.error(f'namespace {namespace_value} is not deleted')
    assert False
def check_deployment_deleted():
    """
    Wait until the operator Deployment is removed from the cluster.

    Polls every 5 seconds for up to 30 seconds; asserts if the Deployment
    still exists afterwards.

    Raises:
        Asserts when the Deployment is not deleted within the timeout
    """
    apps_api = client.AppsV1Api()
    for _ in range(6):
        try:
            response = apps_api.read_namespaced_deployment(
                name="ibm-spectrum-scale-csi-operator", namespace=namespace_value, pretty=True)
        except ApiException:
            LOGGER.info("Deployment ibm-spectrum-scale-csi-operator is deleted")
            return
        LOGGER.debug(str(response))
        LOGGER.info(f'Still Deleting ibm-spectrum-scale-csi-operator deployment')
        time.sleep(5)
    LOGGER.error("deployment is not deleted")
    assert False
def check_service_account_deleted(service_account_name):
    """
    Wait until the named ServiceAccount is removed from the operator namespace.

    Polls every 5 seconds for up to 30 seconds; asserts if the ServiceAccount
    still exists afterwards.

    Args:
        service_account_name: name of the ServiceAccount to check

    Raises:
        Asserts when the ServiceAccount is not deleted within the timeout
    """
    core_api = client.CoreV1Api()
    for _ in range(6):
        try:
            response = core_api.read_namespaced_service_account(
                name=service_account_name, namespace=namespace_value, pretty=True)
        except ApiException:
            LOGGER.info(f'ServiceAccount {service_account_name} is deleted')
            return
        LOGGER.debug(str(response))
        LOGGER.info(f'Still deleting ServiceAccount {service_account_name}')
        time.sleep(5)
    LOGGER.error("service account is not deleted")
    assert False
def check_cluster_role_deleted(cluster_role_name):
    """
    Wait until the named ClusterRole is removed from the cluster.

    Polls every 5 seconds for up to 30 seconds; asserts if the ClusterRole
    still exists afterwards.

    Args:
        cluster_role_name: name of the ClusterRole to check

    Raises:
        Asserts when the ClusterRole is not deleted within the timeout
    """
    rbac_api = client.RbacAuthorizationV1Api()
    for _ in range(6):
        try:
            response = rbac_api.read_cluster_role(
                name=cluster_role_name, pretty=True)
        except ApiException:
            LOGGER.info(f'ClusterRole {cluster_role_name} is deleted')
            return
        LOGGER.debug(str(response))
        LOGGER.info(f'Still deleting ClusterRole {cluster_role_name} ')
        time.sleep(5)
    LOGGER.error(f'ClusterRole {cluster_role_name} is not deleted')
    assert False
def check_cluster_role_binding_deleted(cluster_role_binding_name):
    """
    Wait until the named ClusterRoleBinding is removed from the cluster.

    Polls every 5 seconds for up to 30 seconds; asserts if the
    ClusterRoleBinding still exists afterwards.

    Args:
        cluster_role_binding_name: name of the ClusterRoleBinding to check

    Raises:
        Asserts when the ClusterRoleBinding is not deleted within the timeout
    """
    rbac_api = client.RbacAuthorizationV1Api()
    for _ in range(6):
        try:
            response = rbac_api.read_cluster_role_binding(
                name=cluster_role_binding_name, pretty=True)
        except ApiException:
            LOGGER.info(f'ClusterRoleBinding {cluster_role_binding_name} is deleted')
            return
        LOGGER.debug(str(response))
        LOGGER.info(f'Still deleting ClusterRoleBinding {cluster_role_binding_name}')
        time.sleep(5)
    LOGGER.error(f'ClusterRoleBinding {cluster_role_binding_name} is not deleted')
    assert False
def check_crd_exists():
    """
    Report whether the csiscaleoperators.csi.ibm.com CRD is present.

    Returns:
        True if the custom resource definition exists, False otherwise
    """
    crd_api = client.ApiextensionsV1beta1Api()
    try:
        response = crd_api.read_custom_resource_definition(
            pretty=True, name="csiscaleoperators.csi.ibm.com")
    except ApiException:
        LOGGER.info("crd does not exist")
        return False
    LOGGER.debug(str(response))
    LOGGER.info("crd exists")
    return True
def check_namespace_exists():
    """
    Report whether the operator namespace (namespace_value) is present.

    Returns:
        True if the namespace exists, False otherwise
    """
    core_api = client.CoreV1Api()
    try:
        response = core_api.read_namespace(
            name=namespace_value, pretty=True)
    except ApiException:
        LOGGER.info("namespace does not exists")
        return False
    LOGGER.debug(str(response))
    LOGGER.info("namespace exists")
    return True
def check_deployment_exists():
    """
    Report whether the operator Deployment is present in the operator namespace.

    Returns:
        True if the deployment exists, False otherwise
    """
    apps_api = client.AppsV1Api()
    try:
        response = apps_api.read_namespaced_deployment(
            name="ibm-spectrum-scale-csi-operator", namespace=namespace_value, pretty=True)
    except ApiException:
        LOGGER.info("deployment does not exists")
        return False
    LOGGER.debug(str(response))
    LOGGER.info("deployment exists")
    return True
def check_service_account_exists(service_account_name):
    """
    Report whether the named ServiceAccount is present in the operator namespace.

    Args:
        service_account_name: name of the ServiceAccount to check

    Returns:
        True if the service account exists, False otherwise
    """
    core_api = client.CoreV1Api()
    try:
        response = core_api.read_namespaced_service_account(
            name=service_account_name, namespace=namespace_value, pretty=True)
    except ApiException:
        LOGGER.info("Service account does not exists")
        return False
    LOGGER.debug(str(response))
    LOGGER.info("Service account exists")
    return True
def check_cluster_role_exists(cluster_role_name):
    """
    Report whether the named ClusterRole is present in the cluster.

    Args:
        cluster_role_name: name of the ClusterRole to check

    Returns:
        True if the cluster role exists, False otherwise
    """
    rbac_api = client.RbacAuthorizationV1Api()
    try:
        response = rbac_api.read_cluster_role(
            name=cluster_role_name, pretty=True)
    except ApiException:
        LOGGER.info("cluster role does not exists")
        return False
    LOGGER.debug(str(response))
    LOGGER.info("cluster role exists")
    return True
def check_cluster_role_binding_exists(cluster_role_binding_name):
    """
    Report whether the named ClusterRoleBinding is present in the cluster.

    Args:
        cluster_role_binding_name: name of the ClusterRoleBinding to check

    Returns:
        True if the cluster role binding exists, False otherwise
    """
    rbac_api = client.RbacAuthorizationV1Api()
    try:
        response = rbac_api.read_cluster_role_binding(
            name=cluster_role_binding_name, pretty=True)
    except ApiException:
        LOGGER.info("cluster role binding does not exists")
        return False
    LOGGER.debug(str(response))
    LOGGER.info("cluster role binding exists")
    return True
|
[
"kubernetes.client.V1ServiceAccount",
"kubernetes.client.V1ClusterRole",
"kubernetes.client.V1Namespace",
"kubernetes.client.V1beta1JSONSchemaProps",
"kubernetes.client.V1ObjectMeta",
"logging.getLogger",
"kubernetes.client.V1LabelSelector",
"kubernetes.client.V1EmptyDirVolumeSource",
"kubernetes.client.V1ObjectFieldSelector",
"kubernetes.client.ApiextensionsV1beta1Api",
"kubernetes.client.V1beta1CustomResourceDefinitionNames",
"kubernetes.client.V1EnvVar",
"kubernetes.client.V1PodTemplateSpec",
"kubernetes.client.V1RoleRef",
"kubernetes.client.CoreV1Api",
"kubernetes.client.AppsV1Api",
"kubernetes.client.V1beta1CustomResourceDefinition",
"kubernetes.client.V1Capabilities",
"kubernetes.client.V1Deployment",
"kubernetes.client.V1beta1CustomResourceSubresources",
"kubernetes.client.V1Subject",
"kubernetes.client.V1beta1CustomResourceDefinitionSpec",
"kubernetes.client.V1ExecAction",
"time.sleep",
"kubernetes.client.RbacAuthorizationV1Api",
"kubernetes.client.V1PolicyRule",
"kubernetes.client.V1beta1CustomResourceValidation",
"kubernetes.client.V1DeploymentSpec",
"kubernetes.client.V1NodeSelectorRequirement",
"kubernetes.client.V1VolumeMount",
"kubernetes.client.V1beta1CustomResourceDefinitionVersion",
"kubernetes.client.V1ClusterRoleBinding"
] |
[((131, 150), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (148, 150), False, 'import logging\n'), ((748, 766), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (764, 766), False, 'from kubernetes import client\n'), ((793, 884), 'kubernetes.client.V1ObjectMeta', 'client.V1ObjectMeta', ([], {'name': 'namespace_value', 'labels': "{'product': 'ibm-spectrum-scale-csi'}"}), "(name=namespace_value, labels={'product':\n 'ibm-spectrum-scale-csi'})\n", (812, 884), False, 'from kubernetes import client\n'), ((928, 1016), 'kubernetes.client.V1Namespace', 'client.V1Namespace', ([], {'api_version': '"""v1"""', 'kind': '"""Namespace"""', 'metadata': 'namespace_metadata'}), "(api_version='v1', kind='Namespace', metadata=\n namespace_metadata)\n", (946, 1016), False, 'from kubernetes import client\n'), ((1854, 1872), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (1870, 1872), False, 'from kubernetes import client\n'), ((3086, 3104), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (3102, 3104), False, 'from kubernetes import client\n'), ((3686, 3803), 'kubernetes.client.V1ObjectMeta', 'client.V1ObjectMeta', ([], {'name': '"""ibm-spectrum-scale-csi-operator"""', 'labels': 'deployment_labels', 'namespace': 'namespace_value'}), "(name='ibm-spectrum-scale-csi-operator', labels=\n deployment_labels, namespace=namespace_value)\n", (3705, 3803), False, 'from kubernetes import client\n'), ((3838, 3940), 'kubernetes.client.V1LabelSelector', 'client.V1LabelSelector', ([], {'match_labels': "{'app.kubernetes.io/name': 'ibm-spectrum-scale-csi-operator'}"}), "(match_labels={'app.kubernetes.io/name':\n 'ibm-spectrum-scale-csi-operator'})\n", (3860, 3940), False, 'from kubernetes import client\n'), ((3977, 4063), 'kubernetes.client.V1ObjectMeta', 'client.V1ObjectMeta', ([], {'labels': 'deployment_labels', 'annotations': 'deployment_annotations'}), '(labels=deployment_labels, annotations=\n deployment_annotations)\n', 
(3996, 4063), False, 'from kubernetes import client\n'), ((7265, 7335), 'kubernetes.client.V1PodTemplateSpec', 'client.V1PodTemplateSpec', ([], {'metadata': 'podtemplate_metadata', 'spec': 'pod_spec'}), '(metadata=podtemplate_metadata, spec=pod_spec)\n', (7289, 7335), False, 'from kubernetes import client\n'), ((7371, 7468), 'kubernetes.client.V1DeploymentSpec', 'client.V1DeploymentSpec', ([], {'replicas': '(1)', 'selector': 'deployment_selector', 'template': 'podtemplate_spec'}), '(replicas=1, selector=deployment_selector, template=\n podtemplate_spec)\n', (7394, 7468), False, 'from kubernetes import client\n'), ((7492, 7610), 'kubernetes.client.V1Deployment', 'client.V1Deployment', ([], {'kind': '"""Deployment"""', 'api_version': '"""apps/v1"""', 'metadata': 'deployment_metadata', 'spec': 'deployment_spec'}), "(kind='Deployment', api_version='apps/v1', metadata=\n deployment_metadata, spec=deployment_spec)\n", (7511, 7610), False, 'from kubernetes import client\n'), ((8382, 8413), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (8411, 8413), False, 'from kubernetes import client\n'), ((8820, 8939), 'kubernetes.client.V1ObjectMeta', 'client.V1ObjectMeta', ([], {'name': '"""ibm-spectrum-scale-csi-operator"""', 'labels': 'cluster_role_labels', 'namespace': 'namespace_value'}), "(name='ibm-spectrum-scale-csi-operator', labels=\n cluster_role_labels, namespace=namespace_value)\n", (8839, 8939), False, 'from kubernetes import client\n'), ((10733, 10885), 'kubernetes.client.V1ClusterRole', 'client.V1ClusterRole', ([], {'kind': '"""ClusterRole"""', 'api_version': '"""rbac.authorization.k8s.io/v1"""', 'metadata': 'cluster_role_metadata', 'rules': 'cluster_role_rules'}), "(kind='ClusterRole', api_version=\n 'rbac.authorization.k8s.io/v1', metadata=cluster_role_metadata, rules=\n cluster_role_rules)\n", (10753, 10885), False, 'from kubernetes import client\n'), ((11673, 11704), 'kubernetes.client.RbacAuthorizationV1Api', 
'client.RbacAuthorizationV1Api', ([], {}), '()\n', (11702, 11704), False, 'from kubernetes import client\n'), ((12179, 12306), 'kubernetes.client.V1ObjectMeta', 'client.V1ObjectMeta', ([], {'name': '"""ibm-spectrum-scale-csi-operator"""', 'labels': 'cluster_role_binding_labels', 'namespace': 'namespace_value'}), "(name='ibm-spectrum-scale-csi-operator', labels=\n cluster_role_binding_labels, namespace=namespace_value)\n", (12198, 12306), False, 'from kubernetes import client\n'), ((12351, 12470), 'kubernetes.client.V1RoleRef', 'client.V1RoleRef', ([], {'api_group': '"""rbac.authorization.k8s.io"""', 'kind': '"""ClusterRole"""', 'name': '"""ibm-spectrum-scale-csi-operator"""'}), "(api_group='rbac.authorization.k8s.io', kind='ClusterRole',\n name='ibm-spectrum-scale-csi-operator')\n", (12367, 12470), False, 'from kubernetes import client\n'), ((12516, 12627), 'kubernetes.client.V1Subject', 'client.V1Subject', ([], {'kind': '"""ServiceAccount"""', 'name': '"""ibm-spectrum-scale-csi-operator"""', 'namespace': 'namespace_value'}), "(kind='ServiceAccount', name=\n 'ibm-spectrum-scale-csi-operator', namespace=namespace_value)\n", (12532, 12627), False, 'from kubernetes import client\n'), ((12668, 12902), 'kubernetes.client.V1ClusterRoleBinding', 'client.V1ClusterRoleBinding', ([], {'kind': '"""ClusterRoleBinding"""', 'api_version': '"""rbac.authorization.k8s.io/v1"""', 'metadata': 'cluster_role_binding_metadata', 'role_ref': 'cluster_role_binding_role_ref', 'subjects': '[cluster_role_binding_subjects]'}), "(kind='ClusterRoleBinding', api_version=\n 'rbac.authorization.k8s.io/v1', metadata=cluster_role_binding_metadata,\n role_ref=cluster_role_binding_role_ref, subjects=[\n cluster_role_binding_subjects])\n", (12695, 12902), False, 'from kubernetes import client\n'), ((13927, 13945), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (13943, 13945), False, 'from kubernetes import client\n'), ((14339, 14461), 'kubernetes.client.V1ObjectMeta', 
'client.V1ObjectMeta', ([], {'name': '"""ibm-spectrum-scale-csi-operator"""', 'namespace': 'namespace_value', 'labels': 'service_account_labels'}), "(name='ibm-spectrum-scale-csi-operator', namespace=\n namespace_value, labels=service_account_labels)\n", (14358, 14461), False, 'from kubernetes import client\n'), ((14495, 14599), 'kubernetes.client.V1ServiceAccount', 'client.V1ServiceAccount', ([], {'api_version': '"""v1"""', 'kind': '"""ServiceAccount"""', 'metadata': 'service_account_metadata'}), "(api_version='v1', kind='ServiceAccount', metadata=\n service_account_metadata)\n", (14518, 14599), False, 'from kubernetes import client\n'), ((15701, 15733), 'kubernetes.client.ApiextensionsV1beta1Api', 'client.ApiextensionsV1beta1Api', ([], {}), '()\n', (15731, 15733), False, 'from kubernetes import client\n'), ((16822, 16898), 'kubernetes.client.V1ObjectMeta', 'client.V1ObjectMeta', ([], {'name': '"""csiscaleoperators.csi.ibm.com"""', 'labels': 'crd_labels'}), "(name='csiscaleoperators.csi.ibm.com', labels=crd_labels)\n", (16841, 16898), False, 'from kubernetes import client\n'), ((16928, 17096), 'kubernetes.client.V1beta1CustomResourceDefinitionNames', 'client.V1beta1CustomResourceDefinitionNames', ([], {'kind': '"""CSIScaleOperator"""', 'list_kind': '"""CSIScaleOperatorList"""', 'plural': '"""csiscaleoperators"""', 'singular': '"""csiscaleoperator"""'}), "(kind='CSIScaleOperator',\n list_kind='CSIScaleOperatorList', plural='csiscaleoperators', singular=\n 'csiscaleoperator')\n", (16971, 17096), False, 'from kubernetes import client\n'), ((17157, 17208), 'kubernetes.client.V1beta1CustomResourceSubresources', 'client.V1beta1CustomResourceSubresources', ([], {'status': '{}'}), '(status={})\n', (17197, 17208), False, 'from kubernetes import client\n'), ((17682, 17749), 'kubernetes.client.V1beta1JSONSchemaProps', 'client.V1beta1JSONSchemaProps', ([], {'properties': 'properties', 'type': '"""object"""'}), "(properties=properties, type='object')\n", (17711, 17749), False, 
'from kubernetes import client\n'), ((17782, 17861), 'kubernetes.client.V1beta1CustomResourceValidation', 'client.V1beta1CustomResourceValidation', ([], {'open_apiv3_schema': 'crd_open_apiv3_schema'}), '(open_apiv3_schema=crd_open_apiv3_schema)\n', (17820, 17861), False, 'from kubernetes import client\n'), ((18006, 18210), 'kubernetes.client.V1beta1CustomResourceDefinitionSpec', 'client.V1beta1CustomResourceDefinitionSpec', ([], {'group': '"""csi.ibm.com"""', 'names': 'crd_names', 'scope': '"""Namespaced"""', 'subresources': 'crd_subresources', 'validation': 'crd_validation', 'version': '"""v1"""', 'versions': 'crd_versions'}), "(group='csi.ibm.com', names=\n crd_names, scope='Namespaced', subresources=crd_subresources,\n validation=crd_validation, version='v1', versions=crd_versions)\n", (18048, 18210), False, 'from kubernetes import client\n'), ((18300, 18462), 'kubernetes.client.V1beta1CustomResourceDefinition', 'client.V1beta1CustomResourceDefinition', ([], {'api_version': '"""apiextensions.k8s.io/v1beta1"""', 'kind': '"""CustomResourceDefinition"""', 'metadata': 'crd_metadata', 'spec': 'crd_spec'}), "(api_version=\n 'apiextensions.k8s.io/v1beta1', kind='CustomResourceDefinition',\n metadata=crd_metadata, spec=crd_spec)\n", (18338, 18462), False, 'from kubernetes import client\n'), ((18521, 18553), 'kubernetes.client.ApiextensionsV1beta1Api', 'client.ApiextensionsV1beta1Api', ([], {}), '()\n', (18551, 18553), False, 'from kubernetes import client\n'), ((19199, 19231), 'kubernetes.client.ApiextensionsV1beta1Api', 'client.ApiextensionsV1beta1Api', ([], {}), '()\n', (19229, 19231), False, 'from kubernetes import client\n'), ((19916, 19934), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (19932, 19934), False, 'from kubernetes import client\n'), ((20607, 20625), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (20623, 20625), False, 'from kubernetes import client\n'), ((21468, 21486), 'kubernetes.client.CoreV1Api', 
'client.CoreV1Api', ([], {}), '()\n', (21484, 21486), False, 'from kubernetes import client\n'), ((22296, 22327), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (22325, 22327), False, 'from kubernetes import client\n'), ((23114, 23145), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (23143, 23145), False, 'from kubernetes import client\n'), ((23964, 23996), 'kubernetes.client.ApiextensionsV1beta1Api', 'client.ApiextensionsV1beta1Api', ([], {}), '()\n', (23994, 23996), False, 'from kubernetes import client\n'), ((24868, 24886), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (24884, 24886), False, 'from kubernetes import client\n'), ((25816, 25834), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (25832, 25834), False, 'from kubernetes import client\n'), ((26901, 26919), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (26917, 26919), False, 'from kubernetes import client\n'), ((27954, 27985), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (27983, 27985), False, 'from kubernetes import client\n'), ((29031, 29062), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (29060, 29062), False, 'from kubernetes import client\n'), ((29990, 30022), 'kubernetes.client.ApiextensionsV1beta1Api', 'client.ApiextensionsV1beta1Api', ([], {}), '()\n', (30020, 30022), False, 'from kubernetes import client\n'), ((30690, 30708), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (30706, 30708), False, 'from kubernetes import client\n'), ((31363, 31381), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (31379, 31381), False, 'from kubernetes import client\n'), ((32122, 32140), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (32138, 32140), False, 'from kubernetes import client\n'), 
((32820, 32851), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (32849, 32851), False, 'from kubernetes import client\n'), ((33521, 33552), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (33550, 33552), False, 'from kubernetes import client\n'), ((9007, 9265), 'kubernetes.client.V1PolicyRule', 'client.V1PolicyRule', ([], {'api_groups': "['*']", 'resources': "['pods', 'persistentvolumeclaims', 'services', 'endpoints', 'events',\n 'configmaps', 'secrets', 'secrets/status', 'services/finalizers',\n 'serviceaccounts', 'securitycontextconstraints']", 'verbs': "['*']"}), "(api_groups=['*'], resources=['pods',\n 'persistentvolumeclaims', 'services', 'endpoints', 'events',\n 'configmaps', 'secrets', 'secrets/status', 'services/finalizers',\n 'serviceaccounts', 'securitycontextconstraints'], verbs=['*'])\n", (9026, 9265), False, 'from kubernetes import client\n'), ((9380, 9510), 'kubernetes.client.V1PolicyRule', 'client.V1PolicyRule', ([], {'api_groups': "['rbac.authorization.k8s.io']", 'resources': "['clusterroles', 'clusterrolebindings']", 'verbs': "['*']"}), "(api_groups=['rbac.authorization.k8s.io'], resources=[\n 'clusterroles', 'clusterrolebindings'], verbs=['*'])\n", (9399, 9510), False, 'from kubernetes import client\n'), ((9570, 9699), 'kubernetes.client.V1PolicyRule', 'client.V1PolicyRule', ([], {'api_groups': "['apps']", 'resources': "['deployments', 'daemonsets', 'replicasets', 'statefulsets']", 'verbs': "['*']"}), "(api_groups=['apps'], resources=['deployments',\n 'daemonsets', 'replicasets', 'statefulsets'], verbs=['*'])\n", (9589, 9699), False, 'from kubernetes import client\n'), ((9760, 9878), 'kubernetes.client.V1PolicyRule', 'client.V1PolicyRule', ([], {'api_groups': "['monitoring.coreos.com']", 'resources': "['servicemonitors']", 'verbs': "['get', 'create']"}), "(api_groups=['monitoring.coreos.com'], resources=[\n 'servicemonitors'], verbs=['get', 
'create'])\n", (9779, 9878), False, 'from kubernetes import client\n'), ((9938, 10025), 'kubernetes.client.V1PolicyRule', 'client.V1PolicyRule', ([], {'api_groups': "['apps']", 'resources': "['replicasets']", 'verbs': "['get']"}), "(api_groups=['apps'], resources=['replicasets'], verbs=[\n 'get'])\n", (9957, 10025), False, 'from kubernetes import client\n'), ((10063, 10140), 'kubernetes.client.V1PolicyRule', 'client.V1PolicyRule', ([], {'api_groups': "['csi.ibm.com']", 'resources': "['*']", 'verbs': "['*']"}), "(api_groups=['csi.ibm.com'], resources=['*'], verbs=['*'])\n", (10082, 10140), False, 'from kubernetes import client\n'), ((10183, 10300), 'kubernetes.client.V1PolicyRule', 'client.V1PolicyRule', ([], {'api_groups': "['security.openshift.io']", 'resources': "['securitycontextconstraints']", 'verbs': "['*']"}), "(api_groups=['security.openshift.io'], resources=[\n 'securitycontextconstraints'], verbs=['*'])\n", (10202, 10300), False, 'from kubernetes import client\n'), ((10360, 10479), 'kubernetes.client.V1PolicyRule', 'client.V1PolicyRule', ([], {'api_groups': "['storage.k8s.io']", 'resources': "['volumeattachments', 'storageclasses']", 'verbs': "['*']"}), "(api_groups=['storage.k8s.io'], resources=[\n 'volumeattachments', 'storageclasses'], verbs=['*'])\n", (10379, 10479), False, 'from kubernetes import client\n'), ((10539, 10697), 'kubernetes.client.V1PolicyRule', 'client.V1PolicyRule', ([], {'api_groups': "['apps']", 'resource_names': "['ibm-spectrum-scale-csi-operator']", 'resources': "['deployments/finalizers']", 'verbs': "['update']"}), "(api_groups=['apps'], resource_names=[\n 'ibm-spectrum-scale-csi-operator'], resources=['deployments/finalizers'\n ], verbs=['update'])\n", (10558, 10697), False, 'from kubernetes import client\n'), ((17893, 17980), 'kubernetes.client.V1beta1CustomResourceDefinitionVersion', 'client.V1beta1CustomResourceDefinitionVersion', ([], {'name': '"""v1"""', 'served': '(True)', 'storage': '(True)'}), "(name='v1', served=True,\n 
storage=True)\n", (17938, 17980), False, 'from kubernetes import client\n'), ((24329, 24342), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (24339, 24342), False, 'import time\n'), ((25246, 25259), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (25256, 25259), False, 'import time\n'), ((26221, 26234), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (26231, 26234), False, 'import time\n'), ((27293, 27306), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (27303, 27306), False, 'import time\n'), ((28310, 28323), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (28320, 28323), False, 'import time\n'), ((29417, 29430), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (29427, 29430), False, 'import time\n'), ((5241, 5340), 'kubernetes.client.V1VolumeMount', 'client.V1VolumeMount', ([], {'mount_path': '"""/tmp/ansible-operator/runner"""', 'name': '"""runner"""', 'read_only': '(True)'}), "(mount_path='/tmp/ansible-operator/runner', name=\n 'runner', read_only=True)\n", (5261, 5340), False, 'from kubernetes import client\n'), ((5366, 5465), 'kubernetes.client.V1EnvVar', 'client.V1EnvVar', ([], {'name': '"""CSI_DRIVER_IMAGE"""', 'value': "config_file['deployment_driver_image_for_crd']"}), "(name='CSI_DRIVER_IMAGE', value=config_file[\n 'deployment_driver_image_for_crd'])\n", (5381, 5465), False, 'from kubernetes import client\n'), ((6477, 6555), 'kubernetes.client.V1EnvVar', 'client.V1EnvVar', ([], {'name': '"""OPERATOR_NAME"""', 'value': '"""ibm-spectrum-scale-csi-operator"""'}), "(name='OPERATOR_NAME', value='ibm-spectrum-scale-csi-operator')\n", (6492, 6555), False, 'from kubernetes import client\n'), ((6601, 6700), 'kubernetes.client.V1EnvVar', 'client.V1EnvVar', ([], {'name': '"""CSI_DRIVER_IMAGE"""', 'value': "config_file['deployment_driver_image_for_crd']"}), "(name='CSI_DRIVER_IMAGE', value=config_file[\n 'deployment_driver_image_for_crd'])\n", (6616, 6700), False, 'from kubernetes import client\n'), ((6722, 6800), 
'kubernetes.client.V1VolumeMount', 'client.V1VolumeMount', ([], {'mount_path': '"""/tmp/ansible-operator/runner"""', 'name': '"""runner"""'}), "(mount_path='/tmp/ansible-operator/runner', name='runner')\n", (6742, 6800), False, 'from kubernetes import client\n'), ((4776, 4825), 'kubernetes.client.V1ExecAction', 'client.V1ExecAction', ([], {'command': "['/health_check.sh']"}), "(command=['/health_check.sh'])\n", (4795, 4825), False, 'from kubernetes import client\n'), ((4933, 4982), 'kubernetes.client.V1ExecAction', 'client.V1ExecAction', ([], {'command': "['/health_check.sh']"}), "(command=['/health_check.sh'])\n", (4952, 4982), False, 'from kubernetes import client\n'), ((5179, 5214), 'kubernetes.client.V1Capabilities', 'client.V1Capabilities', ([], {'drop': "['ALL']"}), "(drop=['ALL'])\n", (5200, 5214), False, 'from kubernetes import client\n'), ((5685, 5734), 'kubernetes.client.V1ExecAction', 'client.V1ExecAction', ([], {'command': "['/health_check.sh']"}), "(command=['/health_check.sh'])\n", (5704, 5734), False, 'from kubernetes import client\n'), ((5842, 5891), 'kubernetes.client.V1ExecAction', 'client.V1ExecAction', ([], {'command': "['/health_check.sh']"}), "(command=['/health_check.sh'])\n", (5861, 5891), False, 'from kubernetes import client\n'), ((6029, 6064), 'kubernetes.client.V1Capabilities', 'client.V1Capabilities', ([], {'drop': "['ALL']"}), "(drop=['ALL'])\n", (6050, 6064), False, 'from kubernetes import client\n'), ((7174, 7220), 'kubernetes.client.V1EmptyDirVolumeSource', 'client.V1EmptyDirVolumeSource', ([], {'medium': '"""Memory"""'}), "(medium='Memory')\n", (7203, 7220), False, 'from kubernetes import client\n'), ((6194, 6255), 'kubernetes.client.V1ObjectFieldSelector', 'client.V1ObjectFieldSelector', ([], {'field_path': '"""metadata.namespace"""'}), "(field_path='metadata.namespace')\n", (6222, 6255), False, 'from kubernetes import client\n'), ((6403, 6459), 'kubernetes.client.V1ObjectFieldSelector', 'client.V1ObjectFieldSelector', ([], 
{'field_path': '"""metadata.name"""'}), "(field_path='metadata.name')\n", (6431, 6459), False, 'from kubernetes import client\n'), ((4349, 4436), 'kubernetes.client.V1NodeSelectorRequirement', 'client.V1NodeSelectorRequirement', ([], {'key': '"""beta.kubernetes.io/arch"""', 'operator': '"""Exists"""'}), "(key='beta.kubernetes.io/arch', operator=\n 'Exists')\n", (4381, 4436), False, 'from kubernetes import client\n')]
|
# coding: utf-8
from datetime import date, datetime
from typing import List, Dict, Type
from openapi_server.models.base_model_ import Model
from openapi_server import util
class Block(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, block_hash: str=None, height: int=None, no_txs: int=None, timestamp: int=None):
        """Block - a model defined in OpenAPI

        :param block_hash: The block_hash of this Block.
        :param height: The height of this Block.
        :param no_txs: The no_txs of this Block.
        :param timestamp: The timestamp of this Block.
        """
        # Declared attribute types for (de)serialization.
        self.openapi_types = {
            'block_hash': str,
            'height': int,
            'no_txs': int,
            'timestamp': int
        }
        # Each model attribute maps to a JSON key of the same name.
        self.attribute_map = {name: name for name in self.openapi_types}

        self._block_hash = block_hash
        self._height = height
        self._no_txs = no_txs
        self._timestamp = timestamp

    @classmethod
    def from_dict(cls, dikt: dict) -> 'Block':
        """Build a Block model from a plain dict.

        :param dikt: A dict.
        :return: The block of this Block.
        """
        return util.deserialize_model(dikt, cls)

    @property
    def block_hash(self):
        """Hash identifying this Block.

        :rtype: str
        """
        return self._block_hash

    @block_hash.setter
    def block_hash(self, block_hash):
        """Set the block_hash of this Block.

        :type block_hash: str
        """
        self._block_hash = block_hash

    @property
    def height(self):
        """Height of this Block.

        :rtype: int
        """
        return self._height

    @height.setter
    def height(self, height):
        """Set the height of this Block; rejects negative values.

        :type height: int
        """
        if height is not None:
            if height < 0:
                raise ValueError("Invalid value for `height`, must be a value greater than or equal to `0`")
        self._height = height

    @property
    def no_txs(self):
        """Number of transactions in this Block.

        :rtype: int
        """
        return self._no_txs

    @no_txs.setter
    def no_txs(self, no_txs):
        """Set the no_txs of this Block.

        :type no_txs: int
        """
        self._no_txs = no_txs

    @property
    def timestamp(self):
        """Timestamp of this Block.

        :rtype: int
        """
        return self._timestamp

    @timestamp.setter
    def timestamp(self, timestamp):
        """Set the timestamp of this Block.

        :type timestamp: int
        """
        self._timestamp = timestamp
|
[
"openapi_server.util.deserialize_model"
] |
[((1388, 1421), 'openapi_server.util.deserialize_model', 'util.deserialize_model', (['dikt', 'cls'], {}), '(dikt, cls)\n', (1410, 1421), False, 'from openapi_server import util\n')]
|
import torch
from models.experimental import attempt_load
from utils.datasets import LoadImages
from utils.general import check_img_size, non_max_suppression, scale_coords, set_logging
from utils.torch_utils import select_device, time_synchronized
import multiprocessing as mp
# 目标检测
def detect_center(frame_cap,condition:mp.Condition,conn:mp.Pipe):
weights, imgsz = '/home/nvidia/yolov3/core_mi.pt', 640
# Initialize
set_logging()
device = select_device('')
half = device.type != 'cpu' # half precision only supported on CUDA
# Load model
model = attempt_load(weights, map_location=device) # load FP32 model
stride = int(model.stride.max()) # model stride
imgsz = check_img_size(imgsz, s=stride) # check img_size
names = model.module.names if hasattr(model, 'module') else model.names # get class names
if half:
model.half() # to FP16
while True:
#info={"red":None,"yellow":None}
with condition:
condition.wait()
image = frame_cap.frame
# image = cv2.imread('000125.jpg')
print("begin detect")
if image is None:
continue
img, im0s = LoadImages(image, img_size=imgsz, stride=stride).get_img()
# Run inference
if device.type != 'cpu':
model(torch.zeros(1, 3, imgsz, imgsz).to(device).type_as(next(model.parameters()))) # run once
#t0 = time.time()
img = torch.from_numpy(img).to(device)
img = img.half() if half else img.float() # uint8 to fp16/32
img /= 255.0 # 0 - 255 to 0.0 - 1.0
if img.ndimension() == 3:
img = img.unsqueeze(0)
# Inference
t1 = time_synchronized()
pred = model(img, augment=False)[0]
# Apply NMS
pred = non_max_suppression(pred, agnostic=True, max_det=300)
t2 = time_synchronized()
# Process detections
for i, det in enumerate(pred): # detections per image
'''if webcam: # batch_size >= 1
p, s, im0, frame = path[i], f'{i}: ', im0s[i].copy(), dataset.count
else:'''
s, im0 = '', im0s.copy()
s += '%gx%g ' % img.shape[2:] # print string
if len(det):
# Rescale boxes from img_size to im0 size
det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
# Print results
for c in det[:, -1].unique():
n = (det[:, -1] == c).sum() # detections per class
s += f"{n} {names[int(c)]}{'s' * (n > 1)}, " # add to string
det_new = reversed(det)
# Write results
for *xyxy, conf, cls in det_new:
ans = torch.tensor(xyxy).view(1, 4).tolist()[0]
ans = [(ans[0] + ans[2]) / 2, (ans[1] + ans[3]) / 2, ans[2]-ans[0], ans[3]-ans[1] ]
if (not int(cls)):
frame_cap.detect_box['red'] = ans
else:
frame_cap.detect_box['yellow'] = ans
print(frame_cap.detect_box['red'])
else:
frame_cap.detect_box['red'] = []
frame_cap.detect_box['yellow'] = []
#print(info)
conn.send(frame_cap.detect_box)
# Print time (inference + NMS)
# print(f'{s}Done. ({t2 - t1:.3f}s)')
# print("end detect one image")
|
[
"utils.torch_utils.time_synchronized",
"torch.tensor",
"utils.general.set_logging",
"utils.general.non_max_suppression",
"models.experimental.attempt_load",
"utils.datasets.LoadImages",
"torch.zeros",
"utils.torch_utils.select_device",
"utils.general.check_img_size",
"utils.general.scale_coords",
"torch.from_numpy"
] |
[((431, 444), 'utils.general.set_logging', 'set_logging', ([], {}), '()\n', (442, 444), False, 'from utils.general import check_img_size, non_max_suppression, scale_coords, set_logging\n'), ((458, 475), 'utils.torch_utils.select_device', 'select_device', (['""""""'], {}), "('')\n", (471, 475), False, 'from utils.torch_utils import select_device, time_synchronized\n'), ((579, 621), 'models.experimental.attempt_load', 'attempt_load', (['weights'], {'map_location': 'device'}), '(weights, map_location=device)\n', (591, 621), False, 'from models.experimental import attempt_load\n'), ((706, 737), 'utils.general.check_img_size', 'check_img_size', (['imgsz'], {'s': 'stride'}), '(imgsz, s=stride)\n', (720, 737), False, 'from utils.general import check_img_size, non_max_suppression, scale_coords, set_logging\n'), ((1696, 1715), 'utils.torch_utils.time_synchronized', 'time_synchronized', ([], {}), '()\n', (1713, 1715), False, 'from utils.torch_utils import select_device, time_synchronized\n'), ((1796, 1849), 'utils.general.non_max_suppression', 'non_max_suppression', (['pred'], {'agnostic': '(True)', 'max_det': '(300)'}), '(pred, agnostic=True, max_det=300)\n', (1815, 1849), False, 'from utils.general import check_img_size, non_max_suppression, scale_coords, set_logging\n'), ((1863, 1882), 'utils.torch_utils.time_synchronized', 'time_synchronized', ([], {}), '()\n', (1880, 1882), False, 'from utils.torch_utils import select_device, time_synchronized\n'), ((1179, 1227), 'utils.datasets.LoadImages', 'LoadImages', (['image'], {'img_size': 'imgsz', 'stride': 'stride'}), '(image, img_size=imgsz, stride=stride)\n', (1189, 1227), False, 'from utils.datasets import LoadImages\n'), ((1445, 1466), 'torch.from_numpy', 'torch.from_numpy', (['img'], {}), '(img)\n', (1461, 1466), False, 'import torch\n'), ((2333, 2383), 'utils.general.scale_coords', 'scale_coords', (['img.shape[2:]', 'det[:, :4]', 'im0.shape'], {}), '(img.shape[2:], det[:, :4], im0.shape)\n', (2345, 2383), False, 'from 
utils.general import check_img_size, non_max_suppression, scale_coords, set_logging\n'), ((1314, 1345), 'torch.zeros', 'torch.zeros', (['(1)', '(3)', 'imgsz', 'imgsz'], {}), '(1, 3, imgsz, imgsz)\n', (1325, 1345), False, 'import torch\n'), ((2773, 2791), 'torch.tensor', 'torch.tensor', (['xyxy'], {}), '(xyxy)\n', (2785, 2791), False, 'import torch\n')]
|
"""Test Suites: 370 passed, 370 total
Tests: 4 skipped, 1050 passed, 1054 total
Tests: 28 passed, 28 total
Snapshots: 830 passed, 830 total
Time: 67.988s
Ran all test suites.
Done in 99.84s."""
"""Test Suites: 187 passed, 187 total
Tests: 1 skipped, 579 passed, 580 total
Snapshots: 429 passed, 429 total
Time: 285.168s
Ran all test suites.
Done in 339.53s."""
#185464744
"""[1m[32m › [39m[22m[1m[32m259 tests passed[39m[22m (259 total in 61 test suites, 25 snapshots, run time 38.11s)
[2K[1GDone in 39.56s."""
from log_retriever import read_job_log, joblog
import re
#Regex
TEST_REGEX_D_P_T = "Tests:(\ *)(\d*) skipped, (\d*) passed, (\d*) total"
TEST_REGEX_P_T = "Tests:(\ *)(\d*) passed, (\d*) total"
TEST_REGEX_FORMAT_2_P_T = "Tests:(\ *)\\x1b\[(\d*)m\\x1b\[(\d*)m\\x1b\[(\d*)m(\d*) passed\\x1b\[(\d*)m\\x1b\[(\d*)m(\d*), (\d*) total"
TEST_REGEX_P_T_2 = "(\d*) tests passed(.*)\((\d*) total"
FORMAT_2 = "\\x1b\[(\d*)mTests"
def test_parser_format2(log):
total_tests = 0
test_passed = 0
test_skipped = 0
test_failed = 0
allRes = re.findall(TEST_REGEX_FORMAT_2_P_T, log)
for res in allRes:
test_passed += int(res[4])
total_tests += int(res[8])
test_failed += total_tests - test_passed
return total_tests, test_passed, test_failed, test_skipped
def get_test_results(log):
total_tests = 0
test_passed = 0
test_skipped = 0
test_failed = 0
allRes = re.findall(TEST_REGEX_P_T_2, log)
for res in allRes:
test_passed += int(res[0])
total_tests += int(res[2])
test_failed += total_tests - test_passed
if(total_tests > 0):
return total_tests, test_passed, test_failed, test_skipped
allRes = re.findall(TEST_REGEX_D_P_T, log)
for res in allRes:
test_skipped += int(res[1])
test_passed += int(res[2])
total_tests += int(res[3])
test_failed += total_tests - test_passed - test_skipped
if(total_tests > 0):
return total_tests, test_passed, test_failed, test_skipped
allRes = re.findall(TEST_REGEX_P_T, log)
for res in allRes:
test_skipped += 0
test_passed += int(res[1])
total_tests += int(res[2])
test_failed += total_tests - test_passed - test_skipped
if(total_tests > 0):
return total_tests, test_passed, test_failed, test_skipped
allRes = re.findall(TEST_REGEX_FORMAT_2_P_T, log)
for res in allRes:
test_passed += int(res[4])
total_tests += int(res[8])
test_failed += total_tests - test_passed
return total_tests, test_passed, test_failed, test_skipped
def get_metrics(log):
total, passed, failed, skipped = get_test_results(log)
return total, passed, failed, skipped
if __name__ == "__main__":
#dump_job_log(728138257)
log = joblog(185464744)
print(get_metrics(log))
|
[
"log_retriever.joblog",
"re.findall"
] |
[((1108, 1148), 're.findall', 're.findall', (['TEST_REGEX_FORMAT_2_P_T', 'log'], {}), '(TEST_REGEX_FORMAT_2_P_T, log)\n', (1118, 1148), False, 'import re\n'), ((1478, 1511), 're.findall', 're.findall', (['TEST_REGEX_P_T_2', 'log'], {}), '(TEST_REGEX_P_T_2, log)\n', (1488, 1511), False, 'import re\n'), ((1765, 1798), 're.findall', 're.findall', (['TEST_REGEX_D_P_T', 'log'], {}), '(TEST_REGEX_D_P_T, log)\n', (1775, 1798), False, 'import re\n'), ((2099, 2130), 're.findall', 're.findall', (['TEST_REGEX_P_T', 'log'], {}), '(TEST_REGEX_P_T, log)\n', (2109, 2130), False, 'import re\n'), ((2421, 2461), 're.findall', 're.findall', (['TEST_REGEX_FORMAT_2_P_T', 'log'], {}), '(TEST_REGEX_FORMAT_2_P_T, log)\n', (2431, 2461), False, 'import re\n'), ((2864, 2881), 'log_retriever.joblog', 'joblog', (['(185464744)'], {}), '(185464744)\n', (2870, 2881), False, 'from log_retriever import read_job_log, joblog\n')]
|
# Generated by Django 2.2.7 on 2019-11-17 17:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Pedido', '0039_auto_20191117_1417'),
]
operations = [
migrations.AlterField(
model_name='pedido',
name='valor_total',
field=models.DecimalField(decimal_places=2, max_digits=7, null=True, verbose_name='Valor Total'),
),
]
|
[
"django.db.models.DecimalField"
] |
[((340, 435), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(7)', 'null': '(True)', 'verbose_name': '"""Valor Total"""'}), "(decimal_places=2, max_digits=7, null=True, verbose_name\n ='Valor Total')\n", (359, 435), False, 'from django.db import migrations, models\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ScalingConfigurationArgs', 'ScalingConfiguration']
@pulumi.input_type
class ScalingConfigurationArgs:
def __init__(__self__, *,
scaling_group_id: pulumi.Input[str],
active: Optional[pulumi.Input[bool]] = None,
credit_specification: Optional[pulumi.Input[str]] = None,
data_disks: Optional[pulumi.Input[Sequence[pulumi.Input['ScalingConfigurationDataDiskArgs']]]] = None,
enable: Optional[pulumi.Input[bool]] = None,
force_delete: Optional[pulumi.Input[bool]] = None,
image_id: Optional[pulumi.Input[str]] = None,
image_name: Optional[pulumi.Input[str]] = None,
instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
instance_name: Optional[pulumi.Input[str]] = None,
instance_type: Optional[pulumi.Input[str]] = None,
instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
internet_charge_type: Optional[pulumi.Input[str]] = None,
internet_max_bandwidth_in: Optional[pulumi.Input[int]] = None,
internet_max_bandwidth_out: Optional[pulumi.Input[int]] = None,
io_optimized: Optional[pulumi.Input[str]] = None,
is_outdated: Optional[pulumi.Input[bool]] = None,
key_name: Optional[pulumi.Input[str]] = None,
kms_encrypted_password: Optional[pulumi.Input[str]] = None,
kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
override: Optional[pulumi.Input[bool]] = None,
password: Optional[pulumi.Input[str]] = None,
password_inherit: Optional[pulumi.Input[bool]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
role_name: Optional[pulumi.Input[str]] = None,
scaling_configuration_name: Optional[pulumi.Input[str]] = None,
security_group_id: Optional[pulumi.Input[str]] = None,
security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
substitute: Optional[pulumi.Input[str]] = None,
system_disk_auto_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
system_disk_category: Optional[pulumi.Input[str]] = None,
system_disk_description: Optional[pulumi.Input[str]] = None,
system_disk_name: Optional[pulumi.Input[str]] = None,
system_disk_performance_level: Optional[pulumi.Input[str]] = None,
system_disk_size: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
user_data: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a ScalingConfiguration resource.
:param pulumi.Input[str] scaling_group_id: ID of the scaling group of a scaling configuration.
:param pulumi.Input[bool] active: Whether active current scaling configuration in the specified scaling group. Default to `false`.
:param pulumi.Input[str] credit_specification: Performance mode of the t5 burstable instance. Valid values: 'Standard', 'Unlimited'.
:param pulumi.Input[Sequence[pulumi.Input['ScalingConfigurationDataDiskArgs']]] data_disks: DataDisk mappings to attach to ecs instance. See Block datadisk below for details.
:param pulumi.Input[bool] enable: Whether enable the specified scaling group(make it active) to which the current scaling configuration belongs.
:param pulumi.Input[bool] force_delete: The last scaling configuration will be deleted forcibly with deleting its scaling group. Default to false.
:param pulumi.Input[str] image_id: ID of an image file, indicating the image resource selected when an instance is enabled.
:param pulumi.Input[str] image_name: Name of an image file, indicating the image resource selected when an instance is enabled.
:param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: It has been deprecated from version 1.6.0. New resource `ess.Attachment` replaces it.
:param pulumi.Input[str] instance_name: Name of an ECS instance. Default to "ESS-Instance". It is valid from version 1.7.1.
:param pulumi.Input[str] instance_type: Resource type of an ECS instance.
:param pulumi.Input[Sequence[pulumi.Input[str]]] instance_types: Resource types of an ECS instance.
:param pulumi.Input[str] internet_charge_type: Network billing type, Values: PayByBandwidth or PayByTraffic. Default to `PayByBandwidth`.
:param pulumi.Input[int] internet_max_bandwidth_in: Maximum incoming bandwidth from the public network, measured in Mbps (Mega bit per second). The value range is [1,200].
:param pulumi.Input[int] internet_max_bandwidth_out: Maximum outgoing bandwidth from the public network, measured in Mbps (Mega bit per second). The value range for PayByBandwidth is [0,100].
:param pulumi.Input[str] io_optimized: It has been deprecated on instance resource. All the launched alicloud instances will be I/O optimized.
:param pulumi.Input[bool] is_outdated: Whether to use outdated instance type. Default to false.
:param pulumi.Input[str] key_name: The name of key pair that can login ECS instance successfully without password. If it is specified, the password would be invalid.
:param pulumi.Input[str] kms_encrypted_password: An KMS encrypts password used to a db account. If the `password` is filled in, this field will be ignored.
:param pulumi.Input[Mapping[str, Any]] kms_encryption_context: An KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating a db account with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
:param pulumi.Input[bool] override: Indicates whether to overwrite the existing data. Default to false.
:param pulumi.Input[str] password: The password of the ECS instance. The password must be 8 to 30 characters in length. It must contains at least three of the following character types: uppercase letters, lowercase letters, digits, and special characters. Special characters include `() ~!@#$%^&*-_+=\|{}[]:;'<>,.?/`, The password of Windows-based instances cannot start with a forward slash (/).
:param pulumi.Input[bool] password_inherit: Specifies whether to use the password that is predefined in the image. If the PasswordInherit parameter is set to true, the `password` and `kms_encrypted_password` will be ignored. You must ensure that the selected image has a password configured.
:param pulumi.Input[str] resource_group_id: ID of resource group.
:param pulumi.Input[str] role_name: Instance RAM role name. The name is provided and maintained by RAM. You can use `ram.Role` to create a new one.
:param pulumi.Input[str] scaling_configuration_name: Name shown for the scheduled task. which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain number, underscores `_`, hypens `-`, and decimal point `.`. If this parameter value is not specified, the default value is ScalingConfigurationId.
:param pulumi.Input[str] security_group_id: ID of the security group used to create new instance. It is conflict with `security_group_ids`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: List IDs of the security group used to create new instances. It is conflict with `security_group_id`.
:param pulumi.Input[str] substitute: The another scaling configuration which will be active automatically and replace current configuration when setting `active` to 'false'. It is invalid when `active` is 'true'.
:param pulumi.Input[str] system_disk_auto_snapshot_policy_id: The id of auto snapshot policy for system disk.
:param pulumi.Input[str] system_disk_category: Category of the system disk. The parameter value options are `ephemeral_ssd`, `cloud_efficiency`, `cloud_ssd`, `cloud_essd` and `cloud`. `cloud` only is used to some no I/O optimized instance. Default to `cloud_efficiency`.
:param pulumi.Input[str] system_disk_description: The description of the system disk. The description must be 2 to 256 characters in length and cannot start with http:// or https://.
:param pulumi.Input[str] system_disk_name: The name of the system disk. It must be 2 to 128 characters in length. It must start with a letter and cannot start with http:// or https://. It can contain letters, digits, colons (:), underscores (_), and hyphens (-). Default value: null.
:param pulumi.Input[str] system_disk_performance_level: The performance level of the ESSD used as the system disk.
:param pulumi.Input[int] system_disk_size: Size of system disk, in GiB. Optional values: cloud: 20-500, cloud_efficiency: 20-500, cloud_ssd: 20-500, ephemeral_ssd: 20-500 The default value is max{40, ImageSize}. If this parameter is set, the system disk size must be greater than or equal to max{40, ImageSize}.
:param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource. It will be applied for ECS instances finally.
- Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "http://", or "https://". It cannot be a null string.
- Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "http://", or "https://" It can be a null string.
:param pulumi.Input[str] user_data: User-defined data to customize the startup behaviors of the ECS instance and to pass data into the ECS instance.
"""
pulumi.set(__self__, "scaling_group_id", scaling_group_id)
if active is not None:
pulumi.set(__self__, "active", active)
if credit_specification is not None:
pulumi.set(__self__, "credit_specification", credit_specification)
if data_disks is not None:
pulumi.set(__self__, "data_disks", data_disks)
if enable is not None:
pulumi.set(__self__, "enable", enable)
if force_delete is not None:
pulumi.set(__self__, "force_delete", force_delete)
if image_id is not None:
pulumi.set(__self__, "image_id", image_id)
if image_name is not None:
pulumi.set(__self__, "image_name", image_name)
if instance_ids is not None:
warnings.warn("""Field 'instance_ids' has been deprecated from provider version 1.6.0. New resource 'alicloud_ess_attachment' replaces it.""", DeprecationWarning)
pulumi.log.warn("""instance_ids is deprecated: Field 'instance_ids' has been deprecated from provider version 1.6.0. New resource 'alicloud_ess_attachment' replaces it.""")
if instance_ids is not None:
pulumi.set(__self__, "instance_ids", instance_ids)
if instance_name is not None:
pulumi.set(__self__, "instance_name", instance_name)
if instance_type is not None:
pulumi.set(__self__, "instance_type", instance_type)
if instance_types is not None:
pulumi.set(__self__, "instance_types", instance_types)
if internet_charge_type is not None:
pulumi.set(__self__, "internet_charge_type", internet_charge_type)
if internet_max_bandwidth_in is not None:
pulumi.set(__self__, "internet_max_bandwidth_in", internet_max_bandwidth_in)
if internet_max_bandwidth_out is not None:
pulumi.set(__self__, "internet_max_bandwidth_out", internet_max_bandwidth_out)
if io_optimized is not None:
warnings.warn("""Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.""", DeprecationWarning)
pulumi.log.warn("""io_optimized is deprecated: Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.""")
if io_optimized is not None:
pulumi.set(__self__, "io_optimized", io_optimized)
if is_outdated is not None:
pulumi.set(__self__, "is_outdated", is_outdated)
if key_name is not None:
pulumi.set(__self__, "key_name", key_name)
if kms_encrypted_password is not None:
pulumi.set(__self__, "kms_encrypted_password", kms_encrypted_password)
if kms_encryption_context is not None:
pulumi.set(__self__, "kms_encryption_context", kms_encryption_context)
if override is not None:
pulumi.set(__self__, "override", override)
if password is not None:
pulumi.set(__self__, "password", password)
if password_inherit is not None:
pulumi.set(__self__, "password_inherit", password_inherit)
if resource_group_id is not None:
pulumi.set(__self__, "resource_group_id", resource_group_id)
if role_name is not None:
pulumi.set(__self__, "role_name", role_name)
if scaling_configuration_name is not None:
pulumi.set(__self__, "scaling_configuration_name", scaling_configuration_name)
if security_group_id is not None:
pulumi.set(__self__, "security_group_id", security_group_id)
if security_group_ids is not None:
pulumi.set(__self__, "security_group_ids", security_group_ids)
if substitute is not None:
pulumi.set(__self__, "substitute", substitute)
if system_disk_auto_snapshot_policy_id is not None:
pulumi.set(__self__, "system_disk_auto_snapshot_policy_id", system_disk_auto_snapshot_policy_id)
if system_disk_category is not None:
pulumi.set(__self__, "system_disk_category", system_disk_category)
if system_disk_description is not None:
pulumi.set(__self__, "system_disk_description", system_disk_description)
if system_disk_name is not None:
pulumi.set(__self__, "system_disk_name", system_disk_name)
if system_disk_performance_level is not None:
pulumi.set(__self__, "system_disk_performance_level", system_disk_performance_level)
if system_disk_size is not None:
pulumi.set(__self__, "system_disk_size", system_disk_size)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if user_data is not None:
pulumi.set(__self__, "user_data", user_data)
@property
@pulumi.getter(name="scalingGroupId")
def scaling_group_id(self) -> pulumi.Input[str]:
"""
ID of the scaling group of a scaling configuration.
"""
return pulumi.get(self, "scaling_group_id")
@scaling_group_id.setter
def scaling_group_id(self, value: pulumi.Input[str]):
pulumi.set(self, "scaling_group_id", value)
@property
@pulumi.getter
def active(self) -> Optional[pulumi.Input[bool]]:
"""
Whether active current scaling configuration in the specified scaling group. Default to `false`.
"""
return pulumi.get(self, "active")
@active.setter
def active(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "active", value)
@property
@pulumi.getter(name="creditSpecification")
def credit_specification(self) -> Optional[pulumi.Input[str]]:
"""
Performance mode of the t5 burstable instance. Valid values: 'Standard', 'Unlimited'.
"""
return pulumi.get(self, "credit_specification")
@credit_specification.setter
def credit_specification(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "credit_specification", value)
@property
@pulumi.getter(name="dataDisks")
def data_disks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ScalingConfigurationDataDiskArgs']]]]:
"""
DataDisk mappings to attach to ecs instance. See Block datadisk below for details.
"""
return pulumi.get(self, "data_disks")
@data_disks.setter
def data_disks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ScalingConfigurationDataDiskArgs']]]]):
pulumi.set(self, "data_disks", value)
@property
@pulumi.getter
def enable(self) -> Optional[pulumi.Input[bool]]:
"""
Whether enable the specified scaling group(make it active) to which the current scaling configuration belongs.
"""
return pulumi.get(self, "enable")
@enable.setter
def enable(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable", value)
@property
@pulumi.getter(name="forceDelete")
def force_delete(self) -> Optional[pulumi.Input[bool]]:
"""
The last scaling configuration will be deleted forcibly with deleting its scaling group. Default to false.
"""
return pulumi.get(self, "force_delete")
@force_delete.setter
def force_delete(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "force_delete", value)
@property
@pulumi.getter(name="imageId")
def image_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of an image file, indicating the image resource selected when an instance is enabled.
"""
return pulumi.get(self, "image_id")
@image_id.setter
def image_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image_id", value)
@property
@pulumi.getter(name="imageName")
def image_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of an image file, indicating the image resource selected when an instance is enabled.
"""
return pulumi.get(self, "image_name")
@image_name.setter
def image_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image_name", value)
@property
@pulumi.getter(name="instanceIds")
def instance_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
It has been deprecated from version 1.6.0. New resource `ess.Attachment` replaces it.
"""
return pulumi.get(self, "instance_ids")
@instance_ids.setter
def instance_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "instance_ids", value)
@property
@pulumi.getter(name="instanceName")
def instance_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of an ECS instance. Default to "ESS-Instance". It is valid from version 1.7.1.
"""
return pulumi.get(self, "instance_name")
@instance_name.setter
def instance_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_name", value)
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> Optional[pulumi.Input[str]]:
"""
Resource type of an ECS instance.
"""
return pulumi.get(self, "instance_type")
@instance_type.setter
def instance_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_type", value)
@property
@pulumi.getter(name="instanceTypes")
def instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Resource types of an ECS instance.
"""
return pulumi.get(self, "instance_types")
@instance_types.setter
def instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "instance_types", value)
@property
@pulumi.getter(name="internetChargeType")
def internet_charge_type(self) -> Optional[pulumi.Input[str]]:
"""
Network billing type, Values: PayByBandwidth or PayByTraffic. Default to `PayByBandwidth`.
"""
return pulumi.get(self, "internet_charge_type")
@internet_charge_type.setter
def internet_charge_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "internet_charge_type", value)
@property
@pulumi.getter(name="internetMaxBandwidthIn")
def internet_max_bandwidth_in(self) -> Optional[pulumi.Input[int]]:
"""
Maximum incoming bandwidth from the public network, measured in Mbps (Mega bit per second). The value range is [1,200].
"""
return pulumi.get(self, "internet_max_bandwidth_in")
@internet_max_bandwidth_in.setter
def internet_max_bandwidth_in(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "internet_max_bandwidth_in", value)
@property
@pulumi.getter(name="internetMaxBandwidthOut")
def internet_max_bandwidth_out(self) -> Optional[pulumi.Input[int]]:
"""
Maximum outgoing bandwidth from the public network, measured in Mbps (Mega bit per second). The value range for PayByBandwidth is [0,100].
"""
return pulumi.get(self, "internet_max_bandwidth_out")
@internet_max_bandwidth_out.setter
def internet_max_bandwidth_out(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "internet_max_bandwidth_out", value)
@property
@pulumi.getter(name="ioOptimized")
def io_optimized(self) -> Optional[pulumi.Input[str]]:
"""
It has been deprecated on instance resource. All the launched alicloud instances will be I/O optimized.
"""
return pulumi.get(self, "io_optimized")
@io_optimized.setter
def io_optimized(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "io_optimized", value)
@property
@pulumi.getter(name="isOutdated")
def is_outdated(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to use outdated instance type. Default to false.
"""
return pulumi.get(self, "is_outdated")
@is_outdated.setter
def is_outdated(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_outdated", value)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of key pair that can login ECS instance successfully without password. If it is specified, the password would be invalid.
"""
return pulumi.get(self, "key_name")
@key_name.setter
def key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_name", value)
@property
@pulumi.getter(name="kmsEncryptedPassword")
def kms_encrypted_password(self) -> Optional[pulumi.Input[str]]:
"""
An KMS encrypts password used to a db account. If the `password` is filled in, this field will be ignored.
"""
return pulumi.get(self, "kms_encrypted_password")
@kms_encrypted_password.setter
def kms_encrypted_password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kms_encrypted_password", value)
    @property
    @pulumi.getter(name="kmsEncryptionContext")
    def kms_encryption_context(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating an account with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
        """
        return pulumi.get(self, "kms_encryption_context")
    @kms_encryption_context.setter
    def kms_encryption_context(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "kms_encryption_context", value)
@property
@pulumi.getter
def override(self) -> Optional[pulumi.Input[bool]]:
"""
Indicates whether to overwrite the existing data. Default to false.
"""
return pulumi.get(self, "override")
@override.setter
def override(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "override", value)
    @property
    @pulumi.getter
    def password(self) -> Optional[pulumi.Input[str]]:
        """
        The password of the ECS instance. The password must be 8 to 30 characters in length. It must contain at least three of the following character types: uppercase letters, lowercase letters, digits, and special characters. Special characters include `() ~!@#$%^&*-_+=\|{}[]:;'<>,.?/`. The password of Windows-based instances cannot start with a forward slash (/).
        """
        return pulumi.get(self, "password")
    @password.setter
    def password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "password", value)
    @property
    @pulumi.getter(name="passwordInherit")
    def password_inherit(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether to use the password that is predefined in the image. If the PasswordInherit parameter is set to true, the `password` and `kms_encrypted_password` will be ignored. You must ensure that the selected image has a password configured.
        """
        return pulumi.get(self, "password_inherit")
    @password_inherit.setter
    def password_inherit(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "password_inherit", value)
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of resource group.
"""
return pulumi.get(self, "resource_group_id")
@resource_group_id.setter
def resource_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_id", value)
@property
@pulumi.getter(name="roleName")
def role_name(self) -> Optional[pulumi.Input[str]]:
"""
Instance RAM role name. The name is provided and maintained by RAM. You can use `ram.Role` to create a new one.
"""
return pulumi.get(self, "role_name")
@role_name.setter
def role_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "role_name", value)
    @property
    @pulumi.getter(name="scalingConfigurationName")
    def scaling_configuration_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name shown for the scaling configuration, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain numbers, underscores `_`, hyphens `-`, and decimal points `.`. If this parameter value is not specified, the default value is ScalingConfigurationId.
        """
        return pulumi.get(self, "scaling_configuration_name")
    @scaling_configuration_name.setter
    def scaling_configuration_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "scaling_configuration_name", value)
    @property
    @pulumi.getter(name="securityGroupId")
    def security_group_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of the security group used to create new instance. It conflicts with `security_group_ids`.
        """
        return pulumi.get(self, "security_group_id")
    @security_group_id.setter
    def security_group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_group_id", value)
    @property
    @pulumi.getter(name="securityGroupIds")
    def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List IDs of the security group used to create new instances. It conflicts with `security_group_id`.
        """
        return pulumi.get(self, "security_group_ids")
    @security_group_ids.setter
    def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "security_group_ids", value)
@property
@pulumi.getter
def substitute(self) -> Optional[pulumi.Input[str]]:
"""
The another scaling configuration which will be active automatically and replace current configuration when setting `active` to 'false'. It is invalid when `active` is 'true'.
"""
return pulumi.get(self, "substitute")
@substitute.setter
def substitute(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "substitute", value)
@property
@pulumi.getter(name="systemDiskAutoSnapshotPolicyId")
def system_disk_auto_snapshot_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
The id of auto snapshot policy for system disk.
"""
return pulumi.get(self, "system_disk_auto_snapshot_policy_id")
@system_disk_auto_snapshot_policy_id.setter
def system_disk_auto_snapshot_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "system_disk_auto_snapshot_policy_id", value)
@property
@pulumi.getter(name="systemDiskCategory")
def system_disk_category(self) -> Optional[pulumi.Input[str]]:
"""
Category of the system disk. The parameter value options are `ephemeral_ssd`, `cloud_efficiency`, `cloud_ssd`, `cloud_essd` and `cloud`. `cloud` only is used to some no I/O optimized instance. Default to `cloud_efficiency`.
"""
return pulumi.get(self, "system_disk_category")
@system_disk_category.setter
def system_disk_category(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "system_disk_category", value)
@property
@pulumi.getter(name="systemDiskDescription")
def system_disk_description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the system disk. The description must be 2 to 256 characters in length and cannot start with http:// or https://.
"""
return pulumi.get(self, "system_disk_description")
@system_disk_description.setter
def system_disk_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "system_disk_description", value)
@property
@pulumi.getter(name="systemDiskName")
def system_disk_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the system disk. It must be 2 to 128 characters in length. It must start with a letter and cannot start with http:// or https://. It can contain letters, digits, colons (:), underscores (_), and hyphens (-). Default value: null.
"""
return pulumi.get(self, "system_disk_name")
@system_disk_name.setter
def system_disk_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "system_disk_name", value)
@property
@pulumi.getter(name="systemDiskPerformanceLevel")
def system_disk_performance_level(self) -> Optional[pulumi.Input[str]]:
"""
The performance level of the ESSD used as the system disk.
"""
return pulumi.get(self, "system_disk_performance_level")
@system_disk_performance_level.setter
def system_disk_performance_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "system_disk_performance_level", value)
@property
@pulumi.getter(name="systemDiskSize")
def system_disk_size(self) -> Optional[pulumi.Input[int]]:
"""
Size of system disk, in GiB. Optional values: cloud: 20-500, cloud_efficiency: 20-500, cloud_ssd: 20-500, ephemeral_ssd: 20-500 The default value is max{40, ImageSize}. If this parameter is set, the system disk size must be greater than or equal to max{40, ImageSize}.
"""
return pulumi.get(self, "system_disk_size")
@system_disk_size.setter
def system_disk_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "system_disk_size", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
A mapping of tags to assign to the resource. It will be applied for ECS instances finally.
- Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "http://", or "https://". It cannot be a null string.
- Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "http://", or "https://" It can be a null string.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="userData")
def user_data(self) -> Optional[pulumi.Input[str]]:
"""
User-defined data to customize the startup behaviors of the ECS instance and to pass data into the ECS instance.
"""
return pulumi.get(self, "user_data")
@user_data.setter
def user_data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_data", value)
@pulumi.input_type
class _ScalingConfigurationState:
    def __init__(__self__, *,
                 active: Optional[pulumi.Input[bool]] = None,
                 credit_specification: Optional[pulumi.Input[str]] = None,
                 data_disks: Optional[pulumi.Input[Sequence[pulumi.Input['ScalingConfigurationDataDiskArgs']]]] = None,
                 enable: Optional[pulumi.Input[bool]] = None,
                 force_delete: Optional[pulumi.Input[bool]] = None,
                 image_id: Optional[pulumi.Input[str]] = None,
                 image_name: Optional[pulumi.Input[str]] = None,
                 instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 instance_name: Optional[pulumi.Input[str]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 internet_charge_type: Optional[pulumi.Input[str]] = None,
                 internet_max_bandwidth_in: Optional[pulumi.Input[int]] = None,
                 internet_max_bandwidth_out: Optional[pulumi.Input[int]] = None,
                 io_optimized: Optional[pulumi.Input[str]] = None,
                 is_outdated: Optional[pulumi.Input[bool]] = None,
                 key_name: Optional[pulumi.Input[str]] = None,
                 kms_encrypted_password: Optional[pulumi.Input[str]] = None,
                 kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 override: Optional[pulumi.Input[bool]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 password_inherit: Optional[pulumi.Input[bool]] = None,
                 resource_group_id: Optional[pulumi.Input[str]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 scaling_configuration_name: Optional[pulumi.Input[str]] = None,
                 scaling_group_id: Optional[pulumi.Input[str]] = None,
                 security_group_id: Optional[pulumi.Input[str]] = None,
                 security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 substitute: Optional[pulumi.Input[str]] = None,
                 system_disk_auto_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
                 system_disk_category: Optional[pulumi.Input[str]] = None,
                 system_disk_description: Optional[pulumi.Input[str]] = None,
                 system_disk_name: Optional[pulumi.Input[str]] = None,
                 system_disk_performance_level: Optional[pulumi.Input[str]] = None,
                 system_disk_size: Optional[pulumi.Input[int]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 user_data: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ScalingConfiguration resources.
        :param pulumi.Input[bool] active: Whether active current scaling configuration in the specified scaling group. Default to `false`.
        :param pulumi.Input[str] credit_specification: Performance mode of the t5 burstable instance. Valid values: 'Standard', 'Unlimited'.
        :param pulumi.Input[Sequence[pulumi.Input['ScalingConfigurationDataDiskArgs']]] data_disks: DataDisk mappings to attach to ecs instance. See Block datadisk below for details.
        :param pulumi.Input[bool] enable: Whether enable the specified scaling group(make it active) to which the current scaling configuration belongs.
        :param pulumi.Input[bool] force_delete: The last scaling configuration will be deleted forcibly with deleting its scaling group. Default to false.
        :param pulumi.Input[str] image_id: ID of an image file, indicating the image resource selected when an instance is enabled.
        :param pulumi.Input[str] image_name: Name of an image file, indicating the image resource selected when an instance is enabled.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: It has been deprecated from version 1.6.0. New resource `ess.Attachment` replaces it.
        :param pulumi.Input[str] instance_name: Name of an ECS instance. Default to "ESS-Instance". It is valid from version 1.7.1.
        :param pulumi.Input[str] instance_type: Resource type of an ECS instance.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_types: Resource types of an ECS instance.
        :param pulumi.Input[str] internet_charge_type: Network billing type, Values: PayByBandwidth or PayByTraffic. Default to `PayByBandwidth`.
        :param pulumi.Input[int] internet_max_bandwidth_in: Maximum incoming bandwidth from the public network, measured in Mbps (Mega bit per second). The value range is [1,200].
        :param pulumi.Input[int] internet_max_bandwidth_out: Maximum outgoing bandwidth from the public network, measured in Mbps (Mega bit per second). The value range for PayByBandwidth is [0,100].
        :param pulumi.Input[str] io_optimized: It has been deprecated on the instance resource. All the launched Alicloud instances will be I/O optimized.
        :param pulumi.Input[bool] is_outdated: Whether to use outdated instance type. Default to false.
        :param pulumi.Input[str] key_name: The name of key pair that can login ECS instance successfully without password. If it is specified, the password would be invalid.
        :param pulumi.Input[str] kms_encrypted_password: A KMS-encrypted password. If the `password` is filled in, this field will be ignored.
        :param pulumi.Input[Mapping[str, Any]] kms_encryption_context: A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating an account with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
        :param pulumi.Input[bool] override: Indicates whether to overwrite the existing data. Default to false.
        :param pulumi.Input[str] password: The password of the ECS instance. The password must be 8 to 30 characters in length. It must contain at least three of the following character types: uppercase letters, lowercase letters, digits, and special characters. Special characters include `() ~!@#$%^&*-_+=\|{}[]:;'<>,.?/`. The password of Windows-based instances cannot start with a forward slash (/).
        :param pulumi.Input[bool] password_inherit: Specifies whether to use the password that is predefined in the image. If the PasswordInherit parameter is set to true, the `password` and `kms_encrypted_password` will be ignored. You must ensure that the selected image has a password configured.
        :param pulumi.Input[str] resource_group_id: ID of resource group.
        :param pulumi.Input[str] role_name: Instance RAM role name. The name is provided and maintained by RAM. You can use `ram.Role` to create a new one.
        :param pulumi.Input[str] scaling_configuration_name: Name shown for the scaling configuration, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain numbers, underscores `_`, hyphens `-`, and decimal points `.`. If this parameter value is not specified, the default value is ScalingConfigurationId.
        :param pulumi.Input[str] scaling_group_id: ID of the scaling group of a scaling configuration.
        :param pulumi.Input[str] security_group_id: ID of the security group used to create new instance. It conflicts with `security_group_ids`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: List IDs of the security group used to create new instances. It conflicts with `security_group_id`.
        :param pulumi.Input[str] substitute: The another scaling configuration which will be active automatically and replace current configuration when setting `active` to 'false'. It is invalid when `active` is 'true'.
        :param pulumi.Input[str] system_disk_auto_snapshot_policy_id: The id of auto snapshot policy for system disk.
        :param pulumi.Input[str] system_disk_category: Category of the system disk. The parameter value options are `ephemeral_ssd`, `cloud_efficiency`, `cloud_ssd`, `cloud_essd` and `cloud`. `cloud` only is used to some no I/O optimized instance. Default to `cloud_efficiency`.
        :param pulumi.Input[str] system_disk_description: The description of the system disk. The description must be 2 to 256 characters in length and cannot start with http:// or https://.
        :param pulumi.Input[str] system_disk_name: The name of the system disk. It must be 2 to 128 characters in length. It must start with a letter and cannot start with http:// or https://. It can contain letters, digits, colons (:), underscores (_), and hyphens (-). Default value: null.
        :param pulumi.Input[str] system_disk_performance_level: The performance level of the ESSD used as the system disk.
        :param pulumi.Input[int] system_disk_size: Size of system disk, in GiB. Optional values: cloud: 20-500, cloud_efficiency: 20-500, cloud_ssd: 20-500, ephemeral_ssd: 20-500 The default value is max{40, ImageSize}. If this parameter is set, the system disk size must be greater than or equal to max{40, ImageSize}.
        :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource. It will be applied for ECS instances finally.
               - Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "http://", or "https://". It cannot be a null string.
               - Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "http://", or "https://" It can be a null string.
        :param pulumi.Input[str] user_data: User-defined data to customize the startup behaviors of the ECS instance and to pass data into the ECS instance.
        """
        if active is not None:
            pulumi.set(__self__, "active", active)
        if credit_specification is not None:
            pulumi.set(__self__, "credit_specification", credit_specification)
        if data_disks is not None:
            pulumi.set(__self__, "data_disks", data_disks)
        if enable is not None:
            pulumi.set(__self__, "enable", enable)
        if force_delete is not None:
            pulumi.set(__self__, "force_delete", force_delete)
        if image_id is not None:
            pulumi.set(__self__, "image_id", image_id)
        if image_name is not None:
            pulumi.set(__self__, "image_name", image_name)
        # The deprecation warning for instance_ids is emitted before the value is stored below.
        if instance_ids is not None:
            warnings.warn("""Field 'instance_ids' has been deprecated from provider version 1.6.0. New resource 'alicloud_ess_attachment' replaces it.""", DeprecationWarning)
            pulumi.log.warn("""instance_ids is deprecated: Field 'instance_ids' has been deprecated from provider version 1.6.0. New resource 'alicloud_ess_attachment' replaces it.""")
        if instance_ids is not None:
            pulumi.set(__self__, "instance_ids", instance_ids)
        if instance_name is not None:
            pulumi.set(__self__, "instance_name", instance_name)
        if instance_type is not None:
            pulumi.set(__self__, "instance_type", instance_type)
        if instance_types is not None:
            pulumi.set(__self__, "instance_types", instance_types)
        if internet_charge_type is not None:
            pulumi.set(__self__, "internet_charge_type", internet_charge_type)
        if internet_max_bandwidth_in is not None:
            pulumi.set(__self__, "internet_max_bandwidth_in", internet_max_bandwidth_in)
        if internet_max_bandwidth_out is not None:
            pulumi.set(__self__, "internet_max_bandwidth_out", internet_max_bandwidth_out)
        # io_optimized is likewise deprecated but still accepted for backward compatibility.
        if io_optimized is not None:
            warnings.warn("""Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.""", DeprecationWarning)
            pulumi.log.warn("""io_optimized is deprecated: Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.""")
        if io_optimized is not None:
            pulumi.set(__self__, "io_optimized", io_optimized)
        if is_outdated is not None:
            pulumi.set(__self__, "is_outdated", is_outdated)
        if key_name is not None:
            pulumi.set(__self__, "key_name", key_name)
        if kms_encrypted_password is not None:
            pulumi.set(__self__, "kms_encrypted_password", kms_encrypted_password)
        if kms_encryption_context is not None:
            pulumi.set(__self__, "kms_encryption_context", kms_encryption_context)
        if override is not None:
            pulumi.set(__self__, "override", override)
        if password is not None:
            pulumi.set(__self__, "password", password)
        if password_inherit is not None:
            pulumi.set(__self__, "password_inherit", password_inherit)
        if resource_group_id is not None:
            pulumi.set(__self__, "resource_group_id", resource_group_id)
        if role_name is not None:
            pulumi.set(__self__, "role_name", role_name)
        if scaling_configuration_name is not None:
            pulumi.set(__self__, "scaling_configuration_name", scaling_configuration_name)
        if scaling_group_id is not None:
            pulumi.set(__self__, "scaling_group_id", scaling_group_id)
        if security_group_id is not None:
            pulumi.set(__self__, "security_group_id", security_group_id)
        if security_group_ids is not None:
            pulumi.set(__self__, "security_group_ids", security_group_ids)
        if substitute is not None:
            pulumi.set(__self__, "substitute", substitute)
        if system_disk_auto_snapshot_policy_id is not None:
            pulumi.set(__self__, "system_disk_auto_snapshot_policy_id", system_disk_auto_snapshot_policy_id)
        if system_disk_category is not None:
            pulumi.set(__self__, "system_disk_category", system_disk_category)
        if system_disk_description is not None:
            pulumi.set(__self__, "system_disk_description", system_disk_description)
        if system_disk_name is not None:
            pulumi.set(__self__, "system_disk_name", system_disk_name)
        if system_disk_performance_level is not None:
            pulumi.set(__self__, "system_disk_performance_level", system_disk_performance_level)
        if system_disk_size is not None:
            pulumi.set(__self__, "system_disk_size", system_disk_size)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if user_data is not None:
            pulumi.set(__self__, "user_data", user_data)
@property
@pulumi.getter
def active(self) -> Optional[pulumi.Input[bool]]:
"""
Whether active current scaling configuration in the specified scaling group. Default to `false`.
"""
return pulumi.get(self, "active")
@active.setter
def active(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "active", value)
@property
@pulumi.getter(name="creditSpecification")
def credit_specification(self) -> Optional[pulumi.Input[str]]:
"""
Performance mode of the t5 burstable instance. Valid values: 'Standard', 'Unlimited'.
"""
return pulumi.get(self, "credit_specification")
@credit_specification.setter
def credit_specification(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "credit_specification", value)
@property
@pulumi.getter(name="dataDisks")
def data_disks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ScalingConfigurationDataDiskArgs']]]]:
"""
DataDisk mappings to attach to ecs instance. See Block datadisk below for details.
"""
return pulumi.get(self, "data_disks")
@data_disks.setter
def data_disks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ScalingConfigurationDataDiskArgs']]]]):
pulumi.set(self, "data_disks", value)
@property
@pulumi.getter
def enable(self) -> Optional[pulumi.Input[bool]]:
"""
Whether enable the specified scaling group(make it active) to which the current scaling configuration belongs.
"""
return pulumi.get(self, "enable")
@enable.setter
def enable(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable", value)
@property
@pulumi.getter(name="forceDelete")
def force_delete(self) -> Optional[pulumi.Input[bool]]:
"""
The last scaling configuration will be deleted forcibly with deleting its scaling group. Default to false.
"""
return pulumi.get(self, "force_delete")
@force_delete.setter
def force_delete(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "force_delete", value)
@property
@pulumi.getter(name="imageId")
def image_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of an image file, indicating the image resource selected when an instance is enabled.
"""
return pulumi.get(self, "image_id")
@image_id.setter
def image_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image_id", value)
@property
@pulumi.getter(name="imageName")
def image_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of an image file, indicating the image resource selected when an instance is enabled.
"""
return pulumi.get(self, "image_name")
@image_name.setter
def image_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image_name", value)
@property
@pulumi.getter(name="instanceIds")
def instance_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
It has been deprecated from version 1.6.0. New resource `ess.Attachment` replaces it.
"""
return pulumi.get(self, "instance_ids")
@instance_ids.setter
def instance_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "instance_ids", value)
@property
@pulumi.getter(name="instanceName")
def instance_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of an ECS instance. Default to "ESS-Instance". It is valid from version 1.7.1.
"""
return pulumi.get(self, "instance_name")
@instance_name.setter
def instance_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_name", value)
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> Optional[pulumi.Input[str]]:
"""
Resource type of an ECS instance.
"""
return pulumi.get(self, "instance_type")
@instance_type.setter
def instance_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_type", value)
@property
@pulumi.getter(name="instanceTypes")
def instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Resource types of an ECS instance.
"""
return pulumi.get(self, "instance_types")
@instance_types.setter
def instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "instance_types", value)
@property
@pulumi.getter(name="internetChargeType")
def internet_charge_type(self) -> Optional[pulumi.Input[str]]:
"""
Network billing type, Values: PayByBandwidth or PayByTraffic. Default to `PayByBandwidth`.
"""
return pulumi.get(self, "internet_charge_type")
@internet_charge_type.setter
def internet_charge_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "internet_charge_type", value)
@property
@pulumi.getter(name="internetMaxBandwidthIn")
def internet_max_bandwidth_in(self) -> Optional[pulumi.Input[int]]:
"""
Maximum incoming bandwidth from the public network, measured in Mbps (Mega bit per second). The value range is [1,200].
"""
return pulumi.get(self, "internet_max_bandwidth_in")
@internet_max_bandwidth_in.setter
def internet_max_bandwidth_in(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "internet_max_bandwidth_in", value)
@property
@pulumi.getter(name="internetMaxBandwidthOut")
def internet_max_bandwidth_out(self) -> Optional[pulumi.Input[int]]:
"""
Maximum outgoing bandwidth from the public network, measured in Mbps (Mega bit per second). The value range for PayByBandwidth is [0,100].
"""
return pulumi.get(self, "internet_max_bandwidth_out")
@internet_max_bandwidth_out.setter
def internet_max_bandwidth_out(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "internet_max_bandwidth_out", value)
    @property
    @pulumi.getter(name="ioOptimized")
    def io_optimized(self) -> Optional[pulumi.Input[str]]:
        """
        It has been deprecated on the instance resource. All the launched Alicloud instances will be I/O optimized.
        """
        return pulumi.get(self, "io_optimized")
    @io_optimized.setter
    def io_optimized(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "io_optimized", value)
@property
@pulumi.getter(name="isOutdated")
def is_outdated(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to use outdated instance type. Default to false.
"""
return pulumi.get(self, "is_outdated")
@is_outdated.setter
def is_outdated(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_outdated", value)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of key pair that can login ECS instance successfully without password. If it is specified, the password would be invalid.
"""
return pulumi.get(self, "key_name")
@key_name.setter
def key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_name", value)
    @property
    @pulumi.getter(name="kmsEncryptedPassword")
    def kms_encrypted_password(self) -> Optional[pulumi.Input[str]]:
        """
        A KMS-encrypted password. If the `password` is filled in, this field will be ignored.
        """
        return pulumi.get(self, "kms_encrypted_password")
    @kms_encrypted_password.setter
    def kms_encrypted_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kms_encrypted_password", value)
    @property
    @pulumi.getter(name="kmsEncryptionContext")
    def kms_encryption_context(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating an account with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
        """
        return pulumi.get(self, "kms_encryption_context")
    @kms_encryption_context.setter
    def kms_encryption_context(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "kms_encryption_context", value)
@property
@pulumi.getter
def override(self) -> Optional[pulumi.Input[bool]]:
"""
Indicates whether to overwrite the existing data. Default to false.
"""
return pulumi.get(self, "override")
@override.setter
def override(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "override", value)
    @property
    @pulumi.getter
    def password(self) -> Optional[pulumi.Input[str]]:
        """
        The password of the ECS instance. The password must be 8 to 30 characters in length. It must contain at least three of the following character types: uppercase letters, lowercase letters, digits, and special characters. Special characters include `() ~!@#$%^&*-_+=\|{}[]:;'<>,.?/`. The password of Windows-based instances cannot start with a forward slash (/).
        """
        return pulumi.get(self, "password")
    @password.setter
    def password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "password", value)
@property
@pulumi.getter(name="passwordInherit")
def password_inherit(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether to use the password that is predefined in the image. If the PasswordInherit parameter is set to true, the `password` and `kms_encrypted_password` will be ignored. You must ensure that the selected image has a password configured.
"""
return pulumi.get(self, "password_inherit")
@password_inherit.setter
def password_inherit(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "password_inherit", value)
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of resource group.
"""
return pulumi.get(self, "resource_group_id")
@resource_group_id.setter
def resource_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_id", value)
@property
@pulumi.getter(name="roleName")
def role_name(self) -> Optional[pulumi.Input[str]]:
"""
Instance RAM role name. The name is provided and maintained by RAM. You can use `ram.Role` to create a new one.
"""
return pulumi.get(self, "role_name")
@role_name.setter
def role_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "role_name", value)
    @property
    @pulumi.getter(name="scalingConfigurationName")
    def scaling_configuration_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name shown for the scaling configuration, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain numbers, underscores `_`, hyphens `-`, and decimal points `.`. If this parameter value is not specified, the default value is ScalingConfigurationId.
        """
        return pulumi.get(self, "scaling_configuration_name")
    @scaling_configuration_name.setter
    def scaling_configuration_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "scaling_configuration_name", value)
@property
@pulumi.getter(name="scalingGroupId")
def scaling_group_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the scaling group of a scaling configuration.
"""
return pulumi.get(self, "scaling_group_id")
@scaling_group_id.setter
def scaling_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "scaling_group_id", value)
    @property
    @pulumi.getter(name="securityGroupId")
    def security_group_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of the security group used to create new instance. It conflicts with `security_group_ids`.
        """
        return pulumi.get(self, "security_group_id")
    @security_group_id.setter
    def security_group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_group_id", value)
    @property
    @pulumi.getter(name="securityGroupIds")
    def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of IDs of the security groups used to create new instances. It conflicts with `security_group_id`.
        """
        return pulumi.get(self, "security_group_ids")
    @security_group_ids.setter
    def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "security_group_ids", value)
    @property
    @pulumi.getter
    def substitute(self) -> Optional[pulumi.Input[str]]:
        """
        The other scaling configuration which will become active automatically and replace the current configuration when `active` is set to 'false'. It is invalid when `active` is 'true'.
        """
        return pulumi.get(self, "substitute")
    @substitute.setter
    def substitute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "substitute", value)
    @property
    @pulumi.getter(name="systemDiskAutoSnapshotPolicyId")
    def system_disk_auto_snapshot_policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the automatic snapshot policy applied to the system disk.
        """
        return pulumi.get(self, "system_disk_auto_snapshot_policy_id")
    @system_disk_auto_snapshot_policy_id.setter
    def system_disk_auto_snapshot_policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "system_disk_auto_snapshot_policy_id", value)
    @property
    @pulumi.getter(name="systemDiskCategory")
    def system_disk_category(self) -> Optional[pulumi.Input[str]]:
        """
        Category of the system disk. The parameter value options are `ephemeral_ssd`, `cloud_efficiency`, `cloud_ssd`, `cloud_essd` and `cloud`. `cloud` can only be used by instances that are not I/O optimized. Default to `cloud_efficiency`.
        """
        return pulumi.get(self, "system_disk_category")
    @system_disk_category.setter
    def system_disk_category(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "system_disk_category", value)
    @property
    @pulumi.getter(name="systemDiskDescription")
    def system_disk_description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the system disk. The description must be 2 to 256 characters in length and cannot start with http:// or https://.
        """
        return pulumi.get(self, "system_disk_description")
    @system_disk_description.setter
    def system_disk_description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "system_disk_description", value)
    @property
    @pulumi.getter(name="systemDiskName")
    def system_disk_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the system disk. It must be 2 to 128 characters in length. It must start with a letter and cannot start with http:// or https://. It can contain letters, digits, colons (:), underscores (_), and hyphens (-). Default value: null.
        """
        return pulumi.get(self, "system_disk_name")
    @system_disk_name.setter
    def system_disk_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "system_disk_name", value)
    @property
    @pulumi.getter(name="systemDiskPerformanceLevel")
    def system_disk_performance_level(self) -> Optional[pulumi.Input[str]]:
        """
        The performance level of the ESSD used as the system disk.
        """
        return pulumi.get(self, "system_disk_performance_level")
    @system_disk_performance_level.setter
    def system_disk_performance_level(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "system_disk_performance_level", value)
    @property
    @pulumi.getter(name="systemDiskSize")
    def system_disk_size(self) -> Optional[pulumi.Input[int]]:
        """
        Size of the system disk, in GiB. Optional values: cloud: 20-500, cloud_efficiency: 20-500, cloud_ssd: 20-500, ephemeral_ssd: 20-500. The default value is max{40, ImageSize}. If this parameter is set, the system disk size must be greater than or equal to max{40, ImageSize}.
        """
        return pulumi.get(self, "system_disk_size")
    @system_disk_size.setter
    def system_disk_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "system_disk_size", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        A mapping of tags to assign to the resource. It will be applied to the ECS instances finally.
        - Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "http://", or "https://". It cannot be a null string.
        - Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "http://", or "https://". It can be a null string.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter(name="userData")
    def user_data(self) -> Optional[pulumi.Input[str]]:
        """
        User-defined data to customize the startup behaviors of the ECS instance and to pass data into the ECS instance.
        """
        return pulumi.get(self, "user_data")
    @user_data.setter
    def user_data(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_data", value)
class ScalingConfiguration(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 active: Optional[pulumi.Input[bool]] = None,
                 credit_specification: Optional[pulumi.Input[str]] = None,
                 data_disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ScalingConfigurationDataDiskArgs']]]]] = None,
                 enable: Optional[pulumi.Input[bool]] = None,
                 force_delete: Optional[pulumi.Input[bool]] = None,
                 image_id: Optional[pulumi.Input[str]] = None,
                 image_name: Optional[pulumi.Input[str]] = None,
                 instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 instance_name: Optional[pulumi.Input[str]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 internet_charge_type: Optional[pulumi.Input[str]] = None,
                 internet_max_bandwidth_in: Optional[pulumi.Input[int]] = None,
                 internet_max_bandwidth_out: Optional[pulumi.Input[int]] = None,
                 io_optimized: Optional[pulumi.Input[str]] = None,
                 is_outdated: Optional[pulumi.Input[bool]] = None,
                 key_name: Optional[pulumi.Input[str]] = None,
                 kms_encrypted_password: Optional[pulumi.Input[str]] = None,
                 kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 override: Optional[pulumi.Input[bool]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 password_inherit: Optional[pulumi.Input[bool]] = None,
                 resource_group_id: Optional[pulumi.Input[str]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 scaling_configuration_name: Optional[pulumi.Input[str]] = None,
                 scaling_group_id: Optional[pulumi.Input[str]] = None,
                 security_group_id: Optional[pulumi.Input[str]] = None,
                 security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 substitute: Optional[pulumi.Input[str]] = None,
                 system_disk_auto_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
                 system_disk_category: Optional[pulumi.Input[str]] = None,
                 system_disk_description: Optional[pulumi.Input[str]] = None,
                 system_disk_name: Optional[pulumi.Input[str]] = None,
                 system_disk_performance_level: Optional[pulumi.Input[str]] = None,
                 system_disk_size: Optional[pulumi.Input[int]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 user_data: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        ## Import
        ESS scaling configuration can be imported using the id, e.g.
        ```sh
        $ pulumi import alicloud:ess/scalingConfiguration:ScalingConfiguration example asg-abc123456
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] active: Whether active current scaling configuration in the specified scaling group. Default to `false`.
        :param pulumi.Input[str] credit_specification: Performance mode of the t5 burstable instance. Valid values: 'Standard', 'Unlimited'.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ScalingConfigurationDataDiskArgs']]]] data_disks: DataDisk mappings to attach to ecs instance. See Block datadisk below for details.
        :param pulumi.Input[bool] enable: Whether enable the specified scaling group(make it active) to which the current scaling configuration belongs.
        :param pulumi.Input[bool] force_delete: The last scaling configuration will be deleted forcibly with deleting its scaling group. Default to false.
        :param pulumi.Input[str] image_id: ID of an image file, indicating the image resource selected when an instance is enabled.
        :param pulumi.Input[str] image_name: Name of an image file, indicating the image resource selected when an instance is enabled.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: It has been deprecated from version 1.6.0. New resource `ess.Attachment` replaces it.
        :param pulumi.Input[str] instance_name: Name of an ECS instance. Default to "ESS-Instance". It is valid from version 1.7.1.
        :param pulumi.Input[str] instance_type: Resource type of an ECS instance.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_types: Resource types of an ECS instance.
        :param pulumi.Input[str] internet_charge_type: Network billing type, Values: PayByBandwidth or PayByTraffic. Default to `PayByBandwidth`.
        :param pulumi.Input[int] internet_max_bandwidth_in: Maximum incoming bandwidth from the public network, measured in Mbps (Mega bit per second). The value range is [1,200].
        :param pulumi.Input[int] internet_max_bandwidth_out: Maximum outgoing bandwidth from the public network, measured in Mbps (Mega bit per second). The value range for PayByBandwidth is [0,100].
        :param pulumi.Input[str] io_optimized: It has been deprecated on instance resource. All the launched alicloud instances will be I/O optimized.
        :param pulumi.Input[bool] is_outdated: Whether to use outdated instance type. Default to false.
        :param pulumi.Input[str] key_name: The name of key pair that can login ECS instance successfully without password. If it is specified, the password would be invalid.
        :param pulumi.Input[str] kms_encrypted_password: A KMS-encrypted password used for the instance. If the `password` is filled in, this field will be ignored.
        :param pulumi.Input[Mapping[str, Any]] kms_encryption_context: A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating an instance with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
        :param pulumi.Input[bool] override: Indicates whether to overwrite the existing data. Default to false.
        :param pulumi.Input[str] password: The password of the ECS instance. The password must be 8 to 30 characters in length. It must contains at least three of the following character types: uppercase letters, lowercase letters, digits, and special characters. Special characters include `() ~!@#$%^&*-_+=\|{}[]:;'<>,.?/`, The password of Windows-based instances cannot start with a forward slash (/).
        :param pulumi.Input[bool] password_inherit: Specifies whether to use the password that is predefined in the image. If the PasswordInherit parameter is set to true, the `password` and `kms_encrypted_password` will be ignored. You must ensure that the selected image has a password configured.
        :param pulumi.Input[str] resource_group_id: ID of resource group.
        :param pulumi.Input[str] role_name: Instance RAM role name. The name is provided and maintained by RAM. You can use `ram.Role` to create a new one.
        :param pulumi.Input[str] scaling_configuration_name: Name shown for the scaling configuration, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain numbers, underscores `_`, hyphens `-`, and decimal points `.`. If this parameter value is not specified, the default value is the ScalingConfigurationId.
        :param pulumi.Input[str] scaling_group_id: ID of the scaling group of a scaling configuration.
        :param pulumi.Input[str] security_group_id: ID of the security group used to create new instance. It conflicts with `security_group_ids`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: List of IDs of the security groups used to create new instances. It conflicts with `security_group_id`.
        :param pulumi.Input[str] substitute: The other scaling configuration which will become active automatically and replace the current configuration when `active` is set to 'false'. It is invalid when `active` is 'true'.
        :param pulumi.Input[str] system_disk_auto_snapshot_policy_id: The ID of the automatic snapshot policy applied to the system disk.
        :param pulumi.Input[str] system_disk_category: Category of the system disk. The parameter value options are `ephemeral_ssd`, `cloud_efficiency`, `cloud_ssd`, `cloud_essd` and `cloud`. `cloud` only is used to some no I/O optimized instance. Default to `cloud_efficiency`.
        :param pulumi.Input[str] system_disk_description: The description of the system disk. The description must be 2 to 256 characters in length and cannot start with http:// or https://.
        :param pulumi.Input[str] system_disk_name: The name of the system disk. It must be 2 to 128 characters in length. It must start with a letter and cannot start with http:// or https://. It can contain letters, digits, colons (:), underscores (_), and hyphens (-). Default value: null.
        :param pulumi.Input[str] system_disk_performance_level: The performance level of the ESSD used as the system disk.
        :param pulumi.Input[int] system_disk_size: Size of system disk, in GiB. Optional values: cloud: 20-500, cloud_efficiency: 20-500, cloud_ssd: 20-500, ephemeral_ssd: 20-500 The default value is max{40, ImageSize}. If this parameter is set, the system disk size must be greater than or equal to max{40, ImageSize}.
        :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource. It will be applied for ECS instances finally.
               - Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "http://", or "https://". It cannot be a null string.
               - Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "http://", or "https://" It can be a null string.
        :param pulumi.Input[str] user_data: User-defined data to customize the startup behaviors of the ECS instance and to pass data into the ECS instance.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ScalingConfigurationArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        ## Import
        ESS scaling configuration can be imported using the id, e.g.
        ```sh
        $ pulumi import alicloud:ess/scalingConfiguration:ScalingConfiguration example asg-abc123456
        ```
        :param str resource_name: The name of the resource.
        :param ScalingConfigurationArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # ScalingConfigurationArgs object or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(ScalingConfigurationArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 active: Optional[pulumi.Input[bool]] = None,
                 credit_specification: Optional[pulumi.Input[str]] = None,
                 data_disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ScalingConfigurationDataDiskArgs']]]]] = None,
                 enable: Optional[pulumi.Input[bool]] = None,
                 force_delete: Optional[pulumi.Input[bool]] = None,
                 image_id: Optional[pulumi.Input[str]] = None,
                 image_name: Optional[pulumi.Input[str]] = None,
                 instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 instance_name: Optional[pulumi.Input[str]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 internet_charge_type: Optional[pulumi.Input[str]] = None,
                 internet_max_bandwidth_in: Optional[pulumi.Input[int]] = None,
                 internet_max_bandwidth_out: Optional[pulumi.Input[int]] = None,
                 io_optimized: Optional[pulumi.Input[str]] = None,
                 is_outdated: Optional[pulumi.Input[bool]] = None,
                 key_name: Optional[pulumi.Input[str]] = None,
                 kms_encrypted_password: Optional[pulumi.Input[str]] = None,
                 kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 override: Optional[pulumi.Input[bool]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 password_inherit: Optional[pulumi.Input[bool]] = None,
                 resource_group_id: Optional[pulumi.Input[str]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 scaling_configuration_name: Optional[pulumi.Input[str]] = None,
                 scaling_group_id: Optional[pulumi.Input[str]] = None,
                 security_group_id: Optional[pulumi.Input[str]] = None,
                 security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 substitute: Optional[pulumi.Input[str]] = None,
                 system_disk_auto_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
                 system_disk_category: Optional[pulumi.Input[str]] = None,
                 system_disk_description: Optional[pulumi.Input[str]] = None,
                 system_disk_name: Optional[pulumi.Input[str]] = None,
                 system_disk_performance_level: Optional[pulumi.Input[str]] = None,
                 system_disk_size: Optional[pulumi.Input[int]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 user_data: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        The real constructor backing both ``__init__`` overloads: it validates
        resource options, copies the inputs into the resource's property bag,
        emits deprecation warnings for legacy fields, and then registers the
        resource by calling the ``pulumi.CustomResource`` constructor.
        """
        # Normalize and validate the resource options.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # __props__ is reserved for the get() path, where opts.id is set.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ScalingConfigurationArgs.__new__(ScalingConfigurationArgs)
            __props__.__dict__["active"] = active
            __props__.__dict__["credit_specification"] = credit_specification
            __props__.__dict__["data_disks"] = data_disks
            __props__.__dict__["enable"] = enable
            __props__.__dict__["force_delete"] = force_delete
            __props__.__dict__["image_id"] = image_id
            __props__.__dict__["image_name"] = image_name
            # Warn only for fresh resources (opts.urn is unset), not lookups.
            if instance_ids is not None and not opts.urn:
                warnings.warn("""Field 'instance_ids' has been deprecated from provider version 1.6.0. New resource 'alicloud_ess_attachment' replaces it.""", DeprecationWarning)
                pulumi.log.warn("""instance_ids is deprecated: Field 'instance_ids' has been deprecated from provider version 1.6.0. New resource 'alicloud_ess_attachment' replaces it.""")
            __props__.__dict__["instance_ids"] = instance_ids
            __props__.__dict__["instance_name"] = instance_name
            __props__.__dict__["instance_type"] = instance_type
            __props__.__dict__["instance_types"] = instance_types
            __props__.__dict__["internet_charge_type"] = internet_charge_type
            __props__.__dict__["internet_max_bandwidth_in"] = internet_max_bandwidth_in
            __props__.__dict__["internet_max_bandwidth_out"] = internet_max_bandwidth_out
            if io_optimized is not None and not opts.urn:
                warnings.warn("""Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.""", DeprecationWarning)
                pulumi.log.warn("""io_optimized is deprecated: Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.""")
            __props__.__dict__["io_optimized"] = io_optimized
            __props__.__dict__["is_outdated"] = is_outdated
            __props__.__dict__["key_name"] = key_name
            __props__.__dict__["kms_encrypted_password"] = kms_encrypted_password
            __props__.__dict__["kms_encryption_context"] = kms_encryption_context
            __props__.__dict__["override"] = override
            __props__.__dict__["password"] = password
            __props__.__dict__["password_inherit"] = password_inherit
            __props__.__dict__["resource_group_id"] = resource_group_id
            __props__.__dict__["role_name"] = role_name
            __props__.__dict__["scaling_configuration_name"] = scaling_configuration_name
            # scaling_group_id is the only required input property.
            if scaling_group_id is None and not opts.urn:
                raise TypeError("Missing required property 'scaling_group_id'")
            __props__.__dict__["scaling_group_id"] = scaling_group_id
            __props__.__dict__["security_group_id"] = security_group_id
            __props__.__dict__["security_group_ids"] = security_group_ids
            __props__.__dict__["substitute"] = substitute
            __props__.__dict__["system_disk_auto_snapshot_policy_id"] = system_disk_auto_snapshot_policy_id
            __props__.__dict__["system_disk_category"] = system_disk_category
            __props__.__dict__["system_disk_description"] = system_disk_description
            __props__.__dict__["system_disk_name"] = system_disk_name
            __props__.__dict__["system_disk_performance_level"] = system_disk_performance_level
            __props__.__dict__["system_disk_size"] = system_disk_size
            __props__.__dict__["tags"] = tags
            __props__.__dict__["user_data"] = user_data
        super(ScalingConfiguration, __self__).__init__(
            'alicloud:ess/scalingConfiguration:ScalingConfiguration',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            active: Optional[pulumi.Input[bool]] = None,
            credit_specification: Optional[pulumi.Input[str]] = None,
            data_disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ScalingConfigurationDataDiskArgs']]]]] = None,
            enable: Optional[pulumi.Input[bool]] = None,
            force_delete: Optional[pulumi.Input[bool]] = None,
            image_id: Optional[pulumi.Input[str]] = None,
            image_name: Optional[pulumi.Input[str]] = None,
            instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            instance_name: Optional[pulumi.Input[str]] = None,
            instance_type: Optional[pulumi.Input[str]] = None,
            instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            internet_charge_type: Optional[pulumi.Input[str]] = None,
            internet_max_bandwidth_in: Optional[pulumi.Input[int]] = None,
            internet_max_bandwidth_out: Optional[pulumi.Input[int]] = None,
            io_optimized: Optional[pulumi.Input[str]] = None,
            is_outdated: Optional[pulumi.Input[bool]] = None,
            key_name: Optional[pulumi.Input[str]] = None,
            kms_encrypted_password: Optional[pulumi.Input[str]] = None,
            kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            override: Optional[pulumi.Input[bool]] = None,
            password: Optional[pulumi.Input[str]] = None,
            password_inherit: Optional[pulumi.Input[bool]] = None,
            resource_group_id: Optional[pulumi.Input[str]] = None,
            role_name: Optional[pulumi.Input[str]] = None,
            scaling_configuration_name: Optional[pulumi.Input[str]] = None,
            scaling_group_id: Optional[pulumi.Input[str]] = None,
            security_group_id: Optional[pulumi.Input[str]] = None,
            security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            substitute: Optional[pulumi.Input[str]] = None,
            system_disk_auto_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
            system_disk_category: Optional[pulumi.Input[str]] = None,
            system_disk_description: Optional[pulumi.Input[str]] = None,
            system_disk_name: Optional[pulumi.Input[str]] = None,
            system_disk_performance_level: Optional[pulumi.Input[str]] = None,
            system_disk_size: Optional[pulumi.Input[int]] = None,
            tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            user_data: Optional[pulumi.Input[str]] = None) -> 'ScalingConfiguration':
        """
        Get an existing ScalingConfiguration resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] active: Whether active current scaling configuration in the specified scaling group. Default to `false`.
        :param pulumi.Input[str] credit_specification: Performance mode of the t5 burstable instance. Valid values: 'Standard', 'Unlimited'.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ScalingConfigurationDataDiskArgs']]]] data_disks: DataDisk mappings to attach to ecs instance. See Block datadisk below for details.
        :param pulumi.Input[bool] enable: Whether enable the specified scaling group(make it active) to which the current scaling configuration belongs.
        :param pulumi.Input[bool] force_delete: The last scaling configuration will be deleted forcibly with deleting its scaling group. Default to false.
        :param pulumi.Input[str] image_id: ID of an image file, indicating the image resource selected when an instance is enabled.
        :param pulumi.Input[str] image_name: Name of an image file, indicating the image resource selected when an instance is enabled.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: It has been deprecated from version 1.6.0. New resource `ess.Attachment` replaces it.
        :param pulumi.Input[str] instance_name: Name of an ECS instance. Default to "ESS-Instance". It is valid from version 1.7.1.
        :param pulumi.Input[str] instance_type: Resource type of an ECS instance.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_types: Resource types of an ECS instance.
        :param pulumi.Input[str] internet_charge_type: Network billing type, Values: PayByBandwidth or PayByTraffic. Default to `PayByBandwidth`.
        :param pulumi.Input[int] internet_max_bandwidth_in: Maximum incoming bandwidth from the public network, measured in Mbps (Mega bit per second). The value range is [1,200].
        :param pulumi.Input[int] internet_max_bandwidth_out: Maximum outgoing bandwidth from the public network, measured in Mbps (Mega bit per second). The value range for PayByBandwidth is [0,100].
        :param pulumi.Input[str] io_optimized: It has been deprecated on instance resource. All the launched alicloud instances will be I/O optimized.
        :param pulumi.Input[bool] is_outdated: Whether to use outdated instance type. Default to false.
        :param pulumi.Input[str] key_name: The name of key pair that can login ECS instance successfully without password. If it is specified, the password would be invalid.
        :param pulumi.Input[str] kms_encrypted_password: A KMS-encrypted password used for the instance. If the `password` is filled in, this field will be ignored.
        :param pulumi.Input[Mapping[str, Any]] kms_encryption_context: A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating an instance with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
        :param pulumi.Input[bool] override: Indicates whether to overwrite the existing data. Default to false.
        :param pulumi.Input[str] password: The password of the ECS instance. The password must be 8 to 30 characters in length. It must contains at least three of the following character types: uppercase letters, lowercase letters, digits, and special characters. Special characters include `() ~!@#$%^&*-_+=\|{}[]:;'<>,.?/`, The password of Windows-based instances cannot start with a forward slash (/).
        :param pulumi.Input[bool] password_inherit: Specifies whether to use the password that is predefined in the image. If the PasswordInherit parameter is set to true, the `password` and `kms_encrypted_password` will be ignored. You must ensure that the selected image has a password configured.
        :param pulumi.Input[str] resource_group_id: ID of resource group.
        :param pulumi.Input[str] role_name: Instance RAM role name. The name is provided and maintained by RAM. You can use `ram.Role` to create a new one.
        :param pulumi.Input[str] scaling_configuration_name: Name shown for the scaling configuration, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain numbers, underscores `_`, hyphens `-`, and decimal points `.`. If this parameter value is not specified, the default value is the ScalingConfigurationId.
        :param pulumi.Input[str] scaling_group_id: ID of the scaling group of a scaling configuration.
        :param pulumi.Input[str] security_group_id: ID of the security group used to create new instance. It conflicts with `security_group_ids`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: List of IDs of the security groups used to create new instances. It conflicts with `security_group_id`.
        :param pulumi.Input[str] substitute: The other scaling configuration which will become active automatically and replace the current configuration when `active` is set to 'false'. It is invalid when `active` is 'true'.
        :param pulumi.Input[str] system_disk_auto_snapshot_policy_id: The ID of the automatic snapshot policy applied to the system disk.
        :param pulumi.Input[str] system_disk_category: Category of the system disk. The parameter value options are `ephemeral_ssd`, `cloud_efficiency`, `cloud_ssd`, `cloud_essd` and `cloud`. `cloud` only is used to some no I/O optimized instance. Default to `cloud_efficiency`.
        :param pulumi.Input[str] system_disk_description: The description of the system disk. The description must be 2 to 256 characters in length and cannot start with http:// or https://.
        :param pulumi.Input[str] system_disk_name: The name of the system disk. It must be 2 to 128 characters in length. It must start with a letter and cannot start with http:// or https://. It can contain letters, digits, colons (:), underscores (_), and hyphens (-). Default value: null.
        :param pulumi.Input[str] system_disk_performance_level: The performance level of the ESSD used as the system disk.
        :param pulumi.Input[int] system_disk_size: Size of system disk, in GiB. Optional values: cloud: 20-500, cloud_efficiency: 20-500, cloud_ssd: 20-500, ephemeral_ssd: 20-500 The default value is max{40, ImageSize}. If this parameter is set, the system disk size must be greater than or equal to max{40, ImageSize}.
        :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource. It will be applied for ECS instances finally.
               - Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "http://", or "https://". It cannot be a null string.
               - Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "http://", or "https://" It can be a null string.
        :param pulumi.Input[str] user_data: User-defined data to customize the startup behaviors of the ECS instance and to pass data into the ECS instance.
        """
        # Rehydrate an existing resource: the engine looks it up by id instead
        # of creating it, and the supplied values qualify the lookup.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ScalingConfigurationState.__new__(_ScalingConfigurationState)
        __props__.__dict__["active"] = active
        __props__.__dict__["credit_specification"] = credit_specification
        __props__.__dict__["data_disks"] = data_disks
        __props__.__dict__["enable"] = enable
        __props__.__dict__["force_delete"] = force_delete
        __props__.__dict__["image_id"] = image_id
        __props__.__dict__["image_name"] = image_name
        __props__.__dict__["instance_ids"] = instance_ids
        __props__.__dict__["instance_name"] = instance_name
        __props__.__dict__["instance_type"] = instance_type
        __props__.__dict__["instance_types"] = instance_types
        __props__.__dict__["internet_charge_type"] = internet_charge_type
        __props__.__dict__["internet_max_bandwidth_in"] = internet_max_bandwidth_in
        __props__.__dict__["internet_max_bandwidth_out"] = internet_max_bandwidth_out
        __props__.__dict__["io_optimized"] = io_optimized
        __props__.__dict__["is_outdated"] = is_outdated
        __props__.__dict__["key_name"] = key_name
        __props__.__dict__["kms_encrypted_password"] = kms_encrypted_password
        __props__.__dict__["kms_encryption_context"] = kms_encryption_context
        __props__.__dict__["override"] = override
        __props__.__dict__["password"] = password
        __props__.__dict__["password_inherit"] = password_inherit
        __props__.__dict__["resource_group_id"] = resource_group_id
        __props__.__dict__["role_name"] = role_name
        __props__.__dict__["scaling_configuration_name"] = scaling_configuration_name
        __props__.__dict__["scaling_group_id"] = scaling_group_id
        __props__.__dict__["security_group_id"] = security_group_id
        __props__.__dict__["security_group_ids"] = security_group_ids
        __props__.__dict__["substitute"] = substitute
        __props__.__dict__["system_disk_auto_snapshot_policy_id"] = system_disk_auto_snapshot_policy_id
        __props__.__dict__["system_disk_category"] = system_disk_category
        __props__.__dict__["system_disk_description"] = system_disk_description
        __props__.__dict__["system_disk_name"] = system_disk_name
        __props__.__dict__["system_disk_performance_level"] = system_disk_performance_level
        __props__.__dict__["system_disk_size"] = system_disk_size
        __props__.__dict__["tags"] = tags
        __props__.__dict__["user_data"] = user_data
        return ScalingConfiguration(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def active(self) -> pulumi.Output[bool]:
"""
Whether active current scaling configuration in the specified scaling group. Default to `false`.
"""
return pulumi.get(self, "active")
@property
@pulumi.getter(name="creditSpecification")
def credit_specification(self) -> pulumi.Output[Optional[str]]:
"""
Performance mode of the t5 burstable instance. Valid values: 'Standard', 'Unlimited'.
"""
return pulumi.get(self, "credit_specification")
@property
@pulumi.getter(name="dataDisks")
def data_disks(self) -> pulumi.Output[Optional[Sequence['outputs.ScalingConfigurationDataDisk']]]:
"""
DataDisk mappings to attach to ecs instance. See Block datadisk below for details.
"""
return pulumi.get(self, "data_disks")
@property
@pulumi.getter
def enable(self) -> pulumi.Output[Optional[bool]]:
"""
Whether enable the specified scaling group(make it active) to which the current scaling configuration belongs.
"""
return pulumi.get(self, "enable")
@property
@pulumi.getter(name="forceDelete")
def force_delete(self) -> pulumi.Output[Optional[bool]]:
"""
The last scaling configuration will be deleted forcibly with deleting its scaling group. Default to false.
"""
return pulumi.get(self, "force_delete")
@property
@pulumi.getter(name="imageId")
def image_id(self) -> pulumi.Output[Optional[str]]:
"""
ID of an image file, indicating the image resource selected when an instance is enabled.
"""
return pulumi.get(self, "image_id")
@property
@pulumi.getter(name="imageName")
def image_name(self) -> pulumi.Output[Optional[str]]:
"""
Name of an image file, indicating the image resource selected when an instance is enabled.
"""
return pulumi.get(self, "image_name")
@property
@pulumi.getter(name="instanceIds")
def instance_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
It has been deprecated from version 1.6.0. New resource `ess.Attachment` replaces it.
"""
return pulumi.get(self, "instance_ids")
@property
@pulumi.getter(name="instanceName")
def instance_name(self) -> pulumi.Output[Optional[str]]:
"""
Name of an ECS instance. Default to "ESS-Instance". It is valid from version 1.7.1.
"""
return pulumi.get(self, "instance_name")
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> pulumi.Output[Optional[str]]:
"""
Resource type of an ECS instance.
"""
return pulumi.get(self, "instance_type")
@property
@pulumi.getter(name="instanceTypes")
def instance_types(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Resource types of an ECS instance.
"""
return pulumi.get(self, "instance_types")
@property
@pulumi.getter(name="internetChargeType")
def internet_charge_type(self) -> pulumi.Output[Optional[str]]:
"""
Network billing type, Values: PayByBandwidth or PayByTraffic. Default to `PayByBandwidth`.
"""
return pulumi.get(self, "internet_charge_type")
@property
@pulumi.getter(name="internetMaxBandwidthIn")
def internet_max_bandwidth_in(self) -> pulumi.Output[int]:
"""
Maximum incoming bandwidth from the public network, measured in Mbps (Mega bit per second). The value range is [1,200].
"""
return pulumi.get(self, "internet_max_bandwidth_in")
@property
@pulumi.getter(name="internetMaxBandwidthOut")
def internet_max_bandwidth_out(self) -> pulumi.Output[Optional[int]]:
"""
Maximum outgoing bandwidth from the public network, measured in Mbps (Mega bit per second). The value range for PayByBandwidth is [0,100].
"""
return pulumi.get(self, "internet_max_bandwidth_out")
@property
@pulumi.getter(name="ioOptimized")
def io_optimized(self) -> pulumi.Output[Optional[str]]:
"""
It has been deprecated on instance resource. All the launched alicloud instances will be I/O optimized.
"""
return pulumi.get(self, "io_optimized")
@property
@pulumi.getter(name="isOutdated")
def is_outdated(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to use outdated instance type. Default to false.
"""
return pulumi.get(self, "is_outdated")
@property
@pulumi.getter(name="keyName")
def key_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of key pair that can login ECS instance successfully without password. If it is specified, the password would be invalid.
"""
return pulumi.get(self, "key_name")
@property
@pulumi.getter(name="kmsEncryptedPassword")
def kms_encrypted_password(self) -> pulumi.Output[Optional[str]]:
"""
An KMS encrypts password used to a db account. If the `password` is filled in, this field will be ignored.
"""
return pulumi.get(self, "kms_encrypted_password")
@property
@pulumi.getter(name="kmsEncryptionContext")
def kms_encryption_context(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
An KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating a db account with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
"""
return pulumi.get(self, "kms_encryption_context")
@property
@pulumi.getter
def override(self) -> pulumi.Output[Optional[bool]]:
"""
Indicates whether to overwrite the existing data. Default to false.
"""
return pulumi.get(self, "override")
@property
@pulumi.getter
def password(self) -> pulumi.Output[Optional[str]]:
"""
The password of the ECS instance. The password must be 8 to 30 characters in length. It must contains at least three of the following character types: uppercase letters, lowercase letters, digits, and special characters. Special characters include `() ~!@#$%^&*-_+=\|{}[]:;'<>,.?/`, The password of Windows-based instances cannot start with a forward slash (/).
"""
return pulumi.get(self, "password")
@property
@pulumi.getter(name="passwordInherit")
def password_inherit(self) -> pulumi.Output[Optional[bool]]:
"""
Specifies whether to use the password that is predefined in the image. If the PasswordInherit parameter is set to true, the `password` and `kms_encrypted_password` will be ignored. You must ensure that the selected image has a password configured.
"""
return pulumi.get(self, "password_inherit")
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> pulumi.Output[Optional[str]]:
"""
ID of resource group.
"""
return pulumi.get(self, "resource_group_id")
@property
@pulumi.getter(name="roleName")
def role_name(self) -> pulumi.Output[Optional[str]]:
"""
Instance RAM role name. The name is provided and maintained by RAM. You can use `ram.Role` to create a new one.
"""
return pulumi.get(self, "role_name")
@property
@pulumi.getter(name="scalingConfigurationName")
def scaling_configuration_name(self) -> pulumi.Output[str]:
"""
Name shown for the scheduled task. which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain number, underscores `_`, hypens `-`, and decimal point `.`. If this parameter value is not specified, the default value is ScalingConfigurationId.
"""
return pulumi.get(self, "scaling_configuration_name")
@property
@pulumi.getter(name="scalingGroupId")
def scaling_group_id(self) -> pulumi.Output[str]:
"""
ID of the scaling group of a scaling configuration.
"""
return pulumi.get(self, "scaling_group_id")
@property
@pulumi.getter(name="securityGroupId")
def security_group_id(self) -> pulumi.Output[Optional[str]]:
"""
ID of the security group used to create new instance. It is conflict with `security_group_ids`.
"""
return pulumi.get(self, "security_group_id")
@property
@pulumi.getter(name="securityGroupIds")
def security_group_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
List IDs of the security group used to create new instances. It is conflict with `security_group_id`.
"""
return pulumi.get(self, "security_group_ids")
@property
@pulumi.getter
def substitute(self) -> pulumi.Output[str]:
"""
The another scaling configuration which will be active automatically and replace current configuration when setting `active` to 'false'. It is invalid when `active` is 'true'.
"""
return pulumi.get(self, "substitute")
@property
@pulumi.getter(name="systemDiskAutoSnapshotPolicyId")
def system_disk_auto_snapshot_policy_id(self) -> pulumi.Output[Optional[str]]:
"""
The id of auto snapshot policy for system disk.
"""
return pulumi.get(self, "system_disk_auto_snapshot_policy_id")
@property
@pulumi.getter(name="systemDiskCategory")
def system_disk_category(self) -> pulumi.Output[Optional[str]]:
"""
Category of the system disk. The parameter value options are `ephemeral_ssd`, `cloud_efficiency`, `cloud_ssd`, `cloud_essd` and `cloud`. `cloud` only is used to some no I/O optimized instance. Default to `cloud_efficiency`.
"""
return pulumi.get(self, "system_disk_category")
@property
@pulumi.getter(name="systemDiskDescription")
def system_disk_description(self) -> pulumi.Output[Optional[str]]:
"""
The description of the system disk. The description must be 2 to 256 characters in length and cannot start with http:// or https://.
"""
return pulumi.get(self, "system_disk_description")
@property
@pulumi.getter(name="systemDiskName")
def system_disk_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of the system disk. It must be 2 to 128 characters in length. It must start with a letter and cannot start with http:// or https://. It can contain letters, digits, colons (:), underscores (_), and hyphens (-). Default value: null.
"""
return pulumi.get(self, "system_disk_name")
@property
@pulumi.getter(name="systemDiskPerformanceLevel")
def system_disk_performance_level(self) -> pulumi.Output[Optional[str]]:
"""
The performance level of the ESSD used as the system disk.
"""
return pulumi.get(self, "system_disk_performance_level")
@property
@pulumi.getter(name="systemDiskSize")
def system_disk_size(self) -> pulumi.Output[Optional[int]]:
"""
Size of system disk, in GiB. Optional values: cloud: 20-500, cloud_efficiency: 20-500, cloud_ssd: 20-500, ephemeral_ssd: 20-500 The default value is max{40, ImageSize}. If this parameter is set, the system disk size must be greater than or equal to max{40, ImageSize}.
"""
return pulumi.get(self, "system_disk_size")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
A mapping of tags to assign to the resource. It will be applied for ECS instances finally.
- Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "http://", or "https://". It cannot be a null string.
- Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "http://", or "https://" It can be a null string.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="userData")
def user_data(self) -> pulumi.Output[Optional[str]]:
"""
User-defined data to customize the startup behaviors of the ECS instance and to pass data into the ECS instance.
"""
return pulumi.get(self, "user_data")
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.ResourceOptions",
"pulumi.set",
"pulumi.log.warn",
"warnings.warn"
] |
[((15269, 15305), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""scalingGroupId"""'}), "(name='scalingGroupId')\n", (15282, 15305), False, 'import pulumi\n'), ((16035, 16076), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""creditSpecification"""'}), "(name='creditSpecification')\n", (16048, 16076), False, 'import pulumi\n'), ((16500, 16531), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dataDisks"""'}), "(name='dataDisks')\n", (16513, 16531), False, 'import pulumi\n'), ((17406, 17439), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forceDelete"""'}), "(name='forceDelete')\n", (17419, 17439), False, 'import pulumi\n'), ((17846, 17875), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""imageId"""'}), "(name='imageId')\n", (17859, 17875), False, 'import pulumi\n'), ((18242, 18273), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""imageName"""'}), "(name='imageName')\n", (18255, 18273), False, 'import pulumi\n'), ((18652, 18685), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceIds"""'}), "(name='instanceIds')\n", (18665, 18685), False, 'import pulumi\n'), ((19117, 19151), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceName"""'}), "(name='instanceName')\n", (19130, 19151), False, 'import pulumi\n'), ((19538, 19572), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceType"""'}), "(name='instanceType')\n", (19551, 19572), False, 'import pulumi\n'), ((19909, 19944), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceTypes"""'}), "(name='instanceTypes')\n", (19922, 19944), False, 'import pulumi\n'), ((20335, 20375), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internetChargeType"""'}), "(name='internetChargeType')\n", (20348, 20375), False, 'import pulumi\n'), ((20804, 20848), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internetMaxBandwidthIn"""'}), "(name='internetMaxBandwidthIn')\n", (20817, 20848), False, 'import pulumi\n'), ((21331, 21376), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""internetMaxBandwidthOut"""'}), "(name='internetMaxBandwidthOut')\n", (21344, 21376), False, 'import pulumi\n'), ((21883, 21916), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ioOptimized"""'}), "(name='ioOptimized')\n", (21896, 21916), False, 'import pulumi\n'), ((22318, 22350), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isOutdated"""'}), "(name='isOutdated')\n", (22331, 22350), False, 'import pulumi\n'), ((22702, 22731), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""keyName"""'}), "(name='keyName')\n", (22715, 22731), False, 'import pulumi\n'), ((23140, 23182), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""kmsEncryptedPassword"""'}), "(name='kmsEncryptedPassword')\n", (23153, 23182), False, 'import pulumi\n'), ((23637, 23679), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""kmsEncryptionContext"""'}), "(name='kmsEncryptionContext')\n", (23650, 23679), False, 'import pulumi\n'), ((25340, 25377), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""passwordInherit"""'}), "(name='passwordInherit')\n", (25353, 25377), False, 'import pulumi\n'), ((25946, 25983), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceGroupId"""'}), "(name='resourceGroupId')\n", (25959, 25983), False, 'import pulumi\n'), ((26328, 26358), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""roleName"""'}), "(name='roleName')\n", (26341, 26358), False, 'import pulumi\n'), ((26753, 26799), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""scalingConfigurationName"""'}), "(name='scalingConfigurationName')\n", (26766, 26799), False, 'import pulumi\n'), ((27484, 27521), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""securityGroupId"""'}), "(name='securityGroupId')\n", (27497, 27521), False, 'import pulumi\n'), ((27940, 27978), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""securityGroupIds"""'}), "(name='securityGroupIds')\n", (27953, 27978), False, 'import pulumi\n'), ((28933, 28985), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""systemDiskAutoSnapshotPolicyId"""'}), "(name='systemDiskAutoSnapshotPolicyId')\n", (28946, 28985), False, 'import pulumi\n'), ((29446, 29486), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskCategory"""'}), "(name='systemDiskCategory')\n", (29459, 29486), False, 'import pulumi\n'), ((30048, 30091), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskDescription"""'}), "(name='systemDiskDescription')\n", (30061, 30091), False, 'import pulumi\n'), ((30577, 30613), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskName"""'}), "(name='systemDiskName')\n", (30590, 30613), False, 'import pulumi\n'), ((31172, 31220), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskPerformanceLevel"""'}), "(name='systemDiskPerformanceLevel')\n", (31185, 31220), False, 'import pulumi\n'), ((31662, 31698), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskSize"""'}), "(name='systemDiskSize')\n", (31675, 31698), False, 'import pulumi\n'), ((32956, 32986), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""userData"""'}), "(name='userData')\n", (32969, 32986), False, 'import pulumi\n'), ((48647, 48688), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""creditSpecification"""'}), "(name='creditSpecification')\n", (48660, 48688), False, 'import pulumi\n'), ((49112, 49143), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dataDisks"""'}), "(name='dataDisks')\n", (49125, 49143), False, 'import pulumi\n'), ((50018, 50051), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forceDelete"""'}), "(name='forceDelete')\n", (50031, 50051), False, 'import pulumi\n'), ((50458, 50487), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""imageId"""'}), "(name='imageId')\n", (50471, 50487), False, 'import pulumi\n'), ((50854, 50885), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""imageName"""'}), "(name='imageName')\n", (50867, 50885), False, 'import pulumi\n'), ((51264, 51297), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""instanceIds"""'}), "(name='instanceIds')\n", (51277, 51297), False, 'import pulumi\n'), ((51729, 51763), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceName"""'}), "(name='instanceName')\n", (51742, 51763), False, 'import pulumi\n'), ((52150, 52184), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceType"""'}), "(name='instanceType')\n", (52163, 52184), False, 'import pulumi\n'), ((52521, 52556), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceTypes"""'}), "(name='instanceTypes')\n", (52534, 52556), False, 'import pulumi\n'), ((52947, 52987), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internetChargeType"""'}), "(name='internetChargeType')\n", (52960, 52987), False, 'import pulumi\n'), ((53416, 53460), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internetMaxBandwidthIn"""'}), "(name='internetMaxBandwidthIn')\n", (53429, 53460), False, 'import pulumi\n'), ((53943, 53988), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internetMaxBandwidthOut"""'}), "(name='internetMaxBandwidthOut')\n", (53956, 53988), False, 'import pulumi\n'), ((54495, 54528), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ioOptimized"""'}), "(name='ioOptimized')\n", (54508, 54528), False, 'import pulumi\n'), ((54930, 54962), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isOutdated"""'}), "(name='isOutdated')\n", (54943, 54962), False, 'import pulumi\n'), ((55314, 55343), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""keyName"""'}), "(name='keyName')\n", (55327, 55343), False, 'import pulumi\n'), ((55752, 55794), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""kmsEncryptedPassword"""'}), "(name='kmsEncryptedPassword')\n", (55765, 55794), False, 'import pulumi\n'), ((56249, 56291), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""kmsEncryptionContext"""'}), "(name='kmsEncryptionContext')\n", (56262, 56291), False, 'import pulumi\n'), ((57952, 57989), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""passwordInherit"""'}), "(name='passwordInherit')\n", (57965, 57989), False, 'import pulumi\n'), ((58557, 58594), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceGroupId"""'}), "(name='resourceGroupId')\n", (58570, 58594), False, 'import pulumi\n'), ((58939, 58969), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""roleName"""'}), "(name='roleName')\n", (58952, 58969), False, 'import pulumi\n'), ((59364, 59410), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""scalingConfigurationName"""'}), "(name='scalingConfigurationName')\n", (59377, 59410), False, 'import pulumi\n'), ((60095, 60131), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""scalingGroupId"""'}), "(name='scalingGroupId')\n", (60108, 60131), False, 'import pulumi\n'), ((60501, 60538), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""securityGroupId"""'}), "(name='securityGroupId')\n", (60514, 60538), False, 'import pulumi\n'), ((60957, 60995), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""securityGroupIds"""'}), "(name='securityGroupIds')\n", (60970, 60995), False, 'import pulumi\n'), ((61950, 62002), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskAutoSnapshotPolicyId"""'}), "(name='systemDiskAutoSnapshotPolicyId')\n", (61963, 62002), False, 'import pulumi\n'), ((62463, 62503), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskCategory"""'}), "(name='systemDiskCategory')\n", (62476, 62503), False, 'import pulumi\n'), ((63065, 63108), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskDescription"""'}), "(name='systemDiskDescription')\n", (63078, 63108), False, 'import pulumi\n'), ((63594, 63630), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskName"""'}), "(name='systemDiskName')\n", (63607, 63630), False, 'import pulumi\n'), ((64189, 64237), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskPerformanceLevel"""'}), "(name='systemDiskPerformanceLevel')\n", (64202, 64237), False, 'import pulumi\n'), ((64679, 
64715), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskSize"""'}), "(name='systemDiskSize')\n", (64692, 64715), False, 'import pulumi\n'), ((65973, 66003), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""userData"""'}), "(name='userData')\n", (65986, 66003), False, 'import pulumi\n'), ((98274, 98315), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""creditSpecification"""'}), "(name='creditSpecification')\n", (98287, 98315), False, 'import pulumi\n'), ((98578, 98609), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dataDisks"""'}), "(name='dataDisks')\n", (98591, 98609), False, 'import pulumi\n'), ((99168, 99201), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forceDelete"""'}), "(name='forceDelete')\n", (99181, 99201), False, 'import pulumi\n'), ((99470, 99499), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""imageId"""'}), "(name='imageId')\n", (99483, 99499), False, 'import pulumi\n'), ((99741, 99772), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""imageName"""'}), "(name='imageName')\n", (99754, 99772), False, 'import pulumi\n'), ((100020, 100053), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceIds"""'}), "(name='instanceIds')\n", (100033, 100053), False, 'import pulumi\n'), ((100310, 100344), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceName"""'}), "(name='instanceName')\n", (100323, 100344), False, 'import pulumi\n'), ((100591, 100625), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceType"""'}), "(name='instanceType')\n", (100604, 100625), False, 'import pulumi\n'), ((100822, 100857), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceTypes"""'}), "(name='instanceTypes')\n", (100835, 100857), False, 'import pulumi\n'), ((101067, 101107), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internetChargeType"""'}), "(name='internetChargeType')\n", (101080, 101107), False, 'import pulumi\n'), ((101375, 101419), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""internetMaxBandwidthIn"""'}), "(name='internetMaxBandwidthIn')\n", (101388, 101419), False, 'import pulumi\n'), ((101716, 101761), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internetMaxBandwidthOut"""'}), "(name='internetMaxBandwidthOut')\n", (101729, 101761), False, 'import pulumi\n'), ((102089, 102122), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ioOptimized"""'}), "(name='ioOptimized')\n", (102102, 102122), False, 'import pulumi\n'), ((102387, 102419), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isOutdated"""'}), "(name='isOutdated')\n", (102400, 102419), False, 'import pulumi\n'), ((102636, 102665), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""keyName"""'}), "(name='keyName')\n", (102649, 102665), False, 'import pulumi\n'), ((102949, 102991), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""kmsEncryptedPassword"""'}), "(name='kmsEncryptedPassword')\n", (102962, 102991), False, 'import pulumi\n'), ((103279, 103321), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""kmsEncryptionContext"""'}), "(name='kmsEncryptionContext')\n", (103292, 103321), False, 'import pulumi\n'), ((104550, 104587), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""passwordInherit"""'}), "(name='passwordInherit')\n", (104563, 104587), False, 'import pulumi\n'), ((105005, 105042), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceGroupId"""'}), "(name='resourceGroupId')\n", (105018, 105042), False, 'import pulumi\n'), ((105235, 105265), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""roleName"""'}), "(name='roleName')\n", (105248, 105265), False, 'import pulumi\n'), ((105532, 105578), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""scalingConfigurationName"""'}), "(name='scalingConfigurationName')\n", (105545, 105578), False, 'import pulumi\n'), ((106074, 106110), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""scalingGroupId"""'}), "(name='scalingGroupId')\n", (106087, 106110), False, 'import pulumi\n'), ((106321, 106358), 
'pulumi.getter', 'pulumi.getter', ([], {'name': '"""securityGroupId"""'}), "(name='securityGroupId')\n", (106334, 106358), False, 'import pulumi\n'), ((106625, 106663), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""securityGroupIds"""'}), "(name='securityGroupIds')\n", (106638, 106663), False, 'import pulumi\n'), ((107284, 107336), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskAutoSnapshotPolicyId"""'}), "(name='systemDiskAutoSnapshotPolicyId')\n", (107297, 107336), False, 'import pulumi\n'), ((107591, 107631), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskCategory"""'}), "(name='systemDiskCategory')\n", (107604, 107631), False, 'import pulumi\n'), ((108032, 108075), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskDescription"""'}), "(name='systemDiskDescription')\n", (108045, 108075), False, 'import pulumi\n'), ((108391, 108427), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskName"""'}), "(name='systemDiskName')\n", (108404, 108427), False, 'import pulumi\n'), ((108837, 108885), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskPerformanceLevel"""'}), "(name='systemDiskPerformanceLevel')\n", (108850, 108885), False, 'import pulumi\n'), ((109139, 109175), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemDiskSize"""'}), "(name='systemDiskSize')\n", (109152, 109175), False, 'import pulumi\n'), ((110157, 110187), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""userData"""'}), "(name='userData')\n", (110170, 110187), False, 'import pulumi\n'), ((10355, 10413), 'pulumi.set', 'pulumi.set', (['__self__', '"""scaling_group_id"""', 'scaling_group_id'], {}), "(__self__, 'scaling_group_id', scaling_group_id)\n", (10365, 10413), False, 'import pulumi\n'), ((15458, 15494), 'pulumi.get', 'pulumi.get', (['self', '"""scaling_group_id"""'], {}), "(self, 'scaling_group_id')\n", (15468, 15494), False, 'import pulumi\n'), ((15591, 15634), 'pulumi.set', 'pulumi.set', (['self', 
'"""scaling_group_id"""', 'value'], {}), "(self, 'scaling_group_id', value)\n", (15601, 15634), False, 'import pulumi\n'), ((15867, 15893), 'pulumi.get', 'pulumi.get', (['self', '"""active"""'], {}), "(self, 'active')\n", (15877, 15893), False, 'import pulumi\n'), ((15981, 16014), 'pulumi.set', 'pulumi.set', (['self', '"""active"""', 'value'], {}), "(self, 'active', value)\n", (15991, 16014), False, 'import pulumi\n'), ((16277, 16317), 'pulumi.get', 'pulumi.get', (['self', '"""credit_specification"""'], {}), "(self, 'credit_specification')\n", (16287, 16317), False, 'import pulumi\n'), ((16432, 16479), 'pulumi.set', 'pulumi.set', (['self', '"""credit_specification"""', 'value'], {}), "(self, 'credit_specification', value)\n", (16442, 16479), False, 'import pulumi\n'), ((16774, 16804), 'pulumi.get', 'pulumi.get', (['self', '"""data_disks"""'], {}), "(self, 'data_disks')\n", (16784, 16804), False, 'import pulumi\n'), ((16954, 16991), 'pulumi.set', 'pulumi.set', (['self', '"""data_disks"""', 'value'], {}), "(self, 'data_disks', value)\n", (16964, 16991), False, 'import pulumi\n'), ((17238, 17264), 'pulumi.get', 'pulumi.get', (['self', '"""enable"""'], {}), "(self, 'enable')\n", (17248, 17264), False, 'import pulumi\n'), ((17352, 17385), 'pulumi.set', 'pulumi.set', (['self', '"""enable"""', 'value'], {}), "(self, 'enable', value)\n", (17362, 17385), False, 'import pulumi\n'), ((17654, 17686), 'pulumi.get', 'pulumi.get', (['self', '"""force_delete"""'], {}), "(self, 'force_delete')\n", (17664, 17686), False, 'import pulumi\n'), ((17786, 17825), 'pulumi.set', 'pulumi.set', (['self', '"""force_delete"""', 'value'], {}), "(self, 'force_delete', value)\n", (17796, 17825), False, 'import pulumi\n'), ((18067, 18095), 'pulumi.get', 'pulumi.get', (['self', '"""image_id"""'], {}), "(self, 'image_id')\n", (18077, 18095), False, 'import pulumi\n'), ((18186, 18221), 'pulumi.set', 'pulumi.set', (['self', '"""image_id"""', 'value'], {}), "(self, 'image_id', value)\n", (18196, 18221), 
False, 'import pulumi\n'), ((18469, 18499), 'pulumi.get', 'pulumi.get', (['self', '"""image_name"""'], {}), "(self, 'image_name')\n", (18479, 18499), False, 'import pulumi\n'), ((18594, 18631), 'pulumi.set', 'pulumi.set', (['self', '"""image_name"""', 'value'], {}), "(self, 'image_name', value)\n", (18604, 18631), False, 'import pulumi\n'), ((18902, 18934), 'pulumi.get', 'pulumi.get', (['self', '"""instance_ids"""'], {}), "(self, 'instance_ids')\n", (18912, 18934), False, 'import pulumi\n'), ((19057, 19096), 'pulumi.set', 'pulumi.set', (['self', '"""instance_ids"""', 'value'], {}), "(self, 'instance_ids', value)\n", (19067, 19096), False, 'import pulumi\n'), ((19343, 19376), 'pulumi.get', 'pulumi.get', (['self', '"""instance_name"""'], {}), "(self, 'instance_name')\n", (19353, 19376), False, 'import pulumi\n'), ((19477, 19517), 'pulumi.set', 'pulumi.set', (['self', '"""instance_name"""', 'value'], {}), "(self, 'instance_name', value)\n", (19487, 19517), False, 'import pulumi\n'), ((19714, 19747), 'pulumi.get', 'pulumi.get', (['self', '"""instance_type"""'], {}), "(self, 'instance_type')\n", (19724, 19747), False, 'import pulumi\n'), ((19848, 19888), 'pulumi.set', 'pulumi.set', (['self', '"""instance_type"""', 'value'], {}), "(self, 'instance_type', value)\n", (19858, 19888), False, 'import pulumi\n'), ((20112, 20146), 'pulumi.get', 'pulumi.get', (['self', '"""instance_types"""'], {}), "(self, 'instance_types')\n", (20122, 20146), False, 'import pulumi\n'), ((20273, 20314), 'pulumi.set', 'pulumi.set', (['self', '"""instance_types"""', 'value'], {}), "(self, 'instance_types', value)\n", (20283, 20314), False, 'import pulumi\n'), ((20581, 20621), 'pulumi.get', 'pulumi.get', (['self', '"""internet_charge_type"""'], {}), "(self, 'internet_charge_type')\n", (20591, 20621), False, 'import pulumi\n'), ((20736, 20783), 'pulumi.set', 'pulumi.set', (['self', '"""internet_charge_type"""', 'value'], {}), "(self, 'internet_charge_type', value)\n", (20746, 20783), False, 'import 
pulumi\n'), ((21088, 21133), 'pulumi.get', 'pulumi.get', (['self', '"""internet_max_bandwidth_in"""'], {}), "(self, 'internet_max_bandwidth_in')\n", (21098, 21133), False, 'import pulumi\n'), ((21258, 21310), 'pulumi.set', 'pulumi.set', (['self', '"""internet_max_bandwidth_in"""', 'value'], {}), "(self, 'internet_max_bandwidth_in', value)\n", (21268, 21310), False, 'import pulumi\n'), ((21636, 21682), 'pulumi.get', 'pulumi.get', (['self', '"""internet_max_bandwidth_out"""'], {}), "(self, 'internet_max_bandwidth_out')\n", (21646, 21682), False, 'import pulumi\n'), ((21809, 21862), 'pulumi.set', 'pulumi.set', (['self', '"""internet_max_bandwidth_out"""', 'value'], {}), "(self, 'internet_max_bandwidth_out', value)\n", (21819, 21862), False, 'import pulumi\n'), ((22127, 22159), 'pulumi.get', 'pulumi.get', (['self', '"""io_optimized"""'], {}), "(self, 'io_optimized')\n", (22137, 22159), False, 'import pulumi\n'), ((22258, 22297), 'pulumi.set', 'pulumi.set', (['self', '"""io_optimized"""', 'value'], {}), "(self, 'io_optimized', value)\n", (22268, 22297), False, 'import pulumi\n'), ((22514, 22545), 'pulumi.get', 'pulumi.get', (['self', '"""is_outdated"""'], {}), "(self, 'is_outdated')\n", (22524, 22545), False, 'import pulumi\n'), ((22643, 22681), 'pulumi.set', 'pulumi.set', (['self', '"""is_outdated"""', 'value'], {}), "(self, 'is_outdated', value)\n", (22653, 22681), False, 'import pulumi\n'), ((22965, 22993), 'pulumi.get', 'pulumi.get', (['self', '"""key_name"""'], {}), "(self, 'key_name')\n", (22975, 22993), False, 'import pulumi\n'), ((23084, 23119), 'pulumi.set', 'pulumi.set', (['self', '"""key_name"""', 'value'], {}), "(self, 'key_name', value)\n", (23094, 23119), False, 'import pulumi\n'), ((23406, 23448), 'pulumi.get', 'pulumi.get', (['self', '"""kms_encrypted_password"""'], {}), "(self, 'kms_encrypted_password')\n", (23416, 23448), False, 'import pulumi\n'), ((23567, 23616), 'pulumi.set', 'pulumi.set', (['self', '"""kms_encrypted_password"""', 'value'], {}), 
"(self, 'kms_encrypted_password', value)\n", (23577, 23616), False, 'import pulumi\n'), ((24081, 24123), 'pulumi.get', 'pulumi.get', (['self', '"""kms_encryption_context"""'], {}), "(self, 'kms_encryption_context')\n", (24091, 24123), False, 'import pulumi\n'), ((24256, 24305), 'pulumi.set', 'pulumi.set', (['self', '"""kms_encryption_context"""', 'value'], {}), "(self, 'kms_encryption_context', value)\n", (24266, 24305), False, 'import pulumi\n'), ((24511, 24539), 'pulumi.get', 'pulumi.get', (['self', '"""override"""'], {}), "(self, 'override')\n", (24521, 24539), False, 'import pulumi\n'), ((24631, 24666), 'pulumi.set', 'pulumi.set', (['self', '"""override"""', 'value'], {}), "(self, 'override', value)\n", (24641, 24666), False, 'import pulumi\n'), ((25165, 25193), 'pulumi.get', 'pulumi.get', (['self', '"""password"""'], {}), "(self, 'password')\n", (25175, 25193), False, 'import pulumi\n'), ((25284, 25319), 'pulumi.set', 'pulumi.set', (['self', '"""password"""', 'value'], {}), "(self, 'password', value)\n", (25294, 25319), False, 'import pulumi\n'), ((25738, 25774), 'pulumi.get', 'pulumi.get', (['self', '"""password_inherit"""'], {}), "(self, 'password_inherit')\n", (25748, 25774), False, 'import pulumi\n'), ((25882, 25925), 'pulumi.set', 'pulumi.set', (['self', '"""password_inherit"""', 'value'], {}), "(self, 'password_inherit', value)\n", (25892, 25925), False, 'import pulumi\n'), ((26117, 26154), 'pulumi.get', 'pulumi.get', (['self', '"""resource_group_id"""'], {}), "(self, 'resource_group_id')\n", (26127, 26154), False, 'import pulumi\n'), ((26263, 26307), 'pulumi.set', 'pulumi.set', (['self', '"""resource_group_id"""', 'value'], {}), "(self, 'resource_group_id', value)\n", (26273, 26307), False, 'import pulumi\n'), ((26574, 26603), 'pulumi.get', 'pulumi.get', (['self', '"""role_name"""'], {}), "(self, 'role_name')\n", (26584, 26603), False, 'import pulumi\n'), ((26696, 26732), 'pulumi.set', 'pulumi.set', (['self', '"""role_name"""', 'value'], {}), "(self, 
'role_name', value)\n", (26706, 26732), False, 'import pulumi\n'), ((27237, 27283), 'pulumi.get', 'pulumi.get', (['self', '"""scaling_configuration_name"""'], {}), "(self, 'scaling_configuration_name')\n", (27247, 27283), False, 'import pulumi\n'), ((27410, 27463), 'pulumi.set', 'pulumi.set', (['self', '"""scaling_configuration_name"""', 'value'], {}), "(self, 'scaling_configuration_name', value)\n", (27420, 27463), False, 'import pulumi\n'), ((27729, 27766), 'pulumi.get', 'pulumi.get', (['self', '"""security_group_id"""'], {}), "(self, 'security_group_id')\n", (27739, 27766), False, 'import pulumi\n'), ((27875, 27919), 'pulumi.set', 'pulumi.set', (['self', '"""security_group_id"""', 'value'], {}), "(self, 'security_group_id', value)\n", (27885, 27919), False, 'import pulumi\n'), ((28217, 28255), 'pulumi.get', 'pulumi.get', (['self', '"""security_group_ids"""'], {}), "(self, 'security_group_ids')\n", (28227, 28255), False, 'import pulumi\n'), ((28390, 28435), 'pulumi.set', 'pulumi.set', (['self', '"""security_group_ids"""', 'value'], {}), "(self, 'security_group_ids', value)\n", (28400, 28435), False, 'import pulumi\n'), ((28750, 28780), 'pulumi.get', 'pulumi.get', (['self', '"""substitute"""'], {}), "(self, 'substitute')\n", (28760, 28780), False, 'import pulumi\n'), ((28875, 28912), 'pulumi.set', 'pulumi.set', (['self', '"""substitute"""', 'value'], {}), "(self, 'substitute', value)\n", (28885, 28912), False, 'import pulumi\n'), ((29163, 29218), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_auto_snapshot_policy_id"""'], {}), "(self, 'system_disk_auto_snapshot_policy_id')\n", (29173, 29218), False, 'import pulumi\n'), ((29363, 29425), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_auto_snapshot_policy_id"""', 'value'], {}), "(self, 'system_disk_auto_snapshot_policy_id', value)\n", (29373, 29425), False, 'import pulumi\n'), ((29825, 29865), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_category"""'], {}), "(self, 'system_disk_category')\n", 
(29835, 29865), False, 'import pulumi\n'), ((29980, 30027), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_category"""', 'value'], {}), "(self, 'system_disk_category', value)\n", (29990, 30027), False, 'import pulumi\n'), ((30342, 30385), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_description"""'], {}), "(self, 'system_disk_description')\n", (30352, 30385), False, 'import pulumi\n'), ((30506, 30556), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_description"""', 'value'], {}), "(self, 'system_disk_description', value)\n", (30516, 30556), False, 'import pulumi\n'), ((30965, 31001), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_name"""'], {}), "(self, 'system_disk_name')\n", (30975, 31001), False, 'import pulumi\n'), ((31108, 31151), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_name"""', 'value'], {}), "(self, 'system_disk_name', value)\n", (31118, 31151), False, 'import pulumi\n'), ((31403, 31452), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_performance_level"""'], {}), "(self, 'system_disk_performance_level')\n", (31413, 31452), False, 'import pulumi\n'), ((31585, 31641), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_performance_level"""', 'value'], {}), "(self, 'system_disk_performance_level', value)\n", (31595, 31641), False, 'import pulumi\n'), ((32078, 32114), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_size"""'], {}), "(self, 'system_disk_size')\n", (32088, 32114), False, 'import pulumi\n'), ((32221, 32264), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_size"""', 'value'], {}), "(self, 'system_disk_size', value)\n", (32231, 32264), False, 'import pulumi\n'), ((32783, 32807), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (32793, 32807), False, 'import pulumi\n'), ((32904, 32935), 'pulumi.set', 'pulumi.set', (['self', '"""tags"""', 'value'], {}), "(self, 'tags', value)\n", (32914, 32935), False, 'import pulumi\n'), ((33203, 33232), 'pulumi.get', 
'pulumi.get', (['self', '"""user_data"""'], {}), "(self, 'user_data')\n", (33213, 33232), False, 'import pulumi\n'), ((33325, 33361), 'pulumi.set', 'pulumi.set', (['self', '"""user_data"""', 'value'], {}), "(self, 'user_data', value)\n", (33335, 33361), False, 'import pulumi\n'), ((48479, 48505), 'pulumi.get', 'pulumi.get', (['self', '"""active"""'], {}), "(self, 'active')\n", (48489, 48505), False, 'import pulumi\n'), ((48593, 48626), 'pulumi.set', 'pulumi.set', (['self', '"""active"""', 'value'], {}), "(self, 'active', value)\n", (48603, 48626), False, 'import pulumi\n'), ((48889, 48929), 'pulumi.get', 'pulumi.get', (['self', '"""credit_specification"""'], {}), "(self, 'credit_specification')\n", (48899, 48929), False, 'import pulumi\n'), ((49044, 49091), 'pulumi.set', 'pulumi.set', (['self', '"""credit_specification"""', 'value'], {}), "(self, 'credit_specification', value)\n", (49054, 49091), False, 'import pulumi\n'), ((49386, 49416), 'pulumi.get', 'pulumi.get', (['self', '"""data_disks"""'], {}), "(self, 'data_disks')\n", (49396, 49416), False, 'import pulumi\n'), ((49566, 49603), 'pulumi.set', 'pulumi.set', (['self', '"""data_disks"""', 'value'], {}), "(self, 'data_disks', value)\n", (49576, 49603), False, 'import pulumi\n'), ((49850, 49876), 'pulumi.get', 'pulumi.get', (['self', '"""enable"""'], {}), "(self, 'enable')\n", (49860, 49876), False, 'import pulumi\n'), ((49964, 49997), 'pulumi.set', 'pulumi.set', (['self', '"""enable"""', 'value'], {}), "(self, 'enable', value)\n", (49974, 49997), False, 'import pulumi\n'), ((50266, 50298), 'pulumi.get', 'pulumi.get', (['self', '"""force_delete"""'], {}), "(self, 'force_delete')\n", (50276, 50298), False, 'import pulumi\n'), ((50398, 50437), 'pulumi.set', 'pulumi.set', (['self', '"""force_delete"""', 'value'], {}), "(self, 'force_delete', value)\n", (50408, 50437), False, 'import pulumi\n'), ((50679, 50707), 'pulumi.get', 'pulumi.get', (['self', '"""image_id"""'], {}), "(self, 'image_id')\n", (50689, 50707), 
False, 'import pulumi\n'), ((50798, 50833), 'pulumi.set', 'pulumi.set', (['self', '"""image_id"""', 'value'], {}), "(self, 'image_id', value)\n", (50808, 50833), False, 'import pulumi\n'), ((51081, 51111), 'pulumi.get', 'pulumi.get', (['self', '"""image_name"""'], {}), "(self, 'image_name')\n", (51091, 51111), False, 'import pulumi\n'), ((51206, 51243), 'pulumi.set', 'pulumi.set', (['self', '"""image_name"""', 'value'], {}), "(self, 'image_name', value)\n", (51216, 51243), False, 'import pulumi\n'), ((51514, 51546), 'pulumi.get', 'pulumi.get', (['self', '"""instance_ids"""'], {}), "(self, 'instance_ids')\n", (51524, 51546), False, 'import pulumi\n'), ((51669, 51708), 'pulumi.set', 'pulumi.set', (['self', '"""instance_ids"""', 'value'], {}), "(self, 'instance_ids', value)\n", (51679, 51708), False, 'import pulumi\n'), ((51955, 51988), 'pulumi.get', 'pulumi.get', (['self', '"""instance_name"""'], {}), "(self, 'instance_name')\n", (51965, 51988), False, 'import pulumi\n'), ((52089, 52129), 'pulumi.set', 'pulumi.set', (['self', '"""instance_name"""', 'value'], {}), "(self, 'instance_name', value)\n", (52099, 52129), False, 'import pulumi\n'), ((52326, 52359), 'pulumi.get', 'pulumi.get', (['self', '"""instance_type"""'], {}), "(self, 'instance_type')\n", (52336, 52359), False, 'import pulumi\n'), ((52460, 52500), 'pulumi.set', 'pulumi.set', (['self', '"""instance_type"""', 'value'], {}), "(self, 'instance_type', value)\n", (52470, 52500), False, 'import pulumi\n'), ((52724, 52758), 'pulumi.get', 'pulumi.get', (['self', '"""instance_types"""'], {}), "(self, 'instance_types')\n", (52734, 52758), False, 'import pulumi\n'), ((52885, 52926), 'pulumi.set', 'pulumi.set', (['self', '"""instance_types"""', 'value'], {}), "(self, 'instance_types', value)\n", (52895, 52926), False, 'import pulumi\n'), ((53193, 53233), 'pulumi.get', 'pulumi.get', (['self', '"""internet_charge_type"""'], {}), "(self, 'internet_charge_type')\n", (53203, 53233), False, 'import pulumi\n'), ((53348, 
53395), 'pulumi.set', 'pulumi.set', (['self', '"""internet_charge_type"""', 'value'], {}), "(self, 'internet_charge_type', value)\n", (53358, 53395), False, 'import pulumi\n'), ((53700, 53745), 'pulumi.get', 'pulumi.get', (['self', '"""internet_max_bandwidth_in"""'], {}), "(self, 'internet_max_bandwidth_in')\n", (53710, 53745), False, 'import pulumi\n'), ((53870, 53922), 'pulumi.set', 'pulumi.set', (['self', '"""internet_max_bandwidth_in"""', 'value'], {}), "(self, 'internet_max_bandwidth_in', value)\n", (53880, 53922), False, 'import pulumi\n'), ((54248, 54294), 'pulumi.get', 'pulumi.get', (['self', '"""internet_max_bandwidth_out"""'], {}), "(self, 'internet_max_bandwidth_out')\n", (54258, 54294), False, 'import pulumi\n'), ((54421, 54474), 'pulumi.set', 'pulumi.set', (['self', '"""internet_max_bandwidth_out"""', 'value'], {}), "(self, 'internet_max_bandwidth_out', value)\n", (54431, 54474), False, 'import pulumi\n'), ((54739, 54771), 'pulumi.get', 'pulumi.get', (['self', '"""io_optimized"""'], {}), "(self, 'io_optimized')\n", (54749, 54771), False, 'import pulumi\n'), ((54870, 54909), 'pulumi.set', 'pulumi.set', (['self', '"""io_optimized"""', 'value'], {}), "(self, 'io_optimized', value)\n", (54880, 54909), False, 'import pulumi\n'), ((55126, 55157), 'pulumi.get', 'pulumi.get', (['self', '"""is_outdated"""'], {}), "(self, 'is_outdated')\n", (55136, 55157), False, 'import pulumi\n'), ((55255, 55293), 'pulumi.set', 'pulumi.set', (['self', '"""is_outdated"""', 'value'], {}), "(self, 'is_outdated', value)\n", (55265, 55293), False, 'import pulumi\n'), ((55577, 55605), 'pulumi.get', 'pulumi.get', (['self', '"""key_name"""'], {}), "(self, 'key_name')\n", (55587, 55605), False, 'import pulumi\n'), ((55696, 55731), 'pulumi.set', 'pulumi.set', (['self', '"""key_name"""', 'value'], {}), "(self, 'key_name', value)\n", (55706, 55731), False, 'import pulumi\n'), ((56018, 56060), 'pulumi.get', 'pulumi.get', (['self', '"""kms_encrypted_password"""'], {}), "(self, 
'kms_encrypted_password')\n", (56028, 56060), False, 'import pulumi\n'), ((56179, 56228), 'pulumi.set', 'pulumi.set', (['self', '"""kms_encrypted_password"""', 'value'], {}), "(self, 'kms_encrypted_password', value)\n", (56189, 56228), False, 'import pulumi\n'), ((56693, 56735), 'pulumi.get', 'pulumi.get', (['self', '"""kms_encryption_context"""'], {}), "(self, 'kms_encryption_context')\n", (56703, 56735), False, 'import pulumi\n'), ((56868, 56917), 'pulumi.set', 'pulumi.set', (['self', '"""kms_encryption_context"""', 'value'], {}), "(self, 'kms_encryption_context', value)\n", (56878, 56917), False, 'import pulumi\n'), ((57123, 57151), 'pulumi.get', 'pulumi.get', (['self', '"""override"""'], {}), "(self, 'override')\n", (57133, 57151), False, 'import pulumi\n'), ((57243, 57278), 'pulumi.set', 'pulumi.set', (['self', '"""override"""', 'value'], {}), "(self, 'override', value)\n", (57253, 57278), False, 'import pulumi\n'), ((57777, 57805), 'pulumi.get', 'pulumi.get', (['self', '"""password"""'], {}), "(self, 'password')\n", (57787, 57805), False, 'import pulumi\n'), ((57896, 57931), 'pulumi.set', 'pulumi.set', (['self', '"""password"""', 'value'], {}), "(self, 'password', value)\n", (57906, 57931), False, 'import pulumi\n'), ((58349, 58385), 'pulumi.get', 'pulumi.get', (['self', '"""password_inherit"""'], {}), "(self, 'password_inherit')\n", (58359, 58385), False, 'import pulumi\n'), ((58493, 58536), 'pulumi.set', 'pulumi.set', (['self', '"""password_inherit"""', 'value'], {}), "(self, 'password_inherit', value)\n", (58503, 58536), False, 'import pulumi\n'), ((58728, 58765), 'pulumi.get', 'pulumi.get', (['self', '"""resource_group_id"""'], {}), "(self, 'resource_group_id')\n", (58738, 58765), False, 'import pulumi\n'), ((58874, 58918), 'pulumi.set', 'pulumi.set', (['self', '"""resource_group_id"""', 'value'], {}), "(self, 'resource_group_id', value)\n", (58884, 58918), False, 'import pulumi\n'), ((59185, 59214), 'pulumi.get', 'pulumi.get', (['self', 
'"""role_name"""'], {}), "(self, 'role_name')\n", (59195, 59214), False, 'import pulumi\n'), ((59307, 59343), 'pulumi.set', 'pulumi.set', (['self', '"""role_name"""', 'value'], {}), "(self, 'role_name', value)\n", (59317, 59343), False, 'import pulumi\n'), ((59848, 59894), 'pulumi.get', 'pulumi.get', (['self', '"""scaling_configuration_name"""'], {}), "(self, 'scaling_configuration_name')\n", (59858, 59894), False, 'import pulumi\n'), ((60021, 60074), 'pulumi.set', 'pulumi.set', (['self', '"""scaling_configuration_name"""', 'value'], {}), "(self, 'scaling_configuration_name', value)\n", (60031, 60074), False, 'import pulumi\n'), ((60294, 60330), 'pulumi.get', 'pulumi.get', (['self', '"""scaling_group_id"""'], {}), "(self, 'scaling_group_id')\n", (60304, 60330), False, 'import pulumi\n'), ((60437, 60480), 'pulumi.set', 'pulumi.set', (['self', '"""scaling_group_id"""', 'value'], {}), "(self, 'scaling_group_id', value)\n", (60447, 60480), False, 'import pulumi\n'), ((60746, 60783), 'pulumi.get', 'pulumi.get', (['self', '"""security_group_id"""'], {}), "(self, 'security_group_id')\n", (60756, 60783), False, 'import pulumi\n'), ((60892, 60936), 'pulumi.set', 'pulumi.set', (['self', '"""security_group_id"""', 'value'], {}), "(self, 'security_group_id', value)\n", (60902, 60936), False, 'import pulumi\n'), ((61234, 61272), 'pulumi.get', 'pulumi.get', (['self', '"""security_group_ids"""'], {}), "(self, 'security_group_ids')\n", (61244, 61272), False, 'import pulumi\n'), ((61407, 61452), 'pulumi.set', 'pulumi.set', (['self', '"""security_group_ids"""', 'value'], {}), "(self, 'security_group_ids', value)\n", (61417, 61452), False, 'import pulumi\n'), ((61767, 61797), 'pulumi.get', 'pulumi.get', (['self', '"""substitute"""'], {}), "(self, 'substitute')\n", (61777, 61797), False, 'import pulumi\n'), ((61892, 61929), 'pulumi.set', 'pulumi.set', (['self', '"""substitute"""', 'value'], {}), "(self, 'substitute', value)\n", (61902, 61929), False, 'import pulumi\n'), ((62180, 
62235), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_auto_snapshot_policy_id"""'], {}), "(self, 'system_disk_auto_snapshot_policy_id')\n", (62190, 62235), False, 'import pulumi\n'), ((62380, 62442), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_auto_snapshot_policy_id"""', 'value'], {}), "(self, 'system_disk_auto_snapshot_policy_id', value)\n", (62390, 62442), False, 'import pulumi\n'), ((62842, 62882), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_category"""'], {}), "(self, 'system_disk_category')\n", (62852, 62882), False, 'import pulumi\n'), ((62997, 63044), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_category"""', 'value'], {}), "(self, 'system_disk_category', value)\n", (63007, 63044), False, 'import pulumi\n'), ((63359, 63402), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_description"""'], {}), "(self, 'system_disk_description')\n", (63369, 63402), False, 'import pulumi\n'), ((63523, 63573), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_description"""', 'value'], {}), "(self, 'system_disk_description', value)\n", (63533, 63573), False, 'import pulumi\n'), ((63982, 64018), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_name"""'], {}), "(self, 'system_disk_name')\n", (63992, 64018), False, 'import pulumi\n'), ((64125, 64168), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_name"""', 'value'], {}), "(self, 'system_disk_name', value)\n", (64135, 64168), False, 'import pulumi\n'), ((64420, 64469), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_performance_level"""'], {}), "(self, 'system_disk_performance_level')\n", (64430, 64469), False, 'import pulumi\n'), ((64602, 64658), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_performance_level"""', 'value'], {}), "(self, 'system_disk_performance_level', value)\n", (64612, 64658), False, 'import pulumi\n'), ((65095, 65131), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_size"""'], {}), "(self, 'system_disk_size')\n", (65105, 65131), 
False, 'import pulumi\n'), ((65238, 65281), 'pulumi.set', 'pulumi.set', (['self', '"""system_disk_size"""', 'value'], {}), "(self, 'system_disk_size', value)\n", (65248, 65281), False, 'import pulumi\n'), ((65800, 65824), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (65810, 65824), False, 'import pulumi\n'), ((65921, 65952), 'pulumi.set', 'pulumi.set', (['self', '"""tags"""', 'value'], {}), "(self, 'tags', value)\n", (65931, 65952), False, 'import pulumi\n'), ((66220, 66249), 'pulumi.get', 'pulumi.get', (['self', '"""user_data"""'], {}), "(self, 'user_data')\n", (66230, 66249), False, 'import pulumi\n'), ((66342, 66378), 'pulumi.set', 'pulumi.set', (['self', '"""user_data"""', 'value'], {}), "(self, 'user_data', value)\n", (66352, 66378), False, 'import pulumi\n'), ((98227, 98253), 'pulumi.get', 'pulumi.get', (['self', '"""active"""'], {}), "(self, 'active')\n", (98237, 98253), False, 'import pulumi\n'), ((98517, 98557), 'pulumi.get', 'pulumi.get', (['self', '"""credit_specification"""'], {}), "(self, 'credit_specification')\n", (98527, 98557), False, 'import pulumi\n'), ((98843, 98873), 'pulumi.get', 'pulumi.get', (['self', '"""data_disks"""'], {}), "(self, 'data_disks')\n", (98853, 98873), False, 'import pulumi\n'), ((99121, 99147), 'pulumi.get', 'pulumi.get', (['self', '"""enable"""'], {}), "(self, 'enable')\n", (99131, 99147), False, 'import pulumi\n'), ((99417, 99449), 'pulumi.get', 'pulumi.get', (['self', '"""force_delete"""'], {}), "(self, 'force_delete')\n", (99427, 99449), False, 'import pulumi\n'), ((99692, 99720), 'pulumi.get', 'pulumi.get', (['self', '"""image_id"""'], {}), "(self, 'image_id')\n", (99702, 99720), False, 'import pulumi\n'), ((99969, 99999), 'pulumi.get', 'pulumi.get', (['self', '"""image_name"""'], {}), "(self, 'image_name')\n", (99979, 99999), False, 'import pulumi\n'), ((100257, 100289), 'pulumi.get', 'pulumi.get', (['self', '"""instance_ids"""'], {}), "(self, 'instance_ids')\n", (100267, 100289), 
False, 'import pulumi\n'), ((100537, 100570), 'pulumi.get', 'pulumi.get', (['self', '"""instance_name"""'], {}), "(self, 'instance_name')\n", (100547, 100570), False, 'import pulumi\n'), ((100768, 100801), 'pulumi.get', 'pulumi.get', (['self', '"""instance_type"""'], {}), "(self, 'instance_type')\n", (100778, 100801), False, 'import pulumi\n'), ((101012, 101046), 'pulumi.get', 'pulumi.get', (['self', '"""instance_types"""'], {}), "(self, 'instance_types')\n", (101022, 101046), False, 'import pulumi\n'), ((101314, 101354), 'pulumi.get', 'pulumi.get', (['self', '"""internet_charge_type"""'], {}), "(self, 'internet_charge_type')\n", (101324, 101354), False, 'import pulumi\n'), ((101650, 101695), 'pulumi.get', 'pulumi.get', (['self', '"""internet_max_bandwidth_in"""'], {}), "(self, 'internet_max_bandwidth_in')\n", (101660, 101695), False, 'import pulumi\n'), ((102022, 102068), 'pulumi.get', 'pulumi.get', (['self', '"""internet_max_bandwidth_out"""'], {}), "(self, 'internet_max_bandwidth_out')\n", (102032, 102068), False, 'import pulumi\n'), ((102334, 102366), 'pulumi.get', 'pulumi.get', (['self', '"""io_optimized"""'], {}), "(self, 'io_optimized')\n", (102344, 102366), False, 'import pulumi\n'), ((102584, 102615), 'pulumi.get', 'pulumi.get', (['self', '"""is_outdated"""'], {}), "(self, 'is_outdated')\n", (102594, 102615), False, 'import pulumi\n'), ((102900, 102928), 'pulumi.get', 'pulumi.get', (['self', '"""key_name"""'], {}), "(self, 'key_name')\n", (102910, 102928), False, 'import pulumi\n'), ((103216, 103258), 'pulumi.get', 'pulumi.get', (['self', '"""kms_encrypted_password"""'], {}), "(self, 'kms_encrypted_password')\n", (103226, 103258), False, 'import pulumi\n'), ((103724, 103766), 'pulumi.get', 'pulumi.get', (['self', '"""kms_encryption_context"""'], {}), "(self, 'kms_encryption_context')\n", (103734, 103766), False, 'import pulumi\n'), ((103973, 104001), 'pulumi.get', 'pulumi.get', (['self', '"""override"""'], {}), "(self, 'override')\n", (103983, 104001), 
False, 'import pulumi\n'), ((104501, 104529), 'pulumi.get', 'pulumi.get', (['self', '"""password"""'], {}), "(self, 'password')\n", (104511, 104529), False, 'import pulumi\n'), ((104948, 104984), 'pulumi.get', 'pulumi.get', (['self', '"""password_inherit"""'], {}), "(self, 'password_inherit')\n", (104958, 104984), False, 'import pulumi\n'), ((105177, 105214), 'pulumi.get', 'pulumi.get', (['self', '"""resource_group_id"""'], {}), "(self, 'resource_group_id')\n", (105187, 105214), False, 'import pulumi\n'), ((105482, 105511), 'pulumi.get', 'pulumi.get', (['self', '"""role_name"""'], {}), "(self, 'role_name')\n", (105492, 105511), False, 'import pulumi\n'), ((106007, 106053), 'pulumi.get', 'pulumi.get', (['self', '"""scaling_configuration_name"""'], {}), "(self, 'scaling_configuration_name')\n", (106017, 106053), False, 'import pulumi\n'), ((106264, 106300), 'pulumi.get', 'pulumi.get', (['self', '"""scaling_group_id"""'], {}), "(self, 'scaling_group_id')\n", (106274, 106300), False, 'import pulumi\n'), ((106567, 106604), 'pulumi.get', 'pulumi.get', (['self', '"""security_group_id"""'], {}), "(self, 'security_group_id')\n", (106577, 106604), False, 'import pulumi\n'), ((106889, 106927), 'pulumi.get', 'pulumi.get', (['self', '"""security_group_ids"""'], {}), "(self, 'security_group_ids')\n", (106899, 106927), False, 'import pulumi\n'), ((107233, 107263), 'pulumi.get', 'pulumi.get', (['self', '"""substitute"""'], {}), "(self, 'substitute')\n", (107243, 107263), False, 'import pulumi\n'), ((107515, 107570), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_auto_snapshot_policy_id"""'], {}), "(self, 'system_disk_auto_snapshot_policy_id')\n", (107525, 107570), False, 'import pulumi\n'), ((107971, 108011), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_category"""'], {}), "(self, 'system_disk_category')\n", (107981, 108011), False, 'import pulumi\n'), ((108327, 108370), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_description"""'], {}), "(self, 
'system_disk_description')\n", (108337, 108370), False, 'import pulumi\n'), ((108780, 108816), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_name"""'], {}), "(self, 'system_disk_name')\n", (108790, 108816), False, 'import pulumi\n'), ((109069, 109118), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_performance_level"""'], {}), "(self, 'system_disk_performance_level')\n", (109079, 109118), False, 'import pulumi\n'), ((109556, 109592), 'pulumi.get', 'pulumi.get', (['self', '"""system_disk_size"""'], {}), "(self, 'system_disk_size')\n", (109566, 109592), False, 'import pulumi\n'), ((110112, 110136), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (110122, 110136), False, 'import pulumi\n'), ((110405, 110434), 'pulumi.get', 'pulumi.get', (['self', '"""user_data"""'], {}), "(self, 'user_data')\n", (110415, 110434), False, 'import pulumi\n'), ((10457, 10495), 'pulumi.set', 'pulumi.set', (['__self__', '"""active"""', 'active'], {}), "(__self__, 'active', active)\n", (10467, 10495), False, 'import pulumi\n'), ((10553, 10619), 'pulumi.set', 'pulumi.set', (['__self__', '"""credit_specification"""', 'credit_specification'], {}), "(__self__, 'credit_specification', credit_specification)\n", (10563, 10619), False, 'import pulumi\n'), ((10667, 10713), 'pulumi.set', 'pulumi.set', (['__self__', '"""data_disks"""', 'data_disks'], {}), "(__self__, 'data_disks', data_disks)\n", (10677, 10713), False, 'import pulumi\n'), ((10757, 10795), 'pulumi.set', 'pulumi.set', (['__self__', '"""enable"""', 'enable'], {}), "(__self__, 'enable', enable)\n", (10767, 10795), False, 'import pulumi\n'), ((10845, 10895), 'pulumi.set', 'pulumi.set', (['__self__', '"""force_delete"""', 'force_delete'], {}), "(__self__, 'force_delete', force_delete)\n", (10855, 10895), False, 'import pulumi\n'), ((10941, 10983), 'pulumi.set', 'pulumi.set', (['__self__', '"""image_id"""', 'image_id'], {}), "(__self__, 'image_id', image_id)\n", (10951, 10983), False, 'import 
pulumi\n'), ((11031, 11077), 'pulumi.set', 'pulumi.set', (['__self__', '"""image_name"""', 'image_name'], {}), "(__self__, 'image_name', image_name)\n", (11041, 11077), False, 'import pulumi\n'), ((11127, 11295), 'warnings.warn', 'warnings.warn', (['"""Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."""', 'DeprecationWarning'], {}), '(\n "Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."\n , DeprecationWarning)\n', (11140, 11295), False, 'import warnings\n'), ((11302, 11480), 'pulumi.log.warn', 'pulumi.log.warn', (['"""instance_ids is deprecated: Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."""'], {}), '(\n "instance_ids is deprecated: Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."\n )\n', (11317, 11480), False, 'import pulumi\n'), ((11524, 11574), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_ids"""', 'instance_ids'], {}), "(__self__, 'instance_ids', instance_ids)\n", (11534, 11574), False, 'import pulumi\n'), ((11625, 11677), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_name"""', 'instance_name'], {}), "(__self__, 'instance_name', instance_name)\n", (11635, 11677), False, 'import pulumi\n'), ((11728, 11780), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_type"""', 'instance_type'], {}), "(__self__, 'instance_type', instance_type)\n", (11738, 11780), False, 'import pulumi\n'), ((11832, 11886), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_types"""', 'instance_types'], {}), "(__self__, 'instance_types', instance_types)\n", (11842, 11886), False, 'import pulumi\n'), ((11944, 12010), 'pulumi.set', 'pulumi.set', (['__self__', '"""internet_charge_type"""', 'internet_charge_type'], {}), "(__self__, 'internet_charge_type', 
internet_charge_type)\n", (11954, 12010), False, 'import pulumi\n'), ((12073, 12149), 'pulumi.set', 'pulumi.set', (['__self__', '"""internet_max_bandwidth_in"""', 'internet_max_bandwidth_in'], {}), "(__self__, 'internet_max_bandwidth_in', internet_max_bandwidth_in)\n", (12083, 12149), False, 'import pulumi\n'), ((12213, 12291), 'pulumi.set', 'pulumi.set', (['__self__', '"""internet_max_bandwidth_out"""', 'internet_max_bandwidth_out'], {}), "(__self__, 'internet_max_bandwidth_out', internet_max_bandwidth_out)\n", (12223, 12291), False, 'import pulumi\n'), ((12341, 12551), 'warnings.warn', 'warnings.warn', (['"""Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template."""', 'DeprecationWarning'], {}), "(\n 'Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.'\n , DeprecationWarning)\n", (12354, 12551), False, 'import warnings\n'), ((12558, 12778), 'pulumi.log.warn', 'pulumi.log.warn', (['"""io_optimized is deprecated: Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template."""'], {}), "(\n 'io_optimized is deprecated: Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. 
Suggest to remove it from your template.'\n )\n", (12573, 12778), False, 'import pulumi\n'), ((12822, 12872), 'pulumi.set', 'pulumi.set', (['__self__', '"""io_optimized"""', 'io_optimized'], {}), "(__self__, 'io_optimized', io_optimized)\n", (12832, 12872), False, 'import pulumi\n'), ((12921, 12969), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_outdated"""', 'is_outdated'], {}), "(__self__, 'is_outdated', is_outdated)\n", (12931, 12969), False, 'import pulumi\n'), ((13015, 13057), 'pulumi.set', 'pulumi.set', (['__self__', '"""key_name"""', 'key_name'], {}), "(__self__, 'key_name', key_name)\n", (13025, 13057), False, 'import pulumi\n'), ((13117, 13187), 'pulumi.set', 'pulumi.set', (['__self__', '"""kms_encrypted_password"""', 'kms_encrypted_password'], {}), "(__self__, 'kms_encrypted_password', kms_encrypted_password)\n", (13127, 13187), False, 'import pulumi\n'), ((13247, 13317), 'pulumi.set', 'pulumi.set', (['__self__', '"""kms_encryption_context"""', 'kms_encryption_context'], {}), "(__self__, 'kms_encryption_context', kms_encryption_context)\n", (13257, 13317), False, 'import pulumi\n'), ((13363, 13405), 'pulumi.set', 'pulumi.set', (['__self__', '"""override"""', 'override'], {}), "(__self__, 'override', override)\n", (13373, 13405), False, 'import pulumi\n'), ((13451, 13493), 'pulumi.set', 'pulumi.set', (['__self__', '"""password"""', 'password'], {}), "(__self__, 'password', password)\n", (13461, 13493), False, 'import pulumi\n'), ((13547, 13605), 'pulumi.set', 'pulumi.set', (['__self__', '"""password_inherit"""', 'password_inherit'], {}), "(__self__, 'password_inherit', password_inherit)\n", (13557, 13605), False, 'import pulumi\n'), ((13660, 13720), 'pulumi.set', 'pulumi.set', (['__self__', '"""resource_group_id"""', 'resource_group_id'], {}), "(__self__, 'resource_group_id', resource_group_id)\n", (13670, 13720), False, 'import pulumi\n'), ((13767, 13811), 'pulumi.set', 'pulumi.set', (['__self__', '"""role_name"""', 'role_name'], {}), "(__self__, 
'role_name', role_name)\n", (13777, 13811), False, 'import pulumi\n'), ((13875, 13953), 'pulumi.set', 'pulumi.set', (['__self__', '"""scaling_configuration_name"""', 'scaling_configuration_name'], {}), "(__self__, 'scaling_configuration_name', scaling_configuration_name)\n", (13885, 13953), False, 'import pulumi\n'), ((14008, 14068), 'pulumi.set', 'pulumi.set', (['__self__', '"""security_group_id"""', 'security_group_id'], {}), "(__self__, 'security_group_id', security_group_id)\n", (14018, 14068), False, 'import pulumi\n'), ((14124, 14186), 'pulumi.set', 'pulumi.set', (['__self__', '"""security_group_ids"""', 'security_group_ids'], {}), "(__self__, 'security_group_ids', security_group_ids)\n", (14134, 14186), False, 'import pulumi\n'), ((14234, 14280), 'pulumi.set', 'pulumi.set', (['__self__', '"""substitute"""', 'substitute'], {}), "(__self__, 'substitute', substitute)\n", (14244, 14280), False, 'import pulumi\n'), ((14353, 14453), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_auto_snapshot_policy_id"""', 'system_disk_auto_snapshot_policy_id'], {}), "(__self__, 'system_disk_auto_snapshot_policy_id',\n system_disk_auto_snapshot_policy_id)\n", (14363, 14453), False, 'import pulumi\n'), ((14507, 14573), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_category"""', 'system_disk_category'], {}), "(__self__, 'system_disk_category', system_disk_category)\n", (14517, 14573), False, 'import pulumi\n'), ((14634, 14706), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_description"""', 'system_disk_description'], {}), "(__self__, 'system_disk_description', system_disk_description)\n", (14644, 14706), False, 'import pulumi\n'), ((14760, 14818), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_name"""', 'system_disk_name'], {}), "(__self__, 'system_disk_name', system_disk_name)\n", (14770, 14818), False, 'import pulumi\n'), ((14885, 14973), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_performance_level"""', 
'system_disk_performance_level'], {}), "(__self__, 'system_disk_performance_level',\n system_disk_performance_level)\n", (14895, 14973), False, 'import pulumi\n'), ((15023, 15081), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_size"""', 'system_disk_size'], {}), "(__self__, 'system_disk_size', system_disk_size)\n", (15033, 15081), False, 'import pulumi\n'), ((15123, 15157), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (15133, 15157), False, 'import pulumi\n'), ((15204, 15248), 'pulumi.set', 'pulumi.set', (['__self__', '"""user_data"""', 'user_data'], {}), "(__self__, 'user_data', user_data)\n", (15214, 15248), False, 'import pulumi\n'), ((43343, 43381), 'pulumi.set', 'pulumi.set', (['__self__', '"""active"""', 'active'], {}), "(__self__, 'active', active)\n", (43353, 43381), False, 'import pulumi\n'), ((43439, 43505), 'pulumi.set', 'pulumi.set', (['__self__', '"""credit_specification"""', 'credit_specification'], {}), "(__self__, 'credit_specification', credit_specification)\n", (43449, 43505), False, 'import pulumi\n'), ((43553, 43599), 'pulumi.set', 'pulumi.set', (['__self__', '"""data_disks"""', 'data_disks'], {}), "(__self__, 'data_disks', data_disks)\n", (43563, 43599), False, 'import pulumi\n'), ((43643, 43681), 'pulumi.set', 'pulumi.set', (['__self__', '"""enable"""', 'enable'], {}), "(__self__, 'enable', enable)\n", (43653, 43681), False, 'import pulumi\n'), ((43731, 43781), 'pulumi.set', 'pulumi.set', (['__self__', '"""force_delete"""', 'force_delete'], {}), "(__self__, 'force_delete', force_delete)\n", (43741, 43781), False, 'import pulumi\n'), ((43827, 43869), 'pulumi.set', 'pulumi.set', (['__self__', '"""image_id"""', 'image_id'], {}), "(__self__, 'image_id', image_id)\n", (43837, 43869), False, 'import pulumi\n'), ((43917, 43963), 'pulumi.set', 'pulumi.set', (['__self__', '"""image_name"""', 'image_name'], {}), "(__self__, 'image_name', image_name)\n", (43927, 43963), False, 'import 
pulumi\n'), ((44013, 44181), 'warnings.warn', 'warnings.warn', (['"""Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."""', 'DeprecationWarning'], {}), '(\n "Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."\n , DeprecationWarning)\n', (44026, 44181), False, 'import warnings\n'), ((44188, 44366), 'pulumi.log.warn', 'pulumi.log.warn', (['"""instance_ids is deprecated: Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."""'], {}), '(\n "instance_ids is deprecated: Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."\n )\n', (44203, 44366), False, 'import pulumi\n'), ((44410, 44460), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_ids"""', 'instance_ids'], {}), "(__self__, 'instance_ids', instance_ids)\n", (44420, 44460), False, 'import pulumi\n'), ((44511, 44563), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_name"""', 'instance_name'], {}), "(__self__, 'instance_name', instance_name)\n", (44521, 44563), False, 'import pulumi\n'), ((44614, 44666), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_type"""', 'instance_type'], {}), "(__self__, 'instance_type', instance_type)\n", (44624, 44666), False, 'import pulumi\n'), ((44718, 44772), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_types"""', 'instance_types'], {}), "(__self__, 'instance_types', instance_types)\n", (44728, 44772), False, 'import pulumi\n'), ((44830, 44896), 'pulumi.set', 'pulumi.set', (['__self__', '"""internet_charge_type"""', 'internet_charge_type'], {}), "(__self__, 'internet_charge_type', internet_charge_type)\n", (44840, 44896), False, 'import pulumi\n'), ((44959, 45035), 'pulumi.set', 'pulumi.set', (['__self__', '"""internet_max_bandwidth_in"""', 
'internet_max_bandwidth_in'], {}), "(__self__, 'internet_max_bandwidth_in', internet_max_bandwidth_in)\n", (44969, 45035), False, 'import pulumi\n'), ((45099, 45177), 'pulumi.set', 'pulumi.set', (['__self__', '"""internet_max_bandwidth_out"""', 'internet_max_bandwidth_out'], {}), "(__self__, 'internet_max_bandwidth_out', internet_max_bandwidth_out)\n", (45109, 45177), False, 'import pulumi\n'), ((45227, 45437), 'warnings.warn', 'warnings.warn', (['"""Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template."""', 'DeprecationWarning'], {}), "(\n 'Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.'\n , DeprecationWarning)\n", (45240, 45437), False, 'import warnings\n'), ((45444, 45664), 'pulumi.log.warn', 'pulumi.log.warn', (['"""io_optimized is deprecated: Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template."""'], {}), "(\n 'io_optimized is deprecated: Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. 
Suggest to remove it from your template.'\n )\n", (45459, 45664), False, 'import pulumi\n'), ((45708, 45758), 'pulumi.set', 'pulumi.set', (['__self__', '"""io_optimized"""', 'io_optimized'], {}), "(__self__, 'io_optimized', io_optimized)\n", (45718, 45758), False, 'import pulumi\n'), ((45807, 45855), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_outdated"""', 'is_outdated'], {}), "(__self__, 'is_outdated', is_outdated)\n", (45817, 45855), False, 'import pulumi\n'), ((45901, 45943), 'pulumi.set', 'pulumi.set', (['__self__', '"""key_name"""', 'key_name'], {}), "(__self__, 'key_name', key_name)\n", (45911, 45943), False, 'import pulumi\n'), ((46003, 46073), 'pulumi.set', 'pulumi.set', (['__self__', '"""kms_encrypted_password"""', 'kms_encrypted_password'], {}), "(__self__, 'kms_encrypted_password', kms_encrypted_password)\n", (46013, 46073), False, 'import pulumi\n'), ((46133, 46203), 'pulumi.set', 'pulumi.set', (['__self__', '"""kms_encryption_context"""', 'kms_encryption_context'], {}), "(__self__, 'kms_encryption_context', kms_encryption_context)\n", (46143, 46203), False, 'import pulumi\n'), ((46249, 46291), 'pulumi.set', 'pulumi.set', (['__self__', '"""override"""', 'override'], {}), "(__self__, 'override', override)\n", (46259, 46291), False, 'import pulumi\n'), ((46337, 46379), 'pulumi.set', 'pulumi.set', (['__self__', '"""password"""', 'password'], {}), "(__self__, 'password', password)\n", (46347, 46379), False, 'import pulumi\n'), ((46433, 46491), 'pulumi.set', 'pulumi.set', (['__self__', '"""password_inherit"""', 'password_inherit'], {}), "(__self__, 'password_inherit', password_inherit)\n", (46443, 46491), False, 'import pulumi\n'), ((46546, 46606), 'pulumi.set', 'pulumi.set', (['__self__', '"""resource_group_id"""', 'resource_group_id'], {}), "(__self__, 'resource_group_id', resource_group_id)\n", (46556, 46606), False, 'import pulumi\n'), ((46653, 46697), 'pulumi.set', 'pulumi.set', (['__self__', '"""role_name"""', 'role_name'], {}), "(__self__, 
'role_name', role_name)\n", (46663, 46697), False, 'import pulumi\n'), ((46761, 46839), 'pulumi.set', 'pulumi.set', (['__self__', '"""scaling_configuration_name"""', 'scaling_configuration_name'], {}), "(__self__, 'scaling_configuration_name', scaling_configuration_name)\n", (46771, 46839), False, 'import pulumi\n'), ((46893, 46951), 'pulumi.set', 'pulumi.set', (['__self__', '"""scaling_group_id"""', 'scaling_group_id'], {}), "(__self__, 'scaling_group_id', scaling_group_id)\n", (46903, 46951), False, 'import pulumi\n'), ((47006, 47066), 'pulumi.set', 'pulumi.set', (['__self__', '"""security_group_id"""', 'security_group_id'], {}), "(__self__, 'security_group_id', security_group_id)\n", (47016, 47066), False, 'import pulumi\n'), ((47122, 47184), 'pulumi.set', 'pulumi.set', (['__self__', '"""security_group_ids"""', 'security_group_ids'], {}), "(__self__, 'security_group_ids', security_group_ids)\n", (47132, 47184), False, 'import pulumi\n'), ((47232, 47278), 'pulumi.set', 'pulumi.set', (['__self__', '"""substitute"""', 'substitute'], {}), "(__self__, 'substitute', substitute)\n", (47242, 47278), False, 'import pulumi\n'), ((47351, 47451), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_auto_snapshot_policy_id"""', 'system_disk_auto_snapshot_policy_id'], {}), "(__self__, 'system_disk_auto_snapshot_policy_id',\n system_disk_auto_snapshot_policy_id)\n", (47361, 47451), False, 'import pulumi\n'), ((47505, 47571), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_category"""', 'system_disk_category'], {}), "(__self__, 'system_disk_category', system_disk_category)\n", (47515, 47571), False, 'import pulumi\n'), ((47632, 47704), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_description"""', 'system_disk_description'], {}), "(__self__, 'system_disk_description', system_disk_description)\n", (47642, 47704), False, 'import pulumi\n'), ((47758, 47816), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_name"""', 'system_disk_name'], {}), 
"(__self__, 'system_disk_name', system_disk_name)\n", (47768, 47816), False, 'import pulumi\n'), ((47883, 47971), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_performance_level"""', 'system_disk_performance_level'], {}), "(__self__, 'system_disk_performance_level',\n system_disk_performance_level)\n", (47893, 47971), False, 'import pulumi\n'), ((48021, 48079), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_disk_size"""', 'system_disk_size'], {}), "(__self__, 'system_disk_size', system_disk_size)\n", (48031, 48079), False, 'import pulumi\n'), ((48121, 48155), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (48131, 48155), False, 'import pulumi\n'), ((48202, 48246), 'pulumi.set', 'pulumi.set', (['__self__', '"""user_data"""', 'user_data'], {}), "(__self__, 'user_data', user_data)\n", (48212, 48246), False, 'import pulumi\n'), ((80841, 80865), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (80863, 80865), False, 'import pulumi\n'), ((95413, 95442), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (95435, 95442), False, 'import pulumi\n'), ((81867, 82035), 'warnings.warn', 'warnings.warn', (['"""Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."""', 'DeprecationWarning'], {}), '(\n "Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."\n , DeprecationWarning)\n', (81880, 82035), False, 'import warnings\n'), ((82046, 82224), 'pulumi.log.warn', 'pulumi.log.warn', (['"""instance_ids is deprecated: Field \'instance_ids\' has been deprecated from provider version 1.6.0. New resource \'alicloud_ess_attachment\' replaces it."""'], {}), '(\n "instance_ids is deprecated: Field \'instance_ids\' has been deprecated from provider version 1.6.0. 
New resource \'alicloud_ess_attachment\' replaces it."\n )\n', (82061, 82224), False, 'import pulumi\n'), ((82805, 83015), 'warnings.warn', 'warnings.warn', (['"""Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template."""', 'DeprecationWarning'], {}), "(\n 'Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.'\n , DeprecationWarning)\n", (82818, 83015), False, 'import warnings\n'), ((83026, 83246), 'pulumi.log.warn', 'pulumi.log.warn', (['"""io_optimized is deprecated: Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template."""'], {}), "(\n 'io_optimized is deprecated: Attribute io_optimized has been deprecated on instance resource. All the launched alicloud instances will be IO optimized. Suggest to remove it from your template.'\n )\n", (83041, 83246), False, 'import pulumi\n')]
|
# -*- coding: utf-8 -*-
import json
from itertools import izip
from django.test.client import Client
from networkapi.test.test_case import NetworkApiTestCase
from networkapi.util.geral import prepare_url
fixtures_base_path = 'networkapi/api_network/fixtures/integration/%s'
class NetworksIntegrationV6TestCase(NetworkApiTestCase):
    """Integration tests for IPv6 network creation through the v3 API.

    The fixtures pre-load users/permissions, network types, filters,
    equipment, environments and vlans that the test methods rely on.
    """
    fixtures = [
        'networkapi/system/fixtures/initial_variables.json',
        'networkapi/usuario/fixtures/initial_usuario.json',
        'networkapi/grupo/fixtures/initial_ugrupo.json',
        'networkapi/usuario/fixtures/initial_usuariogrupo.json',
        'networkapi/api_ogp/fixtures/initial_objecttype.json',
        'networkapi/api_ogp/fixtures/initial_objectgrouppermissiongeneral.json',
        'networkapi/grupo/fixtures/initial_permissions.json',
        'networkapi/grupo/fixtures/initial_permissoes_administrativas.json',
        'networkapi/vlan/fixtures/initial_tipo_rede.json',
        'networkapi/filter/fixtures/initial_filter.json',
        'networkapi/filterequiptype/fixtures/initial_filterequiptype.json',
        'networkapi/equipamento/fixtures/initial_tipo_equip.json',
        'networkapi/equipamento/fixtures/initial_equip_marca.json',
        'networkapi/equipamento/fixtures/initial_equip_model.json',
        fixtures_base_path % 'initial_vrf.json',
        fixtures_base_path % 'initial_environment_dc.json',
        fixtures_base_path % 'initial_environment_envlog.json',
        fixtures_base_path % 'initial_environment_gl3.json',
        fixtures_base_path % 'initial_environment.json',
        fixtures_base_path % 'initial_ipconfig.json',
        fixtures_base_path % 'initial_config_environment.json',
        fixtures_base_path % 'initial_equipments.json',
        fixtures_base_path % 'initial_equipments_env.json',
        fixtures_base_path % 'initial_vlan.json',
    ]
    def setUp(self):
        """Create an HTTP client and an auth header for the 'test' user."""
        self.client = Client()
        self.authorization = self.get_http_authorization('test')
    def tearDown(self):
        """No per-test cleanup is needed."""
        pass
    def test_create_networkv6_by_zero(self):
        """
        Test of integration for create environment, vlan, eqpt networks v6.

        ##################
        Starting test:
        - environment A:
            - eqpt 1, 2, 3
            - vrf 1
            - starting vlans 1, 2, 3
        - environment B:
            - eqpt 2, 4, 5
            - vrf 1
            - starting vlans 4, 5, 6, 7, 8, 9
        - environment C:
            - eqpt 5, 6
            - vrf 2
            - starting vlans 10, 11
        - environment D:
            - eqpt 7
            - vrf 1
            - starting vlans 1
        ##################
        ##################
        Starting networks:
            environment A:
                Nothing
            environment B:
                fdbe:bebe:bebe:1201:0000:0000:0000:0000/64
                fdfdf8:f53e:61e4::18/65
                fdbe:bebe:bebe:1203:0000:0000:0000:0000/64
            environment C:
                Nothing
            environment D:
                Nothing
        ##################
        ##################
        Inserting networks:
        - environment B:fdfd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64
        - environment C:fdbe:bebe:bebe:1200:0000:0000:0000:0000/65
        - environment C using prefix 24: fdfdf8:f53e:61e4::18/64
        - environment A:fdfc00:db20:35b:7399::5/65
        - environment A:fdbe:bebe:bebe:1204:0000:0000:0000:0000/65
        - environment B:fdbe:bebe:bebe:1205:0000:0000:0000:0000/64
        - environment A:fdbe:bebe:bebe:1204:8000:0000:0000:0000/65
        - environment D:fdbe:bebe:bebe:1200:0000:0000:0000:0000/64
        ##################
        """
        # Creates networks with octs
        self.create_netv6_with_octs()
        # Creates networks with auto octs and prefix
        self.create_netv6_without_octs()
    def search_all_vlans(self, ids_env):
        """Return the ids of all vlans belonging to the given environments."""
        search_vlan = {
            'start_record': 0,
            'end_record': 100,
            'asorting_cols': [],
            'searchable_columns': [],
            'extends_search': [
                {'ambiente': id_env} for id_env in ids_env
            ]
        }
        url = '/api/v3/vlan/'
        response = self.client.get(
            prepare_url(url, search=search_vlan, fields=['id']),
            HTTP_AUTHORIZATION=self.authorization
        )
        vlans = response.data['vlans']
        # Flatten the serialized objects into a plain list of ids.
        ids_vlans = [id_vlan['id'] for id_vlan in vlans]
        return ids_vlans
    def create_netv6_with_octs(self):
        """Creates networks v6 using first vlan."""
        # Each entry fully specifies the network blocks/mask and the target
        # environment; the vlan id is filled in per environment below.
        networks = [{
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1201',
            'block5': '0000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 64,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '0000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000',
            'env': 3
        }, {
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1202',
            'block5': '0000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 65,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '8000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000',
            'env': 3
        }, {
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1203',
            'block5': '0000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 64,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '0000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000',
            'env': 3
        }]
        # Fields requested back from the API when re-fetching the network.
        fields = [
            'block1',
            'block2',
            'block3',
            'block4',
            'block5',
            'block6',
            'block7',
            'block8',
            'prefix',
            'mask1',
            'mask2',
            'mask3',
            'mask4',
            'mask5',
            'mask6',
            'mask7',
            'mask8',
            'vlan'
        ]
        for network_send in networks:
            # Get all vlans of environment
            ids_vlans = self.search_all_vlans([network_send.get('env')])
            del network_send['env']
            # Creates networks v4
            network_send['vlan'] = ids_vlans[0]
            network = [{
                'block1': network_send.get('block1'),
                'block2': network_send.get('block2'),
                'block3': network_send.get('block3'),
                'block4': network_send.get('block4'),
                'block5': network_send.get('block5'),
                'block6': network_send.get('block6'),
                'block7': network_send.get('block7'),
                'block8': network_send.get('block8'),
                'prefix': network_send.get('prefix'),
                'vlan': network_send.get('vlan'),
                'network_type': 3,
                'environmentvip': None
            }]
            id_network = self.create_networkv6s(network)[0]['id']
            # Get object created
            url = '/api/v3/networkv6/%s/' % id_network
            url = prepare_url(url, fields=fields)
            response = self.client.get(
                url, HTTP_AUTHORIZATION=self.authorization
            )
            # Verify if object is right
            self.compare_values(
                json.dumps(network_send, sort_keys=True),
                json.dumps(response.data['networks'][0], sort_keys=True)
            )
    def create_networkv6s(self, netv6_dict):
        """POST the given network definitions to the v3 API; return the payload."""
        response = self.client.post(
            '/api/v3/networkv6/',
            data=json.dumps({'networks': netv6_dict}),
            content_type='application/json',
            HTTP_AUTHORIZATION=self.authorization
        )
        return response.data
    def create_netv6_without_octs(self):
        """Create v6 networks letting the API allocate the octets.

        Each request only fixes the environment (and optionally the prefix);
        the network expected to be auto-allocated for each request is listed,
        in order, in ``expected_networks`` below.
        """
        networks = [
            {
                'prefix': None,
                'env': 3,
                'network_type': 3,
                'environmentvip': None
            },
            {
                'prefix': None,
                'env': 4,
                'network_type': 3,
                'environmentvip': None
            },
            {
                'prefix': 64,
                'env': 4,
                'network_type': 3,
                'environmentvip': None
            },
            {
                'prefix': None,
                'env': 2,
                'network_type': 3,
                'environmentvip': None
            },
            {
                'prefix': None,
                'env': 2,
                'network_type': 3,
                'environmentvip': None
            },
            {
                'prefix': None,
                'env': 3,
                'network_type': 3,
                'environmentvip': None
            },
            {
                'prefix': None,
                'env': 2,
                'network_type': 3,
                'environmentvip': None
            },
            {
                'prefix': None,
                'env': 5,
                'network_type': 3,
                'environmentvip': None
            }
        ]
        expected_networks = [{
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1200',
            'block5': '0000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 64,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '0000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000'
        }, {
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1200',
            'block5': '0000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 65,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '8000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000'
        }, {
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1201',
            'block5': '0000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 64,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '0000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000'
        }, {
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1202',
            'block5': '8000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 65,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '8000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000'
        }, {
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1204',
            'block5': '0000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 65,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '8000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000'
        }, {
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1205',
            'block5': '0000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 64,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '0000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000'
        }, {
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1204',
            'block5': '8000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 65,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '8000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000'
        }, {
            'block1': 'fdbe',
            'block2': 'bebe',
            'block3': 'bebe',
            'block4': '1200',
            'block5': '0000',
            'block6': '0000',
            'block7': '0000',
            'block8': '0000',
            'prefix': 64,
            'mask1': 'ffff',
            'mask2': 'ffff',
            'mask3': 'ffff',
            'mask4': 'ffff',
            'mask5': '0000',
            'mask6': '0000',
            'mask7': '0000',
            'mask8': '0000'
        }]
        # Fields requested back from the API when re-fetching the network.
        fields = [
            'block1',
            'block2',
            'block3',
            'block4',
            'block5',
            'block6',
            'block7',
            'block8',
            'prefix',
            'mask1',
            'mask2',
            'mask3',
            'mask4',
            'mask5',
            'mask6',
            'mask7',
            'mask8',
            'vlan',
        ]
        for network_send, expected_network in izip(networks, expected_networks):
            # Get all vlans of environment
            ids_vlans = self.search_all_vlans([network_send.get('env')])
            # Creates networks v4
            network_send['vlan'] = ids_vlans[0]
            expected_network['vlan'] = ids_vlans[0]
            id_network = self.create_networkv6s([network_send])[0]['id']
            # Get object created
            url = '/api/v3/networkv6/%s/' % id_network
            url = prepare_url(url, fields=fields)
            response = self.client.get(
                url, HTTP_AUTHORIZATION=self.authorization
            )
            # Verify if object is right
            self.compare_values(
                json.dumps(expected_network, sort_keys=True),
                json.dumps(response.data['networks'][0], sort_keys=True)
            )
|
[
"django.test.client.Client",
"networkapi.util.geral.prepare_url",
"itertools.izip",
"json.dumps"
] |
[((1904, 1912), 'django.test.client.Client', 'Client', ([], {}), '()\n', (1910, 1912), False, 'from django.test.client import Client\n'), ((14418, 14451), 'itertools.izip', 'izip', (['networks', 'expected_networks'], {}), '(networks, expected_networks)\n', (14422, 14451), False, 'from itertools import izip\n'), ((4352, 4403), 'networkapi.util.geral.prepare_url', 'prepare_url', (['url'], {'search': 'search_vlan', 'fields': "['id']"}), "(url, search=search_vlan, fields=['id'])\n", (4363, 4403), False, 'from networkapi.util.geral import prepare_url\n'), ((7811, 7842), 'networkapi.util.geral.prepare_url', 'prepare_url', (['url'], {'fields': 'fields'}), '(url, fields=fields)\n', (7822, 7842), False, 'from networkapi.util.geral import prepare_url\n'), ((14886, 14917), 'networkapi.util.geral.prepare_url', 'prepare_url', (['url'], {'fields': 'fields'}), '(url, fields=fields)\n', (14897, 14917), False, 'from networkapi.util.geral import prepare_url\n'), ((8046, 8086), 'json.dumps', 'json.dumps', (['network_send'], {'sort_keys': '(True)'}), '(network_send, sort_keys=True)\n', (8056, 8086), False, 'import json\n'), ((8104, 8160), 'json.dumps', 'json.dumps', (["response.data['networks'][0]"], {'sort_keys': '(True)'}), "(response.data['networks'][0], sort_keys=True)\n", (8114, 8160), False, 'import json\n'), ((8309, 8345), 'json.dumps', 'json.dumps', (["{'networks': netv6_dict}"], {}), "({'networks': netv6_dict})\n", (8319, 8345), False, 'import json\n'), ((15121, 15165), 'json.dumps', 'json.dumps', (['expected_network'], {'sort_keys': '(True)'}), '(expected_network, sort_keys=True)\n', (15131, 15165), False, 'import json\n'), ((15183, 15239), 'json.dumps', 'json.dumps', (["response.data['networks'][0]"], {'sort_keys': '(True)'}), "(response.data['networks'][0], sort_keys=True)\n", (15193, 15239), False, 'import json\n')]
|
import re
import string
import unicodedata
from unstdlib.six import text_type, PY3, string_types, binary_type, u
from unstdlib.six.moves import xrange
# Name of the magic method that yields text: __str__ on Python 3,
# __unicode__ on Python 2.  Used by to_str/to_unicode below.
if PY3:
    text_type_magicmethod = "__str__"
else:
    text_type_magicmethod = "__unicode__"
from .random_ import random
__all__ = [
'random_string',
'number_to_string', 'string_to_number', 'number_to_bytes', 'bytes_to_number',
'dollars_to_cents',
'to_str', 'to_unicode', 'to_int', 'to_float',
'format_int',
'slugify',
]
class r(object):
    """Repr wrapper that prints a consistent u/b prefix on Python 2 and 3.

    ``repr`` of text and byte strings differs between interpreters; wrapping
    a value in ``r`` normalizes doctest output by always prefixing unicode
    values with ``u`` on Python 3 and byte strings with ``b`` on Python 2.
    """
    def __init__(self, val):
        self.val = val

    def __repr__(self):
        value = self.val
        raw = repr(value)
        if PY3 and isinstance(value, text_type):
            return 'u' + raw
        if not PY3 and isinstance(value, str):
            return 'b' + raw
        return raw
_Default = object()
def random_string(length=6, alphabet=string.ascii_letters+string.digits):
    """Return a random string of the given length drawn from *alphabet*.

    The default alphabet (letters + digits, i.e. base62) is URL-friendly.
    """
    chars = [random.choice(alphabet) for _ in xrange(length)]
    return ''.join(chars)
def number_to_string(n, alphabet):
    """Encode a non-negative integer ``n`` in the radix defined by *alphabet*.

    The position of each element in *alphabet* is its radix value; the
    returned string's digits are drawn from *alphabet*.  Zero encodes to the
    empty string.

    Examples::

        >>> number_to_string(12345678, '01')
        '101111000110000101001110'

        >>> number_to_string(12345678, 'ab')
        'babbbbaaabbaaaababaabbba'

    """
    base = len(alphabet)
    remaining = int(n)
    if remaining < 0:
        raise ValueError("invalid n (must be non-negative): %s", n)
    digits = []
    while remaining:
        digits.append(alphabet[remaining % base])
        remaining //= base
    digits.reverse()
    return ''.join(digits)
def string_to_number(s, alphabet):
    """Decode string ``s`` as an integer in the radix defined by *alphabet*.

    The position of each element in *alphabet* is its radix value.  Inverse
    of :func:`number_to_string`.

    Examples::

        >>> string_to_number('101111000110000101001110', '01')
        12345678

        >>> string_to_number('babbbbaaabbaaaababaabbba', 'ab')
        12345678

    """
    radix = len(alphabet)
    value_of = {ch: i for i, ch in enumerate(alphabet)}
    n = 0
    for ch in s:
        n = n * radix + value_of[ch]
    return n
def bytes_to_number(b, endian='big'):
    """Convert a byte string to an integer.

    :param b:
        String or bytearray to convert.

    :param endian:
        Byte order to convert from ('big' or 'little' endian-ness, default
        'big').

    Assumes bytes are 8 bits.  Special case of ``string_to_number`` with a
    full base-256 alphabet; reverse of ``number_to_bytes(n)``.

    Examples::

        >>> bytes_to_number(b'*')
        42
        >>> bytes_to_number(b'\\x01\\x00')
        256
    """
    data = bytearray(b)
    if endian == 'big':
        data.reverse()
    n = 0
    shift = 0
    for ch in data:
        # Each byte occupies its own 8-bit lane, so OR-ing them in place of
        # the original XOR produces an identical result.
        n |= ch << shift
        shift += 8
    return n
def number_to_bytes(n, endian='big'):
    """Convert an integer to the corresponding string of bytes.

    :param n:
        Integer to convert.

    :param endian:
        Byte order to convert into ('big' or 'little' endian-ness, default
        'big').

    Assumes bytes are 8 bits.  Special case of ``number_to_string`` with a
    full base-256 alphabet; reverse of ``bytes_to_number(b)``.  Zero converts
    to an empty byte string.
    """
    chunks = []
    remaining = n
    while remaining:
        remaining, ch = divmod(remaining, 256)
        # On Python 2 a byte is a one-character str; on 3 it is an int.
        chunks.append(ch if PY3 else chr(ch))
    if endian == 'big':
        chunks.reverse()
    return bytes(chunks) if PY3 else ''.join(chunks)
def to_str(obj, encoding='utf-8', **encode_args):
    r"""
    Returns a ``str`` of ``obj``, encoding using ``encoding`` if necessary. For
    example::

        >>> some_str = b"\xff"
        >>> some_unicode = u"\u1234"
        >>> some_exception = Exception(u'Error: ' + some_unicode)
        >>> r(to_str(some_str))
        b'\xff'
        >>> r(to_str(some_unicode))
        b'\xe1\x88\xb4'
        >>> r(to_str(some_exception))
        b'Error: \xe1\x88\xb4'
        >>> r(to_str([42]))
        b'[42]'

    Byte strings pass through untouched; text (or anything exposing the text
    magic method) is encoded with *encoding*; everything else is coerced.
    """
    # Note: On py3, ``b'x'.__str__()`` returns ``"b'x'"``, so we need to do the
    # explicit check first.
    if isinstance(obj, binary_type):
        return obj
    # We coerce to unicode if '__unicode__' is available because there is no
    # way to specify encoding when calling ``str(obj)``, so, eg,
    # ``str(Exception(u'\u1234'))`` will explode.
    if isinstance(obj, text_type) or hasattr(obj, text_type_magicmethod):
        # Note: unicode(u'foo') is O(1) (by experimentation)
        return text_type(obj).encode(encoding, **encode_args)
    return binary_type(obj)
def to_unicode(obj, encoding='utf-8', fallback='latin1', **decode_args):
    r"""
    Returns a ``unicode`` of ``obj``, decoding using ``encoding`` if necessary.
    If decoding fails, the ``fallback`` encoding (default ``latin1``) is used.

    Examples::

        >>> r(to_unicode(b'\xe1\x88\xb4'))
        u'\u1234'
        >>> r(to_unicode(b'\xff'))
        u'\xff'
        >>> r(to_unicode(u'\u1234'))
        u'\u1234'
        >>> r(to_unicode(Exception(u'\u1234')))
        u'\u1234'
        >>> r(to_unicode([42]))
        u'[42]'

    Non-bytes inputs are coerced to text first; bytes are decoded, retrying
    with *fallback* on ``UnicodeDecodeError``.
    """
    # Note: on py3, the `bytes` type defines an unhelpful "__str__" function,
    # so we need to do this check (see comments in ``to_str``).
    if not isinstance(obj, binary_type):
        if isinstance(obj, text_type) or hasattr(obj, text_type_magicmethod):
            return text_type(obj)
        obj_str = binary_type(obj)
    else:
        obj_str = obj
    try:
        return text_type(obj_str, encoding, **decode_args)
    except UnicodeDecodeError:
        # latin1 (or the caller-supplied fallback) maps every byte value,
        # so this second attempt cannot fail for single-byte fallbacks.
        return text_type(obj_str, fallback, **decode_args)
def to_int(s, default=0):
    """Convert *s* to an ``int``; on failure return *default*.

    Examples::

        >>> to_int('1')
        1

        >>> to_int('')
        0

        >>> to_int(None, default='Empty')
        'Empty'
    """
    try:
        result = int(s)
    except (TypeError, ValueError):
        return default
    return result
# The two infinities, used to reject non-finite floats below.
_infs = {float("inf"), float("-inf")}

def to_float(s, default=0.0, allow_nan=False):
    """Convert *s* to a ``float``; on failure return *default*.

    By default ``allow_nan=False``, so ``nan``, ``inf`` and ``-inf`` are
    also replaced by *default*.

    Examples::

        >>> to_float('1.5')
        1.5

        >>> to_float('nan')
        0.0

        >>> to_float('-inf', allow_nan=True)
        -inf

        >>> to_float(None, default='Empty')
        'Empty'
    """
    try:
        result = float(s)
    except (TypeError, ValueError):
        return default
    if allow_nan:
        return result
    # ``x != x`` is the classic NaN test.
    if result != result or result in _infs:
        return default
    return result
def format_int(n, singular=_Default, plural=_Default):
    """Format *n* using the matching singular/plural template.

    Returns ``singular.format(n)`` when ``n == 1`` (or when no plural
    template applies), otherwise ``plural.format(n)``.  When *singular* is
    omitted the default template is ``u"{:,}"``; when only *singular* is
    given, *plural* defaults to ``singular + u"s"``.

    :param n: Integer which determines pluralness.
    :param singular: Template with a ``format()`` placeholder for *n*.
    :param plural: Template with a ``format()`` placeholder for *n*.
    """
    n = int(n)
    if singular in (None, _Default):
        singular = u'{:,}'
        if plural is _Default:
            plural = None
    elif plural is _Default:
        plural = singular + u's'
    use_singular = (n == 1) or not plural
    template = singular if use_singular else plural
    return template.format(n)
# Characters that may appear in a (possibly scientific-notation) number.
RE_NUMBER = re.compile(r'[\d\.\-eE]+')

def dollars_to_cents(s, allow_negative=False):
    """Convert a dollar amount (string or number) to an integer cent count.

    Non-numeric characters are stripped before conversion, so inputs such
    as ``'$1'`` or ``'1 dollar'`` work.  Falsy input returns ``None``.

    Examples::

        >>> dollars_to_cents('$1')
        100

        >>> dollars_to_cents('1e2')
        10000

        >>> dollars_to_cents('-1$', allow_negative=True)
        -100
    """
    # TODO: Implement cents_to_dollars
    if not s:
        return
    if isinstance(s, string_types):
        s = ''.join(RE_NUMBER.findall(s))
    cents = int(round(float(s) * 100))
    if cents < 0 and not allow_negative:
        raise ValueError('Negative values not permitted.')
    return cents
# One-or-more non-word characters: each run collapses to a single delimiter.
RE_SLUG = re.compile(r'\W+')

def slugify(s, delimiter='-'):
    """
    Normalize `s` into ASCII and replace non-word characters with `delimiter`.
    """
    decomposed = unicodedata.normalize('NFKD', to_unicode(s))
    ascii_text = decomposed.encode('ascii', 'ignore').decode('ascii')
    return RE_SLUG.sub(delimiter, ascii_text).strip(delimiter).lower()
if __name__ == "__main__":
    # Run this module's doctests when executed directly.
    import doctest
    doctest.testmod(optionflags=doctest.ELLIPSIS)
|
[
"unstdlib.six.moves.xrange",
"unstdlib.six.binary_type",
"unstdlib.six.text_type",
"doctest.testmod",
"re.compile"
] |
[((9514, 9542), 're.compile', 're.compile', (['"""[\\\\d\\\\.\\\\-eE]+"""'], {}), "('[\\\\d\\\\.\\\\-eE]+')\n", (9524, 9542), False, 'import re\n'), ((10478, 10496), 're.compile', 're.compile', (['"""\\\\W+"""'], {}), "('\\\\W+')\n", (10488, 10496), False, 'import re\n'), ((5771, 5787), 'unstdlib.six.binary_type', 'binary_type', (['obj'], {}), '(obj)\n', (5782, 5787), False, 'from unstdlib.six import text_type, PY3, string_types, binary_type, u\n'), ((10833, 10878), 'doctest.testmod', 'doctest.testmod', ([], {'optionflags': 'doctest.ELLIPSIS'}), '(optionflags=doctest.ELLIPSIS)\n', (10848, 10878), False, 'import doctest\n'), ((6697, 6713), 'unstdlib.six.binary_type', 'binary_type', (['obj'], {}), '(obj)\n', (6708, 6713), False, 'from unstdlib.six import text_type, PY3, string_types, binary_type, u\n'), ((6771, 6814), 'unstdlib.six.text_type', 'text_type', (['obj_str', 'encoding'], {}), '(obj_str, encoding, **decode_args)\n', (6780, 6814), False, 'from unstdlib.six import text_type, PY3, string_types, binary_type, u\n'), ((2723, 2738), 'unstdlib.six.moves.xrange', 'xrange', (['(0)', 'base'], {}), '(0, base)\n', (2729, 2738), False, 'from unstdlib.six.moves import xrange\n'), ((6663, 6677), 'unstdlib.six.text_type', 'text_type', (['obj'], {}), '(obj)\n', (6672, 6677), False, 'from unstdlib.six import text_type, PY3, string_types, binary_type, u\n'), ((6861, 6904), 'unstdlib.six.text_type', 'text_type', (['obj_str', 'fallback'], {}), '(obj_str, fallback, **decode_args)\n', (6870, 6904), False, 'from unstdlib.six import text_type, PY3, string_types, binary_type, u\n'), ((1191, 1205), 'unstdlib.six.moves.xrange', 'xrange', (['length'], {}), '(length)\n', (1197, 1205), False, 'from unstdlib.six.moves import xrange\n'), ((5712, 5726), 'unstdlib.six.text_type', 'text_type', (['obj'], {}), '(obj)\n', (5721, 5726), False, 'from unstdlib.six import text_type, PY3, string_types, binary_type, u\n')]
|
import pytest
import snappi
def test_snappi_lists(api):
    """Validate SnappiList object indexing and unpacking
    """
    config = api.config()
    # flow(name=...) returns the flow container (FlowIter), not the flow.
    flows = config.flows.flow(name='1')
    assert(flows.__class__ == snappi.FlowIter)
    flow = flows[0]
    flow.tx_rx.port.tx_name = 'p1'
    assert(flow.__class__ == snappi.Flow)
    # Chained header factories unpack into the individual header objects.
    eth, vlan, vlan1 = flow.packet.ethernet().vlan().vlan()
    vlan.id.value = 1
    # [-1] after flow(...) yields the most recently appended flow.
    flow = config.flows.flow(name='2')[-1]
    flow.tx_rx.port.tx_name = 'p1'
    assert(flow.__class__ == snappi.Flow)
    eth, vlan, ipv4, tcp = flow.packet.ethernet().vlan().ipv4().tcp()
    assert(eth.__class__ == snappi.FlowEthernet)
    assert(vlan.__class__ == snappi.FlowVlan)
    assert(ipv4.__class__ == snappi.FlowIpv4)
    assert(tcp.__class__ == snappi.FlowTcp)
    vlan.id.value = 2
    flow = config.flows.flow(name='3')[-1]
    flow.tx_rx.port.tx_name = 'p1'
    # Without unpacking, the chain yields the header iterator itself.
    pkt = flow.packet.ethernet().vlan()
    assert(pkt.__class__ == snappi.FlowHeaderIter)
    vlan = pkt[-1]
    vlan.id.value = 3
    flow = config.flows.flow(name='4')[-1]
    flow.tx_rx.port.tx_name = 'p1'
    vlan = flow.packet.ethernet().vlan()[-1]
    assert(vlan.__class__ == snappi.FlowVlan)
    vlan.id.value = 4
    print(config)
    api.set_config(config)
    # Verify the final contents of the configuration built above.
    assert (len(config.flows) == 4)
    assert (config.flows[0].name == '1')
    assert (config.flows[1].name == '2')
    assert (config.flows[2].name == '3')
    assert (config.flows[3].name == '4')
    assert (len(config.flows[0].packet) == 3)
    assert (len(config.flows[1].packet) == 4)
    assert (len(config.flows[2].packet) == 2)
    assert (len(config.flows[3].packet) == 2)
    assert (config.flows[0].packet[1].id.value == 1)
    assert (config.flows[1].packet[1].id.value == 2)
    assert (config.flows[2].packet[1].id.value == 3)
    assert (config.flows[3].packet[1].id.value == 4)
if __name__ == '__main__':
    # Allow running this test module directly, outside a pytest invocation.
    pytest.main(['-vv', '-s', __file__])
|
[
"pytest.main"
] |
[((1897, 1933), 'pytest.main', 'pytest.main', (["['-vv', '-s', __file__]"], {}), "(['-vv', '-s', __file__])\n", (1908, 1933), False, 'import pytest\n')]
|
import sys
def dprint(*args, **kwargs):
    """Debug print: forwards to ``print`` but writes to stderr, keeping
    stdout clean for the generated assembly."""
    print(*args, file=sys.stderr, **kwargs)
def read():
    """Return the next single character from stdin ('' at end of input)."""
    ch = sys.stdin.read(1)
    #dprint(f"Read byte '{ch[0]}'")
    return ch
def clean_read():
    """Read characters from stdin, skipping spaces and newlines, and return
    the first non-whitespace character (or '' at end of input)."""
    while True:
        ch = read()
        #dprint(f"clean_read: '{ch}'")
        if ch not in ('\n', ' '):
            return ch
def readword_finish(s):
    """Extend *s* with characters from stdin until whitespace or end of
    input, then return the accumulated word."""
    word = s
    while True:
        ch = read()
        #dprint(f"readword: '{word}', '{ch}'")
        if ch in ('\n', ' ', ''):
            return word
        word += ch
def readword():
    """Read the next whitespace-delimited word from stdin ('' at EOF)."""
    return readword_finish(clean_read())
def readint():
    """Read an integer token: 'x'-prefixed hex, quote-prefixed character
    literal, or plain decimal."""
    s = read()
    #dprint(f'readint: {s}')
    if s == 'x':
        # 'x' prefix: the rest of the word is hexadecimal.
        return int(readword(), 16)
    if s == '\'':
        # single-quote prefix: the value is the next character's ordinal.
        return ord(read()[0])
    return int(readword_finish(s), 10)
# x86-64 general-purpose registers, indexed by the numeric register
# operands that appear in the opcode stream (0 == rax ... 7 == rdi).
reg = [
    "rax", #0
    "rcx", #1
    "rdx", #2
    "rbx", #3
    "rsp", #4
    "rbp", #5
    "rsi", #6
    "rdi", #7
]
def translate():
    """Translate opcode words from stdin into NASM assembly on stdout.

    Loops until readword() returns '' (end of input).  Register operands
    are integers indexing the ``reg`` table; label operands are words.
    The emitted f-strings are flush-left so the assembly has no leading
    indentation.
    """
    while True:
        op = readword()
        #dprint(f"opcode: '{op}'")
        if op == '!':  # system call
            print(f"""
syscall
""")
        elif op == 'A':  # load address of a label into a register
            dest = readint()
            lbl = readword()
            print(f"""
lea {reg[dest]}, [{lbl}]
""")
        elif op == 'B':  # emit a literal byte
            print(f"""
db {readint()}
""")
        elif op == 'b':  # zero-extended byte load from memory
            dest = readint()
            src = readint()
            print(f"""
movzx {reg[dest]}, byte[{reg[src]}]
""")
        elif op == 'C':  # call a label
            lbl = readword()
            print(f"""
call {lbl}
""")
        elif op == 'D':  # define a label
            lbl = readword()
            print(f"""
{lbl}:
""")
        elif op == 'E':  # jump if equal
            lbl = readword()
            print(f"""
je {lbl}
""")
        elif op == 'e':  # emit the program entry-point label
            print(f"""
_start:
""")
        elif op == 'I':  # emit a literal quadword
            value = readint()
            print(f"""
dq {value}
""")
        elif op == 'J':  # unconditional jump
            lbl = readword()
            print(f"""
jmp {lbl}
""")
        elif op == 'L':  # load a quadword from memory
            dest = readint()
            src = readint()
            print(f"""
mov {reg[dest]}, [{reg[src]}]
""")
        elif op == 'l':  # shift left by an immediate
            r = readint()
            num = readint()
            print(f"""
shl {reg[r]}, {num}
""")
        elif op == 'M':  # compare register with immediate
            a = readint()
            b = readint()
            print(f"""
cmp {reg[a]}, {b}
""")
        elif op == 'm':  # compare register with register
            a = readint()
            b = readint()
            print(f"""
cmp {reg[a]}, {reg[b]}
""")
        elif op == 'N':  # jump if not equal
            lbl = readword()
            print(f"""
jne {lbl}
""")
        elif op == 'P':  # reserve a zero-filled 0x1000-byte block
            print(f"""
times 0x1000 db 0x00
""")
        elif op == 'Q':  # emit a quadword holding a label address
            lbl = readword()
            print(f"""
dq {lbl}
""")
        elif op == 'R':  # return
            print(f"""
ret
""")
        elif op == 'S':  # store register to memory (operand order: src, dest)
            src = readint()
            dest = readint()
            print(f"""
mov [{reg[dest]}], {reg[src]}
""")
        elif op == 'W':  # dest = src + index register (via lea)
            dest = readint()
            src = readint()
            value = readint()
            print(f"""
lea {reg[dest]}, [{reg[src]} + {reg[value]}]
""")
        elif op == 'w':  # subtract register from register
            dest = readint()
            src = readint()
            print(f"""
sub {reg[dest]}, {reg[src]}
""")
        elif op == '<':  # pop into register
            print(f"""
pop {reg[readint()]}
""")
        elif op == '=':  # load immediate into register
            dest = readint()
            value = readint()
            print(f"""
mov {reg[dest]}, {value}
""")
        elif op == '>':  # push register
            print(f"""
push {reg[readint()]}
""")
        elif op == '-':  # dest = src - immediate (via lea)
            dest = readint()
            src = readint()
            value = readint()
            print(f"""
lea {reg[dest]}, [{reg[src]} - {value}]
""")
        elif op == '+':  # dest = src + immediate (via lea)
            dest = readint()
            src = readint()
            value = readint()
            print(f"""
lea {reg[dest]}, [{reg[src]} + {value}]
""")
        elif op == '':  # end of input: stop translating
            return
        else:
            dprint(f"ERR: Unknwon op: '{op}'!")
            sys.exit(1)
if __name__ == '__main__':
    # Emit the NASM file header, then translate opcodes from stdin to stdout.
    print(f"""
[bits 64]
[section .memes]
extern _start
""")
    translate()
|
[
"sys.stdin.read",
"sys.exit"
] |
[((106, 123), 'sys.stdin.read', 'sys.stdin.read', (['(1)'], {}), '(1)\n', (120, 123), False, 'import sys\n'), ((4725, 4736), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4733, 4736), False, 'import sys\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
pyFuckery - memory.py
Created on 2/12/17.
Memory object implementation. Provides memory bounds checking, as well as value enforcement.
"""
# Stdlib
import argparse
import hashlib
import json
import logging
import sys
# Third Party Code
import msgpack
# Custom Code
from fuckery.constants import DEFAULT_MEMORY_SIZE
from fuckery.constants import MEMORY_MAX_VALUE
from fuckery.constants import MEMORY_MIN_VALUE
from fuckery.exc import AddressError
from fuckery.exc import StorageError
log = logging.getLogger(__name__)
class Storage(object):
    """Bounds- and type-checked cell storage for the Brainfuck VM.

    Cells are addressed 0..n-1, pre-initialized to zero, and every write is
    validated against the configured min/max value range.
    """
    def __init__(self, n=DEFAULT_MEMORY_SIZE):
        """Create *n* memory cells, all initialized to zero.

        :param n: Number of memory cells to create.
        """
        self.n = n
        self.min = MEMORY_MIN_VALUE
        self.max = MEMORY_MAX_VALUE
        self.mem = dict.fromkeys(range(n), 0x00)

    @property
    def mem_hash(self):
        """MD5 hex digest of the serialized memory state.

        Note - computing this frequently can be expensive, as the whole
        memory section is serialized via msgpack.dumps() on every access.
        Relies on insertion-ordered dicts (CPython 3.6 behavior, part of
        the language spec from 3.7), which holds because ``self.mem`` is
        pre-populated in address order at construction time.
        """
        return hashlib.md5(msgpack.dumps(self.mem)).hexdigest()

    def __contains__(self, item):
        # An address is valid iff it is a key of the backing dict.
        return item in self.mem

    def __len__(self):
        return len(self.mem)

    def get(self, addr):
        """Return the value stored at *addr*.

        :param addr: Memory address to retrieve.
        :raises AddressError: if *addr* is not a valid cell address.
        """
        if addr not in self:
            raise AddressError(f'Address is invalid: {addr}')
        return self.mem.get(addr)

    def set(self, addr, value):
        """Store *value* at *addr* after validating both.

        :param addr: Memory address to set.
        :param value: Integer value to store.
        :raises AddressError: if *addr* is not a valid cell address.
        :raises StorageError: if *value* is not an int or is out of bounds.
        """
        if addr not in self:
            raise AddressError(f'Address is invalid: {addr}')
        if not isinstance(value, int):
            raise StorageError(f'Value is not an int: {type(value)}')
        if value < self.min or value > self.max:
            raise StorageError(f'Value is out of size bounds: {value}')
        self.mem[addr] = value
# noinspection PyMissingOrEmptyDocstring
def main(options):  # pragma: no cover
    """Demo runner: exercise Storage get/set, log the results, and exit.

    :param options: Parsed argparse namespace (reads ``options.verbose``).
    """
    if not options.verbose:
        logging.disable(logging.DEBUG)
    m = Storage(n=25)
    v = m.get(0)
    log.info(f'm[0] is {v}')
    m.set(24, 1)
    # BUG FIX: this previously re-read address 0, so the log line claimed to
    # show m[24] while actually printing m[0].  Read back the cell just set.
    v = m.get(24)
    log.info(f'm[24] is {v}')
    sys.exit(0)
# noinspection PyMissingOrEmptyDocstring
def makeargpaser():  # pragma: no cover
    """Build and return the command-line parser for the storage runner."""
    cli = argparse.ArgumentParser(description="Memory / Storage runner.")
    cli.add_argument(
        '-v', '--verbose',
        dest='verbose',
        default=False,
        action='store_true',
        help='Enable verbose output',
    )
    return cli
def _main():  # pragma: no cover
    # Configure root logging before arg parsing so all output is formatted.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(funcName)s]')
    p = makeargpaser()
    opts = p.parse_args()
    main(opts)
# Script entry point: configure logging, parse arguments, run the demo.
if __name__ == '__main__':  # pragma: no cover
    _main()
|
[
"fuckery.exc.StorageError",
"argparse.ArgumentParser",
"logging.basicConfig",
"fuckery.exc.AddressError",
"logging.getLogger",
"logging.disable",
"msgpack.dumps",
"sys.exit"
] |
[((544, 571), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (561, 571), False, 'import logging\n'), ((2907, 2918), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2915, 2918), False, 'import sys\n'), ((3015, 3078), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Memory / Storage runner."""'}), "(description='Memory / Storage runner.')\n", (3038, 3078), False, 'import argparse\n'), ((3285, 3408), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(funcName)s]"""'}), "(level=logging.DEBUG, format=\n '%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(funcName)s]')\n", (3304, 3408), False, 'import logging\n'), ((2739, 2769), 'logging.disable', 'logging.disable', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (2754, 2769), False, 'import logging\n'), ((1985, 2028), 'fuckery.exc.AddressError', 'AddressError', (['f"""Address is invalid: {addr}"""'], {}), "(f'Address is invalid: {addr}')\n", (1997, 2028), False, 'from fuckery.exc import AddressError\n'), ((2316, 2359), 'fuckery.exc.AddressError', 'AddressError', (['f"""Address is invalid: {addr}"""'], {}), "(f'Address is invalid: {addr}')\n", (2328, 2359), False, 'from fuckery.exc import AddressError\n'), ((2536, 2589), 'fuckery.exc.StorageError', 'StorageError', (['f"""Value is out of size bounds: {value}"""'], {}), "(f'Value is out of size bounds: {value}')\n", (2548, 2589), False, 'from fuckery.exc import StorageError\n'), ((1594, 1617), 'msgpack.dumps', 'msgpack.dumps', (['self.mem'], {}), '(self.mem)\n', (1607, 1617), False, 'import msgpack\n')]
|
"""Admin for the ``test_app`` app."""
from django.contrib import admin
from .models import DummyProfileModel
admin.site.register(DummyProfileModel)
|
[
"django.contrib.admin.site.register"
] |
[((112, 150), 'django.contrib.admin.site.register', 'admin.site.register', (['DummyProfileModel'], {}), '(DummyProfileModel)\n', (131, 150), False, 'from django.contrib import admin\n')]
|
import datetime
import logging
import h5py
from .file_analyzer import FileAnalyzer
from .flux_calibration_data_analyzer import FluxCalibrationDataAnalyzer
module_logger = logging.getLogger(__name__)
class FluxCalibrationFileAnalyzer(FileAnalyzer):
def __init__(self, file_path):
super(FluxCalibrationAnalyzer, self).__init__()
self.file_path = file_path
self.file_name = os.path.basename(self.file_path)
self.timestamp = "2018-" + \
self.file_name.replace(".hdf5", "").split("_")[-1]
self.timestamp_obj = datetime.datetime.strptime(
self.timestamp, "%Y-%j-%Hh%Mm%Ss")
self.meta_data["timestamp"] = self.timestamp
self.meta_data["file_path"] = self.file_path
self.meta_data["file_name"] = self.file_name
def load_data(self):
calib_data = {}
with h5py.File(self.file_path, "r") as f:
for key in list(f.keys()):
calib_data[key] = f[key][...]
self.data = FluxCalibrationDataAnalyzer(calib_data)
def load_meta_data(self):
pass
def report_meta_data(self, header=False,
line_delimiter=", ", delimiter="\n"):
return ""
# this is not used here
def plot(self, save_dir=None, overwrite=True, ax=None):
save_file_path, save_file_name = self._plot_save_path(save_dir)
if not self._check_existing(save_file_path, overwrite):
return
fig = None
if ax is None:
fig, ax = plt.subplots(1, 1, figsize=(10, 10/1.3))
self.data.plot(ax=ax)
suptitle = save_file_name + "\n" + self.report_meta_data(
header=False, line_delimiter=", ", delimiter="\n")
if fig is not None:
fig.suptitle(suptitle)
top = 0.98-(0.03*float(len(suptitle.split('\n'))))
fig.tight_layout(rect=[0, 0.03, 1, top])
fig.savefig(save_file_path)
plt.close(fig)
|
[
"datetime.datetime.strptime",
"h5py.File",
"logging.getLogger"
] |
[((175, 202), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (192, 202), False, 'import logging\n'), ((568, 629), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['self.timestamp', '"""%Y-%j-%Hh%Mm%Ss"""'], {}), "(self.timestamp, '%Y-%j-%Hh%Mm%Ss')\n", (594, 629), False, 'import datetime\n'), ((865, 895), 'h5py.File', 'h5py.File', (['self.file_path', '"""r"""'], {}), "(self.file_path, 'r')\n", (874, 895), False, 'import h5py\n')]
|
import pandas as pd
import json
file = '~/stations.xlsx'
sheet_name = 'Data'
scenarios = ['A', 'B', 'C', 'D', 'E']
flat_rate = 0.3
battery_rate = 0.7
length_time_interval = 120
stations = {}
def read_excel():
df_stations = pd.read_excel(file, sheet_name)
for index, row in df_stations.iterrows():
interval_scenarios = {}
for scenario in scenarios:
init_load = round(battery_rate * float(row[scenario+'_start_load']), 3)
init_flat_load = round(flat_rate * float(row[scenario + '_start_load']), 3)
incoming_battery_rate = round(battery_rate * float(row[scenario + '_incoming'])/length_time_interval, 3)
incoming_flat_rate = round(flat_rate * float(row[scenario + '_incoming'])/length_time_interval, 3)
outgoing_rate = round(float(row[scenario + '_outgoing_rate']) / length_time_interval, 3)
demand = calculate_demand(float(row[scenario + '_outgoing_rate']), row[scenario+'_empty'])
interval_scenarios[scenario] = [init_load, init_flat_load, incoming_battery_rate, incoming_flat_rate,
outgoing_rate, demand]
stations[int(row['Station_ID'])] = [row['Latitude'], row['Longitude'], interval_scenarios]
def calculate_demand(trips, empty_time):
demand_rate = trips / (length_time_interval - empty_time)
return round(demand_rate, 2)
def write_json(json_element):
with open('station.json', 'w') as fp:
json.dump(json_element, fp)
read_excel()
write_json(stations)
|
[
"pandas.read_excel",
"json.dump"
] |
[((231, 262), 'pandas.read_excel', 'pd.read_excel', (['file', 'sheet_name'], {}), '(file, sheet_name)\n', (244, 262), True, 'import pandas as pd\n'), ((1480, 1507), 'json.dump', 'json.dump', (['json_element', 'fp'], {}), '(json_element, fp)\n', (1489, 1507), False, 'import json\n')]
|
from utils import Semaphore, Thread, Buffer, execution_manager
import random
import time
class WaitForEvent:
def __init__(self, data):
self.data = data
def process(self):
print("Finished consuming event {}".format(self.data))
mutex = Semaphore(1)
items = Semaphore(0)
buffer = Buffer()
def delay(n=1):
return time.sleep(random.random() * n)
def Producer():
global buffer
data = 0
while True:
delay(2)
event = WaitForEvent(data)
print("Producing event {}".format(event.data))
mutex.wait()
buffer.add(event)
mutex.signal()
items.signal()
data += 1
def Consumer():
global buffer
while True:
items.wait()
mutex.wait()
event = buffer.get()
mutex.signal()
print("Consuming event {}".format(event.data))
delay()
event.process()
#execution_manager() # Forces a global parent thread
Thread(Producer)
Thread(Consumer)
|
[
"utils.Buffer",
"random.random",
"utils.Thread",
"utils.Semaphore"
] |
[((262, 274), 'utils.Semaphore', 'Semaphore', (['(1)'], {}), '(1)\n', (271, 274), False, 'from utils import Semaphore, Thread, Buffer, execution_manager\n'), ((283, 295), 'utils.Semaphore', 'Semaphore', (['(0)'], {}), '(0)\n', (292, 295), False, 'from utils import Semaphore, Thread, Buffer, execution_manager\n'), ((305, 313), 'utils.Buffer', 'Buffer', ([], {}), '()\n', (311, 313), False, 'from utils import Semaphore, Thread, Buffer, execution_manager\n'), ((954, 970), 'utils.Thread', 'Thread', (['Producer'], {}), '(Producer)\n', (960, 970), False, 'from utils import Semaphore, Thread, Buffer, execution_manager\n'), ((971, 987), 'utils.Thread', 'Thread', (['Consumer'], {}), '(Consumer)\n', (977, 987), False, 'from utils import Semaphore, Thread, Buffer, execution_manager\n'), ((354, 369), 'random.random', 'random.random', ([], {}), '()\n', (367, 369), False, 'import random\n')]
|
from ctypes import Structure, Union, c_char_p, c_double, c_int, c_byte, \
c_long, POINTER
c_byte_p = POINTER(c_byte)
class MBusString(Structure):
_fields_ = [
('value', c_byte_p),
('size', c_int),
]
class MBusValue(Union):
_fields_ = [
('real_val', c_double),
('str_val', MBusString),
]
class MBusRecord(Structure):
_fields_ = [
('value', MBusValue),
('is_numeric', c_byte),
('unit', c_char_p),
('function_medium', c_char_p),
('quantity', c_char_p),
('device', c_int),
('tariff', c_long),
('storage_number', c_long),
]
|
[
"ctypes.POINTER"
] |
[((110, 125), 'ctypes.POINTER', 'POINTER', (['c_byte'], {}), '(c_byte)\n', (117, 125), False, 'from ctypes import Structure, Union, c_char_p, c_double, c_int, c_byte, c_long, POINTER\n')]
|
from rest_framework import routers
from . import ecommerce_viewsets
router = routers.DefaultRouter()
router.register(
r"ecommerce/purchase", ecommerce_viewsets.PurchasesViewSet, basename="api-purchases"
)
urlpatterns = router.urls
|
[
"rest_framework.routers.DefaultRouter"
] |
[((79, 102), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (100, 102), False, 'from rest_framework import routers\n')]
|
# Copyright (c) 2018 Georgia Tech Research Corporation
# Distributed under the terms of the BSD-3-Clause License
""" Completion implementations
"""
# pylint: disable=W0613,C0330,R0913,W0703,R0914
import re
from typing import List, Tuple
from IPython.core.completerlib import get_root_modules
from robot.libraries import STDLIBS
from robot.parsing.datarow import DataRow
from robot.parsing.robotreader import RobotReader
from .completer import Completer
TABLE_NAMES = ["Keywords", "Settings", "Tasks", "Test Cases", "Variables"]
TABLE_NAMES = ["Keywords", "Settings", "Tasks", "Test Cases", "Variables"]
RE_TABLE_NAME = (
r"^\*+ *(?P<name>settings?|(user )?keywords?|test cases?|variables?|tasks?) *\*+$"
)
RE_SEP = r"\|| {2,}|\t"
DEFAULT_SEP = " "
SUITE_SETTINGS = [
"Default Tags",
"Documentation",
"Force Tags",
"Library",
"Metadata",
"Resource",
"Suite Setup",
"Suite Teardown",
"Task Setup",
"Task Teardown",
"Task Template",
"Task Timeout",
"Test Setup",
"Test Teardown",
"Test Template",
"Test Timeout",
"Variables",
]
CASE_SETTINGS = ["Documentation", "Setup", "Tags", "Teardown", "Template", "Timeout"]
KEYWORD_SETTINGS = [
"Documentation",
"Tags",
"Teardown",
"Timeout",
"Arguments",
"Return",
]
# lowercase line-starting tokens that trigger keyword completion
RE_PRE_KEYWORD_SUITE = r"^((suite|test|task) (setup|teardown)|(test|task) template)$"
RE_PRE_KEYWORD_BRACKET = r"^\[ *(setup|teardown|template) *\]$"
def get_default_completion_finders():
""" The default ordering of completers, roughly from cheapest to most dear
"""
return [
complete_cell_magics,
complete_tables,
complete_libraries,
complete_settings,
complete_variables,
complete_keywords,
]
def complete_cell_magics(
completer: Completer,
line: str,
code: str,
cursor_pos: int,
line_cursor: int,
offset: int,
history: List[str],
):
""" Complete with all defined magics
"""
if not offset and line.startswith("%"):
matches = (
[
f"%%{name}"
for name in completer.parent.robot_magics["cell"]
if name.startswith(line.replace("%", "").strip())
],
)
return (
[
{"start": offset, "end": offset + len(line), "type": "magic", "text": m}
for m in matches
],
)
def complete_tables(
completer: Completer,
line: str,
code: str,
cursor_pos: int,
line_cursor: int,
offset: int,
history: List[str],
) -> Tuple[List[str], List[dict]]:
""" Complete table names
"""
matches = []
if line.startswith("*"):
no_star = line.replace("*", "").lower().strip()
for name in TABLE_NAMES:
if not no_star or name.lower().startswith(no_star):
matches.append(f"*** {name} ***\n")
elif line.startswith("| *"):
no_star = line.replace("*", "").replace("|", "").lower().strip()
for name in TABLE_NAMES:
if not no_star or name.lower().startswith(no_star):
matches.append(f"| *** {name} *** |\n")
return (
matches,
[
{"start": offset, "end": offset + len(line), "type": "table", "text": m}
for m in matches
],
)
def complete_settings(
completer: Completer,
line: str,
code: str,
cursor_pos: int,
line_cursor: int,
offset: int,
history: List[str],
) -> Tuple[List[str], List[dict]]:
""" Complete settings
"""
matches = []
row = DataRow(RobotReader.split_row(line[:line_cursor]))
tokens = row.data
current_table = find_current_table(code, cursor_pos)
if current_table is None:
return matches, []
bracket = False
settings = None
if "etting" in current_table:
settings = SUITE_SETTINGS
elif "test case" in current_table or "task" in current_table:
settings, bracket = CASE_SETTINGS, True
elif "keyword" in current_table:
settings, bracket = KEYWORD_SETTINGS, True
if not settings:
return matches, []
matches = complete_table_settings(completer, settings, tokens[-1], bracket)
post = ""
if bracket and not line.strip()[-1] == "]":
post = "]"
post += " | " if line.startswith("|") else " "
elif not bracket:
post += " | " if line.startswith("|") else " "
matches = [
f"{line[:line_cursor - (len(tokens[-1]))]}{match}{post}" for match in matches
]
return (
matches,
[
{
"start": cursor_pos,
"end": offset + len(line),
"type": "setting",
"text": m,
}
for m in matches
],
)
def complete_variables(
completer: Completer,
line: str,
code: str,
cursor_pos: int,
line_cursor: int,
offset: int,
history: List[str],
) -> Tuple[List[str], List[dict]]:
""" Complete variable references
These aren't particularly clever in terms of scope.
"""
matches = []
if not re.findall(r"[\$&@%]", line):
return matches, []
try:
frag = re.findall(r".*([\$%&@]\{[^{}]*$)", line[:line_cursor])[0]
frag_type = frag[0]
frag_start = frag[2:]
except Exception:
return matches, []
try:
if line[line_cursor] == "}":
trail = ""
except Exception:
trail = "}"
for var in find_all_variable_names(code, history, frag_type):
if frag_start.lower() in var.lower():
matches += [line.split(frag)[0] + frag_type + "{" + var + trail]
return (
matches,
[
{
"start": cursor_pos,
"end": offset + len(line),
"type": "variable",
"text": m,
}
for m in matches
],
)
def complete_libraries(
completer: Completer,
line: str,
code: str,
cursor_pos: int,
line_cursor: int,
offset: int,
history: List[str],
) -> Tuple[List[str], List[dict]]:
""" Complete library names
This could do better with sub-modules.
"""
matches = []
row = DataRow(RobotReader.split_row(line))
tokens = row.data
if not re.findall(r"\* *settings", code.lower(), flags=re.I):
return matches, []
if not tokens or tokens[0].lower() != "library":
return matches, []
for lib in list(STDLIBS) + list(get_root_modules()):
if tokens[1].lower() in lib.lower():
pre = line.split(tokens[1])[0]
if line.startswith("|"):
matches += [f"""{pre}{lib} | """]
else:
matches += [f"""{pre}{lib} """]
return (
matches,
[
{
"start": cursor_pos,
"end": offset + len(line),
"type": "library",
"text": m,
}
for m in matches
],
)
def complete_keywords(
completer: Completer,
line: str,
code: str,
cursor_pos: int,
line_cursor: int,
offset: int,
history: List[str],
) -> Tuple[List[str], List[dict]]:
""" Complete keywords from all imported libraries
"""
matches = []
row = DataRow(RobotReader.split_row(line))
tokens = row.data
if len(tokens) < 2:
return matches, []
if len(tokens) == 2 and not tokens[0].strip():
kw_token = tokens[1]
elif re.match(RE_PRE_KEYWORD_SUITE, tokens[0], flags=re.I) is not None:
kw_token = tokens[1]
elif re.match(RE_PRE_KEYWORD_BRACKET, tokens[1], flags=re.I) is not None:
kw_token = tokens[2]
else:
return matches, []
bdd = None
orig_kw_token = kw_token
bdd_token = re.match(r"^(given|when|then|and|but)?\b *(.*)", kw_token, flags=re.I)
if bdd_token is not None:
bdd, kw_token = bdd_token.groups()
for doc in completer.docs(history).values():
for keyword in getattr(doc, "keywords", []):
if kw_token.lower() in keyword.name.lower():
suggest_token = f"{bdd} {keyword.name}" if bdd else keyword.name
pre = line.split(orig_kw_token)[0]
if line.strip()[0] == "|":
matches.append(f"""{pre}{suggest_token} | """)
else:
matches.append(f"""{pre}{suggest_token} """)
return (
matches,
[
{
"start": cursor_pos,
"end": offset + len(line),
"type": "keyword",
"text": m,
}
for m in matches
],
)
# Utility functions
def complete_table_settings(
completer: Completer, settings: List[str], token: str, bracket: bool = False
) -> List[str]:
""" Find settings that might be in a table
"""
matches = []
for setting in settings:
if bracket:
setting = f"[{setting}"
if setting.lower().startswith(token.lower()):
matches += [setting]
return matches
def find_current_table(code: str, cursor_pos: int) -> str:
""" Given some code, what is the current table we are in?
"""
bits = re.split(r"^(\| )?(\*+ *[^*]+? *\*+)", code[:cursor_pos], flags=re.I | re.M)
for bit in bits[::-1]:
if bit is None:
continue
match = re.match(RE_TABLE_NAME, bit, flags=re.I)
if match is not None:
return match.groupdict()["name"].lower()
return None
def find_all_variable_names(code: str, history: str, frag_type: str):
""" Find all the variable names in the code and history
frag_type is one of the Robot variable types
"""
if frag_type == "%":
pattern = r"""%\{[^\$%&@}]+}"""
else:
pattern = r"""[\$&@]\{[^\$%&@}]+}"""
return [var[2:-1] for var in re.findall(pattern, "\n".join(history + [code]))]
|
[
"IPython.core.completerlib.get_root_modules",
"re.split",
"re.match",
"re.findall",
"robot.parsing.robotreader.RobotReader.split_row"
] |
[((7940, 8010), 're.match', 're.match', (['"""^(given|when|then|and|but)?\\\\b *(.*)"""', 'kw_token'], {'flags': 're.I'}), "('^(given|when|then|and|but)?\\\\b *(.*)', kw_token, flags=re.I)\n", (7948, 8010), False, 'import re\n'), ((9384, 9462), 're.split', 're.split', (['"""^(\\\\| )?(\\\\*+ *[^*]+? *\\\\*+)"""', 'code[:cursor_pos]'], {'flags': '(re.I | re.M)'}), "('^(\\\\| )?(\\\\*+ *[^*]+? *\\\\*+)', code[:cursor_pos], flags=re.I | re.M)\n", (9392, 9462), False, 'import re\n'), ((3692, 3733), 'robot.parsing.robotreader.RobotReader.split_row', 'RobotReader.split_row', (['line[:line_cursor]'], {}), '(line[:line_cursor])\n', (3713, 3733), False, 'from robot.parsing.robotreader import RobotReader\n'), ((5233, 5261), 're.findall', 're.findall', (['"""[\\\\$&@%]"""', 'line'], {}), "('[\\\\$&@%]', line)\n", (5243, 5261), False, 'import re\n'), ((6365, 6392), 'robot.parsing.robotreader.RobotReader.split_row', 'RobotReader.split_row', (['line'], {}), '(line)\n', (6386, 6392), False, 'from robot.parsing.robotreader import RobotReader\n'), ((7446, 7473), 'robot.parsing.robotreader.RobotReader.split_row', 'RobotReader.split_row', (['line'], {}), '(line)\n', (7467, 7473), False, 'from robot.parsing.robotreader import RobotReader\n'), ((9549, 9589), 're.match', 're.match', (['RE_TABLE_NAME', 'bit'], {'flags': 're.I'}), '(RE_TABLE_NAME, bit, flags=re.I)\n', (9557, 9589), False, 'import re\n'), ((5315, 5371), 're.findall', 're.findall', (['""".*([\\\\$%&@]\\\\{[^{}]*$)"""', 'line[:line_cursor]'], {}), "('.*([\\\\$%&@]\\\\{[^{}]*$)', line[:line_cursor])\n", (5325, 5371), False, 'import re\n'), ((6628, 6646), 'IPython.core.completerlib.get_root_modules', 'get_root_modules', ([], {}), '()\n', (6644, 6646), False, 'from IPython.core.completerlib import get_root_modules\n'), ((7639, 7692), 're.match', 're.match', (['RE_PRE_KEYWORD_SUITE', 'tokens[0]'], {'flags': 're.I'}), '(RE_PRE_KEYWORD_SUITE, tokens[0], flags=re.I)\n', (7647, 7692), False, 'import re\n'), ((7744, 7799), 
're.match', 're.match', (['RE_PRE_KEYWORD_BRACKET', 'tokens[1]'], {'flags': 're.I'}), '(RE_PRE_KEYWORD_BRACKET, tokens[1], flags=re.I)\n', (7752, 7799), False, 'import re\n')]
|
""" irc x
"""
import automol.reac
import autofile
import elstruct
from mechlib.reaction import grid as rxngrid
from mechlib.amech_io import printer as ioprinter
from mechroutines.es import runner as es_runner
from mechroutines.es.runner import qchem_params
# Intrinsic Reaction Coordinates
def execute_irc(zma, ts_info,
mod_ini_thy_info, ini_method_dct,
ini_scn_run_fs, ini_scn_save_fs,
es_keyword_dct,
directions=(elstruct.Job.IRCF, elstruct.Job.IRCR)):
""" Run and save the IRC
"""
coord_name = 'IRC'
overwrite = es_keyword_dct['overwrite']
retryfail = es_keyword_dct['retryfail']
# Set up run filesys
run_fs = autofile.fs.run(ini_scn_run_fs[1].path([coord_name]))
# Run and Read the IRC in the forward and reverse direction
for direction in directions:
script_str, kwargs = qchem_params(
ini_method_dct, job=direction)
run_irc(
zma,
direction,
coord_name,
run_fs,
ini_scn_save_fs,
ts_info,
mod_ini_thy_info,
overwrite,
retryfail,
script_str,
**kwargs
)
success, _ = es_runner.read_job(
job=direction,
run_fs=run_fs,
)
if success:
save_irc(
direction,
coord_name,
run_fs,
ini_scn_save_fs,
mod_ini_thy_info
)
return success
def run_irc(zma, irc_job, coord_name, run_fs, ini_scn_save_fs,
ts_info, mod_ini_thy_info, overwrite, retryfail,
opt_script_str, **opt_kwargs):
""" Run the irc job
"""
def _irc_ran(ini_scn_save_fs, coord_name, irc_job):
""" See if coords are available
"""
coords = ini_scn_save_fs[-1].existing([coord_name])
if irc_job == elstruct.Job.IRCF:
ran_coords = [coord[1][0] for coord in coords if coord[1][0] > 0.0]
else:
ran_coords = [coord[1][0] for coord in coords if coord[1][0] < 0.0]
return bool(ran_coords)
# Maybe check for positive coords
if not _irc_ran(ini_scn_save_fs, coord_name, irc_job):
print('No IRC calculation in save filesystem')
opt_success, _ = es_runner.read_job(
job=irc_job,
run_fs=run_fs,
)
need_irc = not opt_success
else:
print('Found IRC directory at '
f'{ini_scn_save_fs[1].path([coord_name])}')
need_irc = False
if need_irc:
print('Running IRC calculation...')
es_runner.run_job(
job=irc_job,
script_str=opt_script_str,
run_fs=run_fs,
geo=zma,
spc_info=ts_info,
thy_info=mod_ini_thy_info,
overwrite=overwrite,
retryfail=retryfail,
**opt_kwargs
)
def save_irc(irc_job, coord_name,
run_fs, ini_scn_save_fs, mod_ini_thy_info):
""" Read IRC output and store data in filesystem
"""
opt_success, opt_ret = es_runner.read_job(
job=irc_job,
run_fs=run_fs,
)
locs_lst = []
if opt_success is not None:
# Read the IRC output file
inf_obj, inp_str, out_str = opt_ret
prog = inf_obj.prog
geos, gras, hessians = elstruct.reader.irc_points(prog, out_str)
coord_vals, enes = elstruct.reader.irc_path(prog, out_str)
# Write the data for each geom along IRC to the filesystem
save_path = ini_scn_save_fs[1].path([coord_name])
print(" - Saving...")
print(f" - Save path: {save_path}")
locs_lst = []
for idx, val in enumerate(coord_vals):
# Set locs idx; for reverse, ignore SadPt and flip idx to negative
locs_idx = idx
if irc_job == elstruct.Job.IRCR:
if locs_idx == 0:
continue
# Scale the coordinates so rounding to .2f number is non-zero
locs = [coord_name, [val*100.0]]
locs_lst.append(locs)
# Save files
ini_scn_save_fs[-1].create(locs)
ini_scn_save_fs[-1].file.energy.write(enes[idx], locs)
ini_scn_save_fs[-1].file.geometry.write(geos[idx], locs)
ini_scn_save_fs[-1].file.geometry_input.write(inp_str, locs)
ini_scn_save_fs[-1].file.geometry_info.write(inf_obj, locs)
if gras:
ini_scn_save_fs[-1].file.gradient.write(gras[idx], locs)
ini_scn_save_fs[-1].file.gradient_info.write(inf_obj, locs)
if hessians:
ini_scn_save_fs[-1].file.hessian.write(hessians[idx], locs)
ini_scn_save_fs[-1].file.hessian_info.write(inf_obj, locs)
scn_save_path = ini_scn_save_fs[-1].path(locs)
sp_save_fs = autofile.fs.single_point(scn_save_path)
sp_save_fs[-1].create(mod_ini_thy_info[1:4])
sp_save_fs[-1].file.input.write(inp_str, mod_ini_thy_info[1:4])
sp_save_fs[-1].file.info.write(inf_obj, mod_ini_thy_info[1:4])
sp_save_fs[-1].file.energy.write(enes[idx], mod_ini_thy_info[1:4])
update_traj_file(coord_name, ini_scn_save_fs, mod_ini_thy_info)
return locs_lst
def update_traj_file(coord_name, ini_scn_save_fs, mod_ini_thy_info):
""" Update the full IRC trajectory file based on what is in SAVE
filesystem
"""
saved_locs = ini_scn_save_fs[-1].existing()
if saved_locs:
es_runner.scan.write_traj(
coord_name, ini_scn_save_fs, mod_ini_thy_info, sorted(saved_locs)
)
def launch_point_zmatrices(ts_dct, mod_thy_info,
scn_alg, scn_fs, cnf_fs, cnf_locs):
""" Determine the point to launch an IRC from
Try to find saddle point at inplvl
Then search for the max
'auto': use sadpt, then max series
'sadpt': sadpt
'series': max series
"""
if 'sadpt' in scn_alg:
_, cnf_save_fs = cnf_fs
zma_locs = (ts_dct['zma_idx'],)
zma_fs = autofile.fs.zmatrix(cnf_save_fs[-1].path(cnf_locs))
if zma_fs[-1].file.zmatrix.exists(zma_locs):
geo_path = zma_fs[-1].file.zmatrix.path(zma_locs)
ioprinter.info_message(
' - Z-Matrix found.')
ioprinter.info_message(
f' - Reading Z-Matrix from path {geo_path}')
irc_zmas = (zma_fs[-1].file.zmatrix.read(zma_locs),)
elif 'max' in scn_alg:
_, scn_save_fs = scn_fs
zma, zrxn = ts_dct['zma'], ts_dct['zrxn']
scan_inf = automol.reac.build_scan_info(zrxn, zma)
coord_names, constraint_dct, coord_grids, _ = scan_inf
irc_zmas = rxngrid.grid_maximum_zmatrices(
zrxn.class_, zma, coord_grids, coord_names, scn_save_fs,
mod_thy_info, constraint_dct, series='full-n1')
print('irc zmas', irc_zmas)
import sys
sys.exit()
return irc_zmas
|
[
"mechroutines.es.runner.read_job",
"mechroutines.es.runner.run_job",
"mechroutines.es.runner.qchem_params",
"mechlib.amech_io.printer.info_message",
"autofile.fs.single_point",
"mechlib.reaction.grid.grid_maximum_zmatrices",
"elstruct.reader.irc_points",
"elstruct.reader.irc_path",
"sys.exit"
] |
[((3157, 3203), 'mechroutines.es.runner.read_job', 'es_runner.read_job', ([], {'job': 'irc_job', 'run_fs': 'run_fs'}), '(job=irc_job, run_fs=run_fs)\n', (3175, 3203), True, 'from mechroutines.es import runner as es_runner\n'), ((7051, 7061), 'sys.exit', 'sys.exit', ([], {}), '()\n', (7059, 7061), False, 'import sys\n'), ((892, 935), 'mechroutines.es.runner.qchem_params', 'qchem_params', (['ini_method_dct'], {'job': 'direction'}), '(ini_method_dct, job=direction)\n', (904, 935), False, 'from mechroutines.es.runner import qchem_params\n'), ((1252, 1300), 'mechroutines.es.runner.read_job', 'es_runner.read_job', ([], {'job': 'direction', 'run_fs': 'run_fs'}), '(job=direction, run_fs=run_fs)\n', (1270, 1300), True, 'from mechroutines.es import runner as es_runner\n'), ((2354, 2400), 'mechroutines.es.runner.read_job', 'es_runner.read_job', ([], {'job': 'irc_job', 'run_fs': 'run_fs'}), '(job=irc_job, run_fs=run_fs)\n', (2372, 2400), True, 'from mechroutines.es import runner as es_runner\n'), ((2674, 2865), 'mechroutines.es.runner.run_job', 'es_runner.run_job', ([], {'job': 'irc_job', 'script_str': 'opt_script_str', 'run_fs': 'run_fs', 'geo': 'zma', 'spc_info': 'ts_info', 'thy_info': 'mod_ini_thy_info', 'overwrite': 'overwrite', 'retryfail': 'retryfail'}), '(job=irc_job, script_str=opt_script_str, run_fs=run_fs,\n geo=zma, spc_info=ts_info, thy_info=mod_ini_thy_info, overwrite=\n overwrite, retryfail=retryfail, **opt_kwargs)\n', (2691, 2865), True, 'from mechroutines.es import runner as es_runner\n'), ((3416, 3457), 'elstruct.reader.irc_points', 'elstruct.reader.irc_points', (['prog', 'out_str'], {}), '(prog, out_str)\n', (3442, 3457), False, 'import elstruct\n'), ((3485, 3524), 'elstruct.reader.irc_path', 'elstruct.reader.irc_path', (['prog', 'out_str'], {}), '(prog, out_str)\n', (3509, 3524), False, 'import elstruct\n'), ((4946, 4985), 'autofile.fs.single_point', 'autofile.fs.single_point', (['scn_save_path'], {}), '(scn_save_path)\n', (4970, 4985), False, 'import 
autofile\n'), ((6361, 6405), 'mechlib.amech_io.printer.info_message', 'ioprinter.info_message', (['""" - Z-Matrix found."""'], {}), "(' - Z-Matrix found.')\n", (6383, 6405), True, 'from mechlib.amech_io import printer as ioprinter\n'), ((6435, 6502), 'mechlib.amech_io.printer.info_message', 'ioprinter.info_message', (['f""" - Reading Z-Matrix from path {geo_path}"""'], {}), "(f' - Reading Z-Matrix from path {geo_path}')\n", (6457, 6502), True, 'from mechlib.amech_io import printer as ioprinter\n'), ((6837, 6976), 'mechlib.reaction.grid.grid_maximum_zmatrices', 'rxngrid.grid_maximum_zmatrices', (['zrxn.class_', 'zma', 'coord_grids', 'coord_names', 'scn_save_fs', 'mod_thy_info', 'constraint_dct'], {'series': '"""full-n1"""'}), "(zrxn.class_, zma, coord_grids, coord_names,\n scn_save_fs, mod_thy_info, constraint_dct, series='full-n1')\n", (6867, 6976), True, 'from mechlib.reaction import grid as rxngrid\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 19 18:03:59 2016
@author: jones_000
"""
import copy as cp
import numpy as np
import math
import Solver
import Physics
import Body
import vector
import matplotlib.pyplot as plt
class Simulation(object):
'''Parent Simulation class
Attributes
----------
stop_condition : callable
sets the stop condition for simulation
physics : Physics
the physics being simulated with a solver
body : array, GravBody
the array of bodies with position, velocity, and mass
'''
def __init__(self,stop_condition=None,physics=None,body=None):
'''Make body a list'''
if type(body) == list:
self.body = body
else:
self.body = [body]
self.physics = physics
self.stop_condition = stop_condition
def get_results(self):
'''This advances the sim and returns results'''
body = self.body
time = 0
self.bodies = [cp.deepcopy(self.body)]
self.t = [0]
while self.stop_condition(self.bodies) == True:
body, time = self.physics.advance(body,time)
self.bodies.append(cp.deepcopy(body))
self.t.append(time)
return self.t, self.bodies
class OrbitSim(Simulation):
'''Drives the Central Grav sim for orbits
Attributes
----------
stop_condition : callable
sets the stop condition for simulation
physics : Physics
the physics being simulated with a solver
body : GravBody
the body with position, velocity, and mass
'''
def __init__(self,stop_condition=None,physics=None,body=None,apnd=True):
Simulation.__init__(self,stop_condition,physics,body)
self.bodies = [cp.deepcopy(self.body)]
self.t = [0]
self.apnd = apnd
def get_results(self):
'''Returns time and bodies lists'''
return self.t, self.bodies
def advance(self,time=None,step=None):
'''Advances sim to a certain time or step
Parameters
----------
time : float
the target time for the sim
step : float
the number of steps to run
'''
if time != None:
dt = self.physics.solver.stepsize
time = time - dt
self.run(self.time_stop(time))
self.physics.solver.stepsize = time + dt - self.t[-1]
self.run(self.time_stop(time+dt))
self.physics.solver.stepsize = dt
if step != None:
self.run(self.step_stop(step))
if time == None and step == None:
self.run(self.stop_condition)
def step_stop(self,step):
'''Reference to stop function to end at a certain step'''
def stop(time,bodies):
steps = math.floor(time/self.physics.solver.step_size)
if steps < step:
return True
else:
return False
return stop
def time_stop(self,goal):
'''Reference to a stop function to end at a certain time'''
def stop(time,bodies):
if time < goal:
return True
else:
return False
return stop
def run(self,stop_condition):
'''Internal run function that advances bodies
Parameters
----------
stop_condition : callable
the stop function to end the sim
'''
time = self.t[-1]
while stop_condition(time,self.bodies) == True:
self.body, time = self.physics.advance(self.body,time)
if self.apnd:
self.bodies.append(cp.deepcopy(self.body))
self.t.append(time)
if not self.apnd:
self.bodies.append(cp.deepcopy(self.body))
self.t.append(cp.deepcopy(time))
class BinarySim(OrbitSim):
'''Takes in Elliptical Inputs and produces a Binary Sim
Attributes
----------
M1 : float
mass of the first body
M2 : float
mass of the second body
a1 : float
the semi-major axis of the first body's orbit
e : float
the orbits' eccentricity
'''
def __init__(self,M1=None,M2=None,a1=1,e=0,apnd=True):
'''Build GravBodies'''
self.G = 4*math.pi**2.
r1p = a1-e*a1
r2p = -(M1/M2)*r1p
v1p = math.sqrt(((self.G*M2**3.)/(a1*(M1+M2)**2.))*((1.+e)/(1.-e)))
v2p = -(M1/M2)*v1p
r1 = vector.Vector(r1p,0.,0.)
r2 = vector.Vector(r2p,0.,0.)
v1 = vector.Vector(0.,v1p,0.)
v2 = vector.Vector(0.,v2p,0.)
body1 = Body.GravBody(M1,r1,v1)
body2 = Body.GravBody(M2,r2,v2)
'''Set up Sim'''
self.body = [body1,body2]
solver = Solver.RK2(0.01)
self.physics = Physics.NBody(solver,self.G)
self.bodies = [cp.deepcopy(self.body)]
self.t = [0]
self.apnd = apnd
class ExoSim(BinarySim):
'''Runs a siim for an exoplant search
Attributes
----------
Ms : float
mass of the star
Mp : float
mass of the plant
ap : float
the semi-major axis of the planet's orbit
e : float
the orbits' eccentricity
Rs : float
the radius of the star in Solar Radii
Rp : float
the radius of the planet in Solar Radii
omega : float
the angle of periastron
i : float
the angle of inclination
'''
def __init__(self,Ms=None,Mp=None,ap=None,e=0,Rs=None,Rp=None,omega=None,i=None,apnd=True):
'''Save Values'''
self.apnd = apnd
self.Rs = Rs
self.Rp = Rp
self.G = 4*math.pi**2.
self.period = np.sqrt(((ap**3.)*((Ms+Mp)**2.))/Ms**3.)
'''Set up Vectors'''
rpp = ap-e*ap
rsp = -(Mp/Ms)*rpp
vpp = math.sqrt(((self.G*Ms**3.)/(ap*(Ms+Mp)**2.))*((1.+e)/(1.-e)))
vsp = -(Mp/Ms)*vpp
'''Rotate Vectors into Viewer frame'''
rs = vector.Vector(rsp,0.,0.)
rs.rot_z(omega)
rs.rot_x(i)
rp = vector.Vector(rpp,0.,0.)
rp.rot_z(omega)
rp.rot_x(i)
vs = vector.Vector(0.,vsp,0.)
vs.rot_z(omega)
vs.rot_x(i)
vp = vector.Vector(0.,vpp,0.)
vp.rot_z(omega)
vp.rot_x(i)
'''Set Up Sim'''
star = Body.GravBody(Ms,rs,vs)
planet = Body.GravBody(Mp,rp,vp)
self.body = [star,planet]
solver = Solver.RK2(0.01)
self.physics = Physics.NBody(solver,self.G)
self.bodies = [cp.deepcopy(self.body)]
self.t = [0]
def advance(self,time=None):
'''Advances Sim to a certain time or for one orbital period
Parameters
----------
time : float
the target time for the simulation, defaults to one orbital period
'''
if time == None:
time = self.period
dt = self.physics.solver.stepsize
time = time - dt
self.run(self.time_stop(time))
self.physics.solver.stepsize = time + dt - self.t[-1]
self.run(self.time_stop(time+dt))
self.physics.solver.stepsize = dt
def light_curve(self,time,bodies):
'''Creates and plots an exoplanet transit light curve for the orbit
Paramters
---------
time : list, float
a list of the independant variable, time
bodies : list, GravBody
a list of the Gravbodies at each time in time list
Returns
-------
a graph of the light curve
'''
r_list = np.array([b[0].position - b[1].position for b in bodies])
p = np.array([r.cart for r in r_list])
d = np.sqrt((p[:,0])**2. + (p[:,1])**2.)
x = (self.Rp**2. - self.Rs**2. + d**2.)/(2.*d)
h = np.sqrt(self.Rp**2. - x**2.)
theta = np.arccos(x/self.Rp)
psi = np.arccos((d-x)/self.Rs)
'''Areas of Arcs and Triangles'''
a1 = 0.5*x*h
ap = 0.5*theta*(self.Rp**2.)
A1 = ap - a1
a2 = 0.5*(d-x)*h
As = 0.5*psi*(self.Rs**2.)
A2 = As -a2
A = 2*(A1 + A2)
'''Fix Failures'''
A[d>=(self.Rp+self.Rs)] = 0.
A[d<=(self.Rs-self.Rp)] = np.pi*(self.Rp**2.)
A[p[:,2]<=0] = 0
I = ((np.pi*self.Rs**2.) - A)/(np.pi*self.Rs**2.)
plt.figure()
plt.plot(time,I,'.')
plt.title('Exo Planet Light Curve')
plt.xlabel('Time [Years]')
plt.ylabel('Intensity')
|
[
"matplotlib.pyplot.title",
"Solver.RK2",
"copy.deepcopy",
"math.sqrt",
"matplotlib.pyplot.plot",
"math.floor",
"Physics.NBody",
"matplotlib.pyplot.figure",
"Body.GravBody",
"numpy.array",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.ylabel",
"vector.Vector",
"numpy.arccos",
"numpy.sqrt"
] |
[((4569, 4654), 'math.sqrt', 'math.sqrt', (['(self.G * M2 ** 3.0 / (a1 * (M1 + M2) ** 2.0) * ((1.0 + e) / (1.0 - e)))'], {}), '(self.G * M2 ** 3.0 / (a1 * (M1 + M2) ** 2.0) * ((1.0 + e) / (1.0 -\n e)))\n', (4578, 4654), False, 'import math\n'), ((4671, 4699), 'vector.Vector', 'vector.Vector', (['r1p', '(0.0)', '(0.0)'], {}), '(r1p, 0.0, 0.0)\n', (4684, 4699), False, 'import vector\n'), ((4709, 4737), 'vector.Vector', 'vector.Vector', (['r2p', '(0.0)', '(0.0)'], {}), '(r2p, 0.0, 0.0)\n', (4722, 4737), False, 'import vector\n'), ((4747, 4775), 'vector.Vector', 'vector.Vector', (['(0.0)', 'v1p', '(0.0)'], {}), '(0.0, v1p, 0.0)\n', (4760, 4775), False, 'import vector\n'), ((4785, 4813), 'vector.Vector', 'vector.Vector', (['(0.0)', 'v2p', '(0.0)'], {}), '(0.0, v2p, 0.0)\n', (4798, 4813), False, 'import vector\n'), ((4826, 4851), 'Body.GravBody', 'Body.GravBody', (['M1', 'r1', 'v1'], {}), '(M1, r1, v1)\n', (4839, 4851), False, 'import Body\n'), ((4866, 4891), 'Body.GravBody', 'Body.GravBody', (['M2', 'r2', 'v2'], {}), '(M2, r2, v2)\n', (4879, 4891), False, 'import Body\n'), ((4975, 4991), 'Solver.RK2', 'Solver.RK2', (['(0.01)'], {}), '(0.01)\n', (4985, 4991), False, 'import Solver\n'), ((5015, 5044), 'Physics.NBody', 'Physics.NBody', (['solver', 'self.G'], {}), '(solver, self.G)\n', (5028, 5044), False, 'import Physics\n'), ((5985, 6034), 'numpy.sqrt', 'np.sqrt', (['(ap ** 3.0 * (Ms + Mp) ** 2.0 / Ms ** 3.0)'], {}), '(ap ** 3.0 * (Ms + Mp) ** 2.0 / Ms ** 3.0)\n', (5992, 6034), True, 'import numpy as np\n'), ((6127, 6212), 'math.sqrt', 'math.sqrt', (['(self.G * Ms ** 3.0 / (ap * (Ms + Mp) ** 2.0) * ((1.0 + e) / (1.0 - e)))'], {}), '(self.G * Ms ** 3.0 / (ap * (Ms + Mp) ** 2.0) * ((1.0 + e) / (1.0 -\n e)))\n', (6136, 6212), False, 'import math\n'), ((6285, 6313), 'vector.Vector', 'vector.Vector', (['rsp', '(0.0)', '(0.0)'], {}), '(rsp, 0.0, 0.0)\n', (6298, 6313), False, 'import vector\n'), ((6367, 6395), 'vector.Vector', 'vector.Vector', (['rpp', '(0.0)', '(0.0)'], {}), 
'(rpp, 0.0, 0.0)\n', (6380, 6395), False, 'import vector\n'), ((6449, 6477), 'vector.Vector', 'vector.Vector', (['(0.0)', 'vsp', '(0.0)'], {}), '(0.0, vsp, 0.0)\n', (6462, 6477), False, 'import vector\n'), ((6531, 6559), 'vector.Vector', 'vector.Vector', (['(0.0)', 'vpp', '(0.0)'], {}), '(0.0, vpp, 0.0)\n', (6544, 6559), False, 'import vector\n'), ((6649, 6674), 'Body.GravBody', 'Body.GravBody', (['Ms', 'rs', 'vs'], {}), '(Ms, rs, vs)\n', (6662, 6674), False, 'import Body\n'), ((6690, 6715), 'Body.GravBody', 'Body.GravBody', (['Mp', 'rp', 'vp'], {}), '(Mp, rp, vp)\n', (6703, 6715), False, 'import Body\n'), ((6765, 6781), 'Solver.RK2', 'Solver.RK2', (['(0.01)'], {}), '(0.01)\n', (6775, 6781), False, 'import Solver\n'), ((6805, 6834), 'Physics.NBody', 'Physics.NBody', (['solver', 'self.G'], {}), '(solver, self.G)\n', (6818, 6834), False, 'import Physics\n'), ((7950, 8009), 'numpy.array', 'np.array', (['[(b[0].position - b[1].position) for b in bodies]'], {}), '([(b[0].position - b[1].position) for b in bodies])\n', (7958, 8009), True, 'import numpy as np\n'), ((8020, 8054), 'numpy.array', 'np.array', (['[r.cart for r in r_list]'], {}), '([r.cart for r in r_list])\n', (8028, 8054), True, 'import numpy as np\n'), ((8067, 8107), 'numpy.sqrt', 'np.sqrt', (['(p[:, 0] ** 2.0 + p[:, 1] ** 2.0)'], {}), '(p[:, 0] ** 2.0 + p[:, 1] ** 2.0)\n', (8074, 8107), True, 'import numpy as np\n'), ((8171, 8205), 'numpy.sqrt', 'np.sqrt', (['(self.Rp ** 2.0 - x ** 2.0)'], {}), '(self.Rp ** 2.0 - x ** 2.0)\n', (8178, 8205), True, 'import numpy as np\n'), ((8216, 8238), 'numpy.arccos', 'np.arccos', (['(x / self.Rp)'], {}), '(x / self.Rp)\n', (8225, 8238), True, 'import numpy as np\n'), ((8251, 8279), 'numpy.arccos', 'np.arccos', (['((d - x) / self.Rs)'], {}), '((d - x) / self.Rs)\n', (8260, 8279), True, 'import numpy as np\n'), ((8757, 8769), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8767, 8769), True, 'import matplotlib.pyplot as plt\n'), ((8778, 8800), 
'matplotlib.pyplot.plot', 'plt.plot', (['time', 'I', '"""."""'], {}), "(time, I, '.')\n", (8786, 8800), True, 'import matplotlib.pyplot as plt\n'), ((8807, 8842), 'matplotlib.pyplot.title', 'plt.title', (['"""Exo Planet Light Curve"""'], {}), "('Exo Planet Light Curve')\n", (8816, 8842), True, 'import matplotlib.pyplot as plt\n'), ((8851, 8877), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [Years]"""'], {}), "('Time [Years]')\n", (8861, 8877), True, 'import matplotlib.pyplot as plt\n'), ((8886, 8909), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Intensity"""'], {}), "('Intensity')\n", (8896, 8909), True, 'import matplotlib.pyplot as plt\n'), ((1031, 1053), 'copy.deepcopy', 'cp.deepcopy', (['self.body'], {}), '(self.body)\n', (1042, 1053), True, 'import copy as cp\n'), ((1846, 1868), 'copy.deepcopy', 'cp.deepcopy', (['self.body'], {}), '(self.body)\n', (1857, 1868), True, 'import copy as cp\n'), ((2939, 2987), 'math.floor', 'math.floor', (['(time / self.physics.solver.step_size)'], {}), '(time / self.physics.solver.step_size)\n', (2949, 2987), False, 'import math\n'), ((5067, 5089), 'copy.deepcopy', 'cp.deepcopy', (['self.body'], {}), '(self.body)\n', (5078, 5089), True, 'import copy as cp\n'), ((6857, 6879), 'copy.deepcopy', 'cp.deepcopy', (['self.body'], {}), '(self.body)\n', (6868, 6879), True, 'import copy as cp\n'), ((1220, 1237), 'copy.deepcopy', 'cp.deepcopy', (['body'], {}), '(body)\n', (1231, 1237), True, 'import copy as cp\n'), ((3939, 3961), 'copy.deepcopy', 'cp.deepcopy', (['self.body'], {}), '(self.body)\n', (3950, 3961), True, 'import copy as cp\n'), ((3989, 4006), 'copy.deepcopy', 'cp.deepcopy', (['time'], {}), '(time)\n', (4000, 4006), True, 'import copy as cp\n'), ((3822, 3844), 'copy.deepcopy', 'cp.deepcopy', (['self.body'], {}), '(self.body)\n', (3833, 3844), True, 'import copy as cp\n')]
|
""" core run function
"""
from autorun import from_input_string
def direct(input_writer, script_str, run_dir, prog,
geo, charge, mult, method, basis, **kwargs):
""" Generates an input file for an electronic structure job and
runs it directly.
:param input_writer: elstruct writer module function for desired job
:type input_writer: elstruct function
:param script_str: string of bash script that contains
execution instructions electronic structure job
:type script_str: str
:param run_dir: name of directory to run electronic structure job
:type run_dir: str
:param prog: electronic structure program to run
:type prog: str
:param geo: cartesian or z-matrix geometry
:type geo: tuple
:param charge: molecular charge
:type charge: int
:param mult: spin multiplicity
:type mult: int
:param method: electronic structure method
:type method: str
:returns: the input string, the output string, and the run directory
:rtype: (str, str)
"""
input_str = input_writer(
prog=prog,
geo=geo, charge=charge, mult=mult, method=method, basis=basis,
**kwargs)
output_strs = from_input_string(script_str, run_dir, input_str)
output_str = output_strs[0]
return input_str, output_str
|
[
"autorun.from_input_string"
] |
[((1280, 1329), 'autorun.from_input_string', 'from_input_string', (['script_str', 'run_dir', 'input_str'], {}), '(script_str, run_dir, input_str)\n', (1297, 1329), False, 'from autorun import from_input_string\n')]
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
import torch
import torch.nn as nn
import torch.utils.data as data
import torch.backends.cudnn as cudnn
import torchvision.transforms as transforms
import os
import time
import argparse
import numpy as np
from PIL import Image
import cv2
from data.choose_config import cfg
cfg = cfg.cfg
from utils.augmentations import to_chw_bgr
from importlib import import_module
def str2bool(v):
return v.lower() in ("yes", "true", "t", "1")
parser = argparse.ArgumentParser(description='face detection demo')
parser.add_argument('--save_dir', type=str, default='results/',
help='Directory for detect result')
parser.add_argument('--model', type=str,
default='weights/rpool_face_c.pth', help='trained model')
parser.add_argument('--thresh', default=0.17, type=float,
help='Final confidence threshold')
parser.add_argument('--multigpu',
default=False, type=str2bool,
help='Specify whether model was trained with multigpu')
parser.add_argument('--model_arch',
default='RPool_Face_C', type=str,
choices=['RPool_Face_C', 'RPool_Face_Quant', 'RPool_Face_QVGA_monochrome', 'RPool_Face_M4'],
help='choose architecture among rpool variants')
parser.add_argument('--image_folder', default=None, type=str, help='folder containing images')
parser.add_argument('--save_traces',
default=False, type=str2bool,
help='Specify whether to save input output traces')
args = parser.parse_args()
if not os.path.exists(args.save_dir):
os.makedirs(args.save_dir)
use_cuda = torch.cuda.is_available()
if use_cuda:
torch.set_default_tensor_type('torch.cuda.FloatTensor')
else:
torch.set_default_tensor_type('torch.FloatTensor')
def detect(net, img_path, thresh, save_traces):
img = Image.open(img_path)
img = img.convert('RGB')
img = np.array(img)
height, width, _ = img.shape
if os.environ['IS_QVGA_MONO'] == '1':
max_im_shrink = np.sqrt(
320 * 240 / (img.shape[0] * img.shape[1]))
else:
max_im_shrink = np.sqrt(
640 * 480 / (img.shape[0] * img.shape[1]))
if save_traces==True and os.environ['IS_QVGA_MONO'] == '1':
image = cv2.resize(img, (320, 240))
elif save_traces==True:
image = cv2.resize(img, (640, 480))
else:
image = cv2.resize(img, None, None, fx=max_im_shrink,
fy=max_im_shrink, interpolation=cv2.INTER_LINEAR)
x = to_chw_bgr(image)
x = x.astype('float32')
x -= cfg.img_mean
x = x[[2, 1, 0], :, :]
if cfg.IS_MONOCHROME == True:
x = 0.299 * x[0] + 0.587 * x[1] + 0.114 * x[2]
x = torch.from_numpy(x).unsqueeze(0).unsqueeze(0)
else:
x = torch.from_numpy(x).unsqueeze(0)
if use_cuda:
x = x.cuda()
t1 = time.time()
y, loc, conf = net(x)
detections = y.data
scale = torch.Tensor([img.shape[1], img.shape[0],
img.shape[1], img.shape[0]])
img = cv2.imread(img_path, cv2.IMREAD_COLOR)
for i in range(detections.size(1)):
j = 0
while detections[0, i, j, 0] >= thresh:
score = detections[0, i, j, 0]
pt = (detections[0, i, j, 1:] * scale).cpu().numpy()
left_up, right_bottom = (pt[0], pt[1]), (pt[2], pt[3])
j += 1
cv2.rectangle(img, left_up, right_bottom, (0, 0, 255), 2)
conf_score = "{:.3f}".format(score)
point = (int(left_up[0]), int(left_up[1] - 5))
cv2.putText(img, conf_score, point, cv2.FONT_HERSHEY_COMPLEX,
0.6, (0, 255, 0), 1)
t2 = time.time()
print('detect:{} timer:{}'.format(img_path, t2 - t1))
cv2.imwrite(os.path.join(args.save_dir, os.path.basename(img_path)), img)
if save_traces == True:
return x, loc, conf
if __name__ == '__main__':
module = import_module('models.' + args.model_arch)
net = module.build_s3fd('test', cfg.NUM_CLASSES)
if args.multigpu == True:
net = torch.nn.DataParallel(net)
checkpoint_dict = torch.load(args.model)
model_dict = net.state_dict()
model_dict.update(checkpoint_dict)
net.load_state_dict(model_dict)
net.eval()
if use_cuda:
net.cuda()
cudnn.benckmark = True
img_path = args.image_folder
img_list = [os.path.join(img_path, x)
for x in os.listdir(img_path)]
x = []
loc = []
conf = []
for path in img_list:
if args.save_traces == True:
x_temp, loc_temp, conf_temp = detect(net, path, args.thresh, args.save_traces)
x.append(x_temp)
loc.append(loc_temp)
conf.append(conf_temp)
else:
detect(net, path, args.thresh, args.save_traces)
if args.save_traces == True:
np.save('trace_inputs.npy', torch.cat(x).cpu().detach().numpy())
np.save('trace_outputs.npy', torch.cat([torch.cat(conf), torch.cat(loc)], dim=1).cpu().detach().numpy())
|
[
"utils.augmentations.to_chw_bgr",
"argparse.ArgumentParser",
"torch.set_default_tensor_type",
"torch.cat",
"cv2.rectangle",
"os.path.join",
"torch.load",
"os.path.exists",
"torch.Tensor",
"cv2.resize",
"importlib.import_module",
"os.path.basename",
"torch.cuda.is_available",
"os.listdir",
"torch.from_numpy",
"cv2.putText",
"os.makedirs",
"PIL.Image.open",
"time.time",
"cv2.imread",
"numpy.array",
"torch.nn.DataParallel",
"numpy.sqrt"
] |
[((543, 601), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""face detection demo"""'}), "(description='face detection demo')\n", (566, 601), False, 'import argparse\n'), ((1751, 1776), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1774, 1776), False, 'import torch\n'), ((1677, 1706), 'os.path.exists', 'os.path.exists', (['args.save_dir'], {}), '(args.save_dir)\n', (1691, 1706), False, 'import os\n'), ((1712, 1738), 'os.makedirs', 'os.makedirs', (['args.save_dir'], {}), '(args.save_dir)\n', (1723, 1738), False, 'import os\n'), ((1795, 1850), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (['"""torch.cuda.FloatTensor"""'], {}), "('torch.cuda.FloatTensor')\n", (1824, 1850), False, 'import torch\n'), ((1861, 1911), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (['"""torch.FloatTensor"""'], {}), "('torch.FloatTensor')\n", (1890, 1911), False, 'import torch\n'), ((1972, 1992), 'PIL.Image.open', 'Image.open', (['img_path'], {}), '(img_path)\n', (1982, 1992), False, 'from PIL import Image\n'), ((2032, 2045), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (2040, 2045), True, 'import numpy as np\n'), ((2642, 2659), 'utils.augmentations.to_chw_bgr', 'to_chw_bgr', (['image'], {}), '(image)\n', (2652, 2659), False, 'from utils.augmentations import to_chw_bgr\n'), ((2988, 2999), 'time.time', 'time.time', ([], {}), '()\n', (2997, 2999), False, 'import time\n'), ((3062, 3132), 'torch.Tensor', 'torch.Tensor', (['[img.shape[1], img.shape[0], img.shape[1], img.shape[0]]'], {}), '([img.shape[1], img.shape[0], img.shape[1], img.shape[0]])\n', (3074, 3132), False, 'import torch\n'), ((3170, 3208), 'cv2.imread', 'cv2.imread', (['img_path', 'cv2.IMREAD_COLOR'], {}), '(img_path, cv2.IMREAD_COLOR)\n', (3180, 3208), False, 'import cv2\n'), ((3811, 3822), 'time.time', 'time.time', ([], {}), '()\n', (3820, 3822), False, 'import time\n'), ((4060, 4102), 'importlib.import_module', 
'import_module', (["('models.' + args.model_arch)"], {}), "('models.' + args.model_arch)\n", (4073, 4102), False, 'from importlib import import_module\n'), ((4251, 4273), 'torch.load', 'torch.load', (['args.model'], {}), '(args.model)\n', (4261, 4273), False, 'import torch\n'), ((2146, 2196), 'numpy.sqrt', 'np.sqrt', (['(320 * 240 / (img.shape[0] * img.shape[1]))'], {}), '(320 * 240 / (img.shape[0] * img.shape[1]))\n', (2153, 2196), True, 'import numpy as np\n'), ((2244, 2294), 'numpy.sqrt', 'np.sqrt', (['(640 * 480 / (img.shape[0] * img.shape[1]))'], {}), '(640 * 480 / (img.shape[0] * img.shape[1]))\n', (2251, 2294), True, 'import numpy as np\n'), ((2389, 2416), 'cv2.resize', 'cv2.resize', (['img', '(320, 240)'], {}), '(img, (320, 240))\n', (2399, 2416), False, 'import cv2\n'), ((4201, 4227), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['net'], {}), '(net)\n', (4222, 4227), False, 'import torch\n'), ((4521, 4546), 'os.path.join', 'os.path.join', (['img_path', 'x'], {}), '(img_path, x)\n', (4533, 4546), False, 'import os\n'), ((2461, 2488), 'cv2.resize', 'cv2.resize', (['img', '(640, 480)'], {}), '(img, (640, 480))\n', (2471, 2488), False, 'import cv2\n'), ((2515, 2614), 'cv2.resize', 'cv2.resize', (['img', 'None', 'None'], {'fx': 'max_im_shrink', 'fy': 'max_im_shrink', 'interpolation': 'cv2.INTER_LINEAR'}), '(img, None, None, fx=max_im_shrink, fy=max_im_shrink,\n interpolation=cv2.INTER_LINEAR)\n', (2525, 2614), False, 'import cv2\n'), ((3518, 3575), 'cv2.rectangle', 'cv2.rectangle', (['img', 'left_up', 'right_bottom', '(0, 0, 255)', '(2)'], {}), '(img, left_up, right_bottom, (0, 0, 255), 2)\n', (3531, 3575), False, 'import cv2\n'), ((3695, 3781), 'cv2.putText', 'cv2.putText', (['img', 'conf_score', 'point', 'cv2.FONT_HERSHEY_COMPLEX', '(0.6)', '(0, 255, 0)', '(1)'], {}), '(img, conf_score, point, cv2.FONT_HERSHEY_COMPLEX, 0.6, (0, 255,\n 0), 1)\n', (3706, 3781), False, 'import cv2\n'), ((3926, 3952), 'os.path.basename', 'os.path.basename', (['img_path'], 
{}), '(img_path)\n', (3942, 3952), False, 'import os\n'), ((4572, 4592), 'os.listdir', 'os.listdir', (['img_path'], {}), '(img_path)\n', (4582, 4592), False, 'import os\n'), ((2908, 2927), 'torch.from_numpy', 'torch.from_numpy', (['x'], {}), '(x)\n', (2924, 2927), False, 'import torch\n'), ((2840, 2859), 'torch.from_numpy', 'torch.from_numpy', (['x'], {}), '(x)\n', (2856, 2859), False, 'import torch\n'), ((5028, 5040), 'torch.cat', 'torch.cat', (['x'], {}), '(x)\n', (5037, 5040), False, 'import torch\n'), ((5113, 5128), 'torch.cat', 'torch.cat', (['conf'], {}), '(conf)\n', (5122, 5128), False, 'import torch\n'), ((5130, 5144), 'torch.cat', 'torch.cat', (['loc'], {}), '(loc)\n', (5139, 5144), False, 'import torch\n')]
|
import os
import numpy as np
import tensorflow as tf
from keras import backend as K
from keras.models import Sequential
from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout
from keras.models import Sequential
from keras.optimizers import Adam
from utils import load_multi_dataset, mkdir_p, HDF5_PATH, MODEL_PATH
from datetime import datetime
import time
from sklearn.model_selection import train_test_split
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
K.set_session(sess)
print('Loading data from HDF5...')
X_data, Y_data = load_multi_dataset(os.path.join(HDF5_PATH, 'train_h5_list.txt'))
# X_test, Y_test = load_multi_dataset(os.path.join(HDF5_PATH, 'test_h5_list.txt'))
print('Number of images:', X_data.shape[0])
print('Number of labels:', Y_data.shape[0])
print('Splitting data into training set and testing set....')
X_train, X_test, Y_train, Y_test = train_test_split(X_data, Y_data, test_size=0.2, random_state=42)
print('X_train shape:', X_train.shape)
print('Y_train shape:', Y_train.shape)
print('X_test shape:', X_test.shape)
print('Y_test shape:', Y_test.shape)
model = Sequential()
model.add(Lambda(lambda x: x / 255.0, input_shape=(70, 320, 3)))
model.add(Conv2D(24, (5, 5), strides=(2, 2), padding='valid', activation='relu'))
model.add(Conv2D(36, (5, 5), strides=(2, 2), padding='valid', activation='relu'))
model.add(Conv2D(48, (5, 5), strides=(2, 2), padding='valid', activation='relu'))
model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='valid', activation='relu'))
model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='valid', activation='relu'))
model.add(Dropout(0.5))
model.add(Flatten())
model.add(Dense(100, activation='relu'))
model.add(Dense(50, activation='relu'))
model.add(Dense(10, activation='relu'))
model.add(Dense(1))
model.summary()
model.compile(optimizer=Adam(lr=1e-04, decay=0.0), loss='mse')
t0 = time.time()
model.fit(X_train, Y_train, validation_data=(X_test, Y_test), shuffle=True, epochs=30, batch_size=128)
t1 = time.time()
print('Total training time:', t1 - t0, 'seconds')
mkdir_p(MODEL_PATH)
model_id = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
model_file = os.path.join(MODEL_PATH, '{}.h5'.format(model_id))
model.save(model_file)
print("Training done successfully and model has been saved: {}".format(model_file))
print("Drive safely!")
|
[
"sklearn.model_selection.train_test_split",
"keras.layers.Dropout",
"keras.backend.set_session",
"tensorflow.Session",
"keras.layers.Flatten",
"keras.optimizers.Adam",
"time.time",
"datetime.datetime.now",
"tensorflow.ConfigProto",
"utils.mkdir_p",
"keras.layers.Lambda",
"keras.layers.Conv2D",
"keras.layers.Dense",
"keras.models.Sequential",
"os.path.join"
] |
[((442, 458), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (456, 458), True, 'import tensorflow as tf\n'), ((505, 530), 'tensorflow.Session', 'tf.Session', ([], {'config': 'config'}), '(config=config)\n', (515, 530), True, 'import tensorflow as tf\n'), ((531, 550), 'keras.backend.set_session', 'K.set_session', (['sess'], {}), '(sess)\n', (544, 550), True, 'from keras import backend as K\n'), ((939, 1003), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X_data', 'Y_data'], {'test_size': '(0.2)', 'random_state': '(42)'}), '(X_data, Y_data, test_size=0.2, random_state=42)\n', (955, 1003), False, 'from sklearn.model_selection import train_test_split\n'), ((1165, 1177), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1175, 1177), False, 'from keras.models import Sequential\n'), ((1925, 1936), 'time.time', 'time.time', ([], {}), '()\n', (1934, 1936), False, 'import time\n'), ((2045, 2056), 'time.time', 'time.time', ([], {}), '()\n', (2054, 2056), False, 'import time\n'), ((2108, 2127), 'utils.mkdir_p', 'mkdir_p', (['MODEL_PATH'], {}), '(MODEL_PATH)\n', (2115, 2127), False, 'from utils import load_multi_dataset, mkdir_p, HDF5_PATH, MODEL_PATH\n'), ((623, 667), 'os.path.join', 'os.path.join', (['HDF5_PATH', '"""train_h5_list.txt"""'], {}), "(HDF5_PATH, 'train_h5_list.txt')\n", (635, 667), False, 'import os\n'), ((1188, 1241), 'keras.layers.Lambda', 'Lambda', (['(lambda x: x / 255.0)'], {'input_shape': '(70, 320, 3)'}), '(lambda x: x / 255.0, input_shape=(70, 320, 3))\n', (1194, 1241), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1253, 1323), 'keras.layers.Conv2D', 'Conv2D', (['(24)', '(5, 5)'], {'strides': '(2, 2)', 'padding': '"""valid"""', 'activation': '"""relu"""'}), "(24, (5, 5), strides=(2, 2), padding='valid', activation='relu')\n", (1259, 1323), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1335, 1405), 'keras.layers.Conv2D', 
'Conv2D', (['(36)', '(5, 5)'], {'strides': '(2, 2)', 'padding': '"""valid"""', 'activation': '"""relu"""'}), "(36, (5, 5), strides=(2, 2), padding='valid', activation='relu')\n", (1341, 1405), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1417, 1487), 'keras.layers.Conv2D', 'Conv2D', (['(48)', '(5, 5)'], {'strides': '(2, 2)', 'padding': '"""valid"""', 'activation': '"""relu"""'}), "(48, (5, 5), strides=(2, 2), padding='valid', activation='relu')\n", (1423, 1487), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1499, 1569), 'keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'strides': '(1, 1)', 'padding': '"""valid"""', 'activation': '"""relu"""'}), "(64, (3, 3), strides=(1, 1), padding='valid', activation='relu')\n", (1505, 1569), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1581, 1651), 'keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'strides': '(1, 1)', 'padding': '"""valid"""', 'activation': '"""relu"""'}), "(64, (3, 3), strides=(1, 1), padding='valid', activation='relu')\n", (1587, 1651), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1663, 1675), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (1670, 1675), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1687, 1696), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1694, 1696), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1708, 1737), 'keras.layers.Dense', 'Dense', (['(100)'], {'activation': '"""relu"""'}), "(100, activation='relu')\n", (1713, 1737), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1749, 1777), 'keras.layers.Dense', 'Dense', (['(50)'], {'activation': '"""relu"""'}), "(50, activation='relu')\n", (1754, 1777), False, 'from keras.layers import Conv2D, 
Dense, Activation, Flatten, Lambda, Dropout\n'), ((1789, 1817), 'keras.layers.Dense', 'Dense', (['(10)'], {'activation': '"""relu"""'}), "(10, activation='relu')\n", (1794, 1817), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1829, 1837), 'keras.layers.Dense', 'Dense', (['(1)'], {}), '(1)\n', (1834, 1837), False, 'from keras.layers import Conv2D, Dense, Activation, Flatten, Lambda, Dropout\n'), ((1880, 1906), 'keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.0001)', 'decay': '(0.0)'}), '(lr=0.0001, decay=0.0)\n', (1884, 1906), False, 'from keras.optimizers import Adam\n'), ((2139, 2153), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2151, 2153), False, 'from datetime import datetime\n')]
|
import os
from flask import Flask, request, jsonify, Response
from flask_pymongo import PyMongo
from bson.objectid import ObjectId
from prometheus_flask_exporter import PrometheusMetrics
app = Flask(__name__)
app.config["MONGO_URI"] = "mongodb://mongo:27017/dev"
mongo = PyMongo(app)
db = mongo.db
hostname = os.uname()[1]
PrometheusMetrics(app)
@app.route("/")
def index():
return jsonify(message=f"Welcome to Movies app! I am running inside {hostname} pod!")
@app.route("/message1")
def message1():
return jsonify(message=f"You've hit {hostname}. A very interesting message.")
@app.route("/message2")
def message2():
return jsonify(message=f"You've hit {hostname}. A second very interesting message.")
@app.route("/message3")
def message3():
return jsonify(message=f"You've hit {hostname}. A third very interesting message.")
@app.route("/movies", methods=["GET"])
def get_all_movies():
movies = db.movies.find()
data = []
for movie in movies:
item = {
"id": str(movie["_id"]),
"title": movie["title"],
"year": movie["year"]
}
data.append(item)
return jsonify(data=data)
@app.route("/movies", methods=["POST"])
def create_movie():
data = request.get_json(force=True)
db.movies.insert_one(
{
"title": data["title"],
"year": data["year"]
}
)
return jsonify(message="Movie saved successfully!")
@app.route("/movies/<id>", methods=["GET"])
def get_movie_by_id(id):
movie = db.movies.find_one_or_404({"_id": ObjectId(id)})
movie = {
"id": str(movie["_id"]),
"title": movie["title"],
"year": movie["year"]
}
return jsonify(movie=movie)
if __name__ == "__main__":
app.run()
|
[
"bson.objectid.ObjectId",
"flask.Flask",
"os.uname",
"flask.jsonify",
"flask_pymongo.PyMongo",
"prometheus_flask_exporter.PrometheusMetrics",
"flask.request.get_json"
] |
[((194, 209), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (199, 209), False, 'from flask import Flask, request, jsonify, Response\n'), ((272, 284), 'flask_pymongo.PyMongo', 'PyMongo', (['app'], {}), '(app)\n', (279, 284), False, 'from flask_pymongo import PyMongo\n'), ((326, 348), 'prometheus_flask_exporter.PrometheusMetrics', 'PrometheusMetrics', (['app'], {}), '(app)\n', (343, 348), False, 'from prometheus_flask_exporter import PrometheusMetrics\n'), ((311, 321), 'os.uname', 'os.uname', ([], {}), '()\n', (319, 321), False, 'import os\n'), ((391, 469), 'flask.jsonify', 'jsonify', ([], {'message': 'f"""Welcome to Movies app! I am running inside {hostname} pod!"""'}), "(message=f'Welcome to Movies app! I am running inside {hostname} pod!')\n", (398, 469), False, 'from flask import Flask, request, jsonify, Response\n'), ((523, 593), 'flask.jsonify', 'jsonify', ([], {'message': 'f"""You\'ve hit {hostname}. A very interesting message."""'}), '(message=f"You\'ve hit {hostname}. A very interesting message.")\n', (530, 593), False, 'from flask import Flask, request, jsonify, Response\n'), ((647, 724), 'flask.jsonify', 'jsonify', ([], {'message': 'f"""You\'ve hit {hostname}. A second very interesting message."""'}), '(message=f"You\'ve hit {hostname}. A second very interesting message.")\n', (654, 724), False, 'from flask import Flask, request, jsonify, Response\n'), ((778, 854), 'flask.jsonify', 'jsonify', ([], {'message': 'f"""You\'ve hit {hostname}. A third very interesting message."""'}), '(message=f"You\'ve hit {hostname}. 
A third very interesting message.")\n', (785, 854), False, 'from flask import Flask, request, jsonify, Response\n'), ((1159, 1177), 'flask.jsonify', 'jsonify', ([], {'data': 'data'}), '(data=data)\n', (1166, 1177), False, 'from flask import Flask, request, jsonify, Response\n'), ((1251, 1279), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (1267, 1279), False, 'from flask import Flask, request, jsonify, Response\n'), ((1412, 1456), 'flask.jsonify', 'jsonify', ([], {'message': '"""Movie saved successfully!"""'}), "(message='Movie saved successfully!')\n", (1419, 1456), False, 'from flask import Flask, request, jsonify, Response\n'), ((1716, 1736), 'flask.jsonify', 'jsonify', ([], {'movie': 'movie'}), '(movie=movie)\n', (1723, 1736), False, 'from flask import Flask, request, jsonify, Response\n'), ((1574, 1586), 'bson.objectid.ObjectId', 'ObjectId', (['id'], {}), '(id)\n', (1582, 1586), False, 'from bson.objectid import ObjectId\n')]
|
from core.advbase import *
from slot.a import *
from slot.d import *
def module():
return Yurius
class Yurius(Adv):
a3 = ('prep', 100)
conf = {}
conf['slots.a'] = Primal_Crisis()+Candy_Couriers()
conf['slots.d'] = Gaibhne_and_Creidhne()
conf['acl'] = """
if self.afflics.frostbite.get()
`dragon, not self.dragondrive_buff.get() and (self.duration<=120 or self.dragonform.dragon_gauge>=2130 or self.dragonform.shift_count>0)
else
`dragon, self.dragondrive_buff.get()
end
queue prep and self.duration>120
`s3; s2; s1; s4
end
`s3, cancel
`s2, cancel
`s4, cancel
`s1, cancel
"""
coab = ['Blade','Hunter_Sarisse','Xander']
share = ['Gala_Elisanne', 'Ranzal']
# conf['sim_afflict.efficiency'] = 1
# conf['sim_afflict.type'] = 'frostbite'
def prerun(self):
# 3000/1200/75
self.dragondrive_buff = Selfbuff('dragondrive_sd', 0.35, -1, 's', 'passive')
self.dragondrive_haste = Selfbuff('dragondrive_sp',0.30, -1, 'sp', 'buff')
self.dragonform.set_dragondrive(self.dragondrive_buff, drain=75)
Event('dragon_end').listener(self.dragondrive_on) # cursed
Event('dragondrive_end').listener(self.dragondrive_off)
def dragondrive_on(self, e):
self.dragondrive_haste.on()
def dragondrive_off(self, e):
self.dragondrive_haste.off()
def s1_proc(self, e):
if self.dragondrive_buff.get():
with KillerModifier('s1_killer', 'hit', 0.6, ['frostbite']):
self.dmg_make(e.name, 7.92)
else:
self.dmg_make(e.name, 7.56)
self.dragonform.charge_gauge(530, utp=True)
self.inspiration.add(1, team=False)
def s2_proc(self, e):
if self.dragondrive_buff.get():
with KillerModifier('s2_killer', 'hit', 0.6, ['frostbite']):
self.dmg_make(e.name, 10.52)
else:
self.dmg_make(e.name, 2.08)
self.afflics.frostbite(e.name,120,0.287,duration=30)
self.dmg_make(e.name, 6.24)
self.dragonform.charge_gauge(530, utp=True)
self.inspiration.add(2, team=False)
def s_proc(self, e):
if self.dragondrive_buff.get():
s = getattr(self, e.name)
self.dragonform.add_drive_gauge_time(s.ac.getstartup()+s.ac.getrecovery(), skill_pause=True)
if __name__ == '__main__':
from core.simulate import test_with_argv
test_with_argv(None, *sys.argv)
|
[
"core.simulate.test_with_argv"
] |
[((2511, 2542), 'core.simulate.test_with_argv', 'test_with_argv', (['None', '*sys.argv'], {}), '(None, *sys.argv)\n', (2525, 2542), False, 'from core.simulate import test_with_argv\n')]
|
from numpy.testing import assert_allclose
from ctapipe.calib.camera import (
CameraCalibrator,
HESSIOR1Calibrator,
NullR1Calibrator
)
from ctapipe.image.extractor import LocalPeakWindowSum
from ctapipe.io import SimTelEventSource
from ctapipe.utils import get_dataset_path
from traitlets.config.configurable import Config
def test_camera_calibrator(example_event):
telid = list(example_event.r0.tel)[0]
calibrator = CameraCalibrator(r1_product="HESSIOR1Calibrator")
calibrator.calibrate(example_event)
image = example_event.dl1.tel[telid].image
assert image is not None
def test_manual_r1():
calibrator = CameraCalibrator(r1_product="HESSIOR1Calibrator")
assert isinstance(calibrator.r1, HESSIOR1Calibrator)
def test_manual_extractor():
calibrator = CameraCalibrator(extractor_name="LocalPeakWindowSum")
assert isinstance(calibrator.dl1.extractor, LocalPeakWindowSum)
def test_eventsource_r1():
dataset = get_dataset_path("gamma_test_large.simtel.gz")
eventsource = SimTelEventSource(input_url=dataset)
calibrator = CameraCalibrator(eventsource=eventsource)
assert isinstance(calibrator.r1, HESSIOR1Calibrator)
def test_eventsource_override_r1():
dataset = get_dataset_path("gamma_test_large.simtel.gz")
eventsource = SimTelEventSource(input_url=dataset)
calibrator = CameraCalibrator(
eventsource=eventsource,
r1_product="NullR1Calibrator"
)
assert isinstance(calibrator.r1, NullR1Calibrator)
def test_config():
window_shift = 3
window_width = 9
config = Config({"LocalPeakWindowSum": {
"window_shift": window_shift,
"window_width": window_width,
}})
calibrator = CameraCalibrator(
r1_product='HESSIOR1Calibrator',
extractor_name='LocalPeakWindowSum',
config=config
)
assert calibrator.dl1.extractor.window_shift == window_shift
assert calibrator.dl1.extractor.window_width == window_width
|
[
"ctapipe.calib.camera.CameraCalibrator",
"ctapipe.utils.get_dataset_path",
"traitlets.config.configurable.Config",
"ctapipe.io.SimTelEventSource"
] |
[((440, 489), 'ctapipe.calib.camera.CameraCalibrator', 'CameraCalibrator', ([], {'r1_product': '"""HESSIOR1Calibrator"""'}), "(r1_product='HESSIOR1Calibrator')\n", (456, 489), False, 'from ctapipe.calib.camera import CameraCalibrator, HESSIOR1Calibrator, NullR1Calibrator\n'), ((648, 697), 'ctapipe.calib.camera.CameraCalibrator', 'CameraCalibrator', ([], {'r1_product': '"""HESSIOR1Calibrator"""'}), "(r1_product='HESSIOR1Calibrator')\n", (664, 697), False, 'from ctapipe.calib.camera import CameraCalibrator, HESSIOR1Calibrator, NullR1Calibrator\n'), ((803, 856), 'ctapipe.calib.camera.CameraCalibrator', 'CameraCalibrator', ([], {'extractor_name': '"""LocalPeakWindowSum"""'}), "(extractor_name='LocalPeakWindowSum')\n", (819, 856), False, 'from ctapipe.calib.camera import CameraCalibrator, HESSIOR1Calibrator, NullR1Calibrator\n'), ((968, 1014), 'ctapipe.utils.get_dataset_path', 'get_dataset_path', (['"""gamma_test_large.simtel.gz"""'], {}), "('gamma_test_large.simtel.gz')\n", (984, 1014), False, 'from ctapipe.utils import get_dataset_path\n'), ((1033, 1069), 'ctapipe.io.SimTelEventSource', 'SimTelEventSource', ([], {'input_url': 'dataset'}), '(input_url=dataset)\n', (1050, 1069), False, 'from ctapipe.io import SimTelEventSource\n'), ((1087, 1128), 'ctapipe.calib.camera.CameraCalibrator', 'CameraCalibrator', ([], {'eventsource': 'eventsource'}), '(eventsource=eventsource)\n', (1103, 1128), False, 'from ctapipe.calib.camera import CameraCalibrator, HESSIOR1Calibrator, NullR1Calibrator\n'), ((1238, 1284), 'ctapipe.utils.get_dataset_path', 'get_dataset_path', (['"""gamma_test_large.simtel.gz"""'], {}), "('gamma_test_large.simtel.gz')\n", (1254, 1284), False, 'from ctapipe.utils import get_dataset_path\n'), ((1303, 1339), 'ctapipe.io.SimTelEventSource', 'SimTelEventSource', ([], {'input_url': 'dataset'}), '(input_url=dataset)\n', (1320, 1339), False, 'from ctapipe.io import SimTelEventSource\n'), ((1357, 1429), 'ctapipe.calib.camera.CameraCalibrator', 'CameraCalibrator', ([], 
{'eventsource': 'eventsource', 'r1_product': '"""NullR1Calibrator"""'}), "(eventsource=eventsource, r1_product='NullR1Calibrator')\n", (1373, 1429), False, 'from ctapipe.calib.camera import CameraCalibrator, HESSIOR1Calibrator, NullR1Calibrator\n'), ((1583, 1679), 'traitlets.config.configurable.Config', 'Config', (["{'LocalPeakWindowSum': {'window_shift': window_shift, 'window_width':\n window_width}}"], {}), "({'LocalPeakWindowSum': {'window_shift': window_shift, 'window_width':\n window_width}})\n", (1589, 1679), False, 'from traitlets.config.configurable import Config\n'), ((1716, 1822), 'ctapipe.calib.camera.CameraCalibrator', 'CameraCalibrator', ([], {'r1_product': '"""HESSIOR1Calibrator"""', 'extractor_name': '"""LocalPeakWindowSum"""', 'config': 'config'}), "(r1_product='HESSIOR1Calibrator', extractor_name=\n 'LocalPeakWindowSum', config=config)\n", (1732, 1822), False, 'from ctapipe.calib.camera import CameraCalibrator, HESSIOR1Calibrator, NullR1Calibrator\n')]
|
# Generated by Django 1.9.1 on 2016-02-20 21:11
from django.db import migrations
def is_course_archived(course):
assert (course._participant_count is None) == (course._voter_count is None)
return course._participant_count is not None
def is_semester_archived(semester):
if semester.course_set.count() == 0:
return False
first_course_is_archived = is_course_archived(semester.course_set.first())
assert all(is_course_archived(course) == first_course_is_archived for course in semester.course_set.all())
return first_course_is_archived
def set_is_archived(apps, _schema_editor):
Semester = apps.get_model('evaluation', 'Semester')
for semester in Semester.objects.all():
semester.is_archived = is_semester_archived(semester)
semester.save()
class Migration(migrations.Migration):
dependencies = [
('evaluation', '0040_add_semester_is_archived'),
]
operations = [
migrations.RunPython(set_is_archived, reverse_code=migrations.RunPython.noop),
]
|
[
"django.db.migrations.RunPython"
] |
[((956, 1033), 'django.db.migrations.RunPython', 'migrations.RunPython', (['set_is_archived'], {'reverse_code': 'migrations.RunPython.noop'}), '(set_is_archived, reverse_code=migrations.RunPython.noop)\n', (976, 1033), False, 'from django.db import migrations\n')]
|
from tkinter import Tk, Label
root = Tk()
a = Label(root, text='Live de Python', font=('Arial', 30))
a.pack()
root.mainloop()
|
[
"tkinter.Label",
"tkinter.Tk"
] |
[((39, 43), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (41, 43), False, 'from tkinter import Tk, Label\n'), ((49, 103), 'tkinter.Label', 'Label', (['root'], {'text': '"""Live de Python"""', 'font': "('Arial', 30)"}), "(root, text='Live de Python', font=('Arial', 30))\n", (54, 103), False, 'from tkinter import Tk, Label\n')]
|
from fastapi import FastAPI
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseSettings
#root
app = FastAPI()
class Settings(BaseSettings):
env: str = 'production'
SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7"
JWT_ALGORITHM = 'HS256'
EXPIRE_JWT = 25
settings = Settings()
#db setup
DATABASE_URI = 'mysql+mysqlconnector://root:@localhost/shorten_url'
engine = create_engine(DATABASE_URI)
Base = declarative_base()
#cors setting
origins = dict(
development=["http://localhost:8000"],
production=["http://localhost:3000"]
)
app.add_middleware(
CORSMiddleware,
allow_origins=origins[settings.env],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
|
[
"sqlalchemy.create_engine",
"sqlalchemy.ext.declarative.declarative_base",
"fastapi.FastAPI"
] |
[((220, 229), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (227, 229), False, 'from fastapi import FastAPI\n'), ((532, 559), 'sqlalchemy.create_engine', 'create_engine', (['DATABASE_URI'], {}), '(DATABASE_URI)\n', (545, 559), False, 'from sqlalchemy import create_engine\n'), ((567, 585), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (583, 585), False, 'from sqlalchemy.ext.declarative import declarative_base\n')]
|
import os
import random
import time
from multiprocessing import Pool
def run_task(name):
print('Task {task_name} (pid={pid}) is Running...'.format(task_name=name, pid=os.getpid()))
time.sleep(random.randm() * 3)
print('Task {task_name} end.'.format(task_name=name))
if __name__ == '__main__':
print('Current process {pid}'.format(pid=os.getpid()))
p = Pool(processes=3)
for i in range(5):
p.apply_async(run_task, args=(i,))
print('waiting for all subprocess done....')
p.close()
p.join()
print('All subprocesses done')
|
[
"os.getpid",
"random.randm",
"multiprocessing.Pool"
] |
[((377, 394), 'multiprocessing.Pool', 'Pool', ([], {'processes': '(3)'}), '(processes=3)\n', (381, 394), False, 'from multiprocessing import Pool\n'), ((202, 216), 'random.randm', 'random.randm', ([], {}), '()\n', (214, 216), False, 'import random\n'), ((173, 184), 'os.getpid', 'os.getpid', ([], {}), '()\n', (182, 184), False, 'import os\n'), ((355, 366), 'os.getpid', 'os.getpid', ([], {}), '()\n', (364, 366), False, 'import os\n')]
|
import os
from conans import ConanFile, tools
from conans.errors import ConanInvalidConfiguration
class StructoptConan(ConanFile):
name = "structopt"
homepage = "https://github.com/p-ranav/structopt"
url = "https://github.com/conan-io/conan-center-index"
description = "Parse command line arguments by defining a struct+"
license = "MIT"
settings = "compiler", "os"
topics = ("conan", "structopt", "argument-parser", "cpp17", "header-only",
"single-header-lib", "header-library", "command-line", "arguments",
"mit-license", "modern-cpp", "structopt", "lightweight", "reflection",
"cross-platform", "library", "type-safety", "type-safe", "argparse",
"clap", "visit-struct-library", "magic-enum")
no_copy_source = True
@property
def _source_subfolder(self):
return "source_subfolder"
@property
def _supported_compiler(self):
compiler = str(self.settings.compiler)
version = tools.Version(self.settings.compiler.version)
if compiler == "Visual Studio" and version >= "15":
return True
elif compiler == "gcc" and version >= "9":
return True
elif compiler == "clang" and version >= "5":
return True
elif compiler == "apple-clang" and version >= "10":
return True
else:
self.output.warn("{} recipe lacks information about the {} compiler standard version support".format(self.name, compiler))
return False
def configure(self):
if self.settings.compiler.get_safe("cppstd"):
tools.check_min_cppstd(self, "17")
if not self._supported_compiler:
raise ConanInvalidConfiguration("structopt: Unsupported compiler: {}-{} "
"(https://github.com/p-ranav/structopt#compiler-compatibility).".format(self.settings.compiler, self.settings.compiler.version))
def source(self):
tools.get(**self.conan_data["sources"][self.version])
os.rename("{}-{}".format(self.name, self.version), self._source_subfolder)
def package(self):
self.copy(pattern="LICENSE", src=self._source_subfolder, dst="licenses")
self.copy(pattern="*.h", src=os.path.join(self._source_subfolder, "include"), dst="include")
self.copy(pattern="*.hpp", src=os.path.join(self._source_subfolder, "include"), dst="include")
def package_id(self):
self.info.header_only()
|
[
"conans.tools.get",
"conans.tools.check_min_cppstd",
"os.path.join",
"conans.tools.Version"
] |
[((979, 1024), 'conans.tools.Version', 'tools.Version', (['self.settings.compiler.version'], {}), '(self.settings.compiler.version)\n', (992, 1024), False, 'from conans import ConanFile, tools\n'), ((1977, 2030), 'conans.tools.get', 'tools.get', ([], {}), "(**self.conan_data['sources'][self.version])\n", (1986, 2030), False, 'from conans import ConanFile, tools\n'), ((1611, 1645), 'conans.tools.check_min_cppstd', 'tools.check_min_cppstd', (['self', '"""17"""'], {}), "(self, '17')\n", (1633, 1645), False, 'from conans import ConanFile, tools\n'), ((2256, 2303), 'os.path.join', 'os.path.join', (['self._source_subfolder', '"""include"""'], {}), "(self._source_subfolder, 'include')\n", (2268, 2303), False, 'import os\n'), ((2359, 2406), 'os.path.join', 'os.path.join', (['self._source_subfolder', '"""include"""'], {}), "(self._source_subfolder, 'include')\n", (2371, 2406), False, 'import os\n')]
|
"""This module contains `docker container rm` class"""
from docker.errors import APIError
from tsaotun.lib.Docker.Container.command import Command
from tsaotun.cli import Tsaotun
class Rm(Command):
"""This class implements `docker container rm` command"""
name = "container rm"
require = []
def __init__(self):
Command.__init__(self)
self.settings[self.name] = None
def eval_command(self, args):
try:
containers = args["containers"]
clear = args["clear"]
del args["containers"]
del args["clear"]
Ids = []
if clear:
cli = Tsaotun()
cli.send('ps -a --format {{Id}}')
ress = cli.recv()
if ress:
ress = ress.split('\n')
ress = [res[0:4] for res in ress]
for Id in ress:
Ids.append(Id)
args['container'] = Id
self.client.remove_container(**args)
else:
for Id in containers:
Ids.append(Id)
args['container'] = Id
self.client.remove_container(**args)
self.settings[self.name] = '\n'.join(Ids)
except APIError as e:
raise e
def final(self):
return self.settings[self.name]
|
[
"tsaotun.lib.Docker.Container.command.Command.__init__",
"tsaotun.cli.Tsaotun"
] |
[((340, 362), 'tsaotun.lib.Docker.Container.command.Command.__init__', 'Command.__init__', (['self'], {}), '(self)\n', (356, 362), False, 'from tsaotun.lib.Docker.Container.command import Command\n'), ((659, 668), 'tsaotun.cli.Tsaotun', 'Tsaotun', ([], {}), '()\n', (666, 668), False, 'from tsaotun.cli import Tsaotun\n')]
|
import os
import torch
import torch.nn as nn
import torch.optim as optim
from torch.nn.utils import clip_grad_norm
import misc.utils as utils
class Optim(object):
def __init__(self, opt):
self.last_ppl = None
self.init_i2t(opt)
self.init_nmt(opt)
self._step = 0
self.opt = opt
def init_i2t(self, opt):
self.i2t_train_flag = opt.i2t_train_flag
self.i2t_eval_flag = opt.i2t_eval_flag
self.i2t_method = opt.i2t_optim
self.i2t_lr = opt.i2t_learning_rate
self.i2t_current_lr = self.i2t_lr
self.i2t_learning_rate_decay_start = opt.i2t_learning_rate_decay_start
self.i2t_learning_rate_decay_every = opt.i2t_learning_rate_decay_every
self.i2t_learning_rate_decay_rate = opt.i2t_learning_rate_decay_rate
self.i2t_optim_alpha = opt.i2t_optim_alpha
self.i2t_optim_beta = opt.i2t_optim_beta
self.i2t_optim_epsilon = opt.i2t_optim_epsilon
self.i2t_momentum = opt.i2t_momentum
self.i2t_max_grad_norm = opt.i2t_max_grad_norm
self.i2t_grad_clip = opt.i2t_grad_clip
self.i2t_start_decay = False
self.i2t_decay_method = opt.i2t_decay_method
self.i2t_weight_decay = opt.i2t_weight_decay
def init_nmt(self, opt):
self.nmt_train_flag = opt.nmt_train_flag
self.nmt_eval_flag = opt.nmt_eval_flag
self.nmt_method = opt.nmt_optim
self.nmt_lr = opt.nmt_learning_rate
self.nmt_current_lr = self.nmt_lr
self.nmt_learning_rate_decay_start = opt.nmt_learning_rate_decay_start
self.nmt_learning_rate_decay_every = opt.nmt_learning_rate_decay_every
self.nmt_learning_rate_decay_rate = opt.nmt_learning_rate_decay_rate
self.nmt_optim_alpha = opt.nmt_optim_alpha
self.nmt_optim_beta = opt.nmt_optim_beta
self.nmt_optim_epsilon = opt.nmt_optim_epsilon
self.nmt_momentum = opt.nmt_momentum
self.nmt_max_grad_norm = opt.nmt_max_grad_norm
self.nmt_grad_clip = opt.nmt_grad_clip
self.nmt_start_decay = False
self.nmt_decay_method = opt.nmt_decay_method
self.nmt_weight_decay = opt.nmt_weight_decay
self.nmt_warmup_steps = opt.nmt_warmup_steps
self.nmt_betas = [0.9, 0.98]
def create_optimizer(self, method, parameters, lr, alpha, beta, epsilon, weight_decay):
if method == 'rmsprop':
optimizer = optim.RMSprop(parameters, lr, alpha, epsilon, weight_decay=weight_decay)
elif method == 'adagrad':
optimizer = optim.Adagrad(parameters, lr, weight_decay=weight_decay)
elif method == 'sgd':
optimizer = optim.SGD(parameters, lr, weight_decay=weight_decay)
elif method == 'sgdm':
optimizer = optim.SGD(parameters, lr, alpha, weight_decay=weight_decay)
elif method == 'sgdmom':
optimizer = optim.SGD(parameters, lr, alpha, weight_decay=weight_decay, nesterov=True)
elif method == 'adam':
optimizer = optim.Adam(parameters, lr, (alpha, beta), epsilon, weight_decay=weight_decay)
else:
raise RuntimeError("Invalid optim method: " + method)
return optimizer
def set_parameters(self, i2t_model, nmt_model):
if i2t_model is not None:
self.i2t_params = i2t_model.parameters()
self.i2t_optimizer = self.create_optimizer(self.i2t_method, self.i2t_params, self.i2t_lr, self.i2t_optim_alpha, self.i2t_optim_beta, self.i2t_optim_epsilon, self.i2t_weight_decay)
if vars(self.opt).get('start_from', None) is not None and os.path.isfile(os.path.join(self.opt.start_from, "i2t_optimizer.pth")):
self.i2t_optimizer.load_state_dict(torch.load(os.path.join(self.opt.start_from, 'i2t_optimizer.pth')))
if nmt_model is not None:
self.nmt_params = list(nmt_model.parameters()) # careful: params may be a generator
self.nmt_optimizer = self.create_optimizer(self.nmt_method, self.nmt_params, self.nmt_lr, self.nmt_optim_alpha, self.nmt_optim_beta, self.nmt_optim_epsilon, self.nmt_weight_decay)
if vars(self.opt).get('start_from', None) is not None and os.path.isfile(os.path.join(self.opt.start_from, "nmt_optimizer.pth")):
self.nmt_optimizer.load_state_dict(torch.load(os.path.join(self.opt.start_from, 'nmt_optimizer.pth')))
def step(self):
self._step += 1
if self.i2t_train_flag:
if self.i2t_max_grad_norm: clip_grad_norm(self.i2t_params, self.i2t_max_grad_norm)
self.i2t_optimizer.step()
if self.opt.nmt_train_flag:
if self.opt.nmt_decay_method == "noam":
self.nmt_current_lr = self.nmt_lr * (self.opt.rnn_size ** (-0.5) * min(self._step ** (-0.5), self._step * self.nmt_warmup_steps ** (-1.5)))
for group in self.nmt_optimizer.param_groups:
group['lr'] = self.nmt_current_lr
if self.nmt_max_grad_norm: clip_grad_norm(self.nmt_params, self.nmt_max_grad_norm)
self.nmt_optimizer.step()
def zero_grad(self):
if self.i2t_train_flag:
self.i2t_optimizer.zero_grad()
if self.nmt_train_flag:
self.nmt_optimizer.zero_grad()
def update_ScheduledSampling_prob(self, opt, epoch, dp_i2t_model):
if epoch > opt.scheduled_sampling_start and opt.scheduled_sampling_start >= 0:
frac = (epoch - opt.scheduled_sampling_start) // opt.scheduled_sampling_increase_every
dp_i2t_model.ss_prob = min(opt.scheduled_sampling_increase_prob * frac, opt.scheduled_sampling_max_prob)
return dp_i2t_model
def update_LearningRate(self, type, epoch):
if type == 'i2t':
if epoch > self.i2t_learning_rate_decay_start and self.i2t_learning_rate_decay_start >= 0:
frac = (epoch - self.i2t_learning_rate_decay_start) // self.i2t_learning_rate_decay_every
decay_factor = self.i2t_learning_rate_decay_rate ** frac
self.i2t_current_lr = self.i2t_lr * decay_factor
for group in self.i2t_optimizer.param_groups:
group['lr'] = self.i2t_current_lr
else:
self.i2t_current_lr = self.i2t_lr
if type == 'nmt':
if epoch > self.nmt_learning_rate_decay_start and self.nmt_learning_rate_decay_start >= 0:
self.nmt_current_lr = self.nmt_lr * self.nmt_learning_rate_decay_rate
for group in self.nmt_optimizer.param_groups:
group['lr'] = self.nmt_current_lr
else:
self.nmt_current_lr = self.nmt_lr
|
[
"torch.optim.Adagrad",
"torch.optim.Adam",
"torch.nn.utils.clip_grad_norm",
"torch.optim.RMSprop",
"os.path.join",
"torch.optim.SGD"
] |
[((2428, 2500), 'torch.optim.RMSprop', 'optim.RMSprop', (['parameters', 'lr', 'alpha', 'epsilon'], {'weight_decay': 'weight_decay'}), '(parameters, lr, alpha, epsilon, weight_decay=weight_decay)\n', (2441, 2500), True, 'import torch.optim as optim\n'), ((2559, 2615), 'torch.optim.Adagrad', 'optim.Adagrad', (['parameters', 'lr'], {'weight_decay': 'weight_decay'}), '(parameters, lr, weight_decay=weight_decay)\n', (2572, 2615), True, 'import torch.optim as optim\n'), ((4502, 4557), 'torch.nn.utils.clip_grad_norm', 'clip_grad_norm', (['self.i2t_params', 'self.i2t_max_grad_norm'], {}), '(self.i2t_params, self.i2t_max_grad_norm)\n', (4516, 4557), False, 'from torch.nn.utils import clip_grad_norm\n'), ((4995, 5050), 'torch.nn.utils.clip_grad_norm', 'clip_grad_norm', (['self.nmt_params', 'self.nmt_max_grad_norm'], {}), '(self.nmt_params, self.nmt_max_grad_norm)\n', (5009, 5050), False, 'from torch.nn.utils import clip_grad_norm\n'), ((2670, 2722), 'torch.optim.SGD', 'optim.SGD', (['parameters', 'lr'], {'weight_decay': 'weight_decay'}), '(parameters, lr, weight_decay=weight_decay)\n', (2679, 2722), True, 'import torch.optim as optim\n'), ((3625, 3679), 'os.path.join', 'os.path.join', (['self.opt.start_from', '"""i2t_optimizer.pth"""'], {}), "(self.opt.start_from, 'i2t_optimizer.pth')\n", (3637, 3679), False, 'import os\n'), ((4210, 4264), 'os.path.join', 'os.path.join', (['self.opt.start_from', '"""nmt_optimizer.pth"""'], {}), "(self.opt.start_from, 'nmt_optimizer.pth')\n", (4222, 4264), False, 'import os\n'), ((2778, 2837), 'torch.optim.SGD', 'optim.SGD', (['parameters', 'lr', 'alpha'], {'weight_decay': 'weight_decay'}), '(parameters, lr, alpha, weight_decay=weight_decay)\n', (2787, 2837), True, 'import torch.optim as optim\n'), ((3744, 3798), 'os.path.join', 'os.path.join', (['self.opt.start_from', '"""i2t_optimizer.pth"""'], {}), "(self.opt.start_from, 'i2t_optimizer.pth')\n", (3756, 3798), False, 'import os\n'), ((4329, 4383), 'os.path.join', 'os.path.join', 
(['self.opt.start_from', '"""nmt_optimizer.pth"""'], {}), "(self.opt.start_from, 'nmt_optimizer.pth')\n", (4341, 4383), False, 'import os\n'), ((2895, 2969), 'torch.optim.SGD', 'optim.SGD', (['parameters', 'lr', 'alpha'], {'weight_decay': 'weight_decay', 'nesterov': '(True)'}), '(parameters, lr, alpha, weight_decay=weight_decay, nesterov=True)\n', (2904, 2969), True, 'import torch.optim as optim\n'), ((3025, 3102), 'torch.optim.Adam', 'optim.Adam', (['parameters', 'lr', '(alpha, beta)', 'epsilon'], {'weight_decay': 'weight_decay'}), '(parameters, lr, (alpha, beta), epsilon, weight_decay=weight_decay)\n', (3035, 3102), True, 'import torch.optim as optim\n')]
|
'''
Haystack does not yet support range facets on Solr. This module
provides subclasses of SolrSearchQuery and SolrSearchBackend to
patch in range facet functionalty.
'''
from haystack import connections
from haystack.backends.solr_backend import SolrSearchQuery, SolrSearchBackend, \
SolrEngine
from unidecode import unidecode
class RangeSolrSearchQuery(SolrSearchQuery):
def __init__(self, *args, **kwargs):
super(RangeSolrSearchQuery, self).__init__(*args, **kwargs)
self.range_facets = {}
def add_field_facet(self, field, **options):
# extend default facet field method to handle a special
# range=True case
if options.get('range', None):
self.add_range_facet(field, **options)
else:
return super(RangeSolrSearchQuery, self).add_field_facet(field, **options)
def add_range_facet(self, field, **options):
"""Adds a solr range facet on a field. Options must include
start, end, and gap."""
# using same logic as normal facets; for range facets this
# is probably unnecessary since they have to be numeric anyway
field_name = connections[self._using].get_unified_index() \
.get_facet_fieldname(field)
self.range_facets[field_name] = options.copy()
def build_params(self, *args, **kwargs):
# extend default build params logic to include any facet range
# options
search_kwargs = super(RangeSolrSearchQuery, self).build_params(*args, **kwargs)
if not self.range_facets:
return search_kwargs
range_kwargs = {
'facet.range': list(self.range_facets.keys())
}
for field, opts in self.range_facets.items():
# NOTE: not exposing other range facet params for now
for solr_opt in ['start', 'end', 'gap']:
if solr_opt in opts:
range_kwargs['f.%s.facet.range.%s' % (field, solr_opt)] \
= opts[solr_opt]
# support hard end option; convert python boolean to solr bool
if 'hardend' in opts:
val = 'true' if bool(opts['hardend']) else 'false'
range_kwargs['f.%s.facet.range.hardend' % field] = val
search_kwargs.update(range_kwargs)
return search_kwargs
def post_process_facets(self, results):
'''
Extend post processing logic to include facet range data in returned
facets.
'''
facets = super(RangeSolrSearchQuery, self).post_process_facets(results)
if 'facet_ranges' in results:
# copy facet range data into existing facet data
facets['ranges'] = results['facet_ranges'][0]
for data in facets['ranges'].values():
# possible to get no counts, in which case we can't calculate a max
if data['counts']:
# find the max value for the facet_ranges
data['max'] = max(data['counts'][1::2])
# solr returns a list of value, count, value, count
# use zip to convert into a list of two-tuples
# (thanks to https://stackoverflow.com/questions/14902686/turn-flat-list-into-two-tuples)
data['counts'] = list(zip(data['counts'][::2], data['counts'][1::2]))
return facets
def _clone(self, *args, **kwargs):
# extend clone to ensure range facets are preserved
clone = super(RangeSolrSearchQuery, self)._clone(klass=self.__class__,
*args, **kwargs)
clone.range_facets = self.range_facets.copy()
return clone
class SolrRangeSearchBackend(SolrSearchBackend):
# extend default solr backend to ensure facet ranges are accessible
# in the result for processing by RangeSolrSearchQuery
def _process_results(self, raw_results, *args, **kwargs):
results = super(SolrRangeSearchBackend, self)._process_results(raw_results,
*args, **kwargs)
if hasattr(raw_results, 'facets'):
results['facet_ranges'] = raw_results.facets.get('facet_ranges', {}),
return results
def build_schema(self, fields):
# haystack doesn't have any customization points for schema generation
# or types, and Solr won't allow tokenization/customization on
# the built string field; customize the generated schema here
# to use local 'string_en' solr field for fields ending in "_isort"
schema = super(SolrRangeSearchBackend, self).build_schema(fields)
for field_cfg in schema[1]:
if field_cfg['field_name'].endswith('_isort'):
field_cfg['type'] = 'string_en'
return schema
class RangeSolrEngine(SolrEngine):
# extend default solr engine to make range backend and query defaults
backend = SolrRangeSearchBackend
query = RangeSolrSearchQuery
def facet_sort_ignoreaccents(facets, *fields):
'''Update alpha facet so that sorting ignores accents.'''
# update alpha facet so that sorting ignores accents
# (can't be done in solr because then facets would display without accents)
if not facets:
return facets
for field in fields:
if field in facets['fields']:
facets['fields'][field].sort(key=lambda elem: unidecode(elem[0]))
return facets
|
[
"unidecode.unidecode"
] |
[((5382, 5400), 'unidecode.unidecode', 'unidecode', (['elem[0]'], {}), '(elem[0])\n', (5391, 5400), False, 'from unidecode import unidecode\n')]
|
from glob import glob
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import argparse
"""
This is a reproduction of Fernando's 2011 normalized commit rate plot. This
shows roughly the bus factor
"""
parser = argparse.ArgumentParser()
parser.add_argument("--outname", "-o")
args = parser.parse_args()
outname = args.outname
filenames = glob("data/raw_data/*/commits.tsv")
filenames.sort()
fig, ax = plt.subplots()
for i, filename in enumerate(filenames):
# Parse project name
project = filename.split("/")[2].split("_")[0]
commits = pd.read_csv(filename, sep="\t", keep_default_na=False)
commits[commits["author_name"].isnull()]["author_name"] = ""
_, ticket_counts = np.unique(commits["author_name"], return_counts=True)
ticket_counts.sort()
ticket_counts = ticket_counts[::-1] / ticket_counts.max()
ax.plot(ticket_counts[:15] * 100,
label=project,
marker=".", color="C%d" % i,
linewidth=2)
ax.set_xlim(0, 20)
ax.legend()
ax.set_title("Normalized commit rates", fontweight="bold",
fontsize="large")
ax.set_xticks(np.arange(0, 21, 5))
ax.set_yticks([0, 50, 100])
[ax.axhline(i, color="0", alpha=0.3, linewidth=1, zorder=-1)
for i in (0, 50, 100)]
ax.set_ylim(-1, 105)
ax.spines['right'].set_color('none')
ax.spines['left'].set_color('none')
ax.spines['top'].set_color('none')
ax.spines['bottom'].set_color('none')
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.grid(which='major', axis='y', linewidth=0.75, linestyle='-',
color='0.5')
ax.set_xticklabels(["%d" % i for i in np.arange(0, 20, 5)], fontsize="medium",
fontweight="bold", color="0.5")
ax.set_yticklabels(["%d%%" % i for i in (0, 50, 100)], fontsize="medium",
fontweight="bold", color="0.5")
ax.set_xlabel("Contributors", fontsize="medium", fontweight="bold",
color="0.5")
if outname is not None:
try:
os.makedirs(os.path.dirname(outname))
except OSError:
pass
fig.savefig(outname)
|
[
"argparse.ArgumentParser",
"pandas.read_csv",
"os.path.dirname",
"numpy.arange",
"glob.glob",
"matplotlib.pyplot.subplots",
"numpy.unique"
] |
[((243, 268), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (266, 268), False, 'import argparse\n'), ((372, 407), 'glob.glob', 'glob', (['"""data/raw_data/*/commits.tsv"""'], {}), "('data/raw_data/*/commits.tsv')\n", (376, 407), False, 'from glob import glob\n'), ((436, 450), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (448, 450), True, 'import matplotlib.pyplot as plt\n'), ((584, 638), 'pandas.read_csv', 'pd.read_csv', (['filename'], {'sep': '"""\t"""', 'keep_default_na': '(False)'}), "(filename, sep='\\t', keep_default_na=False)\n", (595, 638), True, 'import pandas as pd\n'), ((727, 780), 'numpy.unique', 'np.unique', (["commits['author_name']"], {'return_counts': '(True)'}), "(commits['author_name'], return_counts=True)\n", (736, 780), True, 'import numpy as np\n'), ((1136, 1155), 'numpy.arange', 'np.arange', (['(0)', '(21)', '(5)'], {}), '(0, 21, 5)\n', (1145, 1155), True, 'import numpy as np\n'), ((1634, 1653), 'numpy.arange', 'np.arange', (['(0)', '(20)', '(5)'], {}), '(0, 20, 5)\n', (1643, 1653), True, 'import numpy as np\n'), ((2000, 2024), 'os.path.dirname', 'os.path.dirname', (['outname'], {}), '(outname)\n', (2015, 2024), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
import os
import sys
from xml.dom import minidom
from os.path import abspath, dirname, normcase, normpath, splitdrive
# http://code.activestate.com/recipes/302594/
def commonpath(a, b):
"""Returns the longest common to 'paths' path.
Unlike the strange os.path.commonprefix:
- this returns valid path
- accepts only two arguments
"""
a = normpath(normcase(a))
b = normpath(normcase(b))
if a == b:
return a
while len(a) > 0:
if a == b:
return a
if len(a) > len(b):
a = dirname(a)
else:
b = dirname(b)
return None
def relpath(target, base_path=os.curdir):
"""
Return a relative path to the target from either the current directory
or an optional base directory.
Base can be a directory specified either as absolute or relative
to current directory.
"""
base_path = normcase(abspath(normpath(base_path)))
target = normcase(abspath(normpath(target)))
if base_path == target:
return '.'
# On the windows platform the target may be on a different drive.
if splitdrive(base_path)[0] != splitdrive(target)[0]:
return None
common_path_len = len(commonpath(base_path, target))
# If there's no common prefix decrease common_path_len should be less by 1
base_drv, base_dir = splitdrive(base_path)
if common_path_len == len(base_drv) + 1:
common_path_len -= 1
# if base_path is root directory - no directories up
if base_dir == os.sep:
dirs_up = 0
else:
dirs_up = base_path[common_path_len:].count(os.sep)
ret = os.sep.join([os.pardir] * dirs_up)
if len(target) > common_path_len:
ret = os.path.join(ret, target[common_path_len + 1:])
return ret
def fix_file_params(name,element,base_path):
params = element.getElementsByTagName(name)
if params:
for param in params:
attr = param.getAttribute('name')
if attr == 'file':
path = param.firstChild.nodeValue
param.firstChild.nodeValue = relpath(path,base_path)
def fix_sym_file(element,base_path):
path = element.getAttribute('file')
if path:
element.setAttribute('file',relpath(path,base_path))
def fix_paths(mapfile,base_path):
doc = minidom.parse(mapfile)
datasources = doc.getElementsByTagName("Datasource")
for ds in datasources:
fix_file_params('Parameter',ds,base_path)
pnt_syms = doc.getElementsByTagName("PointSymbolizer")
for pnt in pnt_syms:
fix_sym_file(pnt,base_path)
open(mapfile,'wb').write(doc.toxml())
#def move_files(mapfile,base_path): pass
|
[
"os.path.splitdrive",
"os.path.dirname",
"xml.dom.minidom.parse",
"os.sep.join",
"os.path.normpath",
"os.path.join",
"os.path.normcase"
] |
[((1382, 1403), 'os.path.splitdrive', 'splitdrive', (['base_path'], {}), '(base_path)\n', (1392, 1403), False, 'from os.path import abspath, dirname, normcase, normpath, splitdrive\n'), ((1664, 1698), 'os.sep.join', 'os.sep.join', (['([os.pardir] * dirs_up)'], {}), '([os.pardir] * dirs_up)\n', (1675, 1698), False, 'import os\n'), ((2346, 2368), 'xml.dom.minidom.parse', 'minidom.parse', (['mapfile'], {}), '(mapfile)\n', (2359, 2368), False, 'from xml.dom import minidom\n'), ((398, 409), 'os.path.normcase', 'normcase', (['a'], {}), '(a)\n', (406, 409), False, 'from os.path import abspath, dirname, normcase, normpath, splitdrive\n'), ((428, 439), 'os.path.normcase', 'normcase', (['b'], {}), '(b)\n', (436, 439), False, 'from os.path import abspath, dirname, normcase, normpath, splitdrive\n'), ((1751, 1798), 'os.path.join', 'os.path.join', (['ret', 'target[common_path_len + 1:]'], {}), '(ret, target[common_path_len + 1:])\n', (1763, 1798), False, 'import os\n'), ((582, 592), 'os.path.dirname', 'dirname', (['a'], {}), '(a)\n', (589, 592), False, 'from os.path import abspath, dirname, normcase, normpath, splitdrive\n'), ((623, 633), 'os.path.dirname', 'dirname', (['b'], {}), '(b)\n', (630, 633), False, 'from os.path import abspath, dirname, normcase, normpath, splitdrive\n'), ((951, 970), 'os.path.normpath', 'normpath', (['base_path'], {}), '(base_path)\n', (959, 970), False, 'from os.path import abspath, dirname, normcase, normpath, splitdrive\n'), ((1003, 1019), 'os.path.normpath', 'normpath', (['target'], {}), '(target)\n', (1011, 1019), False, 'from os.path import abspath, dirname, normcase, normpath, splitdrive\n'), ((1148, 1169), 'os.path.splitdrive', 'splitdrive', (['base_path'], {}), '(base_path)\n', (1158, 1169), False, 'from os.path import abspath, dirname, normcase, normpath, splitdrive\n'), ((1176, 1194), 'os.path.splitdrive', 'splitdrive', (['target'], {}), '(target)\n', (1186, 1194), False, 'from os.path import abspath, dirname, normcase, normpath, 
splitdrive\n')]
|
"""Static configuration for the telegram bot, read from the environment."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Populate os.environ from a local .env file before any settings are read.
load_dotenv()
# Credentials (None when the corresponding variable is unset).
API_TOKEN = os.getenv("TELEGRAM_API_TOKEN")
SECRET_KEY = os.getenv("SECRET_KEY")
ACCESS_KEY = os.getenv("ACCESS_KEY")
# NOTE(review): the storage host is read from the IN_DOCKER variable -
# confirm this is intentional and not a leftover from an env-var rename.
OBJECT_STORAGE_IP = os.getenv("IN_DOCKER", "127.0.0.1")
OBJECT_STORAGE_PORT = os.getenv("OBJECT_STORAGE_PORT", "9000")
# Repository root: two directory levels up from this file.
REPOSITORY_ROOT = Path(__file__).parent.parent
# Image/animation formats and output file names.
PNG = "PNG"
GIF = "GIF"
GIF_FILE_NAME = "water_gif.gif"
PNG_IMAGE = "image_with_text.png"
ZIP_FILE_NAME = "gifs.zip"
RGBA = "RGBA"
# Drawing parameters - presumably for rendering watermark text
# (DURATION looks like a GIF frame duration in ms); confirm against usage.
FILL_COLOR = "white"
STROKE_COLOR = "black"
DURATION = 350
STROKE_WIDTH = 4
SIZE = 20
HEIGHT_PROPORTION = 0.9
WIDTH_PROPORTION = 0.7
REGULAR_TTF = REPOSITORY_ROOT / "telegram_bot" / "fonts" / "AbyssinicaSIL-Regular.ttf" # noqa: E501
# S3/MinIO custom metadata header names.
USER_ID = "X-Amz-Meta-User_id"
PRIVATE = "X-Amz-Meta-Private"
|
[
"dotenv.load_dotenv",
"pathlib.Path",
"os.getenv"
] |
[((68, 81), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (79, 81), False, 'from dotenv import load_dotenv\n'), ((95, 126), 'os.getenv', 'os.getenv', (['"""TELEGRAM_API_TOKEN"""'], {}), "('TELEGRAM_API_TOKEN')\n", (104, 126), False, 'import os\n'), ((141, 164), 'os.getenv', 'os.getenv', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (150, 164), False, 'import os\n'), ((178, 201), 'os.getenv', 'os.getenv', (['"""ACCESS_KEY"""'], {}), "('ACCESS_KEY')\n", (187, 201), False, 'import os\n'), ((223, 258), 'os.getenv', 'os.getenv', (['"""IN_DOCKER"""', '"""127.0.0.1"""'], {}), "('IN_DOCKER', '127.0.0.1')\n", (232, 258), False, 'import os\n'), ((281, 321), 'os.getenv', 'os.getenv', (['"""OBJECT_STORAGE_PORT"""', '"""9000"""'], {}), "('OBJECT_STORAGE_PORT', '9000')\n", (290, 321), False, 'import os\n'), ((341, 355), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (345, 355), False, 'from pathlib import Path\n')]
|
import pytest
from chirc import replies
import chirc.tests.fixtures as fixtures
class TestWHOIS(object):
    """Tests for the WHOIS command."""
    @pytest.mark.category("WHOIS")
    def test_whois1(self, irc_session):
        """
        Test doing a WHOIS on a user (user2) that is not in any channels.
        """
        client1 = irc_session.connect_user("user1", "User One")
        # user2 must be registered so the WHOIS below has a target.
        client2 = irc_session.connect_user("user2", "User Two")
        client1.send_cmd("WHOIS user2")
        # Expected replies, in order: user info, server info, end of list.
        reply = irc_session.get_reply(client1, expect_code = replies.RPL_WHOISUSER,
                                      expect_nparams = 5, long_param_re = "User Two")
        reply = irc_session.get_reply(client1, expect_code = replies.RPL_WHOISSERVER,
                                      expect_nparams = 3)
        reply = irc_session.get_reply(client1, expect_code = replies.RPL_ENDOFWHOIS,
                                      expect_nparams = 2, long_param_re = "End of WHOIS list")
    @pytest.mark.category("WHOIS")
    def test_whois_nonick(self, irc_session):
        """
        Test doing a WHOIS on a user (user2) that does not exist in the server.
        """
        client1 = irc_session.connect_user("user1", "User One")
        client1.send_cmd("WHOIS user2")
        reply = irc_session.get_reply(client1, expect_code = replies.ERR_NOSUCHNICK, expect_nick = "user1",
                                      expect_nparams = 2, expect_short_params = ["user2"],
                                      long_param_re = "No such nick/channel")
    @pytest.mark.category("WHOIS")
    def test_whois_params(self, irc_session):
        """
        Test sending a WHOIS without parameters..
        """
        client1 = irc_session.connect_user("user1", "User One")
        client1.send_cmd("WHOIS")
        # A parameterless WHOIS is expected to produce no reply at all.
        irc_session.get_reply(client1, expect_timeout = True)
    def _test_userchannels(self, irc_session, channels, nick, channelstring):
        # Helper: verify that the RPL_WHOISCHANNELS channel string matches the
        # expected membership of `nick` (with @/+ status prefixes) in `channels`.
        whois_channels = channelstring.strip().split()
        for qchannel in whois_channels:
            # A leading '@' (operator) or '+' (voice) is a status prefix.
            if qchannel[0] in ('@', '+'):
                modchar = qchannel[0]
                channel = qchannel[1:]
            else:
                modchar = ""
                channel = qchannel
            assert channel in channels, "RPL_WHOISCHANNELS: Includes unexpected channel {}".format(channel)
            users = channels[channel]
            assert modchar + nick in users, "RPL_WHOISCHANNELS: Expected {} to be in {} (for channels '{}')".format(modchar + nick, channel, channelstring)
        # The assignment expects a trailing space in the channel string.
        if channelstring[-1] != " ":
            pytest.fail("You may want to *very carefully* reread the specification for RPL_WHOISCHANNELS...")
    @pytest.mark.category("UPDATE_ASSIGNMENT2")
    def test_whois2(self, irc_session):
        """
        Given the following users and channels (@ denotes channel
        operators, and + denotes a user with voice privileges):

        #test1: @user1, user2, user3
        #test2: @user2
        #test3: @user3, @user4, user5, user6
        #test4: @user7, +user8, +user9, user1, user2
        #test5: @user1, @user5
        Not in a channel: user10, user11

        Test doing a WHOIS on user2
        """
        users = irc_session.connect_and_join_channels(fixtures.channels3)
        users["user1"].send_cmd("WHOIS user2")
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISUSER,
                                      expect_nparams = 5, long_param_re = "user2")
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISCHANNELS,
                                      expect_nparams = 2)
        # params[2] carries the channel string; skip the leading ':'.
        self._test_userchannels(irc_session, fixtures.channels3, "user2", reply.params[2][1:])
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISSERVER,
                                      expect_nparams = 3)
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_ENDOFWHOIS,
                                      expect_nparams = 2, long_param_re = "End of WHOIS list")
    @pytest.mark.category("UPDATE_ASSIGNMENT2")
    def test_whois3(self, irc_session):
        """
        Given the following users and channels (@ denotes channel
        operators, and + denotes a user with voice privileges):

        #test1: @user1, user2, user3
        #test2: @user2
        #test3: @user3, @user4, user5, user6
        #test4: @user7, +user8, +user9, user1, user2
        #test5: @user1, @user5
        Not in a channel: user10, user11

        Where, additionally, user8 is an IRCop and is away.
        Test doing a WHOIS on user8
        """
        users = irc_session.connect_and_join_channels(fixtures.channels3, aways=["user8"], ircops=["user8"])
        users["user1"].send_cmd("WHOIS user8")
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISUSER,
                                      expect_nparams = 5, long_param_re = "user8")
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISCHANNELS,
                                      expect_nparams = 2)
        self._test_userchannels(irc_session, fixtures.channels3, "user8", reply.params[2][1:])
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISSERVER,
                                      expect_nparams = 3)
        # Being away and being an IRCop each add an extra reply line.
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_AWAY, expect_nick = "user1",
                                      expect_nparams = 2, expect_short_params = ["user8"],
                                      long_param_re = "I'm away")
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_WHOISOPERATOR,
                                      expect_nparams = 2, expect_short_params = ["user8"],
                                      long_param_re = "is an IRC operator")
        reply = irc_session.get_reply(users["user1"], expect_code = replies.RPL_ENDOFWHOIS,
                                      expect_nparams = 2, long_param_re = "End of WHOIS list")
|
[
"pytest.fail",
"pytest.mark.category"
] |
[((113, 142), 'pytest.mark.category', 'pytest.mark.category', (['"""WHOIS"""'], {}), "('WHOIS')\n", (133, 142), False, 'import pytest\n'), ((1002, 1031), 'pytest.mark.category', 'pytest.mark.category', (['"""WHOIS"""'], {}), "('WHOIS')\n", (1022, 1031), False, 'import pytest\n'), ((1613, 1642), 'pytest.mark.category', 'pytest.mark.category', (['"""WHOIS"""'], {}), "('WHOIS')\n", (1633, 1642), False, 'import pytest\n'), ((2875, 2917), 'pytest.mark.category', 'pytest.mark.category', (['"""UPDATE_ASSIGNMENT2"""'], {}), "('UPDATE_ASSIGNMENT2')\n", (2895, 2917), False, 'import pytest\n'), ((4351, 4393), 'pytest.mark.category', 'pytest.mark.category', (['"""UPDATE_ASSIGNMENT2"""'], {}), "('UPDATE_ASSIGNMENT2')\n", (4371, 4393), False, 'import pytest\n'), ((2754, 2861), 'pytest.fail', 'pytest.fail', (['"""You may want to *very carefully* reread the specification for RPL_WHOISCHANNELS..."""'], {}), "(\n 'You may want to *very carefully* reread the specification for RPL_WHOISCHANNELS...'\n )\n", (2765, 2861), False, 'import pytest\n')]
|
import numpy
import pytest
from grunnur import dtypes
from grunnur.modules import render_with_modules
def test_normalize_type():
    """normalize_type() converts a dtype-like object into an equal numpy.dtype."""
    normalized = dtypes.normalize_type(numpy.int32)
    assert type(normalized) == numpy.dtype
    assert normalized == numpy.int32
def test_ctype_builtin():
    """A built-in scalar dtype maps to the corresponding C type name."""
    int_ctype = dtypes.ctype(numpy.int32)
    assert int_ctype == 'int'
def test_is_complex():
    """is_complex() is true for complex dtypes only."""
    for complex_dtype in (numpy.complex64, numpy.complex128):
        assert dtypes.is_complex(complex_dtype)
    assert not dtypes.is_complex(numpy.float64)
def test_is_double():
    """is_double() is true for 64-bit float and 128-bit complex dtypes."""
    for double_dtype in (numpy.float64, numpy.complex128):
        assert dtypes.is_double(double_dtype)
    assert not dtypes.is_double(numpy.complex64)
def test_is_integer():
    """is_integer() accepts integer dtypes and rejects floating-point ones."""
    integer_dtype, float_dtype = numpy.int32, numpy.float32
    assert dtypes.is_integer(integer_dtype)
    assert not dtypes.is_integer(float_dtype)
def test_is_real():
    """is_real() is true only for real floating-point dtypes."""
    assert dtypes.is_real(numpy.float32)
    for non_real in (numpy.complex64, numpy.int32):
        assert not dtypes.is_real(non_real)
def test_promote_type():
    """_promote_type() widens small dtypes to at least 32 bits, preserving kind."""
    promotions = [
        (numpy.int8, numpy.int32),
        (numpy.uint8, numpy.uint32),
        (numpy.float16, numpy.float32),
        (numpy.int32, numpy.int32),
    ]
    for source, promoted in promotions:
        assert dtypes._promote_type(source) == promoted
def test_result_type():
    """result_type() follows numpy promotion: int32 + float32 -> float64."""
    combined = dtypes.result_type(numpy.int32, numpy.float32)
    assert combined == numpy.float64
def test_min_scalar_type():
    """min_scalar_type() picks the smallest 32/64-bit dtype that can hold the value."""
    cases = [
        (1, {}, numpy.uint32),
        (-1, {}, numpy.int32),
        (1., {}, numpy.float32),
        # 2**31 - 1 still fits a signed 32-bit integer...
        (2**31 - 1, dict(force_signed=True), numpy.int32),
        # ...but 2**31 does not.
        (2**31, dict(force_signed=True), numpy.int64),
    ]
    for value, kwds, expected in cases:
        assert dtypes.min_scalar_type(value, **kwds) == expected
def test_detect_type():
    """detect_type() keeps wide numpy scalars but promotes narrow/Python scalars."""
    samples = [
        (numpy.int8(-1), numpy.int32),
        (numpy.int64(-1), numpy.int64),
        (-1, numpy.int32),
        (-1., numpy.float32),
    ]
    for value, expected in samples:
        assert dtypes.detect_type(value) == expected
def test_complex_for():
    """complex_for() maps real dtypes to complex ones and rejects everything else."""
    assert dtypes.complex_for(numpy.float32) == numpy.complex64
    assert dtypes.complex_for(numpy.float64) == numpy.complex128
    for bad_dtype in (numpy.complex64, numpy.int32):
        with pytest.raises(ValueError):
            dtypes.complex_for(bad_dtype)
def test_real_for():
    """real_for() maps complex dtypes to their component real dtype."""
    assert dtypes.real_for(numpy.complex64) == numpy.float32
    assert dtypes.real_for(numpy.complex128) == numpy.float64
    for bad_dtype in (numpy.float32, numpy.int32):
        with pytest.raises(ValueError):
            dtypes.real_for(bad_dtype)
def test_complex_ctr():
    """complex_ctr() renders the C-side constructor macro invocation."""
    ctr = dtypes.complex_ctr(numpy.complex64)
    assert ctr == "COMPLEX_CTR(float2)"
def test_cast():
    """cast() produces a callable that converts arbitrary scalars to the target dtype."""
    to_uint64 = dtypes.cast(numpy.uint64)
    for converted in (to_uint64(1), to_uint64(numpy.int32(1)), to_uint64(numpy.uint64(1))):
        assert converted.dtype == numpy.uint64
        assert converted == 1
def test_c_constant():
    """c_constant() renders scalars, arrays and structs as C literals."""
    # Scalars carry C suffixes matching their dtype.
    scalar_cases = [
        (1, "1"),
        (numpy.uint64(1), "1UL"),
        (numpy.int64(-1), "-1L"),
        (numpy.float64(1.), "1.0"),
        (numpy.float32(1.), "1.0f"),
        (numpy.complex64(1 + 2j), "COMPLEX_CTR(float2)(1.0f, 2.0f)"),
        (numpy.complex128(1 + 2j), "COMPLEX_CTR(double2)(1.0, 2.0)"),
    ]
    for value, rendered in scalar_cases:
        assert dtypes.c_constant(value) == rendered
    # Arrays become brace-enclosed initializer lists.
    assert dtypes.c_constant(numpy.array([1, 2, 3], numpy.float32)) == "{1.0f, 2.0f, 3.0f}"
    # Struct values render field by field.
    struct_dtype = numpy.dtype([('val1', numpy.int32), ('val2', numpy.float32)])
    struct_val = numpy.empty((), struct_dtype)
    struct_val['val1'] = 1
    struct_val['val2'] = 2
    assert dtypes.c_constant(struct_val) == "{1, 2.0f}"
    # An explicit dtype overrides the value's own type.
    assert dtypes.c_constant(1, numpy.float32) == "1.0f"
def test__align_simple():
    """A scalar dtype is wrapped with its own itemsize as alignment."""
    scalar = numpy.dtype('int32')
    assert dtypes._align(scalar) == dtypes.WrappedType(scalar, scalar.itemsize)
def test__align_array():
    """An array dtype aligns on the element size, not the full array size."""
    element = numpy.dtype('int32')
    arr = numpy.dtype((element, 3))
    assert dtypes._align(arr) == dtypes.WrappedType(arr, element.itemsize)
def test__align_non_aligned_struct():
    """Aligning a packed struct inserts natural offsets and pads the itemsize."""
    packed = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32]))
    expected_dtype = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32],
        offsets=[0, 2, 4],
        itemsize=8,
        aligned=True))
    wrappers = dict(
        x=dtypes.WrappedType(numpy.dtype('int8'), 1),
        y=dtypes.WrappedType(numpy.dtype('int16'), 2),
        z=dtypes.WrappedType(numpy.dtype('int32'), 4))
    expected = dtypes.WrappedType(
        expected_dtype, 4, explicit_alignment=None, wrapped_fields=wrappers,
        field_alignments=dict(x=None, y=None, z=None))
    assert dtypes._align(packed) == expected
def test__align_aligned_struct():
    """A struct that is already naturally aligned is wrapped unchanged."""
    aligned = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32],
        offsets=[0, 2, 4],
        itemsize=8,
        aligned=True))
    wrappers = dict(
        x=dtypes.WrappedType(numpy.dtype('int8'), 1),
        y=dtypes.WrappedType(numpy.dtype('int16'), 2),
        z=dtypes.WrappedType(numpy.dtype('int32'), 4))
    expected = dtypes.WrappedType(
        aligned, 4, explicit_alignment=None, wrapped_fields=wrappers,
        field_alignments=dict(x=None, y=None, z=None))
    assert dtypes._align(aligned) == expected
def test__align_aligned_struct_custom_itemsize():
    """An enlarged power-of-two itemsize becomes an explicit struct alignment."""
    aligned = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32],
        offsets=[0, 2, 4],
        itemsize=16,
        aligned=True))
    wrappers = dict(
        x=dtypes.WrappedType(numpy.dtype('int8'), 1),
        y=dtypes.WrappedType(numpy.dtype('int16'), 2),
        z=dtypes.WrappedType(numpy.dtype('int32'), 4))
    expected = dtypes.WrappedType(
        aligned, 16, explicit_alignment=16, wrapped_fields=wrappers,
        field_alignments=dict(x=None, y=None, z=None))
    assert dtypes._align(aligned) == expected
def test__align_custom_field_offsets():
    """Non-natural field offsets are preserved via per-field alignment overrides.

    (The original test also built an unaligned ``dtype`` with the same layout
    that was never used; that dead code is removed.)
    """
    dtype_aligned = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32],
        offsets=[0, 4, 16],
        itemsize=32,
        aligned=True))
    res = dtypes._align(dtype_aligned)
    wt_x = dtypes.WrappedType(numpy.dtype('int8'), 1)
    wt_y = dtypes.WrappedType(numpy.dtype('int16'), 2)
    wt_z = dtypes.WrappedType(numpy.dtype('int32'), 4)
    ref = dtypes.WrappedType(
        dtype_aligned, 16, explicit_alignment=None, wrapped_fields=dict(x=wt_x, y=wt_y, z=wt_z),
        field_alignments=dict(x=None, y=4, z=16))
    assert res == ref
def test__align_aligned_struct_invalid_itemsize():
    """An explicit itemsize that is not a power of two cannot be aligned."""
    bad = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32],
        offsets=[0, 2, 4],
        itemsize=20,  # not a power of 2, an error should be raised
        aligned=True))
    with pytest.raises(ValueError):
        dtypes._align(bad)
def test_align_nested():
    """align() recursively aligns nested struct and array fields."""
    inner = numpy.dtype(dict(
        names=['val1', 'pad'],
        formats=[numpy.int8, numpy.int8]))
    outer = numpy.dtype(dict(
        names=['pad', 'struct_arr', 'regular_arr'],
        formats=[numpy.int32, numpy.dtype((inner, 2)), numpy.dtype((numpy.int16, 3))]))
    expected = numpy.dtype(dict(
        names=['pad', 'struct_arr', 'regular_arr'],
        formats=[numpy.int32, (inner, (2,)), (numpy.int16, (3,))],
        offsets=[0, 4, 8],
        itemsize=16))
    aligned = dtypes.align(outer)
    assert aligned.isalignedstruct
    assert aligned == expected
def test_align_preserve_nested_aligned():
    """An explicitly aligned nested dtype keeps its padding after align()."""
    int3 = numpy.dtype(dict(names=['x'], formats=[(numpy.int32, 3)], itemsize=16, aligned=True))
    outer = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int32, int3, numpy.int32]))
    expected = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int32, int3, numpy.int32],
        offsets=[0, 16, 32],
        itemsize=48,
        aligned=True))
    aligned = dtypes.align(outer)
    assert aligned.isalignedstruct
    assert aligned == expected
def test_lcm():
    """_lcm() computes the least common multiple of one or more integers."""
    for args, expected in [((10,), 10), ((15, 20), 60), ((16, 32, 24), 96)]:
        assert dtypes._lcm(*args) == expected
def test_find_minimum_alignment():
    """_find_minimum_alignment() finds the smallest alignment yielding a given offset."""
    # The base alignment suffices: 12 is the next multiple of 4 after 9.
    assert dtypes._find_minimum_alignment(12, 4, 9) == 4
    # Offset 16 needs alignment 8, since 16 is the next multiple of 8 after 9.
    assert dtypes._find_minimum_alignment(16, 4, 9) == 8
    # An offset that is not a multiple of the base alignment is invalid.
    with pytest.raises(ValueError):
        dtypes._find_minimum_alignment(13, 4, 9)
    # An offset too far ahead (and not a power of 2) needs explicit
    # padding; alignment alone cannot express it.
    with pytest.raises(ValueError):
        dtypes._find_minimum_alignment(24, 4, 9)
def test_wrapped_type_repr():
    """repr() of a WrappedType round-trips through eval()."""
    dtype_aligned = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32],
        offsets=[0, 4, 16],
        itemsize=32,
        aligned=True))
    wrappers = dict(
        x=dtypes.WrappedType(numpy.dtype('int8'), 1),
        y=dtypes.WrappedType(numpy.dtype('int16'), 2),
        z=dtypes.WrappedType(numpy.dtype('int32'), 4))
    wt = dtypes.WrappedType(
        dtype_aligned, 16, explicit_alignment=None, wrapped_fields=wrappers,
        field_alignments=dict(x=None, y=4, z=16))
    # The names referenced by the repr must be present in the eval namespace.
    namespace = dict(
        numpy=numpy, WrappedType=dtypes.WrappedType,
        int8=numpy.int8, int16=numpy.int16, int32=numpy.int32)
    assert eval(repr(wt), namespace) == wt
def test_ctype_struct():
    """ctype() on an aligned struct dtype renders a typedef module."""
    struct_dtype = dtypes.align(numpy.dtype([('val1', numpy.int32), ('val2', numpy.float32)]))
    struct_ctype = dtypes.ctype(struct_dtype)
    rendered = render_with_modules("${ctype}", render_globals=dict(ctype=struct_ctype)).strip()
    assert rendered == (
        'typedef struct _mod__module_0__ {\n'
        '    int val1;\n'
        '    float val2;\n'
        '} _mod__module_0_;\n\n\n'
        '_mod__module_0_')
def test_ctype_struct_nested():
    """ctype() renders nested struct dtypes as separate typedefs, innermost last-numbered."""
    inner = numpy.dtype(dict(
        names=['val1', 'pad'],
        formats=[numpy.int8, numpy.int8]))
    outer = dtypes.align(numpy.dtype(dict(
        names=['pad', 'struct_arr', 'regular_arr'],
        formats=[numpy.int32, numpy.dtype((inner, 2)), numpy.dtype((numpy.int16, 3))])))
    rendered = render_with_modules(
        "${ctype}", render_globals=dict(ctype=dtypes.ctype(outer))).strip()
    assert rendered == (
        'typedef struct _mod__module_1__ {\n'
        '    char val1;\n'
        '    char pad;\n'
        '} _mod__module_1_;\n\n\n'
        'typedef struct _mod__module_0__ {\n'
        '    int pad;\n'
        '    _mod__module_1_ struct_arr[2];\n'
        '    short regular_arr[3];\n'
        '} _mod__module_0_;\n\n\n'
        '_mod__module_0_')
def test_ctype_to_ctype_struct():
    """ctype() falls back to ctype_struct() for non-builtin (struct) dtypes."""
    struct_dtype = dtypes.align(numpy.dtype([('val1', numpy.int32), ('val2', numpy.float32)]))
    rendered = render_with_modules(
        "${ctype}", render_globals=dict(ctype=dtypes.ctype(struct_dtype))).strip()
    assert rendered == (
        'typedef struct _mod__module_0__ {\n'
        '    int val1;\n'
        '    float val2;\n'
        '} _mod__module_0_;\n\n\n'
        '_mod__module_0_')
def test_ctype_struct_aligned():
    """ctype_struct() emits ALIGN() qualifiers for fields with explicit offsets.

    Renamed from ``test_ctype_struct``: the original name redefined the earlier
    ``test_ctype_struct`` in this file, shadowing it so pytest never ran it.
    """
    dtype = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32],
        offsets=[0, 4, 16],
        itemsize=64,
        aligned=True))
    ctype = dtypes.ctype_struct(dtype)
    src = render_with_modules("${ctype}", render_globals=dict(ctype=ctype)).strip()
    assert src == (
        'typedef struct _mod__module_0__ {\n'
        '    char x;\n'
        '    short ALIGN(4) y;\n'
        '    int ALIGN(16) z;\n'
        '} ALIGN(64) _mod__module_0_;\n\n\n'
        '_mod__module_0_')
def test_ctype_struct_ignore_alignment():
    """With ignore_alignment=True no ALIGN() qualifiers are emitted."""
    dtype = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32],
        offsets=[0, 4, 16],
        itemsize=64,
        aligned=True))
    rendered = render_with_modules(
        "${ctype}",
        render_globals=dict(ctype=dtypes.ctype_struct(dtype, ignore_alignment=True))).strip()
    assert rendered == (
        'typedef struct _mod__module_0__ {\n'
        '    char x;\n'
        '    short y;\n'
        '    int z;\n'
        '} _mod__module_0_;\n\n\n'
        '_mod__module_0_')
def test_ctype_struct_checks_alignment():
    """ctype_struct() refuses a struct dtype that was not aligned first."""
    packed = numpy.dtype(dict(
        names=['x', 'y', 'z'],
        formats=[numpy.int8, numpy.int16, numpy.int32]))
    with pytest.raises(ValueError):
        dtypes.ctype_struct(packed)
def test_ctype_struct_for_non_struct():
    """ctype_struct() rejects array dtypes and simple scalar types alike."""
    for non_struct in (numpy.dtype((numpy.int32, 3)), numpy.int32):
        with pytest.raises(ValueError):
            dtypes.ctype_struct(non_struct)
def test_flatten_dtype():
    """flatten_dtype() expands nested structs/arrays into (path, dtype) pairs.

    The original test assigned the result to ``res`` and then ignored it,
    calling ``flatten_dtype`` a second time inside the assert; the captured
    value is asserted on directly instead.
    """
    dtype_nested = numpy.dtype(dict(
        names=['val1', 'pad'],
        formats=[numpy.int8, numpy.int8]))
    dtype = numpy.dtype(dict(
        names=['pad', 'struct_arr', 'regular_arr'],
        formats=[numpy.int32, numpy.dtype((dtype_nested, 2)), numpy.dtype((numpy.int16, 3))]))
    res = dtypes.flatten_dtype(dtype)
    ref = [
        (['pad'], numpy.dtype('int32')),
        (['struct_arr', 0, 'val1'], numpy.dtype('int8')),
        (['struct_arr', 0, 'pad'], numpy.dtype('int8')),
        (['struct_arr', 1, 'val1'], numpy.dtype('int8')),
        (['struct_arr', 1, 'pad'], numpy.dtype('int8')),
        (['regular_arr', 0], numpy.dtype('int16')),
        (['regular_arr', 1], numpy.dtype('int16')),
        (['regular_arr', 2], numpy.dtype('int16'))]
    assert res == ref
def test_c_path():
    """c_path() renders a flattened field path as a C accessor expression."""
    field_path = ['struct_arr', 0, 'val1']
    assert dtypes.c_path(field_path) == 'struct_arr[0].val1'
def test_extract_field():
    """extract_field() pulls a (possibly nested) field out of a structured array."""
    inner = numpy.dtype(dict(
        names=['val1', 'pad'],
        formats=[numpy.int8, numpy.int8]))
    outer = numpy.dtype(dict(
        names=['pad', 'struct_arr', 'regular_arr'],
        formats=[numpy.int32, numpy.dtype((inner, 2)), numpy.dtype((numpy.int16, 3))]))
    nested_arr = numpy.empty(16, outer)
    nested_arr['struct_arr']['val1'][:, 1] = numpy.arange(16)
    assert (dtypes.extract_field(nested_arr, ['struct_arr', 1, 'val1']) == numpy.arange(16)).all()
    flat_arr = numpy.empty(16, inner)
    flat_arr['val1'] = numpy.arange(16)
    assert (dtypes.extract_field(flat_arr, ['val1']) == numpy.arange(16)).all()
|
[
"numpy.uint64",
"grunnur.dtypes.is_double",
"numpy.empty",
"grunnur.dtypes.is_complex",
"grunnur.dtypes.detect_type",
"grunnur.dtypes._align",
"grunnur.dtypes._find_minimum_alignment",
"numpy.arange",
"grunnur.dtypes.align",
"numpy.float64",
"numpy.complex64",
"numpy.int8",
"grunnur.dtypes.c_constant",
"grunnur.dtypes.flatten_dtype",
"pytest.raises",
"grunnur.dtypes.normalize_type",
"grunnur.dtypes.is_real",
"numpy.int64",
"numpy.int32",
"grunnur.dtypes._promote_type",
"grunnur.dtypes.WrappedType",
"numpy.complex128",
"grunnur.dtypes.complex_for",
"grunnur.dtypes.cast",
"grunnur.dtypes.extract_field",
"grunnur.dtypes.ctype_struct",
"grunnur.dtypes.min_scalar_type",
"grunnur.dtypes.ctype",
"grunnur.dtypes._lcm",
"grunnur.dtypes.c_path",
"numpy.dtype",
"grunnur.dtypes.complex_ctr",
"numpy.float32",
"numpy.array",
"grunnur.dtypes.real_for",
"grunnur.dtypes.is_integer",
"grunnur.dtypes.result_type"
] |
[((144, 178), 'grunnur.dtypes.normalize_type', 'dtypes.normalize_type', (['numpy.int32'], {}), '(numpy.int32)\n', (165, 178), False, 'from grunnur import dtypes\n'), ((359, 393), 'grunnur.dtypes.is_complex', 'dtypes.is_complex', (['numpy.complex64'], {}), '(numpy.complex64)\n', (376, 393), False, 'from grunnur import dtypes\n'), ((405, 440), 'grunnur.dtypes.is_complex', 'dtypes.is_complex', (['numpy.complex128'], {}), '(numpy.complex128)\n', (422, 440), False, 'from grunnur import dtypes\n'), ((524, 555), 'grunnur.dtypes.is_double', 'dtypes.is_double', (['numpy.float64'], {}), '(numpy.float64)\n', (540, 555), False, 'from grunnur import dtypes\n'), ((567, 601), 'grunnur.dtypes.is_double', 'dtypes.is_double', (['numpy.complex128'], {}), '(numpy.complex128)\n', (583, 601), False, 'from grunnur import dtypes\n'), ((687, 717), 'grunnur.dtypes.is_integer', 'dtypes.is_integer', (['numpy.int32'], {}), '(numpy.int32)\n', (704, 717), False, 'from grunnur import dtypes\n'), ((799, 828), 'grunnur.dtypes.is_real', 'dtypes.is_real', (['numpy.float32'], {}), '(numpy.float32)\n', (813, 828), False, 'from grunnur import dtypes\n'), ((2687, 2712), 'grunnur.dtypes.cast', 'dtypes.cast', (['numpy.uint64'], {}), '(numpy.uint64)\n', (2698, 2712), False, 'from grunnur import dtypes\n'), ((3465, 3526), 'numpy.dtype', 'numpy.dtype', (["[('val1', numpy.int32), ('val2', numpy.float32)]"], {}), "([('val1', numpy.int32), ('val2', numpy.float32)])\n", (3476, 3526), False, 'import numpy\n'), ((3537, 3559), 'numpy.empty', 'numpy.empty', (['()', 'dtype'], {}), '((), dtype)\n', (3548, 3559), False, 'import numpy\n'), ((3766, 3786), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (3777, 3786), False, 'import numpy\n'), ((3797, 3817), 'grunnur.dtypes._align', 'dtypes._align', (['dtype'], {}), '(dtype)\n', (3810, 3817), False, 'from grunnur import dtypes\n'), ((3828, 3869), 'grunnur.dtypes.WrappedType', 'dtypes.WrappedType', (['dtype', 'dtype.itemsize'], {}), '(dtype, 
dtype.itemsize)\n', (3846, 3869), False, 'from grunnur import dtypes\n'), ((3931, 3951), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (3942, 3951), False, 'import numpy\n'), ((3968, 3991), 'numpy.dtype', 'numpy.dtype', (['(dtype, 3)'], {}), '((dtype, 3))\n', (3979, 3991), False, 'import numpy\n'), ((4002, 4026), 'grunnur.dtypes._align', 'dtypes._align', (['dtype_arr'], {}), '(dtype_arr)\n', (4015, 4026), False, 'from grunnur import dtypes\n'), ((4037, 4082), 'grunnur.dtypes.WrappedType', 'dtypes.WrappedType', (['dtype_arr', 'dtype.itemsize'], {}), '(dtype_arr, dtype.itemsize)\n', (4055, 4082), False, 'from grunnur import dtypes\n'), ((4273, 4293), 'grunnur.dtypes._align', 'dtypes._align', (['dtype'], {}), '(dtype)\n', (4286, 4293), False, 'from grunnur import dtypes\n'), ((5100, 5128), 'grunnur.dtypes._align', 'dtypes._align', (['dtype_aligned'], {}), '(dtype_aligned)\n', (5113, 5128), False, 'from grunnur import dtypes\n'), ((5756, 5784), 'grunnur.dtypes._align', 'dtypes._align', (['dtype_aligned'], {}), '(dtype_aligned)\n', (5769, 5784), False, 'from grunnur import dtypes\n'), ((6570, 6598), 'grunnur.dtypes._align', 'dtypes._align', (['dtype_aligned'], {}), '(dtype_aligned)\n', (6583, 6598), False, 'from grunnur import dtypes\n'), ((7875, 7894), 'grunnur.dtypes.align', 'dtypes.align', (['dtype'], {}), '(dtype)\n', (7887, 7894), False, 'from grunnur import dtypes\n'), ((8454, 8473), 'grunnur.dtypes.align', 'dtypes.align', (['dtype'], {}), '(dtype)\n', (8466, 8473), False, 'from grunnur import dtypes\n'), ((10318, 10337), 'grunnur.dtypes.ctype', 'dtypes.ctype', (['dtype'], {}), '(dtype)\n', (10330, 10337), False, 'from grunnur import dtypes\n'), ((10945, 10964), 'grunnur.dtypes.align', 'dtypes.align', (['dtype'], {}), '(dtype)\n', (10957, 10964), False, 'from grunnur import dtypes\n'), ((10977, 10996), 'grunnur.dtypes.ctype', 'dtypes.ctype', (['dtype'], {}), '(dtype)\n', (10989, 10996), False, 'from grunnur import dtypes\n'), ((11663, 11682), 
'grunnur.dtypes.ctype', 'dtypes.ctype', (['dtype'], {}), '(dtype)\n', (11675, 11682), False, 'from grunnur import dtypes\n'), ((12182, 12208), 'grunnur.dtypes.ctype_struct', 'dtypes.ctype_struct', (['dtype'], {}), '(dtype)\n', (12201, 12208), False, 'from grunnur import dtypes\n'), ((12769, 12818), 'grunnur.dtypes.ctype_struct', 'dtypes.ctype_struct', (['dtype'], {'ignore_alignment': '(True)'}), '(dtype, ignore_alignment=True)\n', (12788, 12818), False, 'from grunnur import dtypes\n'), ((13394, 13423), 'numpy.dtype', 'numpy.dtype', (['(numpy.int32, 3)'], {}), '((numpy.int32, 3))\n', (13405, 13423), False, 'import numpy\n'), ((13957, 13984), 'grunnur.dtypes.flatten_dtype', 'dtypes.flatten_dtype', (['dtype'], {}), '(dtype)\n', (13977, 13984), False, 'from grunnur import dtypes\n'), ((14894, 14916), 'numpy.empty', 'numpy.empty', (['(16)', 'dtype'], {}), '(16, dtype)\n', (14905, 14916), False, 'import numpy\n'), ((14952, 14968), 'numpy.arange', 'numpy.arange', (['(16)'], {}), '(16)\n', (14964, 14968), False, 'import numpy\n'), ((15068, 15097), 'numpy.empty', 'numpy.empty', (['(16)', 'dtype_nested'], {}), '(16, dtype_nested)\n', (15079, 15097), False, 'import numpy\n'), ((15114, 15130), 'numpy.arange', 'numpy.arange', (['(16)'], {}), '(16)\n', (15126, 15130), False, 'import numpy\n'), ((288, 313), 'grunnur.dtypes.ctype', 'dtypes.ctype', (['numpy.int32'], {}), '(numpy.int32)\n', (300, 313), False, 'from grunnur import dtypes\n'), ((456, 488), 'grunnur.dtypes.is_complex', 'dtypes.is_complex', (['numpy.float64'], {}), '(numpy.float64)\n', (473, 488), False, 'from grunnur import dtypes\n'), ((617, 650), 'grunnur.dtypes.is_double', 'dtypes.is_double', (['numpy.complex64'], {}), '(numpy.complex64)\n', (633, 650), False, 'from grunnur import dtypes\n'), ((733, 765), 'grunnur.dtypes.is_integer', 'dtypes.is_integer', (['numpy.float32'], {}), '(numpy.float32)\n', (750, 765), False, 'from grunnur import dtypes\n'), ((844, 875), 'grunnur.dtypes.is_real', 'dtypes.is_real', 
(['numpy.complex64'], {}), '(numpy.complex64)\n', (858, 875), False, 'from grunnur import dtypes\n'), ((891, 918), 'grunnur.dtypes.is_real', 'dtypes.is_real', (['numpy.int32'], {}), '(numpy.int32)\n', (905, 918), False, 'from grunnur import dtypes\n'), ((957, 989), 'grunnur.dtypes._promote_type', 'dtypes._promote_type', (['numpy.int8'], {}), '(numpy.int8)\n', (977, 989), False, 'from grunnur import dtypes\n'), ((1016, 1049), 'grunnur.dtypes._promote_type', 'dtypes._promote_type', (['numpy.uint8'], {}), '(numpy.uint8)\n', (1036, 1049), False, 'from grunnur import dtypes\n'), ((1077, 1112), 'grunnur.dtypes._promote_type', 'dtypes._promote_type', (['numpy.float16'], {}), '(numpy.float16)\n', (1097, 1112), False, 'from grunnur import dtypes\n'), ((1141, 1174), 'grunnur.dtypes._promote_type', 'dtypes._promote_type', (['numpy.int32'], {}), '(numpy.int32)\n', (1161, 1174), False, 'from grunnur import dtypes\n'), ((1227, 1273), 'grunnur.dtypes.result_type', 'dtypes.result_type', (['numpy.int32', 'numpy.float32'], {}), '(numpy.int32, numpy.float32)\n', (1245, 1273), False, 'from grunnur import dtypes\n'), ((1332, 1357), 'grunnur.dtypes.min_scalar_type', 'dtypes.min_scalar_type', (['(1)'], {}), '(1)\n', (1354, 1357), False, 'from grunnur import dtypes\n'), ((1385, 1411), 'grunnur.dtypes.min_scalar_type', 'dtypes.min_scalar_type', (['(-1)'], {}), '(-1)\n', (1407, 1411), False, 'from grunnur import dtypes\n'), ((1438, 1465), 'grunnur.dtypes.min_scalar_type', 'dtypes.min_scalar_type', (['(1.0)'], {}), '(1.0)\n', (1460, 1465), False, 'from grunnur import dtypes\n'), ((1494, 1548), 'grunnur.dtypes.min_scalar_type', 'dtypes.min_scalar_type', (['(2 ** 31 - 1)'], {'force_signed': '(True)'}), '(2 ** 31 - 1, force_signed=True)\n', (1516, 1548), False, 'from grunnur import dtypes\n'), ((1612, 1662), 'grunnur.dtypes.min_scalar_type', 'dtypes.min_scalar_type', (['(2 ** 31)'], {'force_signed': '(True)'}), '(2 ** 31, force_signed=True)\n', (1634, 1662), False, 'from grunnur import 
dtypes\n'), ((1836, 1858), 'grunnur.dtypes.detect_type', 'dtypes.detect_type', (['(-1)'], {}), '(-1)\n', (1854, 1858), False, 'from grunnur import dtypes\n'), ((1885, 1909), 'grunnur.dtypes.detect_type', 'dtypes.detect_type', (['(-1.0)'], {}), '(-1.0)\n', (1903, 1909), False, 'from grunnur import dtypes\n'), ((1963, 1996), 'grunnur.dtypes.complex_for', 'dtypes.complex_for', (['numpy.float32'], {}), '(numpy.float32)\n', (1981, 1996), False, 'from grunnur import dtypes\n'), ((2027, 2060), 'grunnur.dtypes.complex_for', 'dtypes.complex_for', (['numpy.float64'], {}), '(numpy.float64)\n', (2045, 2060), False, 'from grunnur import dtypes\n'), ((2090, 2115), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2103, 2115), False, 'import pytest\n'), ((2132, 2167), 'grunnur.dtypes.complex_for', 'dtypes.complex_for', (['numpy.complex64'], {}), '(numpy.complex64)\n', (2150, 2167), False, 'from grunnur import dtypes\n'), ((2177, 2202), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2190, 2202), False, 'import pytest\n'), ((2219, 2250), 'grunnur.dtypes.complex_for', 'dtypes.complex_for', (['numpy.int32'], {}), '(numpy.int32)\n', (2237, 2250), False, 'from grunnur import dtypes\n'), ((2285, 2317), 'grunnur.dtypes.real_for', 'dtypes.real_for', (['numpy.complex64'], {}), '(numpy.complex64)\n', (2300, 2317), False, 'from grunnur import dtypes\n'), ((2346, 2379), 'grunnur.dtypes.real_for', 'dtypes.real_for', (['numpy.complex128'], {}), '(numpy.complex128)\n', (2361, 2379), False, 'from grunnur import dtypes\n'), ((2406, 2431), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2419, 2431), False, 'import pytest\n'), ((2448, 2478), 'grunnur.dtypes.real_for', 'dtypes.real_for', (['numpy.float32'], {}), '(numpy.float32)\n', (2463, 2478), False, 'from grunnur import dtypes\n'), ((2488, 2513), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2501, 2513), False, 'import pytest\n'), ((2530, 
2558), 'grunnur.dtypes.real_for', 'dtypes.real_for', (['numpy.int32'], {}), '(numpy.int32)\n', (2545, 2558), False, 'from grunnur import dtypes\n'), ((2596, 2631), 'grunnur.dtypes.complex_ctr', 'dtypes.complex_ctr', (['numpy.complex64'], {}), '(numpy.complex64)\n', (2614, 2631), False, 'from grunnur import dtypes\n'), ((2894, 2914), 'grunnur.dtypes.c_constant', 'dtypes.c_constant', (['(1)'], {}), '(1)\n', (2911, 2914), False, 'from grunnur import dtypes\n'), ((3611, 3633), 'grunnur.dtypes.c_constant', 'dtypes.c_constant', (['val'], {}), '(val)\n', (3628, 3633), False, 'from grunnur import dtypes\n'), ((3680, 3715), 'grunnur.dtypes.c_constant', 'dtypes.c_constant', (['(1)', 'numpy.float32'], {}), '(1, numpy.float32)\n', (3697, 3715), False, 'from grunnur import dtypes\n'), ((4521, 4540), 'numpy.dtype', 'numpy.dtype', (['"""int8"""'], {}), "('int8')\n", (4532, 4540), False, 'import numpy\n'), ((4575, 4595), 'numpy.dtype', 'numpy.dtype', (['"""int16"""'], {}), "('int16')\n", (4586, 4595), False, 'import numpy\n'), ((4630, 4650), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (4641, 4650), False, 'import numpy\n'), ((5160, 5179), 'numpy.dtype', 'numpy.dtype', (['"""int8"""'], {}), "('int8')\n", (5171, 5179), False, 'import numpy\n'), ((5214, 5234), 'numpy.dtype', 'numpy.dtype', (['"""int16"""'], {}), "('int16')\n", (5225, 5234), False, 'import numpy\n'), ((5269, 5289), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (5280, 5289), False, 'import numpy\n'), ((5816, 5835), 'numpy.dtype', 'numpy.dtype', (['"""int8"""'], {}), "('int8')\n", (5827, 5835), False, 'import numpy\n'), ((5870, 5890), 'numpy.dtype', 'numpy.dtype', (['"""int16"""'], {}), "('int16')\n", (5881, 5890), False, 'import numpy\n'), ((5925, 5945), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (5936, 5945), False, 'import numpy\n'), ((6630, 6649), 'numpy.dtype', 'numpy.dtype', (['"""int8"""'], {}), "('int8')\n", (6641, 6649), False, 'import 
numpy\n'), ((6684, 6704), 'numpy.dtype', 'numpy.dtype', (['"""int16"""'], {}), "('int16')\n", (6695, 6704), False, 'import numpy\n'), ((6739, 6759), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (6750, 6759), False, 'import numpy\n'), ((7268, 7293), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7281, 7293), False, 'import pytest\n'), ((7303, 7331), 'grunnur.dtypes._align', 'dtypes._align', (['dtype_aligned'], {}), '(dtype_aligned)\n', (7316, 7331), False, 'from grunnur import dtypes\n'), ((8583, 8598), 'grunnur.dtypes._lcm', 'dtypes._lcm', (['(10)'], {}), '(10)\n', (8594, 8598), False, 'from grunnur import dtypes\n'), ((8616, 8635), 'grunnur.dtypes._lcm', 'dtypes._lcm', (['(15)', '(20)'], {}), '(15, 20)\n', (8627, 8635), False, 'from grunnur import dtypes\n'), ((8653, 8676), 'grunnur.dtypes._lcm', 'dtypes._lcm', (['(16)', '(32)', '(24)'], {}), '(16, 32, 24)\n', (8664, 8676), False, 'from grunnur import dtypes\n'), ((8820, 8860), 'grunnur.dtypes._find_minimum_alignment', 'dtypes._find_minimum_alignment', (['(12)', '(4)', '(9)'], {}), '(12, 4, 9)\n', (8850, 8860), False, 'from grunnur import dtypes\n'), ((9041, 9081), 'grunnur.dtypes._find_minimum_alignment', 'dtypes._find_minimum_alignment', (['(16)', '(4)', '(9)'], {}), '(16, 4, 9)\n', (9071, 9081), False, 'from grunnur import dtypes\n'), ((9159, 9184), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9172, 9184), False, 'import pytest\n'), ((9194, 9234), 'grunnur.dtypes._find_minimum_alignment', 'dtypes._find_minimum_alignment', (['(13)', '(4)', '(9)'], {}), '(13, 4, 9)\n', (9224, 9234), False, 'from grunnur import dtypes\n'), ((9365, 9390), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9378, 9390), False, 'import pytest\n'), ((9400, 9440), 'grunnur.dtypes._find_minimum_alignment', 'dtypes._find_minimum_alignment', (['(24)', '(4)', '(9)'], {}), '(24, 4, 9)\n', (9430, 9440), False, 'from grunnur import 
dtypes\n'), ((9700, 9719), 'numpy.dtype', 'numpy.dtype', (['"""int8"""'], {}), "('int8')\n", (9711, 9719), False, 'import numpy\n'), ((9754, 9774), 'numpy.dtype', 'numpy.dtype', (['"""int16"""'], {}), "('int16')\n", (9765, 9774), False, 'import numpy\n'), ((9809, 9829), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (9820, 9829), False, 'import numpy\n'), ((10243, 10304), 'numpy.dtype', 'numpy.dtype', (["[('val1', numpy.int32), ('val2', numpy.float32)]"], {}), "([('val1', numpy.int32), ('val2', numpy.float32)])\n", (10254, 10304), False, 'import numpy\n'), ((11588, 11649), 'numpy.dtype', 'numpy.dtype', (["[('val1', numpy.int32), ('val2', numpy.float32)]"], {}), "([('val1', numpy.int32), ('val2', numpy.float32)])\n", (11599, 11649), False, 'import numpy\n'), ((13278, 13303), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13291, 13303), False, 'import pytest\n'), ((13313, 13339), 'grunnur.dtypes.ctype_struct', 'dtypes.ctype_struct', (['dtype'], {}), '(dtype)\n', (13332, 13339), False, 'from grunnur import dtypes\n'), ((13433, 13458), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13446, 13458), False, 'import pytest\n'), ((13468, 13494), 'grunnur.dtypes.ctype_struct', 'dtypes.ctype_struct', (['dtype'], {}), '(dtype)\n', (13487, 13494), False, 'from grunnur import dtypes\n'), ((13561, 13586), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13574, 13586), False, 'import pytest\n'), ((13596, 13628), 'grunnur.dtypes.ctype_struct', 'dtypes.ctype_struct', (['numpy.int32'], {}), '(numpy.int32)\n', (13615, 13628), False, 'from grunnur import dtypes\n'), ((14436, 14463), 'grunnur.dtypes.flatten_dtype', 'dtypes.flatten_dtype', (['dtype'], {}), '(dtype)\n', (14456, 14463), False, 'from grunnur import dtypes\n'), ((14503, 14543), 'grunnur.dtypes.c_path', 'dtypes.c_path', (["['struct_arr', 0, 'val1']"], {}), "(['struct_arr', 0, 'val1'])\n", (14516, 14543), False, 'from grunnur 
import dtypes\n'), ((1732, 1746), 'numpy.int8', 'numpy.int8', (['(-1)'], {}), '(-1)\n', (1742, 1746), False, 'import numpy\n'), ((1793, 1808), 'numpy.int64', 'numpy.int64', (['(-1)'], {}), '(-1)\n', (1804, 1808), False, 'import numpy\n'), ((2743, 2757), 'numpy.int32', 'numpy.int32', (['(1)'], {}), '(1)\n', (2754, 2757), False, 'import numpy\n'), ((2765, 2780), 'numpy.uint64', 'numpy.uint64', (['(1)'], {}), '(1)\n', (2777, 2780), False, 'import numpy\n'), ((2951, 2966), 'numpy.uint64', 'numpy.uint64', (['(1)'], {}), '(1)\n', (2963, 2966), False, 'import numpy\n'), ((3006, 3021), 'numpy.int64', 'numpy.int64', (['(-1)'], {}), '(-1)\n', (3017, 3021), False, 'import numpy\n'), ((3061, 3079), 'numpy.float64', 'numpy.float64', (['(1.0)'], {}), '(1.0)\n', (3074, 3079), False, 'import numpy\n'), ((3118, 3136), 'numpy.float32', 'numpy.float32', (['(1.0)'], {}), '(1.0)\n', (3131, 3136), False, 'import numpy\n'), ((3176, 3201), 'numpy.complex64', 'numpy.complex64', (['(1 + 2.0j)'], {}), '(1 + 2.0j)\n', (3191, 3201), False, 'import numpy\n'), ((3267, 3293), 'numpy.complex128', 'numpy.complex128', (['(1 + 2.0j)'], {}), '(1 + 2.0j)\n', (3283, 3293), False, 'import numpy\n'), ((3371, 3408), 'numpy.array', 'numpy.array', (['[1, 2, 3]', 'numpy.float32'], {}), '([1, 2, 3], numpy.float32)\n', (3382, 3408), False, 'import numpy\n'), ((14015, 14035), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (14026, 14035), False, 'import numpy\n'), ((14074, 14093), 'numpy.dtype', 'numpy.dtype', (['"""int8"""'], {}), "('int8')\n", (14085, 14093), False, 'import numpy\n'), ((14131, 14150), 'numpy.dtype', 'numpy.dtype', (['"""int8"""'], {}), "('int8')\n", (14142, 14150), False, 'import numpy\n'), ((14189, 14208), 'numpy.dtype', 'numpy.dtype', (['"""int8"""'], {}), "('int8')\n", (14200, 14208), False, 'import numpy\n'), ((14246, 14265), 'numpy.dtype', 'numpy.dtype', (['"""int8"""'], {}), "('int8')\n", (14257, 14265), False, 'import numpy\n'), ((14297, 14317), 'numpy.dtype', 
'numpy.dtype', (['"""int16"""'], {}), "('int16')\n", (14308, 14317), False, 'import numpy\n'), ((14349, 14369), 'numpy.dtype', 'numpy.dtype', (['"""int16"""'], {}), "('int16')\n", (14360, 14369), False, 'import numpy\n'), ((14401, 14421), 'numpy.dtype', 'numpy.dtype', (['"""int16"""'], {}), "('int16')\n", (14412, 14421), False, 'import numpy\n'), ((14981, 15031), 'grunnur.dtypes.extract_field', 'dtypes.extract_field', (['a', "['struct_arr', 1, 'val1']"], {}), "(a, ['struct_arr', 1, 'val1'])\n", (15001, 15031), False, 'from grunnur import dtypes\n'), ((15035, 15051), 'numpy.arange', 'numpy.arange', (['(16)'], {}), '(16)\n', (15047, 15051), False, 'import numpy\n'), ((15143, 15176), 'grunnur.dtypes.extract_field', 'dtypes.extract_field', (['b', "['val1']"], {}), "(b, ['val1'])\n", (15163, 15176), False, 'from grunnur import dtypes\n'), ((15180, 15196), 'numpy.arange', 'numpy.arange', (['(16)'], {}), '(16)\n', (15192, 15196), False, 'import numpy\n'), ((7583, 7613), 'numpy.dtype', 'numpy.dtype', (['(dtype_nested, 2)'], {}), '((dtype_nested, 2))\n', (7594, 7613), False, 'import numpy\n'), ((7615, 7644), 'numpy.dtype', 'numpy.dtype', (['(numpy.int16, 3)'], {}), '((numpy.int16, 3))\n', (7626, 7644), False, 'import numpy\n'), ((10867, 10897), 'numpy.dtype', 'numpy.dtype', (['(dtype_nested, 2)'], {}), '((dtype_nested, 2))\n', (10878, 10897), False, 'import numpy\n'), ((10899, 10928), 'numpy.dtype', 'numpy.dtype', (['(numpy.int16, 3)'], {}), '((numpy.int16, 3))\n', (10910, 10928), False, 'import numpy\n'), ((13881, 13911), 'numpy.dtype', 'numpy.dtype', (['(dtype_nested, 2)'], {}), '((dtype_nested, 2))\n', (13892, 13911), False, 'import numpy\n'), ((13913, 13942), 'numpy.dtype', 'numpy.dtype', (['(numpy.int16, 3)'], {}), '((numpy.int16, 3))\n', (13924, 13942), False, 'import numpy\n'), ((14820, 14850), 'numpy.dtype', 'numpy.dtype', (['(dtype_nested, 2)'], {}), '((dtype_nested, 2))\n', (14831, 14850), False, 'import numpy\n'), ((14852, 14881), 'numpy.dtype', 'numpy.dtype', 
(['(numpy.int16, 3)'], {}), '((numpy.int16, 3))\n', (14863, 14881), False, 'import numpy\n')]
|
import torch
import torch.nn as nn
from .convolution import Conv2d
class SelfAttention2d(nn.Module):
    """Self-attention block for 2-D feature maps (SAGAN-style).

    Attends over all spatial positions of the input and adds the attended
    features back onto the input, scaled by a learned ``gamma`` that starts
    at zero — so the block is an identity mapping at initialisation.
    """

    def __init__(self, in_channels, k=8, bias=False, eq_lr=False, spectral_normalization=False):
        super().__init__()
        conv_kwargs = dict(kernel_size=1, stride=1, padding=0, bias=bias,
                           eq_lr=eq_lr, spectral_normalization=spectral_normalization)
        reduced = in_channels // k
        # 1x1 projections: wf/wg/wh produce query/key/value in the reduced
        # channel space, wv maps the attended features back to in_channels.
        self.wf = Conv2d(in_channels, reduced, **conv_kwargs)
        self.wg = Conv2d(in_channels, reduced, **conv_kwargs)
        self.wh = Conv2d(in_channels, reduced, **conv_kwargs)
        self.wv = Conv2d(reduced, in_channels, **conv_kwargs)
        # Learned residual weight, initialised to zero.
        self.gamma = nn.Parameter(torch.zeros(1), requires_grad=True)

    def forward(self, x):
        batch = x.size(0)
        height, width = x.size(2), x.size(3)
        positions = height * width
        # Flatten the spatial dimensions into one "positions" axis.
        query = self.wf(x).view(batch, -1, positions)
        key = self.wg(x).view(batch, -1, positions)
        value = self.wh(x).view(batch, -1, positions)
        # Attention weights between every pair of spatial positions.
        scores = torch.bmm(query.transpose(1, 2), key)
        attn = torch.softmax(scores, 2)
        attended = torch.bmm(value, attn).view(batch, -1, height, width)
        return self.gamma * self.wv(attended) + x
|
[
"torch.zeros",
"torch.bmm",
"torch.softmax"
] |
[((1236, 1255), 'torch.softmax', 'torch.softmax', (['s', '(2)'], {}), '(s, 2)\n', (1249, 1255), False, 'import torch\n'), ((916, 930), 'torch.zeros', 'torch.zeros', (['(1)'], {}), '(1)\n', (927, 930), False, 'import torch\n'), ((1269, 1287), 'torch.bmm', 'torch.bmm', (['h', 'beta'], {}), '(h, beta)\n', (1278, 1287), False, 'import torch\n')]
|
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (C) 2015, ARM Limited and contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
import os
import logging
from subprocess import Popen, PIPE
from time import sleep
from android import Screen, System, Workload
class UiBench(Workload):
    """
    Android UiBench workload.

    Runs a single UiBench jank test on the target device for a required
    duration and collects the resulting frame statistics (plus, optionally,
    energy/ftrace/systrace data via the tracing hooks).
    """

    # Package required by this workload
    package = 'com.android.test.uibench'

    # Instrumentation required to run tests
    test_package = 'com.android.uibench.janktests'

    # Supported tests list
    test_ClippedListView = 'UiBenchJankTests#testClippedListView'
    test_DialogListFling = 'UiBenchJankTests#testDialogListFling'
    test_FadingEdgeListViewFling = 'UiBenchJankTests#testFadingEdgeListViewFling'
    test_FullscreenOverdraw = 'UiBenchJankTests#testFullscreenOverdraw'
    test_GLTextureView = 'UiBenchJankTests#testGLTextureView'
    test_InflatingListViewFling = 'UiBenchJankTests#testInflatingListViewFling'
    test_Invalidate = 'UiBenchJankTests#testInvalidate'
    test_InvalidateTree = 'UiBenchJankTests#testInvalidateTree'
    test_OpenNavigationDrawer = 'UiBenchJankTests#testOpenNavigationDrawer'
    test_OpenNotificationShade = 'UiBenchJankTests#testOpenNotificationShade'
    test_ResizeHWLayer = 'UiBenchJankTests#testResizeHWLayer'
    test_SaveLayerAnimation = 'UiBenchJankTests#testSaveLayerAnimation'
    test_SlowBindRecyclerViewFling = 'UiBenchJankTests#testSlowBindRecyclerViewFling'
    test_SlowNestedRecyclerViewFling = 'UiBenchJankTests#testSlowNestedRecyclerViewFling'
    test_SlowNestedRecyclerViewInitialFling = 'UiBenchJankTests#testSlowNestedRecyclerViewInitialFling'
    test_TrivialAnimation = 'UiBenchJankTests#testTrivialAnimation'
    test_TrivialListViewFling = 'UiBenchJankTests#testTrivialListViewFling'
    test_TrivialRecyclerListViewFling = 'UiBenchJankTests#testTrivialRecyclerListViewFling'
    test_BitmapUploadJank = 'UiBenchRenderingJankTests#testBitmapUploadJank'
    test_ShadowGridListFling = 'UiBenchRenderingJankTests#testShadowGridListFling'
    test_EditTextTyping = 'UiBenchTextJankTests#testEditTextTyping'
    test_LayoutCacheHighHitrateFling = 'UiBenchTextJankTests#testLayoutCacheHighHitrateFling'
    test_LayoutCacheLowHitrateFling = 'UiBenchTextJankTests#testLayoutCacheLowHitrateFling'
    test_ActivityTransitionsAnimation = 'UiBenchTransitionsJankTests#testActivityTransitionsAnimation'
    test_WebViewFling = 'UiBenchWebView#testWebViewFling'

    def __init__(self, test_env):
        super(UiBench, self).__init__(test_env)
        self._log = logging.getLogger('UiBench')
        self._log.debug('Workload created')

        # Path of the frame statistics dump produced by the last run
        self.db_file = None

    def run(self, out_dir, test_name, duration_s, collect=''):
        """
        Run single UiBench workload.

        :param out_dir: Path to experiment directory where to store results.
        :type out_dir: str

        :param test_name: Name of the test to run
        :type test_name: str

        :param duration_s: Run benchmak for this required number of seconds
        :type duration_s: int

        :param collect: Specifies what to collect. Possible values:
            - 'energy'
            - 'systrace'
            - 'ftrace'
            - any combination of the above
        :type collect: list(str)
        """
        activity = '.' + test_name

        # Keep track of mandatory parameters
        self.out_dir = out_dir
        self.collect = collect

        # Unlock device screen (assume no password required)
        Screen.unlock(self._target)

        # Close and clear application
        System.force_stop(self._target, self.package, clear=True)

        # Set airplane mode
        System.set_airplane_mode(self._target, on=True)

        # Set min brightness
        Screen.set_brightness(self._target, auto=False, percent=0)

        # Start the main view of the app which must be running
        # to reset the frame statistics.
        System.monkey(self._target, self.package)

        # Force screen in PORTRAIT mode
        Screen.set_orientation(self._target, portrait=True)

        # Reset frame statistics
        System.gfxinfo_reset(self._target, self.package)
        sleep(1)

        # Clear logcat
        os.system(self._adb('logcat -c'))

        # Regexps for benchmark synchronization
        start_logline = r'TestRunner: started'
        UIBENCH_BENCHMARK_START_RE = re.compile(start_logline)
        self._log.debug("START string [%s]", start_logline)

        # Command used to monitor logcat for the start trigger.
        # FIX: the original called .format(self._target.adb_name) on a string
        # containing no '{}' placeholder, which was a silent no-op.
        logcat_cmd = self._adb('logcat TestRunner:* System.out:I *:S BENCH:*')
        self._log.info("%s", logcat_cmd)

        # Run benchmark with a lot of iterations to avoid finishing before
        # duration_s elapses
        command = "nohup am instrument -e iterations 1000000 -e class {}{} -w {}".format(
            self.test_package, activity, self.test_package)
        self._target.background(command)

        logcat = Popen(logcat_cmd, shell=True, stdout=PIPE)
        try:
            while True:
                # read next logcat line (up to max 1024 chars)
                message = logcat.stdout.readline(1024)
                # Popen pipes yield bytes on Python 3; normalise so the
                # str regex below works on both Python 2 and 3.
                if isinstance(message, bytes):
                    message = message.decode('utf-8', 'replace')
                # Benchmark start trigger
                match = UIBENCH_BENCHMARK_START_RE.search(message)
                if match:
                    self.tracingStart()
                    self._log.debug("Benchmark started!")
                    break
        finally:
            # FIX: the logcat reader was previously never stopped and leaked
            # a subprocess per run; it is only needed for the start trigger.
            logcat.terminate()
            logcat.wait()

        # Run the workload for the required time
        self._log.info('Benchmark [%s] started, waiting %d [s]',
                       activity, duration_s)
        sleep(duration_s)
        self._log.debug("Benchmark done!")

        self.tracingStop()

        # Get frame stats
        self.db_file = os.path.join(out_dir, "framestats.txt")
        System.gfxinfo_get(self._target, self.package, self.db_file)

        # Close and clear application
        System.force_stop(self._target, self.package, clear=True)

        # Go back to home screen
        System.home(self._target)

        # Switch back to original settings
        Screen.set_orientation(self._target, auto=True)
        System.set_airplane_mode(self._target, on=False)
        Screen.set_brightness(self._target, auto=True)
# vim :set tabstop=4 shiftwidth=4 expandtab
|
[
"android.Screen.set_orientation",
"android.Screen.set_brightness",
"subprocess.Popen",
"android.System.monkey",
"android.System.home",
"logging.getLogger",
"time.sleep",
"android.System.gfxinfo_get",
"android.Screen.unlock",
"android.System.set_airplane_mode",
"os.path.join",
"android.System.gfxinfo_reset",
"android.System.force_stop",
"re.compile"
] |
[((3084, 3112), 'logging.getLogger', 'logging.getLogger', (['"""UiBench"""'], {}), "('UiBench')\n", (3101, 3112), False, 'import logging\n'), ((4082, 4109), 'android.Screen.unlock', 'Screen.unlock', (['self._target'], {}), '(self._target)\n', (4095, 4109), False, 'from android import Screen, System, Workload\n'), ((4157, 4214), 'android.System.force_stop', 'System.force_stop', (['self._target', 'self.package'], {'clear': '(True)'}), '(self._target, self.package, clear=True)\n', (4174, 4214), False, 'from android import Screen, System, Workload\n'), ((4252, 4299), 'android.System.set_airplane_mode', 'System.set_airplane_mode', (['self._target'], {'on': '(True)'}), '(self._target, on=True)\n', (4276, 4299), False, 'from android import Screen, System, Workload\n'), ((4338, 4396), 'android.Screen.set_brightness', 'Screen.set_brightness', (['self._target'], {'auto': '(False)', 'percent': '(0)'}), '(self._target, auto=False, percent=0)\n', (4359, 4396), False, 'from android import Screen, System, Workload\n'), ((4510, 4551), 'android.System.monkey', 'System.monkey', (['self._target', 'self.package'], {}), '(self._target, self.package)\n', (4523, 4551), False, 'from android import Screen, System, Workload\n'), ((4601, 4652), 'android.Screen.set_orientation', 'Screen.set_orientation', (['self._target'], {'portrait': '(True)'}), '(self._target, portrait=True)\n', (4623, 4652), False, 'from android import Screen, System, Workload\n'), ((4695, 4743), 'android.System.gfxinfo_reset', 'System.gfxinfo_reset', (['self._target', 'self.package'], {}), '(self._target, self.package)\n', (4715, 4743), False, 'from android import Screen, System, Workload\n'), ((4752, 4760), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4757, 4760), False, 'from time import sleep\n'), ((4961, 4986), 're.compile', 're.compile', (['start_logline'], {}), '(start_logline)\n', (4971, 4986), False, 'import re\n'), ((5573, 5615), 'subprocess.Popen', 'Popen', (['logcat_cmd'], {'shell': '(True)', 'stdout': 
'PIPE'}), '(logcat_cmd, shell=True, stdout=PIPE)\n', (5578, 5615), False, 'from subprocess import Popen, PIPE\n'), ((6149, 6166), 'time.sleep', 'sleep', (['duration_s'], {}), '(duration_s)\n', (6154, 6166), False, 'from time import sleep\n'), ((6288, 6327), 'os.path.join', 'os.path.join', (['out_dir', '"""framestats.txt"""'], {}), "(out_dir, 'framestats.txt')\n", (6300, 6327), False, 'import os\n'), ((6336, 6396), 'android.System.gfxinfo_get', 'System.gfxinfo_get', (['self._target', 'self.package', 'self.db_file'], {}), '(self._target, self.package, self.db_file)\n', (6354, 6396), False, 'from android import Screen, System, Workload\n'), ((6444, 6501), 'android.System.force_stop', 'System.force_stop', (['self._target', 'self.package'], {'clear': '(True)'}), '(self._target, self.package, clear=True)\n', (6461, 6501), False, 'from android import Screen, System, Workload\n'), ((6544, 6569), 'android.System.home', 'System.home', (['self._target'], {}), '(self._target)\n', (6555, 6569), False, 'from android import Screen, System, Workload\n'), ((6622, 6669), 'android.Screen.set_orientation', 'Screen.set_orientation', (['self._target'], {'auto': '(True)'}), '(self._target, auto=True)\n', (6644, 6669), False, 'from android import Screen, System, Workload\n'), ((6678, 6726), 'android.System.set_airplane_mode', 'System.set_airplane_mode', (['self._target'], {'on': '(False)'}), '(self._target, on=False)\n', (6702, 6726), False, 'from android import Screen, System, Workload\n'), ((6735, 6781), 'android.Screen.set_brightness', 'Screen.set_brightness', (['self._target'], {'auto': '(True)'}), '(self._target, auto=True)\n', (6756, 6781), False, 'from android import Screen, System, Workload\n')]
|
from setuptools import setup,find_packages
# Use the README as the long description shown on PyPI.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setup(
    # FIX: distribution names may only contain ASCII letters, digits and
    # '.', '_', '-' (PEP 503/508); a name containing a space is rejected
    # by modern setuptools and pip.
    name='python-installer',
    version='0.0.1',
    license='MIT',
    author='<NAME>',
    author_email='<EMAIL>',
    description='A package installer for python',
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=find_packages(),
    install_requires=[
        'click',
        'pipreqs'
    ],
    keywords=['python', 'package installer', 'json'],
    classifiers= [
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS',
        'Operating System :: Unix',
        'Programming Language :: Python'
    ],
    entry_points={
        'console_scripts': [
            'package = python_installer.main:cli',
        ]
    },
)
|
[
"setuptools.find_packages"
] |
[((425, 440), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (438, 440), False, 'from setuptools import setup, find_packages\n')]
|
import os
import subprocess
import sys
import RPi.GPIO as GPIO
import time
# Use Broadcom (BCM) pin numbering and configure GPIO 6 as a digital input.
GPIO.setmode(GPIO.BCM)
GPIO.setup(6, GPIO.IN)

try:
    # Poll the pin until it reads high, sleeping between reads so the
    # loop does not spin the CPU at 100%.
    while True:
        val = GPIO.input(6)
        print(val)
        if val == 1:
            break
        time.sleep(0.2)
finally:
    # FIX: release the GPIO channel on every exit path (including Ctrl-C);
    # the original left the pin claimed, causing "channel already in use"
    # warnings on subsequent runs.
    GPIO.cleanup()

print(' finished ')
|
[
"RPi.GPIO.setup",
"RPi.GPIO.setmode",
"RPi.GPIO.input",
"time.sleep"
] |
[((76, 98), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (88, 98), True, 'import RPi.GPIO as GPIO\n'), ((99, 121), 'RPi.GPIO.setup', 'GPIO.setup', (['(6)', 'GPIO.IN'], {}), '(6, GPIO.IN)\n', (109, 121), True, 'import RPi.GPIO as GPIO\n'), ((144, 157), 'RPi.GPIO.input', 'GPIO.input', (['(6)'], {}), '(6)\n', (154, 157), True, 'import RPi.GPIO as GPIO\n'), ((207, 222), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (217, 222), False, 'import time\n')]
|
import os
from datetime import timedelta
from pathlib import Path
from dotenv import load_dotenv
BASE_DIR = Path(__file__).resolve().parent.parent
# Load environment variables from the project-level .env file.
load_dotenv(dotenv_path=BASE_DIR / '.env')

SECRET_KEY = os.getenv('SECRET_KEY')

# os.getenv() returns a string (or None), never a bool, so the original
# membership test against [..., True] could never match True, and the
# `True if ... else False` ternary was redundant.
DEBUG = os.getenv('DEBUG') in ('true', 'True')
# Installed applications: Django contrib apps first (admin depends on
# auth/contenttypes/sessions/messages), then third-party, then project apps.
INSTALLED_APPS = [
    # first place required
    # base
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # third-party
    'rest_framework',
    # project apps
    'users',
    'appcraft_logging',
    'seeds'
]
# Local SQLite database stored at the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Standard Django middleware stack (order matters).
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'server.urls'
# Templates are looked up in the project-level 'templates' directory and,
# because APP_DIRS is True, inside each installed app as well.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
ASGI_APPLICATION = 'server.asgi.application'
WSGI_APPLICATION = 'server.wsgi.application'
# Built-in password strength validators.
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Localisation: Russian locale, Moscow time, timezone-aware datetimes.
LANGUAGE_CODE = 'ru-RU'
TIME_ZONE = 'Europe/Moscow'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# - - - -
# custom
# - - - -
# Hosts allowed to serve this application, configurable via environment.
ALLOWED_HOSTS = [
    os.getenv('HOST_IP', '127.0.0.1'),
    os.getenv('HOST_DOMAIN', 'localhost')
]
# DRF: JWT authentication, authenticated-by-default permissions, and
# camelCase renderers/parsers for the public API.
REST_FRAMEWORK = {
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    ),
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework_simplejwt.authentication.JWTAuthentication',
    ),
    'DEFAULT_RENDERER_CLASSES': (
        'djangorestframework_camel_case.render.CamelCaseJSONRenderer',
        'djangorestframework_camel_case.render.CamelCaseBrowsableAPIRenderer',
    ),
    'DEFAULT_PARSER_CLASSES': (
        'djangorestframework_camel_case.parser.CamelCaseFormParser',
        'djangorestframework_camel_case.parser.CamelCaseMultiPartParser',
        'djangorestframework_camel_case.parser.CamelCaseJSONParser',
    ),
    'DEFAULT_PAGINATION_CLASS': 'backend.paginators.LimitOffsetPaginationDataAndCountOnly',
    'PAGE_SIZE': 10,
    'COERCE_DECIMAL_TO_STRING': False
}
# JWT access/refresh tokens are deliberately long-lived (one year).
SIMPLE_JWT = {
    'ACCESS_TOKEN_LIFETIME': timedelta(days=365),
    'REFRESH_TOKEN_LIFETIME': timedelta(days=365),
    'AUTH_HEADER_TYPES': ['JWT'],
}
# Static and media file layout under BASE_DIR/files.
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR, 'files', 'static')
MEDIA_ROOT = os.path.join(BASE_DIR, 'files', 'media')
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
# Custom user model provided by the 'users' app.
AUTH_USER_MODEL = 'users.UserModel'
# SMS Aero credentials (SMS delivery), supplied via environment.
SMS_AERO_EMAIL = os.getenv('SMS_AERO_EMAIL')
SMS_AERO_API_KEY = os.getenv('SMS_AERO_API_KEY')
# ClickHouse connection settings consumed by the appcraft_logging app.
APPCRAFT_LOGGING_CONFIG = {
    'db_name': os.getenv('CLICKHOUSE_DB_NAME', 'db'),
    'username': os.getenv('CLICKHOUSE_USERNAME', 'root'),
    'password': os.getenv('CLICKHOUSE_PASSWORD', '<PASSWORD>'),
    'port': os.getenv('CLICKHOUSE_PORT', '8123'),
}
STATICFILES_FINDERS = [
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'django.contrib.staticfiles.finders.FileSystemFinder'
]
if DEBUG:
    # Django Debug Toolbar is only enabled in development builds.
    # debug_toolbar
    INSTALLED_APPS.append('debug_toolbar')
    MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')
    INTERNAL_IPS = ['127.0.0.1']
|
[
"dotenv.load_dotenv",
"pathlib.Path",
"datetime.timedelta",
"os.path.join",
"os.getenv"
] |
[((150, 192), 'dotenv.load_dotenv', 'load_dotenv', ([], {'dotenv_path': "(BASE_DIR / '.env')"}), "(dotenv_path=BASE_DIR / '.env')\n", (161, 192), False, 'from dotenv import load_dotenv\n'), ((207, 230), 'os.getenv', 'os.getenv', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (216, 230), False, 'import os\n'), ((3544, 3585), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""files"""', '"""static"""'], {}), "(BASE_DIR, 'files', 'static')\n", (3556, 3585), False, 'import os\n'), ((3599, 3639), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""files"""', '"""media"""'], {}), "(BASE_DIR, 'files', 'media')\n", (3611, 3639), False, 'import os\n'), ((3751, 3778), 'os.getenv', 'os.getenv', (['"""SMS_AERO_EMAIL"""'], {}), "('SMS_AERO_EMAIL')\n", (3760, 3778), False, 'import os\n'), ((3798, 3827), 'os.getenv', 'os.getenv', (['"""SMS_AERO_API_KEY"""'], {}), "('SMS_AERO_API_KEY')\n", (3807, 3827), False, 'import os\n'), ((2422, 2455), 'os.getenv', 'os.getenv', (['"""HOST_IP"""', '"""127.0.0.1"""'], {}), "('HOST_IP', '127.0.0.1')\n", (2431, 2455), False, 'import os\n'), ((2461, 2498), 'os.getenv', 'os.getenv', (['"""HOST_DOMAIN"""', '"""localhost"""'], {}), "('HOST_DOMAIN', 'localhost')\n", (2470, 2498), False, 'import os\n'), ((3374, 3393), 'datetime.timedelta', 'timedelta', ([], {'days': '(365)'}), '(days=365)\n', (3383, 3393), False, 'from datetime import timedelta\n'), ((3425, 3444), 'datetime.timedelta', 'timedelta', ([], {'days': '(365)'}), '(days=365)\n', (3434, 3444), False, 'from datetime import timedelta\n'), ((3661, 3693), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (3673, 3693), False, 'import os\n'), ((3872, 3909), 'os.getenv', 'os.getenv', (['"""CLICKHOUSE_DB_NAME"""', '"""db"""'], {}), "('CLICKHOUSE_DB_NAME', 'db')\n", (3881, 3909), False, 'import os\n'), ((3927, 3967), 'os.getenv', 'os.getenv', (['"""CLICKHOUSE_USERNAME"""', '"""root"""'], {}), "('CLICKHOUSE_USERNAME', 'root')\n", (3936, 3967), False, 'import 
os\n'), ((3985, 4031), 'os.getenv', 'os.getenv', (['"""CLICKHOUSE_PASSWORD"""', '"""<PASSWORD>"""'], {}), "('CLICKHOUSE_PASSWORD', '<PASSWORD>')\n", (3994, 4031), False, 'import os\n'), ((4045, 4081), 'os.getenv', 'os.getenv', (['"""CLICKHOUSE_PORT"""', '"""8123"""'], {}), "('CLICKHOUSE_PORT', '8123')\n", (4054, 4081), False, 'import os\n'), ((248, 266), 'os.getenv', 'os.getenv', (['"""DEBUG"""'], {}), "('DEBUG')\n", (257, 266), False, 'import os\n'), ((1342, 1377), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (1354, 1377), False, 'import os\n'), ((110, 124), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (114, 124), False, 'from pathlib import Path\n')]
|
# AUTOGENERATED! DO NOT EDIT! File to edit: dev/52_USB_camera.ipynb (unless otherwise specified).
__all__ = ['Camera']
# Cell
from FLIRCam.core import *
# Cell
# Standard imports:
from pathlib import Path
import logging
from logging.handlers import RotatingFileHandler
from time import sleep, time as timestamp
from datetime import datetime
from threading import Thread, Event
from struct import pack as pack_data
# External imports:
import numpy as np
# Cell
import PySpin
class Camera():
    """Control acquisition and receive images from a camera.
    To initialise a Camera a *model* (determines hardware interface) and *identity* (identifying the specific device)
    must be given. If both are given to the constructor the Camera will be initialised immediately (unless
    auto_init=False is passed). Manually initialise with a call to Camera.initialize(); release hardware with a call to
    Camera.deinitialize().
    After the Camera is intialised, acquisition properties (e.g. exposure_time and frame_rate) may be set and images
    received. The Camera also supports event-driven acquisition, see Camera.add_event_callback(), where new images are
    automatically passed on to the desired functions.
    Args:
        model (str, optional): The model used to determine the correct hardware API. Supported: 'ptgrey' for
            PointGrey/FLIR Machine Vision cameras (using Spinnaker and PySpin).
        identity (str, optional): String identifying the device. For model *ptgrey* this is 'serial number' *as a
            string*.
        name (str, optional): Name for the device.
        auto_init (bool, optional): If both model and identity are given when creating the Camera and auto_init
            is True (the default), Camera.initialize() will be called after creation.
        debug_folder (pathlib.Path, optional): The folder for debug logging. If None (the default)
            the folder *pypogs*/debug will be used/created.
    Example:
        ::
            # Create instance and set parameters (will auto initialise)
            cam = pypogs.Camera(model='ptgrey', identity='18285284', name='CoarseCam')
            cam.gain = 0 #decibel
            cam.exposure_time = 100 #milliseconds
            cam.frame_rate_auto = True
            # Start acquisition
            cam.start()
            # Wait for a while
            time.sleep(2)
            # Read the latest image
            img = cam.get_latest_image()
            # Stop the acquisition
            cam.stop()
            # Release the hardware
            cam.deinitialize()
    """
    _supported_models = ('ptgrey',)
    def __init__(self, model=None, identity=None, name=None, auto_init=True, debug_folder=None):
        """Create Camera instance. See class documentation."""
        # Logger setup
        self._debug_folder = None
        if debug_folder is None:
            try:
                self.debug_folder = Path(__file__).parent / 'debug'
            except:
                self.debug_folder = Path()/'debug'
        else:
            self.debug_folder = debug_folder
        self.log = logging.getLogger(f'{name}')
        if not self.log.hasHandlers():
            # Add new handlers to the logger if there are none
            self.log.setLevel(logging.DEBUG)
            # Console handler at INFO level
            ch = logging.StreamHandler()
            ch.setLevel(logging.INFO)
            # File handler at DEBUG level
            fh = RotatingFileHandler(self.debug_folder / 'camera.log', maxBytes=1*1024*1024,
                                     backupCount=2)
            fh.setLevel(logging.DEBUG)
            # Format and add
            log_formatter = logging.Formatter('%(asctime)s %(name)s-%(levelname)s-%(threadName)s'+
                                              '-%(funcName)s-(%(lineno)d) %(message)s')
            fh.setFormatter(log_formatter)
            ch.setFormatter(log_formatter)
            self.log.addHandler(fh)
            self.log.addHandler(ch)
            self.log.info('New console and file logging handlers added.')
        # Start of constructor
        self.log.debug('Camera Init: Model:'+str(model)+' ID:'+str(identity) \
                       +' Name:'+str(name) +' AutoInit:'+str(auto_init))
        self._model = None
        self._identity = None
        self._name = 'UnnamedCamera'
        self._plate_scale = 1.0
        self._rotation = 0.0
        self._flipX = False
        self._flipY = False
        self._rot90 = 0 #Number of times to rotate by 90 deg, done after flips
        #Only used for ptgrey
        self._ptgrey_camera = None
        self._ptgrey_camlist = None
        self._ptgrey_system = None
        #Callbacks on image event
        self._call_on_image = set()
        self._got_image_event = Event()
        self._image_data = None
        self._image_frameID = None
        self._image_timestamp = None
        self._imgs_since_start = 0
        self.log.debug('Calling self on constructor input')
        if model is not None:
            self.model = model
        if identity is not None:
            self.identity = identity
        if name is not None:
            self.name = name
        if auto_init and not None in (model, identity):
            self.log.debug('Trying to auto-initialise')
            self.initialize()
        self.log.debug('Registering destructor')
        import atexit, weakref
        # BUGFIX: atexit.register(weakref.ref(self.__del__)) registered a *dead* weak
        # reference (a bound method object is collected immediately), so the destructor
        # never ran at interpreter exit. WeakMethod keeps a revivable reference to the
        # bound method without keeping the Camera alive.
        self._del_weakref = weakref.WeakMethod(self.__del__)
        def _atexit_cleanup(ref=self._del_weakref):
            # Call the destructor only if the instance still exists.
            destructor = ref()
            if destructor is not None:
                destructor()
        atexit.register(_atexit_cleanup)
        self.log.info('Camera instance created with name: ' + self.name + '.')
    def __del__(self):
        """Destructor. Releases hardware."""
        if self.is_init:
            self.deinitialize()
    def getprops(self, prop_list):
        """Get FLIR Camera properties listed in prop_list.
        Args:
            prop_list (iterable of str): GenICam node names to read.
        Returns:
            dict: node name -> value (None where the node could not be read).
        """
        assert self.is_init, 'Camera must be initialised'
        prop_dict = {p: None for p in prop_list}
        try:
            nodemap = self._ptgrey_camera.GetNodeMap()
        except PySpin.SpinnakerException:
            self.log.warning('Failed to access camera node map')
            return prop_dict
        for p in prop_list:
            # Node type is not known up front; probe integer, float, enumeration
            # and finally bool. Best effort: a node that fails stays None and the
            # remaining nodes are still read (previously one failure aborted all,
            # and `return` inside `finally` swallowed every exception).
            try:
                try:  # integer
                    prop_dict[p] = PySpin.CIntegerPtr(nodemap.GetNode(p)).GetValue()
                except:
                    try:  # float
                        prop_dict[p] = PySpin.CFloatPtr(nodemap.GetNode(p)).GetValue()
                    except:
                        try:  # enumeration
                            node = PySpin.CEnumerationPtr(nodemap.GetNode(p))
                            prop_dict[p] = node.GetCurrentEntry().GetDisplayName().lower()
                        except:  # bool
                            prop_dict[p] = PySpin.CBooleanPtr(nodemap.GetNode(p)).GetValue()
                self.log.debug(f'Found Node "{str(p)}" = {prop_dict[p]}')
            except PySpin.SpinnakerException:
                self.log.warning(f'Failed to read node "{str(p)}"')
        return prop_dict
    def setprops(self, prop_dict, stop=True):
        """Set FLIR Camera properties listed in prop_dict.
        Args:
            prop_dict (dict): node name -> value to write.
            stop (bool): If True (default) and the camera is running it is
                stopped before writing and restarted afterwards.
        """
        assert self.is_init, 'Camera must be initialised'
        was_stopped = False
        if self.is_running and stop:
            self.log.debug('Camera is running, stop it and restart immediately after.')
            self.stop()
            was_stopped = True
        # Read current values to learn each node's Python type; writing uses the
        # matching PySpin pointer class.
        type_list = [type(value) for key, value in self.getprops(prop_dict).items()]
        self.log.debug(f'Type_list = {type_list}')
        try:
            nodemap = self._ptgrey_camera.GetNodeMap()
            for (key, value), t in zip(prop_dict.items(), type_list):
                if t == int:  # integer
                    PySpin.CIntegerPtr(nodemap.GetNode(key)).SetValue(value)
                elif t == float:
                    PySpin.CFloatPtr(nodemap.GetNode(key)).SetValue(value)
                elif t == str:
                    node = PySpin.CEnumerationPtr(nodemap.GetNode(key))
                    node.SetIntValue(node.GetEntryByName(value).GetValue())
                elif t == bool:
                    PySpin.CBooleanPtr(nodemap.GetNode(key)).SetValue(value)
                elif t == type(None):
                    self.log.warning(f'No property type found for node: "{key}"')
                    # BUGFIX: was `return`, which skipped the restart below and left
                    # a previously-running camera stopped.
                    break
                else:
                    self.log.warning(f'Property type not implemented for node: "{key}"')
                    break  # BUGFIX: was `return` (see above)
                self.log.debug(f'Set Node "{key}" = {value}')
        except PySpin.SpinnakerException as e:
            if 'LogicalErrorException' in e.message:
                self.log.warning(f'Node: "{key}", LogicalErrorException')
            elif 'OutOfRangeException' in e.message:
                self.log.warning(f'Node: "{key}", value: "{value}" is out of range.')
            elif 'AccessException' in e.message:
                self.log.warning(f'Not allowed to change Node: "{key}" now - Try "stop=True".')
            else:
                self.log.warning(f'Failed to set node: "{key}"')
        if was_stopped:
            try:
                self.start()
                self.log.debug('Restarted')
            except Exception:
                self.log.debug('Failed to restart: ', exc_info=True)
    def _ptgrey_release(self):
        """PRIVATE: Release Point Grey hardware resources."""
        self.log.debug('PointGrey hardware release called')
        if self._ptgrey_camera is not None:
            self.log.debug('Deleting PtGrey camera object')
            del(self._ptgrey_camera) #Preferred over =None according to PtGrey
            self._ptgrey_camera = None
        if self._ptgrey_camlist is not None:
            self.log.debug('Clearing and deleting PtGrey camlist')
            self._ptgrey_camlist.Clear()
            del(self._ptgrey_camlist)
            self._ptgrey_camlist = None
        if self._ptgrey_system is not None:
            self.log.debug('Has PtGrey system. Is in use? '+str(self._ptgrey_system.IsInUse()))
            if not self._ptgrey_system.IsInUse():
                self.log.debug('Not in use, releasing and deleting')
                self._ptgrey_system.ReleaseInstance()
            del(self._ptgrey_system)
            self._ptgrey_system = None
        self.log.debug('Hardware released')
    @property
    def debug_folder(self):
        """pathlib.Path: Get or set the path for debug logging. Will create folder if not existing."""
        return self._debug_folder
    @debug_folder.setter
    def debug_folder(self, path):
        # Do not do logging in here! This will be called before the logger is set up
        path = Path(path) #Make sure pathlib.Path
        if path.is_file():
            path = path.parent
        if not path.is_dir():
            path.mkdir(parents=True)
        self._debug_folder = path
    @property
    def name(self):
        """str: Get or set the name."""
        return self._name
    @name.setter
    def name(self, name):
        self.log.debug('Setting name to: '+str(name))
        self._name = str(name)
        self.log.debug('Name set to '+str(self.name))
    @property
    def model(self):
        """str: Get or set the device model.
        Supported:
            - 'ptgrey' for FLIR/Point Grey cameras (using Spinnaker/PySpin SDKs).
        - This will determine which hardware API that is used.
        - Must set before initialising the device and may not be changed for an initialised device.
        """
        return self._model
    @model.setter
    def model(self, model):
        self.log.debug('Setting model to: '+str(model))
        assert not self.is_init, 'Can not change already intialised device model'
        model = str(model)
        assert model.lower() in self._supported_models,\
                                                'Model type not recognised, allowed: '+str(self._supported_models)
        #TODO: Check that the APIs are available.
        self._model = model
        self.log.debug('Model set to '+str(self.model))
    @property
    def identity(self):
        """str: Get or set the device and/or input. Model must be defined first.
        - For model *ptgrey* this is the serial number *as a string*
        - Must set before initialising the device and may not be changed for an initialised device.
        """
        return self._identity
    @identity.setter
    def identity(self, identity):
        self.log.debug('Setting identity to: '+str(identity))
        assert not self.is_init, 'Can not change already intialised device'
        assert self.model is not None, 'Must define model first'
        identity = str(identity)
        if not self._ptgrey_system:
            self._ptgrey_system = PySpin.System.GetInstance() #Get singleton
        self._ptgrey_camlist = self._ptgrey_system.GetCameras()
        self.log.debug('Got cam list, size:'+str(self._ptgrey_camlist.GetSize()))
        self._ptgrey_camera = self._ptgrey_camlist.GetBySerial(identity)
        valid = self._ptgrey_camera.IsValid()
        self.log.debug('Got object, valid: '+str(valid))
        if valid:
            self.log.debug('Already init: '+str(self._ptgrey_camera.IsInitialized()))
        if not valid:
            self.log.debug('Invalid camera object. Cleaning up')
            del(self._ptgrey_camera)
            self._ptgrey_camera = None
            self._ptgrey_camlist.Clear()
            raise AssertionError('The camera was not found')
        elif self._ptgrey_camera.IsInitialized():
            self.log.debug('Camera object already in use. Cleaning up')
            del(self._ptgrey_camera)
            self._ptgrey_camera = None
            self._ptgrey_camlist.Clear()
            raise RuntimeError('The camera is already in use')
        else:
            self.log.debug('Seems valid. Setting identity and cleaning up')
            del(self._ptgrey_camera)
            self._ptgrey_camera = None
            self._identity = identity
            self._ptgrey_camlist.Clear()
        self.log.debug('Identity set to: '+str(self.identity))
    @property
    def is_init(self):
        """bool: True if the device is initialised (and therefore ready to start)."""
        init = self._ptgrey_camera is not None and self._ptgrey_camera.IsInitialized()
        return init
    def initialize(self):
        """Initialise (make ready to start) the device. The model and identity must be defined."""
        self.log.debug('Initialising')
        assert not self.is_init, 'Already initialised'
        assert not None in (self.model, self.identity), 'Must define model and identity before initialising'
        if self._ptgrey_camera is not None:
            raise RuntimeError('There is already a camera object here')
        if not self._ptgrey_system: self._ptgrey_system = PySpin.System.GetInstance() #Get singleton
        if self._ptgrey_camlist: #Clear old list and get fresh one
            self._ptgrey_camlist.Clear()
            del(self._ptgrey_camlist)
        self._ptgrey_camlist = self._ptgrey_system.GetCameras()
        self.log.debug('Getting pyspin object and initialising')
        self._ptgrey_camera = self._ptgrey_camlist.GetBySerial(self.identity)
        self._ptgrey_camera.Init()
        # BASIC SETUP
        self.log.debug('Setting acquisition mode to continuous')
        self._ptgrey_camera.AcquisitionMode.SetIntValue(PySpin.AcquisitionMode_Continuous)
        self.log.debug('Setting stream mode to newest only')
        self._ptgrey_camera.TLStream.StreamBufferHandlingMode.SetIntValue(
                                                                PySpin.StreamBufferHandlingMode_NewestOnly)
        self.log.info('Camera successfully initialised')
    def deinitialize(self):
        """De-initialise the device and release hardware resources. Will stop the acquisition if it is running."""
        self.log.debug('De-initialising')
        assert self.is_init, 'Not initialised'
        if self.is_running:
            self.log.debug('Is running, stopping')
            self.stop()
            self.log.debug('Stopped')
        self.log.debug('Found PtGrey camera, deinitialising')
        self.unregister_event_handler()
        try:
            self._ptgrey_camera.DeInit()
            del(self._ptgrey_camera)
            self._ptgrey_camera = None
            self.log.debug('Deinitialised PtGrey camera object and deleted')
        except:
            self.log.exception('Failed to close task')
        self.log.debug('Trying to release PtGrey hardware resources')
        self._ptgrey_release()
    def register_event_handler(self):
        """Initialise images event handler mode."""
        class PtGreyEventHandler(PySpin.ImageEvent):
            """Barebones event handler for ptgrey, just pass along the event to the Camera class."""
            def __init__(self, parent):
                assert parent.model.lower() == 'ptgrey', 'Trying to attach ptgrey event handler to non ptgrey model'
                super().__init__()
                self.parent = parent
            def OnImageEvent(self, image:PySpin.Image):
                """Read out the image and a timestamp, reshape to array, pass to parent"""
                self.parent._image_timestamp = datetime.utcnow()
                try:
                    image_converted = image.Convert(PySpin.PixelFormat_RGB8)
                    image_converted = image_converted.GetNDArray()
                    # BUGFIX: the flipped/rotated arrays were previously assigned to an
                    # unused local `img`, so flip_x/flip_y/rotate_90 never took effect.
                    if self.parent._flipX:
                        image_converted = np.fliplr(image_converted)
                    if self.parent._flipY:
                        image_converted = np.flipud(image_converted)
                    if self.parent._rot90:
                        image_converted = np.rot90(image_converted, self.parent._rot90)
                    self.parent._image_data = image_converted
                    self.parent._image_frameID = image.GetFrameID()
                except:
                    self.parent.log.warning('Failed to unpack image', exc_info=True)
                    self.parent._image_data = None
                finally:
                    image.Release()
                self.parent._got_image_event.set()
                if self.parent._imgs_since_start % 10 == 0:
                    # Guard against a failed unpack above (image_data is None then).
                    if self.parent._image_data is not None:
                        self.parent.log.debug('Frames Received: ' + str(self.parent._imgs_since_start) \
                                              + ' Size:' + str(self.parent._image_data.shape) \
                                              + ' Type:' + str(self.parent._image_data.dtype))
                    else:
                        self.parent.log.debug('Frames Received: ' + str(self.parent._imgs_since_start) \
                                              + ' (no image data)')
                for func in self.parent._call_on_image:
                    try:
                        self.parent.log.debug('Calling back to: ' + str(func))
                        func(self.parent._image_data, self.parent._image_frameID, self.parent._image_timestamp, self.parent.identity)
                    except:
                        self.parent.log.warning('Failed image callback', exc_info=True)
                self.parent._imgs_since_start += 1
        self._ptgrey_event_handler = PtGreyEventHandler(self)
        self.log.debug('Created ptgrey image event handler')
        self._ptgrey_camera.RegisterEvent( self._ptgrey_event_handler )
        self.log.debug('Registered ptgrey image event handler')
    def unregister_event_handler(self):
        """Unregister images event handler."""
        try:
            self._ptgrey_camera.UnregisterEvent(self._ptgrey_event_handler)
            self.log.debug('Unregistered event handler')
        except:
            self.log.exception('Failed to unregister event handler')
    @property
    def available_properties(self):
        """tuple of str: Get all the available properties (settings) supported by this device."""
        assert self.is_init, 'Camera must be initialised'
        return ('flip_x', 'flip_y', 'rotate_90', 'plate_scale', 'rotation', 'binning', 'size_readout', 'frame_rate_auto',\
                'frame_rate', 'gain_auto', 'gain', 'exposure_time_auto', 'exposure_time')
    @property
    def flip_x(self):
        """bool: Get or set if the image X-axis should be flipped. Default is False."""
        self.log.debug('Get flip-X called')
        assert self.is_init, 'Camera must be initialised'
        self.log.debug('Using PtGrey camera. Will flip the received image array ourselves: ' +str(self._flipX))
        return self._flipX
    @flip_x.setter
    def flip_x(self, flip):
        self.log.debug('Set flip-X called with: '+str(flip))
        assert self.is_init, 'Camera must be initialised'
        flip = bool(flip)
        self.log.debug('Using PtGrey camera. Will flip the received image array ourselves.')
        self._flipX = flip
        self.log.debug('_flipX set to: '+str(self._flipX))
    @property
    def flip_y(self):
        """bool: Get or set if the image Y-axis should be flipped. Default is False."""
        self.log.debug('Get flip-Y called')
        assert self.is_init, 'Camera must be initialised'
        # BUGFIX: the log message previously reported _flipX here.
        self.log.debug('Using PtGrey camera. Will flip the received image array ourselves: ' +str(self._flipY))
        return self._flipY
    @flip_y.setter
    def flip_y(self, flip):
        self.log.debug('Set flip-Y called with: '+str(flip))
        assert self.is_init, 'Camera must be initialised'
        flip = bool(flip)
        self.log.debug('Using PtGrey camera. Will flip the received image array ourselves.')
        self._flipY = flip
        self.log.debug('_flipY set to: '+str(self._flipY))
    @property
    def rotate_90(self):
        """int: Get or set how many times the image should be rotated by 90 degrees. Applied *after* flip_x and flip_y.
        """
        assert self.is_init, 'Camera must be initialised'
        return self._rot90
    @rotate_90.setter
    def rotate_90(self, k):
        self.log.debug('Set rot90 called with: '+str(k))
        assert self.is_init, 'Camera must be initialised'
        k = int(k)
        self.log.debug('Using PtGrey camera. Will rotate the received image array ourselves.')
        self._rot90 = k
        self.log.debug('rot90 set to: '+str(self._rot90))
    @property
    def plate_scale(self):
        """float: Get or set the plate scale of the Camera in arcsec per pixel.
        This will not affect anything in this class but is used elsewhere. Set this to the physical pixel plate scale
        *before* any binning. When getting the plate scale it will be scaled by the binning factor.
        """
        return self._plate_scale * self.binning
    @plate_scale.setter
    def plate_scale(self, arcsec):
        self.log.debug('Set plate scale called with: '+str(arcsec))
        self._plate_scale = float(arcsec)
        self.log.debug('Plate scale set to: '+str(self.plate_scale))
    @property
    def rotation(self):
        """float: Get or set the camera rotation relative to the horizon in degrees.
        This does not affect the received images, but is used elsewhere. Use rotate_90 first to keep this rotation
        small.
        """
        return self._rotation
    @rotation.setter
    def rotation(self, rot):
        self.log.debug('Set rotation called with: '+str(rot))
        self._rotation = float(rot)
        self.log.debug('Rotation set to: '+str(self.rotation))
    @property
    def frame_rate_auto(self):
        """bool: Get or set automatic frame rate. If True camera will run as fast as possible."""
        self.log.debug('Get frame rate auto called')
        val = self.getprops(['AcquisitionFrameRateEnable'])['AcquisitionFrameRateEnable']
        return not val
    @frame_rate_auto.setter
    def frame_rate_auto(self, auto):
        self.log.debug('Set frame rate called with: '+str(auto))
        auto = bool(auto)
        self.setprops({'AcquisitionFrameRateEnable': not auto})
    @property
    def frame_rate_limit(self):
        """tuple of float: Get the minimum and maximum frame rate in Hz supported."""
        self.log.debug('Get frame rate limit called')
        mn,mx = list(self.getprops(['FrameRateHz_Min', 'FrameRateHz_Max']).values())
        return (mn,mx)
    @property
    def frame_rate(self):
        """float: Get or set the camera frame rate in Hz. Will set auto frame rate to False."""
        self.log.debug('Get frame rate called')
        return self.getprops(['AcquisitionFrameRate'])['AcquisitionFrameRate']
    @frame_rate.setter
    def frame_rate(self, frame_rate_hz):
        self.log.debug('Set frame rate called with: '+str(frame_rate_hz))
        self.frame_rate_auto = False
        self.setprops({'AcquisitionFrameRate':frame_rate_hz})
    @property
    def gain_auto(self):
        """bool: Get or set automatic gain. If True the gain will be continuously updated."""
        self.log.debug('Get gain auto called')
        val = self.getprops(['GainAuto'])['GainAuto'].lower()
        return True if val == 'continuous' else False
    @gain_auto.setter
    def gain_auto(self, auto):
        self.log.debug('Set gain called with: '+str(auto))
        auto = bool(auto)
        self.setprops({'GainAuto': 'Continuous' if auto else 'Off'})
    @property
    def gain_limit(self):
        """tuple of float: Get the minimum and maximum gain in dB supported."""
        self.log.debug('Get gain limit called')
        mn,mx = list(self.getprops(['GainDB_Min', 'GainDB_Max']).values())
        return (mn,mx)
    @property
    def gain(self):
        """Float: Get or set the camera gain in dB. Will set auto frame rate to False."""
        self.log.debug('Get gain called')
        return self.getprops(['Gain'])['Gain']
    @gain.setter
    def gain(self, gain_db):
        self.log.debug('Set gain called with: '+str(gain_db))
        self.gain_auto = False
        self.setprops({'Gain':gain_db})
    @property
    def exposure_time_auto(self):
        """bool: Get or set automatic exposure time. If True the exposure time will be continuously updated."""
        self.log.debug('Get exposure time auto called')
        val = self.getprops(['ExposureAuto'])['ExposureAuto'].lower()
        return True if val == 'continuous' else False
    @exposure_time_auto.setter
    def exposure_time_auto(self, auto):
        self.log.debug('Set exposure time called with: '+str(auto))
        auto = bool(auto)
        self.setprops({'ExposureAuto': 'Continuous' if auto else 'Off'})
    @property
    def exposure_time_limit(self):
        """tuple of float: Get the minimum and maximum expsure time in ms supported."""
        # BUGFIX: log message previously said 'Get gain limit called'.
        self.log.debug('Get exposure time limit called')
        prop_list = list(self.getprops(['ExposureTime_FloatMin', 'ExposureTime_FloatMax']).values())
        return (prop_list[0]/1000, prop_list[1]/1000)
    @property
    def exposure_time(self):
        """float: Get or set the camera expsure time in ms. Will set auto exposure time to False."""
        self.log.debug('Get exposure time called')
        return self.getprops(['ExposureTime'])['ExposureTime'] / 1000
    @exposure_time.setter
    def exposure_time(self, exposure_ms):
        self.log.debug('Set exposure time called with: '+str(exposure_ms))
        assert self.is_init, 'Camera must be initialised'
        exposure_ms = float(exposure_ms)*1000
        self.exposure_time_auto = False
        self.setprops({'ExposureTime':exposure_ms})
    @property
    def binning(self):
        """int: Number of pixels to bin in each dimension (e.g. 2 gives 2x2 binning). Bins by summing.
        Setting will stop and restart camera if running. Will scale size_readout to show the same sensor area.
        """
        val_horiz, val_vert = self.getprops(['BinningHorizontal','BinningVertical']).values()
        if val_horiz != val_vert:
            self.log.warning('Horzontal and vertical binning is not equal.')
        return val_horiz
    @binning.setter
    def binning(self, binning):
        self.log.debug('Set binning called with: '+str(binning))
        binning = int(binning)
        initial_size = self.size_readout
        initial_bin = self.binning
        self.log.debug('Initial sensor readout area and binning: '+str(initial_size)+' ,'+str(initial_bin))
        self.setprops({'BinningHorizontal':binning, 'BinningVertical':binning})
        new_bin = self.binning
        bin_scaling = new_bin/initial_bin
        new_size = [round(sz/bin_scaling) for sz in initial_size]
        self.log.debug('New binning and new size to set: '+str(new_bin)+' ,'+str(new_size))
        try:
            self.size_readout = new_size
            self.log.debug('Set new size to: ' + str(self.size_readout))
        except:
            self.log.warning('Failed to scale readout after binning change', exc_info=True)
    @property
    def size_max(self):
        """tuple of int: Get the maximum allowed readout size (width, height) in pixels."""
        val_w, val_h = self.getprops(['WidthMax','HeightMax']).values()
        return (val_w, val_h)
    @property
    def size_readout(self):
        """tuple of int: Get or set the number of pixels read out (width, height). Will automatically center.
        This applies after binning, i.e. this is the size the output image will be.
        Setting will stop and restart camera if running.
        """
        val_w, val_h = self.getprops(['Width','Height']).values()
        return (val_w, val_h)
    @size_readout.setter
    def size_readout(self, size):
        assert self.is_init, 'Camera must be initialised'
        if isinstance(size, (int, float)): size = (size, size)
        size = tuple([int(x) for x in size])
        self.log.debug(f'Setting size_readout({size})')
        maxWidth, maxHeight = self.size_max
        new_offset = (round((maxWidth - size[0]) / 2), round((maxHeight - size[1]) / 2))
        self.log.debug('Neccessary offset: ' + str(new_offset))
        self.setprops({'OffsetX':new_offset[0], 'OffsetY':new_offset[1], 'Width':size[0], 'Height':size[1]})
    def add_event_callback(self, method):
        """Add a method to be called when a new image shows up.
        The method should have the signature (image, timestamp, \*args, \*\*kwargs) where:
        - image (numpy.ndarray): The image data as a 2D numpy array.
        - timestamp (datetime.datetime): UTC timestamp when the image event occured (i.e. when the capture
          finished).
        - \*args, \*\*kwargs should be allowed for forward compatability.
        The callback should *not* be used for computations, make sure the method returns as fast as possible.
        Args:
            method: The method to be called, with signature (image, timestamp, \*args, \*\*kwargs).
        """
        self.log.debug('Adding to callbacks: ' + str(method))
        self._call_on_image.add(method)
    def remove_event_callback(self, method):
        """Remove method from event callbacks."""
        self.log.debug('Removing callbacks: ' + str(method))
        try:
            self._call_on_image.remove(method)
        except:
            self.log.warning('Could not remove callback', exc_info=True)
    @property
    def is_running(self):
        """bool: True if device is currently acquiring data."""
        if not self.is_init: return False
        if self.model.lower() == 'ptgrey':
            return self._ptgrey_camera is not None and self._ptgrey_camera.IsStreaming()
        else:
            self.log.warning('Forbidden model string defined.')
            raise RuntimeError('An unknown (forbidden) model is defined: '+str(self.model))
    def start(self):
        """ Start the acquisition. Device must be initialised."""
        assert self.is_init, 'Must initialise first'
        if self.is_running:
            self.log.info('Camera already running, name: '+self.name)
            return
        self.log.debug('Got start command')
        self._imgs_since_start = 0
        try:
            self._ptgrey_camera.BeginAcquisition()
        except PySpin.SpinnakerException as e:
            self.log.debug('Could not start:', exc_info=True)
            if 'already streaming' in e.message:
                self.log.warning('The camera was already streaming...')
            else:
                raise RuntimeError('Failed to start camera acquisition') from e
        self.log.info('Acquisition started, name: '+self.name)
    def stop(self):
        """Stop the acquisition."""
        if not self.is_running:
            self.log.info('Camera was not running, name: '+self.name)
            return
        self.log.debug('Got stop command')
        if self.model.lower() == 'ptgrey':
            self.log.debug('Using PtGrey')
            try:
                self._ptgrey_camera.EndAcquisition()
            except:
                self.log.debug('Could not stop:', exc_info=True)
                raise RuntimeError('Failed to stop camera acquisition')
        else:
            self.log.warning('Forbidden model string defined.')
            raise RuntimeError('An unknown (forbidden) model is defined: '+str(self.model))
        self._image_data = None
        self._image_timestamp = None
        self._got_image_event.clear()
        self.log.info('Acquisition stopped, name: '+self.name)
    def get_next_image(self, timeout=10):
        """Get the next image to be completed. Camera does not have to be running.
        Args:
            timeout (float): Maximum time (seconds) to wait for the image before raising TimeoutError.
        Returns:
            numpy.ndarray: 2d array with image data.
        """
        assert self.is_init, 'Camera must be initialised'
        if not self.is_running:
            self.log.debug('Camera was not running, start and grab the first image')
            self._got_image_event.clear()
            self.start()
            if not self._got_image_event.wait(timeout):
                raise TimeoutError('Getting image timed out')
            img = self._image_data
            self.stop()
        else:
            self._got_image_event.clear()
            if not self._got_image_event.wait(timeout):
                raise TimeoutError('Getting image timed out')
            img = self._image_data
        return img
    def get_new_image(self, timeout=10):
        """Get an image guaranteed to be started *after* calling this method. Camera does not have to be running.
        Args:
            timeout (float): Maximum time (seconds) to wait for the image before raising TimeoutError.
        Returns:
            numpy.ndarray: 2d array with image data.
        """
        self.log.debug('Got next image request')
        assert self.is_init, 'Camera must be initialised'
        if not self.is_running:
            self.log.debug('Camera was not running, start and grab the first image')
            self._got_image_event.clear()
            self.start()
            if not self._got_image_event.wait(timeout):
                raise TimeoutError('Getting image timed out')
            img = self._image_data
            self.stop()
        else:
            self.log.debug('Camera running, grab the second image to show up')
            # Wait for (and discard) one image that may already have been in flight,
            # then wait for the next one, which must have started after this call.
            self._got_image_event.clear()
            if not self._got_image_event.wait(timeout/2):
                raise TimeoutError('Getting image timed out')
            self._got_image_event.clear()
            if not self._got_image_event.wait(timeout/2):
                raise TimeoutError('Getting image timed out')
            img = self._image_data
        return img
    def get_latest_image(self):
        """Get latest image in the cache immediately. Camera must be running.
        Returns:
            numpy.ndarray: 2d array with image data.
        """
        self.log.debug('Got latest image request')
        assert self.is_running, 'Camera must be running'
        return self._image_data
|
[
"PySpin.System.GetInstance",
"logging.StreamHandler",
"numpy.flipud",
"logging.Formatter",
"datetime.datetime.utcnow",
"pathlib.Path",
"numpy.fliplr",
"threading.Event",
"numpy.rot90",
"weakref.ref",
"logging.handlers.RotatingFileHandler",
"logging.getLogger"
] |
[((3110, 3138), 'logging.getLogger', 'logging.getLogger', (['f"""{name}"""'], {}), "(f'{name}')\n", (3127, 3138), False, 'import logging\n'), ((5060, 5067), 'threading.Event', 'Event', ([], {}), '()\n', (5065, 5067), False, 'from threading import Thread, Event\n'), ((11271, 11281), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (11275, 11281), False, 'from pathlib import Path\n'), ((3347, 3370), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (3368, 3370), False, 'import logging\n'), ((3538, 3637), 'logging.handlers.RotatingFileHandler', 'RotatingFileHandler', (["(self.debug_folder / 'camera.log')"], {'maxBytes': '(1 * 1024 * 1024)', 'backupCount': '(2)'}), "(self.debug_folder / 'camera.log', maxBytes=1 * 1024 * \n 1024, backupCount=2)\n", (3557, 3637), False, 'from logging.handlers import RotatingFileHandler\n'), ((3951, 4068), 'logging.Formatter', 'logging.Formatter', (["('%(asctime)s %(name)s-%(levelname)s-%(threadName)s' +\n '-%(funcName)s-(%(lineno)d) %(message)s')"], {}), "('%(asctime)s %(name)s-%(levelname)s-%(threadName)s' +\n '-%(funcName)s-(%(lineno)d) %(message)s')\n", (3968, 4068), False, 'import logging\n'), ((5779, 5804), 'weakref.ref', 'weakref.ref', (['self.__del__'], {}), '(self.__del__)\n', (5790, 5804), False, 'import atexit, weakref\n'), ((13341, 13368), 'PySpin.System.GetInstance', 'PySpin.System.GetInstance', ([], {}), '()\n', (13366, 13368), False, 'import PySpin\n'), ((15421, 15448), 'PySpin.System.GetInstance', 'PySpin.System.GetInstance', ([], {}), '()\n', (15446, 15448), False, 'import PySpin\n'), ((18122, 18139), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (18137, 18139), False, 'from datetime import datetime\n'), ((2929, 2943), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2933, 2943), False, 'from pathlib import Path\n'), ((3017, 3023), 'pathlib.Path', 'Path', ([], {}), '()\n', (3021, 3023), False, 'from pathlib import Path\n'), ((18566, 18592), 'numpy.fliplr', 'np.fliplr', 
(['image_converted'], {}), '(image_converted)\n', (18575, 18592), True, 'import numpy as np\n'), ((18666, 18692), 'numpy.flipud', 'np.flipud', (['image_converted'], {}), '(image_converted)\n', (18675, 18692), True, 'import numpy as np\n'), ((18766, 18811), 'numpy.rot90', 'np.rot90', (['image_converted', 'self.parent._rot90'], {}), '(image_converted, self.parent._rot90)\n', (18774, 18811), True, 'import numpy as np\n')]
|
# :copyright: Copyright (c) 2018-2020. OS4D Ltd - All Rights Reserved
# :license: Commercial
# Unauthorized copying of this file, via any medium is strictly prohibited
# Written by <NAME> <<EMAIL>>, October 2020
from birder.core.redis import client

# Pub/sub handle permanently subscribed to the shared "system" channel.
channel = client.pubsub()
channel.subscribe('system')


def send(data):
    """Publish *data* on the "system" channel.

    Returns whatever ``client.publish`` returns (for Redis, the number of
    subscribers that received the message).
    """
    # PEP 8 (E731): a named function instead of a lambda assignment gives a
    # useful __name__ in tracebacks while keeping the same call signature.
    return client.publish('system', data)


# Non-blocking read of the next message from the "system" channel.
read = channel.get_message
|
[
"birder.core.redis.client.publish",
"birder.core.redis.client.pubsub"
] |
[((264, 279), 'birder.core.redis.client.pubsub', 'client.pubsub', ([], {}), '()\n', (277, 279), False, 'from birder.core.redis import client\n'), ((329, 359), 'birder.core.redis.client.publish', 'client.publish', (['"""system"""', 'data'], {}), "('system', data)\n", (343, 359), False, 'from birder.core.redis import client\n')]
|
from django.contrib import admin
from .models import FAQModel, AssistanceModel, FeedbackModel

# Expose the helpdesk models in the default Django admin site.
for _model in (FAQModel, AssistanceModel, FeedbackModel):
    admin.site.register(_model)
|
[
"django.contrib.admin.site.register"
] |
[((96, 125), 'django.contrib.admin.site.register', 'admin.site.register', (['FAQModel'], {}), '(FAQModel)\n', (115, 125), False, 'from django.contrib import admin\n'), ((126, 162), 'django.contrib.admin.site.register', 'admin.site.register', (['AssistanceModel'], {}), '(AssistanceModel)\n', (145, 162), False, 'from django.contrib import admin\n'), ((163, 197), 'django.contrib.admin.site.register', 'admin.site.register', (['FeedbackModel'], {}), '(FeedbackModel)\n', (182, 197), False, 'from django.contrib import admin\n')]
|
# -*- coding: utf-8 -*-
"""
rstblog.modules
~~~~~~~~~~~~~~~
The module interface.
:copyright: (c) 2010 by <NAME>.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
def add_module_path(folder):
    """Register *folder* as an additional search location for rstblog modules."""
    import os
    absolute_folder = os.path.abspath(folder)
    __path__.append(absolute_folder)
def find_module(name):
    """Return the module ``rstblog.modules.<name>`` or raise an ImportError.

    :param name: bare module name, without the ``rstblog.modules.`` prefix
    """
    import importlib
    # importlib.import_module returns the leaf module directly, unlike
    # __import__, which returns the top-level package and forced the
    # original code to fish the module out of sys.modules.
    return importlib.import_module('rstblog.modules.' + name)
|
[
"os.path.abspath"
] |
[((351, 374), 'os.path.abspath', 'os.path.abspath', (['folder'], {}), '(folder)\n', (366, 374), False, 'import os\n')]
|
import os
import sys
import cv2
import numpy as np
import pandas as pd
import os
import matplotlib.image as mpimg
def file_to_fragment(path, return_path):
    """Cut every file under *path* into 64x64 grayscale fragments saved as JPEGs.

    Files are grouped by extension (the token after the first ``.`` in the
    file name). For each supported class, every 4096-byte chunk (after a
    512-byte header skip) is reshaped to a 64x64 image, converted to RGB and
    written under ``return_path/<class>/<count>.jpg``.

    :param path: root directory of the input corpus
    :param return_path: root directory receiving one sub-folder per class
    """
    file_path = []
    file_class = []
    for subdirectory in os.walk(path):
        for file in subdirectory[2]:
            file_path.append(os.path.join(subdirectory[0], file))
            file_class.append(file.split(".")[1])
    df = pd.DataFrame({"class": file_class, "path": file_path})
    path_sample = return_path
    for cls in ["csv", "doc", "gif", "gz", "html", "jpg", "pdf", "png", "ppt", "ps", "txt", "xls", "xml"]:
        # os.path.join with two arguments instead of the original
        # single-argument call on a manually concatenated string.
        path_folder = os.path.join(path_sample, cls)
        if not os.path.isdir(path_folder):
            os.mkdir(path_folder)
        count = 0
        for file_path in df[df["class"] == cls][["path"]].values:
            # Context manager guarantees the handle is closed even on error;
            # the original leaked one file descriptor per input file.
            with open(file_path[0], "rb") as file:
                filesize = os.stat(file_path[0]).st_size
                # NOTE(review): the size formula subtracts 1024 bytes but only
                # 512 header bytes are skipped below -- confirm the intended
                # header size.
                nb_images = (filesize - 1024) // 4096
                if nb_images != 0:
                    file.read(512)
                    data = file.read(4096)
                    for i in range(nb_images):
                        flatNumpyArray = np.array(bytearray(data))
                        grayImage = flatNumpyArray.reshape(64, 64)
                        backtorgb = cv2.cvtColor(grayImage, cv2.COLOR_GRAY2RGB)
                        cv2.imwrite(os.path.join(path_folder, str(count) + '.jpg'), backtorgb)
                        data = file.read(4096)
                        count += 1
def read_fragment_to_dataframe(path: str) -> None:
    """Walk *path*, load every fragment image and print its pixel array.

    NOTE(review): despite the name, this builds no DataFrame and returns
    nothing; ``cls`` accumulates whole ``os.walk`` tuples (not class labels)
    and is never used -- this looks like unfinished code, confirm intent.
    """
    cls = []
    image = []  # NOTE(review): immediately shadowed by the inner loop variable
    for subdirectory in os.walk(path):
        for image in subdirectory[2]:
            cls.append(subdirectory)
            img = mpimg.imread(os.path.join(subdirectory[0], image))
            print(img)
if __name__ == '__main__':
    # Slice the GovDocs corpus into per-class 64x64 fragment folders.
    source_root = "./data/GovDocs"
    fragment_root = "./data/Fragments"
    file_to_fragment(path=source_root, return_path=fragment_root)
|
[
"pandas.DataFrame",
"os.mkdir",
"os.stat",
"os.path.isdir",
"cv2.cvtColor",
"os.walk",
"os.path.join"
] |
[((225, 238), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (232, 238), False, 'import os\n'), ((375, 429), 'pandas.DataFrame', 'pd.DataFrame', (["{'class': file_class, 'path': file_path}"], {}), "({'class': file_class, 'path': file_path})\n", (387, 429), True, 'import pandas as pd\n'), ((1337, 1350), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (1344, 1350), False, 'import os\n'), ((578, 615), 'os.path.join', 'os.path.join', (["(path_sample + '/' + cls)"], {}), "(path_sample + '/' + cls)\n", (590, 615), False, 'import os\n'), ((625, 651), 'os.path.isdir', 'os.path.isdir', (['path_folder'], {}), '(path_folder)\n', (638, 651), False, 'import os\n'), ((656, 677), 'os.mkdir', 'os.mkdir', (['path_folder'], {}), '(path_folder)\n', (664, 677), False, 'import os\n'), ((291, 326), 'os.path.join', 'os.path.join', (['subdirectory[0]', 'file'], {}), '(subdirectory[0], file)\n', (303, 326), False, 'import os\n'), ((800, 821), 'os.stat', 'os.stat', (['file_path[0]'], {}), '(file_path[0])\n', (807, 821), False, 'import os\n'), ((1434, 1470), 'os.path.join', 'os.path.join', (['subdirectory[0]', 'image'], {}), '(subdirectory[0], image)\n', (1446, 1470), False, 'import os\n'), ((1084, 1127), 'cv2.cvtColor', 'cv2.cvtColor', (['grayImage', 'cv2.COLOR_GRAY2RGB'], {}), '(grayImage, cv2.COLOR_GRAY2RGB)\n', (1096, 1127), False, 'import cv2\n')]
|
import random
import threading
class RR() :
    """Toy "Russian Roulette" punishment game built on threading timers."""
    def __init__(self,v):
        # v == 0 enables the staggered "Randomizing..." countdown in kapoy().
        self.v = v
    def kapoy (self):
        # Announce the game; for v == 0 schedule three one-shot timers that
        # print progress messages after 1, 2 and 3 seconds.
        print('Russian Roulette is starting!!')
        if self.v == 0 :
            def random1():
                print('Randomizing your punishment...')
            timer = threading.Timer(1.0, random1)
            timer.start()
            def random2():
                print('Randomizing your punishment....')
            timer = threading.Timer(2.0, random2)
            timer.start()
            def random3():
                print('Randomizing your punishment.....')
            timer = threading.Timer(3.0, random3)
            timer.start()
    def start ():
        # NOTE(review): missing ``self`` -- only callable as ``RR.start()``.
        punishment_ = ['[Shout you name 3 times]','[Kiss the forehead of your friend]','[Roll 3 times]',
                       '[Confess you embarassing moments in the past]',
                       '[Bang your head in the door]','[Slap your face 3 times]','[Imitate an animal for 10 secs]',
                       '[Sing your favorite song]',
                       '[Dance for 20 secs]','[Say HI! to a random person]']
        rand_punishment = random.choice(punishment_)
        print('***********************')
        print(rand_punishment)
        print('***********************')
        # NOTE(review): ``start`` is a class attribute, not a name visible in
        # this scope -- this line raises NameError at runtime; confirm intent
        # (probably meant ``RR.start``).
        timer = threading.Timer(4.0, start)
        timer.start()
|
[
"threading.Timer",
"random.choice"
] |
[((245, 274), 'threading.Timer', 'threading.Timer', (['(1.0)', 'random1'], {}), '(1.0, random1)\n', (260, 274), False, 'import threading\n'), ((371, 400), 'threading.Timer', 'threading.Timer', (['(2.0)', 'random2'], {}), '(2.0, random2)\n', (386, 400), False, 'import threading\n'), ((497, 526), 'threading.Timer', 'threading.Timer', (['(3.0)', 'random3'], {}), '(3.0, random3)\n', (512, 526), False, 'import threading\n'), ((1088, 1115), 'threading.Timer', 'threading.Timer', (['(4.0)', 'start'], {}), '(4.0, start)\n', (1103, 1115), False, 'import threading\n'), ((945, 971), 'random.choice', 'random.choice', (['punishment_'], {}), '(punishment_)\n', (958, 971), False, 'import random\n')]
|
import sys
# Make the bundled client in ./lib importable (provides eloqua_request).
sys.path.append('./lib')
from eloqua_request import EloquaRequest
# NOTE(review): placeholder credentials -- substitute real values before use.
request = EloquaRequest('site', 'user', 'password')
# Fetch up to 50 emails whose name matches "Demand*", at minimal detail depth.
response = request.get('/assets/emails?search=Demand*&page=1&count=50&depth=minimal', None)
|
[
"sys.path.append",
"eloqua_request.EloquaRequest"
] |
[((11, 35), 'sys.path.append', 'sys.path.append', (['"""./lib"""'], {}), "('./lib')\n", (26, 35), False, 'import sys\n'), ((88, 129), 'eloqua_request.EloquaRequest', 'EloquaRequest', (['"""site"""', '"""user"""', '"""password"""'], {}), "('site', 'user', 'password')\n", (101, 129), False, 'from eloqua_request import EloquaRequest\n')]
|
import datetime
import pytest
import pytz
from applications.models import (
Application,
ApplicationEvent,
ApplicationEventSchedule,
ApplicationRound,
)
@pytest.fixture
def default_application_round() -> ApplicationRound:
    """Create an ApplicationRound spanning 2020-01-01 through 2020-08-30 (UTC)."""
    utc = pytz.UTC
    period_start = datetime.datetime(2020, 1, 1, tzinfo=utc)
    period_end = datetime.datetime(2020, 8, 30, tzinfo=utc)
    return ApplicationRound.objects.create(
        application_period_begin=period_start,
        application_period_end=period_end,
        reservation_period_begin=period_start.date(),
        reservation_period_end=period_end.date(),
        public_display_begin=period_start,
        public_display_end=period_end,
    )
@pytest.fixture
def minimal_application(default_application_round) -> Application:
    """Create an Application carrying only the default round."""
    round_id = default_application_round.id
    return Application.objects.create(application_round_id=round_id)
@pytest.fixture
def recurring_application_event(minimal_application) -> ApplicationEvent:
    """Weekly "Football" event (twice a week), Jan 1 -- Feb 28 2020."""
    one_hour = datetime.timedelta(hours=1)
    return ApplicationEvent.objects.create(
        application=minimal_application,
        num_persons=10,
        min_duration=one_hour,
        max_duration=one_hour * 2,
        name="Football",
        events_per_week=2,
        begin=datetime.date(2020, 1, 1),
        end=datetime.date(2020, 2, 28),
        biweekly=False,
    )
@pytest.fixture
def recurring_bi_weekly_application_event(minimal_application) -> ApplicationEvent:
    """Bi-weekly "Soccer" event (twice a week), Jan 1 -- Mar 31 2020."""
    one_hour = datetime.timedelta(hours=1)
    return ApplicationEvent.objects.create(
        application=minimal_application,
        num_persons=10,
        min_duration=one_hour,
        max_duration=one_hour * 2,
        name="Soccer",
        events_per_week=2,
        begin=datetime.date(2020, 1, 1),
        end=datetime.date(2020, 3, 31),
        biweekly=True,
    )
@pytest.fixture
def scheduled_for_tuesday(recurring_application_event) -> ApplicationEventSchedule:
    """Schedule the recurring event on Tuesday (day=1), 10:00 -- 12:00."""
    return ApplicationEventSchedule.objects.create(
        day=1,
        begin="10:00",
        end="12:00",
        application_event=recurring_application_event,
    )
|
[
"datetime.date",
"datetime.datetime",
"applications.models.Application.objects.create",
"datetime.timedelta",
"applications.models.ApplicationEventSchedule.objects.create"
] |
[((999, 1076), 'applications.models.Application.objects.create', 'Application.objects.create', ([], {'application_round_id': 'default_application_round.id'}), '(application_round_id=default_application_round.id)\n', (1025, 1076), False, 'from applications.models import Application, ApplicationEvent, ApplicationEventSchedule, ApplicationRound\n'), ((2185, 2310), 'applications.models.ApplicationEventSchedule.objects.create', 'ApplicationEventSchedule.objects.create', ([], {'day': '(1)', 'begin': '"""10:00"""', 'end': '"""12:00"""', 'application_event': 'recurring_application_event'}), "(day=1, begin='10:00', end='12:00',\n application_event=recurring_application_event)\n", (2224, 2310), False, 'from applications.models import Application, ApplicationEvent, ApplicationEventSchedule, ApplicationRound\n'), ((319, 380), 'datetime.datetime', 'datetime.datetime', ([], {'year': '(2020)', 'month': '(1)', 'day': '(1)', 'tzinfo': 'pytz.UTC'}), '(year=2020, month=1, day=1, tzinfo=pytz.UTC)\n', (336, 380), False, 'import datetime\n'), ((435, 497), 'datetime.datetime', 'datetime.datetime', ([], {'year': '(2020)', 'month': '(8)', 'day': '(30)', 'tzinfo': 'pytz.UTC'}), '(year=2020, month=8, day=30, tzinfo=pytz.UTC)\n', (452, 497), False, 'import datetime\n'), ((554, 594), 'datetime.date', 'datetime.date', ([], {'year': '(2020)', 'month': '(1)', 'day': '(1)'}), '(year=2020, month=1, day=1)\n', (567, 594), False, 'import datetime\n'), ((627, 668), 'datetime.date', 'datetime.date', ([], {'year': '(2020)', 'month': '(8)', 'day': '(30)'}), '(year=2020, month=8, day=30)\n', (640, 668), False, 'import datetime\n'), ((699, 760), 'datetime.datetime', 'datetime.datetime', ([], {'year': '(2020)', 'month': '(1)', 'day': '(1)', 'tzinfo': 'pytz.UTC'}), '(year=2020, month=1, day=1, tzinfo=pytz.UTC)\n', (716, 760), False, 'import datetime\n'), ((811, 873), 'datetime.datetime', 'datetime.datetime', ([], {'year': '(2020)', 'month': '(8)', 'day': '(30)', 'tzinfo': 'pytz.UTC'}), '(year=2020, month=8, 
day=30, tzinfo=pytz.UTC)\n', (828, 873), False, 'import datetime\n'), ((1299, 1326), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (1317, 1326), False, 'import datetime\n'), ((1349, 1376), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(2)'}), '(hours=2)\n', (1367, 1376), False, 'import datetime\n'), ((1444, 1484), 'datetime.date', 'datetime.date', ([], {'year': '(2020)', 'month': '(1)', 'day': '(1)'}), '(year=2020, month=1, day=1)\n', (1457, 1484), False, 'import datetime\n'), ((1498, 1539), 'datetime.date', 'datetime.date', ([], {'year': '(2020)', 'month': '(2)', 'day': '(28)'}), '(year=2020, month=2, day=28)\n', (1511, 1539), False, 'import datetime\n'), ((1803, 1830), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (1821, 1830), False, 'import datetime\n'), ((1853, 1880), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(2)'}), '(hours=2)\n', (1871, 1880), False, 'import datetime\n'), ((1946, 1986), 'datetime.date', 'datetime.date', ([], {'year': '(2020)', 'month': '(1)', 'day': '(1)'}), '(year=2020, month=1, day=1)\n', (1959, 1986), False, 'import datetime\n'), ((2000, 2041), 'datetime.date', 'datetime.date', ([], {'year': '(2020)', 'month': '(3)', 'day': '(31)'}), '(year=2020, month=3, day=31)\n', (2013, 2041), False, 'import datetime\n')]
|
from ipykernel.kernelbase import Kernel
import tempfile
import os
from .realtime_subprocess import RealTimeSubprocess
from .fprogram import FortranGatherer
class FortranKernel(Kernel):
    """Jupyter kernel that accumulates Fortran source and runs it via $FC.

    Each executed cell is parsed into the running program (FortranGatherer),
    compiled with gfortran (or $FC) into a temporary executable, and the
    executable's output is streamed back to the frontend.
    """
    implementation = 'jfk-fling'
    implementation_version = '0.1'
    language = 'Fortran'
    language_version = 'F2008'
    language_info = {'name': 'fortran',
                     'mimetype': 'text/plain',
                     'file_extension': '.f90'}
    banner = ("Fortran kernel.\n"
              "Uses $FC, compiles in F2008, and creates source code "
              "files and executables in temporary folder.\n")
    def __init__(self, *args, **kwargs):
        super(FortranKernel, self).__init__(*args, **kwargs)
        # Accumulates parsed sub-programs across cells.
        self.gatherer = FortranGatherer()
        # Temp source/binary paths removed on shutdown.
        self.files_for_cleanup = []
        # Cells deferred via the %fragment magic, joined into the next run.
        self.fragment_accumulator = []
    def cleanup_files(self):
        """Remove all the temporary files created by the kernel"""
        for fname in self.files_for_cleanup:
            os.remove(fname)
    def new_temp_file(self, **kwargs):
        """Create a new temp file to be deleted when the kernel shuts down"""
        fh = tempfile.NamedTemporaryFile(delete=False, mode='w', **kwargs)
        self.files_for_cleanup.append(fh.name)
        return fh
    def _write_to_stdout(self, contents):
        # Stream text to the frontend's stdout channel.
        self.send_response(
            self.iopub_socket, 'stream', {'name': 'stdout', 'text': contents})
    def _write_to_stderr(self, contents):
        # Stream text to the frontend's stderr channel.
        self.send_response(
            self.iopub_socket, 'stream', {'name': 'stderr', 'text': contents})
    def create_jupyter_subprocess(self, cmd):
        # Run cmd, forwarding its stdout/stderr to the frontend as it arrives.
        return RealTimeSubprocess(
            cmd,
            lambda contents: self._write_to_stdout(contents.decode()),
            lambda contents: self._write_to_stderr(contents.decode()))
    def compile_with_gfortran(self, source_filename, binary_filename):
        """Launch $FC (default gfortran) on the source; returns the subprocess."""
        compiler = os.environ.get('FC', 'gfortran')
        fflags = os.environ.get('FFLAGS', '').split(' ')
        args = ([compiler, source_filename, '-std=f2008'] +
                fflags +
                ['-o', binary_filename])
        return self.create_jupyter_subprocess(args)
    def split_magics(self, code):
        """Split leading %-magic lines from the cell body.

        Returns (magics, code) where magics is the list of magic names and
        code is the remaining source. Magic parsing stops at the first
        non-empty, non-magic line.
        """
        code_lines = []
        magics = []
        lines = code.split('\n')
        state = 'magics'
        for line in lines:
            if state == 'magics':
                if line.startswith('%'):
                    magics.append(line.lstrip('%'))
                    continue
                elif not line:
                    continue
                state = 'code'
            code_lines.append(line)
        return magics, '\n'.join(code_lines)
    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        """Handle a cell: apply magics, parse, compile and run the program.

        Supported magics: %code (print accumulated program), %clear (reset
        the gatherer), %fragment (defer this cell, join into the next run).
        Always reports status 'ok'; errors are streamed to stderr instead.
        """
        response_template = {
            'status': 'ok', 'execution_count': self.execution_count,
            'payload': [], 'user_expressions': {}}
        fragment = False
        magics, code = self.split_magics(code)
        if 'code' in magics:
            if code.strip():
                self._write_to_stderr(
                    'The %code magic must not have code body.')
            self._write_to_stdout(self.gatherer.to_program())
            return response_template
        elif 'clear' in magics:
            self.gatherer.clear()
        elif 'fragment' in magics:
            fragment = True
            self.fragment_accumulator.append(code)
            return response_template
        # Prepend any deferred %fragment cells to this cell's code.
        if self.fragment_accumulator:
            code = '\n'.join(self.fragment_accumulator + [code])
            self.fragment_accumulator = []
        try:
            self.gatherer.extend(code)
        except Exception as exception:
            msg = '[FAILED TO PARSE:] {}'.format(str(exception))
            self._write_to_stderr(msg)
            return response_template
        program_code = self.gatherer.to_program()
        with self.new_temp_file(suffix='.f90') as source_file:
            source_file.write(program_code)
            source_file.flush()
        # NOTE(review): unreachable -- the %fragment branch above already
        # returned, so ``fragment`` is always False here; confirm intent.
        if fragment:
            return response_template
        with self.new_temp_file(suffix='.out') as binary_file:
            p = self.compile_with_gfortran(
                source_file.name, binary_file.name)
            while p.poll() is None:
                p.write_contents()
            # Flush any output produced after the final poll.
            p.write_contents()
            if p.returncode != 0: # Compilation failed
                # Remove the most recently added sub-program.
                del self.gatherer.programs[-1]
                msg = ("[Fortran kernel] gfortran exited with code {}, "
                       "the executable will not be executed"
                       .format(p.returncode))
                self._write_to_stderr(msg)
                return response_template
            p = self.create_jupyter_subprocess(binary_file.name)
            while p.poll() is None:
                p.write_contents()
            p.write_contents()
            if p.returncode != 0:
                # e.g. segfault...
                del self.gatherer.programs[-1]
                msg = ("[Fortran kernel] Executable exited with code {}"
                       "".format(p.returncode))
                self._write_to_stderr(msg)
        return response_template
    def do_shutdown(self, restart):
        # Cleanup the created source code files and executables when
        # shutting down the kernel.
        self.cleanup_files()
|
[
"os.environ.get",
"tempfile.NamedTemporaryFile",
"os.remove"
] |
[((1134, 1195), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)', 'mode': '"""w"""'}), "(delete=False, mode='w', **kwargs)\n", (1161, 1195), False, 'import tempfile\n'), ((1893, 1925), 'os.environ.get', 'os.environ.get', (['"""FC"""', '"""gfortran"""'], {}), "('FC', 'gfortran')\n", (1907, 1925), False, 'import os\n'), ((986, 1002), 'os.remove', 'os.remove', (['fname'], {}), '(fname)\n', (995, 1002), False, 'import os\n'), ((1943, 1971), 'os.environ.get', 'os.environ.get', (['"""FFLAGS"""', '""""""'], {}), "('FFLAGS', '')\n", (1957, 1971), False, 'import os\n')]
|
import json
from monster import Monster
from rune import Rune, Grind, Gem
from constant_maps import *
class Summoner:
    """Summoners War account loaded from a JSON export.

    Parses monsters, runes, grindstones and gems from the export file and
    offers simple analyses (reappraisal candidates, grindstone targets).
    NOTE(review): ``rune`` used in the analysis methods is presumably a
    lookup-table dict star-imported from ``constant_maps`` -- confirm.
    """
    def __init__(self,filename):
        self.data_file = filename
        # NOTE(review): placeholder text -- presumably meant to interpolate
        # the filename; confirm original intent.
        print(f'Loading data from (unknown)')
        with open(filename) as fin:
            data = json.load(fin)
        self.mons = {}    # monster id -> Monster
        self.runes = {}   # rune id -> Rune
        self.grinds = []  # Grind objects (grindstones)
        self.gems = []    # Gem objects (enchanted gems)
        self.parse_mons(data['unit_list'])
        self.parse_runes(data['runes'])
        self.parse_grinds(data['rune_craft_item_list'])
        # Group grinds by set then stat, best grade first.
        self.grinds.sort(key=lambda x: (x.set, x.stat, -x.grade))
    def parse_mons(self,mon_list):
        """Build Monster objects and register the runes equipped on each."""
        for mon in mon_list:
            m = Monster(mon)
            self.mons[m.id] = m
            self.parse_runes(mon['runes'])
    def parse_grinds(self,rune_craft_list):
        """Split craft items into gems (types 1 and 3) and grindstones."""
        for gg in rune_craft_list:
            if gg['craft_type'] == 1 or gg['craft_type'] == 3: self.gems.append(Gem(gg['craft_type_id'],gg['sell_value']))
            else: self.grinds.append(Grind(gg['craft_type_id'],gg['sell_value']))
    def parse_runes(self,rune_list):
        # NOTE(review): the loop variable shadows the module-level ``rune``
        # mapping used elsewhere in this class -- confirm this is intended.
        for rune in rune_list:
            r = Rune(rune)
            self.runes[r.id] = r
    def find_rune(self,rune_id):
        """Return the Monster wearing the rune, or 'Inventory' if unequipped."""
        location = self.runes[rune_id].location
        if location != 0:
            location = self.mons[location]
        else: location = 'Inventory'
        return location
    def print_runes(self):
        """Print every rune with its substats and current location."""
        for rune_id in self.runes:
            print(self.runes[rune_id].str_with_subs())
            print(self.find_rune(rune_id))
            print()
    def analyze_reapps(self,n=10):
        """Print the top *n* runes by reappraisal score, best first."""
        poss = []
        for rune_id in self.runes:
            s = self.runes[rune_id].reapp
            if s > 0:
                poss.append((rune_id,s))
        poss.sort(key=lambda x: x[1],reverse=True)
        for i in range(n):
            print('Option',i+1)
            rune_id, s = poss[i]
            print(self.runes[rune_id].str_with_subs())
            print('On {}, Score {}'.format(self.find_rune(rune_id),s))
            print(rune_id)
    def analyze_grinds(self):
        """For each grindstone type, print the runes it could improve.

        Only runes at level >= 12 with a low reappraisal score (<= 0.5) and a
        matching set (or a universal set-99 grind) are considered.
        """
        counts = {}  # (set, stat, grade) -> number of grinds owned
        poss = {}    # (set, stat, grade) -> [(rune, gain, stat), ...]
        for grind in self.grinds:
            k = (grind.set,grind.stat,grind.grade)
            if k in counts: counts[k] += 1
            else:
                counts[k] = 1
                poss[k] = []
            # NOTE(review): this scan runs once per grind, so duplicate grind
            # types append the same candidates repeatedly -- confirm intent.
            for rune_id in self.runes:
                r = self.runes[rune_id]
                if r.level < 12 or r.reapp > .5: continue
                if r.set != grind.set and grind.set != 99: continue
                for sub in r.subs:
                    if sub['stat'] == grind.stat:
                        if sub['grind'] < grind.get_max():
                            poss[k].append((r,grind.get_max()-sub['grind'],grind.stat))
        for k in poss:
            sset, stat, grade = k
            s = 'Grind'
            c = counts[k]
            if c > 1: s+= 's'
            print('{} {} {} {} {}'.format(c,rune['quality'][grade],rune['sets'][sset],rune['effectTypes'][stat],s))
            runes = poss[k]
            # Largest stat gain first, then by set and location descending.
            runes.sort(key=lambda x: (-x[1],-(x[0].set),-(x[0].location)))
            for x in runes:
                print(f"{x[0]} at {self.find_rune(x[0].id)} can improve {x[1]} {rune['effectTypes'][x[2]]}")
            print()
|
[
"rune.Grind",
"json.load",
"rune.Rune",
"monster.Monster",
"rune.Gem"
] |
[((298, 312), 'json.load', 'json.load', (['fin'], {}), '(fin)\n', (307, 312), False, 'import json\n'), ((733, 745), 'monster.Monster', 'Monster', (['mon'], {}), '(mon)\n', (740, 745), False, 'from monster import Monster\n'), ((1219, 1229), 'rune.Rune', 'Rune', (['rune'], {}), '(rune)\n', (1223, 1229), False, 'from rune import Rune, Grind, Gem\n'), ((997, 1039), 'rune.Gem', 'Gem', (["gg['craft_type_id']", "gg['sell_value']"], {}), "(gg['craft_type_id'], gg['sell_value'])\n", (1000, 1039), False, 'from rune import Rune, Grind, Gem\n'), ((1077, 1121), 'rune.Grind', 'Grind', (["gg['craft_type_id']", "gg['sell_value']"], {}), "(gg['craft_type_id'], gg['sell_value'])\n", (1082, 1121), False, 'from rune import Rune, Grind, Gem\n')]
|
from devito.core.cpu import CPU64Operator, CPU64OpenMPOperator
from devito.exceptions import InvalidOperator
from devito.passes.clusters import (Blocking, Lift, cire, cse, eliminate_arrays,
extract_increments, factorize, fuse, optimize_pows)
from devito.tools import timed_pass
__all__ = ['Intel64Operator', 'Intel64OpenMPOperator', 'Intel64FSGOperator',
           'Intel64FSGOpenMPOperator']
# The plain Intel64 operators are straight aliases of the generic CPU64 ones;
# only the FSG variants below specialize behaviour.
Intel64Operator = CPU64Operator
Intel64OpenMPOperator = CPU64OpenMPOperator
class Intel64FSGOperator(Intel64Operator):
    """
    Operator with performance optimizations tailored "For Small Grids" (FSG).
    """
    @classmethod
    def _normalize_kwargs(cls, **kwargs):
        """Normalize user kwargs, rejecting options incompatible with FSG.

        Raises InvalidOperator when `min-storage` is requested, since it
        works against the FSG optimization strategy.
        """
        kwargs = super(Intel64FSGOperator, cls)._normalize_kwargs(**kwargs)
        if kwargs['options']['min-storage']:
            # Message fixed: the original string concatenation dropped the
            # closing backtick and produced a doubled space.
            raise InvalidOperator('You should not use `min-storage` with '
                                  '`advanced-fsg` as they work in opposite '
                                  'directions')
        return kwargs
    @classmethod
    @timed_pass(name='specializing.Clusters')
    def _specialize_clusters(cls, clusters, **kwargs):
        """Apply the FSG-tuned Cluster-level optimization pipeline."""
        options = kwargs['options']
        platform = kwargs['platform']
        sregistry = kwargs['sregistry']
        # Toposort+Fusion (the former to expose more fusion opportunities)
        clusters = fuse(clusters, toposort=True)
        # Hoist and optimize Dimension-invariant sub-expressions
        clusters = cire(clusters, 'invariants', sregistry, options, platform)
        clusters = Lift().process(clusters)
        # Reduce flops (potential arithmetic alterations)
        clusters = extract_increments(clusters, sregistry)
        clusters = cire(clusters, 'sops', sregistry, options, platform)
        clusters = factorize(clusters)
        clusters = optimize_pows(clusters)
        # The previous passes may have created fusion opportunities, which in
        # turn may enable further optimizations
        clusters = fuse(clusters)
        clusters = eliminate_arrays(clusters)
        # Reduce flops (no arithmetic alterations)
        clusters = cse(clusters, sregistry)
        # Blocking to improve data locality
        clusters = Blocking(options).process(clusters)
        return clusters
class Intel64FSGOpenMPOperator(Intel64FSGOperator, CPU64OpenMPOperator):
    # Inherit the FSG cluster pipeline; reuse the OpenMP IET specialization.
    _specialize_iet = CPU64OpenMPOperator._specialize_iet
|
[
"devito.passes.clusters.cire",
"devito.passes.clusters.extract_increments",
"devito.passes.clusters.fuse",
"devito.passes.clusters.eliminate_arrays",
"devito.passes.clusters.factorize",
"devito.exceptions.InvalidOperator",
"devito.passes.clusters.Blocking",
"devito.passes.clusters.Lift",
"devito.passes.clusters.optimize_pows",
"devito.tools.timed_pass",
"devito.passes.clusters.cse"
] |
[((1040, 1080), 'devito.tools.timed_pass', 'timed_pass', ([], {'name': '"""specializing.Clusters"""'}), "(name='specializing.Clusters')\n", (1050, 1080), False, 'from devito.tools import timed_pass\n'), ((1345, 1374), 'devito.passes.clusters.fuse', 'fuse', (['clusters'], {'toposort': '(True)'}), '(clusters, toposort=True)\n', (1349, 1374), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), ((1460, 1518), 'devito.passes.clusters.cire', 'cire', (['clusters', '"""invariants"""', 'sregistry', 'options', 'platform'], {}), "(clusters, 'invariants', sregistry, options, platform)\n", (1464, 1518), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), ((1641, 1680), 'devito.passes.clusters.extract_increments', 'extract_increments', (['clusters', 'sregistry'], {}), '(clusters, sregistry)\n', (1659, 1680), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), ((1700, 1752), 'devito.passes.clusters.cire', 'cire', (['clusters', '"""sops"""', 'sregistry', 'options', 'platform'], {}), "(clusters, 'sops', sregistry, options, platform)\n", (1704, 1752), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), ((1772, 1791), 'devito.passes.clusters.factorize', 'factorize', (['clusters'], {}), '(clusters)\n', (1781, 1791), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), ((1811, 1834), 'devito.passes.clusters.optimize_pows', 'optimize_pows', (['clusters'], {}), '(clusters)\n', (1824, 1834), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), 
((1981, 1995), 'devito.passes.clusters.fuse', 'fuse', (['clusters'], {}), '(clusters)\n', (1985, 1995), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), ((2015, 2041), 'devito.passes.clusters.eliminate_arrays', 'eliminate_arrays', (['clusters'], {}), '(clusters)\n', (2031, 2041), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), ((2113, 2137), 'devito.passes.clusters.cse', 'cse', (['clusters', 'sregistry'], {}), '(clusters, sregistry)\n', (2116, 2137), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), ((849, 966), 'devito.exceptions.InvalidOperator', 'InvalidOperator', (['"""You should not use `min-storage` with `advanced-fsg as they work in opposite directions"""'], {}), "(\n 'You should not use `min-storage` with `advanced-fsg as they work in opposite directions'\n )\n", (864, 966), False, 'from devito.exceptions import InvalidOperator\n'), ((1538, 1544), 'devito.passes.clusters.Lift', 'Lift', ([], {}), '()\n', (1542, 1544), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n'), ((2202, 2219), 'devito.passes.clusters.Blocking', 'Blocking', (['options'], {}), '(options)\n', (2210, 2219), False, 'from devito.passes.clusters import Blocking, Lift, cire, cse, eliminate_arrays, extract_increments, factorize, fuse, optimize_pows\n')]
|
# coding: utf-8
"""
Yapily API
To access endpoints that require authentication, use your application key and secret created in the Dashboard (https://dashboard.yapily.com) # noqa: E501
The version of the OpenAPI document: 1.157.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from yapily.configuration import Configuration
class PaymentStatusDetails(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> OpenAPI type, used by to_dict() for serialization.
    openapi_types = {
        'status': 'str',
        'status_reason': 'str',
        'status_reason_description': 'str',
        'status_update_date': 'datetime',
        'multi_authorisation_status': 'MultiAuthorisation',
        'iso_status': 'PaymentIsoStatus'
    }
    # Python attribute name -> JSON field name in the API payload.
    attribute_map = {
        'status': 'status',
        'status_reason': 'statusReason',
        'status_reason_description': 'statusReasonDescription',
        'status_update_date': 'statusUpdateDate',
        'multi_authorisation_status': 'multiAuthorisationStatus',
        'iso_status': 'isoStatus'
    }
    def __init__(self, status=None, status_reason=None, status_reason_description=None, status_update_date=None, multi_authorisation_status=None, iso_status=None, local_vars_configuration=None):  # noqa: E501
        """PaymentStatusDetails - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._status = None
        self._status_reason = None
        self._status_reason_description = None
        self._status_update_date = None
        self._multi_authorisation_status = None
        self._iso_status = None
        self.discriminator = None
        # Only assign fields actually provided, so setter validation runs
        # exclusively on explicit values.
        if status is not None:
            self.status = status
        if status_reason is not None:
            self.status_reason = status_reason
        if status_reason_description is not None:
            self.status_reason_description = status_reason_description
        if status_update_date is not None:
            self.status_update_date = status_update_date
        if multi_authorisation_status is not None:
            self.multi_authorisation_status = multi_authorisation_status
        if iso_status is not None:
            self.iso_status = iso_status
    @property
    def status(self):
        """Gets the status of this PaymentStatusDetails.  # noqa: E501
        :return: The status of this PaymentStatusDetails.  # noqa: E501
        :rtype: str
        """
        return self._status
    @status.setter
    def status(self, status):
        """Sets the status of this PaymentStatusDetails.
        :param status: The status of this PaymentStatusDetails.  # noqa: E501
        :type: str
        """
        # Client-side enum validation mirroring the OpenAPI specification.
        allowed_values = ["PENDING", "FAILED", "DECLINED", "COMPLETED", "COMPLETED_SETTLEMENT_IN_PROCESS", "EXPIRED", "UNKNOWN", "ACTIVE", "INACTIVE"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and status not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `status` ({0}), must be one of {1}"  # noqa: E501
                .format(status, allowed_values)
            )
        self._status = status
    @property
    def status_reason(self):
        """Gets the status_reason of this PaymentStatusDetails.  # noqa: E501
        :return: The status_reason of this PaymentStatusDetails.  # noqa: E501
        :rtype: str
        """
        return self._status_reason
    @status_reason.setter
    def status_reason(self, status_reason):
        """Sets the status_reason of this PaymentStatusDetails.
        :param status_reason: The status_reason of this PaymentStatusDetails.  # noqa: E501
        :type: str
        """
        self._status_reason = status_reason
    @property
    def status_reason_description(self):
        """Gets the status_reason_description of this PaymentStatusDetails.  # noqa: E501
        :return: The status_reason_description of this PaymentStatusDetails.  # noqa: E501
        :rtype: str
        """
        return self._status_reason_description
    @status_reason_description.setter
    def status_reason_description(self, status_reason_description):
        """Sets the status_reason_description of this PaymentStatusDetails.
        :param status_reason_description: The status_reason_description of this PaymentStatusDetails.  # noqa: E501
        :type: str
        """
        self._status_reason_description = status_reason_description
    @property
    def status_update_date(self):
        """Gets the status_update_date of this PaymentStatusDetails.  # noqa: E501
        :return: The status_update_date of this PaymentStatusDetails.  # noqa: E501
        :rtype: datetime
        """
        return self._status_update_date
    @status_update_date.setter
    def status_update_date(self, status_update_date):
        """Sets the status_update_date of this PaymentStatusDetails.
        :param status_update_date: The status_update_date of this PaymentStatusDetails.  # noqa: E501
        :type: datetime
        """
        self._status_update_date = status_update_date
    @property
    def multi_authorisation_status(self):
        """Gets the multi_authorisation_status of this PaymentStatusDetails.  # noqa: E501
        :return: The multi_authorisation_status of this PaymentStatusDetails.  # noqa: E501
        :rtype: MultiAuthorisation
        """
        return self._multi_authorisation_status
    @multi_authorisation_status.setter
    def multi_authorisation_status(self, multi_authorisation_status):
        """Sets the multi_authorisation_status of this PaymentStatusDetails.
        :param multi_authorisation_status: The multi_authorisation_status of this PaymentStatusDetails.  # noqa: E501
        :type: MultiAuthorisation
        """
        self._multi_authorisation_status = multi_authorisation_status
    @property
    def iso_status(self):
        """Gets the iso_status of this PaymentStatusDetails.  # noqa: E501
        :return: The iso_status of this PaymentStatusDetails.  # noqa: E501
        :rtype: PaymentIsoStatus
        """
        return self._iso_status
    @iso_status.setter
    def iso_status(self, iso_status):
        """Sets the iso_status of this PaymentStatusDetails.
        :param iso_status: The iso_status of this PaymentStatusDetails.  # noqa: E501
        :type: PaymentIsoStatus
        """
        self._iso_status = iso_status
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively serialize nested models, lists and dicts of models.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, PaymentStatusDetails):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, PaymentStatusDetails):
            return True
        return self.to_dict() != other.to_dict()
|
[
"yapily.configuration.Configuration",
"six.iteritems"
] |
[((7061, 7094), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (7074, 7094), False, 'import six\n'), ((1813, 1828), 'yapily.configuration.Configuration', 'Configuration', ([], {}), '()\n', (1826, 1828), False, 'from yapily.configuration import Configuration\n')]
|
import os
class Tools(object):
def getRootPath(self):
'''
获取上级目录的路径
:return:
'''
rootpath = os.path.dirname(os.path.abspath(__file__))
while rootpath:
if os.path.exists(os.path.join(rootpath, 'readme.md')):
break
rootpath = rootpath[0:rootpath.rfind(os.path.sep)]
return rootpath
def main():
tools = Tools()
rootpath = tools.getRootPath()
apkpath = os.path.join(rootpath,'apks','cnode.apk')
print(apkpath)
if __name__ == '__main__':
d=Tools().getRootPath()
print(d)
main()
|
[
"os.path.abspath",
"os.path.join"
] |
[((466, 509), 'os.path.join', 'os.path.join', (['rootpath', '"""apks"""', '"""cnode.apk"""'], {}), "(rootpath, 'apks', 'cnode.apk')\n", (478, 509), False, 'import os\n'), ((154, 179), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (169, 179), False, 'import os\n'), ((235, 270), 'os.path.join', 'os.path.join', (['rootpath', '"""readme.md"""'], {}), "(rootpath, 'readme.md')\n", (247, 270), False, 'import os\n')]
|
# Copyright 2019 <NAME>, Inc. and the University of Edinburgh. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import commentjson
class Config:
def __init__(self,
filepath: str) -> None:
self.gold_name = False
if ".csv" in filepath:
# Assume aggregated file
filename = os.path.basename(filepath)
if "gold" in filename:
conf_name = "-".join(filename.split(".")[0].split("_")[2:])
self.gold_name = True
else:
conf_name = "-".join(filename.split(".")[0].split("_")[1:])
conf_type = "aggregate"
self.is_csv = True
self.config = None
else:
filename = os.path.basename(filepath)
names = filename.split("_")
conf_type = names[0]
conf_name = ".".join(
"_".join(names[1:]).split(".")[:-1])
self.is_csv = False
self.load_config(filepath)
if len(conf_name) == 0:
raise ValueError("Config name cannot be empty: {}".format(filename))
if conf_type in ["prepare",
"train",
"aggregate",
"generate"]:
self.conf_type = conf_type
self.conf_name = conf_name
else:
raise ValueError("Invalid config file name: {}".format(filename))
def load_config(self,
filepath: str) -> None:
with open(filepath, "r") as fin:
lines = fin.readlines()
json_str = "\n".join(lines)
self.config = commentjson.loads(json_str)
def get_agg_name(self):
""" Generate aggregation target file name."""
assert self.conf_type == "aggregate"
if self.is_csv:
if self.gold_name:
agg_name = "_gold_{}".format(self.conf_name.replace('-', '_'))
else:
agg_name = "_{}".format(self.conf_name.replace('-', '_'))
else:
agg_name = "_{}_{}_{}_{}_{}_{}".format(self.config["num_review"],
self.config["top_k"],
"all",
self.config["sentiment"],
self.config["embedding"][-3:],
str(int(self.config["threshold"] * 10))
)
"""
agg_name = "_{}_{}_{}_{}_{}".format(self.config["num_review"],
self.config["top_k"],
self.config["attribute"],
self.config["sentiment"],
self.config["embedding"][-3:],
str(int(self.config["threshold"] * 10))
)
"""
return agg_name
def __getitem__(self,
key: str):
if key not in self.config:
raise KeyError(key)
return self.config[key]
def __contains__(self,
key: str):
return key in self.config
if __name__ == "__main__":
conf = Config("train_test.json")
|
[
"commentjson.loads",
"os.path.basename"
] |
[((944, 970), 'os.path.basename', 'os.path.basename', (['filepath'], {}), '(filepath)\n', (960, 970), False, 'import os\n'), ((1349, 1375), 'os.path.basename', 'os.path.basename', (['filepath'], {}), '(filepath)\n', (1365, 1375), False, 'import os\n'), ((2288, 2315), 'commentjson.loads', 'commentjson.loads', (['json_str'], {}), '(json_str)\n', (2305, 2315), False, 'import commentjson\n')]
|
from tensorflow.keras import backend as K
from tensorflow.keras.metrics import SpecificityAtSensitivity
from tensorflow.keras.metrics import Precision
from tensorflow.keras.metrics import Recall, FalsePositives, FalseNegatives, TruePositives, TrueNegatives
def recall(y_true, y_pred):
m = Recall()
m.update_state(y_true, y_pred)
pre_res = m.result()
res = pre_res.numpy()
return res
def f1(y_true, y_pred):
m = Precision()
m.update_state(y_true, y_pred)
pre_precision = m.result()
precision = pre_precision.numpy()
m = Recall()
m.update_state(y_true, y_pred)
pre_recall = m.result()
recall = pre_recall.numpy()
return 2 * ((precision * recall) / (precision + recall + K.epsilon()))
def balanced_acc(y_true, y_pred):
selectivity = recall(y_true, y_pred)
specificity = SpecificityAtSensitivity(selectivity)
specificity.update_state(y_true, y_pred)
specificity = specificity.result().numpy()
return (selectivity + specificity) / 2
def new_bac(y_true, y_pred):
m = Recall()
m.update_state(y_true, y_pred)
pre_recall = m.result()
recall = pre_recall.numpy()
n = FalsePositives()
n.update_state(y_true, y_pred)
pre_res_2 = n.result()
fp = pre_res_2.numpy()
l = TrueNegatives()
l.update_state(y_true, y_pred)
pre_res = l.result()
tn = pre_res.numpy()
specificity = tn / (tn + fp)
return (recall + specificity) / 2
|
[
"tensorflow.keras.metrics.TrueNegatives",
"tensorflow.keras.metrics.FalsePositives",
"tensorflow.keras.metrics.Recall",
"tensorflow.keras.backend.epsilon",
"tensorflow.keras.metrics.SpecificityAtSensitivity",
"tensorflow.keras.metrics.Precision"
] |
[((300, 308), 'tensorflow.keras.metrics.Recall', 'Recall', ([], {}), '()\n', (306, 308), False, 'from tensorflow.keras.metrics import Recall, FalsePositives, FalseNegatives, TruePositives, TrueNegatives\n'), ((452, 463), 'tensorflow.keras.metrics.Precision', 'Precision', ([], {}), '()\n', (461, 463), False, 'from tensorflow.keras.metrics import Precision\n'), ((582, 590), 'tensorflow.keras.metrics.Recall', 'Recall', ([], {}), '()\n', (588, 590), False, 'from tensorflow.keras.metrics import Recall, FalsePositives, FalseNegatives, TruePositives, TrueNegatives\n'), ((867, 904), 'tensorflow.keras.metrics.SpecificityAtSensitivity', 'SpecificityAtSensitivity', (['selectivity'], {}), '(selectivity)\n', (891, 904), False, 'from tensorflow.keras.metrics import SpecificityAtSensitivity\n'), ((1086, 1094), 'tensorflow.keras.metrics.Recall', 'Recall', ([], {}), '()\n', (1092, 1094), False, 'from tensorflow.keras.metrics import Recall, FalsePositives, FalseNegatives, TruePositives, TrueNegatives\n'), ((1204, 1220), 'tensorflow.keras.metrics.FalsePositives', 'FalsePositives', ([], {}), '()\n', (1218, 1220), False, 'from tensorflow.keras.metrics import Recall, FalsePositives, FalseNegatives, TruePositives, TrueNegatives\n'), ((1324, 1339), 'tensorflow.keras.metrics.TrueNegatives', 'TrueNegatives', ([], {}), '()\n', (1337, 1339), False, 'from tensorflow.keras.metrics import Recall, FalsePositives, FalseNegatives, TruePositives, TrueNegatives\n'), ((753, 764), 'tensorflow.keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (762, 764), True, 'from tensorflow.keras import backend as K\n')]
|
import sys
from pathlib import Path
from argparse import ArgumentParser
import h5py
import pandas as pd
import numpy as np
from tqdm import tqdm
from export import export_read_file
def get_args():
parser = ArgumentParser(description="Parse sequencing_summary.txt files and .paf files to find split reads "
"in an Oxford Nanopore Dataset",
add_help=False)
general = parser.add_argument_group(title='General options')
general.add_argument("-h", "--help",
action="help",
help="Show this help and exit"
)
in_args = parser.add_argument_group(
title='Input sources'
)
in_args.add_argument("-s", "--summary",
required=True,
nargs='+',
help='Sequencing summary file(s) generated by albacore or guppy. Can be compressed '
'using gzip, bzip2, xz, or zip')
in_args.add_argument("--start-events",
help="start_events.csv file generated by event_finder.py",
default="",
required=True,
)
in_args.add_argument("--end-events",
help="end_events.csv file generated by event_finder.py",
default="",
required=True,
)
in_args.add_argument("--targets",
help="A text file of target read ids with one per line.",
default="",
required=True,
)
in_args.add_argument("--bulk-files",
help="ONT bulk FAST5 files.",
nargs='+',
default="",
)
in_args.add_argument("-o", "--output-name",
help="Name of the output folder, this will be generated if it does not exist",
required=True,
default=""
)
in_args.add_argument("--extra-classifications",
help="Any extra MinKNOW classifications to include.",
nargs='*',
default="",
)
return parser.parse_args()
def main():
args = get_args()
# debug(args)
# # sys.exit()
# Make folders
for j in ['starts', 'ends']:
Path('{i}/{j}/{k}'.format(i=args.output_name, j=j, k='fast5')).mkdir(parents=True, exist_ok=True)
# Open files
start_events = pd.read_csv(args.start_events, sep=',')
end_events = pd.read_csv(args.end_events, sep=',')
seq_sum_df = concat_files_to_df(file_list=args.summary, sep='\t')
# Create end_time Series in seq_sum_df
seq_sum_df['end_time'] = seq_sum_df['start_time'] + seq_sum_df['duration']
# Sort and Groupby to segregate runs and channels
seq_sum_df = seq_sum_df.sort_values(by=['run_id', 'channel', 'start_time'], ascending=True)
seq_sum_df_1 = seq_sum_df.copy()
gb = seq_sum_df.groupby(['run_id', 'channel'])
gb1 = seq_sum_df_1.groupby(['run_id', 'channel'])
# Get previous and next start times within groupby
seq_sum_df['next_start'] = gb['start_time'].shift(-1)
seq_sum_df_1['prev_start'] = gb1['start_time'].shift(1)
target_read_ids = []
with open(args.targets, 'r') as file:
for line in file:
target_read_ids.append(line.strip())
classifications = ['pore', 'inrange', 'good_single', 'unblocking']
if args.extra_classifications:
classifications.extend(args.extra_classifications)
# Get end_events for target_read_ids
end_events = end_events[end_events['read_id'].isin(target_read_ids)]
normal_ending_ids = end_events[end_events['time'].ge(0) &
end_events['label'].isin(classifications)]['read_id'].unique()
abnormally_ending_ids = end_events[~end_events['read_id'].isin(normal_ending_ids)]['read_id'].unique()
end_target_ss = seq_sum_df[seq_sum_df['read_id'].isin(abnormally_ending_ids)]
# Get start_events for target_read_ids
start_events = start_events[start_events['read_id'].isin(target_read_ids)]
normal_starting_ids = start_events[start_events['time'].le(0) &
start_events['label'].isin(classifications)]['read_id'].unique()
abnormally_starting_ids = start_events[~start_events['read_id'].isin(normal_starting_ids)]['read_id'].unique()
start_target_ss = seq_sum_df_1[seq_sum_df_1['read_id'].isin(abnormally_starting_ids)]
print('Collecting abnormally ending reads:')
end_read_info = write_files(end_target_ss, args.bulk_files, 'start_time',
'next_start', '{i}/ends/fast5/'.format(i=args.output_name))
end_read_info.to_csv('{}/ends_read_info.txt'.format(args.output_name), sep='\t', index=False, header=True)
end_read_info.to_csv('{}/ends_filenames.txt'.format(args.output_name), sep='\t', index=False, header=False,
columns=['filename'])
print('Collecting abnormally starting reads:')
start_read_info = write_files(start_target_ss, args.bulk_files, 'prev_start',
'end_time', '{i}/starts/fast5/'.format(i=args.output_name))
start_read_info.to_csv('{}/starts_read_info.txt'.format(args.output_name), sep='\t', index=False, header=True)
start_read_info.to_csv('{}/starts_filenames.txt'.format(args.output_name), sep='\t', index=False, header=False,
columns=['filename'])
return
def write_files(target_ss, bulkfiles, read_start_col, read_end_col, export_path, remove_pore=True):
"""Abstraction for export_read_file for collecting read info
Parameters
----------
target_ss : pd.DataFrame
DataFrame of reads to generate reads for
bulkfiles: list
list of bulk FAST5 files
read_start_col : str
Column in the target_ss that start index is derived from
read_end_col : str
Column in the target_ss that end index is derived from
export_path : str
The folder where read files will be written
remove_pore : bool
Remove pore-like signal from trace (>1500)
Returns
-------
pd.DataFrame
DataFrame of read info about reads that have been written
"""
d = {
'read_id': [],
'channel': [],
'start_index': [],
'end_index': [],
'bv_read_id': [],
'filename': [],
'bv_filename': []
}
files_written = 0
for bf in tqdm(bulkfiles):
f = h5py.File(bf, 'r')
run_id = f['UniqueGlobalKey']["tracking_id"].attrs["run_id"].decode('utf8')
sf = int(f["UniqueGlobalKey"]["context_tags"].attrs["sample_frequency"].decode('utf8'))
t = target_ss[target_ss['run_id'] == run_id]
t = t.dropna()
f.close()
file = h5py.File(bf, 'r')
for idx, row in tqdm(t.iterrows(), total=t.shape[0], desc=run_id):
si = int(np.floor(row[read_start_col] * sf))
ei = int(np.floor(row[read_end_col] * sf))
d['read_id'].append(row['read_id'])
d['channel'].append(row['channel'])
d['start_index'].append(si)
d['end_index'].append(ei)
d['bv_read_id'].append("{ch}-{start}-{end}".format(ch=row['channel'], start=si, end=ei))
d['filename'].append(row['filename'])
d['bv_filename'].append(export_read_file(row['channel'],
si,
ei,
file,
export_path,
remove_pore=remove_pore))
files_written += 1
print('{} reads written'.format(files_written))
return pd.DataFrame(d)
def concat_files_to_df(file_list, **kwargs):
"""Return a pandas.DataFrame from a list of files
"""
df_list = []
for f in file_list:
try:
df_list.append(pd.read_csv(filepath_or_buffer=f, **kwargs))
except pd.errors.ParserError as e:
print('{}\nThis is usually caused by an input file not being the expected format'.format(repr(e)))
sys.exit(1)
except Exception as e:
sys.exit(1)
return pd.concat(df_list, ignore_index=True)
def debug(args):
dirs = dir(args)
for attr in dirs:
if attr[0] != '_':
print('{a:<25} {b}'.format(a=attr, b=getattr(args, attr)))
if __name__ == '__main__':
main()
|
[
"pandas.DataFrame",
"export.export_read_file",
"tqdm.tqdm",
"h5py.File",
"argparse.ArgumentParser",
"pandas.read_csv",
"numpy.floor",
"pandas.concat",
"sys.exit"
] |
[((214, 369), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Parse sequencing_summary.txt files and .paf files to find split reads in an Oxford Nanopore Dataset"""', 'add_help': '(False)'}), "(description=\n 'Parse sequencing_summary.txt files and .paf files to find split reads in an Oxford Nanopore Dataset'\n , add_help=False)\n", (228, 369), False, 'from argparse import ArgumentParser\n'), ((2693, 2732), 'pandas.read_csv', 'pd.read_csv', (['args.start_events'], {'sep': '""","""'}), "(args.start_events, sep=',')\n", (2704, 2732), True, 'import pandas as pd\n'), ((2750, 2787), 'pandas.read_csv', 'pd.read_csv', (['args.end_events'], {'sep': '""","""'}), "(args.end_events, sep=',')\n", (2761, 2787), True, 'import pandas as pd\n'), ((6723, 6738), 'tqdm.tqdm', 'tqdm', (['bulkfiles'], {}), '(bulkfiles)\n', (6727, 6738), False, 'from tqdm import tqdm\n'), ((8072, 8087), 'pandas.DataFrame', 'pd.DataFrame', (['d'], {}), '(d)\n', (8084, 8087), True, 'import pandas as pd\n'), ((8567, 8604), 'pandas.concat', 'pd.concat', (['df_list'], {'ignore_index': '(True)'}), '(df_list, ignore_index=True)\n', (8576, 8604), True, 'import pandas as pd\n'), ((6752, 6770), 'h5py.File', 'h5py.File', (['bf', '"""r"""'], {}), "(bf, 'r')\n", (6761, 6770), False, 'import h5py\n'), ((7060, 7078), 'h5py.File', 'h5py.File', (['bf', '"""r"""'], {}), "(bf, 'r')\n", (7069, 7078), False, 'import h5py\n'), ((7175, 7209), 'numpy.floor', 'np.floor', (['(row[read_start_col] * sf)'], {}), '(row[read_start_col] * sf)\n', (7183, 7209), True, 'import numpy as np\n'), ((7232, 7264), 'numpy.floor', 'np.floor', (['(row[read_end_col] * sf)'], {}), '(row[read_end_col] * sf)\n', (7240, 7264), True, 'import numpy as np\n'), ((7627, 7716), 'export.export_read_file', 'export_read_file', (["row['channel']", 'si', 'ei', 'file', 'export_path'], {'remove_pore': 'remove_pore'}), "(row['channel'], si, ei, file, export_path, remove_pore=\n remove_pore)\n", (7643, 7716), False, 'from export import 
export_read_file\n'), ((8278, 8321), 'pandas.read_csv', 'pd.read_csv', ([], {'filepath_or_buffer': 'f'}), '(filepath_or_buffer=f, **kwargs)\n', (8289, 8321), True, 'import pandas as pd\n'), ((8489, 8500), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8497, 8500), False, 'import sys\n'), ((8544, 8555), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8552, 8555), False, 'import sys\n')]
|
import tkinter as tk
from tkinter import ttk
import pandas as pd
import numpy as np
class ProcessFrame(tk.Frame):
def __init__(self, parent: tk.Tk) -> None:
super().__init__(parent)
self.data = None
# init widgets
frame_delete_nan = tk.LabelFrame(self, text='Delete empty values')
frame_delete_nan.grid(row=0, column=0, sticky='nswe')
frame_fill_nan = tk.LabelFrame(self, text='Fill empty values')
frame_fill_nan.grid(row=1, column=0, sticky='nswe')
frame_new_column = tk.LabelFrame(self, text='Add new column')
frame_new_column.grid(row=3, column=0, sticky='NSWE')
frame_delete_column = tk.LabelFrame(self, text='Delete column')
frame_delete_column.grid(row=4, column=0, sticky='NSWE')
# widgets for delete empty values
self.label_list = tk.Label(frame_fill_nan, text='Select column :')
self.label_list.grid(row=0, column=0)
self.btn_delete_nan = tk.Button(frame_delete_nan,
command=lambda: self.delete_nan(column=str(self.list_columns.get())),
text='Delete rows with empty values')
self.btn_delete_nan.grid(row=0, column=0)
# widgets for fill empty values
self.label_replace_column = tk.Label(frame_fill_nan, text='Select column')
self.label_replace_column.grid(row=0, column=0)
self.list_columns = ttk.Combobox(frame_fill_nan)
self.list_columns.grid(row=0, column=1)
self.label_replace = tk.Label(frame_fill_nan, text='Select value to replace empty values')
self.label_replace.grid(row=1, column=0)
self.entry_replace = tk.Entry(frame_fill_nan)
self.entry_replace.grid(row=1, column=1)
self.btn_replace_nan = tk.Button(frame_fill_nan,
command=lambda: self.replace_nan(
column=str(self.list_columns.get()),
new_value=self.entry_replace.get()),
text='Replace empty values')
self.btn_replace_nan.grid(row=2, column=0)
#widgets for adding new column
self.label_add_col = tk.Label(frame_new_column, text='New column name:')
self.label_add_col.grid(row=0, column=0)
self.entry_add_col = tk.Entry(frame_new_column)
self.entry_add_col.grid(row=0, column=1)
label_init_val = tk.Label(frame_new_column, text='(optional) init value in new column:')
label_init_val.grid(row=1, column=0)
self.entry_init_val = tk.Entry(frame_new_column)
self.entry_init_val.grid(row=1, column=1)
self.btn_add_col = tk.Button(frame_new_column,
command=lambda: self.add_new_column(column_name=str(self.entry_add_col.get()),
init_value=str(self.entry_init_val.get())),
text='add new column')
self.btn_add_col.grid(row=2, column=0)
# add widgets to delete column
label_del_col = tk.Label(frame_delete_column, text='Set column to delete:')
label_del_col.grid(row=0, column=0)
self.list_del_col = ttk.Combobox(frame_delete_column)
self.list_del_col.grid(row=0, column=1)
btn_del_col = tk.Button(frame_delete_column,
text='Delete column',
command=lambda: self.delete_column(column_name=self.list_del_col.get()))
btn_del_col.grid(row=1, column=0)
def init_data(self, data: pd.DataFrame) -> None:
self.data = data
self.refresh_widgets()
def refresh_widgets(self):
list_container = self.data.columns.values.tolist()
self.list_del_col['values'] = list_container
list_container.append('all columns')
self.list_columns['values'] = list_container
def delete_nan(self, column: str) -> None:
self.data.dropna(inplace=True)
self.refresh_widgets()
print(self.data)
def replace_nan(self, column: str, new_value: str) -> None:
if column.strip() is not None:
if column == 'all columns':
self.data.fillna(value=new_value, inplace=True)
else:
self.data[column].fillna(value=new_value, inplace=True)
self.refresh_widgets()
def add_new_column(self, column_name: str, init_value: str) -> None:
if column_name.strip() is not None:
self.data.insert(loc=len(self.data.columns), column=column_name, value=init_value)
self.refresh_widgets()
def delete_column(self, column_name: str) -> None:
if column_name.strip() is not None:
del self.data[column_name]
self.refresh_widgets()
|
[
"tkinter.ttk.Combobox",
"tkinter.Label",
"tkinter.Entry",
"tkinter.LabelFrame"
] |
[((271, 318), 'tkinter.LabelFrame', 'tk.LabelFrame', (['self'], {'text': '"""Delete empty values"""'}), "(self, text='Delete empty values')\n", (284, 318), True, 'import tkinter as tk\n'), ((406, 451), 'tkinter.LabelFrame', 'tk.LabelFrame', (['self'], {'text': '"""Fill empty values"""'}), "(self, text='Fill empty values')\n", (419, 451), True, 'import tkinter as tk\n'), ((539, 581), 'tkinter.LabelFrame', 'tk.LabelFrame', (['self'], {'text': '"""Add new column"""'}), "(self, text='Add new column')\n", (552, 581), True, 'import tkinter as tk\n'), ((674, 715), 'tkinter.LabelFrame', 'tk.LabelFrame', (['self'], {'text': '"""Delete column"""'}), "(self, text='Delete column')\n", (687, 715), True, 'import tkinter as tk\n'), ((849, 897), 'tkinter.Label', 'tk.Label', (['frame_fill_nan'], {'text': '"""Select column :"""'}), "(frame_fill_nan, text='Select column :')\n", (857, 897), True, 'import tkinter as tk\n'), ((1317, 1363), 'tkinter.Label', 'tk.Label', (['frame_fill_nan'], {'text': '"""Select column"""'}), "(frame_fill_nan, text='Select column')\n", (1325, 1363), True, 'import tkinter as tk\n'), ((1448, 1476), 'tkinter.ttk.Combobox', 'ttk.Combobox', (['frame_fill_nan'], {}), '(frame_fill_nan)\n', (1460, 1476), False, 'from tkinter import ttk\n'), ((1554, 1623), 'tkinter.Label', 'tk.Label', (['frame_fill_nan'], {'text': '"""Select value to replace empty values"""'}), "(frame_fill_nan, text='Select value to replace empty values')\n", (1562, 1623), True, 'import tkinter as tk\n'), ((1702, 1726), 'tkinter.Entry', 'tk.Entry', (['frame_fill_nan'], {}), '(frame_fill_nan)\n', (1710, 1726), True, 'import tkinter as tk\n'), ((2257, 2308), 'tkinter.Label', 'tk.Label', (['frame_new_column'], {'text': '"""New column name:"""'}), "(frame_new_column, text='New column name:')\n", (2265, 2308), True, 'import tkinter as tk\n'), ((2387, 2413), 'tkinter.Entry', 'tk.Entry', (['frame_new_column'], {}), '(frame_new_column)\n', (2395, 2413), True, 'import tkinter as tk\n'), ((2488, 2559), 
'tkinter.Label', 'tk.Label', (['frame_new_column'], {'text': '"""(optional) init value in new column:"""'}), "(frame_new_column, text='(optional) init value in new column:')\n", (2496, 2559), True, 'import tkinter as tk\n'), ((2635, 2661), 'tkinter.Entry', 'tk.Entry', (['frame_new_column'], {}), '(frame_new_column)\n', (2643, 2661), True, 'import tkinter as tk\n'), ((3182, 3241), 'tkinter.Label', 'tk.Label', (['frame_delete_column'], {'text': '"""Set column to delete:"""'}), "(frame_delete_column, text='Set column to delete:')\n", (3190, 3241), True, 'import tkinter as tk\n'), ((3314, 3347), 'tkinter.ttk.Combobox', 'ttk.Combobox', (['frame_delete_column'], {}), '(frame_delete_column)\n', (3326, 3347), False, 'from tkinter import ttk\n')]
|
from adjutant.actions.v1.serializers import BaseUserIdSerializer
from rest_framework import serializers
class EditMFASerializer(BaseUserIdSerializer):
delete = serializers.BooleanField(default=False)
|
[
"rest_framework.serializers.BooleanField"
] |
[((166, 205), 'rest_framework.serializers.BooleanField', 'serializers.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (190, 205), False, 'from rest_framework import serializers\n')]
|
# matrices.py - boolean matrices as row bitsets and column bitsets
"""Boolean matrices as collections of row and column vectors."""
import bitsets
from ._compat import zip
__all__ = ['Relation']
Vector = bitsets.bases.MemberBits
"""Single row or column of a boolean matrix as bit vector."""
class Vectors(bitsets.series.Tuple):
"""Paired collection of rows or columns of a boolean matrix relation."""
def _pair_with(self, relation, index, other):
if hasattr(self, 'prime'):
raise RuntimeError('%r attempt _pair_with %r' % (self, other))
self.relation = relation
self.relation_index = index
Prime = other.BitSet.supremum # noqa: N806
Double = self.BitSet.supremum # noqa: N806
_prime = other.BitSet.fromint
_double = self.BitSet.fromint
def prime(bitset):
"""FCA derivation operator (extent->intent, intent->extent)."""
prime = Prime
for o in other:
if bitset & 1:
prime &= o
bitset >>= 1
if not bitset:
break
return _prime(prime)
def double(bitset):
"""FCA double derivation operator (extent->extent, intent->intent)."""
prime = Prime
for o in other:
if bitset & 1:
prime &= o
bitset >>= 1
if not bitset:
break
double = Double
for s in self:
if prime & 1:
double &= s
prime >>= 1
if not prime:
break
return _double(double)
def doubleprime(bitset):
"""FCA single and double derivation (extent->extent+intent, intent->intent+extent)."""
prime = Prime
for o in other:
if bitset & 1:
prime &= o
bitset >>= 1
if not bitset:
break
bitset = prime
double = Double
for s in self:
if bitset & 1:
double &= s
bitset >>= 1
if not bitset:
break
return _double(double), _prime(prime)
self.prime = self.BitSet.prime = prime
self.double = self.BitSet.double = double
self.doubleprime = self.BitSet.doubleprime = doubleprime
def __reduce__(self):
return self.relation, (self.relation_index,)
class Relation(tuple):
"""Binary relation as interconnected pair of bitset collections.
>>> br = Relation('Condition', 'Symbol',
... ('TT', 'TF', 'FT', 'FF'), ('->', '<-'),
... [(True, False, True, True), (True, True, False, True)])
>>> br
<Relation(ConditionVectors('1011', '1101'), SymbolVectors('11', '01', '10', '11'))>
>>> br[1].BitSet.frommembers(('->', '<-')).prime().members()
('TT', 'FF')
"""
__slots__ = ()
def __new__(cls, xname, yname, xmembers, ymembers, xbools, _ids=None):
if _ids is not None: # unpickle reconstruction
xid, yid = _ids
X = bitsets.meta.bitset(xname, xmembers, xid, Vector, None, Vectors) # noqa: N806
Y = bitsets.meta.bitset(yname, ymembers, yid, Vector, None, Vectors) # noqa: N806
else:
X = bitsets.bitset(xname, xmembers, Vector, tuple=Vectors) # noqa: N806
Y = bitsets.bitset(yname, ymembers, Vector, tuple=Vectors) # noqa: N806
x = X.Tuple.frombools(xbools)
y = Y.Tuple.frombools(zip(*x.bools()))
self = super(Relation, cls).__new__(cls, (x, y))
x._pair_with(self, 0, y)
y._pair_with(self, 1, x)
return self
__call__ = tuple.__getitem__
def __repr__(self):
return '<%s(%r, %r)>' % (self.__class__.__name__, self[0], self[1])
def __reduce__(self):
X, Y = (v.BitSet for v in self) # noqa: N806
bools = self[0].bools()
ids = (X._id, Y._id)
args = (X.__name__, Y.__name__, X._members, Y._members, bools, ids)
return self.__class__, args
|
[
"bitsets.meta.bitset",
"bitsets.bitset"
] |
[((3213, 3277), 'bitsets.meta.bitset', 'bitsets.meta.bitset', (['xname', 'xmembers', 'xid', 'Vector', 'None', 'Vectors'], {}), '(xname, xmembers, xid, Vector, None, Vectors)\n', (3232, 3277), False, 'import bitsets\n'), ((3308, 3372), 'bitsets.meta.bitset', 'bitsets.meta.bitset', (['yname', 'ymembers', 'yid', 'Vector', 'None', 'Vectors'], {}), '(yname, ymembers, yid, Vector, None, Vectors)\n', (3327, 3372), False, 'import bitsets\n'), ((3417, 3471), 'bitsets.bitset', 'bitsets.bitset', (['xname', 'xmembers', 'Vector'], {'tuple': 'Vectors'}), '(xname, xmembers, Vector, tuple=Vectors)\n', (3431, 3471), False, 'import bitsets\n'), ((3502, 3556), 'bitsets.bitset', 'bitsets.bitset', (['yname', 'ymembers', 'Vector'], {'tuple': 'Vectors'}), '(yname, ymembers, Vector, tuple=Vectors)\n', (3516, 3556), False, 'import bitsets\n')]
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
'''
Author: <NAME> (<EMAIL>)
Created Date: 2019-09-19 5:35:12
-----
Last Modified: 2019-10-07 8:27:16
Modified By: <NAME> (<EMAIL>)
-----
THIS PROGRAM IS FREE SOFTWARE, IS LICENSED UNDER MIT.
A short and simple permissive license with conditions
only requiring preservation of copyright and license notices.
Copyright © 2019 <NAME>
-----
HISTORY:
Date By Comments
---------- -------- ---------------------------------------------------------
'''
import os
import smtplib
import tempfile
import pytest
@pytest.fixture(scope='module')
def smtp_connection():
return smtplib.SMTP("smtp.163.com", 25, timeout=5)
@pytest.fixture(scope='package')
def smtp_connection_package():
return smtplib.SMTP("smtp.163.com", 25, timeout=5)
@pytest.fixture()
def smtp_connection_yield():
smtp_connection = smtplib.SMTP("smtp.163.com", 25, timeout=5)
yield smtp_connection
print("关闭SMTP连接")
smtp_connection.close()
@pytest.fixture(scope='module')
def smtp_connection_request(request):
server, port = getattr(request.module, 'smtp_server', ("smtp.163.com", 25))
with smtplib.SMTP(server, port, timeout=5) as smtp_connection:
yield smtp_connection
print("断开 %s:%d" % (server, port))
@pytest.fixture(scope='module', params=['smtp.163.com', 'smtp.126.com'])
def smtp_connection_params(request):
server = request.param
with smtplib.SMTP(server, 25, timeout=5) as smtp_connection:
yield smtp_connection
print("断开 %s:%d" % (server, 25))
@pytest.fixture()
def cleandir():
newpath = tempfile.mkdtemp()
os.chdir(newpath)
|
[
"tempfile.mkdtemp",
"pytest.fixture",
"os.chdir",
"smtplib.SMTP"
] |
[((570, 600), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (584, 600), False, 'import pytest\n'), ((682, 713), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""package"""'}), "(scope='package')\n", (696, 713), False, 'import pytest\n'), ((803, 819), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (817, 819), False, 'import pytest\n'), ((994, 1024), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1008, 1024), False, 'import pytest\n'), ((1286, 1357), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""', 'params': "['smtp.163.com', 'smtp.126.com']"}), "(scope='module', params=['smtp.163.com', 'smtp.126.com'])\n", (1300, 1357), False, 'import pytest\n'), ((1561, 1577), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1575, 1577), False, 'import pytest\n'), ((635, 678), 'smtplib.SMTP', 'smtplib.SMTP', (['"""smtp.163.com"""', '(25)'], {'timeout': '(5)'}), "('smtp.163.com', 25, timeout=5)\n", (647, 678), False, 'import smtplib\n'), ((756, 799), 'smtplib.SMTP', 'smtplib.SMTP', (['"""smtp.163.com"""', '(25)'], {'timeout': '(5)'}), "('smtp.163.com', 25, timeout=5)\n", (768, 799), False, 'import smtplib\n'), ((871, 914), 'smtplib.SMTP', 'smtplib.SMTP', (['"""smtp.163.com"""', '(25)'], {'timeout': '(5)'}), "('smtp.163.com', 25, timeout=5)\n", (883, 914), False, 'import smtplib\n'), ((1608, 1626), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1624, 1626), False, 'import tempfile\n'), ((1631, 1648), 'os.chdir', 'os.chdir', (['newpath'], {}), '(newpath)\n', (1639, 1648), False, 'import os\n'), ((1152, 1189), 'smtplib.SMTP', 'smtplib.SMTP', (['server', 'port'], {'timeout': '(5)'}), '(server, port, timeout=5)\n', (1164, 1189), False, 'import smtplib\n'), ((1431, 1466), 'smtplib.SMTP', 'smtplib.SMTP', (['server', '(25)'], {'timeout': '(5)'}), '(server, 25, timeout=5)\n', (1443, 1466), False, 'import smtplib\n')]
|
from typing import List, Dict
from identifyneeds.entities import Condition
class MemRepo():
    """In-memory repository of condition records, keyed by their 'uuid'."""

    def __init__(self, condition_dicts):
        """Store the given condition dicts.

        :param condition_dicts: list of dicts, each with at least a 'uuid' key.
        :raises TypeError: if condition_dicts is not a list of dicts.
        """
        self.conditions = {}
        self.put(condition_dicts)

    def get(self, filters: Dict = None):
        """Return stored conditions as Condition entities, optionally filtered.

        Only filtering by 'name' is supported: the filter value is a container
        of accepted names.

        :raises NotImplementedError: for any filter key other than 'name'.
        """
        condition_objects = [Condition.from_dict(i) for i in self.conditions.values()]
        if not filters:
            return condition_objects
        if 'name' in filters.keys():
            return [item for item in condition_objects if item.name in filters['name']]
        # Fixed: this message was an f-string with no placeholders.
        raise NotImplementedError("Filtering by something other than 'name' is not supported")

    def put(self, condition_dicts: List[dict]):
        """Validate and store condition dicts, keyed by their 'uuid'."""
        self._check_types(condition_dicts)
        for cnd in condition_dicts:
            self.conditions[cnd['uuid']] = cnd

    def _check_types(self, conditions: List[dict]):
        """Raise TypeError unless *conditions* is a list of dicts.

        isinstance() is used instead of exact type comparison so list/dict
        subclasses are accepted as well.
        """
        if not isinstance(conditions, list):
            raise TypeError(f"Expected List of 'dicts'. Received: {type(conditions)}")
        for cnd in conditions:
            if not isinstance(cnd, dict):
                raise TypeError(f"Expected 'dict' got {type(cnd)}")
|
[
"identifyneeds.entities.Condition.from_dict"
] |
[((271, 293), 'identifyneeds.entities.Condition.from_dict', 'Condition.from_dict', (['i'], {}), '(i)\n', (290, 293), False, 'from identifyneeds.entities import Condition\n')]
|
import requests
import tkinter as tk
from tkinter import filedialog, Text
import main as main
from sortedcontainers import SortedSet
# This file runs the main program
def raise_frame(frame):
    """Bring *frame* to the top of the stacked-frames pile, making it visible."""
    frame.tkraise()
# Build the main window and the four stacked frames; raise_frame() flips
# between them like pages of a wizard.
root=tk.Tk()
root.title("Better Bolus V2.0")
# root.iconbitmap("../favicon.ico")
f1 = tk.Frame(root)
f2 = tk.Frame(root)
f3 = tk.Frame(root)
f4 = tk.Frame(root)
# Stack every frame in the same grid cell so tkraise() can switch pages.
for frame in (f1, f2, f3, f4):
    frame.grid(row=0, column=0, sticky='news')
# setting the windows size
root.geometry("500x500")
# bolus profile values as an object of arrays, each with two values,
# the first being the amount of insulin in units,
# and the second being the amount of time in minutes between
# now and when the bolus would be taken
bolusProfile = {}
# Running list of cumulative insulin-effect curves; one entry appended per bolus.
insulinEffectUpdated = []
# NOTE(review): insulinEffectUpdated was just initialised to [], so this guard
# can never fire at start-up; presumably a leftover from an earlier design.
if len(insulinEffectUpdated) > 49:
    insulinEffectUpdated = insulinEffectUpdated[48:]
# declaring string variables
# Tk variables backing the entry widgets and dropdowns on the three pages.
bg_var = tk.StringVar()
trending_var = tk.StringVar()
bolus_var = tk.StringVar()
bolusTime_var = tk.StringVar()
# defining a function that will
# generate original graph based off BG
def submit_f1():
    """Validate the blood-glucose entry on page 1 and show the unadjusted graph.

    Reads the module-level Tk variables bg_var / trending_var.  A BG strictly
    between 120 and 450 advances to frame f2 and plots the unadjusted
    projection; anything else rewrites the label with an error prompt.
    """
    trending = trending_var.get()
    # Validation
    try:
        bg = int(bg_var.get())
        if 120 < int(bg_var.get()) < 450:
            raise_frame(f2)
            main.initial_bg(bg)
            main.unadjusted_graph(trending)
            main.show_unadjusted_graph()
        else:
            bg_label.config(text="Invalid input\nplease enter an integer between 120 and 450:")
    except ValueError:
        # Non-integer input (including empty string) lands here.
        bg_label.config(text="Invalid input\nplease enter an integer between 120 and 450:")
    bg_var.set("")
# defining a function that will
# generate graph with bolus profile applied
def submit_f2():
    """Apply the first bolus from page 2 and show the adjusted graph.

    Uses module-level Tk variables; an empty bolus entry is treated as 0
    units.  On success the window is destroyed and the resistance-adjusted
    graph is shown; invalid input rewrites the bolus label with an error.
    """
    trending = trending_var.get()
    time = bolusTime_var.get()
    if bolusTime_var.get() == "":
        time = '0.00'
    if bolus_var.get() == "":
        bolus = 0
        bolusProfile.update({time: bolus})
        insulinEffect = main.applyInitialBolus(bolusProfile)
        insulinEffectResistanceAdjusted = main.build_resistance_profile(trending, insulinEffect)
        root.destroy()
        main.show_adjusted_graph(insulinEffectResistanceAdjusted)
        # BUG FIX: without this return, execution fell through to int("")
        # below, raised ValueError, and then tried to configure widgets of
        # the already-destroyed root window.
        return
    try:
        bolus = int(bolus_var.get())
        if 0 <= bolus <= 10:
            bolusProfile.update({time: bolus})
            insulinEffect = main.applyInitialBolus(bolusProfile)
            insulinEffectResistanceAdjusted = main.build_resistance_profile(trending, insulinEffect)
            root.destroy()
            main.show_adjusted_graph(insulinEffectResistanceAdjusted)
            # BUG FIX: return before the trailing bolus_var.set(""), which
            # would otherwise touch a Tk variable of the destroyed root.
            return
        else:
            bolus_label.config(text="Invalid input\nPlease enter a bolus between 0 and 10:")
    except ValueError:
        bolus_label.config(text="Invalid input\nPlease enter an integer between 0 and 10:")
    bolus_var.set("")
# defining a function that will
# allow user to add more bolus info
def add_another_bolus():
    """Record the bolus from page 2 into bolusProfile and advance to page 3.

    The scheduled insulin-effect curve is appended to insulinEffectUpdated so
    later boluses can stack on top of it.  An empty time entry means "now"
    ('0.00').
    """
    time = bolusTime_var.get()
    if bolusTime_var.get() == "":
        time = '0.00'
    try:
        bolus = int(bolus_var.get())
        if 0 <= bolus <= 10:
            bolusProfile.update({time: bolus})
            insulinEffect = main.applyInitialBolus(bolusProfile)
            insulinEffectUpdated.append(main.bolusStack(insulinEffect, bolusProfile))
            raise_frame(f3)
        else:
            bolus_label.config(text="Invalid input\nPlease enter a bolus between 0 and 10:")
    except ValueError:
        # Non-integer input (including empty string) lands here.
        bolus_label.config(text="Invalid input\nPlease enter an integer between 0 and 10:")
    bolus_var.set("")
def add_another_bolus2():
    """Record an additional bolus from page 3, stacking it on the last curve.

    Unlike add_another_bolus() this stacks onto the most recent entry of
    insulinEffectUpdated and stays on the current frame.
    """
    time = bolusTime_var.get()
    if bolusTime_var.get() == "":
        time = '0.00'
    try:
        bolus = int(bolus_var.get())
        if 0 <= bolus <= 10:
            bolusProfile.update({time: bolus})
            insulinEffectUpdated.append(main.bolusStack(insulinEffectUpdated[len(insulinEffectUpdated) - 1], bolusProfile))
        else:
            bolus_label2.config(text="Invalid input\nPlease enter a bolus between 0 and 10:")
    except ValueError:
        # Non-integer input (including empty string) lands here.
        bolus_label2.config(text="Invalid input\nPlease enter an integer between 0 and 10:")
    bolus_var.set("")
def submit_f3():
    """Apply the final bolus from page 3 and show the adjusted graph.

    An empty bolus entry applies the existing profile unchanged.  On success
    the window is destroyed and the resistance-adjusted graph is shown.
    """
    trending = trending_var.get()
    time = bolusTime_var.get()
    if bolusTime_var.get() == "":
        time = '0.00'
    if bolus_var.get() == "":
        insulinEffectUpdated.append(main.bolusStack(insulinEffectUpdated[len(insulinEffectUpdated) - 1], bolusProfile))
        insulinEffectResistanceAdjusted = main.build_resistance_profile(trending, insulinEffectUpdated[len(insulinEffectUpdated) - 1])
        print(bolusProfile)
        root.destroy()
        main.show_adjusted_graph(insulinEffectResistanceAdjusted)
        # BUG FIX: without this return, execution fell through to int("")
        # below, raised ValueError, and then tried to configure widgets of
        # the already-destroyed root window.
        return
    try:
        bolus = int(bolus_var.get())
        print(bolus)
        if 0 <= bolus <= 10:
            bolusProfile.update({time: bolus})
            insulinEffectUpdated.append(main.bolusStack(insulinEffectUpdated[len(insulinEffectUpdated) - 1], bolusProfile))
            insulinEffectResistanceAdjusted = main.build_resistance_profile(trending, insulinEffectUpdated[len(insulinEffectUpdated) - 1])
            print(bolusProfile)
            root.destroy()
            main.show_adjusted_graph(insulinEffectResistanceAdjusted)
        else:
            bolus_label.config(text="Invalid input\nPlease enter a bolus between 0 and 10:")
    except ValueError:
        bolus_label.config(text="Invalid input\nPlease enter an integer between 0 and 10:")
# function to update bolus profile label
def change_profile_text(bolusprofile_label):
    """Rewrite *bolusprofile_label* to list every bolus currently scheduled."""
    print(bolusProfile)
    entries = []
    for when, units in bolusProfile.items():
        # Three-char keys like '1.3' carry only one minutes digit; pad a
        # trailing zero so '3' renders as '30'.
        zero_pad = "0" if len(when) == 3 else ""
        entries.append(f"{units} Unit(s) in {when[0]} hours and {when[2:]}{zero_pad} minutes\n")
    bolusprofile_label.config(text=f"Current Bolus Profile: {''.join(entries)}")
# --- Widget construction for all three pages, then grid layout and mainloop ---
# creating a label for
# disclaimer
disclaimer_label = tk.Label(f1, text = "THIS PROGRAM IS CURRENTLY FOR MODELING PURPOSES ONLY\nAND SHOULD NOT BE USED TO DETERMINE\nHOW TO MANAGE AN INDIVIDUAL'S BLOOD GLUCOSE.",
                    font=('calibre',
                          10, 'bold'))
# creating a label for
# disclaimer cont'd
disclaimer_label_cont = tk.Label(f1, text = "",
                    font=('calibre',
                          10, 'bold'))
# creating a label for
# disclaimer cont'd 2
disclaimer_label_cont2 = tk.Label(f1, text = "",
                    font=('calibre',
                          10, 'bold'))
# creating a label for
# bg using widget Label
bg_label = tk.Label(f1, text = 'Blood Glucose',
                    font=('calibre',
                          10, 'bold'))
# creating an entry for input
# bg using widget Entry
bg_entry = tk.Entry(f1,
                    textvariable = bg_var,
                    font=('calibre',10,'normal'))
# Trend options for the BG-trend dropdown on page 1.
choices = { 'Stable','Rising','Dropping','Rising Fast','Dropping Fast', 'Unknown'}
trending_var.set('Unknown') # set the default option
trending_profile = tk.OptionMenu(f1, trending_var, *choices,)
trending_label = tk.Label(f1, text="Choose a Trending Option")
# NOTE(review): this widget is gridded again below in the f1 layout section;
# the second call wins, so this line is redundant.
trending_profile.grid(row = 2, column =1)
# on change dropdown value
def change_dropdown(*args):
    print( trending_var.get() )
# link function to change dropdown
trending_var.trace('w', change_dropdown)
# creating a button using the widget
# Button that will call the submit function
sub_btn=tk.Button(f1,text = 'Submit',
                  command = lambda:[submit_f1()]
                  )
# creating a label for f2
f2_label = tk.Label(f2, text = "Please enter an initial bolus value",
                    font=('calibre',
                          10, 'bold'))
# creating a label for bolus
bolus_label = tk.Label(f2,
                      text = 'Bolus Amount',
                      font = ('calibre',10,'bold'))
# creating an entry for bolus
# NOTE(review): bolus_var is shared between this entry (page 2) and
# bolus_entry2 (page 3), so typing on one page updates both.
bolus_entry=tk.Entry(f2,
                     textvariable = bolus_var,
                     font = ('calibre',10,'normal'))
# creating a button using the widget
# Button that will call the submit function for f2
sub2_btn=tk.Button(f2,text = 'Apply',
                   command = lambda:[submit_f2()])
# creating a button using the widget
# Button that will call the submit function for f2
add_bolus_btn=tk.Button(f2,text = 'Add another bolus',
                        command = lambda:[add_another_bolus(), change_profile_text(bolusprofile_label)])
# creating a label for f3
f3_label = tk.Label(f3, text = "Please enter another bolus value with a time to be applied",
                    font=('calibre',
                          10, 'bold'))
# creating a label for subsequent bolus
bolus_label2 = tk.Label(f3,
                      text = 'Bolus Amount',
                      font = ('calibre',10,'bold'))
# creating an entry for subsequent bolus
bolus_entry2=tk.Entry(f3,
                     textvariable = bolus_var,
                     font = ('calibre',10,'normal'))
# creating a label for f3
bolusprofile_label = tk.Label(f3, text = "",
                    font=('calibre',
                          10, 'bold'))
# Build the sorted set of selectable bolus times from the insulin profile.
times = SortedSet()
for el in main.insulinProfileList:
    times.add(el[0])
bolus_time = tk.OptionMenu(f3, bolusTime_var, *times,)
time_label = tk.Label(f3, text="Choose a time to apply bolus\nin 5 minute increments from now")
# NOTE(review): gridded again below in the f3 layout section; redundant here.
bolus_time.grid(row = 2, column =1)
# on change dropdown value
def change_time_dropdown(*args):
    print( "time", bolusTime_var.get() )
# link function to change dropdown
bolusTime_var.trace('w', change_time_dropdown)
# creating a button using the widget
# Button that will call the submit function for f2
sub3_btn=tk.Button(f3,text = 'Apply',
                   command = submit_f3)
# creating a button using the widget
# Button that will call the submit function for f3
add_bolus_btn2=tk.Button(f3,text = 'Add another bolus',
                        command = lambda:[add_another_bolus2(), change_profile_text(bolusprofile_label)]
                        )
# placing the label and entry in
# the required position using grid for f1
disclaimer_label.grid(row=0,column=0,columnspan=2,padx=20,pady=20)
bg_label.grid(row=1,column=0,padx=20,pady=20)
bg_entry.grid(row=1,column=1)
trending_label.grid(row=2,column=0,padx=20,pady=20)
trending_profile.grid(row=2,column=1)
sub_btn.grid(row=3,column=1,padx=20,pady=20)
# placing the label and entry in
# the required position using grid for f2
f2_label.grid(row=0,column=0,columnspan=2,padx=20,pady=20)
bolus_label.grid(row=1,column=0,padx=20,pady=20)
bolus_entry.grid(row=1,column=1)
sub2_btn.grid(row=3,column=0,padx=20,pady=20)
add_bolus_btn.grid(row=3,column=1)
# placing the label and entry in
# the required position using grid for f3
f3_label.grid(row=0,column=0,columnspan=2,padx=20,pady=20)
bolusprofile_label.grid(row=1,column=0,columnspan=2,padx=20,pady=20)
bolus_label2.grid(row=2,column=0,padx=20,pady=20)
bolus_entry2.grid(row=2,column=1)
time_label.grid(row=3,column=0,padx=20,pady=20)
bolus_time.grid(row=3,column=1)
sub3_btn.grid(row=4,columnspan=2,padx=20,pady=20)
add_bolus_btn2.grid(row=5,columnspan=2)
# performing an infinite loop
# for the window to display
raise_frame(f1)
root.mainloop()
|
[
"tkinter.StringVar",
"main.show_adjusted_graph",
"main.bolusStack",
"main.unadjusted_graph",
"main.initial_bg",
"tkinter.Button",
"tkinter.Entry",
"main.show_unadjusted_graph",
"tkinter.OptionMenu",
"main.build_resistance_profile",
"sortedcontainers.SortedSet",
"main.applyInitialBolus",
"tkinter.Frame",
"tkinter.Label",
"tkinter.Tk"
] |
[((219, 226), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (224, 226), True, 'import tkinter as tk\n'), ((302, 316), 'tkinter.Frame', 'tk.Frame', (['root'], {}), '(root)\n', (310, 316), True, 'import tkinter as tk\n'), ((322, 336), 'tkinter.Frame', 'tk.Frame', (['root'], {}), '(root)\n', (330, 336), True, 'import tkinter as tk\n'), ((342, 356), 'tkinter.Frame', 'tk.Frame', (['root'], {}), '(root)\n', (350, 356), True, 'import tkinter as tk\n'), ((362, 376), 'tkinter.Frame', 'tk.Frame', (['root'], {}), '(root)\n', (370, 376), True, 'import tkinter as tk\n'), ((910, 924), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (922, 924), True, 'import tkinter as tk\n'), ((941, 955), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (953, 955), True, 'import tkinter as tk\n'), ((968, 982), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (980, 982), True, 'import tkinter as tk\n'), ((1000, 1014), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (1012, 1014), True, 'import tkinter as tk\n'), ((6075, 6273), 'tkinter.Label', 'tk.Label', (['f1'], {'text': '"""THIS PROGRAM IS CURRENTLY FOR MODELING PURPOSES ONLY\nAND SHOULD NOT BE USED TO DETERMINE\nHOW TO MANAGE AN INDIVIDUAL\'S BLOOD GLUCOSE."""', 'font': "('calibre', 10, 'bold')"}), '(f1, text=\n """THIS PROGRAM IS CURRENTLY FOR MODELING PURPOSES ONLY\nAND SHOULD NOT BE USED TO DETERMINE\nHOW TO MANAGE AN INDIVIDUAL\'S BLOOD GLUCOSE."""\n , font=(\'calibre\', 10, \'bold\'))\n', (6083, 6273), True, 'import tkinter as tk\n'), ((6387, 6438), 'tkinter.Label', 'tk.Label', (['f1'], {'text': '""""""', 'font': "('calibre', 10, 'bold')"}), "(f1, text='', font=('calibre', 10, 'bold'))\n", (6395, 6438), True, 'import tkinter as tk\n'), ((6567, 6618), 'tkinter.Label', 'tk.Label', (['f1'], {'text': '""""""', 'font': "('calibre', 10, 'bold')"}), "(f1, text='', font=('calibre', 10, 'bold'))\n", (6575, 6618), True, 'import tkinter as tk\n'), ((6739, 6803), 'tkinter.Label', 'tk.Label', (['f1'], {'text': '"""Blood 
Glucose"""', 'font': "('calibre', 10, 'bold')"}), "(f1, text='Blood Glucose', font=('calibre', 10, 'bold'))\n", (6747, 6803), True, 'import tkinter as tk\n'), ((6930, 6995), 'tkinter.Entry', 'tk.Entry', (['f1'], {'textvariable': 'bg_var', 'font': "('calibre', 10, 'normal')"}), "(f1, textvariable=bg_var, font=('calibre', 10, 'normal'))\n", (6938, 6995), True, 'import tkinter as tk\n'), ((7199, 7240), 'tkinter.OptionMenu', 'tk.OptionMenu', (['f1', 'trending_var', '*choices'], {}), '(f1, trending_var, *choices)\n', (7212, 7240), True, 'import tkinter as tk\n'), ((7259, 7304), 'tkinter.Label', 'tk.Label', (['f1'], {'text': '"""Choose a Trending Option"""'}), "(f1, text='Choose a Trending Option')\n", (7267, 7304), True, 'import tkinter as tk\n'), ((7745, 7836), 'tkinter.Label', 'tk.Label', (['f2'], {'text': '"""Please enter an initial bolus value"""', 'font': "('calibre', 10, 'bold')"}), "(f2, text='Please enter an initial bolus value', font=('calibre', \n 10, 'bold'))\n", (7753, 7836), True, 'import tkinter as tk\n'), ((7936, 7999), 'tkinter.Label', 'tk.Label', (['f2'], {'text': '"""Bolus Amount"""', 'font': "('calibre', 10, 'bold')"}), "(f2, text='Bolus Amount', font=('calibre', 10, 'bold'))\n", (7944, 7999), True, 'import tkinter as tk\n'), ((8098, 8166), 'tkinter.Entry', 'tk.Entry', (['f2'], {'textvariable': 'bolus_var', 'font': "('calibre', 10, 'normal')"}), "(f2, textvariable=bolus_var, font=('calibre', 10, 'normal'))\n", (8106, 8166), True, 'import tkinter as tk\n'), ((8680, 8799), 'tkinter.Label', 'tk.Label', (['f3'], {'text': '"""Please enter another bolus value with a time to be applied"""', 'font': "('calibre', 10, 'bold')"}), "(f3, text=\n 'Please enter another bolus value with a time to be applied', font=(\n 'calibre', 10, 'bold'))\n", (8688, 8799), True, 'import tkinter as tk\n'), ((8903, 8966), 'tkinter.Label', 'tk.Label', (['f3'], {'text': '"""Bolus Amount"""', 'font': "('calibre', 10, 'bold')"}), "(f3, text='Bolus Amount', font=('calibre', 10, 
'bold'))\n", (8911, 8966), True, 'import tkinter as tk\n'), ((9077, 9145), 'tkinter.Entry', 'tk.Entry', (['f3'], {'textvariable': 'bolus_var', 'font': "('calibre', 10, 'normal')"}), "(f3, textvariable=bolus_var, font=('calibre', 10, 'normal'))\n", (9085, 9145), True, 'import tkinter as tk\n'), ((9241, 9292), 'tkinter.Label', 'tk.Label', (['f3'], {'text': '""""""', 'font': "('calibre', 10, 'bold')"}), "(f3, text='', font=('calibre', 10, 'bold'))\n", (9249, 9292), True, 'import tkinter as tk\n'), ((9318, 9329), 'sortedcontainers.SortedSet', 'SortedSet', ([], {}), '()\n', (9327, 9329), False, 'from sortedcontainers import SortedSet\n'), ((9401, 9441), 'tkinter.OptionMenu', 'tk.OptionMenu', (['f3', 'bolusTime_var', '*times'], {}), '(f3, bolusTime_var, *times)\n', (9414, 9441), True, 'import tkinter as tk\n'), ((9456, 9546), 'tkinter.Label', 'tk.Label', (['f3'], {'text': '"""Choose a time to apply bolus\nin 5 minute increments from now"""'}), '(f3, text=\n """Choose a time to apply bolus\nin 5 minute increments from now""")\n', (9464, 9546), True, 'import tkinter as tk\n'), ((9865, 9911), 'tkinter.Button', 'tk.Button', (['f3'], {'text': '"""Apply"""', 'command': 'submit_f3'}), "(f3, text='Apply', command=submit_f3)\n", (9874, 9911), True, 'import tkinter as tk\n'), ((1989, 2025), 'main.applyInitialBolus', 'main.applyInitialBolus', (['bolusProfile'], {}), '(bolusProfile)\n', (2011, 2025), True, 'import main as main\n'), ((2068, 2122), 'main.build_resistance_profile', 'main.build_resistance_profile', (['trending', 'insulinEffect'], {}), '(trending, insulinEffect)\n', (2097, 2122), True, 'import main as main\n'), ((2154, 2211), 'main.show_adjusted_graph', 'main.show_adjusted_graph', (['insulinEffectResistanceAdjusted'], {}), '(insulinEffectResistanceAdjusted)\n', (2178, 2211), True, 'import main as main\n'), ((4680, 4737), 'main.show_adjusted_graph', 'main.show_adjusted_graph', (['insulinEffectResistanceAdjusted'], {}), '(insulinEffectResistanceAdjusted)\n', (4704, 4737), 
True, 'import main as main\n'), ((1293, 1312), 'main.initial_bg', 'main.initial_bg', (['bg'], {}), '(bg)\n', (1308, 1312), True, 'import main as main\n'), ((1325, 1356), 'main.unadjusted_graph', 'main.unadjusted_graph', (['trending'], {}), '(trending)\n', (1346, 1356), True, 'import main as main\n'), ((1369, 1397), 'main.show_unadjusted_graph', 'main.show_unadjusted_graph', ([], {}), '()\n', (1395, 1397), True, 'import main as main\n'), ((2363, 2399), 'main.applyInitialBolus', 'main.applyInitialBolus', (['bolusProfile'], {}), '(bolusProfile)\n', (2385, 2399), True, 'import main as main\n'), ((2446, 2500), 'main.build_resistance_profile', 'main.build_resistance_profile', (['trending', 'insulinEffect'], {}), '(trending, insulinEffect)\n', (2475, 2500), True, 'import main as main\n'), ((2540, 2597), 'main.show_adjusted_graph', 'main.show_adjusted_graph', (['insulinEffectResistanceAdjusted'], {}), '(insulinEffectResistanceAdjusted)\n', (2564, 2597), True, 'import main as main\n'), ((3182, 3218), 'main.applyInitialBolus', 'main.applyInitialBolus', (['bolusProfile'], {}), '(bolusProfile)\n', (3204, 3218), True, 'import main as main\n'), ((5216, 5273), 'main.show_adjusted_graph', 'main.show_adjusted_graph', (['insulinEffectResistanceAdjusted'], {}), '(insulinEffectResistanceAdjusted)\n', (5240, 5273), True, 'import main as main\n'), ((3259, 3303), 'main.bolusStack', 'main.bolusStack', (['insulinEffect', 'bolusProfile'], {}), '(insulinEffect, bolusProfile)\n', (3274, 3303), True, 'import main as main\n')]
|
from autumn.projects.covid_19.vaccine_optimisation.vaccine_opti import (
get_decision_vars_names,
initialise_opti_object,
)
import numpy as np
import yaml
COUNTRY = "malaysia"  # should use "malaysia" or "philippines"
def run_sample_code():
    """Evaluate the optimisation objective for one random decision-variable set."""
    # Initialisation of the optimisation object. This needs to be run once before optimising.
    opti_object = initialise_opti_object(COUNTRY)
    # Create decision variables for random allocations and random relaxation
    decision_vars = []
    for phase_number in range(2):
        # Eight random allocation weights per phase, normalised to sum to 1.
        sample = list(np.random.uniform(low=0.0, high=1.0, size=(8,)))
        _sum = sum(sample)
        decision_vars += [s / _sum for s in sample]
    # Final variable: a random relaxation level in [0, 1].
    decision_vars.append(np.random.uniform(low=0.0, high=1.0))
    # create_scenario_yml_file(COUNTRY, decision_vars, sc_index=6)
    # Evaluate objective function
    [total_deaths, max_hospital, relaxation] = opti_object.evaluate_objective(decision_vars)
    # Print decision vars and outputs
    print(get_decision_vars_names())
    print(f"Decision variables: {decision_vars}")
    print(f"N deaths: {total_deaths} / Max hospital: {max_hospital} / Relaxation: {relaxation}")
def dump_decision_vars_sample(n_samples):
    """Generate *n_samples* random decision-variable vectors and dump them to YAML.

    Each vector holds two normalised 8-way allocations plus one relaxation
    scalar, written to comparison_test/vars_sample.yml.
    """
    all_samples = []
    for _ in range(n_samples):
        one_sample = []
        for _phase in range(2):
            raw = np.random.uniform(low=0.0, high=1.0, size=(8,))
            total = sum(raw)
            one_sample.extend(value / total for value in raw)
        one_sample.append(float(np.random.uniform(low=0.0, high=1.0)))
        all_samples.append([float(value) for value in one_sample])
    with open("comparison_test/vars_sample.yml", "w") as out_file:
        yaml.dump(all_samples, out_file)
def evaluate_sample_decision_vars(user="Guillaume"):
    """Evaluate the stored decision-variable sample and dump objective values.

    Reads comparison_test/vars_sample.yml, computes deaths and peak hospital
    occupancy for each sample, and writes them to
    comparison_test/obj_values_<user>.yml.
    """
    file_path = "comparison_test/vars_sample.yml"
    with open(file_path) as file:
        # FIX: yaml.load without an explicit Loader is unsafe and raises a
        # TypeError under PyYAML >= 6; safe_load handles this plain-data file.
        vars_samples = yaml.safe_load(file)
    opti_object = initialise_opti_object(COUNTRY)
    dumped_dict = {"deaths": [], "hosp": []}
    for decision_vars in vars_samples:
        [total_deaths, max_hospital, _] = opti_object.evaluate_objective(decision_vars)
        dumped_dict["deaths"].append(float(total_deaths))
        dumped_dict["hosp"].append(float(max_hospital))
    file_path = f"comparison_test/obj_values_{user}.yml"
    with open(file_path, "w") as f:
        yaml.dump(dumped_dict, f)
def compare_outputs():
    """Compare the objective values produced by two users' runs.

    Prints per-sample percentage differences (reference: Romain), the average
    percentage difference, and each user's rank ordering for both objectives.
    """
    outputs = {}
    for name in ["Romain", "Guillaume"]:
        file_path = f"comparison_test/obj_values_{name}.yml"
        with open(file_path) as file:
            # FIX: yaml.load without a Loader is unsafe and removed in
            # PyYAML >= 6; safe_load is sufficient for this plain-data file.
            outputs[name] = yaml.safe_load(file)
    for obj in ["deaths", "hosp"]:
        perc_diff = [
            int(
                100
                * (outputs["Guillaume"][obj][i] - outputs["Romain"][obj][i])
                / outputs["Romain"][obj][i]
            )
            for i in range(len(outputs["Romain"][obj]))
        ]
        average_perc_diff = sum(perc_diff) / len(perc_diff)
        print(f"Comparison for {obj}:")
        print("Percentage difference (ref. Romain):")
        print(perc_diff)
        print(f"Average perc diff: {average_perc_diff}%")
        for name in ["Romain", "Guillaume"]:
            x = outputs[name][obj]
            # Rank of each value within the sorted list (ties share a rank).
            ordered_output = sorted(x)
            ranks = [ordered_output.index(v) for v in x]
            print(f"Ranks {name}:")
            print(ranks)
        print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
        print()
# evaluate_sample_decision_vars("Guillaume")
# compare_outputs()
# This can be run using:
# python -m apps runsamplevaccopti
|
[
"numpy.random.uniform",
"yaml.load",
"autumn.projects.covid_19.vaccine_optimisation.vaccine_opti.initialise_opti_object",
"autumn.projects.covid_19.vaccine_optimisation.vaccine_opti.get_decision_vars_names",
"yaml.dump"
] |
[((365, 396), 'autumn.projects.covid_19.vaccine_optimisation.vaccine_opti.initialise_opti_object', 'initialise_opti_object', (['COUNTRY'], {}), '(COUNTRY)\n', (387, 396), False, 'from autumn.projects.covid_19.vaccine_optimisation.vaccine_opti import get_decision_vars_names, initialise_opti_object\n'), ((2009, 2040), 'autumn.projects.covid_19.vaccine_optimisation.vaccine_opti.initialise_opti_object', 'initialise_opti_object', (['COUNTRY'], {}), '(COUNTRY)\n', (2031, 2040), False, 'from autumn.projects.covid_19.vaccine_optimisation.vaccine_opti import get_decision_vars_names, initialise_opti_object\n'), ((707, 743), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0.0)', 'high': '(1.0)'}), '(low=0.0, high=1.0)\n', (724, 743), True, 'import numpy as np\n'), ((990, 1015), 'autumn.projects.covid_19.vaccine_optimisation.vaccine_opti.get_decision_vars_names', 'get_decision_vars_names', ([], {}), '()\n', (1013, 1015), False, 'from autumn.projects.covid_19.vaccine_optimisation.vaccine_opti import get_decision_vars_names, initialise_opti_object\n'), ((1776, 1810), 'yaml.dump', 'yaml.dump', (['decision_vars_sample', 'f'], {}), '(decision_vars_sample, f)\n', (1785, 1810), False, 'import yaml\n'), ((1974, 1989), 'yaml.load', 'yaml.load', (['file'], {}), '(file)\n', (1983, 1989), False, 'import yaml\n'), ((2429, 2454), 'yaml.dump', 'yaml.dump', (['dumped_dict', 'f'], {}), '(dumped_dict, f)\n', (2438, 2454), False, 'import yaml\n'), ((554, 601), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0.0)', 'high': '(1.0)', 'size': '(8,)'}), '(low=0.0, high=1.0, size=(8,))\n', (571, 601), True, 'import numpy as np\n'), ((2666, 2681), 'yaml.load', 'yaml.load', (['file'], {}), '(file)\n', (2675, 2681), False, 'import yaml\n'), ((1360, 1407), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0.0)', 'high': '(1.0)', 'size': '(8,)'}), '(low=0.0, high=1.0, size=(8,))\n', (1377, 1407), True, 'import numpy as np\n'), ((1531, 1567), 'numpy.random.uniform', 
'np.random.uniform', ([], {'low': '(0.0)', 'high': '(1.0)'}), '(low=0.0, high=1.0)\n', (1548, 1567), True, 'import numpy as np\n')]
|
from rest_framework.response import Response
from rest_framework_simplejwt.views import TokenObtainPairView
from rest_framework.viewsets import ModelViewSet
from rest_framework.permissions import AllowAny
from rest_framework import status
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken
from core.auth.serializers import LoginSerializer, RegisterSerializer
class LoginViewSet(ModelViewSet, TokenObtainPairView):
    """Obtain a JWT token pair from user credentials (POST only)."""
    serializer_class = LoginSerializer
    permission_classes = (AllowAny,)
    http_method_names = ['post']
    def create(self, request, *args, **kwargs):
        """Validate credentials and return the serializer's token payload."""
        serializer = self.get_serializer(data=request.data)
        try:
            serializer.is_valid(raise_exception=True)
        except TokenError as e:
            # Re-raise as the simplejwt exception so DRF renders a 401.
            raise InvalidToken(e.args[0])
        return Response(serializer.validated_data, status=status.HTTP_200_OK)
class RegistrationViewSet(ModelViewSet, TokenObtainPairView):
    """Register a new user and return their profile plus a JWT pair (POST only)."""
    serializer_class = RegisterSerializer
    permission_classes = (AllowAny,)
    http_method_names = ['post']
    def create(self, request, *args, **kwargs):
        """Create the user, then issue fresh refresh/access tokens for them."""
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user = serializer.save()
        refresh = RefreshToken.for_user(user)
        res = {
            "refresh": str(refresh),
            "access": str(refresh.access_token),
        }
        # NOTE(review): the access token is exposed under the key "token"
        # (not "access"); clients depend on this naming, so it is kept as-is.
        return Response({
            "user": serializer.data,
            "refresh": res["refresh"],
            "token": res["access"]
        }, status=status.HTTP_201_CREATED)
|
[
"rest_framework_simplejwt.tokens.RefreshToken.for_user",
"rest_framework_simplejwt.exceptions.InvalidToken",
"rest_framework.response.Response"
] |
[((872, 934), 'rest_framework.response.Response', 'Response', (['serializer.validated_data'], {'status': 'status.HTTP_200_OK'}), '(serializer.validated_data, status=status.HTTP_200_OK)\n', (880, 934), False, 'from rest_framework.response import Response\n'), ((1322, 1349), 'rest_framework_simplejwt.tokens.RefreshToken.for_user', 'RefreshToken.for_user', (['user'], {}), '(user)\n', (1343, 1349), False, 'from rest_framework_simplejwt.tokens import RefreshToken\n'), ((1478, 1601), 'rest_framework.response.Response', 'Response', (["{'user': serializer.data, 'refresh': res['refresh'], 'token': res['access']}"], {'status': 'status.HTTP_201_CREATED'}), "({'user': serializer.data, 'refresh': res['refresh'], 'token': res[\n 'access']}, status=status.HTTP_201_CREATED)\n", (1486, 1601), False, 'from rest_framework.response import Response\n'), ((832, 855), 'rest_framework_simplejwt.exceptions.InvalidToken', 'InvalidToken', (['e.args[0]'], {}), '(e.args[0])\n', (844, 855), False, 'from rest_framework_simplejwt.exceptions import TokenError, InvalidToken\n')]
|
#! python
import logging
import os.path
import sys
import traceback
import toil.config
import toil.parm
import toil.parm.parse
import toil.framework
import toil.util.decorator
from toil.batch.base import BaseBatch
# Configure root logging for the whole batch run; default level is ERROR.
logging.basicConfig(format='%(asctime)s %(levelname)s: %(name)s %(message)s', level=logging.ERROR)
# Module-level logger used by the Batch class below.
logger = logging.getLogger(__name__)
class Batch(BaseBatch):
    """Command-line batch for managing toil configuration files.

    Supports creating a config directory, generating config files and
    encryption keys, and encrypting/decrypting existing config files.
    """

    def create_toil(self):
        """Parse command-line parameters and build the toil framework object."""
        # require a config file to be passed in as parameter
        args = toil.parm.parse.handle_parms(optional=['create', 'create_key', 'encrypt', 'decrypt', 'init'])
        # require config file, encyyption key and initialization vector
        # args = parm.handle.handle_parms(['c', 'k', 'i'])
        logger.debug(args)
        return toil.framework.create(**args)

    @toil.util.decorator.timeit(loops=1)
    def execute(self, framework):
        """Dispatch to the configuration action(s) selected on the command line.

        Prints a usage message when no recognised action was requested.
        """
        logger.info('execute')
        logger.debug(framework.args)
        performed_config = False
        if framework.args['init'] is not None:
            self.toil_init(framework, framework.args['init'])
            performed_config = True
        if framework.args['create'] is not None:
            self.create_config(framework, framework.args['create'])
            performed_config = True
        if framework.args['create_key'] is not None:
            self.create_encryption_key(framework, framework.args['create_key'])
            performed_config = True
        if framework.args['encrypt'] is not None:
            self.encrypt(framework, framework.args['encrypt'])
            performed_config = True
        if framework.args['decrypt'] is not None:
            self.decrypt(framework, framework.args['decrypt'])
            performed_config = True
        if not performed_config:
            usage = """
            usage: toil-config [--init CONFIG_DIR_NAME]
                                create directory, create config.json, create key
                               [--create FILE_NAME]
                                create config file with file name
                               [--create_key FILE_NAME]
                                create encryption key with file name
                               [--encrypt CONFIG_FILE_NAME]
                                encrypt config file
                               [--decrypt CONFIG_FILE_NAME]
                                decrypt config file

            To get started try this:
            toil-config --init /path/.toil
                creates a config.json file in your directory and an encryption key
            toil-config -k /path/.toil/key --encrypt /path/.toil/config.json
                create the file /path/.toil/config.json.encrypted where all values are encrypted
            toil-config -k /path/.toil/key --decrypt /Users/aclove/.toil/config.json.encrypted
                create the file /path/.toil/config.json.encrypted.decrypted where all values are decrypted
            """
            print(usage)

    def toil_init(self, framework, dir_name):
        """Create a config.json and an encryption key inside *dir_name*."""
        self.create_config(framework, dir_name + '/config.json')
        self.create_encryption_key(framework, dir_name + '/key')

    def _report_error(self, ex):
        """Log an exception's type/args and dump its traceback to stdout.

        Extracted helper: this identical sequence was duplicated in four
        methods below.
        """
        logger.error(ex)
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        logger.error(message)
        traceback.print_exc(file=sys.stdout)

    def create_config(self, framework, file_name):
        """Generate a new config file unless *file_name* already exists."""
        try:
            if os.path.isfile(file_name):
                print('The file {0} already exists'.format(file_name))
            else:
                toil.config.util.generate_config_file(file_name)
                print('created {0}'.format(file_name))
        except Exception as ex:
            self._report_error(ex)

    def create_encryption_key(self, framework, file_name):
        """Generate a new encryption key unless *file_name* already exists."""
        try:
            if os.path.isfile(file_name):
                print('The file {0} already exists'.format(file_name))
            else:
                key = framework.encryptor.generate_key(file_name)
                print('created {0} with value {1}'.format(file_name, key))
        except Exception as ex:
            self._report_error(ex)

    def encrypt(self, framework, file_name):
        """Encrypt *file_name*, writing the result to <file_name>.encrypted."""
        logger.info('execute')
        try:
            framework.encrypt_config_file(file_name, file_name + '.encrypted')
        except Exception as ex:
            self._report_error(ex)

    def decrypt(self, framework, file_name):
        """Decrypt *file_name*, writing the result to <file_name>.decrypted."""
        logger.info('execute')
        try:
            framework.decrypt_config_file(file_name, file_name + '.decrypted')
        except Exception as ex:
            self._report_error(ex)
def main():
    """Console entry point: build a Batch and run it."""
    batch = Batch()
    batch.run()
|
[
"logging.getLogger",
"traceback.print_exc",
"logging.basicConfig"
] |
[((227, 330), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s %(levelname)s: %(name)s %(message)s"""', 'level': 'logging.ERROR'}), "(format=\n '%(asctime)s %(levelname)s: %(name)s %(message)s', level=logging.ERROR)\n", (246, 330), False, 'import logging\n'), ((336, 363), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (353, 363), False, 'import logging\n'), ((4138, 4174), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stdout'}), '(file=sys.stdout)\n', (4157, 4174), False, 'import traceback\n'), ((4787, 4823), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stdout'}), '(file=sys.stdout)\n', (4806, 4823), False, 'import traceback\n'), ((5259, 5295), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stdout'}), '(file=sys.stdout)\n', (5278, 5295), False, 'import traceback\n'), ((5731, 5767), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stdout'}), '(file=sys.stdout)\n', (5750, 5767), False, 'import traceback\n')]
|
"""Info needed by the plugins."""
from dataclasses import dataclass, field
from typing import Set
from identify import identify
from nitpick.constants import DOT
from nitpick.exceptions import Deprecation
from nitpick.project import Project
@dataclass
class FileInfo:
"""File information needed by the plugin."""
project: Project
path_from_root: str
tags: Set[str] = field(default_factory=set)
@classmethod
def create(cls, project: Project, path_from_root: str) -> "FileInfo":
"""Clean the file name and get its tags."""
if Deprecation.pre_commit_without_dash(path_from_root):
clean_path = DOT + path_from_root
else:
clean_path = DOT + path_from_root[1:] if path_from_root.startswith("-") else path_from_root
tags = set(identify.tags_from_filename(clean_path))
return cls(project, clean_path, tags)
|
[
"dataclasses.field",
"nitpick.exceptions.Deprecation.pre_commit_without_dash",
"identify.identify.tags_from_filename"
] |
[((388, 414), 'dataclasses.field', 'field', ([], {'default_factory': 'set'}), '(default_factory=set)\n', (393, 414), False, 'from dataclasses import dataclass, field\n'), ((570, 621), 'nitpick.exceptions.Deprecation.pre_commit_without_dash', 'Deprecation.pre_commit_without_dash', (['path_from_root'], {}), '(path_from_root)\n', (605, 621), False, 'from nitpick.exceptions import Deprecation\n'), ((806, 845), 'identify.identify.tags_from_filename', 'identify.tags_from_filename', (['clean_path'], {}), '(clean_path)\n', (833, 845), False, 'from identify import identify\n')]
|
# 1. kubectl create -k "github.com/kubernetes-sigs/aws-fsx-csi-driver/deploy/kubernetes/overlays/stable/?ref=master"
# 2. python get_security_group.py
# 3. kubectl apply -f specs/eks/fsx.yml
import os
import sys
import tempfile
import time
from typing import List
import boto3
from kube2.types import Volume
from kube2.utils import (
check_name,
get_current_cluster,
get_volumes,
humanize_date,
load_template,
make_table,
sh,
sh_capture,
)
from kube2.aws_utils import (
get_cluster_vpc_id,
get_clusters,
get_security_group_id,
get_subnet_id,
)
def enable_fsx():
sh(f'kubectl create -k "github.com/kubernetes-sigs/aws-fsx-csi-driver/deploy/kubernetes/overlays/stable/?ref=master"')
def is_fsx_enabled():
s = sh_capture(f'kubectl get csidrivers.storage.k8s.io fsx.csi.aws.com').strip()
return s.startswith('NAME')
def create_and_configure_security_group(
*,
cluster_name: str,
volume_name: str,
vpc_id: str,
):
client = boto3.client('ec2', region_name='us-east-1')
group_name = f'{cluster_name}-{volume_name}-fsx'
sg_id = get_security_group_id(vpc_id=vpc_id, group_name=group_name)
# create if doesn't already exist
if sg_id is None:
print('Security group does not exist for cluster. Creating one...')
response = client.create_security_group(
GroupName=group_name,
Description=f'SG for FSx {cluster_name}-{volume_name}',
VpcId=vpc_id,
)
sg_id: str = response['GroupId']
client.authorize_security_group_ingress(
GroupId=sg_id,
IpPermissions=[
{
'IpProtocol': 'tcp',
'FromPort': 988,
'ToPort': 988,
'IpRanges': [{'CidrIp': '192.168.0.0/16'}],
},
],
)
else:
print('Security group already exists for cluster...')
return sg_id
def get_pvc_name(volume_name: str):
return f'pvc-{volume_name}'
def get_sc_name(volume_name: str):
return f'sc-{volume_name}'
class VolumeCLI(object):
'''
Create or destroy shared persistent volumes on FSx.
'''
def create(
self,
*,
name: str,
storage_size: str,
):
'''
Create a new FSx volume backed by S3.
'''
check_name(name)
if name in [v.name for v in get_volumes()]:
print(f'Error: Volume "{name}" already exists.')
sys.exit(1)
cluster_name = get_current_cluster()
if cluster_name is None:
print('No cluster selected. Switch to or create a cluster first.')
sys.exit(1)
# enable the FSx feature on this cluster
if not is_fsx_enabled():
enable_fsx()
vpc_id = get_cluster_vpc_id(cluster_name)
sg_id = create_and_configure_security_group(
cluster_name=cluster_name,
volume_name=name,
vpc_id=vpc_id
)
pvc_name = get_pvc_name(name)
sc_name = get_sc_name(name)
subnet_id = get_subnet_id(vpc_id)
assert subnet_id is not None
with tempfile.TemporaryDirectory() as tmpdir:
script_fn = os.path.join(tmpdir, 'fsx.yml')
script = load_template(
fn='templates/fsx.yml',
args={
'storage_class_name': sc_name,
's3_import_path': f's3://kube2-volumes/{name}',
's3_export_path': f's3://kube2-volumes/{name}/export',
'security_group_id': sg_id,
'persistent_volume_claim_name': pvc_name,
'storage_size': storage_size,
'storage_class_name': sc_name,
'subnet_id': subnet_id,
}
)
with open(script_fn, 'w') as f:
f.write(script)
print('Creating volume...')
sh(f'kubectl apply -f {script_fn}')
print('Waiting for FSx filesystem to be created (check progress here: https://console.aws.amazon.com/fsx/home?region=us-east-1)...')
for _ in range(60*2):
s = sh_capture(f'''kubectl get pvc pvc-my-vol -o 'jsonpath={{..status.phase}}' ''').strip()
if s == 'Bound':
break
time.sleep(1)
sh(f'kubectl describe pvc | tail -n 1')
def delete(
self,
*,
name: str,
):
'''
Delete an FSx volume.
'''
# TODO: more checks here around what can can't be deleted:
# - doesn't exist?
# - is attached to pods?
# - has some files? (maybe y/n checks)
pvc_name = get_pvc_name(name)
sc_name = get_sc_name(name)
try:
sh(f'kubectl delete pvc {pvc_name}')
except Exception as e:
print(e)
try:
sh(f'kubectl delete sc {sc_name}')
except Exception as e:
print(e)
def list(self):
'''
List all the volumes in the current cluster.
'''
volumes = get_volumes()
if len(volumes) == 0:
print('No volumes.')
else:
table = [['NAME', 'CAPACITY', 'USAGE', 'CREATED']]
for v in volumes:
table.append([
v.name,
v.capacity,
v.usage,
humanize_date(v.created),
])
print(make_table(table))
|
[
"kube2.aws_utils.get_security_group_id",
"kube2.utils.load_template",
"kube2.utils.sh_capture",
"tempfile.TemporaryDirectory",
"boto3.client",
"kube2.aws_utils.get_cluster_vpc_id",
"kube2.utils.make_table",
"time.sleep",
"kube2.utils.sh",
"kube2.utils.check_name",
"kube2.aws_utils.get_subnet_id",
"kube2.utils.humanize_date",
"sys.exit",
"kube2.utils.get_current_cluster",
"os.path.join",
"kube2.utils.get_volumes"
] |
[((621, 744), 'kube2.utils.sh', 'sh', (['f"""kubectl create -k "github.com/kubernetes-sigs/aws-fsx-csi-driver/deploy/kubernetes/overlays/stable/?ref=master\\""""'], {}), '(f\'kubectl create -k "github.com/kubernetes-sigs/aws-fsx-csi-driver/deploy/kubernetes/overlays/stable/?ref=master"\'\n )\n', (623, 744), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((1009, 1053), 'boto3.client', 'boto3.client', (['"""ec2"""'], {'region_name': '"""us-east-1"""'}), "('ec2', region_name='us-east-1')\n", (1021, 1053), False, 'import boto3\n'), ((1120, 1179), 'kube2.aws_utils.get_security_group_id', 'get_security_group_id', ([], {'vpc_id': 'vpc_id', 'group_name': 'group_name'}), '(vpc_id=vpc_id, group_name=group_name)\n', (1141, 1179), False, 'from kube2.aws_utils import get_cluster_vpc_id, get_clusters, get_security_group_id, get_subnet_id\n'), ((2391, 2407), 'kube2.utils.check_name', 'check_name', (['name'], {}), '(name)\n', (2401, 2407), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((2569, 2590), 'kube2.utils.get_current_cluster', 'get_current_cluster', ([], {}), '()\n', (2588, 2590), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((2853, 2885), 'kube2.aws_utils.get_cluster_vpc_id', 'get_cluster_vpc_id', (['cluster_name'], {}), '(cluster_name)\n', (2871, 2885), False, 'from kube2.aws_utils import get_cluster_vpc_id, get_clusters, get_security_group_id, get_subnet_id\n'), ((3138, 3159), 'kube2.aws_utils.get_subnet_id', 'get_subnet_id', (['vpc_id'], {}), '(vpc_id)\n', (3151, 3159), False, 'from kube2.aws_utils import get_cluster_vpc_id, get_clusters, get_security_group_id, get_subnet_id\n'), ((5207, 5220), 'kube2.utils.get_volumes', 'get_volumes', ([], {}), '()\n', (5218, 5220), False, 'from kube2.utils 
import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((772, 840), 'kube2.utils.sh_capture', 'sh_capture', (['f"""kubectl get csidrivers.storage.k8s.io fsx.csi.aws.com"""'], {}), "(f'kubectl get csidrivers.storage.k8s.io fsx.csi.aws.com')\n", (782, 840), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((2533, 2544), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2541, 2544), False, 'import sys\n'), ((2715, 2726), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2723, 2726), False, 'import sys\n'), ((3211, 3240), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (3238, 3240), False, 'import tempfile\n'), ((3277, 3308), 'os.path.join', 'os.path.join', (['tmpdir', '"""fsx.yml"""'], {}), "(tmpdir, 'fsx.yml')\n", (3289, 3308), False, 'import os\n'), ((3330, 3679), 'kube2.utils.load_template', 'load_template', ([], {'fn': '"""templates/fsx.yml"""', 'args': "{'storage_class_name': sc_name, 's3_import_path':\n f's3://kube2-volumes/{name}', 's3_export_path':\n f's3://kube2-volumes/{name}/export', 'security_group_id': sg_id,\n 'persistent_volume_claim_name': pvc_name, 'storage_size': storage_size,\n 'storage_class_name': sc_name, 'subnet_id': subnet_id}"}), "(fn='templates/fsx.yml', args={'storage_class_name': sc_name,\n 's3_import_path': f's3://kube2-volumes/{name}', 's3_export_path':\n f's3://kube2-volumes/{name}/export', 'security_group_id': sg_id,\n 'persistent_volume_claim_name': pvc_name, 'storage_size': storage_size,\n 'storage_class_name': sc_name, 'subnet_id': subnet_id})\n", (3343, 3679), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((4018, 4053), 'kube2.utils.sh', 'sh', (['f"""kubectl apply -f {script_fn}"""'], {}), "(f'kubectl apply -f {script_fn}')\n", (4020, 4053), False, 'from 
kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((4442, 4481), 'kube2.utils.sh', 'sh', (['f"""kubectl describe pvc | tail -n 1"""'], {}), "(f'kubectl describe pvc | tail -n 1')\n", (4444, 4481), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((4889, 4925), 'kube2.utils.sh', 'sh', (['f"""kubectl delete pvc {pvc_name}"""'], {}), "(f'kubectl delete pvc {pvc_name}')\n", (4891, 4925), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((5003, 5037), 'kube2.utils.sh', 'sh', (['f"""kubectl delete sc {sc_name}"""'], {}), "(f'kubectl delete sc {sc_name}')\n", (5005, 5037), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((4416, 4429), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4426, 4429), False, 'import time\n'), ((5594, 5611), 'kube2.utils.make_table', 'make_table', (['table'], {}), '(table)\n', (5604, 5611), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((2444, 2457), 'kube2.utils.get_volumes', 'get_volumes', ([], {}), '()\n', (2455, 2457), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((4253, 4328), 'kube2.utils.sh_capture', 'sh_capture', (['f"""kubectl get pvc pvc-my-vol -o \'jsonpath={{..status.phase}}\' """'], {}), '(f"kubectl get pvc pvc-my-vol -o \'jsonpath={{..status.phase}}\' ")\n', (4263, 4328), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n'), ((5531, 5555), 'kube2.utils.humanize_date', 'humanize_date', (['v.created'], {}), 
'(v.created)\n', (5544, 5555), False, 'from kube2.utils import check_name, get_current_cluster, get_volumes, humanize_date, load_template, make_table, sh, sh_capture\n')]
|
import numpy as np
import os
from astropy.time import Time
from pandas import DataFrame
from orbitize.kepler import calc_orbit
from orbitize import read_input, system, sampler
def test_secondary_rv_lnlike_calc():
"""
Generates fake secondary RV data and asserts that
the log(likelihood) of the true parameters is what we expect.
Also tests that the primary and secondary RV orbits are related by
-m/mtot
"""
# define an orbit & generate secondary RVs
a = 10
e = 0
i = np.pi / 4
omega = 0
Omega = 0
tau = 0.3
m0 = 1
m1 = 0.1
plx = 10
orbitize_params_list = np.array([a, e, i, omega, Omega, tau, plx, m1, m0])
epochs = Time(np.linspace(2005, 2025, int(1e3)), format='decimalyear').mjd
_, _, rv_p = calc_orbit(epochs, a, e, i, omega, Omega, tau, plx, m0+m1, mass_for_Kamp=m0)
data_file = DataFrame(columns=['epoch', 'object','rv', 'rv_err'])
data_file.epoch = epochs
data_file.object = np.ones(len(epochs), dtype=int)
data_file.rv = rv_p
data_file.rv_err = np.ones(len(epochs)) * 0.01
data_file.to_csv('tmp.csv', index=False)
# set up a fit using the simulated data
data_table = read_input.read_file('tmp.csv')
mySys = system.System(1, data_table, m0, plx, mass_err=0.1, plx_err=0.1, fit_secondary_mass=True)
mySamp = sampler.MCMC(mySys)
computed_lnlike = mySamp._logl(orbitize_params_list)
# residuals should be 0
assert computed_lnlike == np.sum(-np.log(np.sqrt(2 * np.pi * data_file.rv_err.values**2)))
# clean up
os.system('rm tmp.csv')
# assert that the secondary orbit is the primary orbit scaled
_, _, rv = mySys.compute_all_orbits(orbitize_params_list)
rv0 = rv[:,0]
rv1 = rv[:,1]
assert np.all(rv0 == -m1 / m0 * rv1)
if __name__ == '__main__':
test_secondary_rv_lnlike_calc()
|
[
"pandas.DataFrame",
"orbitize.kepler.calc_orbit",
"orbitize.read_input.read_file",
"os.system",
"orbitize.system.System",
"numpy.array",
"orbitize.sampler.MCMC",
"numpy.all",
"numpy.sqrt"
] |
[((628, 679), 'numpy.array', 'np.array', (['[a, e, i, omega, Omega, tau, plx, m1, m0]'], {}), '([a, e, i, omega, Omega, tau, plx, m1, m0])\n', (636, 679), True, 'import numpy as np\n'), ((778, 856), 'orbitize.kepler.calc_orbit', 'calc_orbit', (['epochs', 'a', 'e', 'i', 'omega', 'Omega', 'tau', 'plx', '(m0 + m1)'], {'mass_for_Kamp': 'm0'}), '(epochs, a, e, i, omega, Omega, tau, plx, m0 + m1, mass_for_Kamp=m0)\n', (788, 856), False, 'from orbitize.kepler import calc_orbit\n'), ((872, 926), 'pandas.DataFrame', 'DataFrame', ([], {'columns': "['epoch', 'object', 'rv', 'rv_err']"}), "(columns=['epoch', 'object', 'rv', 'rv_err'])\n", (881, 926), False, 'from pandas import DataFrame\n'), ((1193, 1224), 'orbitize.read_input.read_file', 'read_input.read_file', (['"""tmp.csv"""'], {}), "('tmp.csv')\n", (1213, 1224), False, 'from orbitize import read_input, system, sampler\n'), ((1237, 1330), 'orbitize.system.System', 'system.System', (['(1)', 'data_table', 'm0', 'plx'], {'mass_err': '(0.1)', 'plx_err': '(0.1)', 'fit_secondary_mass': '(True)'}), '(1, data_table, m0, plx, mass_err=0.1, plx_err=0.1,\n fit_secondary_mass=True)\n', (1250, 1330), False, 'from orbitize import read_input, system, sampler\n'), ((1340, 1359), 'orbitize.sampler.MCMC', 'sampler.MCMC', (['mySys'], {}), '(mySys)\n', (1352, 1359), False, 'from orbitize import read_input, system, sampler\n'), ((1561, 1584), 'os.system', 'os.system', (['"""rm tmp.csv"""'], {}), "('rm tmp.csv')\n", (1570, 1584), False, 'import os\n'), ((1762, 1791), 'numpy.all', 'np.all', (['(rv0 == -m1 / m0 * rv1)'], {}), '(rv0 == -m1 / m0 * rv1)\n', (1768, 1791), True, 'import numpy as np\n'), ((1491, 1540), 'numpy.sqrt', 'np.sqrt', (['(2 * np.pi * data_file.rv_err.values ** 2)'], {}), '(2 * np.pi * data_file.rv_err.values ** 2)\n', (1498, 1540), True, 'import numpy as np\n')]
|
import math
import pygame
import sys
from pygame.locals import *
pygame.init()
screen = pygame.display.set_mode((600,500))
pygame.display.set_caption("The Pie Game - Press 1,2,3,4")
myfont = pygame.font.Font(None, 60)
color = 200, 80, 60
width = 4
x = 300
y = 250
radius = 200
position = x-radius, y-radius, radius*2, radius*2
piece1 = False
piece2 = False
piece3 = False
piece4 = False
while True:
for event in pygame.event.get():
if event.type == QUIT:
sys.exit()
elif event.type == KEYUP:
if event.key == pygame.K_ESCAPE:
sys.exit()
elif event.key == pygame.K_1:
piece1 = True
elif event.key == pygame.K_2:
piece2 = True
elif event.key == pygame.K_3:
piece3 = True
elif event.key == pygame.K_4:
piece4 = True
screen.fill((0,0,200))
textImg1 = myfont.render("1", True, color)
screen.blit(textImg1, (x+radius/2-20, y-radius/2))
textImg2 = myfont.render("2", True, color)
screen.blit(textImg2, (x-radius/2, y-radius/2))
textImg3 = myfont.render("3", True, color)
screen.blit(textImg3, (x-radius/2, y+radius/2-20))
textImg4 = myfont.render("4", True, color)
screen.blit(textImg4, (x+radius/2-20, y+radius/2-20))
if piece1:
start_angle = math.radians(0)
end_angle = math.radians(90)
pygame.draw.arc(screen, color, position, start_angle, end_angle, width)
pygame.draw.line(screen, color, (x,y), (x,y-radius), width)
pygame.draw.line(screen, color, (x,y), (x+radius,y), width)
if piece2:
start_angle = math.radians(90)
end_angle = math.radians(180)
pygame.draw.arc(screen, color, position, start_angle, end_angle, width)
pygame.draw.line(screen, color, (x,y), (x,y-radius), width)
pygame.draw.line(screen, color, (x,y), (x-radius,y), width)
if piece3:
start_angle = math.radians(180)
end_angle = math.radians(270)
pygame.draw.arc(screen, color, position, start_angle, end_angle, width)
pygame.draw.line(screen, color, (x,y), (x-radius,y), width)
pygame.draw.line(screen, color, (x,y), (x,y+radius), width)
if piece4:
start_angle = math.radians(270)
end_angle = math.radians(360)
pygame.draw.arc(screen, color, position, start_angle, end_angle, width)
pygame.draw.line(screen, color, (x,y), (x,y+radius), width)
pygame.draw.line(screen, color, (x,y), (x+radius,y), width)
if piece1 and piece2 and piece3 and piece4:
color = 0,255,0
pygame.display.update()
|
[
"pygame.draw.arc",
"pygame.draw.line",
"pygame.event.get",
"pygame.display.set_mode",
"math.radians",
"pygame.init",
"pygame.display.update",
"pygame.font.Font",
"pygame.display.set_caption",
"sys.exit"
] |
[((66, 79), 'pygame.init', 'pygame.init', ([], {}), '()\n', (77, 79), False, 'import pygame\n'), ((89, 124), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(600, 500)'], {}), '((600, 500))\n', (112, 124), False, 'import pygame\n'), ((124, 182), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""The Pie Game - Press 1,2,3,4"""'], {}), "('The Pie Game - Press 1,2,3,4')\n", (150, 182), False, 'import pygame\n'), ((192, 218), 'pygame.font.Font', 'pygame.font.Font', (['None', '(60)'], {}), '(None, 60)\n', (208, 218), False, 'import pygame\n'), ((417, 435), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (433, 435), False, 'import pygame\n'), ((2344, 2367), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (2365, 2367), False, 'import pygame\n'), ((1204, 1219), 'math.radians', 'math.radians', (['(0)'], {}), '(0)\n', (1216, 1219), False, 'import math\n'), ((1234, 1250), 'math.radians', 'math.radians', (['(90)'], {}), '(90)\n', (1246, 1250), False, 'import math\n'), ((1253, 1324), 'pygame.draw.arc', 'pygame.draw.arc', (['screen', 'color', 'position', 'start_angle', 'end_angle', 'width'], {}), '(screen, color, position, start_angle, end_angle, width)\n', (1268, 1324), False, 'import pygame\n'), ((1327, 1390), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'color', '(x, y)', '(x, y - radius)', 'width'], {}), '(screen, color, (x, y), (x, y - radius), width)\n', (1343, 1390), False, 'import pygame\n'), ((1389, 1452), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'color', '(x, y)', '(x + radius, y)', 'width'], {}), '(screen, color, (x, y), (x + radius, y), width)\n', (1405, 1452), False, 'import pygame\n'), ((1477, 1493), 'math.radians', 'math.radians', (['(90)'], {}), '(90)\n', (1489, 1493), False, 'import math\n'), ((1508, 1525), 'math.radians', 'math.radians', (['(180)'], {}), '(180)\n', (1520, 1525), False, 'import math\n'), ((1528, 1599), 'pygame.draw.arc', 'pygame.draw.arc', (['screen', 'color', 'position', 
'start_angle', 'end_angle', 'width'], {}), '(screen, color, position, start_angle, end_angle, width)\n', (1543, 1599), False, 'import pygame\n'), ((1602, 1665), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'color', '(x, y)', '(x, y - radius)', 'width'], {}), '(screen, color, (x, y), (x, y - radius), width)\n', (1618, 1665), False, 'import pygame\n'), ((1664, 1727), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'color', '(x, y)', '(x - radius, y)', 'width'], {}), '(screen, color, (x, y), (x - radius, y), width)\n', (1680, 1727), False, 'import pygame\n'), ((1752, 1769), 'math.radians', 'math.radians', (['(180)'], {}), '(180)\n', (1764, 1769), False, 'import math\n'), ((1784, 1801), 'math.radians', 'math.radians', (['(270)'], {}), '(270)\n', (1796, 1801), False, 'import math\n'), ((1804, 1875), 'pygame.draw.arc', 'pygame.draw.arc', (['screen', 'color', 'position', 'start_angle', 'end_angle', 'width'], {}), '(screen, color, position, start_angle, end_angle, width)\n', (1819, 1875), False, 'import pygame\n'), ((1878, 1941), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'color', '(x, y)', '(x - radius, y)', 'width'], {}), '(screen, color, (x, y), (x - radius, y), width)\n', (1894, 1941), False, 'import pygame\n'), ((1940, 2003), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'color', '(x, y)', '(x, y + radius)', 'width'], {}), '(screen, color, (x, y), (x, y + radius), width)\n', (1956, 2003), False, 'import pygame\n'), ((2028, 2045), 'math.radians', 'math.radians', (['(270)'], {}), '(270)\n', (2040, 2045), False, 'import math\n'), ((2060, 2077), 'math.radians', 'math.radians', (['(360)'], {}), '(360)\n', (2072, 2077), False, 'import math\n'), ((2080, 2151), 'pygame.draw.arc', 'pygame.draw.arc', (['screen', 'color', 'position', 'start_angle', 'end_angle', 'width'], {}), '(screen, color, position, start_angle, end_angle, width)\n', (2095, 2151), False, 'import pygame\n'), ((2154, 2217), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'color', 
'(x, y)', '(x, y + radius)', 'width'], {}), '(screen, color, (x, y), (x, y + radius), width)\n', (2170, 2217), False, 'import pygame\n'), ((2216, 2279), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'color', '(x, y)', '(x + radius, y)', 'width'], {}), '(screen, color, (x, y), (x + radius, y), width)\n', (2232, 2279), False, 'import pygame\n'), ((465, 475), 'sys.exit', 'sys.exit', ([], {}), '()\n', (473, 475), False, 'import sys\n'), ((544, 554), 'sys.exit', 'sys.exit', ([], {}), '()\n', (552, 554), False, 'import sys\n')]
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import torch
from sklearn.metrics import hamming_loss as sk_hamming_loss
from tests.classification.inputs import _input_binary, _input_binary_prob
from tests.classification.inputs import _input_multiclass as _input_mcls
from tests.classification.inputs import _input_multiclass_prob as _input_mcls_prob
from tests.classification.inputs import _input_multidim_multiclass as _input_mdmc
from tests.classification.inputs import _input_multidim_multiclass_prob as _input_mdmc_prob
from tests.classification.inputs import _input_multilabel as _input_mlb
from tests.classification.inputs import _input_multilabel_multidim as _input_mlmd
from tests.classification.inputs import _input_multilabel_multidim_prob as _input_mlmd_prob
from tests.classification.inputs import _input_multilabel_prob as _input_mlb_prob
from tests.helpers.testers import THRESHOLD, MetricTester
from torchmetrics import HammingDistance
from torchmetrics.classification.checks import _input_format_classification
from torchmetrics.functional import hamming_distance
torch.manual_seed(42)
def _sk_hamming_loss(preds, target):
sk_preds, sk_target, _ = _input_format_classification(preds, target, threshold=THRESHOLD)
sk_preds, sk_target = sk_preds.numpy(), sk_target.numpy()
sk_preds, sk_target = sk_preds.reshape(sk_preds.shape[0], -1), sk_target.reshape(sk_target.shape[0], -1)
return sk_hamming_loss(y_true=sk_target, y_pred=sk_preds)
@pytest.mark.parametrize(
"preds, target",
[
(_input_binary_prob.preds, _input_binary_prob.target),
(_input_binary.preds, _input_binary.target),
(_input_mlb_prob.preds, _input_mlb_prob.target),
(_input_mlb.preds, _input_mlb.target),
(_input_mcls_prob.preds, _input_mcls_prob.target),
(_input_mcls.preds, _input_mcls.target),
(_input_mdmc_prob.preds, _input_mdmc_prob.target),
(_input_mdmc.preds, _input_mdmc.target),
(_input_mlmd_prob.preds, _input_mlmd_prob.target),
(_input_mlmd.preds, _input_mlmd.target),
],
)
class TestHammingDistance(MetricTester):
@pytest.mark.parametrize("ddp", [True, False])
@pytest.mark.parametrize("dist_sync_on_step", [False, True])
def test_hamming_distance_class(self, ddp, dist_sync_on_step, preds, target):
self.run_class_metric_test(
ddp=ddp,
preds=preds,
target=target,
metric_class=HammingDistance,
sk_metric=_sk_hamming_loss,
dist_sync_on_step=dist_sync_on_step,
metric_args={"threshold": THRESHOLD},
)
def test_hamming_distance_fn(self, preds, target):
self.run_functional_metric_test(
preds,
target,
metric_functional=hamming_distance,
sk_metric=_sk_hamming_loss,
metric_args={"threshold": THRESHOLD},
)
@pytest.mark.parametrize("threshold", [1.5])
def test_wrong_params(threshold):
preds, target = _input_mcls_prob.preds, _input_mcls_prob.target
with pytest.raises(ValueError):
ham_dist = HammingDistance(threshold=threshold)
ham_dist(preds, target)
ham_dist.compute()
with pytest.raises(ValueError):
hamming_distance(preds, target, threshold=threshold)
|
[
"torch.manual_seed",
"pytest.raises",
"torchmetrics.classification.checks._input_format_classification",
"torchmetrics.HammingDistance",
"pytest.mark.parametrize",
"torchmetrics.functional.hamming_distance",
"sklearn.metrics.hamming_loss"
] |
[((1635, 1656), 'torch.manual_seed', 'torch.manual_seed', (['(42)'], {}), '(42)\n', (1652, 1656), False, 'import torch\n'), ((2027, 2562), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""preds, target"""', '[(_input_binary_prob.preds, _input_binary_prob.target), (_input_binary.\n preds, _input_binary.target), (_input_mlb_prob.preds, _input_mlb_prob.\n target), (_input_mlb.preds, _input_mlb.target), (_input_mcls_prob.preds,\n _input_mcls_prob.target), (_input_mcls.preds, _input_mcls.target), (\n _input_mdmc_prob.preds, _input_mdmc_prob.target), (_input_mdmc.preds,\n _input_mdmc.target), (_input_mlmd_prob.preds, _input_mlmd_prob.target),\n (_input_mlmd.preds, _input_mlmd.target)]'], {}), "('preds, target', [(_input_binary_prob.preds,\n _input_binary_prob.target), (_input_binary.preds, _input_binary.target),\n (_input_mlb_prob.preds, _input_mlb_prob.target), (_input_mlb.preds,\n _input_mlb.target), (_input_mcls_prob.preds, _input_mcls_prob.target),\n (_input_mcls.preds, _input_mcls.target), (_input_mdmc_prob.preds,\n _input_mdmc_prob.target), (_input_mdmc.preds, _input_mdmc.target), (\n _input_mlmd_prob.preds, _input_mlmd_prob.target), (_input_mlmd.preds,\n _input_mlmd.target)])\n", (2050, 2562), False, 'import pytest\n'), ((3459, 3502), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""threshold"""', '[1.5]'], {}), "('threshold', [1.5])\n", (3482, 3502), False, 'import pytest\n'), ((1725, 1789), 'torchmetrics.classification.checks._input_format_classification', '_input_format_classification', (['preds', 'target'], {'threshold': 'THRESHOLD'}), '(preds, target, threshold=THRESHOLD)\n', (1753, 1789), False, 'from torchmetrics.classification.checks import _input_format_classification\n'), ((1973, 2023), 'sklearn.metrics.hamming_loss', 'sk_hamming_loss', ([], {'y_true': 'sk_target', 'y_pred': 'sk_preds'}), '(y_true=sk_target, y_pred=sk_preds)\n', (1988, 2023), True, 'from sklearn.metrics import hamming_loss as sk_hamming_loss\n'), ((2679, 2724), 
'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ddp"""', '[True, False]'], {}), "('ddp', [True, False])\n", (2702, 2724), False, 'import pytest\n'), ((2730, 2789), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dist_sync_on_step"""', '[False, True]'], {}), "('dist_sync_on_step', [False, True])\n", (2753, 2789), False, 'import pytest\n'), ((3615, 3640), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3628, 3640), False, 'import pytest\n'), ((3661, 3697), 'torchmetrics.HammingDistance', 'HammingDistance', ([], {'threshold': 'threshold'}), '(threshold=threshold)\n', (3676, 3697), False, 'from torchmetrics import HammingDistance\n'), ((3767, 3792), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3780, 3792), False, 'import pytest\n'), ((3802, 3854), 'torchmetrics.functional.hamming_distance', 'hamming_distance', (['preds', 'target'], {'threshold': 'threshold'}), '(preds, target, threshold=threshold)\n', (3818, 3854), False, 'from torchmetrics.functional import hamming_distance\n')]
|
import tensorflow as tf
import numpy as np
def get_infos2Laplace_1D(input_dim=1, out_dim=1, intervalL=0.0, intervalR=1.0, equa_name=None):
# -uxx = f
if equa_name == 'PDE1':
# u=sin(pi*x), f=-pi*pi*sin(pi*x)
fside = lambda x: -(np.pi)*(np.pi)*tf.sin(np.pi*x)
utrue = lambda x: tf.sin(np.pi*x)
uleft = lambda x: tf.sin(np.pi*intervalL)
uright = lambda x: tf.sin(np.pi*intervalR)
return fside, utrue, uleft, uright
# 偏微分方程的一些信息:边界条件,初始条件,真解,右端项函数
def get_infos2Laplace_2D(input_dim=1, out_dim=1, left_bottom=0.0, right_top=1.0, equa_name=None):
if equa_name == 'PDE1':
# u=exp(-x)(x_y^3), f = -exp(-x)(x-2+y^3+6y)
f_side = lambda x, y: -(tf.exp(-1.0*x)) * (x - 2 + tf.pow(y, 3) + 6 * y)
u_true = lambda x, y: (tf.exp(-1.0*x))*(x + tf.pow(y, 3))
ux_left = lambda x, y: tf.exp(-left_bottom) * (tf.pow(y, 3) + 1.0 * left_bottom)
ux_right = lambda x, y: tf.exp(-right_top) * (tf.pow(y, 3) + 1.0 * right_top)
uy_bottom = lambda x, y: tf.exp(-x) * (tf.pow(left_bottom, 3) + x)
uy_top = lambda x, y: tf.exp(-x) * (tf.pow(right_top, 3) + x)
return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
elif equa_name == 'PDE2':
f_side = lambda x, y: (-1.0)*tf.sin(np.pi*x) * (2 - np.square(np.pi)*tf.square(y))
u_true = lambda x, y: tf.square(y)*tf.sin(np.pi*x)
ux_left = lambda x, y: tf.square(y) * tf.sin(np.pi * left_bottom)
ux_right = lambda x, y: tf.square(y) * tf.sin(np.pi * right_top)
uy_bottom = lambda x, y: tf.square(left_bottom) * tf.sin(np.pi * x)
uy_top = lambda x, y: tf.square(right_top) * tf.sin(np.pi * x)
return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
elif equa_name == 'PDE3':
# u=exp(x+y), f = -2*exp(x+y)
f_side = lambda x, y: -2.0*(tf.exp(x)*tf.exp(y))
u_true = lambda x, y: tf.exp(x)*tf.exp(y)
ux_left = lambda x, y: tf.multiply(tf.exp(y), tf.exp(left_bottom))
ux_right = lambda x, y: tf.multiply(tf.exp(y), tf.exp(right_top))
uy_bottom = lambda x, y: tf.multiply(tf.exp(x), tf.exp(left_bottom))
uy_top = lambda x, y: tf.multiply(tf.exp(x), tf.exp(right_top))
return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
elif equa_name == 'PDE4':
# u=(1/4)*(x^2+y^2), f = -1
f_side = lambda x, y: -1.0*tf.ones_like(x)
u_true = lambda x, y: 0.25*(tf.pow(x, 2)+tf.pow(y, 2))
ux_left = lambda x, y: 0.25 * tf.pow(y, 2) + 0.25 * tf.pow(left_bottom, 2)
ux_right = lambda x, y: 0.25 * tf.pow(y, 2) + 0.25 * tf.pow(right_top, 2)
uy_bottom = lambda x, y: 0.25 * tf.pow(x, 2) + 0.25 * tf.pow(left_bottom, 2)
uy_top = lambda x, y: 0.25 * tf.pow(x, 2) + 0.25 * tf.pow(right_top, 2)
return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
elif equa_name == 'PDE5':
# u=(1/4)*(x^2+y^2)+x+y, f = -1
f_side = lambda x, y: -1.0*tf.ones_like(x)
u_true = lambda x, y: 0.25*(tf.pow(x, 2)+tf.pow(y, 2)) + x + y
ux_left = lambda x, y: 0.25 * tf.pow(y, 2) + 0.25 * tf.pow(left_bottom, 2) + left_bottom + y
ux_right = lambda x, y: 0.25 * tf.pow(y, 2) + 0.25 * tf.pow(right_top, 2) + right_top + y
uy_bottom = lambda x, y: 0.25 * tf.pow(x, 2) + tf.pow(left_bottom, 2) + left_bottom + x
uy_top = lambda x, y: 0.25 * tf.pow(x, 2) + 0.25 * tf.pow(right_top, 2) + right_top + x
return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
elif equa_name == 'PDE6':
# u=(1/2)*(x^2)*(y^2), f = -(x^2+y^2)
f_side = lambda x, y: -1.0*(tf.pow(x, 2)+tf.pow(y, 2))
u_true = lambda x, y: 0.5 * (tf.pow(x, 2) * tf.pow(y, 2))
ux_left = lambda x, y: 0.5 * (tf.pow(left_bottom, 2) * tf.pow(y, 2))
ux_right = lambda x, y: 0.5 * (tf.pow(right_top, 2) * tf.pow(y, 2))
uy_bottom = lambda x, y: 0.5 * (tf.pow(x, 2) * tf.pow(left_bottom, 2))
uy_top = lambda x, y: 0.5 * (tf.pow(x, 2) * tf.pow(right_top, 2))
return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
elif equa_name == 'PDE7':
# u=(1/2)*(x^2)*(y^2)+x+y, f = -(x^2+y^2)
f_side = lambda x, y: -1.0*(tf.pow(x, 2)+tf.pow(y, 2))
u_true = lambda x, y: 0.5*(tf.pow(x, 2)*tf.pow(y, 2)) + x*tf.ones_like(x) + y*tf.ones_like(y)
ux_left = lambda x, y: 0.5 * tf.multiply(tf.pow(left_bottom, 2), tf.pow(y, 2)) + left_bottom + y
ux_right = lambda x, y: 0.5 * tf.multiply(tf.pow(right_top, 2), tf.pow(y, 2)) + right_top + y
uy_bottom = lambda x, y: 0.5 * tf.multiply(tf.pow(x, 2), tf.pow(left_bottom, 2)) + x + left_bottom
uy_top = lambda x, y: 0.5 * tf.multiply(tf.pow(x, 2), tf.pow(right_top, 2)) + x + right_top
return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
# Some information about the PDE: boundary conditions, initial condition, true solution, right-hand-side function
def get_infos2Laplace_3D(input_dim=1, out_dim=1, intervalL=0.0, intervalR=1.0, equa_name=None):
    """Build the right-hand side, true solution and boundary functions for a 3-D Laplace problem.

    The problem is -Laplace u = f on the cube [intervalL, intervalR]^3.

    Args:
        input_dim: declared input dimension (unused by the lambdas below).
        out_dim: declared output dimension (unused by the lambdas below).
        intervalL: left endpoint of the interval in every coordinate.
        intervalR: right endpoint of the interval in every coordinate.
        equa_name: which predefined equation to build; only 'PDE1' is defined.

    Returns:
        Tuple (fside, utrue, u_00, u_01, u_10, u_11, u_20, u_21) of callables
        taking (x, y, z); u_i0 / u_i1 evaluate utrue with the i-th coordinate
        fixed at intervalL / intervalR respectively.  Implicitly returns None
        for an unknown equa_name.
    """
    if equa_name == 'PDE1':
        # -Laplace U = f
        # u=sin(pi*x)*sin(pi*y)*sin(pi*z), f=-pi*pi*sin(pi*x)*sin(pi*y)*sin(pi*z)
        # NOTE(review): fside below depends on x only and carries a single -pi^2
        # factor, whereas -Laplace of the utrue above would be
        # 3*pi^2*sin(pi*x)*sin(pi*y)*sin(pi*z).  This looks inconsistent with the
        # comment and with utrue — confirm against the original experiment
        # before relying on fside.
        fside = lambda x, y, z: -(np.pi)*(np.pi)*tf.sin(np.pi*x)
        utrue = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*y)*tf.sin(np.pi*z)
        u_00 = lambda x, y, z: tf.sin(np.pi*intervalL)*tf.sin(np.pi*y)*tf.sin(np.pi*z)
        u_01 = lambda x, y, z: tf.sin(np.pi*intervalR)*tf.sin(np.pi*y)*tf.sin(np.pi*z)
        u_10 = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*intervalL)*tf.sin(np.pi*z)
        u_11 = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*intervalR)*tf.sin(np.pi*z)
        u_20 = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*y)*tf.sin(np.pi*intervalL)
        u_21 = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*y)*tf.sin(np.pi*intervalR)
        return fside, utrue, u_00, u_01, u_10, u_11, u_20, u_21
# Some information about the PDE: boundary conditions, initial condition, true solution, right-hand-side function
def get_infos2Laplace_5D(input_dim=1, out_dim=1, intervalL=0.0, intervalR=1.0, equa_name=None):
    """Build the right-hand side, true solution and boundary functions for a 5-D Laplace problem.

    The problem is -Laplace u = f on the hypercube [intervalL, intervalR]^5.

    Args:
        input_dim: declared input dimension (unused by the lambdas below).
        out_dim: declared output dimension (unused by the lambdas below).
        intervalL: left endpoint of the interval in every coordinate.
        intervalR: right endpoint of the interval in every coordinate.
        equa_name: which predefined equation to build; only 'PDE1' is defined.

    Returns:
        Tuple (fside, utrue, u_00, u_01, ..., u_40, u_41) of callables taking
        (x, y, z, s, t); u_i0 / u_i1 evaluate utrue with the i-th coordinate
        fixed at intervalL / intervalR respectively.  Implicitly returns None
        for an unknown equa_name.
    """
    if equa_name == 'PDE1':
        # u=sin(pi*x), f=-pi*pi*sin(pi*x)
        # NOTE(review): the comment above and fside below describe the 1-D case
        # f = -pi^2*sin(pi*x), but utrue is the product of five sine factors,
        # for which -Laplace u would be 5*pi^2 times that product.  Looks
        # inconsistent — confirm against the original experiment before
        # relying on fside.
        fside = lambda x, y, z, s, t: -(np.pi)*(np.pi)*tf.sin(np.pi*x)
        utrue = lambda x, y, z, s, t: tf.sin(np.pi*x)*tf.sin(np.pi*y)*tf.sin(np.pi*z)*tf.sin(np.pi*s)*tf.sin(np.pi*t)
        u_00 = lambda x, y, z, s, t: tf.sin(np.pi*intervalL)*tf.sin(np.pi*y)*tf.sin(np.pi*z)*tf.sin(np.pi*s)*tf.sin(np.pi*t)
        u_01 = lambda x, y, z, s, t: tf.sin(np.pi*intervalR)*tf.sin(np.pi*y)*tf.sin(np.pi*z)*tf.sin(np.pi*s)*tf.sin(np.pi*t)
        u_10 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * intervalL) * tf.sin(np.pi * z) * tf.sin(np.pi * s) * tf.sin(np.pi * t)
        u_11 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * intervalR) * tf.sin(np.pi * z) * tf.sin(np.pi * s) * tf.sin(np.pi * t)
        u_20 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * intervalL) * tf.sin(np.pi * s) * tf.sin(np.pi * t)
        u_21 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * intervalR) * tf.sin(np.pi * s) * tf.sin(np.pi * t)
        u_30 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * z) * tf.sin(np.pi * intervalL) * tf.sin(np.pi * t)
        u_31 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * z) * tf.sin(np.pi * intervalR) * tf.sin(np.pi * t)
        u_40 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * z) * tf.sin(np.pi * s) * tf.sin(np.pi * intervalL)
        u_41 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * z) * tf.sin(np.pi * s) * tf.sin(np.pi * intervalR)
        return fside, utrue, u_00, u_01, u_10, u_11, u_20, u_21, u_30, u_31, u_40, u_41
|
[
"tensorflow.sin",
"numpy.square",
"tensorflow.pow",
"tensorflow.ones_like",
"tensorflow.exp",
"tensorflow.square"
] |
[((320, 337), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (326, 337), True, 'import tensorflow as tf\n'), ((363, 388), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalL)'], {}), '(np.pi * intervalL)\n', (369, 388), True, 'import tensorflow as tf\n'), ((415, 440), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalR)'], {}), '(np.pi * intervalR)\n', (421, 440), True, 'import tensorflow as tf\n'), ((277, 294), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (283, 294), True, 'import tensorflow as tf\n'), ((814, 830), 'tensorflow.exp', 'tf.exp', (['(-1.0 * x)'], {}), '(-1.0 * x)\n', (820, 830), True, 'import tensorflow as tf\n'), ((883, 903), 'tensorflow.exp', 'tf.exp', (['(-left_bottom)'], {}), '(-left_bottom)\n', (889, 903), True, 'import tensorflow as tf\n'), ((974, 992), 'tensorflow.exp', 'tf.exp', (['(-right_top)'], {}), '(-right_top)\n', (980, 992), True, 'import tensorflow as tf\n'), ((1062, 1072), 'tensorflow.exp', 'tf.exp', (['(-x)'], {}), '(-x)\n', (1068, 1072), True, 'import tensorflow as tf\n'), ((1135, 1145), 'tensorflow.exp', 'tf.exp', (['(-x)'], {}), '(-x)\n', (1141, 1145), True, 'import tensorflow as tf\n'), ((5253, 5270), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (5259, 5270), True, 'import tensorflow as tf\n'), ((5334, 5351), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (5340, 5351), True, 'import tensorflow as tf\n'), ((5422, 5439), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (5428, 5439), True, 'import tensorflow as tf\n'), ((5510, 5527), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (5516, 5527), True, 'import tensorflow as tf\n'), ((5598, 5615), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (5604, 5615), True, 'import tensorflow as tf\n'), ((5686, 5703), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (5692, 5703), True, 'import tensorflow as tf\n'), ((5766, 5791), 
'tensorflow.sin', 'tf.sin', (['(np.pi * intervalL)'], {}), '(np.pi * intervalL)\n', (5772, 5791), True, 'import tensorflow as tf\n'), ((5854, 5879), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalR)'], {}), '(np.pi * intervalR)\n', (5860, 5879), True, 'import tensorflow as tf\n'), ((6201, 6218), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (6207, 6218), True, 'import tensorflow as tf\n'), ((6320, 6337), 'tensorflow.sin', 'tf.sin', (['(np.pi * t)'], {}), '(np.pi * t)\n', (6326, 6337), True, 'import tensorflow as tf\n'), ((6446, 6463), 'tensorflow.sin', 'tf.sin', (['(np.pi * t)'], {}), '(np.pi * t)\n', (6452, 6463), True, 'import tensorflow as tf\n'), ((6572, 6589), 'tensorflow.sin', 'tf.sin', (['(np.pi * t)'], {}), '(np.pi * t)\n', (6578, 6589), True, 'import tensorflow as tf\n'), ((6714, 6731), 'tensorflow.sin', 'tf.sin', (['(np.pi * t)'], {}), '(np.pi * t)\n', (6720, 6731), True, 'import tensorflow as tf\n'), ((6858, 6875), 'tensorflow.sin', 'tf.sin', (['(np.pi * t)'], {}), '(np.pi * t)\n', (6864, 6875), True, 'import tensorflow as tf\n'), ((7002, 7019), 'tensorflow.sin', 'tf.sin', (['(np.pi * t)'], {}), '(np.pi * t)\n', (7008, 7019), True, 'import tensorflow as tf\n'), ((7146, 7163), 'tensorflow.sin', 'tf.sin', (['(np.pi * t)'], {}), '(np.pi * t)\n', (7152, 7163), True, 'import tensorflow as tf\n'), ((7290, 7307), 'tensorflow.sin', 'tf.sin', (['(np.pi * t)'], {}), '(np.pi * t)\n', (7296, 7307), True, 'import tensorflow as tf\n'), ((7434, 7451), 'tensorflow.sin', 'tf.sin', (['(np.pi * t)'], {}), '(np.pi * t)\n', (7440, 7451), True, 'import tensorflow as tf\n'), ((7570, 7595), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalL)'], {}), '(np.pi * intervalL)\n', (7576, 7595), True, 'import tensorflow as tf\n'), ((7714, 7739), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalR)'], {}), '(np.pi * intervalR)\n', (7720, 7739), True, 'import tensorflow as tf\n'), ((731, 747), 'tensorflow.exp', 'tf.exp', (['(-1.0 * x)'], {}), '(-1.0 * x)\n', (737, 
747), True, 'import tensorflow as tf\n'), ((835, 847), 'tensorflow.pow', 'tf.pow', (['y', '(3)'], {}), '(y, 3)\n', (841, 847), True, 'import tensorflow as tf\n'), ((907, 919), 'tensorflow.pow', 'tf.pow', (['y', '(3)'], {}), '(y, 3)\n', (913, 919), True, 'import tensorflow as tf\n'), ((996, 1008), 'tensorflow.pow', 'tf.pow', (['y', '(3)'], {}), '(y, 3)\n', (1002, 1008), True, 'import tensorflow as tf\n'), ((1076, 1098), 'tensorflow.pow', 'tf.pow', (['left_bottom', '(3)'], {}), '(left_bottom, 3)\n', (1082, 1098), True, 'import tensorflow as tf\n'), ((1149, 1169), 'tensorflow.pow', 'tf.pow', (['right_top', '(3)'], {}), '(right_top, 3)\n', (1155, 1169), True, 'import tensorflow as tf\n'), ((1400, 1412), 'tensorflow.square', 'tf.square', (['y'], {}), '(y)\n', (1409, 1412), True, 'import tensorflow as tf\n'), ((1413, 1430), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (1419, 1430), True, 'import tensorflow as tf\n'), ((1463, 1475), 'tensorflow.square', 'tf.square', (['y'], {}), '(y)\n', (1472, 1475), True, 'import tensorflow as tf\n'), ((1478, 1505), 'tensorflow.sin', 'tf.sin', (['(np.pi * left_bottom)'], {}), '(np.pi * left_bottom)\n', (1484, 1505), True, 'import tensorflow as tf\n'), ((1539, 1551), 'tensorflow.square', 'tf.square', (['y'], {}), '(y)\n', (1548, 1551), True, 'import tensorflow as tf\n'), ((1554, 1579), 'tensorflow.sin', 'tf.sin', (['(np.pi * right_top)'], {}), '(np.pi * right_top)\n', (1560, 1579), True, 'import tensorflow as tf\n'), ((1614, 1636), 'tensorflow.square', 'tf.square', (['left_bottom'], {}), '(left_bottom)\n', (1623, 1636), True, 'import tensorflow as tf\n'), ((1639, 1656), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (1645, 1656), True, 'import tensorflow as tf\n'), ((1688, 1708), 'tensorflow.square', 'tf.square', (['right_top'], {}), '(right_top)\n', (1697, 1708), True, 'import tensorflow as tf\n'), ((1711, 1728), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (1717, 1728), 
True, 'import tensorflow as tf\n'), ((5302, 5319), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (5308, 5319), True, 'import tensorflow as tf\n'), ((5318, 5335), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (5324, 5335), True, 'import tensorflow as tf\n'), ((5382, 5407), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalL)'], {}), '(np.pi * intervalL)\n', (5388, 5407), True, 'import tensorflow as tf\n'), ((5406, 5423), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (5412, 5423), True, 'import tensorflow as tf\n'), ((5470, 5495), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalR)'], {}), '(np.pi * intervalR)\n', (5476, 5495), True, 'import tensorflow as tf\n'), ((5494, 5511), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (5500, 5511), True, 'import tensorflow as tf\n'), ((5558, 5575), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (5564, 5575), True, 'import tensorflow as tf\n'), ((5574, 5599), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalL)'], {}), '(np.pi * intervalL)\n', (5580, 5599), True, 'import tensorflow as tf\n'), ((5646, 5663), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (5652, 5663), True, 'import tensorflow as tf\n'), ((5662, 5687), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalR)'], {}), '(np.pi * intervalR)\n', (5668, 5687), True, 'import tensorflow as tf\n'), ((5734, 5751), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (5740, 5751), True, 'import tensorflow as tf\n'), ((5750, 5767), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (5756, 5767), True, 'import tensorflow as tf\n'), ((5822, 5839), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (5828, 5839), True, 'import tensorflow as tf\n'), ((5838, 5855), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (5844, 5855), True, 'import tensorflow as tf\n'), ((6304, 6321), 'tensorflow.sin', 
'tf.sin', (['(np.pi * s)'], {}), '(np.pi * s)\n', (6310, 6321), True, 'import tensorflow as tf\n'), ((6430, 6447), 'tensorflow.sin', 'tf.sin', (['(np.pi * s)'], {}), '(np.pi * s)\n', (6436, 6447), True, 'import tensorflow as tf\n'), ((6556, 6573), 'tensorflow.sin', 'tf.sin', (['(np.pi * s)'], {}), '(np.pi * s)\n', (6562, 6573), True, 'import tensorflow as tf\n'), ((6694, 6711), 'tensorflow.sin', 'tf.sin', (['(np.pi * s)'], {}), '(np.pi * s)\n', (6700, 6711), True, 'import tensorflow as tf\n'), ((6838, 6855), 'tensorflow.sin', 'tf.sin', (['(np.pi * s)'], {}), '(np.pi * s)\n', (6844, 6855), True, 'import tensorflow as tf\n'), ((6982, 6999), 'tensorflow.sin', 'tf.sin', (['(np.pi * s)'], {}), '(np.pi * s)\n', (6988, 6999), True, 'import tensorflow as tf\n'), ((7126, 7143), 'tensorflow.sin', 'tf.sin', (['(np.pi * s)'], {}), '(np.pi * s)\n', (7132, 7143), True, 'import tensorflow as tf\n'), ((7262, 7287), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalL)'], {}), '(np.pi * intervalL)\n', (7268, 7287), True, 'import tensorflow as tf\n'), ((7406, 7431), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalR)'], {}), '(np.pi * intervalR)\n', (7412, 7431), True, 'import tensorflow as tf\n'), ((7550, 7567), 'tensorflow.sin', 'tf.sin', (['(np.pi * s)'], {}), '(np.pi * s)\n', (7556, 7567), True, 'import tensorflow as tf\n'), ((7694, 7711), 'tensorflow.sin', 'tf.sin', (['(np.pi * s)'], {}), '(np.pi * s)\n', (7700, 7711), True, 'import tensorflow as tf\n'), ((758, 770), 'tensorflow.pow', 'tf.pow', (['y', '(3)'], {}), '(y, 3)\n', (764, 770), True, 'import tensorflow as tf\n'), ((1313, 1330), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (1319, 1330), True, 'import tensorflow as tf\n'), ((1957, 1966), 'tensorflow.exp', 'tf.exp', (['x'], {}), '(x)\n', (1963, 1966), True, 'import tensorflow as tf\n'), ((1967, 1976), 'tensorflow.exp', 'tf.exp', (['y'], {}), '(y)\n', (1973, 1976), True, 'import tensorflow as tf\n'), ((2021, 2030), 'tensorflow.exp', 'tf.exp', (['y'], 
{}), '(y)\n', (2027, 2030), True, 'import tensorflow as tf\n'), ((2032, 2051), 'tensorflow.exp', 'tf.exp', (['left_bottom'], {}), '(left_bottom)\n', (2038, 2051), True, 'import tensorflow as tf\n'), ((2098, 2107), 'tensorflow.exp', 'tf.exp', (['y'], {}), '(y)\n', (2104, 2107), True, 'import tensorflow as tf\n'), ((2109, 2126), 'tensorflow.exp', 'tf.exp', (['right_top'], {}), '(right_top)\n', (2115, 2126), True, 'import tensorflow as tf\n'), ((2174, 2183), 'tensorflow.exp', 'tf.exp', (['x'], {}), '(x)\n', (2180, 2183), True, 'import tensorflow as tf\n'), ((2185, 2204), 'tensorflow.exp', 'tf.exp', (['left_bottom'], {}), '(left_bottom)\n', (2191, 2204), True, 'import tensorflow as tf\n'), ((2249, 2258), 'tensorflow.exp', 'tf.exp', (['x'], {}), '(x)\n', (2255, 2258), True, 'import tensorflow as tf\n'), ((2260, 2277), 'tensorflow.exp', 'tf.exp', (['right_top'], {}), '(right_top)\n', (2266, 2277), True, 'import tensorflow as tf\n'), ((6288, 6305), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (6294, 6305), True, 'import tensorflow as tf\n'), ((6414, 6431), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (6420, 6431), True, 'import tensorflow as tf\n'), ((6540, 6557), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (6546, 6557), True, 'import tensorflow as tf\n'), ((6674, 6691), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (6680, 6691), True, 'import tensorflow as tf\n'), ((6818, 6835), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (6824, 6835), True, 'import tensorflow as tf\n'), ((6954, 6979), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalL)'], {}), '(np.pi * intervalL)\n', (6960, 6979), True, 'import tensorflow as tf\n'), ((7098, 7123), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalR)'], {}), '(np.pi * intervalR)\n', (7104, 7123), True, 'import tensorflow as tf\n'), ((7242, 7259), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (7248, 
7259), True, 'import tensorflow as tf\n'), ((7386, 7403), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (7392, 7403), True, 'import tensorflow as tf\n'), ((7530, 7547), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (7536, 7547), True, 'import tensorflow as tf\n'), ((7674, 7691), 'tensorflow.sin', 'tf.sin', (['(np.pi * z)'], {}), '(np.pi * z)\n', (7680, 7691), True, 'import tensorflow as tf\n'), ((1336, 1352), 'numpy.square', 'np.square', (['np.pi'], {}), '(np.pi)\n', (1345, 1352), True, 'import numpy as np\n'), ((1353, 1365), 'tensorflow.square', 'tf.square', (['y'], {}), '(y)\n', (1362, 1365), True, 'import tensorflow as tf\n'), ((1905, 1914), 'tensorflow.exp', 'tf.exp', (['x'], {}), '(x)\n', (1911, 1914), True, 'import tensorflow as tf\n'), ((1915, 1924), 'tensorflow.exp', 'tf.exp', (['y'], {}), '(y)\n', (1921, 1924), True, 'import tensorflow as tf\n'), ((2452, 2467), 'tensorflow.ones_like', 'tf.ones_like', (['x'], {}), '(x)\n', (2464, 2467), True, 'import tensorflow as tf\n'), ((6256, 6273), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (6262, 6273), True, 'import tensorflow as tf\n'), ((6272, 6289), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (6278, 6289), True, 'import tensorflow as tf\n'), ((6374, 6399), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalL)'], {}), '(np.pi * intervalL)\n', (6380, 6399), True, 'import tensorflow as tf\n'), ((6398, 6415), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (6404, 6415), True, 'import tensorflow as tf\n'), ((6500, 6525), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalR)'], {}), '(np.pi * intervalR)\n', (6506, 6525), True, 'import tensorflow as tf\n'), ((6524, 6541), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (6530, 6541), True, 'import tensorflow as tf\n'), ((6626, 6643), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (6632, 6643), True, 'import tensorflow as 
tf\n'), ((6646, 6671), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalL)'], {}), '(np.pi * intervalL)\n', (6652, 6671), True, 'import tensorflow as tf\n'), ((6770, 6787), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (6776, 6787), True, 'import tensorflow as tf\n'), ((6790, 6815), 'tensorflow.sin', 'tf.sin', (['(np.pi * intervalR)'], {}), '(np.pi * intervalR)\n', (6796, 6815), True, 'import tensorflow as tf\n'), ((6914, 6931), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (6920, 6931), True, 'import tensorflow as tf\n'), ((6934, 6951), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (6940, 6951), True, 'import tensorflow as tf\n'), ((7058, 7075), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (7064, 7075), True, 'import tensorflow as tf\n'), ((7078, 7095), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (7084, 7095), True, 'import tensorflow as tf\n'), ((7202, 7219), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (7208, 7219), True, 'import tensorflow as tf\n'), ((7222, 7239), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (7228, 7239), True, 'import tensorflow as tf\n'), ((7346, 7363), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (7352, 7363), True, 'import tensorflow as tf\n'), ((7366, 7383), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (7372, 7383), True, 'import tensorflow as tf\n'), ((7490, 7507), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (7496, 7507), True, 'import tensorflow as tf\n'), ((7510, 7527), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (7516, 7527), True, 'import tensorflow as tf\n'), ((7634, 7651), 'tensorflow.sin', 'tf.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (7640, 7651), True, 'import tensorflow as tf\n'), ((7654, 7671), 'tensorflow.sin', 'tf.sin', (['(np.pi * y)'], {}), '(np.pi * y)\n', (7660, 7671), 
True, 'import tensorflow as tf\n'), ((2505, 2517), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (2511, 2517), True, 'import tensorflow as tf\n'), ((2518, 2530), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (2524, 2530), True, 'import tensorflow as tf\n'), ((2571, 2583), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (2577, 2583), True, 'import tensorflow as tf\n'), ((2593, 2615), 'tensorflow.pow', 'tf.pow', (['left_bottom', '(2)'], {}), '(left_bottom, 2)\n', (2599, 2615), True, 'import tensorflow as tf\n'), ((2656, 2668), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (2662, 2668), True, 'import tensorflow as tf\n'), ((2678, 2698), 'tensorflow.pow', 'tf.pow', (['right_top', '(2)'], {}), '(right_top, 2)\n', (2684, 2698), True, 'import tensorflow as tf\n'), ((2740, 2752), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (2746, 2752), True, 'import tensorflow as tf\n'), ((2762, 2784), 'tensorflow.pow', 'tf.pow', (['left_bottom', '(2)'], {}), '(left_bottom, 2)\n', (2768, 2784), True, 'import tensorflow as tf\n'), ((2823, 2835), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (2829, 2835), True, 'import tensorflow as tf\n'), ((2845, 2865), 'tensorflow.pow', 'tf.pow', (['right_top', '(2)'], {}), '(right_top, 2)\n', (2851, 2865), True, 'import tensorflow as tf\n'), ((3043, 3058), 'tensorflow.ones_like', 'tf.ones_like', (['x'], {}), '(x)\n', (3055, 3058), True, 'import tensorflow as tf\n'), ((3392, 3414), 'tensorflow.pow', 'tf.pow', (['left_bottom', '(2)'], {}), '(left_bottom, 2)\n', (3398, 3414), True, 'import tensorflow as tf\n'), ((3714, 3726), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (3720, 3726), True, 'import tensorflow as tf\n'), ((3727, 3739), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (3733, 3739), True, 'import tensorflow as tf\n'), ((3781, 3793), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (3787, 3793), True, 'import tensorflow as 
tf\n'), ((3796, 3808), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (3802, 3808), True, 'import tensorflow as tf\n'), ((3851, 3873), 'tensorflow.pow', 'tf.pow', (['left_bottom', '(2)'], {}), '(left_bottom, 2)\n', (3857, 3873), True, 'import tensorflow as tf\n'), ((3876, 3888), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (3882, 3888), True, 'import tensorflow as tf\n'), ((3930, 3950), 'tensorflow.pow', 'tf.pow', (['right_top', '(2)'], {}), '(right_top, 2)\n', (3936, 3950), True, 'import tensorflow as tf\n'), ((3953, 3965), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (3959, 3965), True, 'import tensorflow as tf\n'), ((4008, 4020), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (4014, 4020), True, 'import tensorflow as tf\n'), ((4023, 4045), 'tensorflow.pow', 'tf.pow', (['left_bottom', '(2)'], {}), '(left_bottom, 2)\n', (4029, 4045), True, 'import tensorflow as tf\n'), ((4085, 4097), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (4091, 4097), True, 'import tensorflow as tf\n'), ((4100, 4120), 'tensorflow.pow', 'tf.pow', (['right_top', '(2)'], {}), '(right_top, 2)\n', (4106, 4120), True, 'import tensorflow as tf\n'), ((3098, 3110), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (3104, 3110), True, 'import tensorflow as tf\n'), ((3111, 3123), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (3117, 3123), True, 'import tensorflow as tf\n'), ((3174, 3186), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (3180, 3186), True, 'import tensorflow as tf\n'), ((3196, 3218), 'tensorflow.pow', 'tf.pow', (['left_bottom', '(2)'], {}), '(left_bottom, 2)\n', (3202, 3218), True, 'import tensorflow as tf\n'), ((3277, 3289), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (3283, 3289), True, 'import tensorflow as tf\n'), ((3299, 3319), 'tensorflow.pow', 'tf.pow', (['right_top', '(2)'], {}), '(right_top, 2)\n', (3305, 3319), True, 'import tensorflow as tf\n'), ((3377, 
3389), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (3383, 3389), True, 'import tensorflow as tf\n'), ((3471, 3483), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (3477, 3483), True, 'import tensorflow as tf\n'), ((3493, 3513), 'tensorflow.pow', 'tf.pow', (['right_top', '(2)'], {}), '(right_top, 2)\n', (3499, 3513), True, 'import tensorflow as tf\n'), ((4310, 4322), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (4316, 4322), True, 'import tensorflow as tf\n'), ((4323, 4335), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (4329, 4335), True, 'import tensorflow as tf\n'), ((4426, 4441), 'tensorflow.ones_like', 'tf.ones_like', (['y'], {}), '(y)\n', (4438, 4441), True, 'import tensorflow as tf\n'), ((4406, 4421), 'tensorflow.ones_like', 'tf.ones_like', (['x'], {}), '(x)\n', (4418, 4421), True, 'import tensorflow as tf\n'), ((4375, 4387), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (4381, 4387), True, 'import tensorflow as tf\n'), ((4388, 4400), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (4394, 4400), True, 'import tensorflow as tf\n'), ((4494, 4516), 'tensorflow.pow', 'tf.pow', (['left_bottom', '(2)'], {}), '(left_bottom, 2)\n', (4500, 4516), True, 'import tensorflow as tf\n'), ((4518, 4530), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (4524, 4530), True, 'import tensorflow as tf\n'), ((4601, 4621), 'tensorflow.pow', 'tf.pow', (['right_top', '(2)'], {}), '(right_top, 2)\n', (4607, 4621), True, 'import tensorflow as tf\n'), ((4623, 4635), 'tensorflow.pow', 'tf.pow', (['y', '(2)'], {}), '(y, 2)\n', (4629, 4635), True, 'import tensorflow as tf\n'), ((4705, 4717), 'tensorflow.pow', 'tf.pow', (['x', '(2)'], {}), '(x, 2)\n', (4711, 4717), True, 'import tensorflow as tf\n'), ((4719, 4741), 'tensorflow.pow', 'tf.pow', (['left_bottom', '(2)'], {}), '(left_bottom, 2)\n', (4725, 4741), True, 'import tensorflow as tf\n'), ((4810, 4822), 'tensorflow.pow', 'tf.pow', (['x', 
'(2)'], {}), '(x, 2)\n', (4816, 4822), True, 'import tensorflow as tf\n'), ((4824, 4844), 'tensorflow.pow', 'tf.pow', (['right_top', '(2)'], {}), '(right_top, 2)\n', (4830, 4844), True, 'import tensorflow as tf\n')]
|
"""Generate a noisy synthetic parabola dataset with modelexp and write it to disk."""
import modelexp
from modelexp.experiments import Generic
from modelexp.models.Generic import Parabola
import numpy as np
import random

# Set up the experiment and attach a parabola model on [-3, 3] (100 points).
app = modelexp.App()
app.setExperiment(Generic)
modelRef = app.setModel(Parabola)
modelRef.defineDomain(np.linspace(-3, 3, 100))

# Model parameters for the parabola.
for param_name, param_value in (('a', 1.3), ('x0', 0.3), ('c', -0.2)):
    modelRef.setParam(param_name, param_value)
modelRef.calcModel()

# 5 % relative uncertainty; draw one Gaussian sample per model point.
sig_y = 0.05*modelRef.y
randomized_y = np.array([random.gauss(y_val, 0.05*y_val) for y_val in modelRef.y])

# Dump x, noisy y, and sigma as tab-separated columns.
with open('parabolaData.xye', 'w') as f:
    for x_val, y_val, s_val in zip(modelRef.x, randomized_y, sig_y):
        f.write(f'{x_val}\t{y_val}\t{s_val}\n')
|
[
"modelexp.App",
"random.gauss",
"numpy.array",
"numpy.linspace"
] |
[((142, 156), 'modelexp.App', 'modelexp.App', ([], {}), '()\n', (154, 156), False, 'import modelexp\n'), ((536, 558), 'numpy.array', 'np.array', (['randomized_y'], {}), '(randomized_y)\n', (544, 558), True, 'import numpy as np\n'), ((242, 265), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(100)'], {}), '(-3, 3, 100)\n', (253, 265), True, 'import numpy as np\n'), ((472, 521), 'random.gauss', 'random.gauss', (['modelRef.y[i]', '(0.05 * modelRef.y[i])'], {}), '(modelRef.y[i], 0.05 * modelRef.y[i])\n', (484, 521), False, 'import random\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-09 08:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for the league app.

    Creates the MettEater model (a person identified by first/last name) and
    the Metting model (a dated event with many eaters and one organizer).
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # People who eat mett: simple name-only records.
        migrations.CreateModel(
            name='MettEater',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=25)),
                ('last_name', models.CharField(max_length=25)),
            ],
        ),
        # A mett event: date, attendees (M2M) and a single organizer
        # (FK; reverse accessor 'organized_mettings', cascades on delete).
        migrations.CreateModel(
            name='Metting',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField()),
                ('eaters', models.ManyToManyField(to='league.MettEater')),
                ('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='organized_mettings', to='league.MettEater')),
            ],
        ),
    ]
|
[
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.DateField"
] |
[((403, 496), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (419, 496), False, 'from django.db import migrations, models\n'), ((526, 557), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(25)'}), '(max_length=25)\n', (542, 557), False, 'from django.db import migrations, models\n'), ((590, 621), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(25)'}), '(max_length=25)\n', (606, 621), False, 'from django.db import migrations, models\n'), ((754, 847), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (770, 847), False, 'from django.db import migrations, models\n'), ((871, 889), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (887, 889), False, 'from django.db import migrations, models\n'), ((919, 964), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""league.MettEater"""'}), "(to='league.MettEater')\n", (941, 964), False, 'from django.db import migrations, models\n'), ((997, 1122), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""organized_mettings"""', 'to': '"""league.MettEater"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='organized_mettings', to='league.MettEater')\n", (1014, 1122), False, 'from django.db import migrations, models\n')]
|
from paleo.profilers.flops_profiler import FlopsProfiler as PaleoFlopsProfiler
from paleo.profilers.base import ProfilerOptions
class FlopsProfiler:
    """Thin convenience wrapper around Paleo's FLOPs-based layer profiler."""

    @staticmethod
    def profile(layer_spec, device, backward=False, batch_size=None, comm_penalization=1, comp_penalization=1):
        """Estimate the execution time of one layer on a device.

        Args:
            layer_spec: spec whose `.operation` holds the Paleo layer; must not be None.
            device: Paleo device description to profile against.
            backward: profile the backward pass instead of the forward pass.
            batch_size: if truthy, overrides the layer's batch size before profiling.
            comm_penalization: penalization factor applied to communication time.
            comp_penalization: penalization factor applied to computation time.

        Returns:
            Estimated computation time plus communication time for the layer.
        """
        op = layer_spec.operation
        assert op is not None, f'{layer_spec} has no operation'
        if batch_size:
            op.batch_size = batch_size

        # Configure the Paleo profiler for the requested pass and penalties;
        # cuDNN heuristics and bias/activation costs are deliberately excluded.
        opts = ProfilerOptions()
        opts.direction = 'backward' if backward else 'forward'
        opts.use_cudnn_heuristics = False
        opts.include_bias_and_activation = False
        opts.ppp_comm = comm_penalization
        opts.ppp_comp = comp_penalization

        timing = PaleoFlopsProfiler(opts, device).profile(op, cross_device_bandwidth=0)
        return timing.comp_time + timing.comm_time
|
[
"paleo.profilers.flops_profiler.FlopsProfiler",
"paleo.profilers.base.ProfilerOptions"
] |
[((480, 497), 'paleo.profilers.base.ProfilerOptions', 'ProfilerOptions', ([], {}), '()\n', (495, 497), False, 'from paleo.profilers.base import ProfilerOptions\n'), ((846, 890), 'paleo.profilers.flops_profiler.FlopsProfiler', 'PaleoFlopsProfiler', (['profiler_options', 'device'], {}), '(profiler_options, device)\n', (864, 890), True, 'from paleo.profilers.flops_profiler import FlopsProfiler as PaleoFlopsProfiler\n')]
|
import keras.layers
import numpy as np
import random
import string
import tensorflow as tf
from .common import random_string
def convert_gemm(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert Linear.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers
    """
    print('Converting Linear ...')

    # Choose the keras layer name according to the naming policy.
    if names == 'short':
        tf_name = 'FC' + random_string(6)
    elif names == 'keep':
        tf_name = w_name
    else:
        tf_name = w_name + str(random.random())

    # Keras stores the kernel as (in, out), so transpose the PyTorch weight.
    weight_matrix = weights['{0}.weight'.format(w_name)].numpy().transpose()
    input_channels, output_channels = weight_matrix.shape

    # Attach the bias only when the state_dict actually has one.
    bias_key = '{0}.bias'.format(w_name)
    has_bias = bias_key in weights
    if has_bias:
        keras_weights = [weight_matrix, weights[bias_key].numpy()]
    else:
        keras_weights = [weight_matrix]

    dense = keras.layers.Dense(
        output_channels,
        weights=keras_weights, use_bias=has_bias, name=tf_name,
        bias_initializer='zeros', kernel_initializer='zeros',
    )
    layers[scope_name] = dense(layers[inputs[0]])
def convert_matmul(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert matmul layer.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
        names: use short names for keras layers

    Raises:
        AssertionError: if the node has an unsupported number of inputs.
    """
    print('Converting matmul ...')

    # Choose the keras layer name according to the naming policy.
    if names == 'short':
        tf_name = 'MMUL' + random_string(4)
    elif names == 'keep':
        tf_name = w_name
    else:
        tf_name = w_name + str(random.random())

    # The original 1-input and 2-input branches were byte-for-byte identical,
    # so they are merged here; behavior is unchanged.
    if len(inputs) in (1, 2):
        weights_name = '{0}.weight'.format(w_name)
        # Keras stores the kernel as (in, out), so transpose the PyTorch weight.
        W = weights[weights_name].numpy().transpose()
        input_channels, output_channels = W.shape
        keras_weights = [W]
        dense = keras.layers.Dense(
            output_channels,
            weights=keras_weights, use_bias=False, name=tf_name,
            bias_initializer='zeros', kernel_initializer='zeros',
        )
        layers[scope_name] = dense(layers[inputs[0]])
    else:
        raise AssertionError('Cannot convert matmul layer')
|
[
"random.random"
] |
[((743, 758), 'random.random', 'random.random', ([], {}), '()\n', (756, 758), False, 'import random\n'), ((1983, 1998), 'random.random', 'random.random', ([], {}), '()\n', (1996, 1998), False, 'import random\n')]
|