from ...utils import verbose
from ..utils import (_data_path, _data_path_doc,
_get_version, _version_doc)
@verbose
def data_path(path=None, force_update=False, update_path=False,
download=True, verbose=None): # noqa: D103
return _data_path(path=path, force_update=force_update,
update_path=update_path, name='fake',
download=download)
data_path.__doc__ = _data_path_doc.format(name='fake',
conf='MNE_DATASETS_FAKE_PATH')
def get_version(): # noqa: D103
return _get_version('fake')
get_version.__doc__ = _version_doc.format(name='fake')
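# Hedged usage sketch (not part of the original module): these helpers are
# generated for the test-only 'fake' dataset, so a caller inside mne might do
# something like the following; download=False is an assumption to avoid any
# network access.
#
#     from mne.datasets import _fake
#     print(_fake.get_version())
#     print(_fake.data_path(download=False))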
|
{
"content_hash": "d95174945e387befa017d0b5d5031f15",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 72,
"avg_line_length": 33.65,
"alnum_prop": 0.5780089153046062,
"repo_name": "nicproulx/mne-python",
"id": "bbd5df577737747c1f92bb2c17e28905ee47c9b2",
"size": "872",
"binary": false,
"copies": "9",
"ref": "refs/heads/placeholder",
"path": "mne/datasets/_fake/_fake.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Csound Document",
"bytes": "69806"
},
{
"name": "Makefile",
"bytes": "3723"
},
{
"name": "Python",
"bytes": "5866703"
},
{
"name": "Shell",
"bytes": "936"
}
],
"symlink_target": ""
}
|
import json
from sdcclient._common import _SdcCommon
class ScanningAlertsClientV1(_SdcCommon):
def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None):
super(ScanningAlertsClientV1, self).__init__(token, sdc_url, ssl_verify, custom_headers)
self.product = "SDS"
class RepositoryAlertTrigger:
@staticmethod
def new_image_analyzed(alert):
alert["triggers"]["analysis_update"] = True
@staticmethod
def scan_result_change_fail(alert):
alert["triggers"]["policy_eval"] = True
alert["onlyPassFail"] = True
@staticmethod
def scan_result_change_any(alert):
alert["triggers"]["policy_eval"] = True
alert["onlyPassFail"] = False
@staticmethod
def cve_update(alert):
alert["triggers"]["vuln_update"] = True
def add_repository_alert(self, name, registry, repository, tag, description="", triggers=None, notification_channels=None, enabled=True):
'''
Create a new repository alert
Args:
name(str): The name of the alert.
registry(str): Registry to alert (e.g. docker.io)
repository(str): Repository to alert (e.g. sysdig/agent)
tag(str): Tag to alert (e.g. latest)
description(str): The description of the alert.
            triggers(list): A list of RepositoryAlertTrigger indicating which triggers should be enabled. (default: [ScanningAlertsClientV1.RepositoryAlertTrigger.new_image_analyzed])
notification_channels(list): A list of notification channel ids.
enabled(bool): Whether this alert should actually be applied. Defaults to true.
Returns:
The created alert.
Examples:
>>> client = ScanningAlertsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"),
>>> token=os.getenv("SDC_SECURE_TOKEN"))
>>> ok, res = client.add_repository_alert(
>>> name="A name",
>>> registry="docker.io",
>>> repository="sysdig/agent",
>>> tag="latest",
>>> description="A description",
>>> triggers=[ScanningAlertsClientV1.RepositoryAlertTrigger.new_image_analyzed,
>>> ScanningAlertsClientV1.RepositoryAlertTrigger.scan_result_change_fail,
>>> ScanningAlertsClientV1.RepositoryAlertTrigger.cve_update]
            >>> )
>>> if not ok:
>>> print(f"error creating alert: {res}")
>>> alert_id = res["alertId"]
'''
if not triggers:
triggers = [ScanningAlertsClientV1.RepositoryAlertTrigger.new_image_analyzed]
alert = {
'name': name,
'description': description,
'type': 'repository',
'triggers': {
"unscanned": False,
"analysis_update": False,
"vuln_update": False,
"policy_eval": False,
"failed": False
},
'repositories': [{
'registry': registry,
'repository': repository,
'tag': tag,
}],
"onlyPassFail": False,
"skipEventSend": False,
'enabled': enabled,
'notificationChannelIds': notification_channels,
}
for trigger in triggers:
trigger(alert)
res = self.http.post(f"{self.url}/api/scanning/v1/alerts", headers=self.hdrs, data=json.dumps(alert), verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
def update_repository_alert(self, id, name=None, registry=None, repository=None, tag=None, description=None, triggers=None, notification_channels=None, enabled=None):
'''
        Updates a repository alert. Fields that are not specified will not be modified.
Args:
id(str): Alert ID.
name(str): The name of the alert.
registry(str): Registry to alert (e.g. docker.io)
repository(str): Repository to alert (e.g. sysdig/agent)
tag(str): Tag to alert (e.g. latest)
description(str): The description of the alert.
            triggers(list): A list of RepositoryAlertTrigger indicating which triggers should be enabled. If not provided, the existing triggers are left unchanged.
            notification_channels(list): A list of notification channel ids.
            enabled(bool): Whether this alert should actually be applied. If not provided, the current value is kept.
Returns:
The updated alert.
Examples:
>>> client = ScanningAlertsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"),
>>> token=os.getenv("SDC_SECURE_TOKEN"))
>>> ok, res = client.update_repository_alert(
>>> id=alert_id,
>>> name="An updated name",
>>> registry="updated_registry",
>>> repository="updated_repository",
>>> tag="v1",
>>> description="An updated description",
>>> triggers=[ScanningAlertsClientV1.RepositoryAlertTrigger.scan_result_change_fail]
>>> )
>>> if not ok:
>>> print(f"error updating alert: {res}")
>>> alert_id = res["alertId"]
'''
ok, alert = self.get_alert(id)
if not ok:
return False, f"unable to retrieve alert by ID {id}: {alert}"
if name is not None:
alert["name"] = name
if description is not None:
alert["description"] = description
if registry is not None:
alert["repositories"][0]["registry"] = registry
if repository is not None:
alert["repositories"][0]["repository"] = repository
if tag is not None:
alert["repositories"][0]["tag"] = tag
if triggers is not None:
alert["triggers"] = {
"unscanned": False,
"analysis_update": False,
"vuln_update": False,
"policy_eval": False,
"failed": False
}
alert["onlyPassFail"] = False
for trigger in triggers:
trigger(alert)
if notification_channels is not None:
alert["notificationChannelIds"] = notification_channels
if enabled is not None:
alert["enabled"] = enabled
res = self.http.put(f"{self.url}/api/scanning/v1/alerts/{id}", headers=self.hdrs, data=json.dumps(alert), verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
class RuntimeAlertTrigger:
@staticmethod
def unscanned_image(alert):
alert["triggers"]["unscanned"] = True
@staticmethod
def scan_result_change_fail(alert):
alert["triggers"]["policy_eval"] = True
alert["onlyPassFail"] = True
@staticmethod
def scan_result_change_any(alert):
alert["triggers"]["policy_eval"] = True
alert["onlyPassFail"] = False
@staticmethod
def cve_update(alert):
alert["triggers"]["vuln_update"] = True
def add_runtime_alert(self, name, description="", scope="", triggers=None, notification_channels=None, enabled=True):
'''
Create a new runtime alert
Args:
name(str): The name of the alert.
description(str): The description of the alert.
scope(str): An AND-composed string of predicates that selects the scope in which the alert will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"')
triggers(list): A list of RuntimeAlertTrigger indicating which triggers should be enabled. (default: [ScanningAlertsClientV1.RuntimeAlertTrigger.unscanned_image])
notification_channels(list): A list of notification channel ids.
enabled(bool): Whether this alert should actually be applied. Defaults to true.
Returns:
The created alert.
Examples:
>>> client = ScanningAlertsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"),
>>> token=os.getenv("SDC_SECURE_TOKEN"))
>>> ok, res = client.add_runtime_alert(
>>> name="A name",
>>> description="A description",
>>> scope="",
>>> triggers=[ScanningAlertsClientV1.RuntimeAlertTrigger.unscanned_image,
>>> ScanningAlertsClientV1.RuntimeAlertTrigger.scan_result_change_fail,
>>> ScanningAlertsClientV1.RuntimeAlertTrigger.cve_update]
            >>> )
>>> if not ok:
>>> print(f"error creating alert: {res}")
>>> alert_id = res["alertId"]
'''
if not triggers:
triggers = [ScanningAlertsClientV1.RuntimeAlertTrigger.unscanned_image]
alert = {
'name': name,
'description': description,
'type': 'runtime',
'triggers': {
"unscanned": False,
"analysis_update": False,
"vuln_update": False,
"policy_eval": False,
"failed": False
},
'scope': scope,
"onlyPassFail": False,
"skipEventSend": False,
'enabled': enabled,
'notificationChannelIds': notification_channels,
}
for trigger in triggers:
trigger(alert)
res = self.http.post(f"{self.url}/api/scanning/v1/alerts", headers=self.hdrs, data=json.dumps(alert), verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
def update_runtime_alert(self, id, name=None, description=None, scope=None, triggers=None, notification_channels=None, enabled=None):
'''
        Updates a runtime alert. Fields that are not specified will not be modified.
Args:
id(str): Alert ID.
name(str): The name of the alert.
description(str): The description of the alert.
scope(str): An AND-composed string of predicates that selects the scope in which the alert will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"')
            triggers(list): A list of RuntimeAlertTrigger indicating which triggers should be enabled. If not provided, the existing triggers are left unchanged.
            notification_channels(list): A list of notification channel ids.
            enabled(bool): Whether this alert should actually be applied. If not provided, the current value is kept.
Returns:
The updated alert.
Examples:
>>> client = ScanningAlertsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"),
>>> token=os.getenv("SDC_SECURE_TOKEN"))
>>> ok, res = client.update_runtime_alert(
>>> id=alert_id,
>>> name="An updated name",
>>> description="An updated description",
>>> scope="agent.id = 'foo'",
>>> triggers=[ScanningAlertsClientV1.RuntimeAlertTrigger.scan_result_change_fail]
>>> )
>>> if not ok:
>>> print(f"error updating alert: {res}")
>>> alert_id = res["alertId"]
'''
ok, alert = self.get_alert(id)
if not ok:
return False, f"unable to retrieve alert by ID {id}: {alert}"
if name is not None:
alert["name"] = name
if description is not None:
alert["description"] = description
if scope is not None:
alert["scope"] = scope
if triggers is not None:
alert["triggers"] = {
"unscanned": False,
"analysis_update": False,
"vuln_update": False,
"policy_eval": False,
"failed": False
}
alert["onlyPassFail"] = False
for trigger in triggers:
trigger(alert)
if notification_channels is not None:
alert["notificationChannelIds"] = notification_channels
if enabled is not None:
alert["enabled"] = enabled
res = self.http.put(f"{self.url}/api/scanning/v1/alerts/{id}", headers=self.hdrs, data=json.dumps(alert), verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
def get_alert(self, alertid):
'''
Retrieve the scanning alert with the given id
Args:
alertid: Unique identifier associated with this alert.
Returns:
A JSON object containing the alert description.
Examples:
>>> client = ScanningAlertsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"),
>>> token=os.getenv("SDC_SECURE_TOKEN"))
>>> ok, res = client.get_alert(alert_id)
>>> if not ok:
>>> print(f"error retrieving alert {alert_id}: {res}")
>>> alert = res
'''
res = self.http.get(f"{self.url}/api/scanning/v1/alerts/{alertid}", headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
def list_alerts(self, limit=None, cursor=None):
'''
List the current set of scanning alerts.
Args:
limit(int): Maximum number of alerts in the response.
cursor: An opaque string representing the current position in the list of alerts. It's provided in the 'responseMetadata' of the list_alerts response.
Returns:
A JSON containing the list of alerts in the 'alerts' field, and the current cursor position in the 'responseMetadata' field.
Examples:
>>> client = ScanningAlertsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"),
>>> token=os.getenv("SDC_SECURE_TOKEN"))
            >>> ok, res = client.list_alerts()
>>> if not ok:
>>> print(f"error listing alerts: {res}")
>>> for alert in res["alerts"]:
>>> print(alert["alertId"])
>>>
>>> # Load more alerts
>>> if res["responseMetadata"] is not None:
>>> ok, res = client.list_alerts(cursor=res["responseMetadata"]["next_cursor"])
'''
url = f"{self.url}/api/scanning/v1/alerts"
if limit:
url += '?limit=' + str(limit)
if cursor:
url += '&cursor=' + cursor
res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
def delete_alert(self, policyid): # FIXME: policyid must be maintained for backwards compatibility reasons with older versions, but should be renamed to id or alert_id
'''
Delete the alert with the given id
Args:
policyid: Unique identifier associated with this alert.
Examples:
>>> client = ScanningAlertsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"),
>>> token=os.getenv("SDC_SECURE_TOKEN"))
>>> client.delete_alert(alert_id)
'''
res = self.http.delete(f"{self.url}/api/scanning/v1/alerts/{policyid}", headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.text]
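# Hedged usage sketch (not in the original module): paging through every
# scanning alert with the cursor API described in list_alerts(); the
# environment variables are assumptions.
#
#     import os
#     client = ScanningAlertsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"),
#                                     token=os.getenv("SDC_SECURE_TOKEN"))
#     ok, res = client.list_alerts(limit=100)
#     while ok:
#         for alert in res["alerts"]:
#             print(alert["alertId"])
#         meta = res.get("responseMetadata")
#         if not meta:
#             break
#         ok, res = client.list_alerts(limit=100, cursor=meta["next_cursor"])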
|
{
"content_hash": "f75ba42b370e7d1553ce3992e7ef9d8a",
"timestamp": "",
"source": "github",
"line_count": 381,
"max_line_length": 196,
"avg_line_length": 44.36482939632546,
"alnum_prop": 0.5406140921729871,
"repo_name": "draios/python-sdc-client",
"id": "7c721d0307f74189e973d48be2ee0e388f41f6a1",
"size": "16903",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sdcclient/secure/scanning/_alerts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "247480"
},
{
"name": "Shell",
"bytes": "13957"
}
],
"symlink_target": ""
}
|
__author__ = 'Jose Jimenez-Berni'
import RPi.GPIO as GPIO
import time
import rospy
from std_msgs.msg import Float32
class SonarControl():
def __init__(self):
        # Define the sonar pin (the same pin is used for both ping and echo)
        self.gpio_sonar = 8
        # Use physical pin numbering
GPIO.setmode(GPIO.BOARD)
# Disable warnings
GPIO.setwarnings(False)
rospy.init_node("sonar_control")
self.nodename = rospy.get_name()
rospy.loginfo("%s started" % self.nodename)
self.rate = rospy.get_param('~rate', 5.0) # the rate at which to publish
        # queue_size is required by modern rospy; omitting it triggers a warning
        self.pub_distance = rospy.Publisher('sonar_distance', Float32, queue_size=10)
def spin(self):
r = rospy.Rate(self.rate)
while not rospy.is_shutdown():
self.get_sonar_distance()
r.sleep()
def get_sonar_distance(self):
GPIO.setup(self.gpio_sonar, GPIO.OUT)
# Send 10us pulse to trigger
GPIO.output(self.gpio_sonar, True)
time.sleep(0.00001)
GPIO.output(self.gpio_sonar, False)
start = time.time()
count = time.time()
GPIO.setup(self.gpio_sonar, GPIO.IN)
while GPIO.input(self.gpio_sonar) == 0 and time.time()-count < 0.1:
start = time.time()
count = time.time()
stop = count
while GPIO.input(self.gpio_sonar) == 1 and time.time()-count < 0.1:
stop = time.time()
# Calculate pulse length
elapsed = stop-start
        # The pulse travels to the target and back, so distance is the elapsed
        # time multiplied by the speed of sound (34000 cm/s), divided by 2,
        # i.e. elapsed * 17000 centimetres
        distance = elapsed * 17000
self.pub_distance.publish(distance)
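# Worked example (illustration only): if the echo stays high for 2.9 ms,
# distance = 0.0029 s * 17000 cm/s ≈ 49.3 cm, i.e. the target is about half a
# metre away.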
if __name__ == '__main__':
""" main """
sonar_control = SonarControl()
sonar_control.spin()
|
{
"content_hash": "8b5ce8293847835b0b21edc26f2fbf58",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 81,
"avg_line_length": 30.913793103448278,
"alnum_prop": 0.5945343000557725,
"repo_name": "jajberni/pi2go_ros",
"id": "11bdc1fd5e7f44cf62dbf852a615dab5453f5455",
"size": "1819",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pi2go_wheel/scripts/sonar_control.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CMake",
"bytes": "15737"
},
{
"name": "Python",
"bytes": "22140"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class MinexponentValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="minexponent", parent_name="icicle.marker.colorbar", **kwargs
):
super(MinexponentValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
min=kwargs.pop("min", 0),
**kwargs,
)
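# Hedged usage sketch (not part of the generated module): plotly validators
# expose validate_coerce(); the example values are assumptions.
#
#     v = MinexponentValidator()
#     v.validate_coerce(3)    # accepted: numeric and >= the declared min (0)
#     v.validate_coerce(-1)   # raises ValueError: below the declared min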
|
{
"content_hash": "7d10da90d8cffc5b72c591d6af6a99bd",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 87,
"avg_line_length": 34.357142857142854,
"alnum_prop": 0.6091476091476091,
"repo_name": "plotly/plotly.py",
"id": "5b104b90a66c9e0cab80a8505fe0863da609c369",
"size": "481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/icicle/marker/colorbar/_minexponent.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
import os
from pysteel import fs
def setup(context):
"""
:type context: behave.runner.Context
"""
repo = os.path.join(context.project_dir, 'spring-cloud-config')
fs.deltree(repo)
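# Hedged usage note (an assumption, not from the source): with behave, setup()
# would typically be invoked from an environment.py hook, e.g.
#
#     def before_scenario(context, scenario):
#         import local
#         local.setup(context)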
|
{
"content_hash": "a731d92964dda439f819e4a10cd3a0f0",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 67,
"avg_line_length": 18.363636363636363,
"alnum_prop": 0.6584158415841584,
"repo_name": "SteelToeOSS/Samples",
"id": "8a17132b5f45fdb7c7b1c39a96eedf513f805e77",
"size": "202",
"binary": false,
"copies": "1",
"ref": "refs/heads/2.x",
"path": "Configuration/src/AspDotNetCore/Simple/scaffold/local.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "210"
},
{
"name": "Batchfile",
"bytes": "2137"
},
{
"name": "C#",
"bytes": "546165"
},
{
"name": "CSS",
"bytes": "8760"
},
{
"name": "HTML",
"bytes": "107853"
},
{
"name": "JavaScript",
"bytes": "72677"
},
{
"name": "Shell",
"bytes": "2091"
}
],
"symlink_target": ""
}
|
"""
.. module: security_monkey.auditors.sns
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Patrick Kelley <pkelley@netflix.com> @monkeysecurity
"""
from security_monkey.auditor import Auditor
from security_monkey.watchers.sns import SNS
from security_monkey.exceptions import InvalidARN
from security_monkey.exceptions import InvalidSourceOwner
from security_monkey.datastore import Account
import re
class SNSAuditor(Auditor):
index = SNS.index
i_am_singular = SNS.i_am_singular
i_am_plural = SNS.i_am_plural
def __init__(self, accounts=None, debug=False):
super(SNSAuditor, self).__init__(accounts=accounts, debug=debug)
def check_snstopicpolicy_empty(self, snsitem):
"""
        Alert on an empty SNS topic policy.
"""
tag = "SNS Topic Policy is empty"
severity = 1
if snsitem.config.get('SNSPolicy', {}) == {}:
self.add_issue(severity, tag, snsitem, notes=None)
def check_snstopicpolicy_crossaccount(self, snsitem):
"""
        Alert on cross-account access.
"""
        # Example principal shapes seen in SNS policies:
        #   "Principal": { "AWS": "*" }
        #   "Principal": { "AWS": "arn:aws:iam::027213240437:root" }
policy = snsitem.config.get('SNSPolicy', {})
for statement in policy.get("Statement", []):
account_numbers = []
account_number = ''
princ_aws = statement.get("Principal", {}) \
.get("AWS", "error")
if princ_aws == "*":
account_number = statement.get("Condition", {}) \
.get("StringEquals", {}) \
.get("AWS:SourceOwner", None)
if not account_number:
tag = "SNS Topic open to everyone"
notes = "An SNS policy where { 'Principal': { 'AWS': '*' } } must also have"
notes += " a {'Condition': {'StringEquals': { 'AWS:SourceOwner': '<ACCOUNT_NUMBER>' } } }"
notes += " or it is open to the world. In this case, anyone is allowed to perform "
notes += " this action(s): {}".format(statement.get("Action"))
self.add_issue(10, tag, snsitem, notes=notes)
continue
else:
try:
account_numbers.append(str(account_number))
except ValueError:
raise InvalidSourceOwner(account_number)
else:
if isinstance(princ_aws, list):
for entry in princ_aws:
account_numbers.append(str(re.search('arn:aws:iam::([0-9-]+):', entry).group(1)))
else:
try:
account_numbers.append(str(re.search('arn:aws:iam::([0-9-]+):', princ_aws).group(1)))
                    except Exception:
                        # A bare `except:` also swallowed KeyboardInterrupt; dump the
                        # config for debugging, then flag the malformed ARN.
                        import json
                        print(json.dumps(snsitem.config, indent=4))
raise InvalidARN(princ_aws)
for account_number in account_numbers:
account = Account.query.filter(Account.number == account_number).first()
account_name = None
if account is not None:
account_name = account.name
if not account_name:
tag = "Unknown Cross Account Access"
notes = "from {} to {}".format(account_number, snsitem.account)
self.add_issue(10, tag, snsitem, notes=notes)
elif account_name != snsitem.account:
tag = "Friendly Cross Account Access"
notes = "from {} to {}".format(account_name, snsitem.account)
self.add_issue(5, tag, snsitem, notes=notes)
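# Hedged illustration (not in the original module): a policy statement shape
# that trips the "SNS Topic open to everyone" issue above, i.e. a wildcard
# principal with no AWS:SourceOwner condition:
#
#     {
#         "Statement": [{
#             "Effect": "Allow",
#             "Principal": {"AWS": "*"},
#             "Action": "SNS:Publish"
#         }]
#     }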
|
{
"content_hash": "5b4e54e05b8bfafed164aee2568e2ad8",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 100,
"avg_line_length": 37.30769230769231,
"alnum_prop": 0.5955817378497791,
"repo_name": "guardian/security_monkey",
"id": "5539ddb8fd17504029fd19ca8fffc2e2cc43a420",
"size": "4012",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "security_monkey/auditors/sns.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "44446"
},
{
"name": "Dart",
"bytes": "15639830"
},
{
"name": "JavaScript",
"bytes": "767790"
},
{
"name": "PHP",
"bytes": "38"
},
{
"name": "Python",
"bytes": "296775"
},
{
"name": "Shell",
"bytes": "6947"
}
],
"symlink_target": ""
}
|
import csv
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
# dmrx.net uses SSL SNI, which urllib2 doesn't support
import requests
from dmr_marc_users_cs750 import (get_groups_dci, get_groups_bm)
MOST_HEARD_URL = 'https://dmrx.net/csvfiles/MostHeard.csv'
# Neither of these formats uses a header row
COLUMNS_N0GSG = ('Call ID', 'Call Alias', 'Call Type', 'Receive Tone')
COLUMNS_DMRX = ('id', 'callsign', 'name')
def read_most_heard_csv(users):
"""Reads DMRX csv from the heard file-like object and returns a list of
dicts in N0GSG export format."""
csvr = csv.DictReader(users, fieldnames=COLUMNS_DMRX)
result = []
for row in csvr:
result.append(dict(zip(COLUMNS_N0GSG, (
row['id'],
' '.join((row['callsign'], row['name'])),
'Private Call', # Call Type
'No', # Receive Tone
))))
return result
def write_n0gsg_csv(contacts, csvo,
fieldnames=COLUMNS_N0GSG, writeheader=False):
"""Writes contacts to the csvo file-like object.
"""
csvw = csv.DictWriter(csvo, fieldnames)
if writeheader:
csvw.writeheader()
for row in contacts:
csvw.writerow(row)
def get_users(db_url=MOST_HEARD_URL):
source = requests.get(db_url)
    data = source.content.decode('utf-8', 'replace').encode('ascii', 'replace')
    if not isinstance(data, str):  # Python 3: bytes -> str (str(data) would yield "b'...'")
        data = data.decode('ascii')
    users = read_most_heard_csv(StringIO(data))
source.close()
return users
if __name__ == '__main__':
marc = get_users()
dci = get_groups_dci()
bm = get_groups_bm()
with open('n0gsg-dci-bm-dmrx-most-heard.csv', 'w') as csvo:
write_n0gsg_csv(dci + bm + marc, csvo)
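# Hedged worked example (the ID, callsign and name are made up): a DMRX row
# "3106021,AB1CDE,Jane Doe" becomes the N0GSG contact
# {'Call ID': '3106021', 'Call Alias': 'AB1CDE Jane Doe',
#  'Call Type': 'Private Call', 'Receive Tone': 'No'}.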
|
{
"content_hash": "f067547f70c5cb57e3b553ff5edceaf4",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 79,
"avg_line_length": 28.796610169491526,
"alnum_prop": 0.6280164802825191,
"repo_name": "ajorg/DMR_contacts",
"id": "b258b2baf11e05c0c4d8f9cf51ae12f8e8f0b629",
"size": "1722",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "dmrx_most_heard_n0gsg.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "12822"
}
],
"symlink_target": ""
}
|
"""
Filename: _downgrade.py
From: Miguel Grinberg
Description: Downgrade the database schema by one migration version.
"""
import sys
sys.path.append('..')
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
api.downgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, v - 1)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print('Current database version: ' + str(v))
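# Hedged note: the matching upgrade uses the same sqlalchemy-migrate API, e.g.
#     api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)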
|
{
"content_hash": "6aa49b36830e1f0d183702a283df8db3",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 70,
"avg_line_length": 31.9375,
"alnum_prop": 0.7808219178082192,
"repo_name": "bsoe003/MWCH_Website",
"id": "fe5196419f05d13bcb9b5a2be5073631787aea42",
"size": "531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "database/_downgrade.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "33115"
},
{
"name": "HTML",
"bytes": "15921"
},
{
"name": "JavaScript",
"bytes": "7799"
},
{
"name": "Python",
"bytes": "13196"
},
{
"name": "Shell",
"bytes": "48"
}
],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import sys
from abc import abstractproperty
from pants.util.memo import memoized
from pants.util.meta import AbstractClass
from pants.util.objects import datatype
class Selector(AbstractClass):
@abstractproperty
def optional(self):
"""Return true if this Selector is optional. It may result in a `None` match."""
class Select(datatype('Subject', ['product', 'optional']), Selector):
"""Selects the given Product for the Subject provided to the constructor.
If optional=True and no matching product can be produced, will return None.
"""
def __new__(cls, product, optional=False):
return super(Select, cls).__new__(cls, product, optional)
class SelectVariant(datatype('Variant', ['product', 'variant_key']), Selector):
"""Selects the matching Product and variant name for the Subject provided to the constructor.
For example: a SelectVariant with a variant_key of "thrift" and a product of type ApacheThrift
will only match when a consumer passes a variant value for "thrift" that matches the name of an
ApacheThrift value.
"""
optional = False
class SelectDependencies(datatype('Dependencies', ['product', 'deps_product', 'field']), Selector):
"""Selects a product for each of the dependencies of a product for the Subject.
The dependencies declared on `deps_product` (in the optional `field` parameter, which defaults
to 'dependencies' when not specified) will be provided to the requesting task in the
order they were declared.
"""
def __new__(cls, product, deps_product, field=None):
return super(SelectDependencies, cls).__new__(cls, product, deps_product, field)
optional = False
class SelectProjection(datatype('Projection', ['product', 'projected_subject', 'fields', 'input_product']), Selector):
"""Selects a field of the given Subject to produce a Subject, Product dependency from.
Projecting an input allows for deduplication in the graph, where multiple Subjects
resolve to a single backing Subject instead.
For convenience, if a single field is requested and it is of the requested type, the field value
is projected directly rather than attempting to use it to construct the projected type.
"""
optional = False
class SelectLiteral(datatype('Literal', ['subject', 'product']), Selector):
"""Selects a literal Subject (other than the one applied to the selector)."""
optional = False
class Collection(object):
"""
Singleton Collection Type. The ambition is to gain native support for flattening,
so methods like <pants.engine.fs.merge_files> won't have to be defined separately.
Related to: https://github.com/pantsbuild/pants/issues/3169
"""
@classmethod
@memoized
def of(cls, element_type, fields=('dependencies',)):
type_name = b'{}({})'.format(cls.__name__, element_type.__name__)
collection_of_type = type(type_name, (cls, datatype("{}s".format(element_type.__name__), fields)), {})
# Expose the custom class type at the module level to be pickle compatible.
setattr(sys.modules[cls.__module__], type_name, collection_of_type)
return collection_of_type
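# Hedged usage sketch (not in the original module): Collection.of builds and
# memoizes a named datatype per element type, so repeated calls return the
# same class; `File` and the field values here are assumed examples.
#
#     FilesCollection = Collection.of(File)
#     assert Collection.of(File) is FilesCollection   # memoized
#     files = FilesCollection(dependencies=(f1, f2))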
|
{
"content_hash": "bb57aad7b127261779a4cad1eef87719",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 118,
"avg_line_length": 37.83720930232558,
"alnum_prop": 0.7267977873386601,
"repo_name": "gmalmquist/pants",
"id": "4e7a295cd93f4239a49c5f37b384ca7b6d3343e9",
"size": "3401",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/python/pants/engine/selectors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "781"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Cucumber",
"bytes": "919"
},
{
"name": "GAP",
"bytes": "2459"
},
{
"name": "Go",
"bytes": "1746"
},
{
"name": "HTML",
"bytes": "79866"
},
{
"name": "Java",
"bytes": "437330"
},
{
"name": "JavaScript",
"bytes": "29992"
},
{
"name": "Protocol Buffer",
"bytes": "3783"
},
{
"name": "Python",
"bytes": "5053630"
},
{
"name": "Scala",
"bytes": "84585"
},
{
"name": "Shell",
"bytes": "58748"
},
{
"name": "Thrift",
"bytes": "1966"
}
],
"symlink_target": ""
}
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tada.settings")
sys.path.append(os.path.join(os.getcwd(), 'apps'))
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
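# Typical invocations of this entry point (standard Django management commands):
#     python manage.py migrate
#     python manage.py runserver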
|
{
"content_hash": "a4a12615e33dcb22425c2420fad07c10",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 68,
"avg_line_length": 28,
"alnum_prop": 0.6928571428571428,
"repo_name": "klpdotorg/tada",
"id": "2359d51b693a7f4c50c24bbc329b5a814c093bb2",
"size": "302",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "175572"
},
{
"name": "SQLPL",
"bytes": "2154"
},
{
"name": "Shell",
"bytes": "4714"
}
],
"symlink_target": ""
}
|
class T(object):
def __init__(self, *objs):
self.__objs = list(objs)
def add(self, *objs):
self.__objs.extend(objs)
def add_objects(self, objs):
self.__objs.extend(objs)
def iterate(self):
return Iterator(self)
def list(self):
return self.__objs
def __getitem__(self, idx):
return self.__objs[idx]
def nth(self, idx):
return self.__objs[idx]
class Iterator(object):
def __init__(self, set_):
self.__set = set_
self.__idx = 0
def reset(self):
self.__idx = 0
def next(self):
val = self.__set.nth(self.__idx)
self.__idx += 1
if self.__idx >= len(self.__set.list()):
self.__idx = 0
return val
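# Hedged demo (not part of the original module): Iterator.next wraps around at
# the end of the set, so iteration cycles forever.
if __name__ == '__main__':
    it = T(1, 2, 3).iterate()
    print([it.next() for _ in range(5)])  # -> [1, 2, 3, 1, 2]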
|
{
"content_hash": "55e602f8cd967daed3be48d691188817",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 48,
"avg_line_length": 25.466666666666665,
"alnum_prop": 0.5039267015706806,
"repo_name": "mollstam/UnrealPy",
"id": "7d3c9ab70d73c1d76bf121f40ef18ae927219c1c",
"size": "1305",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/PyChart-1.39/pychart/object_set.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "APL",
"bytes": "587"
},
{
"name": "ASP",
"bytes": "2753"
},
{
"name": "ActionScript",
"bytes": "5686"
},
{
"name": "Ada",
"bytes": "94225"
},
{
"name": "Agda",
"bytes": "3154"
},
{
"name": "Alloy",
"bytes": "6579"
},
{
"name": "ApacheConf",
"bytes": "12482"
},
{
"name": "AppleScript",
"bytes": "421"
},
{
"name": "Assembly",
"bytes": "1093261"
},
{
"name": "AutoHotkey",
"bytes": "3733"
},
{
"name": "AutoIt",
"bytes": "667"
},
{
"name": "Awk",
"bytes": "63276"
},
{
"name": "Batchfile",
"bytes": "147828"
},
{
"name": "BlitzBasic",
"bytes": "185102"
},
{
"name": "BlitzMax",
"bytes": "2387"
},
{
"name": "Boo",
"bytes": "1111"
},
{
"name": "Bro",
"bytes": "7337"
},
{
"name": "C",
"bytes": "108397183"
},
{
"name": "C#",
"bytes": "156749"
},
{
"name": "C++",
"bytes": "13535833"
},
{
"name": "CLIPS",
"bytes": "6933"
},
{
"name": "CMake",
"bytes": "12441"
},
{
"name": "COBOL",
"bytes": "114812"
},
{
"name": "CSS",
"bytes": "430375"
},
{
"name": "Ceylon",
"bytes": "1387"
},
{
"name": "Chapel",
"bytes": "4366"
},
{
"name": "Cirru",
"bytes": "2574"
},
{
"name": "Clean",
"bytes": "9679"
},
{
"name": "Clojure",
"bytes": "23871"
},
{
"name": "CoffeeScript",
"bytes": "20149"
},
{
"name": "ColdFusion",
"bytes": "9006"
},
{
"name": "Common Lisp",
"bytes": "49017"
},
{
"name": "Coq",
"bytes": "66"
},
{
"name": "Cucumber",
"bytes": "390"
},
{
"name": "Cuda",
"bytes": "776"
},
{
"name": "D",
"bytes": "7556"
},
{
"name": "DIGITAL Command Language",
"bytes": "425938"
},
{
"name": "DTrace",
"bytes": "6706"
},
{
"name": "Dart",
"bytes": "591"
},
{
"name": "Dylan",
"bytes": "6343"
},
{
"name": "Ecl",
"bytes": "2599"
},
{
"name": "Eiffel",
"bytes": "2145"
},
{
"name": "Elixir",
"bytes": "4340"
},
{
"name": "Emacs Lisp",
"bytes": "18303"
},
{
"name": "Erlang",
"bytes": "5746"
},
{
"name": "F#",
"bytes": "19156"
},
{
"name": "FORTRAN",
"bytes": "38458"
},
{
"name": "Factor",
"bytes": "10194"
},
{
"name": "Fancy",
"bytes": "2581"
},
{
"name": "Fantom",
"bytes": "25331"
},
{
"name": "GAP",
"bytes": "29880"
},
{
"name": "GLSL",
"bytes": "450"
},
{
"name": "Gnuplot",
"bytes": "11501"
},
{
"name": "Go",
"bytes": "5444"
},
{
"name": "Golo",
"bytes": "1649"
},
{
"name": "Gosu",
"bytes": "2853"
},
{
"name": "Groff",
"bytes": "3458639"
},
{
"name": "Groovy",
"bytes": "2586"
},
{
"name": "HTML",
"bytes": "92126540"
},
{
"name": "Haskell",
"bytes": "49593"
},
{
"name": "Haxe",
"bytes": "16812"
},
{
"name": "Hy",
"bytes": "7237"
},
{
"name": "IDL",
"bytes": "2098"
},
{
"name": "Idris",
"bytes": "2771"
},
{
"name": "Inform 7",
"bytes": "1944"
},
{
"name": "Inno Setup",
"bytes": "18796"
},
{
"name": "Ioke",
"bytes": "469"
},
{
"name": "Isabelle",
"bytes": "21392"
},
{
"name": "Jasmin",
"bytes": "9428"
},
{
"name": "Java",
"bytes": "4040623"
},
{
"name": "JavaScript",
"bytes": "223927"
},
{
"name": "Julia",
"bytes": "27687"
},
{
"name": "KiCad",
"bytes": "475"
},
{
"name": "Kotlin",
"bytes": "971"
},
{
"name": "LSL",
"bytes": "160"
},
{
"name": "Lasso",
"bytes": "18650"
},
{
"name": "Lean",
"bytes": "6921"
},
{
"name": "Limbo",
"bytes": "9891"
},
{
"name": "Liquid",
"bytes": "862"
},
{
"name": "LiveScript",
"bytes": "972"
},
{
"name": "Logos",
"bytes": "19509"
},
{
"name": "Logtalk",
"bytes": "7260"
},
{
"name": "Lua",
"bytes": "8677"
},
{
"name": "Makefile",
"bytes": "2053844"
},
{
"name": "Mask",
"bytes": "815"
},
{
"name": "Mathematica",
"bytes": "191"
},
{
"name": "Max",
"bytes": "296"
},
{
"name": "Modelica",
"bytes": "6213"
},
{
"name": "Modula-2",
"bytes": "23838"
},
{
"name": "Module Management System",
"bytes": "14798"
},
{
"name": "Monkey",
"bytes": "2587"
},
{
"name": "Moocode",
"bytes": "3343"
},
{
"name": "MoonScript",
"bytes": "14862"
},
{
"name": "Myghty",
"bytes": "3939"
},
{
"name": "NSIS",
"bytes": "7663"
},
{
"name": "Nemerle",
"bytes": "1517"
},
{
"name": "NewLisp",
"bytes": "42726"
},
{
"name": "Nimrod",
"bytes": "37191"
},
{
"name": "Nit",
"bytes": "55581"
},
{
"name": "Nix",
"bytes": "2448"
},
{
"name": "OCaml",
"bytes": "42416"
},
{
"name": "Objective-C",
"bytes": "104883"
},
{
"name": "Objective-J",
"bytes": "15340"
},
{
"name": "Opa",
"bytes": "172"
},
{
"name": "OpenEdge ABL",
"bytes": "49943"
},
{
"name": "PAWN",
"bytes": "6555"
},
{
"name": "PHP",
"bytes": "68611"
},
{
"name": "PLSQL",
"bytes": "45772"
},
{
"name": "Pan",
"bytes": "1241"
},
{
"name": "Pascal",
"bytes": "349743"
},
{
"name": "Perl",
"bytes": "5931502"
},
{
"name": "Perl6",
"bytes": "113623"
},
{
"name": "PigLatin",
"bytes": "6657"
},
{
"name": "Pike",
"bytes": "8479"
},
{
"name": "PostScript",
"bytes": "18216"
},
{
"name": "PowerShell",
"bytes": "14236"
},
{
"name": "Prolog",
"bytes": "43750"
},
{
"name": "Protocol Buffer",
"bytes": "3401"
},
{
"name": "Puppet",
"bytes": "130"
},
{
"name": "Python",
"bytes": "122886305"
},
{
"name": "QML",
"bytes": "3912"
},
{
"name": "R",
"bytes": "49247"
},
{
"name": "Racket",
"bytes": "11341"
},
{
"name": "Rebol",
"bytes": "17708"
},
{
"name": "Red",
"bytes": "10536"
},
{
"name": "Redcode",
"bytes": "830"
},
{
"name": "Ruby",
"bytes": "91403"
},
{
"name": "Rust",
"bytes": "6788"
},
{
"name": "SAS",
"bytes": "15603"
},
{
"name": "SaltStack",
"bytes": "1040"
},
{
"name": "Scala",
"bytes": "730"
},
{
"name": "Scheme",
"bytes": "50346"
},
{
"name": "Scilab",
"bytes": "943"
},
{
"name": "Shell",
"bytes": "2925518"
},
{
"name": "ShellSession",
"bytes": "320"
},
{
"name": "Smali",
"bytes": "832"
},
{
"name": "Smalltalk",
"bytes": "158636"
},
{
"name": "Smarty",
"bytes": "523"
},
{
"name": "SourcePawn",
"bytes": "130"
},
{
"name": "Standard ML",
"bytes": "36869"
},
{
"name": "Swift",
"bytes": "2035"
},
{
"name": "SystemVerilog",
"bytes": "265"
},
{
"name": "Tcl",
"bytes": "6077233"
},
{
"name": "TeX",
"bytes": "487999"
},
{
"name": "Tea",
"bytes": "391"
},
{
"name": "TypeScript",
"bytes": "535"
},
{
"name": "VHDL",
"bytes": "4446"
},
{
"name": "VimL",
"bytes": "32053"
},
{
"name": "Visual Basic",
"bytes": "19441"
},
{
"name": "XQuery",
"bytes": "4289"
},
{
"name": "XS",
"bytes": "178055"
},
{
"name": "XSLT",
"bytes": "1995174"
},
{
"name": "Xtend",
"bytes": "727"
},
{
"name": "Yacc",
"bytes": "25665"
},
{
"name": "Zephir",
"bytes": "485"
},
{
"name": "eC",
"bytes": "31545"
},
{
"name": "mupad",
"bytes": "2442"
},
{
"name": "nesC",
"bytes": "23697"
},
{
"name": "xBase",
"bytes": "3349"
}
],
"symlink_target": ""
}
|
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._network_connections_operations import (
build_create_or_update_request,
build_delete_request,
build_get_health_details_request,
build_get_request,
build_list_by_resource_group_request,
build_list_by_subscription_request,
build_list_health_details_request,
build_run_health_checks_request,
build_update_request,
)
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkConnectionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.devcenter.aio.DevCenterMgmtClient`'s
:attr:`network_connections` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_by_subscription(
self, top: Optional[int] = None, **kwargs: Any
) -> AsyncIterable["_models.NetworkConnection"]:
"""Lists network connections in a subscription.
:param top: The maximum number of resources to return from the operation. Example: '$top=10'.
Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkConnection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.devcenter.models.NetworkConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.NetworkConnectionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
top=top,
api_version=api_version,
template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkConnectionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DevCenter/networkConnections"} # type: ignore
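    # Hedged usage sketch (client construction is assumed elsewhere): the
    # returned AsyncItemPaged supports `async for`, so callers iterate it
    # directly:
    #
    #     async for conn in client.network_connections.list_by_subscription(top=10):
    #         print(conn.name)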
@distributed_trace
def list_by_resource_group(
self, resource_group_name: str, top: Optional[int] = None, **kwargs: Any
) -> AsyncIterable["_models.NetworkConnection"]:
"""Lists network connections in a resource group.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param top: The maximum number of resources to return from the operation. Example: '$top=10'.
Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkConnection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.devcenter.models.NetworkConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.NetworkConnectionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
top=top,
api_version=api_version,
template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkConnectionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections"} # type: ignore
@distributed_trace_async
async def get(
self, resource_group_name: str, network_connection_name: str, **kwargs: Any
) -> _models.NetworkConnection:
"""Gets a network connection resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkConnection or the result of cls(response)
:rtype: ~azure.mgmt.devcenter.models.NetworkConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.NetworkConnection]
request = build_get_request(
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("NetworkConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}"} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
network_connection_name: str,
body: Union[_models.NetworkConnection, IO],
**kwargs: Any
) -> _models.NetworkConnection:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.NetworkConnection]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(body, (IO, bytes)):
_content = body
else:
_json = self._serialize.body(body, "NetworkConnection")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("NetworkConnection", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("NetworkConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}"} # type: ignore
@overload
async def begin_create_or_update(
self,
resource_group_name: str,
network_connection_name: str,
body: _models.NetworkConnection,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.NetworkConnection]:
"""Creates or updates a Network Connections resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
:param body: Represents network connection. Required.
:type body: ~azure.mgmt.devcenter.models.NetworkConnection
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkConnection or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.devcenter.models.NetworkConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_create_or_update(
self,
resource_group_name: str,
network_connection_name: str,
body: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.NetworkConnection]:
"""Creates or updates a Network Connections resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
:param body: Represents network connection. Required.
:type body: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkConnection or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.devcenter.models.NetworkConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
network_connection_name: str,
body: Union[_models.NetworkConnection, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.NetworkConnection]:
"""Creates or updates a Network Connections resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
        :param body: Represents network connection. Is either a model type or an IO type. Required.
:type body: ~azure.mgmt.devcenter.models.NetworkConnection or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkConnection or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.devcenter.models.NetworkConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.NetworkConnection]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial( # type: ignore
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
body=body,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("NetworkConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}"} # type: ignore
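    # Hypothetical usage sketch (a minimal illustration; the client class and
    # the ``network_connections`` operations-group attribute are inferred from
    # this module's path, not verified against the published package):
    #
    #     from azure.identity.aio import DefaultAzureCredential
    #     from azure.mgmt.devcenter.aio import DevCenterMgmtClient
    #
    #     client = DevCenterMgmtClient(DefaultAzureCredential(), "<subscription-id>")
    #     poller = await client.network_connections.begin_create_or_update(
    #         "my-rg", "my-connection", network_connection_model
    #     )
    #     connection = await poller.result()  # resolves to a NetworkConnection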
async def _update_initial(
self,
resource_group_name: str,
network_connection_name: str,
body: Union[_models.NetworkConnectionUpdate, IO],
**kwargs: Any
) -> Optional[_models.NetworkConnection]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.NetworkConnection]]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(body, (IO, bytes)):
_content = body
else:
_json = self._serialize.body(body, "NetworkConnectionUpdate")
request = build_update_request(
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("NetworkConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}"} # type: ignore
@overload
async def begin_update(
self,
resource_group_name: str,
network_connection_name: str,
body: _models.NetworkConnectionUpdate,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.NetworkConnection]:
"""Partially updates a Network Connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
:param body: Represents network connection. Required.
:type body: ~azure.mgmt.devcenter.models.NetworkConnectionUpdate
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkConnection or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.devcenter.models.NetworkConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_update(
self,
resource_group_name: str,
network_connection_name: str,
body: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.NetworkConnection]:
"""Partially updates a Network Connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
:param body: Represents network connection. Required.
:type body: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkConnection or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.devcenter.models.NetworkConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_update(
self,
resource_group_name: str,
network_connection_name: str,
body: Union[_models.NetworkConnectionUpdate, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.NetworkConnection]:
"""Partially updates a Network Connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
        :param body: Represents network connection. Is either a model type or an IO type. Required.
:type body: ~azure.mgmt.devcenter.models.NetworkConnectionUpdate or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkConnection or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.devcenter.models.NetworkConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.NetworkConnection]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_initial( # type: ignore
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
body=body,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("NetworkConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}"} # type: ignore
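    # Hypothetical usage sketch, reusing the assumed client from the
    # begin_create_or_update comment above; only the body model differs:
    #
    #     poller = await client.network_connections.begin_update(
    #         "my-rg", "my-connection", network_connection_update_model
    #     )
    #     updated = await poller.result()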
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, network_connection_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_delete_request(
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}"} # type: ignore
@distributed_trace_async
async def begin_delete(
self, resource_group_name: str, network_connection_name: str, **kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes a Network Connections resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}"} # type: ignore
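    # Hypothetical usage sketch (same assumed client as above). Passing
    # polling=False substitutes AsyncNoPolling, so the poller resolves without
    # waiting for the deletion to finish:
    #
    #     poller = await client.network_connections.begin_delete(
    #         "my-rg", "my-connection"
    #     )
    #     await poller.result()  # completes with None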
@distributed_trace
def list_health_details(
self, resource_group_name: str, network_connection_name: str, top: Optional[int] = None, **kwargs: Any
) -> AsyncIterable["_models.HealthCheckStatusDetails"]:
"""Lists health check status details.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
:param top: The maximum number of resources to return from the operation. Example: '$top=10'.
Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either HealthCheckStatusDetails or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.devcenter.models.HealthCheckStatusDetails]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.HealthCheckStatusDetailsListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_health_details_request(
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
subscription_id=self._config.subscription_id,
top=top,
api_version=api_version,
template_url=self.list_health_details.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("HealthCheckStatusDetailsListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_health_details.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}/healthChecks"} # type: ignore
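    # Hypothetical usage sketch (same assumed client as above). The returned
    # AsyncItemPaged fetches pages lazily as it is iterated:
    #
    #     async for detail in client.network_connections.list_health_details(
    #         "my-rg", "my-connection", top=10
    #     ):
    #         print(detail)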
@distributed_trace_async
async def get_health_details(
self, resource_group_name: str, network_connection_name: str, **kwargs: Any
) -> _models.HealthCheckStatusDetails:
"""Gets health check status details.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: HealthCheckStatusDetails or the result of cls(response)
:rtype: ~azure.mgmt.devcenter.models.HealthCheckStatusDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.HealthCheckStatusDetails]
request = build_get_health_details_request(
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_health_details.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("HealthCheckStatusDetails", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_health_details.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}/healthChecks/latest"} # type: ignore
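    # Hypothetical usage sketch (same assumed client as above). Unlike the
    # begin_* operations, this is a plain awaitable, not an LRO poller:
    #
    #     latest = await client.network_connections.get_health_details(
    #         "my-rg", "my-connection"
    #     )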
async def _run_health_checks_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, network_connection_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_run_health_checks_request(
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._run_health_checks_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_run_health_checks_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}/runHealthChecks"} # type: ignore
@distributed_trace_async
async def begin_run_health_checks(
self, resource_group_name: str, network_connection_name: str, **kwargs: Any
) -> AsyncLROPoller[None]:
"""Triggers a new health check run. The execution and health check result can be tracked via the
        Network Connection's health check details.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param network_connection_name: Name of the Network Connection that can be applied to a Pool.
Required.
:type network_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-11-11-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._run_health_checks_initial( # type: ignore
resource_group_name=resource_group_name,
network_connection_name=network_connection_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_run_health_checks.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevCenter/networkConnections/{networkConnectionName}/runHealthChecks"} # type: ignore
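    # Hypothetical usage sketch (same assumed client as above): trigger a run,
    # wait for the LRO to finish, then read the outcome through the
    # health-detail calls defined earlier in this class:
    #
    #     poller = await client.network_connections.begin_run_health_checks(
    #         "my-rg", "my-connection"
    #     )
    #     await poller.result()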
|
{
"content_hash": "7f875bcbc5d13f9e43a82f7f43badf3b",
"timestamp": "",
"source": "github",
"line_count": 1155,
"max_line_length": 224,
"avg_line_length": 47.40692640692641,
"alnum_prop": 0.6371838188293306,
"repo_name": "Azure/azure-sdk-for-python",
"id": "9028a2dcd0b01497001ed8febf7e099f2ebe301a",
"size": "55255",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/devcenter/azure-mgmt-devcenter/azure/mgmt/devcenter/aio/operations/_network_connections_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, printLocation, NearTestCase
setOutDir(__name__)
import unittest,re,codecs
from reportlab.pdfbase import pdfdoc
class PdfdocTestCase(NearTestCase):
"""Tests of expected Unicode and encoding behaviour
"""
def setUp(self):
self.doc = pdfdoc.DummyDoc()
    def testPDFText(self):
        self.assertEqual(pdfdoc.PDFText('Hello World').format(self.doc), '<48656c6c6f20576f726c64>')
    def testPDFString(self):
        self.assertEqual(pdfdoc.PDFString('Hello World').format(self.doc), '(Hello World)')
        self.assertEqual(pdfdoc.PDFString('Hello\xc2\xa2World', 0).format(self.doc), '(Hello\xa2World)')
        self.assertEqual(pdfdoc.PDFString('Hello\xc2\xa0World', 0).format(self.doc), '(\xfe\xff\x00H\x00e\x00l\x00l\x00o\x00\xa0\x00W\x00o\x00r\x00l\x00d)')
        self.assertEqual(pdfdoc.PDFString('Hello\xc2\xa0World', 1).format(self.doc), '(\\376\\377\\000H\\000e\\000l\\000l\\000o\\000\\240\\000W\\000o\\000r\\000l\\000d)')
        self.assertEqual(pdfdoc.PDFString(u'Hello\xa0World', 1).format(self.doc), '(\\376\\377\\000H\\000e\\000l\\000l\\000o\\000\\240\\000W\\000o\\000r\\000l\\000d)')
        self.assertEqual(pdfdoc.PDFString(u'Hello\xa0World', 0).format(self.doc), '(\xfe\xff\x00H\x00e\x00l\x00l\x00o\x00\xa0\x00W\x00o\x00r\x00l\x00d)')
def makeSuite():
return makeSuiteForClasses(
PdfdocTestCase,
)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
printLocation()
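# Behaviour pinned down by the assertions above (summarised from the tests
# themselves, not from a spec): PDFText hex-encodes its payload; PDFString
# keeps text it can represent directly as a plain (...) literal, and falls
# back to UTF-16BE with a \xfe\xff BOM otherwise (here for U+00A0); the
# second argument selects raw bytes (0) versus octal-escaped output (1).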
|
{
"content_hash": "03a7227a2c71c5082205a890c94c3752",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 169,
"avg_line_length": 49.70967741935484,
"alnum_prop": 0.7027903958468527,
"repo_name": "blampe/M2M",
"id": "cba3ce747e393bfef3296128225fa8f9000c96b6",
"size": "1541",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "reportlab-2.5/tests/test_pdfbase_pdfdoc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "754736"
},
{
"name": "Java",
"bytes": "6333"
},
{
"name": "JavaScript",
"bytes": "21268"
},
{
"name": "PHP",
"bytes": "18"
},
{
"name": "Python",
"bytes": "6374305"
},
{
"name": "Shell",
"bytes": "4721"
}
],
"symlink_target": ""
}
|
"""Test class for Ironic SSH power driver."""
import fixtures
import mock
import paramiko
from ironic.common import boot_devices
from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.db import api as dbapi
from ironic.drivers.modules import ssh
from ironic.drivers import utils as driver_utils
from ironic.openstack.common import context
from ironic.openstack.common import processutils
from ironic.tests import base
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
from oslo.config import cfg
CONF = cfg.CONF
class SSHValidateParametersTestCase(base.TestCase):
def setUp(self):
super(SSHValidateParametersTestCase, self).setUp()
self.context = context.get_admin_context()
def test__parse_driver_info_good_password(self):
# make sure we get back the expected things
node = obj_utils.get_test_node(
self.context,
driver='fake_ssh',
driver_info=db_utils.get_test_ssh_info('password'))
info = ssh._parse_driver_info(node)
self.assertIsNotNone(info.get('host'))
self.assertIsNotNone(info.get('username'))
self.assertIsNotNone(info.get('password'))
self.assertIsNotNone(info.get('port'))
self.assertIsNotNone(info.get('virt_type'))
self.assertIsNotNone(info.get('cmd_set'))
self.assertIsNotNone(info.get('uuid'))
def test__parse_driver_info_good_key(self):
# make sure we get back the expected things
node = obj_utils.get_test_node(
self.context,
driver='fake_ssh',
driver_info=db_utils.get_test_ssh_info('key'))
info = ssh._parse_driver_info(node)
self.assertIsNotNone(info.get('host'))
self.assertIsNotNone(info.get('username'))
self.assertIsNotNone(info.get('key_contents'))
self.assertIsNotNone(info.get('port'))
self.assertIsNotNone(info.get('virt_type'))
self.assertIsNotNone(info.get('cmd_set'))
self.assertIsNotNone(info.get('uuid'))
def test__parse_driver_info_good_file(self):
# make sure we get back the expected things
d_info = db_utils.get_test_ssh_info('file')
tempdir = self.useFixture(fixtures.TempDir())
key_path = tempdir.path + '/foo'
open(key_path, 'wt').close()
d_info['ssh_key_filename'] = key_path
node = obj_utils.get_test_node(
self.context,
driver='fake_ssh',
driver_info=d_info)
info = ssh._parse_driver_info(node)
self.assertIsNotNone(info.get('host'))
self.assertIsNotNone(info.get('username'))
self.assertIsNotNone(info.get('key_filename'))
self.assertIsNotNone(info.get('port'))
self.assertIsNotNone(info.get('virt_type'))
self.assertIsNotNone(info.get('cmd_set'))
self.assertIsNotNone(info.get('uuid'))
def test__parse_driver_info_bad_file(self):
        # A key filename that doesn't exist raises an error.
info = db_utils.get_test_ssh_info('file')
node = obj_utils.get_test_node(
self.context,
driver='fake_ssh',
driver_info=info)
self.assertRaises(
exception.InvalidParameterValue, ssh._parse_driver_info, node)
def test__parse_driver_info_too_many(self):
info = db_utils.get_test_ssh_info('too_many')
node = obj_utils.get_test_node(
self.context,
driver='fake_ssh',
driver_info=info)
self.assertRaises(
exception.InvalidParameterValue, ssh._parse_driver_info, node)
def test__parse_driver_info_missing_host(self):
# make sure error is raised when info is missing
info = db_utils.get_test_ssh_info()
del info['ssh_address']
node = obj_utils.get_test_node(self.context, driver_info=info)
self.assertRaises(exception.MissingParameterValue,
ssh._parse_driver_info,
node)
def test__parse_driver_info_missing_user(self):
# make sure error is raised when info is missing
info = db_utils.get_test_ssh_info()
del info['ssh_username']
node = obj_utils.get_test_node(self.context, driver_info=info)
self.assertRaises(exception.MissingParameterValue,
ssh._parse_driver_info,
node)
def test__parse_driver_info_invalid_creds(self):
        # make sure error is raised when credentials are invalid
info = db_utils.get_test_ssh_info('no-creds')
node = obj_utils.get_test_node(self.context, driver_info=info)
self.assertRaises(exception.InvalidParameterValue,
ssh._parse_driver_info,
node)
def test__parse_driver_info_missing_virt_type(self):
# make sure error is raised when info is missing
info = db_utils.get_test_ssh_info()
del info['ssh_virt_type']
node = obj_utils.get_test_node(self.context, driver_info=info)
self.assertRaises(exception.MissingParameterValue,
ssh._parse_driver_info,
node)
def test__parse_driver_info_ssh_port_wrong_type(self):
        # make sure error is raised when ssh_port is not an integer
info = db_utils.get_test_ssh_info()
info['ssh_port'] = 'wrong_port_value'
node = obj_utils.get_test_node(self.context, driver_info=info)
self.assertRaises(exception.InvalidParameterValue,
ssh._parse_driver_info,
node)
def test__normalize_mac_string(self):
mac_raw = "0A:1B-2C-3D:4F"
mac_clean = ssh._normalize_mac(mac_raw)
self.assertEqual("0a1b2c3d4f", mac_clean)
def test__normalize_mac_unicode(self):
mac_raw = u"0A:1B-2C-3D:4F"
mac_clean = ssh._normalize_mac(mac_raw)
self.assertEqual("0a1b2c3d4f", mac_clean)
def test__parse_driver_info_with_custom_libvirt_uri(self):
CONF.set_override('libvirt_uri', 'qemu:///foo', 'ssh')
expected_base_cmd = "/usr/bin/virsh --connect qemu:///foo"
node = obj_utils.get_test_node(
self.context,
driver='fake_ssh',
driver_info=db_utils.get_test_ssh_info())
node['driver_info']['ssh_virt_type'] = 'virsh'
info = ssh._parse_driver_info(node)
self.assertEqual(expected_base_cmd, info['cmd_set']['base_cmd'])
def test__get_boot_device_map_parallels(self):
boot_map = ssh._get_boot_device_map('parallels')
self.assertEqual('net0', boot_map[boot_devices.PXE])
def test__get_boot_device_map_vbox(self):
boot_map = ssh._get_boot_device_map('vbox')
self.assertEqual('net', boot_map[boot_devices.PXE])
def test__get_boot_device_map_exception(self):
self.assertRaises(exception.InvalidParameterValue,
ssh._get_boot_device_map,
'this_doesn_t_exist')
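# The two _normalize_mac tests above pin down the expected normalisation:
# separator characters (':' and '-') are stripped and the result is
# lowercased. A minimal re-implementation sketch (hypothetical helper, not
# the driver's actual code):
def _normalize_mac_sketch(mac):
    """Strip MAC separators and lowercase, e.g. '0A:1B-2C' -> '0a1b2c'."""
    return mac.replace('-', '').replace(':', '').lower()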
class SSHPrivateMethodsTestCase(base.TestCase):
def setUp(self):
super(SSHPrivateMethodsTestCase, self).setUp()
self.context = context.get_admin_context()
self.node = obj_utils.get_test_node(
self.context,
driver='fake_ssh',
driver_info=db_utils.get_test_ssh_info())
self.sshclient = paramiko.SSHClient()
@mock.patch.object(utils, 'ssh_connect')
def test__get_connection_client(self, ssh_connect_mock):
ssh_connect_mock.return_value = self.sshclient
client = ssh._get_connection(self.node)
self.assertEqual(self.sshclient, client)
driver_info = ssh._parse_driver_info(self.node)
ssh_connect_mock.assert_called_once_with(driver_info)
@mock.patch.object(utils, 'ssh_connect')
def test__get_connection_exception(self, ssh_connect_mock):
ssh_connect_mock.side_effect = exception.SSHConnectFailed(host='fake')
self.assertRaises(exception.SSHConnectFailed,
ssh._get_connection,
self.node)
driver_info = ssh._parse_driver_info(self.node)
ssh_connect_mock.assert_called_once_with(driver_info)
@mock.patch.object(processutils, 'ssh_execute')
def test__ssh_execute(self, exec_ssh_mock):
ssh_cmd = "somecmd"
expected = ['a', 'b', 'c']
exec_ssh_mock.return_value = ('\n'.join(expected), '')
lst = ssh._ssh_execute(self.sshclient, ssh_cmd)
exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
self.assertEqual(expected, lst)
@mock.patch.object(processutils, 'ssh_execute')
def test__ssh_execute_exception(self, exec_ssh_mock):
ssh_cmd = "somecmd"
exec_ssh_mock.side_effect = processutils.ProcessExecutionError
self.assertRaises(exception.SSHCommandFailed,
ssh._ssh_execute,
self.sshclient,
ssh_cmd)
exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
@mock.patch.object(processutils, 'ssh_execute')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test__get_power_status_on(self, get_hosts_name_mock, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
exec_ssh_mock.return_value = (
'"NodeName" {b43c4982-110c-4c29-9325-d5f41b053513}', '')
get_hosts_name_mock.return_value = "NodeName"
pstate = ssh._get_power_status(self.sshclient, info)
ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['list_running'])
self.assertEqual(states.POWER_ON, pstate)
exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
@mock.patch.object(processutils, 'ssh_execute')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test__get_power_status_off(self, get_hosts_name_mock, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
exec_ssh_mock.return_value = (
'"NodeName" {b43c4982-110c-4c29-9325-d5f41b053513}', '')
get_hosts_name_mock.return_value = "NotNodeName"
pstate = ssh._get_power_status(self.sshclient, info)
ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['list_running'])
self.assertEqual(states.POWER_OFF, pstate)
exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
@mock.patch.object(processutils, 'ssh_execute')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test__get_power_status_error(self, get_hosts_name_mock, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
exec_ssh_mock.return_value = (
'"NodeName" {b43c4982-110c-4c29-9325-d5f41b053513}', '')
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_hosts_name_mock.return_value = None
self.assertRaises(exception.NodeNotFound,
ssh._get_power_status,
self.sshclient,
info)
ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['list_running'])
exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
@mock.patch.object(processutils, 'ssh_execute')
def test__get_power_status_exception(self, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
exec_ssh_mock.side_effect = processutils.ProcessExecutionError
self.assertRaises(exception.SSHCommandFailed,
ssh._get_power_status,
self.sshclient,
info)
ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['list_running'])
exec_ssh_mock.assert_called_once_with(
self.sshclient, ssh_cmd)
@mock.patch.object(processutils, 'ssh_execute')
def test__get_hosts_name_for_node_match(self, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['list_all'])
cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['get_node_macs'])
cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
exec_ssh_mock.side_effect = [('NodeName', ''),
('52:54:00:cf:2d:31', '')]
expected = [mock.call(self.sshclient, ssh_cmd),
mock.call(self.sshclient, cmd_to_exec)]
found_name = ssh._get_hosts_name_for_node(self.sshclient, info)
self.assertEqual('NodeName', found_name)
self.assertEqual(expected, exec_ssh_mock.call_args_list)
@mock.patch.object(processutils, 'ssh_execute')
def test__get_hosts_name_for_node_no_match(self, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "22:22:22:22:22:22"]
exec_ssh_mock.side_effect = [('NodeName', ''),
('52:54:00:cf:2d:31', '')]
ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['list_all'])
cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['get_node_macs'])
cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
expected = [mock.call(self.sshclient, ssh_cmd),
mock.call(self.sshclient, cmd_to_exec)]
found_name = ssh._get_hosts_name_for_node(self.sshclient, info)
self.assertIsNone(found_name)
self.assertEqual(expected, exec_ssh_mock.call_args_list)
@mock.patch.object(processutils, 'ssh_execute')
def test__get_hosts_name_for_node_exception(self, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['list_all'])
cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['get_node_macs'])
cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
exec_ssh_mock.side_effect = [('NodeName', ''),
processutils.ProcessExecutionError]
expected = [mock.call(self.sshclient, ssh_cmd),
mock.call(self.sshclient, cmd_to_exec)]
self.assertRaises(exception.SSHCommandFailed,
ssh._get_hosts_name_for_node,
self.sshclient,
info)
self.assertEqual(expected, exec_ssh_mock.call_args_list)
@mock.patch.object(processutils, 'ssh_execute')
@mock.patch.object(ssh, '_get_power_status')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test__power_on_good(self, get_hosts_name_mock, get_power_status_mock,
exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_power_status_mock.side_effect = [states.POWER_OFF,
states.POWER_ON]
get_hosts_name_mock.return_value = "NodeName"
expected = [mock.call(self.sshclient, info),
mock.call(self.sshclient, info)]
cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['start_cmd'])
cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
current_state = ssh._power_on(self.sshclient, info)
self.assertEqual(states.POWER_ON, current_state)
self.assertEqual(expected, get_power_status_mock.call_args_list)
get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
@mock.patch.object(processutils, 'ssh_execute')
@mock.patch.object(ssh, '_get_power_status')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test__power_on_fail(self, get_hosts_name_mock, get_power_status_mock,
exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_power_status_mock.side_effect = [states.POWER_OFF,
states.POWER_OFF]
get_hosts_name_mock.return_value = "NodeName"
expected = [mock.call(self.sshclient, info),
mock.call(self.sshclient, info)]
cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['start_cmd'])
cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
current_state = ssh._power_on(self.sshclient, info)
self.assertEqual(states.ERROR, current_state)
self.assertEqual(expected, get_power_status_mock.call_args_list)
get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
@mock.patch.object(processutils, 'ssh_execute')
@mock.patch.object(ssh, '_get_power_status')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test__power_on_exception(self, get_hosts_name_mock,
get_power_status_mock, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
exec_ssh_mock.side_effect = processutils.ProcessExecutionError
get_power_status_mock.side_effect = [states.POWER_OFF,
states.POWER_ON]
get_hosts_name_mock.return_value = "NodeName"
cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['start_cmd'])
cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
self.assertRaises(exception.SSHCommandFailed,
ssh._power_on,
self.sshclient,
info)
get_power_status_mock.assert_called_once_with(self.sshclient, info)
get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
@mock.patch.object(processutils, 'ssh_execute')
@mock.patch.object(ssh, '_get_power_status')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test__power_off_good(self, get_hosts_name_mock,
get_power_status_mock, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_power_status_mock.side_effect = [states.POWER_ON,
states.POWER_OFF]
get_hosts_name_mock.return_value = "NodeName"
expected = [mock.call(self.sshclient, info),
mock.call(self.sshclient, info)]
cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['stop_cmd'])
cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
current_state = ssh._power_off(self.sshclient, info)
self.assertEqual(states.POWER_OFF, current_state)
self.assertEqual(expected, get_power_status_mock.call_args_list)
get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
@mock.patch.object(processutils, 'ssh_execute')
@mock.patch.object(ssh, '_get_power_status')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test__power_off_fail(self, get_hosts_name_mock,
get_power_status_mock, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_power_status_mock.side_effect = [states.POWER_ON,
states.POWER_ON]
get_hosts_name_mock.return_value = "NodeName"
expected = [mock.call(self.sshclient, info),
mock.call(self.sshclient, info)]
cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['stop_cmd'])
cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
current_state = ssh._power_off(self.sshclient, info)
self.assertEqual(states.ERROR, current_state)
self.assertEqual(expected, get_power_status_mock.call_args_list)
get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
@mock.patch.object(processutils, 'ssh_execute')
@mock.patch.object(ssh, '_get_power_status')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test__power_off_exception(self, get_hosts_name_mock,
get_power_status_mock, exec_ssh_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
exec_ssh_mock.side_effect = processutils.ProcessExecutionError
get_power_status_mock.side_effect = [states.POWER_ON,
states.POWER_OFF]
get_hosts_name_mock.return_value = "NodeName"
cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
info['cmd_set']['stop_cmd'])
cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
self.assertRaises(exception.SSHCommandFailed, ssh._power_off,
self.sshclient, info)
get_power_status_mock.assert_called_once_with(self.sshclient, info)
get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
def test_exec_ssh_command_good(self):
class Channel(object):
def recv_exit_status(self):
return 0
class Stream(object):
def __init__(self, buffer=''):
self.buffer = buffer
self.channel = Channel()
def read(self):
return self.buffer
def close(self):
pass
with mock.patch.object(self.sshclient, 'exec_command') \
as exec_command_mock:
exec_command_mock.return_value = (Stream(),
Stream('hello'),
Stream())
stdout, stderr = processutils.ssh_execute(self.sshclient,
"command")
self.assertEqual('hello', stdout)
exec_command_mock.assert_called_once_with("command")
def test_exec_ssh_command_fail(self):
class Channel(object):
def recv_exit_status(self):
return 127
class Stream(object):
def __init__(self, buffer=''):
self.buffer = buffer
self.channel = Channel()
def read(self):
return self.buffer
def close(self):
pass
with mock.patch.object(self.sshclient, 'exec_command') \
as exec_command_mock:
exec_command_mock.return_value = (Stream(),
Stream('hello'),
Stream())
self.assertRaises(processutils.ProcessExecutionError,
processutils.ssh_execute,
self.sshclient,
"command")
exec_command_mock.assert_called_once_with("command")
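# The command-construction pattern repeated throughout the tests above: each
# cmd_set entry is a shell fragment appended to base_cmd, with the literal
# placeholder '{_NodeName_}' substituted by the host-side VM name. A minimal
# sketch of that templating (hypothetical helper, not the driver's code):
def _render_cmd_sketch(cmd_set, key, node_name):
    """Build e.g. '<base_cmd> <start_cmd>' with the node name filled in."""
    cmd = "%s %s" % (cmd_set['base_cmd'], cmd_set[key])
    return cmd.replace('{_NodeName_}', node_name)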
class SSHDriverTestCase(db_base.DbTestCase):
def setUp(self):
super(SSHDriverTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake_ssh")
self.driver = driver_factory.get_driver("fake_ssh")
self.node = obj_utils.create_test_node(
self.context, driver='fake_ssh',
driver_info=db_utils.get_test_ssh_info())
self.dbapi = dbapi.get_instance()
self.port = obj_utils.create_test_port(self.context,
node_id=self.node.id)
self.sshclient = paramiko.SSHClient()
@mock.patch.object(utils, 'ssh_connect')
def test__validate_info_ssh_connect_failed(self, ssh_connect_mock):
info = ssh._parse_driver_info(self.node)
ssh_connect_mock.side_effect = exception.SSHConnectFailed(host='fake')
with task_manager.acquire(self.context, info['uuid'],
shared=False) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.power.validate, task)
driver_info = ssh._parse_driver_info(task.node)
ssh_connect_mock.assert_called_once_with(driver_info)
def test_get_properties(self):
expected = ssh.COMMON_PROPERTIES
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertEqual(expected, task.driver.power.get_properties())
self.assertEqual(expected, task.driver.get_properties())
self.assertEqual(expected, task.driver.management.get_properties())
def test_validate_fail_no_port(self):
new_node = obj_utils.create_test_node(
self.context,
id=321,
uuid='aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
driver='fake_ssh',
driver_info=db_utils.get_test_ssh_info())
with task_manager.acquire(self.context, new_node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.power.validate,
task)
@mock.patch.object(driver_utils, 'get_node_mac_addresses')
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_power_status')
@mock.patch.object(ssh, '_power_off')
@mock.patch.object(ssh, '_power_on')
def test_reboot_good(self, power_on_mock, power_off_mock,
get_power_stat_mock, get_conn_mock,
get_mac_addr_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_mac_addr_mock.return_value = info['macs']
get_conn_mock.return_value = self.sshclient
get_power_stat_mock.return_value = states.POWER_ON
power_off_mock.return_value = None
power_on_mock.return_value = states.POWER_ON
with mock.patch.object(ssh,
'_parse_driver_info') as parse_drv_info_mock:
parse_drv_info_mock.return_value = info
with task_manager.acquire(self.context, info['uuid'],
shared=False) as task:
task.driver.power.reboot(task)
parse_drv_info_mock.assert_called_once_with(task.node)
get_mac_addr_mock.assert_called_once_with(mock.ANY)
get_conn_mock.assert_called_once_with(task.node)
get_power_stat_mock.assert_called_once_with(self.sshclient,
info)
power_off_mock.assert_called_once_with(self.sshclient, info)
power_on_mock.assert_called_once_with(self.sshclient, info)
@mock.patch.object(driver_utils, 'get_node_mac_addresses')
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_power_status')
@mock.patch.object(ssh, '_power_off')
@mock.patch.object(ssh, '_power_on')
def test_reboot_fail(self, power_on_mock, power_off_mock,
get_power_stat_mock, get_conn_mock,
get_mac_addr_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_mac_addr_mock.return_value = info['macs']
get_conn_mock.return_value = self.sshclient
get_power_stat_mock.return_value = states.POWER_ON
power_off_mock.return_value = None
power_on_mock.return_value = states.POWER_OFF
with mock.patch.object(ssh,
'_parse_driver_info') as parse_drv_info_mock:
parse_drv_info_mock.return_value = info
with task_manager.acquire(self.context, info['uuid'],
shared=False) as task:
self.assertRaises(exception.PowerStateFailure,
task.driver.power.reboot, task)
parse_drv_info_mock.assert_called_once_with(task.node)
get_mac_addr_mock.assert_called_once_with(mock.ANY)
get_conn_mock.assert_called_once_with(task.node)
get_power_stat_mock.assert_called_once_with(self.sshclient,
info)
power_off_mock.assert_called_once_with(self.sshclient, info)
power_on_mock.assert_called_once_with(self.sshclient, info)
@mock.patch.object(driver_utils, 'get_node_mac_addresses')
@mock.patch.object(ssh, '_get_connection')
def test_set_power_state_bad_state(self, get_conn_mock,
get_mac_addr_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_mac_addr_mock.return_value = info['macs']
get_conn_mock.return_value = self.sshclient
with mock.patch.object(ssh,
'_parse_driver_info') as parse_drv_info_mock:
parse_drv_info_mock.return_value = info
with task_manager.acquire(self.context, info['uuid'],
shared=False) as task:
self.assertRaises(
exception.InvalidParameterValue,
task.driver.power.set_power_state,
task,
"BAD_PSTATE")
parse_drv_info_mock.assert_called_once_with(task.node)
get_mac_addr_mock.assert_called_once_with(mock.ANY)
get_conn_mock.assert_called_once_with(task.node)
@mock.patch.object(driver_utils, 'get_node_mac_addresses')
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_power_on')
def test_set_power_state_on_good(self, power_on_mock, get_conn_mock,
get_mac_addr_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_mac_addr_mock.return_value = info['macs']
get_conn_mock.return_value = self.sshclient
power_on_mock.return_value = states.POWER_ON
with mock.patch.object(ssh,
'_parse_driver_info') as parse_drv_info_mock:
parse_drv_info_mock.return_value = info
with task_manager.acquire(self.context, info['uuid'],
shared=False) as task:
task.driver.power.set_power_state(task, states.POWER_ON)
parse_drv_info_mock.assert_called_once_with(task.node)
get_mac_addr_mock.assert_called_once_with(mock.ANY)
get_conn_mock.assert_called_once_with(task.node)
power_on_mock.assert_called_once_with(self.sshclient, info)
@mock.patch.object(driver_utils, 'get_node_mac_addresses')
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_power_on')
def test_set_power_state_on_fail(self, power_on_mock, get_conn_mock,
get_mac_addr_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_mac_addr_mock.return_value = info['macs']
get_conn_mock.return_value = self.sshclient
power_on_mock.return_value = states.POWER_OFF
with mock.patch.object(ssh,
'_parse_driver_info') as parse_drv_info_mock:
parse_drv_info_mock.return_value = info
with task_manager.acquire(self.context, info['uuid'],
shared=False) as task:
self.assertRaises(
exception.PowerStateFailure,
task.driver.power.set_power_state,
task,
states.POWER_ON)
parse_drv_info_mock.assert_called_once_with(task.node)
get_mac_addr_mock.assert_called_once_with(mock.ANY)
get_conn_mock.assert_called_once_with(task.node)
power_on_mock.assert_called_once_with(self.sshclient, info)
@mock.patch.object(driver_utils, 'get_node_mac_addresses')
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_power_off')
def test_set_power_state_off_good(self, power_off_mock, get_conn_mock,
get_mac_addr_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_mac_addr_mock.return_value = info['macs']
get_conn_mock.return_value = self.sshclient
power_off_mock.return_value = states.POWER_OFF
with mock.patch.object(ssh,
'_parse_driver_info') as parse_drv_info_mock:
parse_drv_info_mock.return_value = info
with task_manager.acquire(self.context, info['uuid'],
shared=False) as task:
task.driver.power.set_power_state(task, states.POWER_OFF)
parse_drv_info_mock.assert_called_once_with(task.node)
get_mac_addr_mock.assert_called_once_with(mock.ANY)
get_conn_mock.assert_called_once_with(task.node)
power_off_mock.assert_called_once_with(self.sshclient, info)
@mock.patch.object(driver_utils, 'get_node_mac_addresses')
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_power_off')
def test_set_power_state_off_fail(self, power_off_mock, get_conn_mock,
get_mac_addr_mock):
info = ssh._parse_driver_info(self.node)
info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
get_mac_addr_mock.return_value = info['macs']
get_conn_mock.return_value = self.sshclient
power_off_mock.return_value = states.POWER_ON
with mock.patch.object(ssh,
'_parse_driver_info') as parse_drv_info_mock:
parse_drv_info_mock.return_value = info
with task_manager.acquire(self.context, info['uuid'],
shared=False) as task:
self.assertRaises(
exception.PowerStateFailure,
task.driver.power.set_power_state,
task,
states.POWER_OFF)
parse_drv_info_mock.assert_called_once_with(task.node)
get_mac_addr_mock.assert_called_once_with(mock.ANY)
get_conn_mock.assert_called_once_with(task.node)
power_off_mock.assert_called_once_with(self.sshclient, info)
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
@mock.patch.object(ssh, '_ssh_execute')
def test_management_interface_set_boot_device_vbox_ok(self, mock_exc,
mock_h,
mock_get_conn):
fake_name = 'fake-name'
mock_h.return_value = fake_name
mock_get_conn.return_value = self.sshclient
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node['driver_info']['ssh_virt_type'] = 'vbox'
self.driver.management.set_boot_device(task, boot_devices.PXE)
expected_cmd = ('/usr/bin/VBoxManage modifyvm %s '
'--boot1 net') % fake_name
mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
@mock.patch.object(ssh, '_ssh_execute')
def test_management_interface_set_boot_device_parallels_ok(self, mock_exc,
mock_h,
mock_get_conn):
fake_name = 'fake-name'
mock_h.return_value = fake_name
mock_get_conn.return_value = self.sshclient
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node['driver_info']['ssh_virt_type'] = 'parallels'
self.driver.management.set_boot_device(task, boot_devices.PXE)
expected_cmd = ('/usr/bin/prlctl set %s '
'--device-bootorder "net0"') % fake_name
mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
@mock.patch.object(ssh, '_ssh_execute')
def test_management_interface_set_boot_device_virsh_ok(self, mock_exc,
mock_h,
mock_get_conn):
fake_name = 'fake-name'
mock_h.return_value = fake_name
mock_get_conn.return_value = self.sshclient
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node['driver_info']['ssh_virt_type'] = 'virsh'
self.driver.management.set_boot_device(task, boot_devices.PXE)
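            # The expected command performs a non-interactive 'virsh edit':
            # EDITOR is set to a sed script that deletes any existing <boot>
            # element and inserts <boot dev="network"/> before </os>.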
expected_cmd = ('EDITOR="sed -i \'/<boot \\(dev\\|order\\)=*\\>'
'/d;/<\\/os>/i\\<boot dev=\\"network\\"/>\'" '
'/usr/bin/virsh --connect qemu:///system '
'edit %s') % fake_name
mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
def test_set_boot_device_bad_device(self):
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.InvalidParameterValue,
self.driver.management.set_boot_device,
task, 'invalid-device')
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test_set_boot_device_not_supported(self, mock_h, mock_get_conn):
mock_h.return_value = 'NodeName'
mock_get_conn.return_value = self.sshclient
with task_manager.acquire(self.context, self.node.uuid) as task:
# vmware does not support set_boot_device()
task.node['driver_info']['ssh_virt_type'] = 'vmware'
self.assertRaises(NotImplementedError,
self.driver.management.set_boot_device,
task, boot_devices.PXE)
def test_management_interface_get_supported_boot_devices(self):
with task_manager.acquire(self.context, self.node.uuid) as task:
expected = [boot_devices.PXE, boot_devices.DISK,
boot_devices.CDROM]
self.assertEqual(sorted(expected), sorted(task.driver.management.
get_supported_boot_devices()))
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
@mock.patch.object(ssh, '_ssh_execute')
def test_management_interface_get_boot_device_vbox(self, mock_exc,
mock_h,
mock_get_conn):
fake_name = 'fake-name'
mock_h.return_value = fake_name
mock_exc.return_value = ('net', '')
mock_get_conn.return_value = self.sshclient
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node['driver_info']['ssh_virt_type'] = 'vbox'
result = self.driver.management.get_boot_device(task)
self.assertEqual(boot_devices.PXE, result['boot_device'])
expected_cmd = ('/usr/bin/VBoxManage showvminfo --machinereadable %s '
'| awk -F \'"\' \'/boot1/{print $2}\'') % fake_name
mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
@mock.patch.object(ssh, '_ssh_execute')
def test_management_interface_get_boot_device_parallels(self, mock_exc,
mock_h,
mock_get_conn):
fake_name = 'fake-name'
mock_h.return_value = fake_name
mock_exc.return_value = ('net0', '')
mock_get_conn.return_value = self.sshclient
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node['driver_info']['ssh_virt_type'] = 'parallels'
result = self.driver.management.get_boot_device(task)
self.assertEqual(boot_devices.PXE, result['boot_device'])
expected_cmd = ('/usr/bin/prlctl list -i %s '
'| awk \'/^Boot order:/ {print $3}\'') % fake_name
mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
@mock.patch.object(ssh, '_ssh_execute')
def test_management_interface_get_boot_device_virsh(self, mock_exc,
mock_h,
mock_get_conn):
fake_name = 'fake-name'
mock_h.return_value = fake_name
mock_exc.return_value = ('network', '')
mock_get_conn.return_value = self.sshclient
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node['driver_info']['ssh_virt_type'] = 'virsh'
result = self.driver.management.get_boot_device(task)
self.assertEqual(boot_devices.PXE, result['boot_device'])
expected_cmd = ('/usr/bin/virsh --connect qemu:///system dumpxml '
'%s | awk \'/boot dev=/ { gsub( ".*dev=" Q, "" ); '
'gsub( Q ".*", "" ); print; }\' Q="\'" RS="[<>]" | '
'head -1') % fake_name
mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
@mock.patch.object(ssh, '_get_connection')
@mock.patch.object(ssh, '_get_hosts_name_for_node')
def test_get_boot_device_not_supported(self, mock_h, mock_get_conn):
mock_h.return_value = 'NodeName'
mock_get_conn.return_value = self.sshclient
with task_manager.acquire(self.context, self.node.uuid) as task:
# vmware does not support get_boot_device()
task.node['driver_info']['ssh_virt_type'] = 'vmware'
expected = {'boot_device': None, 'persistent': None}
self.assertEqual(expected,
self.driver.management.get_boot_device(task))
def test_management_interface_validate_good(self):
with task_manager.acquire(self.context, self.node.uuid) as task:
task.driver.management.validate(task)
def test_management_interface_validate_fail(self):
# Missing SSH driver_info information
node = obj_utils.create_test_node(self.context, id=2,
uuid=utils.generate_uuid(),
driver='fake_ssh')
with task_manager.acquire(self.context, node.uuid) as task:
self.assertRaises(exception.MissingParameterValue,
task.driver.management.validate, task)
|
{
"content_hash": "0f1951cdaed3bfd1cf5eedc660733a07",
"timestamp": "",
"source": "github",
"line_count": 936,
"max_line_length": 79,
"avg_line_length": 48.14316239316239,
"alnum_prop": 0.5698371133105499,
"repo_name": "faizan-barmawer/openstack_ironic",
"id": "d25f501d9ae1dade036e14c28e6d314b9287c094",
"size": "45719",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ironic/tests/drivers/test_ssh.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2168035"
}
],
"symlink_target": ""
}
|
import libsedml
from matplotlib import pyplot as plt
from matplotlib.collections import LineCollection
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
from random import gauss
## Base class for data description.
class Data:
## @var id
# A unique identifier.
## @var name
# Name of this object.
## @var type
# Type of output.
## @var workflow
# Reference to the WorkFlow object this belongs to.
  ## Constructor; either 'data' or 'idf' and 'typ' must be passed
## as keyword argument(s).
# @param self The object pointer.
# @param workflow A WorkFlow object.
# @param data A virtual libsedml element; optional (default: None).
# @param idf A unique identifier; optional (default: None).
# @param name A name for 'self'; optional (default: None).
# @param typ The type of data encoded by 'self'; can be either 'curve',
# 'surface' or 'dataSet'. Optional (default: None).
def __init__(self, workflow, data=None, idf=None, name=None, typ=None):
if data is None and (idf is None or typ is None):
raise RuntimeError("Either 'data' or 'idf' ad 'typ' must be passed " +
"as keyword argument(s).")
else:
self.workflow = workflow
if data is not None:
self.id = data.getId()
self.name = data.getName()
self.type = data.getElementName()
else:
self.id = idf
self.name = name
self.type = typ
## Getter. Returns self.id.
# @param self The object pointer.
def get_id(self):
return self.id
## Setter for self.id.
# @param self The object pointer.
# @param id New value for self.id.
def set_id(self, id):
self.id = id
## Getter. Returns self.name.
# @param self The object pointer.
def get_name(self):
return self.name
## Setter for self.name.
# @param self The object pointer.
# @param name New value for self.name.
def set_name(self, name):
self.name = name
## Getter. Returns self.workflow.
# @param self The object pointer.
def get_workflow(self):
return self.workflow
## Setter for self.workflow.
# @param self The object pointer.
# @param workflow A biopredyn.workflow.WorkFlow object.
def set_workflow(self, workflow):
self.workflow = workflow
## Getter. Returns self.type.
# @param self The object pointer.
# @return self.type
def get_type(self):
return self.type
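# Hedged construction sketch for the keyword-argument contract enforced in
# Data.__init__ above ('wf' stands for an existing WorkFlow instance; the
# identifier values are hypothetical):
#
#   d = Data(wf, idf='data_1', name='example data', typ='dataSet')
#   d.get_type()  # returns 'dataSet'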
## Data-derived class for N-dimensional data set description.
class DataSet(Data):
## @var data_id
# ID of a DataGenerator object.
## @var label
# A label for this.
## Overridden constructor; either 'data' or 'idf', 'lbl' and 'dg_ref' must be
## passed as keyword argument(s).
# @param self The object pointer.
# @param workflow A WorkFlow object.
# @param data A libsedml.SedDataSet element; optional (default: None).
# @param idf A unique identifier; optional (default: None).
# @param name A name for 'self'; optional (default: None).
# @param lbl An unambiguous label describing 'self'; optional (default:
# None).
# @param dg_ref Identifier of a biopredyn.datagenerator.DataGenerator object
# in 'workflow'; optional (default: None).
def __init__(self, workflow, data=None, idf=None, name=None, lbl=None,
dg_ref=None):
if data is None and (idf is None or lbl is None or dg_ref is None):
raise RuntimeError("Either 'data' or 'idf', 'lbl' and 'dg_ref' must be " +
"passed as keyword argument(s).")
else:
if data is not None:
Data.__init__(self, workflow, data=data)
self.label = data.getLabel()
self.data_id = data.getDataReference()
else:
Data.__init__(self, workflow, idf=idf, name=name, typ="dataSet")
self.label = lbl
self.data_id = dg_ref
## String representation of this. Displays it as a hierarchy.
# @param self The object pointer.
# @return A string representing this as a hierarchy.
def __str__(self):
tree = " |-" + self.type + " id=" + self.id + " label=" + self.label
tree += " dataReference=" + self.data_id + "\n"
return tree
## Getter. Returns self.label.
# @param self The object pointer.
# @return self.label
def get_label(self):
return self.label
## Setter for self.label.
# @param self The object pointer.
# @param label New value for self.label.
def set_label(self, label):
self.label = label
## Returns the number of experiments in the data generated by the associated
## biopredyn.datagenerator.DataGenerator object.
# @param self The object pointer.
# @return An integer.
def get_num_experiments(self):
return self.get_data_gen().get_num_experiments()
  ## Returns the number of time points in the data referenced by self.data_id.
  # @param self The object pointer.
  # @return An integer.
def get_number_of_points(self):
return self.get_data_gen().get_number_of_points()
## Returns the DataGenerator object of self.workflow which ID is self.data_id.
# @param self The object pointer.
# @return A DataGenerator object.
def get_data_gen(self):
return self.workflow.get_data_generator_by_id(self.data_id)
## Getter. Returns self.data_id.
# @param self The object pointer.
# @return self.data_id
def get_data_id(self):
return self.data_id
## Setter for self.data_id.
# @param self The object pointer.
# @param data_id New value for self.data_id.
def set_data_id(self, data_id):
self.data_id = data_id
## Returns the libsedml.SedDataSet representation of this.
# @param self The object pointer.
# @param level Level of SED-ML to be used.
# @param version Version of SED-ML to be used.
# @return A libsedml.SedDataSet object.
def to_sedml(self, level, version):
ds = libsedml.SedDataSet(level, version)
ds.setId(self.get_id())
if self.get_name() is not None:
ds.setName(str(self.get_name()))
ds.setLabel(self.get_label())
ds.setDataReference(self.get_data_id())
return ds
## Write the data encoded in the input Dimension object.
# Each data value is written in the composite value corresponding to its
# iteration and index. It is assumed that all the series have the same number
# of time points.
# @param self The object pointer.
# @param dim A Dimension instance.
# @param artificial Whether this report should be used to generate artificial
# data by adding noise to the non-time datasets. Default: False.
# @param noise_type The type of noise to be added to the datasets. Possible
# values are 'homoscedastic' (standard deviation of the noise is constant)
# and 'heteroscedastic' (standard deviation is proportional to the value of
# each data point). Default: 'heteroscedastic'.
# @param std_dev Standard deviation of the noise distribution (gaussian). If
# noise_type is 'homoscedastic', std_dev is the exact value of the standard
# deviation; if noise_type is 'heteroscedastic', std_dev is a percentage.
# Default: 0.1
def write_as_numl(self, dim, artificial, noise_type, std_dev):
data_gen = self.get_data_gen()
values = data_gen.get_values()
for v in range(data_gen.get_number_of_points()):
comp = dim.get(v)
      if 'time' in self.label.lower():
comp.setIndexValue(str(values[0][v]))
else:
# series level
series = comp.createCompositeValue()
series.setIndexValue(self.label)
# experiment level
for e in range(len(values)):
exp = series.createCompositeValue()
exp.setIndexValue(str(e))
value = exp.createAtomicValue()
if not artificial:
value.setValue(str(values[e][v]))
elif noise_type == 'heteroscedastic':
value.setValue(str(gauss(values[e][v], values[e][v] * std_dev)))
elif noise_type == 'homoscedastic':
value.setValue(str(gauss(values[e][v], std_dev)))
else:
raise ValueError("Invalid noise type: " + noise_type +
"\nExpected noise types are 'homoscedastic' or " +
"'heteroscedastic'.")
## Data-derived class for 2-dimensional data set description.
class Curve(Data):
## @var x_data_id
# ID of a DataGenerator object.
## @var y_data_id
# ID of a DataGenerator object.
## @var log_x
# Boolean value stating whether the scale of the data generated by x_data_ref
# is logarithmic.
## @var log_y
# Boolean value stating whether the scale of the data generated by y_data_ref
# is logarithmic.
## Overridden constructor; either 'curve' or 'idf', 'xid', 'yid', 'logx' and
## 'logy' must be passed as keyword argument(s).
# @param self The object pointer.
# @param workflow A WorkFlow object.
# @param curve A libsedml.SedCurve element; optional (default: None).
# @param idf A unique identifier; optional (default: None).
# @param name A name for 'self'; optional (default: None).
# @param xid Identifier of a biopredyn.datagenerator.DataGenerator object in
# 'workflow'; optional (default: None).
# @param yid Identifier of a biopredyn.datagenerator.DataGenerator object in
# 'workflow'; optional (default: None).
# @param logx Boolean stating whether the dimension encoded in x should be
# plotted on a logarithmic scale; optional (default: None).
# @param logy Boolean stating whether the dimension encoded in y should be
# plotted on a logarithmic scale; optional (default: None).
def __init__(self, workflow, curve=None, idf=None, name=None, xid=None,
yid=None, logx=None, logy=None):
if curve is None and (idf is None or xid is None or yid is None or logx is
None or logy is None):
raise RuntimeError("Either 'curve' or 'idf', 'xid', 'yid', 'logx' and " +
"'logy' must be passed as keyword argument(s).")
else:
if curve is not None:
Data.__init__(self, workflow, data=curve)
self.x_data_id = curve.getXDataReference()
self.y_data_id = curve.getYDataReference()
self.log_x = curve.getLogX()
self.log_y = curve.getLogY()
else:
Data.__init__(self, workflow, idf=idf, name=name, typ='curve')
self.x_data_id = xid
self.y_data_id = yid
self.log_x = logx
self.log_y = logy
## String representation of this. Displays it as a hierarchy.
# @param self The object pointer.
# @return A string representing this as a hierarchy.
def __str__(self):
tree = " |-" + self.type + " id=" + self.id + " name=" + self.name
tree += " xDataReference=" + self.x_data_id
tree += " yDataReference=" + self.y_data_id
tree += " logX=" + str(self.log_x)
tree += " logY=" + str(self.log_y) + "\n"
return tree
## Getter. Returns self.x_data_id.
# @param self The object pointer.
# @return self.x_data_id
def get_x_data_id(self):
return self.x_data_id
## Setter for self.x_data_id.
# @param self The object pointer.
# @param x_data_id New value for self.x_data_id.
def set_x_data_id(self, x_data_id):
self.x_data_id = x_data_id
## Returns the DataGenerator object of self.workflow which ID is
## self.x_data_id.
# @param self The object pointer.
# @return A DataGenerator object.
def get_x_data_gen(self):
return self.workflow.get_data_generator_by_id(self.x_data_id)
## Getter. Returns self.y_data_id.
# @param self The object pointer.
# @return self.y_data_id
def get_y_data_id(self):
return self.y_data_id
## Setter for self.y_data_id.
# @param self The object pointer.
  # @param y_data_id New value for self.y_data_id.
def set_y_data_id(self, y_data_id):
self.y_data_id = y_data_id
## Returns the DataGenerator object of self.workflow which ID is
## self.y_data_id.
# @param self The object pointer.
# @return A DataGenerator object.
def get_y_data_gen(self):
return self.workflow.get_data_generator_by_id(self.y_data_id)
## Getter. Returns self.log_x.
# @param self The object pointer.
  # @return self.log_x
def get_log_x(self):
return self.log_x
## Setter for self.log_x.
# @param self The object pointer.
# @param log_x New value for self.log_x.
def set_log_x(self, log_x):
self.log_x = log_x
## Getter. Returns self.log_y.
# @param self The object pointer.
  # @return self.log_y
def get_log_y(self):
return self.log_y
## Setter for self.log_y.
# @param self The object pointer.
# @param log_y New value for self.log_y.
def set_log_y(self, log_y):
self.log_y = log_y
## Plot the data encoded in this on the input plot object.
# @param self The object pointer.
# @param plot The matplotlib object on which this should be added.
# @param col A 3-tuple representing a RGB color.
def plot(self, plot, col):
# Set the scale of the plot
if self.log_x:
plot.xscale('log')
if self.log_y:
plot.yscale('log')
# Process the values
values = []
for x in self.get_x_data_gen().get_values():
for y in self.get_y_data_gen().get_values():
        values.append(list(zip(x, y)))  # list() keeps this valid on Python 3, where zip is lazy
lines = LineCollection(values)
lines.set_color(col)
lines.set_label(str(self.get_name()))
# Plot the values
plot.add_collection(lines)
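  # Standalone sketch of the LineCollection pattern used in plot() above
  # (assumes only matplotlib; the sample points are hypothetical):
  #
  #   fig, ax = plt.subplots()
  #   segments = [list(zip([0, 1, 2], [0, 1, 4]))]  # one (x, y) polyline
  #   ax.add_collection(LineCollection(segments, colors=[(1, 0, 0)]))
  #   ax.autoscale()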
## Returns the libsedml.SedCurve representation of this.
# @param self The object pointer.
# @param level Level of SED-ML to be used.
# @param version Version of SED-ML to be used.
# @return A libsedml.SedCurve object.
def to_sedml(self, level, version):
crv = libsedml.SedCurve(level, version)
crv.setId(self.get_id())
if self.get_name() is not None:
crv.setName(str(self.get_name()))
crv.setXDataReference(self.get_x_data_id())
crv.setLogX(self.get_log_x())
crv.setYDataReference(self.get_y_data_id())
crv.setLogY(self.get_log_y())
return crv
## Data-derived class for 3-dimensional data set description.
class Surface(Data):
## @var x_data_id
# ID of a DataGenerator object.
## @var y_data_id
# ID of a DataGenerator object.
## @var z_data_id
# ID of a DataGenerator object.
## @var log_x
# Boolean value stating whether the scale of the data generated by x_data_ref
# is logarithmic.
## @var log_y
# Boolean value stating whether the scale of the data generated by y_data_ref
# is logarithmic.
## @var log_z
# Boolean value stating whether the scale of the data generated by z_data_ref
# is logarithmic.
## Overridden constructor; either 'surf' or 'idf', 'xid', 'yid', 'zid',
## 'logx', 'logy' and 'logz' must be passed as keyword argument(s).
# @param self The object pointer.
# @param workflow A WorkFlow object.
# @param surf A libsedml.SedSurface element; optional (default: None).
# @param idf A unique identifier; optional (default: None).
# @param name A name for 'self'; optional (default: None).
# @param xid Identifier of a biopredyn.datagenerator.DataGenerator object in
# 'workflow'; optional (default: None).
# @param yid Identifier of a biopredyn.datagenerator.DataGenerator object in
# 'workflow'; optional (default: None).
# @param zid Identifier of a biopredyn.datagenerator.DataGenerator object in
# 'workflow'; optional (default: None).
# @param logx Boolean stating whether the dimension encoded in x should be
# plotted on a logarithmic scale; optional (default: None).
# @param logy Boolean stating whether the dimension encoded in y should be
# plotted on a logarithmic scale; optional (default: None).
# @param logz Boolean stating whether the dimension encoded in z should be
# plotted on a logarithmic scale; optional (default: None).
def __init__(self, workflow, surf=None, idf=None, name=None, xid=None,
yid=None, zid=None, logx=None, logy=None, logz=None):
if surf is None and (idf is None or xid is None or yid is None or zid is
None or logx is None or logy is None or logz is None):
raise RuntimeError("Either 'surf' or 'idf', 'xid', 'yid', 'zid', " +
"'logx', 'logy' and 'logz' must be passed as keyword argument(s).")
else:
if surf is not None:
Data.__init__(self, workflow, data=surf)
self.x_data_id = surf.getXDataReference()
self.y_data_id = surf.getYDataReference()
self.z_data_id = surf.getZDataReference()
self.log_x = surf.getLogX()
self.log_y = surf.getLogY()
self.log_z = surf.getLogZ()
else:
Data.__init__(self, workflow, idf=idf, name=name, typ='surface')
self.x_data_id = xid
self.y_data_id = yid
self.z_data_id = zid
self.log_x = logx
self.log_y = logy
self.log_z = logz
## String representation of this. Displays it as a hierarchy.
# @param self The object pointer.
# @return A string representing this as a hierarchy.
def __str__(self):
tree = " |-" + self.type + " id=" + self.id + " name=" + self.name
tree += " xDataReference=" + self.x_data_id
tree += " yDataReference=" + self.y_data_id
tree += " zDataReference=" + self.z_data_id
tree += " logX=" + str(self.log_x)
tree += " logY=" + str(self.log_y)
tree += " logZ=" + str(self.log_z) + "\n"
return tree
## Returns the DataGenerator object of self.workflow which ID is
## self.x_data_id.
# @param self The object pointer.
# @return A DataGenerator object.
def get_x_data_gen(self):
return self.workflow.get_data_generator_by_id(self.x_data_id)
## Getter. Returns self.x_data_id.
# @param self The object pointer.
# @return self.x_data_id
def get_x_data_id(self):
return self.x_data_id
## Returns the DataGenerator object of self.workflow which ID is
## self.y_data_id.
# @param self The object pointer.
# @return A DataGenerator object.
def get_y_data_gen(self):
return self.workflow.get_data_generator_by_id(self.y_data_id)
## Getter. Returns self.y_data_id.
# @param self The object pointer.
# @return self.y_data_id
def get_y_data_id(self):
return self.y_data_id
## Returns the DataGenerator object of self.workflow which ID is
## self.z_data_id.
# @param self The object pointer.
# @return A DataGenerator object.
def get_z_data_gen(self):
return self.workflow.get_data_generator_by_id(self.z_data_id)
## Getter. Returns self.z_data_id.
# @param self The object pointer.
# @return self.z_data_id
def get_z_data_id(self):
return self.z_data_id
## Getter. Returns self.log_x.
# @param self The object pointer.
  # @return self.log_x
def get_log_x(self):
return self.log_x
## Getter. Returns self.log_y.
# @param self The object pointer.
  # @return self.log_y
def get_log_y(self):
return self.log_y
## Getter. Returns self.log_z.
# @param self The object pointer.
  # @return self.log_z
def get_log_z(self):
return self.log_z
## Plot the data encoded in this on the input plot object.
# @param self The object pointer.
# @param plot The matplotlib object on which this should be added.
def plot(self, plot):
# Set the scale of the plot
if self.log_x:
plot.xscale('log')
if self.log_y:
plot.yscale('log')
    if self.log_z:
      plot.zscale('log')
# Plot the values
x = np.array(self.get_x_data_gen().get_values())
y = np.array(self.get_y_data_gen().get_values())
z = np.array(self.get_z_data_gen().get_values())
plot.scatter(x, y, zs=z)
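  # Standalone sketch of the 3D scatter call used in plot() above (assumes a
  # mplot3d-enabled axes object; the sample arrays are hypothetical):
  #
  #   fig = plt.figure()
  #   ax = fig.add_subplot(111, projection='3d')
  #   ax.scatter(np.arange(5), np.arange(5), zs=np.arange(5))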
## Setter for self.log_x.
# @param self The object pointer.
# @param log_x New value for self.log_x.
def set_log_x(self, log_x):
self.log_x = log_x
## Setter for self.log_y.
# @param self The object pointer.
# @param log_y New value for self.log_y.
def set_log_y(self, log_y):
self.log_y = log_y
## Setter for self.log_z.
# @param self The object pointer.
# @param log_z New value for self.log_z.
def set_log_z(self, log_z):
self.log_z = log_z
## Setter for self.x_data_id.
# @param self The object pointer.
# @param x_data_id New value for self.x_data_id.
def set_x_data_id(self, x_data_id):
self.x_data_id = x_data_id
## Setter for self.y_data_id.
# @param self The object pointer.
# @param y_data_id New value for self.y_data_id.
def set_y_data_id(self, y_data_id):
self.y_data_id = y_data_id
## Setter for self.z_data_id.
# @param self The object pointer.
# @param z_data_id New value for self.z_data_id.
def set_z_data_id(self, z_data_id):
self.z_data_id = z_data_id
## Returns the libsedml.SedSurface representation of this.
# @param self The object pointer.
# @param level Level of SED-ML to be used.
# @param version Version of SED-ML to be used.
# @return A libsedml.SedSurface object.
def to_sedml(self, level, version):
srf = libsedml.SedSurface(level, version)
srf.setId(self.get_id())
if self.get_name() is not None:
srf.setName(str(self.get_name()))
srf.setXDataReference(self.get_x_data_id())
srf.setLogX(self.get_log_x())
srf.setYDataReference(self.get_y_data_id())
srf.setLogY(self.get_log_y())
srf.setZDataReference(self.get_z_data_id())
srf.setLogZ(self.get_log_z())
return srf
|
{
"content_hash": "96a44d4bda31d6bfbe779d06fb5de0d5",
"timestamp": "",
"source": "github",
"line_count": 581,
"max_line_length": 80,
"avg_line_length": 36.33390705679862,
"alnum_prop": 0.6550923732828043,
"repo_name": "TheCoSMoCompany/biopredyn",
"id": "c0e5cbd18d4279767d0b6f0952164442bd0c9010",
"size": "21254",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Prototype/python/biopredyn/signals.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3535918"
},
{
"name": "C++",
"bytes": "26120778"
},
{
"name": "CMake",
"bytes": "455400"
},
{
"name": "CSS",
"bytes": "49020"
},
{
"name": "Gnuplot",
"bytes": "206"
},
{
"name": "HTML",
"bytes": "193068"
},
{
"name": "Java",
"bytes": "66517"
},
{
"name": "JavaScript",
"bytes": "3847"
},
{
"name": "Makefile",
"bytes": "30905"
},
{
"name": "Perl",
"bytes": "3018"
},
{
"name": "Python",
"bytes": "7891301"
},
{
"name": "Shell",
"bytes": "247654"
},
{
"name": "TeX",
"bytes": "22566"
},
{
"name": "XSLT",
"bytes": "55564"
}
],
"symlink_target": ""
}
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
# Warning: This code was generated by a tool.
#
# Changes to this file may cause incorrect behavior and will be lost if the
# code is regenerated.
import json
import re
from requests import Session, Request
import time
try:
from urllib import quote, unquote
except ImportError:
from urllib.parse import quote, unquote
from azure.common import AzureHttpError
from azure.mgmt.common import AzureOperationResponse, OperationStatusResponse, OperationStatus, Service
from azure.mgmt.common.arm import ResourceBase, ResourceBaseExtended
class ComputeLongRunningOperationResponse(AzureOperationResponse):
"""
The Compute service response for long-running operations.
"""
def __init__(self, **kwargs):
super(ComputeLongRunningOperationResponse, self).__init__(**kwargs)
self._tracking_operation_id = kwargs.get('tracking_operation_id')
self._status = kwargs.get('status')
self._start_time = kwargs.get('start_time')
self._end_time = kwargs.get('end_time')
self._output = kwargs.get('output')
self._error = kwargs.get('error')
@property
def end_time(self):
"""
Gets the operation end time
"""
return self._end_time
@end_time.setter
def end_time(self, value):
self._end_time = value
@property
def error(self):
"""
Gets the operation error, if any occurred.
"""
return self._error
@error.setter
def error(self, value):
self._error = value
@property
def output(self):
"""
Operation output data (raw JSON)
"""
return self._output
@output.setter
def output(self, value):
self._output = value
@property
def start_time(self):
"""
Gets the operation start time
"""
return self._start_time
@start_time.setter
def start_time(self, value):
self._start_time = value
@property
def status(self):
"""
Gets the operation status.
"""
return self._status
@status.setter
def status(self, value):
self._status = value
@property
def tracking_operation_id(self):
"""
Gets the operation identifier.
"""
return self._tracking_operation_id
@tracking_operation_id.setter
def tracking_operation_id(self, value):
self._tracking_operation_id = value
class AvailabilitySetGetResponse(AzureOperationResponse):
"""
GET Availability Set operation response.
"""
def __init__(self, **kwargs):
super(AvailabilitySetGetResponse, self).__init__(**kwargs)
self._availability_set = kwargs.get('availability_set')
@property
def availability_set(self):
"""
Gets or sets the details of the Availability Set.
"""
return self._availability_set
@availability_set.setter
def availability_set(self, value):
self._availability_set = value
class AvailabilitySetListResponse(AzureOperationResponse):
"""
The List Availability Set operation response.
"""
def __init__(self, **kwargs):
super(AvailabilitySetListResponse, self).__init__(**kwargs)
self._availability_sets = kwargs.get('availability_sets')
@property
def availability_sets(self):
"""
Gets or sets the list of availability sets
"""
return self._availability_sets
@availability_sets.setter
def availability_sets(self, value):
self._availability_sets = value
class VirtualMachineSizeListResponse(AzureOperationResponse):
"""
The List Virtual Machine operation response.
"""
def __init__(self, **kwargs):
super(VirtualMachineSizeListResponse, self).__init__(**kwargs)
self._virtual_machine_sizes = kwargs.get('virtual_machine_sizes')
@property
def virtual_machine_sizes(self):
"""
Gets or sets the list of virtual machine sizes.
"""
return self._virtual_machine_sizes
@virtual_machine_sizes.setter
def virtual_machine_sizes(self, value):
self._virtual_machine_sizes = value
class AvailabilitySetCreateOrUpdateResponse(AzureOperationResponse):
"""
The Create Availability Set operation response.
"""
def __init__(self, **kwargs):
super(AvailabilitySetCreateOrUpdateResponse, self).__init__(**kwargs)
self._availability_set = kwargs.get('availability_set')
@property
def availability_set(self):
"""
Gets or sets the details of the Availability Set.
"""
return self._availability_set
@availability_set.setter
def availability_set(self, value):
self._availability_set = value
class AvailabilitySet(ResourceBaseExtended):
"""
Create or update Availability Set parameters.
"""
def __init__(self, **kwargs):
super(AvailabilitySet, self).__init__(**kwargs)
self._platform_update_domain_count = kwargs.get('platform_update_domain_count')
self._platform_fault_domain_count = kwargs.get('platform_fault_domain_count')
self._virtual_machines_references = kwargs.get('virtual_machines_references')
self._statuses = kwargs.get('statuses')
@property
def platform_fault_domain_count(self):
"""
Gets or sets Fault Domain count.
"""
return self._platform_fault_domain_count
@platform_fault_domain_count.setter
def platform_fault_domain_count(self, value):
self._platform_fault_domain_count = value
@property
def platform_update_domain_count(self):
"""
Gets or sets Update Domain count.
"""
return self._platform_update_domain_count
@platform_update_domain_count.setter
def platform_update_domain_count(self, value):
self._platform_update_domain_count = value
@property
def statuses(self):
"""
Gets or sets the resource status information.
"""
return self._statuses
@statuses.setter
def statuses(self, value):
self._statuses = value
@property
def virtual_machines_references(self):
"""
Gets or sets a list containing reference to all Virtual Machines
created under this Availability Set.
"""
return self._virtual_machines_references
@virtual_machines_references.setter
def virtual_machines_references(self, value):
self._virtual_machines_references = value
class VirtualMachineImageGetResponse(AzureOperationResponse):
"""
The get vm image operation response.
"""
def __init__(self, **kwargs):
super(VirtualMachineImageGetResponse, self).__init__(**kwargs)
self._virtual_machine_image = kwargs.get('virtual_machine_image')
@property
def virtual_machine_image(self):
"""
Gets the details of the Virtual Machine Image.
"""
return self._virtual_machine_image
@virtual_machine_image.setter
def virtual_machine_image(self, value):
self._virtual_machine_image = value
class VirtualMachineImageListPublishersParameters(object):
"""
Contains the parameters required to list publishers.
"""
def __init__(self, **kwargs):
self._location = kwargs.get('location')
@property
def location(self):
"""
Location of the PIR, used for orchestration and required by CSM to
        direct the request to the appropriate PIR region. Example: westus, eastus.
"""
return self._location
@location.setter
def location(self, value):
self._location = value
class VirtualMachineImageListOffersParameters(VirtualMachineImageListPublishersParameters):
"""
Contains the parameters required to list offers.
"""
def __init__(self, **kwargs):
super(VirtualMachineImageListOffersParameters, self).__init__(**kwargs)
self._publisher_name = kwargs.get('publisher_name')
@property
def publisher_name(self):
"""
Publisher identifier. For example: Microsoft.Windows or Canonical.
"""
return self._publisher_name
@publisher_name.setter
def publisher_name(self, value):
self._publisher_name = value
class VirtualMachineImageListSkusParameters(VirtualMachineImageListOffersParameters):
"""
Contains the parameters required to list skus.
"""
def __init__(self, **kwargs):
super(VirtualMachineImageListSkusParameters, self).__init__(**kwargs)
self._offer = kwargs.get('offer')
@property
def offer(self):
"""
Unique identifier to distinguish an image. The vmImageName must be
        unique within the publisher's namespace. Example: WindowsServer2012
"""
return self._offer
@offer.setter
def offer(self, value):
self._offer = value
class VirtualMachineImageListDetailsParameters(VirtualMachineImageListSkusParameters):
"""
Contains the parameters required to list virtual machine images with
details.
"""
def __init__(self, **kwargs):
super(VirtualMachineImageListDetailsParameters, self).__init__(**kwargs)
self._skus = kwargs.get('skus')
@property
def skus(self):
"""
        A distinct identifier for an item within the image name. For example:
DataCenter or Enterprise
"""
return self._skus
@skus.setter
def skus(self, value):
self._skus = value
class VirtualMachineImageGetParameters(VirtualMachineImageListDetailsParameters):
"""
Contains the parameters required to get a virtual machine image.
"""
def __init__(self, **kwargs):
super(VirtualMachineImageGetParameters, self).__init__(**kwargs)
self._version = kwargs.get('version')
@property
def version(self):
"""
        Unique version number that distinguishes each monthly release of this
        product. The allowed characters are digits and periods. Format:
        <MajorVersion>.<MinorVersion>. Example: 1.0.0 or 1.1.0
"""
return self._version
@version.setter
def version(self, value):
self._version = value
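# A hedged construction sketch for the parameter-class chain ending above;
# each kwarg maps to a property defined somewhere in the hierarchy (the
# values below are illustrative only):
#
#   params = VirtualMachineImageGetParameters(
#       location='westus',
#       publisher_name='Canonical',
#       offer='UbuntuServer',
#       skus='14.04.2-LTS',
#       version='1.0.0')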
class VirtualMachineImageResourceList(AzureOperationResponse):
"""
A list of virtual machine image resource information.
"""
def __init__(self, **kwargs):
super(VirtualMachineImageResourceList, self).__init__(**kwargs)
self._resources = kwargs.get('resources')
@property
def resources(self):
"""
Gets a list of virtual machine image resources.
"""
return self._resources
@resources.setter
def resources(self, value):
self._resources = value
class VirtualMachineImageListParameters(VirtualMachineImageListDetailsParameters):
"""
Contains the parameters required to list virtual machine image versions.
"""
def __init__(self, **kwargs):
super(VirtualMachineImageListParameters, self).__init__(**kwargs)
self._filter_expression = kwargs.get('filter_expression')
@property
def filter_expression(self):
"""
        OData filter expression
        (https://msdn.microsoft.com/en-us/library/hh169248(v=nav.70).aspx).
        Supported operators: eq, startswith. Examples: to list all versions
        that begin with 1.0, $filter=startswith(name, '1.0'); to get the
        latest version, $filter=name eq 'latest'.
"""
return self._filter_expression
@filter_expression.setter
def filter_expression(self, value):
self._filter_expression = value
class VirtualMachineExtensionGetResponse(AzureOperationResponse):
"""
The Get VM-Extension operation response.
"""
def __init__(self, **kwargs):
super(VirtualMachineExtensionGetResponse, self).__init__(**kwargs)
self._virtual_machine_extension = kwargs.get('virtual_machine_extension')
@property
def virtual_machine_extension(self):
"""
Gets or sets details of the Virtual Machine extension.
"""
return self._virtual_machine_extension
@virtual_machine_extension.setter
def virtual_machine_extension(self, value):
self._virtual_machine_extension = value
class ComputeOperationResponse(AzureOperationResponse):
"""
The compute long running operation response.
"""
def __init__(self, **kwargs):
super(ComputeOperationResponse, self).__init__(**kwargs)
self._azure_async_operation = kwargs.get('azure_async_operation')
@property
def azure_async_operation(self):
"""
        Gets or sets the Azure Async Operation URI.
"""
return self._azure_async_operation
@azure_async_operation.setter
def azure_async_operation(self, value):
self._azure_async_operation = value
class VirtualMachineExtensionCreateOrUpdateResponse(ComputeOperationResponse):
"""
The compute long running operation response.
"""
def __init__(self, **kwargs):
super(VirtualMachineExtensionCreateOrUpdateResponse, self).__init__(**kwargs)
self._virtual_machine_extension = kwargs.get('virtual_machine_extension')
@property
def virtual_machine_extension(self):
"""
Gets or sets details of the Virtual Machine Extension.
"""
return self._virtual_machine_extension
@virtual_machine_extension.setter
def virtual_machine_extension(self, value):
self._virtual_machine_extension = value
class VirtualMachineExtension(ResourceBaseExtended):
"""
Describes a Virtual Machine Extension.
"""
def __init__(self, **kwargs):
super(VirtualMachineExtension, self).__init__(**kwargs)
self._publisher = kwargs.get('publisher')
self._extension_type = kwargs.get('extension_type')
self._type_handler_version = kwargs.get('type_handler_version')
self._auto_upgrade_minor_version = kwargs.get('auto_upgrade_minor_version')
self._settings = kwargs.get('settings')
self._protected_settings = kwargs.get('protected_settings')
self._provisioning_state = kwargs.get('provisioning_state')
self._instance_view = kwargs.get('instance_view')
@property
def auto_upgrade_minor_version(self):
"""
Gets or sets whether the extension handler should be automatically
upgraded across minor versions.
"""
return self._auto_upgrade_minor_version
@auto_upgrade_minor_version.setter
def auto_upgrade_minor_version(self, value):
self._auto_upgrade_minor_version = value
@property
def extension_type(self):
"""
Gets or sets the type of the extension handler.
"""
return self._extension_type
@extension_type.setter
def extension_type(self, value):
self._extension_type = value
@property
def instance_view(self):
"""
Gets or sets the virtual machine extension instance view.
"""
return self._instance_view
@instance_view.setter
def instance_view(self, value):
self._instance_view = value
@property
def protected_settings(self):
"""
Gets or sets Json formatted protected settings for the extension.
"""
return self._protected_settings
@protected_settings.setter
def protected_settings(self, value):
self._protected_settings = value
@property
def provisioning_state(self):
"""
Gets or sets the provisioning state, which only appears in the
response.
"""
return self._provisioning_state
@provisioning_state.setter
def provisioning_state(self, value):
self._provisioning_state = value
@property
def publisher(self):
"""
Gets or sets the name of the extension handler publisher.
"""
return self._publisher
@publisher.setter
def publisher(self, value):
self._publisher = value
@property
def settings(self):
"""
Gets or sets Json formatted public settings for the extension.
"""
return self._settings
@settings.setter
def settings(self, value):
self._settings = value
@property
def type_handler_version(self):
"""
Gets or sets the type version of the extension handler.
"""
return self._type_handler_version
@type_handler_version.setter
def type_handler_version(self, value):
self._type_handler_version = value
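# Hedged construction sketch for the extension property bag above (all values
# are illustrative; 'settings' is JSON-formatted per the property docstrings):
#
#   ext = VirtualMachineExtension(
#       publisher='Example.Publisher',
#       extension_type='ExampleExtension',
#       type_handler_version='1.0',
#       auto_upgrade_minor_version=True,
#       settings='{"commandToExecute": "hostname"}')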
class VirtualMachineExtensionImageGetResponse(AzureOperationResponse):
"""
The get virtual machine extension image operation response.
"""
def __init__(self, **kwargs):
super(VirtualMachineExtensionImageGetResponse, self).__init__(**kwargs)
self._virtual_machine_extension_image = kwargs.get('virtual_machine_extension_image')
@property
def virtual_machine_extension_image(self):
"""
Gets the details of the Virtual Machine Extension Image.
"""
return self._virtual_machine_extension_image
@virtual_machine_extension_image.setter
def virtual_machine_extension_image(self, value):
self._virtual_machine_extension_image = value
class VirtualMachineExtensionImageListTypesParameters(object):
"""
    Contains the parameters required to list virtual machine extension image
types.
"""
def __init__(self, **kwargs):
self._location = kwargs.get('location')
self._publisher_name = kwargs.get('publisher_name')
@property
def location(self):
"""
Location of the PIR, used for orchestration and required by CSM to
        direct the request to the appropriate PIR region. Example: westus, eastus.
"""
return self._location
@location.setter
def location(self, value):
self._location = value
@property
def publisher_name(self):
"""
Publisher identifier. For example: Microsoft.Windows or Canonical.
"""
return self._publisher_name
@publisher_name.setter
def publisher_name(self, value):
self._publisher_name = value
class VirtualMachineExtensionImageListVersionsParameters(VirtualMachineExtensionImageListTypesParameters):
"""
    Contains the parameters required to list virtual machine extension image
versions.
"""
def __init__(self, **kwargs):
super(VirtualMachineExtensionImageListVersionsParameters, self).__init__(**kwargs)
self._type = kwargs.get('type')
self._filter_expression = kwargs.get('filter_expression')
@property
def filter_expression(self):
"""
        OData filter expression
        (https://msdn.microsoft.com/en-us/library/hh169248(v=nav.70).aspx).
        Supported operators: eq, startswith. Examples: to list all versions
        that begin with 1.0, $filter=startswith(name, '1.0'); to get the
        latest version, $filter=name eq 'latest'.
"""
return self._filter_expression
@filter_expression.setter
def filter_expression(self, value):
self._filter_expression = value
@property
def type(self):
"""
Unique (across the publisher) identifier to distinguish an extension
        for this publisher. Example: 'BGInfo' or 'VMAccess'. The allowed
        characters are uppercase or lowercase letters, digits, hyphens (-),
        and periods (.). A dot or hyphen is not allowed at the end of the
        value. Max length is 64.
"""
return self._type
@type.setter
def type(self, value):
self._type = value
class VirtualMachineExtensionImageGetParameters(VirtualMachineExtensionImageListVersionsParameters):
"""
    Contains the parameters required to get a virtual machine extension image.
"""
def __init__(self, **kwargs):
super(VirtualMachineExtensionImageGetParameters, self).__init__(**kwargs)
self._version = kwargs.get('version')
@property
def version(self):
"""
        Unique version number that distinguishes each monthly release of this
        product. The allowed characters are digits and periods. Format:
        <MajorVersion>.<MinorVersion>. Example: 1.0.0 or 1.1.0
"""
return self._version
@version.setter
def version(self, value):
self._version = value
class ListUsagesResponse(AzureOperationResponse):
"""
The List Usages operation response.
"""
def __init__(self, **kwargs):
super(ListUsagesResponse, self).__init__(**kwargs)
self._usages = kwargs.get('usages')
@property
def usages(self):
"""
Gets or sets the list Compute Resource Usages.
"""
return self._usages
@usages.setter
def usages(self, value):
self._usages = value
class VirtualMachineCaptureParameters(object):
"""
Capture Virtual Machine parameters.
"""
def __init__(self, **kwargs):
self._virtual_hard_disk_name_prefix = kwargs.get('virtual_hard_disk_name_prefix')
self._destination_container_name = kwargs.get('destination_container_name')
self._overwrite = kwargs.get('overwrite')
@property
def destination_container_name(self):
"""
Gets or sets the destination container name.
"""
return self._destination_container_name
@destination_container_name.setter
def destination_container_name(self, value):
self._destination_container_name = value
@property
def overwrite(self):
"""
Gets or sets whether it overwrites destination VirtualHardDisk if
true, in case of conflict.
"""
return self._overwrite
@overwrite.setter
def overwrite(self, value):
self._overwrite = value
@property
def virtual_hard_disk_name_prefix(self):
"""
Gets or sets the captured VirtualHardDisk's name prefix.
"""
return self._virtual_hard_disk_name_prefix
@virtual_hard_disk_name_prefix.setter
def virtual_hard_disk_name_prefix(self, value):
self._virtual_hard_disk_name_prefix = value
class VirtualMachineCreateOrUpdateResponse(ComputeOperationResponse):
"""
The Create Virtual Machine operation response.
"""
def __init__(self, **kwargs):
super(VirtualMachineCreateOrUpdateResponse, self).__init__(**kwargs)
self._virtual_machine = kwargs.get('virtual_machine')
@property
def virtual_machine(self):
"""
Gets or sets details of the Virtual Machine.
"""
return self._virtual_machine
@virtual_machine.setter
def virtual_machine(self, value):
self._virtual_machine = value
class VirtualMachine(ResourceBaseExtended):
"""
Describes a Virtual Machine.
"""
def __init__(self, **kwargs):
super(VirtualMachine, self).__init__(**kwargs)
self._plan = kwargs.get('plan')
self._hardware_profile = kwargs.get('hardware_profile')
self._storage_profile = kwargs.get('storage_profile')
self._os_profile = kwargs.get('os_profile')
self._network_profile = kwargs.get('network_profile')
self._availability_set_reference = kwargs.get('availability_set_reference')
self._provisioning_state = kwargs.get('provisioning_state')
self._instance_view = kwargs.get('instance_view')
self._extensions = kwargs.get('extensions')
@property
def availability_set_reference(self):
"""
        Gets or sets the reference ID of the availability set to which this
virtual machine belongs.
"""
return self._availability_set_reference
@availability_set_reference.setter
def availability_set_reference(self, value):
self._availability_set_reference = value
@property
def extensions(self):
"""
Gets the virtual machine child extension resources.
"""
return self._extensions
@extensions.setter
def extensions(self, value):
self._extensions = value
@property
def hardware_profile(self):
"""
Gets or sets the hardware profile.
"""
return self._hardware_profile
@hardware_profile.setter
def hardware_profile(self, value):
self._hardware_profile = value
@property
def instance_view(self):
"""
Gets the virtual machine instance view.
"""
return self._instance_view
@instance_view.setter
def instance_view(self, value):
self._instance_view = value
@property
def network_profile(self):
"""
Gets or sets the network profile.
"""
return self._network_profile
@network_profile.setter
def network_profile(self, value):
self._network_profile = value
@property
def os_profile(self):
"""
Gets or sets the OS profile.
"""
return self._os_profile
@os_profile.setter
def os_profile(self, value):
self._os_profile = value
@property
def plan(self):
"""
        Gets or sets the purchase plan when deploying a virtual machine from VM
Marketplace images.
"""
return self._plan
@plan.setter
def plan(self, value):
self._plan = value
@property
def provisioning_state(self):
"""
Gets or sets the provisioning state, which only appears in the
response.
"""
return self._provisioning_state
@provisioning_state.setter
def provisioning_state(self, value):
self._provisioning_state = value
@property
def storage_profile(self):
"""
Gets or sets the storage profile.
"""
return self._storage_profile
@storage_profile.setter
def storage_profile(self, value):
self._storage_profile = value
class VirtualMachineGetResponse(AzureOperationResponse):
"""
The GetVM operation response.
"""
def __init__(self, **kwargs):
super(VirtualMachineGetResponse, self).__init__(**kwargs)
self._virtual_machine = kwargs.get('virtual_machine')
@property
def virtual_machine(self):
"""
Gets or sets the details of the Virtual Machine.
"""
return self._virtual_machine
@virtual_machine.setter
def virtual_machine(self, value):
self._virtual_machine = value
class VirtualMachineListResponse(AzureOperationResponse):
"""
The List Virtual Machine operation response.
"""
def __init__(self, **kwargs):
super(VirtualMachineListResponse, self).__init__(**kwargs)
self._virtual_machines = kwargs.get('virtual_machines')
self._next_link = kwargs.get('next_link')
@property
def next_link(self):
"""
        Gets or sets the URI to fetch the next page of VMs. Call ListNext()
with this to fetch the next page of Virtual Machines.
"""
return self._next_link
@next_link.setter
def next_link(self, value):
self._next_link = value
@property
def virtual_machines(self):
"""
Gets or sets the list of virtual machines.
"""
return self._virtual_machines
@virtual_machines.setter
def virtual_machines(self, value):
self._virtual_machines = value
class ListParameters(object):
"""
Specifies the parameters to be passed to List APIs.
"""
def __init__(self, **kwargs):
pass
class ComputeOperationStatus(object):
"""
The operation status.
"""
in_progress = "InProgress"
failed = "Failed"
succeeded = "Succeeded"
preempted = "Preempted"
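# These status classes are simple string-constant enums; a hedged usage
# sketch against ComputeLongRunningOperationResponse (the 'response' object
# is hypothetical):
#
#   if response.status == ComputeOperationStatus.succeeded:
#       print(response.output)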
class ApiErrorBase(object):
"""
Api error base.
"""
def __init__(self, **kwargs):
self._code = kwargs.get('code')
self._target = kwargs.get('target')
self._message = kwargs.get('message')
@property
def code(self):
"""
Gets or sets the error code.
"""
return self._code
@code.setter
def code(self, value):
self._code = value
@property
def message(self):
"""
Gets or sets the error message.
"""
return self._message
@message.setter
def message(self, value):
self._message = value
@property
def target(self):
"""
Gets or sets the target of the particular error.
"""
return self._target
@target.setter
def target(self, value):
self._target = value
class ApiError(ApiErrorBase):
"""
Api error.
"""
def __init__(self, **kwargs):
super(ApiError, self).__init__(**kwargs)
self._details = kwargs.get('details')
self._inner_error = kwargs.get('inner_error')
@property
def details(self):
"""
Gets or sets the Api error details
"""
return self._details
@details.setter
def details(self, value):
self._details = value
@property
def inner_error(self):
"""
Gets or sets the Api inner error
"""
return self._inner_error
@inner_error.setter
def inner_error(self, value):
self._inner_error = value
class InnerError(object):
"""
Inner error details.
"""
def __init__(self, **kwargs):
self._exception_type = kwargs.get('exception_type')
self._error_detail = kwargs.get('error_detail')
@property
def error_detail(self):
"""
Gets or sets the internal error message or exception dump.
"""
return self._error_detail
@error_detail.setter
def error_detail(self, value):
self._error_detail = value
@property
def exception_type(self):
"""
Gets or sets the exception type.
"""
return self._exception_type
@exception_type.setter
def exception_type(self, value):
self._exception_type = value
class VirtualMachineSize(object):
"""
Describes the properties of a VM size.
"""
def __init__(self, **kwargs):
self._name = kwargs.get('name')
self._number_of_cores = kwargs.get('number_of_cores')
self._os_disk_size_in_mb = kwargs.get('os_disk_size_in_mb')
self._resource_disk_size_in_mb = kwargs.get('resource_disk_size_in_mb')
self._memory_in_mb = kwargs.get('memory_in_mb')
self._max_data_disk_count = kwargs.get('max_data_disk_count')
@property
def max_data_disk_count(self):
"""
Gets or sets the Maximum number of data disks allowed by a VM size.
"""
return self._max_data_disk_count
@max_data_disk_count.setter
def max_data_disk_count(self, value):
self._max_data_disk_count = value
@property
def memory_in_mb(self):
"""
Gets or sets the Memory size supported by a VM size.
"""
return self._memory_in_mb
@memory_in_mb.setter
def memory_in_mb(self, value):
self._memory_in_mb = value
@property
def name(self):
"""
Gets or sets the VM size name.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def number_of_cores(self):
"""
Gets or sets the Number of cores supported by a VM size.
"""
return self._number_of_cores
@number_of_cores.setter
def number_of_cores(self, value):
self._number_of_cores = value
@property
def os_disk_size_in_mb(self):
"""
Gets or sets the OS disk size allowed by a VM size.
"""
return self._os_disk_size_in_mb
@os_disk_size_in_mb.setter
def os_disk_size_in_mb(self, value):
self._os_disk_size_in_mb = value
@property
def resource_disk_size_in_mb(self):
"""
Gets or sets the Resource disk size allowed by a VM size.
"""
return self._resource_disk_size_in_mb
@resource_disk_size_in_mb.setter
def resource_disk_size_in_mb(self, value):
self._resource_disk_size_in_mb = value
class ApiEntityReference(object):
"""
The API entity reference.
"""
def __init__(self, **kwargs):
self._reference_uri = kwargs.get('reference_uri')
@property
def reference_uri(self):
"""
Gets or sets the relative URL in the previous Service Management API's
        namespace. For example:
        /subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/...
"""
return self._reference_uri
@reference_uri.setter
def reference_uri(self, value):
self._reference_uri = value
class VirtualMachineReference(ApiEntityReference):
"""
Describes a virtual machine reference.
"""
def __init__(self, **kwargs):
super(VirtualMachineReference, self).__init__(**kwargs)
class AvailabilitySetReference(ApiEntityReference):
"""
Describes an availability set reference.
"""
def __init__(self, **kwargs):
super(AvailabilitySetReference, self).__init__(**kwargs)
class NetworkInterfaceReference(ApiEntityReference):
"""
Describes a network interface reference.
"""
def __init__(self, **kwargs):
super(NetworkInterfaceReference, self).__init__(**kwargs)
self._primary = kwargs.get('primary')
@property
def primary(self):
"""
Gets or sets whether this is a primary NIC on a virtual machine
"""
return self._primary
@primary.setter
def primary(self, value):
self._primary = value
class SourceImageReference(ApiEntityReference):
"""
The source image reference.
"""
def __init__(self, **kwargs):
super(SourceImageReference, self).__init__(**kwargs)
class SourceVaultReference(ApiEntityReference):
"""
Contains a Source Key Vault relative URL.
"""
def __init__(self, **kwargs):
super(SourceVaultReference, self).__init__(**kwargs)
class InstanceViewStatus(object):
"""
Instance view status.
"""
def __init__(self, **kwargs):
self._code = kwargs.get('code')
self._level = kwargs.get('level')
self._display_status = kwargs.get('display_status')
self._message = kwargs.get('message')
self._time = kwargs.get('time')
@property
def code(self):
"""
Gets or sets the status Code.
"""
return self._code
@code.setter
def code(self, value):
self._code = value
@property
def display_status(self):
"""
Gets or sets the short localizable label for the status.
"""
return self._display_status
@display_status.setter
def display_status(self, value):
self._display_status = value
@property
def level(self):
"""
Gets or sets the level Code.
"""
return self._level
@level.setter
def level(self, value):
self._level = value
@property
def message(self):
"""
Gets or sets the optional detailed Message, including for alerts and
error messages.
"""
return self._message
@message.setter
def message(self, value):
self._message = value
@property
def time(self):
"""
Gets or sets the time of the status.
"""
return self._time
@time.setter
def time(self, value):
self._time = value
class StatusLevelTypes(object):
"""
The Level of the status.
"""
info = 'Info'
warning = 'Warning'
error = 'Error'
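# Illustrative sketch (not part of the generated client): InstanceViewStatus
# takes its fields as keyword arguments, and StatusLevelTypes above provides
# the allowed values for the 'level' field. All field values below are
# hypothetical placeholders.
def _example_instance_view_status():
    return InstanceViewStatus(
        code='ProvisioningState/succeeded',    # status code string
        level=StatusLevelTypes.info,           # one of Info/Warning/Error
        display_status='Provisioning succeeded',
        message='VM is running')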
class VirtualMachineImageResource(object):
"""
Virtual machine image resource information.
"""
def __init__(self, **kwargs):
self._id = kwargs.get('id')
self._name = kwargs.get('name')
self._location = kwargs.get('location')
@property
def id(self):
"""
Gets or sets the ID of the artifact.
"""
return self._id
@id.setter
def id(self, value):
self._id = value
@property
def location(self):
"""
Gets or sets the location of the resource.
"""
return self._location
@location.setter
def location(self, value):
self._location = value
@property
def name(self):
"""
Gets or sets the name of the resource.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
class VirtualMachineImage(VirtualMachineImageResource):
"""
Describes a Virtual Machine Image.
"""
def __init__(self, **kwargs):
super(VirtualMachineImage, self).__init__(**kwargs)
self._purchase_plan = kwargs.get('purchase_plan')
self._os_disk_image = kwargs.get('os_disk_image')
self._data_disk_images = kwargs.get('data_disk_images')
@property
def data_disk_images(self):
return self._data_disk_images
@data_disk_images.setter
def data_disk_images(self, value):
self._data_disk_images = value
@property
def os_disk_image(self):
return self._os_disk_image
@os_disk_image.setter
def os_disk_image(self, value):
self._os_disk_image = value
@property
def purchase_plan(self):
return self._purchase_plan
@purchase_plan.setter
def purchase_plan(self, value):
self._purchase_plan = value
class PurchasePlan(object):
"""
Used for establishing the purchase context of any 3rd Party artifact
through MarketPlace.
"""
def __init__(self, **kwargs):
self._publisher = kwargs.get('publisher')
self._name = kwargs.get('name')
self._product = kwargs.get('product')
@property
def name(self):
"""
Gets or sets the plan ID.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def product(self):
"""
Gets or sets the product ID.
"""
return self._product
@product.setter
def product(self, value):
self._product = value
@property
def publisher(self):
"""
Gets or sets the publisher ID.
"""
return self._publisher
@publisher.setter
def publisher(self, value):
self._publisher = value
class OSDiskImage(object):
"""
Contains the os disk image information.
"""
def __init__(self, **kwargs):
self._operating_system = kwargs.get('operating_system')
@property
def operating_system(self):
"""
Gets or sets the operating system of the osDiskImage.
"""
return self._operating_system
@operating_system.setter
def operating_system(self, value):
self._operating_system = value
class OperatingSystemTypes(object):
"""
The Operating System type.
"""
windows = 'Windows'
linux = 'Linux'
class DataDiskImage(object):
"""
Contains the data disk images information.
"""
def __init__(self, **kwargs):
self._lun = kwargs.get('lun')
@property
def lun(self):
"""
Gets the LUN number for a data disk. This value is used to identify each
data disk image inside the VMImage and therefore must be unique per data
disk. Only digit characters are allowed in the value.
"""
return self._lun
@lun.setter
def lun(self, value):
self._lun = value
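# Illustrative sketch (not part of the generated client): a VirtualMachineImage
# aggregates a PurchasePlan, an OSDiskImage and a list of DataDiskImage
# objects, all passed as keyword arguments like the other model classes here.
# Every field value below is a hypothetical placeholder.
def _example_virtual_machine_image():
    return VirtualMachineImage(
        id='example-image-id',
        name='14.04.2-LTS',
        location='westus',
        purchase_plan=PurchasePlan(publisher='Canonical',
                                   name='14.04.2-LTS',
                                   product='UbuntuServer'),
        os_disk_image=OSDiskImage(operating_system=OperatingSystemTypes.linux),
        data_disk_images=[DataDiskImage(lun=0)])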
class VirtualMachineExtensionImage(VirtualMachineImageResource):
"""
Describes a Virtual Machine Extension Image.
"""
def __init__(self, **kwargs):
super(VirtualMachineExtensionImage, self).__init__(**kwargs)
self._operating_system = kwargs.get('operating_system')
self._compute_role = kwargs.get('compute_role')
self._handler_schema = kwargs.get('handler_schema')
self._vm_scale_set_enabled = kwargs.get('vm_scale_set_enabled')
self._supports_multiple_extensions = kwargs.get('supports_multiple_extensions')
@property
def compute_role(self):
"""
Gets or sets the type of role (IaaS or PaaS) this extension supports.
"""
return self._compute_role
@compute_role.setter
def compute_role(self, value):
self._compute_role = value
@property
def handler_schema(self):
"""
Gets or sets the schema defined by publisher, where extension
consumers should provide settings in a matching schema.
"""
return self._handler_schema
@handler_schema.setter
def handler_schema(self, value):
self._handler_schema = value
@property
def operating_system(self):
"""
Gets or sets the operating system this extension supports.
"""
return self._operating_system
@operating_system.setter
def operating_system(self, value):
self._operating_system = value
@property
def supports_multiple_extensions(self):
"""
Gets or sets whether the handler can support multiple extensions.
"""
return self._supports_multiple_extensions
@supports_multiple_extensions.setter
def supports_multiple_extensions(self, value):
self._supports_multiple_extensions = value
@property
def vm_scale_set_enabled(self):
"""
Gets or sets whether the extension can be used on xRP VMScaleSets. By
default existing extensions are usable on scale sets, but there might
be cases where a publisher wants to explicitly indicate the extension
is only enabled for CRP VMs but not VMSS.
"""
return self._vm_scale_set_enabled
@vm_scale_set_enabled.setter
def vm_scale_set_enabled(self, value):
self._vm_scale_set_enabled = value
class ResourceInstanceView(object):
"""
The instance view of a resource.
"""
def __init__(self, **kwargs):
self._statuses = kwargs.get('statuses')
@property
def statuses(self):
"""
Gets or sets the resource status information.
"""
return self._statuses
@statuses.setter
def statuses(self, value):
self._statuses = value
class VirtualMachineExtensionInstanceView(ResourceInstanceView):
"""
The instance view of a virtual machine extension.
"""
def __init__(self, **kwargs):
super(VirtualMachineExtensionInstanceView, self).__init__(**kwargs)
self._name = kwargs.get('name')
self._extension_type = kwargs.get('extension_type')
self._type_handler_version = kwargs.get('type_handler_version')
self._sub_statuses = kwargs.get('sub_statuses')
@property
def extension_type(self):
"""
Gets or sets the full type of the extension handler which includes
both publisher and type.
"""
return self._extension_type
@extension_type.setter
def extension_type(self, value):
self._extension_type = value
@property
def name(self):
"""
Gets or sets the virtual machine extension name.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def sub_statuses(self):
"""
Gets or sets the resource status information.
"""
return self._sub_statuses
@sub_statuses.setter
def sub_statuses(self, value):
self._sub_statuses = value
@property
def type_handler_version(self):
"""
Gets or sets the type version of the extension handler.
"""
return self._type_handler_version
@type_handler_version.setter
def type_handler_version(self, value):
self._type_handler_version = value
class DiskInstanceView(ResourceInstanceView):
"""
The instance view of the disk.
"""
def __init__(self, **kwargs):
super(DiskInstanceView, self).__init__(**kwargs)
self._name = kwargs.get('name')
@property
def name(self):
"""
Gets or sets the disk name.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
class VirtualMachineInstanceView(ResourceInstanceView):
"""
The instance view of a virtual machine.
"""
def __init__(self, **kwargs):
super(VirtualMachineInstanceView, self).__init__(**kwargs)
self._platform_update_domain = kwargs.get('platform_update_domain')
self._platform_fault_domain = kwargs.get('platform_fault_domain')
self._remote_desktop_thumbprint = kwargs.get('remote_desktop_thumbprint')
self._vm_agent = kwargs.get('vm_agent')
self._disks = kwargs.get('disks')
self._extensions = kwargs.get('extensions')
self._statuses = kwargs.get('statuses')
@property
def disks(self):
"""
Gets or sets the disks information.
"""
return self._disks
@disks.setter
def disks(self, value):
self._disks = value
@property
def extensions(self):
"""
Gets or sets the extensions information.
"""
return self._extensions
@extensions.setter
def extensions(self, value):
self._extensions = value
@property
def platform_fault_domain(self):
"""
Gets or sets the Fault Domain count.
"""
return self._platform_fault_domain
@platform_fault_domain.setter
def platform_fault_domain(self, value):
self._platform_fault_domain = value
@property
def platform_update_domain(self):
"""
Gets or sets the Update Domain count.
"""
return self._platform_update_domain
@platform_update_domain.setter
def platform_update_domain(self, value):
self._platform_update_domain = value
@property
def remote_desktop_thumbprint(self):
"""
Gets or sets the Remote desktop certificate thumbprint.
"""
return self._remote_desktop_thumbprint
@remote_desktop_thumbprint.setter
def remote_desktop_thumbprint(self, value):
self._remote_desktop_thumbprint = value
@property
def statuses(self):
"""
Gets or sets the VM status.
"""
return self._statuses
@statuses.setter
def statuses(self, value):
self._statuses = value
@property
def vm_agent(self):
"""
Gets or sets the VM Agent running on the virtual machine.
"""
return self._vm_agent
@vm_agent.setter
def vm_agent(self, value):
self._vm_agent = value
class VirtualMachineAgentInstanceView(ResourceInstanceView):
"""
The instance view of the VM Agent running on the virtual machine.
"""
def __init__(self, **kwargs):
super(VirtualMachineAgentInstanceView, self).__init__(**kwargs)
self._vm_agent_version = kwargs.get('vm_agent_version')
self._extension_handlers = kwargs.get('extension_handlers')
@property
def extension_handlers(self):
"""
Gets or sets the virtual machine extension handler instance view.
"""
return self._extension_handlers
@extension_handlers.setter
def extension_handlers(self, value):
self._extension_handlers = value
@property
def vm_agent_version(self):
"""
Gets or sets the VM Agent full version.
"""
return self._vm_agent_version
@vm_agent_version.setter
def vm_agent_version(self, value):
self._vm_agent_version = value
class VirtualMachineExtensionHandlerInstanceView(object):
"""
The instance view of a virtual machine extension handler.
"""
def __init__(self, **kwargs):
self._type = kwargs.get('type')
self._type_handler_version = kwargs.get('type_handler_version')
self._status = kwargs.get('status')
@property
def status(self):
"""
Gets or sets the extension handler status.
"""
return self._status
@status.setter
def status(self, value):
self._status = value
@property
def type(self):
"""
Gets or sets the full type of the extension handler, which includes both
publisher and type.
"""
return self._type
@type.setter
def type(self, value):
self._type = value
@property
def type_handler_version(self):
"""
Gets or sets the type version of the extension handler.
"""
return self._type_handler_version
@type_handler_version.setter
def type_handler_version(self, value):
self._type_handler_version = value
class Plan(object):
"""
Plan for the resource.
"""
def __init__(self, **kwargs):
self._name = kwargs.get('name')
self._publisher = kwargs.get('publisher')
self._product = kwargs.get('product')
self._promotion_code = kwargs.get('promotion_code')
@property
def name(self):
"""
Gets or sets the plan ID.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def product(self):
"""
Gets or sets the offer ID.
"""
return self._product
@product.setter
def product(self, value):
self._product = value
@property
def promotion_code(self):
"""
Gets or sets the promotion code.
"""
return self._promotion_code
@promotion_code.setter
def promotion_code(self, value):
self._promotion_code = value
@property
def publisher(self):
"""
Gets or sets the publisher ID.
"""
return self._publisher
@publisher.setter
def publisher(self, value):
self._publisher = value
class HardwareProfile(object):
"""
Describes a hardware profile.
"""
def __init__(self, **kwargs):
self._virtual_machine_size = kwargs.get('virtual_machine_size')
@property
def virtual_machine_size(self):
"""
The virtual machine size name.
"""
return self._virtual_machine_size
@virtual_machine_size.setter
def virtual_machine_size(self, value):
self._virtual_machine_size = value
class VirtualMachineSizeTypes(object):
"""
The virtual machine size.
"""
basic_a0 = 'Basic_A0'
basic_a1 = 'Basic_A1'
basic_a2 = 'Basic_A2'
basic_a3 = 'Basic_A3'
basic_a4 = 'Basic_A4'
standard_a0 = 'Standard_A0'
standard_a1 = 'Standard_A1'
standard_a2 = 'Standard_A2'
standard_a3 = 'Standard_A3'
standard_a4 = 'Standard_A4'
standard_a5 = 'Standard_A5'
standard_a6 = 'Standard_A6'
standard_a7 = 'Standard_A7'
standard_a8 = 'Standard_A8'
standard_a9 = 'Standard_A9'
standard_g1 = 'Standard_G1'
standard_g2 = 'Standard_G2'
standard_g3 = 'Standard_G3'
standard_g4 = 'Standard_G4'
standard_g5 = 'Standard_G5'
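# Illustrative sketch (not part of the generated client): a HardwareProfile
# simply carries the size name, and VirtualMachineSizeTypes above lists the
# known values.
def _example_hardware_profile():
    return HardwareProfile(
        virtual_machine_size=VirtualMachineSizeTypes.standard_a2)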
class StorageProfile(object):
"""
Describes a storage profile.
"""
def __init__(self, **kwargs):
self._image_reference = kwargs.get('image_reference')
self._source_image = kwargs.get('source_image')
self._os_disk = kwargs.get('os_disk')
self._data_disks = kwargs.get('data_disks')
@property
def data_disks(self):
"""
Gets or sets the data disks.
"""
return self._data_disks
@data_disks.setter
def data_disks(self, value):
self._data_disks = value
@property
def image_reference(self):
"""
Gets or sets the image reference.
"""
return self._image_reference
@image_reference.setter
def image_reference(self, value):
self._image_reference = value
@property
def os_disk(self):
"""
Gets or sets the OS disk.
"""
return self._os_disk
@os_disk.setter
def os_disk(self, value):
self._os_disk = value
@property
def source_image(self):
"""
Gets or sets the source image reference.
"""
return self._source_image
@source_image.setter
def source_image(self, value):
self._source_image = value
class ImageReference(object):
"""
The image reference.
"""
def __init__(self, **kwargs):
self._publisher = kwargs.get('publisher')
self._offer = kwargs.get('offer')
self._sku = kwargs.get('sku')
self._version = kwargs.get('version')
@property
def offer(self):
"""
Gets or sets the image offer.
"""
return self._offer
@offer.setter
def offer(self, value):
self._offer = value
@property
def publisher(self):
"""
Gets or sets the image publisher.
"""
return self._publisher
@publisher.setter
def publisher(self, value):
self._publisher = value
@property
def sku(self):
"""
Gets or sets the image sku.
"""
return self._sku
@sku.setter
def sku(self, value):
self._sku = value
@property
def version(self):
"""
Gets or sets the image version. The allowed formats are
Major.Minor.Build or 'latest', where Major, Minor and Build are decimal
numbers. Specify 'latest' to use the latest version of the image.
"""
return self._version
@version.setter
def version(self, value):
self._version = value
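# Illustrative sketch (not part of the generated client): an ImageReference
# identifies a platform image by publisher/offer/sku/version, and 'latest'
# picks the newest available version per the version property docstring
# above. The publisher/offer/sku values are hypothetical.
def _example_image_reference():
    return ImageReference(
        publisher='Canonical',
        offer='UbuntuServer',
        sku='14.04.2-LTS',
        version='latest')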
class Disk(object):
"""
Describes a disk.
"""
def __init__(self, **kwargs):
self._name = kwargs.get('name')
self._virtual_hard_disk = kwargs.get('virtual_hard_disk')
self._source_image = kwargs.get('source_image')
self._caching = kwargs.get('caching')
self._create_option = kwargs.get('create_option')
@property
def caching(self):
"""
Gets or sets the caching type.
"""
return self._caching
@caching.setter
def caching(self, value):
self._caching = value
@property
def create_option(self):
"""
Gets or sets the create option.
"""
return self._create_option
@create_option.setter
def create_option(self, value):
self._create_option = value
@property
def name(self):
"""
Gets or sets the disk name.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def source_image(self):
"""
Gets or sets the Source User Image VirtualHardDisk. This
VirtualHardDisk will be copied before being used to attach to the
Virtual Machine. If SourceImage is provided, the destination
VirtualHardDisk should not exist.
"""
return self._source_image
@source_image.setter
def source_image(self, value):
self._source_image = value
@property
def virtual_hard_disk(self):
"""
Gets or sets the Virtual Hard Disk.
"""
return self._virtual_hard_disk
@virtual_hard_disk.setter
def virtual_hard_disk(self, value):
self._virtual_hard_disk = value
class OSDisk(Disk):
"""
Describes an Operating System disk.
"""
def __init__(self, **kwargs):
super(OSDisk, self).__init__(**kwargs)
self._operating_system_type = kwargs.get('operating_system_type')
@property
def operating_system_type(self):
"""
Gets or sets the Operating System type.
"""
return self._operating_system_type
@operating_system_type.setter
def operating_system_type(self, value):
self._operating_system_type = value
class DataDisk(Disk):
"""
Describes a data disk.
"""
def __init__(self, **kwargs):
super(DataDisk, self).__init__(**kwargs)
self._lun = kwargs.get('lun')
self._disk_size_gb = kwargs.get('disk_size_gb')
@property
def disk_size_gb(self):
"""
Gets or sets the disk size in GB for a blank data disk to be created.
"""
return self._disk_size_gb
@disk_size_gb.setter
def disk_size_gb(self, value):
self._disk_size_gb = value
@property
def lun(self):
"""
Gets or sets the logical unit number.
"""
return self._lun
@lun.setter
def lun(self, value):
self._lun = value
class VirtualHardDisk(object):
"""
Describes the URI of a disk.
"""
def __init__(self, **kwargs):
self._uri = kwargs.get('uri')
@property
def uri(self):
"""
Gets or sets the virtual hard disk's URI. It should be a valid URI to
a virtual hard disk.
"""
return self._uri
@uri.setter
def uri(self, value):
self._uri = value
class CachingTypes(object):
"""
The caching types of OS or data disk.
"""
none = 'None'
read_only = 'ReadOnly'
read_write = 'ReadWrite'
class DiskCreateOptionTypes(object):
"""
The create options for disks.
"""
from_image = 'fromImage'
empty = 'empty'
attach = 'attach'
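# Illustrative sketch (not part of the generated client): wiring an OSDisk and
# a DataDisk into a StorageProfile. VirtualHardDisk wraps the blob URI, while
# CachingTypes and DiskCreateOptionTypes above supply the allowed caching and
# create-option values. The storage URIs are hypothetical, and the image
# reference reuses the earlier _example_image_reference sketch.
def _example_storage_profile():
    os_disk = OSDisk(
        name='osdisk',
        operating_system_type=OperatingSystemTypes.linux,
        virtual_hard_disk=VirtualHardDisk(
            uri='https://examplestorage.blob.core.windows.net/vhds/osdisk.vhd'),
        caching=CachingTypes.read_write,
        create_option=DiskCreateOptionTypes.from_image)
    data_disk = DataDisk(
        name='datadisk0',
        lun=0,
        disk_size_gb=100,  # blank data disk size in GB
        virtual_hard_disk=VirtualHardDisk(
            uri='https://examplestorage.blob.core.windows.net/vhds/datadisk0.vhd'),
        caching=CachingTypes.none,
        create_option=DiskCreateOptionTypes.empty)
    return StorageProfile(
        image_reference=_example_image_reference(),
        os_disk=os_disk,
        data_disks=[data_disk])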
class OSProfile(object):
"""
Describes an OS profile.
"""
def __init__(self, **kwargs):
self._computer_name = kwargs.get('computer_name')
self._admin_username = kwargs.get('admin_username')
self._admin_password = kwargs.get('admin_password')
self._custom_data = kwargs.get('custom_data')
self._windows_configuration = kwargs.get('windows_configuration')
self._linux_configuration = kwargs.get('linux_configuration')
self._secrets = kwargs.get('secrets')
@property
def admin_password(self):
"""
Gets or sets the admin user password.
"""
return self._admin_password
@admin_password.setter
def admin_password(self, value):
self._admin_password = value
@property
def admin_username(self):
"""
Gets or sets the admin user name.
"""
return self._admin_username
@admin_username.setter
def admin_username(self, value):
self._admin_username = value
@property
def computer_name(self):
"""
Gets or sets the computer name.
"""
return self._computer_name
@computer_name.setter
def computer_name(self, value):
self._computer_name = value
@property
def custom_data(self):
"""
Gets or sets a base-64 encoded string of custom data.
"""
return self._custom_data
@custom_data.setter
def custom_data(self, value):
self._custom_data = value
@property
def linux_configuration(self):
"""
Gets or sets the Linux Configuration of the OS profile.
"""
return self._linux_configuration
@linux_configuration.setter
def linux_configuration(self, value):
self._linux_configuration = value
@property
def secrets(self):
"""
Gets or sets the List of certificates for addition to the VM.
"""
return self._secrets
@secrets.setter
def secrets(self, value):
self._secrets = value
@property
def windows_configuration(self):
"""
Gets or sets the Windows Configuration of the OS profile.
"""
return self._windows_configuration
@windows_configuration.setter
def windows_configuration(self, value):
self._windows_configuration = value
class WindowsConfiguration(object):
"""
Describes Windows Configuration of the OS Profile.
"""
def __init__(self, **kwargs):
self._provision_vm_agent = kwargs.get('provision_vm_agent')
self._enable_automatic_updates = kwargs.get('enable_automatic_updates')
self._time_zone = kwargs.get('time_zone')
self._additional_unattend_contents = kwargs.get('additional_unattend_contents')
self._win_rm_configuration = kwargs.get('win_rm_configuration')
@property
def additional_unattend_contents(self):
"""
Gets or sets the additional base-64 encoded XML formatted information
that can be included in the Unattend.xml file.
"""
return self._additional_unattend_contents
@additional_unattend_contents.setter
def additional_unattend_contents(self, value):
self._additional_unattend_contents = value
@property
def enable_automatic_updates(self):
"""
Gets or sets whether Windows updates are automatically installed on
the VM.
"""
return self._enable_automatic_updates
@enable_automatic_updates.setter
def enable_automatic_updates(self, value):
self._enable_automatic_updates = value
@property
def provision_vm_agent(self):
"""
Gets or sets whether VM Agent should be provisioned on the Virtual
Machine.
"""
return self._provision_vm_agent
@provision_vm_agent.setter
def provision_vm_agent(self, value):
self._provision_vm_agent = value
@property
def time_zone(self):
"""
Gets or sets the Time Zone of the VM.
"""
return self._time_zone
@time_zone.setter
def time_zone(self, value):
self._time_zone = value
@property
def win_rm_configuration(self):
"""
Gets or sets the Windows Remote Management configuration of the VM.
"""
return self._win_rm_configuration
@win_rm_configuration.setter
def win_rm_configuration(self, value):
self._win_rm_configuration = value
class AdditionalUnattendContent(object):
"""
Gets or sets additional XML formatted information that can be included in
the Unattend.xml file, which is used by Windows Setup. Contents are
defined by setting name, component name, and the pass in which the
content is applied.
"""
def __init__(self, **kwargs):
self._pass_name = kwargs.get('pass_name')
self._component_name = kwargs.get('component_name')
self._setting_name = kwargs.get('setting_name')
self._content = kwargs.get('content')
@property
def component_name(self):
"""
Gets or sets the component name. Currently, the only allowable value
is Microsoft-Windows-Shell-Setup.
"""
return self._component_name
@component_name.setter
def component_name(self, value):
self._component_name = value
@property
def content(self):
"""
Gets or sets XML formatted content that is added to the unattend.xml
file in the specified pass and component. The XML must be less than 4
KB and must include the root element for the setting or feature that
is being inserted.
"""
return self._content
@content.setter
def content(self, value):
self._content = value
@property
def pass_name(self):
"""
Gets or sets the pass name. Currently, the only allowable value is
oobeSystem.
"""
return self._pass_name
@pass_name.setter
def pass_name(self, value):
self._pass_name = value
@property
def setting_name(self):
"""
Gets or sets the setting name (e.g. FirstLogonCommands, AutoLogon).
"""
return self._setting_name
@setting_name.setter
def setting_name(self, value):
self._setting_name = value
class PassNames(object):
"""
Pass names supported by AdditionalUnattendContent.
"""
oobe_system = 'oobeSystem'
class ComponentNames(object):
"""
Component names supported by AdditionalUnattendContent.
"""
microsoft_windows_shell_setup = 'Microsoft-Windows-Shell-Setup'
class SettingNames(object):
"""
Setting names supported by AdditionalUnattendContent.
"""
auto_logon = 'AutoLogon'
first_logon_commands = 'FirstLogonCommands'
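# Illustrative sketch (not part of the generated client): the PassNames,
# ComponentNames and SettingNames classes above cover every value currently
# accepted by AdditionalUnattendContent. The XML content is a hypothetical
# stand-in and must stay under 4 KB per the content property docstring.
def _example_additional_unattend_content():
    return AdditionalUnattendContent(
        pass_name=PassNames.oobe_system,
        component_name=ComponentNames.microsoft_windows_shell_setup,
        setting_name=SettingNames.auto_logon,
        content='<AutoLogon><Enabled>true</Enabled></AutoLogon>')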
class WinRMConfiguration(object):
"""
Describes the Windows Remote Management configuration of the VM.
"""
def __init__(self, **kwargs):
self._listeners = kwargs.get('listeners')
@property
def listeners(self):
"""
Gets or sets the list of Windows Remote Management listeners.
"""
return self._listeners
@listeners.setter
def listeners(self, value):
self._listeners = value
class WinRMListener(object):
"""
Describes the protocol and thumbprint of a Windows Remote Management
listener.
"""
def __init__(self, **kwargs):
self._protocol = kwargs.get('protocol')
self._certificate_url = kwargs.get('certificate_url')
@property
def certificate_url(self):
"""
Gets or sets the Certificate URL in KMS for Https listeners. Should be
null for Http listeners.
"""
return self._certificate_url
@certificate_url.setter
def certificate_url(self, value):
self._certificate_url = value
@property
def protocol(self):
"""
Gets or sets the Protocol used by WinRM listener. Currently only Http
and Https are supported.
"""
return self._protocol
@protocol.setter
def protocol(self, value):
self._protocol = value
class ProtocolTypes(object):
"""
The protocol type of the WinRM listener.
"""
http = 'Http'
https = 'Https'
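# Illustrative sketch (not part of the generated client): an HTTPS WinRM
# listener requires a certificate URL, while an HTTP listener leaves
# certificate_url unset per the property docstring above. The vault URL
# below is a hypothetical placeholder.
def _example_win_rm_configuration():
    return WinRMConfiguration(listeners=[
        WinRMListener(protocol=ProtocolTypes.http),
        WinRMListener(
            protocol=ProtocolTypes.https,
            certificate_url='https://examplevault.vault.azure.net/secrets/winrmcert')])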
class LinuxConfiguration(object):
"""
Describes the Linux Configuration of the OS Profile.
"""
def __init__(self, **kwargs):
self._disable_password_authentication = kwargs.get('disable_password_authentication')
self._ssh_configuration = kwargs.get('ssh_configuration')
@property
def disable_password_authentication(self):
"""
Gets or sets whether authentication using user name and password is
allowed.
"""
return self._disable_password_authentication
@disable_password_authentication.setter
def disable_password_authentication(self, value):
self._disable_password_authentication = value
@property
def ssh_configuration(self):
"""
Gets or sets the SSH configuration for Linux VMs.
"""
return self._ssh_configuration
@ssh_configuration.setter
def ssh_configuration(self, value):
self._ssh_configuration = value
class SshConfiguration(object):
"""
SSH configuration for Linux-based VMs running on Azure.
"""
def __init__(self, **kwargs):
self._public_keys = kwargs.get('public_keys')
@property
def public_keys(self):
"""
Gets or sets the list of SSH public keys used to authenticate with
Linux-based VMs.
"""
return self._public_keys
@public_keys.setter
def public_keys(self, value):
self._public_keys = value
class SshPublicKey(object):
"""
Contains information about the SSH certificate public key and the path on
the Linux VM where the public key is placed.
"""
def __init__(self, **kwargs):
self._path = kwargs.get('path')
self._key_data = kwargs.get('key_data')
@property
def key_data(self):
"""
Gets or sets the certificate public key used to authenticate with the VM
through SSH. The certificate must be in PEM format, with or without
headers.
"""
return self._key_data
@key_data.setter
def key_data(self, value):
self._key_data = value
@property
def path(self):
"""
Gets or sets the full path on the created VM where the SSH public key is
stored. If the file already exists, the specified key is appended to
the file.
"""
return self._path
@path.setter
def path(self, value):
self._path = value
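# Illustrative sketch (not part of the generated client): a password-less
# Linux configuration that installs one SSH public key. The path and key
# material below are hypothetical placeholders.
def _example_linux_configuration():
    public_key = SshPublicKey(
        path='/home/azureuser/.ssh/authorized_keys',
        key_data='ssh-rsa AAAAB3Nza example-key')  # placeholder key material
    return LinuxConfiguration(
        disable_password_authentication=True,
        ssh_configuration=SshConfiguration(public_keys=[public_key]))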
class VaultSecretGroup(object):
"""
Describes a set of certificates which are all in the same Key Vault.
"""
def __init__(self, **kwargs):
self._source_vault = kwargs.get('source_vault')
self._vault_certificates = kwargs.get('vault_certificates')
@property
def source_vault(self):
"""
Gets or sets the Relative URL of the Key Vault containing all of the
certificates in VaultCertificates.
"""
return self._source_vault
@source_vault.setter
def source_vault(self, value):
self._source_vault = value
@property
def vault_certificates(self):
"""
Gets or sets the list of key vault references in SourceVault which
contain certificates.
"""
return self._vault_certificates
@vault_certificates.setter
def vault_certificates(self, value):
self._vault_certificates = value
class VaultCertificate(object):
"""
Describes a single certificate reference in a Key Vault, and where the
certificate should reside on the VM.
"""
def __init__(self, **kwargs):
self._certificate_url = kwargs.get('certificate_url')
self._certificate_store = kwargs.get('certificate_store')
@property
def certificate_store(self):
"""
Gets or sets the Certificate store in LocalMachine to add the
certificate to on Windows; leave it empty on Linux.
"""
return self._certificate_store
@certificate_store.setter
def certificate_store(self, value):
self._certificate_store = value
@property
def certificate_url(self):
"""
Gets or sets the URL referencing a secret in a Key Vault which
contains a properly formatted certificate.
"""
return self._certificate_url
@certificate_url.setter
def certificate_url(self, value):
self._certificate_url = value
class ProvisioningStateTypes(object):
"""
The provisioning state.
"""
creating = 'Creating'
updating = 'Updating'
failed = 'Failed'
succeeded = 'Succeeded'
deleting = 'Deleting'
class Usage(object):
"""
Describes Compute Resource Usage.
"""
def __init__(self, **kwargs):
self._unit = kwargs.get('unit')
self._current_value = kwargs.get('current_value')
self._limit = kwargs.get('limit')
self._name = kwargs.get('name')
@property
def current_value(self):
"""
Gets or sets the current value of the usage.
"""
return self._current_value
@current_value.setter
def current_value(self, value):
self._current_value = value
@property
def limit(self):
"""
Gets or sets the limit of usage.
"""
return self._limit
@limit.setter
def limit(self, value):
self._limit = value
@property
def name(self):
"""
Gets or sets the name of the type of usage.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def unit(self):
"""
Gets or sets an enum describing the unit of measurement.
"""
return self._unit
@unit.setter
def unit(self, value):
self._unit = value
class UsageUnit(object):
"""
The usage unit.
"""
count = "Count"
class UsageName(object):
"""
The Usage Names.
"""
def __init__(self, **kwargs):
self._value = kwargs.get('value')
self._localized_value = kwargs.get('localized_value')
@property
def localized_value(self):
"""
Gets or sets a localized string describing the resource name.
"""
return self._localized_value
@localized_value.setter
def localized_value(self, value):
self._localized_value = value
@property
def value(self):
"""
Gets or sets a string describing the resource name.
"""
return self._value
@value.setter
def value(self, value):
self._value = value
class NetworkProfile(object):
"""
Describes a network profile.
"""
def __init__(self, **kwargs):
self._network_interfaces = kwargs.get('network_interfaces')
@property
def network_interfaces(self):
"""
Gets or sets the network interfaces.
"""
return self._network_interfaces
@network_interfaces.setter
def network_interfaces(self, value):
self._network_interfaces = value
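# Illustrative sketch (not part of the generated client): a NetworkProfile
# holds NetworkInterfaceReference objects; reference_uri (inherited from
# ApiEntityReference) is the NIC's relative resource URL, and 'primary'
# marks the primary NIC when a VM has several. The ids below are
# hypothetical placeholders.
def _example_network_profile():
    nic = NetworkInterfaceReference(
        reference_uri='/subscriptions/sub-id/resourceGroups/example-rg/'
                      'providers/Microsoft.Network/networkInterfaces/nic0',
        primary=True)
    return NetworkProfile(network_interfaces=[nic])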
class ComputeManagementClient(Service):
"""
The Compute Management Client.
"""
@property
def api_version(self):
"""
Gets the API version.
"""
return self._api_version
@property
def long_running_operation_initial_timeout(self):
"""
Gets or sets the initial timeout for Long Running Operations.
"""
return self._long_running_operation_initial_timeout
@long_running_operation_initial_timeout.setter
def long_running_operation_initial_timeout(self, value):
self._long_running_operation_initial_timeout = value
@property
def long_running_operation_retry_timeout(self):
"""
Gets or sets the retry timeout for Long Running Operations.
"""
return self._long_running_operation_retry_timeout
@long_running_operation_retry_timeout.setter
def long_running_operation_retry_timeout(self, value):
self._long_running_operation_retry_timeout = value
@property
def availability_sets(self):
"""
Operations for managing the availability sets in compute management.
"""
return self._availability_sets
@property
def usage(self):
"""
Operations for listing usage.
"""
return self._usage
@property
def virtual_machine_extension_images(self):
"""
Operations for managing the virtual machine extension images in
compute management.
"""
return self._virtual_machine_extension_images
@property
def virtual_machine_extensions(self):
"""
Operations for managing the virtual machine extensions in compute
management.
"""
return self._virtual_machine_extensions
@property
def virtual_machine_images(self):
"""
Operations for managing the virtual machine images in compute
management.
"""
return self._virtual_machine_images
@property
def virtual_machines(self):
"""
Operations for managing the virtual machines in compute management.
"""
return self._virtual_machines
@property
def virtual_machine_sizes(self):
"""
Operations for listing virtual machine sizes available in a region.
"""
return self._virtual_machine_sizes
def __init__(self, credentials, **kwargs):
super(ComputeManagementClient, self).__init__(credentials, **kwargs)
if getattr(self, '_base_uri', None) is None:
self._base_uri = 'https://management.azure.com'
if getattr(self, '_api_version', None) is None:
self._api_version = '2015-05-01-preview'
if getattr(self, '_long_running_operation_initial_timeout', None) is None:
self._long_running_operation_initial_timeout = -1
if getattr(self, '_long_running_operation_retry_timeout', None) is None:
self._long_running_operation_retry_timeout = -1
self._availability_sets = AvailabilitySetOperations(self)
self._usage = UsageOperations(self)
self._virtual_machine_extension_images = VirtualMachineExtensionImageOperations(self)
self._virtual_machine_extensions = VirtualMachineExtensionOperations(self)
self._virtual_machine_images = VirtualMachineImageOperations(self)
self._virtual_machines = VirtualMachineOperations(self)
self._virtual_machine_sizes = VirtualMachineSizeOperations(self)
def get_long_running_operation_status(self, operation_status_link):
"""
The Get Operation Status operation returns the status of the specified
operation. After calling an asynchronous operation, you can call
GetLongRunningOperationStatus to determine whether the operation has
succeeded, failed, or is still in progress.
Args:
operation_status_link (string): Location value returned by the Begin
operation.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
"""
# Validate
if operation_status_link is None:
raise ValueError('operation_status_link cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + operation_status_link
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
# Send Request
response = self.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200 and status_code != 202:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200 or status_code == 202:
response_content = body
result = ComputeLongRunningOperationResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
operation_id_value = response_doc.get('operationId', None)
if operation_id_value is not None:
operation_id_instance = operation_id_value
result.tracking_operation_id = operation_id_instance
status_value = response_doc.get('status', None)
if status_value is not None:
status_instance = status_value
result.status = status_instance
start_time_value = response_doc.get('startTime', None)
if start_time_value is not None:
start_time_instance = start_time_value
result.start_time = start_time_instance
end_time_value = response_doc.get('endTime', None)
if end_time_value is not None:
end_time_instance = end_time_value
result.end_time = end_time_instance
properties_value = response_doc.get('properties', None)
if properties_value is not None:
output_value = properties_value.get('output', None)
if output_value is not None:
output_instance = json.dumps(output_value)
result.output = output_instance
error_value = response_doc.get('error', None)
if error_value is not None:
error_instance = ApiError(details=[])
result.error = error_instance
details_array = error_value.get('details', None)
if details_array is not None:
for details_value in details_array:
api_error_base_instance = ApiErrorBase()
error_instance.details.append(api_error_base_instance)
code_value = details_value.get('code', None)
if code_value is not None:
code_instance = code_value
api_error_base_instance.code = code_instance
target_value = details_value.get('target', None)
if target_value is not None:
target_instance = target_value
api_error_base_instance.target = target_instance
message_value = details_value.get('message', None)
if message_value is not None:
message_instance = message_value
api_error_base_instance.message = message_instance
innererror_value = error_value.get('innererror', None)
if innererror_value is not None:
innererror_instance = InnerError()
error_instance.inner_error = innererror_instance
exceptiontype_value = innererror_value.get('exceptiontype', None)
if exceptiontype_value is not None:
exceptiontype_instance = exceptiontype_value
innererror_instance.exception_type = exceptiontype_instance
errordetail_value = innererror_value.get('errordetail', None)
if errordetail_value is not None:
errordetail_instance = errordetail_value
innererror_instance.error_detail = errordetail_instance
code_value2 = error_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
error_instance.code = code_instance2
target_value2 = error_value.get('target', None)
if target_value2 is not None:
target_instance2 = target_value2
error_instance.target = target_instance2
message_value2 = error_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
error_instance.message = message_instance2
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
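# Illustrative usage sketch (not part of the generated client): constructing
# the client and round-tripping an availability set through the operations
# group defined below. 'credentials' stands in for a real credentials object
# carrying a subscription_id, and AvailabilitySet (defined elsewhere in this
# module) is assumed to accept keyword arguments like the other model classes.
# Names such as 'example-rg' are hypothetical. Nothing here runs at import
# time.
def _example_availability_set_roundtrip(credentials):
    client = ComputeManagementClient(credentials)
    avset = AvailabilitySet(
        name='example-avset',
        location='westus',
        platform_update_domain_count=5,
        platform_fault_domain_count=3,
        tags={'env': 'test'},
        statuses=[],
        virtual_machines_references=[])
    client.availability_sets.create_or_update('example-rg', avset)
    fetched = client.availability_sets.get('example-rg', 'example-avset')
    listed = client.availability_sets.list('example-rg')
    client.availability_sets.delete('example-rg', 'example-avset')
    return fetched, listed
# Polling sketch for the long-running pattern served by
# get_long_running_operation_status above; 'link' is the Location value
# returned by an asynchronous Begin* operation, and the 'InProgress' status
# string is an assumption about the service payload.
def _example_poll_operation(client, link):
    import time
    result = client.get_long_running_operation_status(link)
    while result.status == 'InProgress':
        time.sleep(5)  # real code would honor long_running_operation_retry_timeout
        result = client.get_long_running_operation_status(link)
    return result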
class AvailabilitySetOperations(object):
"""
Operations for managing the availability sets in compute management.
__NOTE__: An instance of this class is automatically created for an
instance of the ComputeManagementClient.
"""
def __init__(self, client):
self._client = client
@property
def client(self):
"""
Gets a reference to the
Microsoft.Azure.Management.Compute.ComputeManagementClient.
"""
return self._client
def create_or_update(self, resource_group_name, parameters):
"""
The operation to create or update the availability set.
Args:
resource_group_name (string): The name of the resource group.
parameters (AvailabilitySet): Parameters supplied to the Create
Availability Set operation.
Returns:
AvailabilitySetCreateOrUpdateResponse: The Create Availability Set
operation response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if parameters is None:
raise ValueError('parameters cannot be None.')
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/availabilitySets/'
if parameters.name is not None:
url = url + quote(parameters.name)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url[len(base_url) - 1] == '/':
base_url = base_url[0 : len(base_url) - 1]
if url[0] == '/':
url = url[1 : ]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'PUT'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Serialize Request
request_content = None
request_doc = None
availability_set_json_value = {}
request_doc = availability_set_json_value
properties_value = {}
availability_set_json_value['properties'] = properties_value
if parameters.platform_update_domain_count is not None:
properties_value['platformUpdateDomainCount'] = parameters.platform_update_domain_count
if parameters.platform_fault_domain_count is not None:
properties_value['platformFaultDomainCount'] = parameters.platform_fault_domain_count
if parameters.virtual_machines_references is not None:
virtual_machines_array = []
for virtual_machines_item in parameters.virtual_machines_references:
virtual_machine_reference_value = {}
virtual_machines_array.append(virtual_machine_reference_value)
if virtual_machines_item.reference_uri is not None:
virtual_machine_reference_value['id'] = virtual_machines_item.reference_uri
properties_value['virtualMachines'] = virtual_machines_array
if parameters.statuses is not None:
statuses_array = []
for statuses_item in parameters.statuses:
instance_view_status_value = {}
statuses_array.append(instance_view_status_value)
if statuses_item.code is not None:
instance_view_status_value['code'] = statuses_item.code
if statuses_item.level is not None:
instance_view_status_value['level'] = statuses_item.level
if statuses_item.display_status is not None:
instance_view_status_value['displayStatus'] = statuses_item.display_status
if statuses_item.message is not None:
instance_view_status_value['message'] = statuses_item.message
if statuses_item.time is not None:
instance_view_status_value['time'] = statuses_item.time
properties_value['statuses'] = statuses_array
if parameters.id is not None:
availability_set_json_value['id'] = parameters.id
if parameters.name is not None:
availability_set_json_value['name'] = parameters.name
if parameters.type is not None:
availability_set_json_value['type'] = parameters.type
availability_set_json_value['location'] = parameters.location
if parameters.tags is not None:
tags_dictionary = {}
for tags_key in parameters.tags:
tags_value = parameters.tags[tags_key]
tags_dictionary[tags_key] = tags_value
availability_set_json_value['tags'] = tags_dictionary
request_content = json.dumps(request_doc)
http_request.data = request_content
http_request.headers['Content-Length'] = len(request_content)
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = AvailabilitySetCreateOrUpdateResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
availability_set_instance = AvailabilitySet(statuses=[], tags={}, virtual_machines_references=[])
result.availability_set = availability_set_instance
properties_value2 = response_doc.get('properties', None)
if properties_value2 is not None:
platform_update_domain_count_value = properties_value2.get('platformUpdateDomainCount', None)
if platform_update_domain_count_value is not None:
platform_update_domain_count_instance = platform_update_domain_count_value
availability_set_instance.platform_update_domain_count = platform_update_domain_count_instance
platform_fault_domain_count_value = properties_value2.get('platformFaultDomainCount', None)
if platform_fault_domain_count_value is not None:
platform_fault_domain_count_instance = platform_fault_domain_count_value
availability_set_instance.platform_fault_domain_count = platform_fault_domain_count_instance
virtual_machines_array2 = properties_value2.get('virtualMachines', None)
if virtual_machines_array2 is not None:
for virtual_machines_value in virtual_machines_array2:
virtual_machine_reference_instance = VirtualMachineReference()
availability_set_instance.virtual_machines_references.append(virtual_machine_reference_instance)
id_value = virtual_machines_value.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_reference_instance.reference_uri = id_instance
statuses_array2 = properties_value2.get('statuses', None)
if statuses_array2 is not None:
for statuses_value in statuses_array2:
instance_view_status_instance = InstanceViewStatus()
availability_set_instance.statuses.append(instance_view_status_instance)
code_value = statuses_value.get('code', None)
if code_value is not None:
code_instance = code_value
instance_view_status_instance.code = code_instance
level_value = statuses_value.get('level', None)
if level_value is not None:
level_instance = level_value
instance_view_status_instance.level = level_instance
display_status_value = statuses_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
instance_view_status_instance.display_status = display_status_instance
message_value = statuses_value.get('message', None)
if message_value is not None:
message_instance = message_value
instance_view_status_instance.message = message_instance
time_value = statuses_value.get('time', None)
if time_value is not None:
time_instance = time_value
instance_view_status_instance.time = time_instance
id_value2 = response_doc.get('id', None)
if id_value2 is not None:
id_instance2 = id_value2
availability_set_instance.id = id_instance2
name_value = response_doc.get('name', None)
if name_value is not None:
name_instance = name_value
availability_set_instance.name = name_instance
type_value = response_doc.get('type', None)
if type_value is not None:
type_instance = type_value
availability_set_instance.type = type_instance
location_value = response_doc.get('location', None)
if location_value is not None:
location_instance = location_value
availability_set_instance.location = location_instance
tags_sequence_element = response_doc.get('tags', None)
if tags_sequence_element is not None:
for property in tags_sequence_element:
tags_key2 = property
tags_value2 = tags_sequence_element[property]
availability_set_instance.tags[tags_key2] = tags_value2
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def delete(self, resource_group_name, availability_set_name):
"""
The operation to delete the availability set.
Args:
resource_group_name (string): The name of the resource group.
availability_set_name (string): The name of the availability set.
Returns:
AzureOperationResponse: A standard service response including an HTTP
status code and request ID.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if availability_set_name is None:
raise ValueError('availability_set_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/availabilitySets/'
url = url + quote(availability_set_name)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url[len(base_url) - 1] == '/':
base_url = base_url[0 : len(base_url) - 1]
if url[0] == '/':
url = url[1 : ]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'DELETE'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
result = AzureOperationResponse()
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def get(self, resource_group_name, availability_set_name):
"""
The operation to get the availability set.
Args:
resource_group_name (string): The name of the resource group.
availability_set_name (string): The name of the availability set.
Returns:
AvailabilitySetGetResponse: The GET Availability Set operation response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if availability_set_name is None:
raise ValueError('availability_set_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/availabilitySets/'
url = url + quote(availability_set_name)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url[len(base_url) - 1] == '/':
base_url = base_url[0 : len(base_url) - 1]
if url[0] == '/':
url = url[1 : ]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = AvailabilitySetGetResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
availability_set_instance = AvailabilitySet(statuses=[], tags={}, virtual_machines_references=[])
result.availability_set = availability_set_instance
properties_value = response_doc.get('properties', None)
if properties_value is not None:
platform_update_domain_count_value = properties_value.get('platformUpdateDomainCount', None)
if platform_update_domain_count_value is not None:
platform_update_domain_count_instance = platform_update_domain_count_value
availability_set_instance.platform_update_domain_count = platform_update_domain_count_instance
platform_fault_domain_count_value = properties_value.get('platformFaultDomainCount', None)
if platform_fault_domain_count_value is not None:
platform_fault_domain_count_instance = platform_fault_domain_count_value
availability_set_instance.platform_fault_domain_count = platform_fault_domain_count_instance
virtual_machines_array = properties_value.get('virtualMachines', None)
if virtual_machines_array is not None:
for virtual_machines_value in virtual_machines_array:
virtual_machine_reference_instance = VirtualMachineReference()
availability_set_instance.virtual_machines_references.append(virtual_machine_reference_instance)
id_value = virtual_machines_value.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_reference_instance.reference_uri = id_instance
statuses_array = properties_value.get('statuses', None)
if statuses_array is not None:
for statuses_value in statuses_array:
instance_view_status_instance = InstanceViewStatus()
availability_set_instance.statuses.append(instance_view_status_instance)
code_value = statuses_value.get('code', None)
if code_value is not None:
code_instance = code_value
instance_view_status_instance.code = code_instance
level_value = statuses_value.get('level', None)
if level_value is not None:
level_instance = level_value
instance_view_status_instance.level = level_instance
display_status_value = statuses_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
instance_view_status_instance.display_status = display_status_instance
message_value = statuses_value.get('message', None)
if message_value is not None:
message_instance = message_value
instance_view_status_instance.message = message_instance
time_value = statuses_value.get('time', None)
if time_value is not None:
time_instance = time_value
instance_view_status_instance.time = time_instance
id_value2 = response_doc.get('id', None)
if id_value2 is not None:
id_instance2 = id_value2
availability_set_instance.id = id_instance2
name_value = response_doc.get('name', None)
if name_value is not None:
name_instance = name_value
availability_set_instance.name = name_instance
type_value = response_doc.get('type', None)
if type_value is not None:
type_instance = type_value
availability_set_instance.type = type_instance
location_value = response_doc.get('location', None)
if location_value is not None:
location_instance = location_value
availability_set_instance.location = location_instance
tags_sequence_element = response_doc.get('tags', None)
if tags_sequence_element is not None:
for property in tags_sequence_element:
tags_key = property
tags_value = tags_sequence_element[property]
availability_set_instance.tags[tags_key] = tags_value
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list(self, resource_group_name):
"""
The operation to list the availability sets.
Args:
resource_group_name (string): The name of the resource group.
Returns:
AvailabilitySetListResponse: The List Availability Set operation
response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/availabilitySets'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url[len(base_url) - 1] == '/':
base_url = base_url[0 : len(base_url) - 1]
if url[0] == '/':
url = url[1 : ]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = AvailabilitySetListResponse(availability_sets=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
value_array = response_doc.get('value', None)
if value_array is not None:
for value_value in value_array:
availability_set_json_instance = AvailabilitySet(statuses=[], tags={}, virtual_machines_references=[])
result.availability_sets.append(availability_set_json_instance)
properties_value = value_value.get('properties', None)
if properties_value is not None:
platform_update_domain_count_value = properties_value.get('platformUpdateDomainCount', None)
if platform_update_domain_count_value is not None:
platform_update_domain_count_instance = platform_update_domain_count_value
availability_set_json_instance.platform_update_domain_count = platform_update_domain_count_instance
platform_fault_domain_count_value = properties_value.get('platformFaultDomainCount', None)
if platform_fault_domain_count_value is not None:
platform_fault_domain_count_instance = platform_fault_domain_count_value
availability_set_json_instance.platform_fault_domain_count = platform_fault_domain_count_instance
virtual_machines_array = properties_value.get('virtualMachines', None)
if virtual_machines_array is not None:
for virtual_machines_value in virtual_machines_array:
virtual_machine_reference_instance = VirtualMachineReference()
availability_set_json_instance.virtual_machines_references.append(virtual_machine_reference_instance)
id_value = virtual_machines_value.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_reference_instance.reference_uri = id_instance
statuses_array = properties_value.get('statuses', None)
if statuses_array is not None:
for statuses_value in statuses_array:
instance_view_status_instance = InstanceViewStatus()
availability_set_json_instance.statuses.append(instance_view_status_instance)
code_value = statuses_value.get('code', None)
if code_value is not None:
code_instance = code_value
instance_view_status_instance.code = code_instance
level_value = statuses_value.get('level', None)
if level_value is not None:
level_instance = level_value
instance_view_status_instance.level = level_instance
display_status_value = statuses_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
instance_view_status_instance.display_status = display_status_instance
message_value = statuses_value.get('message', None)
if message_value is not None:
message_instance = message_value
instance_view_status_instance.message = message_instance
time_value = statuses_value.get('time', None)
if time_value is not None:
time_instance = time_value
instance_view_status_instance.time = time_instance
id_value2 = value_value.get('id', None)
if id_value2 is not None:
id_instance2 = id_value2
availability_set_json_instance.id = id_instance2
name_value = value_value.get('name', None)
if name_value is not None:
name_instance = name_value
availability_set_json_instance.name = name_instance
type_value = value_value.get('type', None)
if type_value is not None:
type_instance = type_value
availability_set_json_instance.type = type_instance
location_value = value_value.get('location', None)
if location_value is not None:
location_instance = location_value
availability_set_json_instance.location = location_instance
tags_sequence_element = value_value.get('tags', None)
if tags_sequence_element is not None:
for tags_key, tags_value in tags_sequence_element.items():
    availability_set_json_instance.tags[tags_key] = tags_value
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list_available_sizes(self, resource_group_name, availability_set_name):
"""
Lists the virtual machine sizes available for use with an availability
set.
Args:
resource_group_name (string): The name of the resource group.
availability_set_name (string): The name of the availability set.
Returns:
VirtualMachineSizeListResponse: The List Virtual Machine operation
response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if availability_set_name is None:
raise ValueError('availability_set_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/availabilitySets/'
url = url + quote(availability_set_name)
url = url + '/vmSizes'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineSizeListResponse(virtual_machine_sizes=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
value_array = response_doc.get('value', None)
if value_array is not None:
for value_value in value_array:
virtual_machine_size_instance = VirtualMachineSize()
result.virtual_machine_sizes.append(virtual_machine_size_instance)
name_value = value_value.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_size_instance.name = name_instance
number_of_cores_value = value_value.get('numberOfCores', None)
if number_of_cores_value is not None:
number_of_cores_instance = number_of_cores_value
virtual_machine_size_instance.number_of_cores = number_of_cores_instance
os_disk_size_in_mb_value = value_value.get('osDiskSizeInMB', None)
if os_disk_size_in_mb_value is not None:
os_disk_size_in_mb_instance = os_disk_size_in_mb_value
virtual_machine_size_instance.os_disk_size_in_mb = os_disk_size_in_mb_instance
resource_disk_size_in_mb_value = value_value.get('resourceDiskSizeInMB', None)
if resource_disk_size_in_mb_value is not None:
resource_disk_size_in_mb_instance = resource_disk_size_in_mb_value
virtual_machine_size_instance.resource_disk_size_in_mb = resource_disk_size_in_mb_instance
memory_in_mb_value = value_value.get('memoryInMB', None)
if memory_in_mb_value is not None:
memory_in_mb_instance = memory_in_mb_value
virtual_machine_size_instance.memory_in_mb = memory_in_mb_instance
max_data_disk_count_value = value_value.get('maxDataDiskCount', None)
if max_data_disk_count_value is not None:
max_data_disk_count_instance = max_data_disk_count_value
virtual_machine_size_instance.max_data_disk_count = max_data_disk_count_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
class UsageOperations(object):
"""
Operations for listing usage.
Note: An instance of this class is automatically created for each
instance of ComputeManagementClient.
"""
def __init__(self, client):
self._client = client
@property
def client(self):
"""
Gets a reference to the
Microsoft.Azure.Management.Compute.ComputeManagementClient.
"""
return self._client
def list(self, location):
"""
Lists compute usages for a subscription.
Args:
location (string): The location for which resource usage is queried.
Returns:
ListUsagesResponse: The List Usages operation response.
"""
# Validate
if location is None:
    raise ValueError('location cannot be None.')
if len(location) > 1000:
    raise IndexError('location is outside the valid range.')
if re.search(r'^[-\w\._]+$', location) is None:
    raise IndexError('location is outside the valid range.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
url = url + quote(location)
url = url + '/usages'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = ListUsagesResponse(usages=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
value_array = response_doc.get('value', None)
if value_array is not None:
for value_value in value_array:
usage_instance = Usage()
result.usages.append(usage_instance)
unit_value = value_value.get('unit', None)
if unit_value is not None:
unit_instance = unit_value
usage_instance.unit = unit_instance
current_value_value = value_value.get('currentValue', None)
if current_value_value is not None:
current_value_instance = current_value_value
usage_instance.current_value = current_value_instance
limit_value = value_value.get('limit', None)
if limit_value is not None:
limit_instance = limit_value
usage_instance.limit = limit_instance
name_value = value_value.get('name', None)
if name_value is not None:
name_instance = UsageName()
usage_instance.name = name_instance
value_value2 = name_value.get('value', None)
if value_value2 is not None:
value_instance = value_value2
name_instance.value = value_instance
localized_value_value = name_value.get('localizedValue', None)
if localized_value_value is not None:
localized_value_instance = localized_value_value
name_instance.localized_value = localized_value_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
class VirtualMachineExtensionImageOperations(object):
"""
Operations for managing the virtual machine extension images in compute
management.
Note: An instance of this class is automatically created for each
instance of ComputeManagementClient.
"""
def __init__(self, client):
self._client = client
@property
def client(self):
"""
Gets a reference to the
Microsoft.Azure.Management.Compute.ComputeManagementClient.
"""
return self._client
def get(self, parameters):
"""
Gets a virtual machine extension image.
Args:
parameters (VirtualMachineExtensionImageGetParameters)
Returns:
VirtualMachineExtensionImageGetResponse: The get virtual machine
extension image operation response.
"""
# Validate
if parameters is not None:
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
if parameters.publisher_name is None:
raise ValueError('parameters.publisher_name cannot be None.')
if parameters.type is None:
raise ValueError('parameters.type cannot be None.')
if parameters.version is None:
raise ValueError('parameters.version cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
if parameters is not None:
url = url + quote(parameters.location)
url = url + '/publishers/'
if parameters is not None:
url = url + quote(parameters.publisher_name)
url = url + '/artifacttypes/vmextension/types/'
if parameters is not None:
url = url + quote(parameters.type)
url = url + '/versions/'
if parameters is not None:
url = url + quote(parameters.version)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineExtensionImageGetResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
virtual_machine_extension_image_instance = VirtualMachineExtensionImage()
result.virtual_machine_extension_image = virtual_machine_extension_image_instance
properties_value = response_doc.get('properties', None)
if properties_value is not None:
operating_system_value = properties_value.get('operatingSystem', None)
if operating_system_value is not None:
operating_system_instance = operating_system_value
virtual_machine_extension_image_instance.operating_system = operating_system_instance
compute_role_value = properties_value.get('computeRole', None)
if compute_role_value is not None:
compute_role_instance = compute_role_value
virtual_machine_extension_image_instance.compute_role = compute_role_instance
handler_schema_value = properties_value.get('handlerSchema', None)
if handler_schema_value is not None:
handler_schema_instance = handler_schema_value
virtual_machine_extension_image_instance.handler_schema = handler_schema_instance
vm_scale_set_enabled_value = properties_value.get('vmScaleSetEnabled', None)
if vm_scale_set_enabled_value is not None:
vm_scale_set_enabled_instance = vm_scale_set_enabled_value
virtual_machine_extension_image_instance.vm_scale_set_enabled = vm_scale_set_enabled_instance
supports_multiple_extensions_value = properties_value.get('supportsMultipleExtensions', None)
if supports_multiple_extensions_value is not None:
supports_multiple_extensions_instance = supports_multiple_extensions_value
virtual_machine_extension_image_instance.supports_multiple_extensions = supports_multiple_extensions_instance
id_value = response_doc.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_extension_image_instance.id = id_instance
name_value = response_doc.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_extension_image_instance.name = name_instance
location_value = response_doc.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_image_instance.location = location_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list_types(self, parameters):
"""
Gets a list of virtual machine extension image types.
Args:
parameters (VirtualMachineExtensionImageListTypesParameters)
Returns:
VirtualMachineImageResourceList: A list of virtual machine image
resource information.
"""
# Validate
if parameters is not None:
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
if parameters.publisher_name is None:
raise ValueError('parameters.publisher_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
if parameters is not None:
url = url + quote(parameters.location)
url = url + '/publishers/'
if parameters is not None:
url = url + quote(parameters.publisher_name)
url = url + '/artifacttypes/vmextension/types'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineImageResourceList(resources=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
resources_array = response_doc
if resources_array is not None:
for resources_value in resources_array:
virtual_machine_image_resource_instance = VirtualMachineImageResource()
result.resources.append(virtual_machine_image_resource_instance)
id_value = resources_value.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_image_resource_instance.id = id_instance
name_value = resources_value.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_image_resource_instance.name = name_instance
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_image_resource_instance.location = location_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list_versions(self, parameters):
"""
Gets a list of virtual machine extension image versions.
Args:
parameters (VirtualMachineExtensionImageListVersionsParameters)
Returns:
VirtualMachineImageResourceList: A list of virtual machine image
resource information.
"""
# Validate
if parameters is not None:
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
if parameters.publisher_name is None:
raise ValueError('parameters.publisher_name cannot be None.')
if parameters.type is None:
raise ValueError('parameters.type cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
if parameters is not None:
url = url + quote(parameters.location)
url = url + '/publishers/'
if parameters is not None:
url = url + quote(parameters.publisher_name)
url = url + '/artifacttypes/vmextension/types/'
if parameters is not None:
url = url + quote(parameters.type)
url = url + '/versions'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if parameters is not None and parameters.filter_expression is not None:
query_parameters.append(parameters.filter_expression)
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineImageResourceList(resources=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
resources_array = response_doc
if resources_array is not None:
for resources_value in resources_array:
virtual_machine_image_resource_instance = VirtualMachineImageResource()
result.resources.append(virtual_machine_image_resource_instance)
id_value = resources_value.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_image_resource_instance.id = id_instance
name_value = resources_value.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_image_resource_instance.name = name_instance
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_image_resource_instance.location = location_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
class VirtualMachineExtensionOperations(object):
"""
Operations for managing the virtual machine extensions in compute
management.
Note: An instance of this class is automatically created for each
instance of ComputeManagementClient.
"""
def __init__(self, client):
self._client = client
@property
def client(self):
"""
Gets a reference to the
Microsoft.Azure.Management.Compute.ComputeManagementClient.
"""
return self._client
def begin_creating_or_updating(self, resource_group_name, vm_name, extension_parameters):
"""
The operation to create or update the extension.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine where the extension
should be created or updated.
extension_parameters (VirtualMachineExtension): Parameters supplied to
the Create Virtual Machine Extension operation.
Returns:
VirtualMachineExtensionCreateOrUpdateResponse: The compute long
running operation response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
if extension_parameters is None:
raise ValueError('extension_parameters cannot be None.')
if extension_parameters.location is None:
raise ValueError('extension_parameters.location cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/extensions/'
if extension_parameters.name is not None:
url = url + quote(extension_parameters.name)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'PUT'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Serialize Request
request_content = None
request_doc = None
virtual_machine_extension_json_value = {}
request_doc = virtual_machine_extension_json_value
properties_value = {}
virtual_machine_extension_json_value['properties'] = properties_value
if extension_parameters.publisher is not None:
properties_value['publisher'] = extension_parameters.publisher
if extension_parameters.extension_type is not None:
properties_value['type'] = extension_parameters.extension_type
if extension_parameters.type_handler_version is not None:
properties_value['typeHandlerVersion'] = extension_parameters.type_handler_version
if extension_parameters.auto_upgrade_minor_version is not None:
properties_value['autoUpgradeMinorVersion'] = extension_parameters.auto_upgrade_minor_version
if extension_parameters.settings is not None:
properties_value['settings'] = json.loads(extension_parameters.settings)
if extension_parameters.protected_settings is not None:
properties_value['protectedSettings'] = json.loads(extension_parameters.protected_settings)
if extension_parameters.provisioning_state is not None:
properties_value['provisioningState'] = extension_parameters.provisioning_state
if extension_parameters.instance_view is not None:
instance_view_value = {}
properties_value['instanceView'] = instance_view_value
if extension_parameters.instance_view.name is not None:
instance_view_value['name'] = extension_parameters.instance_view.name
if extension_parameters.instance_view.extension_type is not None:
instance_view_value['type'] = extension_parameters.instance_view.extension_type
if extension_parameters.instance_view.type_handler_version is not None:
instance_view_value['typeHandlerVersion'] = extension_parameters.instance_view.type_handler_version
if extension_parameters.instance_view.sub_statuses is not None:
substatuses_array = []
for substatuses_item in extension_parameters.instance_view.sub_statuses:
instance_view_status_value = {}
substatuses_array.append(instance_view_status_value)
if substatuses_item.code is not None:
instance_view_status_value['code'] = substatuses_item.code
if substatuses_item.level is not None:
instance_view_status_value['level'] = substatuses_item.level
if substatuses_item.display_status is not None:
instance_view_status_value['displayStatus'] = substatuses_item.display_status
if substatuses_item.message is not None:
instance_view_status_value['message'] = substatuses_item.message
if substatuses_item.time is not None:
instance_view_status_value['time'] = substatuses_item.time
instance_view_value['substatuses'] = substatuses_array
if extension_parameters.instance_view.statuses is not None:
statuses_array = []
for statuses_item in extension_parameters.instance_view.statuses:
instance_view_status_value2 = {}
statuses_array.append(instance_view_status_value2)
if statuses_item.code is not None:
instance_view_status_value2['code'] = statuses_item.code
if statuses_item.level is not None:
instance_view_status_value2['level'] = statuses_item.level
if statuses_item.display_status is not None:
instance_view_status_value2['displayStatus'] = statuses_item.display_status
if statuses_item.message is not None:
instance_view_status_value2['message'] = statuses_item.message
if statuses_item.time is not None:
instance_view_status_value2['time'] = statuses_item.time
instance_view_value['statuses'] = statuses_array
if extension_parameters.id is not None:
virtual_machine_extension_json_value['id'] = extension_parameters.id
if extension_parameters.name is not None:
virtual_machine_extension_json_value['name'] = extension_parameters.name
if extension_parameters.type is not None:
virtual_machine_extension_json_value['type'] = extension_parameters.type
virtual_machine_extension_json_value['location'] = extension_parameters.location
if extension_parameters.tags is not None:
tags_dictionary = dict(extension_parameters.tags)
virtual_machine_extension_json_value['tags'] = tags_dictionary
request_content = json.dumps(request_doc)
http_request.data = request_content
http_request.headers['Content-Length'] = str(len(request_content))
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200 and status_code != 201:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200 or status_code == 201:
response_content = body
result = VirtualMachineExtensionCreateOrUpdateResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
virtual_machine_extension_instance = VirtualMachineExtension(tags={})
result.virtual_machine_extension = virtual_machine_extension_instance
properties_value2 = response_doc.get('properties', None)
if properties_value2 is not None:
publisher_value = properties_value2.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
virtual_machine_extension_instance.publisher = publisher_instance
type_value = properties_value2.get('type', None)
if type_value is not None:
type_instance = type_value
virtual_machine_extension_instance.extension_type = type_instance
type_handler_version_value = properties_value2.get('typeHandlerVersion', None)
if type_handler_version_value is not None:
type_handler_version_instance = type_handler_version_value
virtual_machine_extension_instance.type_handler_version = type_handler_version_instance
auto_upgrade_minor_version_value = properties_value2.get('autoUpgradeMinorVersion', None)
if auto_upgrade_minor_version_value is not None:
auto_upgrade_minor_version_instance = auto_upgrade_minor_version_value
virtual_machine_extension_instance.auto_upgrade_minor_version = auto_upgrade_minor_version_instance
settings_value = properties_value2.get('settings', None)
if settings_value is not None:
settings_instance = json.dumps(settings_value)
virtual_machine_extension_instance.settings = settings_instance
protected_settings_value = properties_value2.get('protectedSettings', None)
if protected_settings_value is not None:
protected_settings_instance = json.dumps(protected_settings_value)
virtual_machine_extension_instance.protected_settings = protected_settings_instance
provisioning_state_value = properties_value2.get('provisioningState', None)
if provisioning_state_value is not None:
provisioning_state_instance = provisioning_state_value
virtual_machine_extension_instance.provisioning_state = provisioning_state_instance
instance_view_value2 = properties_value2.get('instanceView', None)
if instance_view_value2 is not None:
instance_view_instance = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
virtual_machine_extension_instance.instance_view = instance_view_instance
name_value = instance_view_value2.get('name', None)
if name_value is not None:
name_instance = name_value
instance_view_instance.name = name_instance
type_value2 = instance_view_value2.get('type', None)
if type_value2 is not None:
type_instance2 = type_value2
instance_view_instance.extension_type = type_instance2
type_handler_version_value2 = instance_view_value2.get('typeHandlerVersion', None)
if type_handler_version_value2 is not None:
type_handler_version_instance2 = type_handler_version_value2
instance_view_instance.type_handler_version = type_handler_version_instance2
substatuses_array2 = instance_view_value2.get('substatuses', None)
if substatuses_array2 is not None:
for substatuses_value in substatuses_array2:
instance_view_status_instance = InstanceViewStatus()
instance_view_instance.sub_statuses.append(instance_view_status_instance)
code_value = substatuses_value.get('code', None)
if code_value is not None:
code_instance = code_value
instance_view_status_instance.code = code_instance
level_value = substatuses_value.get('level', None)
if level_value is not None:
level_instance = level_value
instance_view_status_instance.level = level_instance
display_status_value = substatuses_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
instance_view_status_instance.display_status = display_status_instance
message_value = substatuses_value.get('message', None)
if message_value is not None:
message_instance = message_value
instance_view_status_instance.message = message_instance
time_value = substatuses_value.get('time', None)
if time_value is not None:
time_instance = time_value
instance_view_status_instance.time = time_instance
statuses_array2 = instance_view_value2.get('statuses', None)
if statuses_array2 is not None:
for statuses_value in statuses_array2:
instance_view_status_instance2 = InstanceViewStatus()
instance_view_instance.statuses.append(instance_view_status_instance2)
code_value2 = statuses_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
instance_view_status_instance2.code = code_instance2
level_value2 = statuses_value.get('level', None)
if level_value2 is not None:
level_instance2 = level_value2
instance_view_status_instance2.level = level_instance2
display_status_value2 = statuses_value.get('displayStatus', None)
if display_status_value2 is not None:
display_status_instance2 = display_status_value2
instance_view_status_instance2.display_status = display_status_instance2
message_value2 = statuses_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
instance_view_status_instance2.message = message_instance2
time_value2 = statuses_value.get('time', None)
if time_value2 is not None:
time_instance2 = time_value2
instance_view_status_instance2.time = time_instance2
id_value = response_doc.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_extension_instance.id = id_instance
name_value2 = response_doc.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
virtual_machine_extension_instance.name = name_instance2
type_value3 = response_doc.get('type', None)
if type_value3 is not None:
type_instance3 = type_value3
virtual_machine_extension_instance.type = type_instance3
location_value = response_doc.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_instance.location = location_instance
tags_sequence_element = response_doc.get('tags', None)
if tags_sequence_element is not None:
for tags_key2, tags_value2 in tags_sequence_element.items():
    virtual_machine_extension_instance.tags[tags_key2] = tags_value2
result.status_code = status_code
result.azure_async_operation = response.headers.get('azure-asyncoperation')
result.request_id = response.headers.get('x-ms-request-id')
return result
def begin_deleting(self, resource_group_name, vm_name, vm_extension_name):
"""
The operation to delete the extension.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine where the extension
should be deleted.
vm_extension_name (string): The name of the virtual machine extension.
Returns:
ComputeOperationResponse: The compute long running operation response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
if vm_extension_name is None:
raise ValueError('vm_extension_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/extensions/'
url = url + quote(vm_extension_name)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'DELETE'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 202:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
result = ComputeOperationResponse()
result.status_code = status_code
result.azure_async_operation = response.headers.get('azure-asyncoperation')
result.request_id = response.headers.get('x-ms-request-id')
return result
def create_or_update(self, resource_group_name, vm_name, extension_parameters):
"""
The operation to create or update the extension.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine where the extension
should be created or updated.
extension_parameters (VirtualMachineExtension): Parameters supplied to
the Create Virtual Machine Extension operation.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
"""
client2 = self.client
response = client2.virtual_machine_extensions.begin_creating_or_updating(resource_group_name, vm_name, extension_parameters)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_initial_timeout >= 0:
delay_in_seconds = client2.long_running_operation_initial_timeout
# Poll until the long-running operation leaves the InProgress state.
while result.status == ComputeOperationStatus.in_progress:
time.sleep(delay_in_seconds)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_retry_timeout >= 0:
delay_in_seconds = client2.long_running_operation_retry_timeout
return result
def delete(self, resource_group_name, vm_name, vm_extension_name):
"""
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine where the extension
should be deleted.
vm_extension_name (string): The name of the virtual machine extension.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
"""
client2 = self.client
response = client2.virtual_machine_extensions.begin_deleting(resource_group_name, vm_name, vm_extension_name)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_initial_timeout >= 0:
delay_in_seconds = client2.long_running_operation_initial_timeout
# Poll until the long-running operation leaves the InProgress state.
while result.status == ComputeOperationStatus.in_progress:
time.sleep(delay_in_seconds)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_retry_timeout >= 0:
delay_in_seconds = client2.long_running_operation_retry_timeout
return result
def get(self, resource_group_name, vm_name, vm_extension_name):
"""
The operation to get the extension.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine containing the
extension.
vm_extension_name (string): The name of the virtual machine extension.
Returns:
VirtualMachineExtensionGetResponse: The Get VM-Extension operation
response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
if vm_extension_name is None:
raise ValueError('vm_extension_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/extensions/'
url = url + quote(vm_extension_name)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineExtensionGetResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
virtual_machine_extension_instance = VirtualMachineExtension(tags={})
result.virtual_machine_extension = virtual_machine_extension_instance
properties_value = response_doc.get('properties', None)
if properties_value is not None:
publisher_value = properties_value.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
virtual_machine_extension_instance.publisher = publisher_instance
type_value = properties_value.get('type', None)
if type_value is not None:
type_instance = type_value
virtual_machine_extension_instance.extension_type = type_instance
type_handler_version_value = properties_value.get('typeHandlerVersion', None)
if type_handler_version_value is not None:
type_handler_version_instance = type_handler_version_value
virtual_machine_extension_instance.type_handler_version = type_handler_version_instance
auto_upgrade_minor_version_value = properties_value.get('autoUpgradeMinorVersion', None)
if auto_upgrade_minor_version_value is not None:
auto_upgrade_minor_version_instance = auto_upgrade_minor_version_value
virtual_machine_extension_instance.auto_upgrade_minor_version = auto_upgrade_minor_version_instance
settings_value = properties_value.get('settings', None)
if settings_value is not None:
settings_instance = json.dumps(settings_value)
virtual_machine_extension_instance.settings = settings_instance
protected_settings_value = properties_value.get('protectedSettings', None)
if protected_settings_value is not None:
protected_settings_instance = json.dumps(protected_settings_value)
virtual_machine_extension_instance.protected_settings = protected_settings_instance
provisioning_state_value = properties_value.get('provisioningState', None)
if provisioning_state_value is not None:
provisioning_state_instance = provisioning_state_value
virtual_machine_extension_instance.provisioning_state = provisioning_state_instance
instance_view_value = properties_value.get('instanceView', None)
if instance_view_value is not None:
instance_view_instance = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
virtual_machine_extension_instance.instance_view = instance_view_instance
name_value = instance_view_value.get('name', None)
if name_value is not None:
name_instance = name_value
instance_view_instance.name = name_instance
type_value2 = instance_view_value.get('type', None)
if type_value2 is not None:
type_instance2 = type_value2
instance_view_instance.extension_type = type_instance2
type_handler_version_value2 = instance_view_value.get('typeHandlerVersion', None)
if type_handler_version_value2 is not None:
type_handler_version_instance2 = type_handler_version_value2
instance_view_instance.type_handler_version = type_handler_version_instance2
substatuses_array = instance_view_value.get('substatuses', None)
if substatuses_array is not None:
for substatuses_value in substatuses_array:
instance_view_status_instance = InstanceViewStatus()
instance_view_instance.sub_statuses.append(instance_view_status_instance)
code_value = substatuses_value.get('code', None)
if code_value is not None:
code_instance = code_value
instance_view_status_instance.code = code_instance
level_value = substatuses_value.get('level', None)
if level_value is not None:
level_instance = level_value
instance_view_status_instance.level = level_instance
display_status_value = substatuses_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
instance_view_status_instance.display_status = display_status_instance
message_value = substatuses_value.get('message', None)
if message_value is not None:
message_instance = message_value
instance_view_status_instance.message = message_instance
time_value = substatuses_value.get('time', None)
if time_value is not None:
time_instance = time_value
instance_view_status_instance.time = time_instance
statuses_array = instance_view_value.get('statuses', None)
if statuses_array is not None:
for statuses_value in statuses_array:
instance_view_status_instance2 = InstanceViewStatus()
instance_view_instance.statuses.append(instance_view_status_instance2)
code_value2 = statuses_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
instance_view_status_instance2.code = code_instance2
level_value2 = statuses_value.get('level', None)
if level_value2 is not None:
level_instance2 = level_value2
instance_view_status_instance2.level = level_instance2
display_status_value2 = statuses_value.get('displayStatus', None)
if display_status_value2 is not None:
display_status_instance2 = display_status_value2
instance_view_status_instance2.display_status = display_status_instance2
message_value2 = statuses_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
instance_view_status_instance2.message = message_instance2
time_value2 = statuses_value.get('time', None)
if time_value2 is not None:
time_instance2 = time_value2
instance_view_status_instance2.time = time_instance2
id_value = response_doc.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_extension_instance.id = id_instance
name_value2 = response_doc.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
virtual_machine_extension_instance.name = name_instance2
type_value3 = response_doc.get('type', None)
if type_value3 is not None:
type_instance3 = type_value3
virtual_machine_extension_instance.type = type_instance3
location_value = response_doc.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_instance.location = location_instance
tags_sequence_element = response_doc.get('tags', None)
if tags_sequence_element is not None:
for tags_key, tags_value in tags_sequence_element.items():
    virtual_machine_extension_instance.tags[tags_key] = tags_value
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def get_with_instance_view(self, resource_group_name, vm_name, vm_extension_name):
"""
The operation to get an extension along with its instance view.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine containing the
extension.
vm_extension_name (string): The name of the virtual machine extension.
Returns:
VirtualMachineExtensionGetResponse: The Get VM-Extension operation
response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
if vm_extension_name is None:
raise ValueError('vm_extension_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/extensions/'
url = url + quote(vm_extension_name)
query_parameters = []
query_parameters.append('$expand=instanceView')
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineExtensionGetResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
virtual_machine_extension_instance = VirtualMachineExtension(tags={})
result.virtual_machine_extension = virtual_machine_extension_instance
properties_value = response_doc.get('properties', None)
if properties_value is not None:
publisher_value = properties_value.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
virtual_machine_extension_instance.publisher = publisher_instance
type_value = properties_value.get('type', None)
if type_value is not None:
type_instance = type_value
virtual_machine_extension_instance.extension_type = type_instance
type_handler_version_value = properties_value.get('typeHandlerVersion', None)
if type_handler_version_value is not None:
type_handler_version_instance = type_handler_version_value
virtual_machine_extension_instance.type_handler_version = type_handler_version_instance
auto_upgrade_minor_version_value = properties_value.get('autoUpgradeMinorVersion', None)
if auto_upgrade_minor_version_value is not None:
auto_upgrade_minor_version_instance = auto_upgrade_minor_version_value
virtual_machine_extension_instance.auto_upgrade_minor_version = auto_upgrade_minor_version_instance
settings_value = properties_value.get('settings', None)
if settings_value is not None:
settings_instance = json.dumps(settings_value)
virtual_machine_extension_instance.settings = settings_instance
protected_settings_value = properties_value.get('protectedSettings', None)
if protected_settings_value is not None:
protected_settings_instance = json.dumps(protected_settings_value)
virtual_machine_extension_instance.protected_settings = protected_settings_instance
provisioning_state_value = properties_value.get('provisioningState', None)
if provisioning_state_value is not None:
provisioning_state_instance = provisioning_state_value
virtual_machine_extension_instance.provisioning_state = provisioning_state_instance
instance_view_value = properties_value.get('instanceView', None)
if instance_view_value is not None:
instance_view_instance = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
virtual_machine_extension_instance.instance_view = instance_view_instance
name_value = instance_view_value.get('name', None)
if name_value is not None:
name_instance = name_value
instance_view_instance.name = name_instance
type_value2 = instance_view_value.get('type', None)
if type_value2 is not None:
type_instance2 = type_value2
instance_view_instance.extension_type = type_instance2
type_handler_version_value2 = instance_view_value.get('typeHandlerVersion', None)
if type_handler_version_value2 is not None:
type_handler_version_instance2 = type_handler_version_value2
instance_view_instance.type_handler_version = type_handler_version_instance2
substatuses_array = instance_view_value.get('substatuses', None)
if substatuses_array is not None:
for substatuses_value in substatuses_array:
instance_view_status_instance = InstanceViewStatus()
instance_view_instance.sub_statuses.append(instance_view_status_instance)
code_value = substatuses_value.get('code', None)
if code_value is not None:
code_instance = code_value
instance_view_status_instance.code = code_instance
level_value = substatuses_value.get('level', None)
if level_value is not None:
level_instance = level_value
instance_view_status_instance.level = level_instance
display_status_value = substatuses_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
instance_view_status_instance.display_status = display_status_instance
message_value = substatuses_value.get('message', None)
if message_value is not None:
message_instance = message_value
instance_view_status_instance.message = message_instance
time_value = substatuses_value.get('time', None)
if time_value is not None:
time_instance = time_value
instance_view_status_instance.time = time_instance
statuses_array = instance_view_value.get('statuses', None)
if statuses_array is not None:
for statuses_value in statuses_array:
instance_view_status_instance2 = InstanceViewStatus()
instance_view_instance.statuses.append(instance_view_status_instance2)
code_value2 = statuses_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
instance_view_status_instance2.code = code_instance2
level_value2 = statuses_value.get('level', None)
if level_value2 is not None:
level_instance2 = level_value2
instance_view_status_instance2.level = level_instance2
display_status_value2 = statuses_value.get('displayStatus', None)
if display_status_value2 is not None:
display_status_instance2 = display_status_value2
instance_view_status_instance2.display_status = display_status_instance2
message_value2 = statuses_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
instance_view_status_instance2.message = message_instance2
time_value2 = statuses_value.get('time', None)
if time_value2 is not None:
time_instance2 = time_value2
instance_view_status_instance2.time = time_instance2
id_value = response_doc.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_extension_instance.id = id_instance
name_value2 = response_doc.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
virtual_machine_extension_instance.name = name_instance2
type_value3 = response_doc.get('type', None)
if type_value3 is not None:
type_instance3 = type_value3
virtual_machine_extension_instance.type = type_instance3
location_value = response_doc.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_instance.location = location_instance
tags_sequence_element = response_doc.get('tags', None)
if tags_sequence_element is not None:
for tags_key, tags_value in tags_sequence_element.items():
    virtual_machine_extension_instance.tags[tags_key] = tags_value
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
class VirtualMachineImageOperations(object):
"""
Operations for managing the virtual machine images in compute management.
Note: An instance of this class is automatically created for each
instance of ComputeManagementClient.
"""
def __init__(self, client):
self._client = client
@property
def client(self):
"""
Gets a reference to the
Microsoft.Azure.Management.Compute.ComputeManagementClient.
"""
return self._client
def get(self, parameters):
"""
Gets a virtual machine image.
Args:
parameters (VirtualMachineImageGetParameters)
Returns:
VirtualMachineImageGetResponse: The get vm image operation response.
"""
# Validate
if parameters is not None:
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
if parameters.offer is None:
raise ValueError('parameters.offer cannot be None.')
if parameters.publisher_name is None:
raise ValueError('parameters.publisher_name cannot be None.')
if parameters.skus is None:
raise ValueError('parameters.skus cannot be None.')
if parameters.version is None:
raise ValueError('parameters.version cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
if parameters is not None:
url = url + quote(parameters.location)
url = url + '/publishers/'
if parameters is not None:
url = url + quote(parameters.publisher_name)
url = url + '/artifacttypes/vmimage/offers/'
if parameters is not None:
url = url + quote(parameters.offer)
url = url + '/skus/'
if parameters is not None:
url = url + quote(parameters.skus)
url = url + '/versions/'
if parameters is not None:
url = url + quote(parameters.version)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
    base_url = base_url[:-1]
if url.startswith('/'):
    url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineImageGetResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
virtual_machine_image_instance = VirtualMachineImage(data_disk_images=[])
result.virtual_machine_image = virtual_machine_image_instance
properties_value = response_doc.get('properties', None)
if properties_value is not None:
plan_value = properties_value.get('plan', None)
if plan_value is not None:
plan_instance = PurchasePlan()
virtual_machine_image_instance.purchase_plan = plan_instance
publisher_value = plan_value.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
plan_instance.publisher = publisher_instance
name_value = plan_value.get('name', None)
if name_value is not None:
name_instance = name_value
plan_instance.name = name_instance
product_value = plan_value.get('product', None)
if product_value is not None:
product_instance = product_value
plan_instance.product = product_instance
os_disk_image_value = properties_value.get('osDiskImage', None)
if os_disk_image_value is not None:
os_disk_image_instance = OSDiskImage()
virtual_machine_image_instance.os_disk_image = os_disk_image_instance
operating_system_value = os_disk_image_value.get('operatingSystem', None)
if operating_system_value is not None:
operating_system_instance = operating_system_value
os_disk_image_instance.operating_system = operating_system_instance
data_disk_images_array = properties_value.get('dataDiskImages', None)
if data_disk_images_array is not None:
for data_disk_images_value in data_disk_images_array:
data_disk_image_instance = DataDiskImage()
virtual_machine_image_instance.data_disk_images.append(data_disk_image_instance)
lun_value = data_disk_images_value.get('lun', None)
if lun_value is not None:
lun_instance = lun_value
data_disk_image_instance.lun = lun_instance
id_value = response_doc.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_image_instance.id = id_instance
name_value2 = response_doc.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
virtual_machine_image_instance.name = name_instance2
location_value = response_doc.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_image_instance.location = location_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list(self, parameters):
"""
Gets a list of virtual machine images.
Args:
            parameters (VirtualMachineImageListParameters): Parameters supplied
                to the List Virtual Machine Images operation.
Returns:
VirtualMachineImageResourceList: A list of virtual machine image
resource information.
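        Example:
            A minimal sketch under the same assumptions as get (a client with
            a virtual_machine_images group, attribute-style parameter
            population); values are illustrative.
            >>> params = VirtualMachineImageListParameters()
            >>> params.location = 'westus'
            >>> params.publisher_name = 'Canonical'
            >>> params.offer = 'UbuntuServer'
            >>> params.skus = '14.04.2-LTS'
            >>> params.filter_expression = '$top=3'  # optional, appended verbatim
            >>> response = client.virtual_machine_images.list(params)
            >>> for image in response.resources:
            ...     print(image.name)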
"""
# Validate
if parameters is not None:
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
if parameters.offer is None:
raise ValueError('parameters.offer cannot be None.')
if parameters.publisher_name is None:
raise ValueError('parameters.publisher_name cannot be None.')
if parameters.skus is None:
raise ValueError('parameters.skus cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
if parameters is not None:
url = url + quote(parameters.location)
url = url + '/publishers/'
if parameters is not None:
url = url + quote(parameters.publisher_name)
url = url + '/artifacttypes/vmimage/offers/'
if parameters is not None:
url = url + quote(parameters.offer)
url = url + '/skus/'
if parameters is not None:
url = url + quote(parameters.skus)
url = url + '/versions'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if parameters is not None and parameters.filter_expression is not None:
query_parameters.append(parameters.filter_expression)
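            # Note: the filter expression is appended to the query string
            # verbatim, so it must already be a URL-encoded name=value pair
            # such as '$top=3' (illustrative).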
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
        # Trim '/' from the end of base_url and from the beginning of url.
        if base_url.endswith('/'):
            base_url = base_url[:-1]
        if url.startswith('/'):
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineImageResourceList(resources=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
resources_array = response_doc
if resources_array is not None:
for resources_value in resources_array:
virtual_machine_image_resource_instance = VirtualMachineImageResource()
result.resources.append(virtual_machine_image_resource_instance)
id_value = resources_value.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_image_resource_instance.id = id_instance
name_value = resources_value.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_image_resource_instance.name = name_instance
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_image_resource_instance.location = location_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list_offers(self, parameters):
"""
Gets a list of virtual machine image offers.
Args:
            parameters (VirtualMachineImageListOffersParameters): Parameters
                supplied to the List Virtual Machine Image Offers operation.
Returns:
VirtualMachineImageResourceList: A list of virtual machine image
resource information.
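        Example:
            A minimal sketch (same assumptions as above; values illustrative).
            >>> params = VirtualMachineImageListOffersParameters()
            >>> params.location = 'westus'
            >>> params.publisher_name = 'Canonical'
            >>> response = client.virtual_machine_images.list_offers(params)
            >>> offer_names = [offer.name for offer in response.resources]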
"""
# Validate
if parameters is not None:
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
if parameters.publisher_name is None:
raise ValueError('parameters.publisher_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
if parameters is not None:
url = url + quote(parameters.location)
url = url + '/publishers/'
if parameters is not None:
url = url + quote(parameters.publisher_name)
url = url + '/artifacttypes/vmimage/offers'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
        # Trim '/' from the end of base_url and from the beginning of url.
        if base_url.endswith('/'):
            base_url = base_url[:-1]
        if url.startswith('/'):
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineImageResourceList(resources=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
resources_array = response_doc
if resources_array is not None:
for resources_value in resources_array:
virtual_machine_image_resource_instance = VirtualMachineImageResource()
result.resources.append(virtual_machine_image_resource_instance)
id_value = resources_value.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_image_resource_instance.id = id_instance
name_value = resources_value.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_image_resource_instance.name = name_instance
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_image_resource_instance.location = location_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list_publishers(self, parameters):
"""
Gets a list of virtual machine image publishers.
Args:
            parameters (VirtualMachineImageListPublishersParameters): Parameters
                supplied to the List Virtual Machine Image Publishers operation.
Returns:
VirtualMachineImageResourceList: A list of virtual machine image
resource information.
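        Example:
            A minimal sketch (same assumptions as above).
            >>> params = VirtualMachineImageListPublishersParameters()
            >>> params.location = 'westus'
            >>> response = client.virtual_machine_images.list_publishers(params)
            >>> publisher_names = [p.name for p in response.resources]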
"""
# Validate
if parameters is not None:
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
if parameters is not None:
url = url + quote(parameters.location)
url = url + '/publishers'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
        # Trim '/' from the end of base_url and from the beginning of url.
        if base_url.endswith('/'):
            base_url = base_url[:-1]
        if url.startswith('/'):
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineImageResourceList(resources=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
resources_array = response_doc
if resources_array is not None:
for resources_value in resources_array:
virtual_machine_image_resource_instance = VirtualMachineImageResource()
result.resources.append(virtual_machine_image_resource_instance)
id_value = resources_value.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_image_resource_instance.id = id_instance
name_value = resources_value.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_image_resource_instance.name = name_instance
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_image_resource_instance.location = location_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list_skus(self, parameters):
"""
        Gets a list of virtual machine image SKUs.
Args:
            parameters (VirtualMachineImageListSkusParameters): Parameters
                supplied to the List Virtual Machine Image SKUs operation.
Returns:
VirtualMachineImageResourceList: A list of virtual machine image
resource information.
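        Example:
            A minimal sketch (same assumptions as above; values illustrative).
            >>> params = VirtualMachineImageListSkusParameters()
            >>> params.location = 'westus'
            >>> params.publisher_name = 'Canonical'
            >>> params.offer = 'UbuntuServer'
            >>> response = client.virtual_machine_images.list_skus(params)
            >>> sku_names = [sku.name for sku in response.resources]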
"""
# Validate
if parameters is not None:
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
if parameters.offer is None:
raise ValueError('parameters.offer cannot be None.')
if parameters.publisher_name is None:
raise ValueError('parameters.publisher_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
if parameters is not None:
url = url + quote(parameters.location)
url = url + '/publishers/'
if parameters is not None:
url = url + quote(parameters.publisher_name)
url = url + '/artifacttypes/vmimage/offers/'
if parameters is not None:
url = url + quote(parameters.offer)
url = url + '/skus'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
        # Trim '/' from the end of base_url and from the beginning of url.
        if base_url.endswith('/'):
            base_url = base_url[:-1]
        if url.startswith('/'):
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineImageResourceList(resources=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
resources_array = response_doc
if resources_array is not None:
for resources_value in resources_array:
virtual_machine_image_resource_instance = VirtualMachineImageResource()
result.resources.append(virtual_machine_image_resource_instance)
id_value = resources_value.get('id', None)
if id_value is not None:
id_instance = id_value
virtual_machine_image_resource_instance.id = id_instance
name_value = resources_value.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_image_resource_instance.name = name_instance
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_image_resource_instance.location = location_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
class VirtualMachineOperations(object):
"""
Operations for managing the virtual machines in compute management.
    Note: An instance of this class is automatically created for an
    instance of the ComputeManagementClient.
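    A usage sketch (the attribute name on the client is an assumption
    for illustration):
        >>> vm_ops = compute_client.virtual_machines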
"""
def __init__(self, client):
self._client = client
@property
def client(self):
"""
Gets a reference to the
Microsoft.Azure.Management.Compute.ComputeManagementClient.
"""
return self._client
def begin_capturing(self, resource_group_name, vm_name, parameters):
"""
Captures the VM by copying VirtualHardDisks of the VM and outputs a
template that can be used to create similar VMs.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
parameters (VirtualMachineCaptureParameters): Parameters supplied to
the Capture Virtual Machine operation.
Returns:
ComputeOperationResponse: The compute long running operation response.
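        Example:
            A minimal sketch. It assumes the client exposes this group as
            virtual_machines (an assumption) and that the parameters model is
            populated by attribute assignment; names are illustrative.
            >>> params = VirtualMachineCaptureParameters()
            >>> params.virtual_hard_disk_name_prefix = 'captured'
            >>> params.destination_container_name = 'vhds'
            >>> params.overwrite = False
            >>> response = client.virtual_machines.begin_capturing(
            ...     'my-resource-group', 'my-vm', params)
            >>> # 202 Accepted; poll response.azure_async_operation to completion.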
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
if parameters is None:
raise ValueError('parameters cannot be None.')
if parameters.destination_container_name is None:
raise ValueError('parameters.destination_container_name cannot be None.')
if parameters.overwrite is None:
raise ValueError('parameters.overwrite cannot be None.')
if parameters.virtual_hard_disk_name_prefix is None:
raise ValueError('parameters.virtual_hard_disk_name_prefix cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/capture'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
        # Trim '/' from the end of base_url and from the beginning of url.
        if base_url.endswith('/'):
            base_url = base_url[:-1]
        if url.startswith('/'):
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'POST'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Serialize Request
request_content = None
request_doc = None
virtual_machine_capture_parameters_value = {}
request_doc = virtual_machine_capture_parameters_value
virtual_machine_capture_parameters_value['vhdPrefix'] = parameters.virtual_hard_disk_name_prefix
virtual_machine_capture_parameters_value['destinationContainerName'] = parameters.destination_container_name
virtual_machine_capture_parameters_value['overwriteVhds'] = parameters.overwrite
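        # Resulting wire-format body (illustrative shape):
        # {"vhdPrefix": "...", "destinationContainerName": "...",
        #  "overwriteVhds": false}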
request_content = json.dumps(request_doc)
http_request.data = request_content
        # Header values must be strings, so pass the length as str.
        http_request.headers['Content-Length'] = str(len(request_content))
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 202:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
result = ComputeOperationResponse()
result.status_code = status_code
result.azure_async_operation = response.headers.get('azure-asyncoperation')
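        # A 202 response only means the capture was accepted; callers can
        # poll the URL in result.azure_async_operation to track completion.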
result.request_id = response.headers.get('x-ms-request-id')
return result
def begin_creating_or_updating(self, resource_group_name, parameters):
"""
The operation to create or update a virtual machine.
Args:
resource_group_name (string): The name of the resource group.
parameters (VirtualMachine): Parameters supplied to the Create Virtual
Machine operation.
Returns:
VirtualMachineCreateOrUpdateResponse: The Create Virtual Machine
operation response.
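        Example:
            A minimal sketch (same assumptions as begin_capturing); the
            profile objects elided here must satisfy the validation rules
            applied below.
            >>> vm = VirtualMachine(extensions=[], tags={})
            >>> vm.name = 'my-vm'
            >>> vm.location = 'westus'
            >>> # ... populate hardware_profile, storage_profile, os_profile
            >>> # and network_profile before submitting ...
            >>> response = client.virtual_machines.begin_creating_or_updating(
            ...     'my-resource-group', vm)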
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if parameters is None:
raise ValueError('parameters cannot be None.')
if parameters.extensions is not None:
for extensions_parameter_item in parameters.extensions:
if extensions_parameter_item.location is None:
raise ValueError('parameters.extensions.location cannot be None.')
if parameters.location is None:
raise ValueError('parameters.location cannot be None.')
if parameters.storage_profile is not None:
if parameters.storage_profile.data_disks is not None:
for data_disks_parameter_item in parameters.storage_profile.data_disks:
if data_disks_parameter_item.create_option is None:
raise ValueError('parameters.storage_profile.data_disks.create_option cannot be None.')
if data_disks_parameter_item.lun is None:
raise ValueError('parameters.storage_profile.data_disks.lun cannot be None.')
if data_disks_parameter_item.name is None:
raise ValueError('parameters.storage_profile.data_disks.name cannot be None.')
if data_disks_parameter_item.virtual_hard_disk is None:
raise ValueError('parameters.storage_profile.data_disks.virtual_hard_disk cannot be None.')
if parameters.storage_profile.os_disk is not None:
if parameters.storage_profile.os_disk.create_option is None:
raise ValueError('parameters.storage_profile.os_disk.create_option cannot be None.')
if parameters.storage_profile.os_disk.name is None:
raise ValueError('parameters.storage_profile.os_disk.name cannot be None.')
if parameters.storage_profile.os_disk.virtual_hard_disk is None:
raise ValueError('parameters.storage_profile.os_disk.virtual_hard_disk cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
if parameters.name is not None:
url = url + quote(parameters.name)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
        # Trim '/' from the end of base_url and from the beginning of url.
        if base_url.endswith('/'):
            base_url = base_url[:-1]
        if url.startswith('/'):
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'PUT'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Serialize Request
request_content = None
request_doc = None
virtual_machine_json_value = {}
request_doc = virtual_machine_json_value
if parameters.plan is not None:
plan_value = {}
virtual_machine_json_value['plan'] = plan_value
if parameters.plan.name is not None:
plan_value['name'] = parameters.plan.name
if parameters.plan.publisher is not None:
plan_value['publisher'] = parameters.plan.publisher
if parameters.plan.product is not None:
plan_value['product'] = parameters.plan.product
if parameters.plan.promotion_code is not None:
plan_value['promotionCode'] = parameters.plan.promotion_code
properties_value = {}
virtual_machine_json_value['properties'] = properties_value
if parameters.hardware_profile is not None:
hardware_profile_value = {}
properties_value['hardwareProfile'] = hardware_profile_value
if parameters.hardware_profile.virtual_machine_size is not None:
hardware_profile_value['vmSize'] = parameters.hardware_profile.virtual_machine_size
if parameters.storage_profile is not None:
storage_profile_value = {}
properties_value['storageProfile'] = storage_profile_value
if parameters.storage_profile.image_reference is not None:
image_reference_value = {}
storage_profile_value['imageReference'] = image_reference_value
if parameters.storage_profile.image_reference.publisher is not None:
image_reference_value['publisher'] = parameters.storage_profile.image_reference.publisher
if parameters.storage_profile.image_reference.offer is not None:
image_reference_value['offer'] = parameters.storage_profile.image_reference.offer
if parameters.storage_profile.image_reference.sku is not None:
image_reference_value['sku'] = parameters.storage_profile.image_reference.sku
if parameters.storage_profile.image_reference.version is not None:
image_reference_value['version'] = parameters.storage_profile.image_reference.version
if parameters.storage_profile.source_image is not None:
source_image_value = {}
storage_profile_value['sourceImage'] = source_image_value
if parameters.storage_profile.source_image.reference_uri is not None:
source_image_value['id'] = parameters.storage_profile.source_image.reference_uri
if parameters.storage_profile.os_disk is not None:
os_disk_value = {}
storage_profile_value['osDisk'] = os_disk_value
if parameters.storage_profile.os_disk.operating_system_type is not None:
os_disk_value['osType'] = parameters.storage_profile.os_disk.operating_system_type
os_disk_value['name'] = parameters.storage_profile.os_disk.name
vhd_value = {}
os_disk_value['vhd'] = vhd_value
if parameters.storage_profile.os_disk.virtual_hard_disk.uri is not None:
vhd_value['uri'] = parameters.storage_profile.os_disk.virtual_hard_disk.uri
if parameters.storage_profile.os_disk.source_image is not None:
image_value = {}
os_disk_value['image'] = image_value
if parameters.storage_profile.os_disk.source_image.uri is not None:
image_value['uri'] = parameters.storage_profile.os_disk.source_image.uri
if parameters.storage_profile.os_disk.caching is not None:
os_disk_value['caching'] = parameters.storage_profile.os_disk.caching
os_disk_value['createOption'] = parameters.storage_profile.os_disk.create_option
if parameters.storage_profile.data_disks is not None:
data_disks_array = []
for data_disks_item in parameters.storage_profile.data_disks:
data_disk_value = {}
data_disks_array.append(data_disk_value)
data_disk_value['lun'] = data_disks_item.lun
if data_disks_item.disk_size_gb is not None:
data_disk_value['diskSizeGB'] = data_disks_item.disk_size_gb
data_disk_value['name'] = data_disks_item.name
vhd_value2 = {}
data_disk_value['vhd'] = vhd_value2
if data_disks_item.virtual_hard_disk.uri is not None:
vhd_value2['uri'] = data_disks_item.virtual_hard_disk.uri
if data_disks_item.source_image is not None:
image_value2 = {}
data_disk_value['image'] = image_value2
if data_disks_item.source_image.uri is not None:
image_value2['uri'] = data_disks_item.source_image.uri
if data_disks_item.caching is not None:
data_disk_value['caching'] = data_disks_item.caching
data_disk_value['createOption'] = data_disks_item.create_option
storage_profile_value['dataDisks'] = data_disks_array
if parameters.os_profile is not None:
os_profile_value = {}
properties_value['osProfile'] = os_profile_value
if parameters.os_profile.computer_name is not None:
os_profile_value['computerName'] = parameters.os_profile.computer_name
if parameters.os_profile.admin_username is not None:
os_profile_value['adminUsername'] = parameters.os_profile.admin_username
if parameters.os_profile.admin_password is not None:
os_profile_value['adminPassword'] = parameters.os_profile.admin_password
if parameters.os_profile.custom_data is not None:
os_profile_value['customData'] = parameters.os_profile.custom_data
if parameters.os_profile.windows_configuration is not None:
windows_configuration_value = {}
os_profile_value['windowsConfiguration'] = windows_configuration_value
if parameters.os_profile.windows_configuration.provision_vm_agent is not None:
windows_configuration_value['provisionVMAgent'] = parameters.os_profile.windows_configuration.provision_vm_agent
if parameters.os_profile.windows_configuration.enable_automatic_updates is not None:
windows_configuration_value['enableAutomaticUpdates'] = parameters.os_profile.windows_configuration.enable_automatic_updates
if parameters.os_profile.windows_configuration.time_zone is not None:
windows_configuration_value['timeZone'] = parameters.os_profile.windows_configuration.time_zone
if parameters.os_profile.windows_configuration.additional_unattend_contents is not None:
additional_unattend_content_array = []
for additional_unattend_content_item in parameters.os_profile.windows_configuration.additional_unattend_contents:
additional_unattend_content_value = {}
additional_unattend_content_array.append(additional_unattend_content_value)
if additional_unattend_content_item.pass_name is not None:
additional_unattend_content_value['passName'] = additional_unattend_content_item.pass_name
if additional_unattend_content_item.component_name is not None:
additional_unattend_content_value['componentName'] = additional_unattend_content_item.component_name
if additional_unattend_content_item.setting_name is not None:
additional_unattend_content_value['settingName'] = additional_unattend_content_item.setting_name
if additional_unattend_content_item.content is not None:
additional_unattend_content_value['content'] = additional_unattend_content_item.content
windows_configuration_value['additionalUnattendContent'] = additional_unattend_content_array
if parameters.os_profile.windows_configuration.win_rm_configuration is not None:
win_rm_value = {}
windows_configuration_value['winRM'] = win_rm_value
if parameters.os_profile.windows_configuration.win_rm_configuration.listeners is not None:
listeners_array = []
for listeners_item in parameters.os_profile.windows_configuration.win_rm_configuration.listeners:
win_rm_listener_value = {}
listeners_array.append(win_rm_listener_value)
if listeners_item.protocol is not None:
win_rm_listener_value['protocol'] = listeners_item.protocol
if listeners_item.certificate_url is not None:
win_rm_listener_value['certificateUrl'] = listeners_item.certificate_url
win_rm_value['listeners'] = listeners_array
if parameters.os_profile.linux_configuration is not None:
linux_configuration_value = {}
os_profile_value['linuxConfiguration'] = linux_configuration_value
if parameters.os_profile.linux_configuration.disable_password_authentication is not None:
linux_configuration_value['disablePasswordAuthentication'] = parameters.os_profile.linux_configuration.disable_password_authentication
if parameters.os_profile.linux_configuration.ssh_configuration is not None:
ssh_value = {}
linux_configuration_value['ssh'] = ssh_value
if parameters.os_profile.linux_configuration.ssh_configuration.public_keys is not None:
public_keys_array = []
for public_keys_item in parameters.os_profile.linux_configuration.ssh_configuration.public_keys:
ssh_public_key_value = {}
public_keys_array.append(ssh_public_key_value)
if public_keys_item.path is not None:
ssh_public_key_value['path'] = public_keys_item.path
if public_keys_item.key_data is not None:
ssh_public_key_value['keyData'] = public_keys_item.key_data
ssh_value['publicKeys'] = public_keys_array
if parameters.os_profile.secrets is not None:
secrets_array = []
for secrets_item in parameters.os_profile.secrets:
vault_secret_group_value = {}
secrets_array.append(vault_secret_group_value)
if secrets_item.source_vault is not None:
source_vault_value = {}
vault_secret_group_value['sourceVault'] = source_vault_value
if secrets_item.source_vault.reference_uri is not None:
source_vault_value['id'] = secrets_item.source_vault.reference_uri
if secrets_item.vault_certificates is not None:
vault_certificates_array = []
for vault_certificates_item in secrets_item.vault_certificates:
vault_certificate_value = {}
vault_certificates_array.append(vault_certificate_value)
if vault_certificates_item.certificate_url is not None:
vault_certificate_value['certificateUrl'] = vault_certificates_item.certificate_url
if vault_certificates_item.certificate_store is not None:
vault_certificate_value['certificateStore'] = vault_certificates_item.certificate_store
vault_secret_group_value['vaultCertificates'] = vault_certificates_array
os_profile_value['secrets'] = secrets_array
if parameters.network_profile is not None:
network_profile_value = {}
properties_value['networkProfile'] = network_profile_value
if parameters.network_profile.network_interfaces is not None:
network_interfaces_array = []
for network_interfaces_item in parameters.network_profile.network_interfaces:
network_interface_reference_json_value = {}
network_interfaces_array.append(network_interface_reference_json_value)
properties_value2 = {}
network_interface_reference_json_value['properties'] = properties_value2
if network_interfaces_item.primary is not None:
properties_value2['primary'] = network_interfaces_item.primary
if network_interfaces_item.reference_uri is not None:
network_interface_reference_json_value['id'] = network_interfaces_item.reference_uri
network_profile_value['networkInterfaces'] = network_interfaces_array
if parameters.availability_set_reference is not None:
availability_set_value = {}
properties_value['availabilitySet'] = availability_set_value
if parameters.availability_set_reference.reference_uri is not None:
availability_set_value['id'] = parameters.availability_set_reference.reference_uri
if parameters.provisioning_state is not None:
properties_value['provisioningState'] = parameters.provisioning_state
if parameters.instance_view is not None:
instance_view_value = {}
properties_value['instanceView'] = instance_view_value
if parameters.instance_view.platform_update_domain is not None:
instance_view_value['platformUpdateDomain'] = parameters.instance_view.platform_update_domain
if parameters.instance_view.platform_fault_domain is not None:
instance_view_value['platformFaultDomain'] = parameters.instance_view.platform_fault_domain
if parameters.instance_view.remote_desktop_thumbprint is not None:
instance_view_value['rdpThumbPrint'] = parameters.instance_view.remote_desktop_thumbprint
if parameters.instance_view.vm_agent is not None:
vm_agent_value = {}
instance_view_value['vmAgent'] = vm_agent_value
if parameters.instance_view.vm_agent.vm_agent_version is not None:
vm_agent_value['vmAgentVersion'] = parameters.instance_view.vm_agent.vm_agent_version
if parameters.instance_view.vm_agent.extension_handlers is not None:
extension_handlers_array = []
for extension_handlers_item in parameters.instance_view.vm_agent.extension_handlers:
virtual_machine_extension_handler_instance_view_value = {}
extension_handlers_array.append(virtual_machine_extension_handler_instance_view_value)
if extension_handlers_item.type is not None:
virtual_machine_extension_handler_instance_view_value['type'] = extension_handlers_item.type
if extension_handlers_item.type_handler_version is not None:
virtual_machine_extension_handler_instance_view_value['typeHandlerVersion'] = extension_handlers_item.type_handler_version
if extension_handlers_item.status is not None:
status_value = {}
virtual_machine_extension_handler_instance_view_value['status'] = status_value
if extension_handlers_item.status.code is not None:
status_value['code'] = extension_handlers_item.status.code
if extension_handlers_item.status.level is not None:
status_value['level'] = extension_handlers_item.status.level
if extension_handlers_item.status.display_status is not None:
status_value['displayStatus'] = extension_handlers_item.status.display_status
if extension_handlers_item.status.message is not None:
status_value['message'] = extension_handlers_item.status.message
if extension_handlers_item.status.time is not None:
status_value['time'] = extension_handlers_item.status.time
vm_agent_value['extensionHandlers'] = extension_handlers_array
if parameters.instance_view.vm_agent.statuses is not None:
statuses_array = []
for statuses_item in parameters.instance_view.vm_agent.statuses:
instance_view_status_value = {}
statuses_array.append(instance_view_status_value)
if statuses_item.code is not None:
instance_view_status_value['code'] = statuses_item.code
if statuses_item.level is not None:
instance_view_status_value['level'] = statuses_item.level
if statuses_item.display_status is not None:
instance_view_status_value['displayStatus'] = statuses_item.display_status
if statuses_item.message is not None:
instance_view_status_value['message'] = statuses_item.message
if statuses_item.time is not None:
instance_view_status_value['time'] = statuses_item.time
vm_agent_value['statuses'] = statuses_array
if parameters.instance_view.disks is not None:
disks_array = []
for disks_item in parameters.instance_view.disks:
disk_instance_view_value = {}
disks_array.append(disk_instance_view_value)
if disks_item.name is not None:
disk_instance_view_value['name'] = disks_item.name
if disks_item.statuses is not None:
statuses_array2 = []
for statuses_item2 in disks_item.statuses:
instance_view_status_value2 = {}
statuses_array2.append(instance_view_status_value2)
if statuses_item2.code is not None:
instance_view_status_value2['code'] = statuses_item2.code
if statuses_item2.level is not None:
instance_view_status_value2['level'] = statuses_item2.level
if statuses_item2.display_status is not None:
instance_view_status_value2['displayStatus'] = statuses_item2.display_status
if statuses_item2.message is not None:
instance_view_status_value2['message'] = statuses_item2.message
if statuses_item2.time is not None:
instance_view_status_value2['time'] = statuses_item2.time
disk_instance_view_value['statuses'] = statuses_array2
instance_view_value['disks'] = disks_array
if parameters.instance_view.extensions is not None:
extensions_array = []
for extensions_item in parameters.instance_view.extensions:
virtual_machine_extension_instance_view_value = {}
extensions_array.append(virtual_machine_extension_instance_view_value)
if extensions_item.name is not None:
virtual_machine_extension_instance_view_value['name'] = extensions_item.name
if extensions_item.extension_type is not None:
virtual_machine_extension_instance_view_value['type'] = extensions_item.extension_type
if extensions_item.type_handler_version is not None:
virtual_machine_extension_instance_view_value['typeHandlerVersion'] = extensions_item.type_handler_version
if extensions_item.sub_statuses is not None:
substatuses_array = []
for substatuses_item in extensions_item.sub_statuses:
instance_view_status_value3 = {}
substatuses_array.append(instance_view_status_value3)
if substatuses_item.code is not None:
instance_view_status_value3['code'] = substatuses_item.code
if substatuses_item.level is not None:
instance_view_status_value3['level'] = substatuses_item.level
if substatuses_item.display_status is not None:
instance_view_status_value3['displayStatus'] = substatuses_item.display_status
if substatuses_item.message is not None:
instance_view_status_value3['message'] = substatuses_item.message
if substatuses_item.time is not None:
instance_view_status_value3['time'] = substatuses_item.time
virtual_machine_extension_instance_view_value['substatuses'] = substatuses_array
if extensions_item.statuses is not None:
statuses_array3 = []
for statuses_item3 in extensions_item.statuses:
instance_view_status_value4 = {}
statuses_array3.append(instance_view_status_value4)
if statuses_item3.code is not None:
instance_view_status_value4['code'] = statuses_item3.code
if statuses_item3.level is not None:
instance_view_status_value4['level'] = statuses_item3.level
if statuses_item3.display_status is not None:
instance_view_status_value4['displayStatus'] = statuses_item3.display_status
if statuses_item3.message is not None:
instance_view_status_value4['message'] = statuses_item3.message
if statuses_item3.time is not None:
instance_view_status_value4['time'] = statuses_item3.time
virtual_machine_extension_instance_view_value['statuses'] = statuses_array3
instance_view_value['extensions'] = extensions_array
if parameters.instance_view.statuses is not None:
statuses_array4 = []
for statuses_item4 in parameters.instance_view.statuses:
instance_view_status_value5 = {}
statuses_array4.append(instance_view_status_value5)
if statuses_item4.code is not None:
instance_view_status_value5['code'] = statuses_item4.code
if statuses_item4.level is not None:
instance_view_status_value5['level'] = statuses_item4.level
if statuses_item4.display_status is not None:
instance_view_status_value5['displayStatus'] = statuses_item4.display_status
if statuses_item4.message is not None:
instance_view_status_value5['message'] = statuses_item4.message
if statuses_item4.time is not None:
instance_view_status_value5['time'] = statuses_item4.time
instance_view_value['statuses'] = statuses_array4
if parameters.extensions is not None:
resources_array = []
for resources_item in parameters.extensions:
virtual_machine_extension_json_value = {}
resources_array.append(virtual_machine_extension_json_value)
properties_value3 = {}
virtual_machine_extension_json_value['properties'] = properties_value3
if resources_item.publisher is not None:
properties_value3['publisher'] = resources_item.publisher
if resources_item.extension_type is not None:
properties_value3['type'] = resources_item.extension_type
if resources_item.type_handler_version is not None:
properties_value3['typeHandlerVersion'] = resources_item.type_handler_version
if resources_item.auto_upgrade_minor_version is not None:
properties_value3['autoUpgradeMinorVersion'] = resources_item.auto_upgrade_minor_version
if resources_item.settings is not None:
properties_value3['settings'] = json.loads(resources_item.settings)
if resources_item.protected_settings is not None:
properties_value3['protectedSettings'] = json.loads(resources_item.protected_settings)
if resources_item.provisioning_state is not None:
properties_value3['provisioningState'] = resources_item.provisioning_state
if resources_item.instance_view is not None:
instance_view_value2 = {}
properties_value3['instanceView'] = instance_view_value2
if resources_item.instance_view.name is not None:
instance_view_value2['name'] = resources_item.instance_view.name
if resources_item.instance_view.extension_type is not None:
instance_view_value2['type'] = resources_item.instance_view.extension_type
if resources_item.instance_view.type_handler_version is not None:
instance_view_value2['typeHandlerVersion'] = resources_item.instance_view.type_handler_version
if resources_item.instance_view.sub_statuses is not None:
substatuses_array2 = []
for substatuses_item2 in resources_item.instance_view.sub_statuses:
instance_view_status_value6 = {}
substatuses_array2.append(instance_view_status_value6)
if substatuses_item2.code is not None:
instance_view_status_value6['code'] = substatuses_item2.code
if substatuses_item2.level is not None:
instance_view_status_value6['level'] = substatuses_item2.level
if substatuses_item2.display_status is not None:
instance_view_status_value6['displayStatus'] = substatuses_item2.display_status
if substatuses_item2.message is not None:
instance_view_status_value6['message'] = substatuses_item2.message
if substatuses_item2.time is not None:
instance_view_status_value6['time'] = substatuses_item2.time
instance_view_value2['substatuses'] = substatuses_array2
if resources_item.instance_view.statuses is not None:
statuses_array5 = []
for statuses_item5 in resources_item.instance_view.statuses:
instance_view_status_value7 = {}
statuses_array5.append(instance_view_status_value7)
if statuses_item5.code is not None:
instance_view_status_value7['code'] = statuses_item5.code
if statuses_item5.level is not None:
instance_view_status_value7['level'] = statuses_item5.level
if statuses_item5.display_status is not None:
instance_view_status_value7['displayStatus'] = statuses_item5.display_status
if statuses_item5.message is not None:
instance_view_status_value7['message'] = statuses_item5.message
if statuses_item5.time is not None:
instance_view_status_value7['time'] = statuses_item5.time
instance_view_value2['statuses'] = statuses_array5
if resources_item.id is not None:
virtual_machine_extension_json_value['id'] = resources_item.id
if resources_item.name is not None:
virtual_machine_extension_json_value['name'] = resources_item.name
if resources_item.type is not None:
virtual_machine_extension_json_value['type'] = resources_item.type
virtual_machine_extension_json_value['location'] = resources_item.location
if resources_item.tags is not None:
tags_dictionary = {}
for tags_key in resources_item.tags:
tags_value = resources_item.tags[tags_key]
tags_dictionary[tags_key] = tags_value
virtual_machine_extension_json_value['tags'] = tags_dictionary
virtual_machine_json_value['resources'] = resources_array
if parameters.id is not None:
virtual_machine_json_value['id'] = parameters.id
if parameters.name is not None:
virtual_machine_json_value['name'] = parameters.name
if parameters.type is not None:
virtual_machine_json_value['type'] = parameters.type
virtual_machine_json_value['location'] = parameters.location
if parameters.tags is not None:
tags_dictionary2 = {}
for tags_key2 in parameters.tags:
tags_value2 = parameters.tags[tags_key2]
tags_dictionary2[tags_key2] = tags_value2
virtual_machine_json_value['tags'] = tags_dictionary2
request_content = json.dumps(request_doc)
http_request.data = request_content
        # Header values must be strings, so pass the length as str.
        http_request.headers['Content-Length'] = str(len(request_content))
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200 and status_code != 201:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200 or status_code == 201:
response_content = body
result = VirtualMachineCreateOrUpdateResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
virtual_machine_instance = VirtualMachine(extensions=[], tags={})
result.virtual_machine = virtual_machine_instance
plan_value2 = response_doc.get('plan', None)
if plan_value2 is not None:
plan_instance = Plan()
virtual_machine_instance.plan = plan_instance
name_value = plan_value2.get('name', None)
if name_value is not None:
name_instance = name_value
plan_instance.name = name_instance
publisher_value = plan_value2.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
plan_instance.publisher = publisher_instance
product_value = plan_value2.get('product', None)
if product_value is not None:
product_instance = product_value
plan_instance.product = product_instance
promotion_code_value = plan_value2.get('promotionCode', None)
if promotion_code_value is not None:
promotion_code_instance = promotion_code_value
plan_instance.promotion_code = promotion_code_instance
properties_value4 = response_doc.get('properties', None)
if properties_value4 is not None:
hardware_profile_value2 = properties_value4.get('hardwareProfile', None)
if hardware_profile_value2 is not None:
hardware_profile_instance = HardwareProfile()
virtual_machine_instance.hardware_profile = hardware_profile_instance
vm_size_value = hardware_profile_value2.get('vmSize', None)
if vm_size_value is not None:
vm_size_instance = vm_size_value
hardware_profile_instance.virtual_machine_size = vm_size_instance
storage_profile_value2 = properties_value4.get('storageProfile', None)
if storage_profile_value2 is not None:
storage_profile_instance = StorageProfile(data_disks=[])
virtual_machine_instance.storage_profile = storage_profile_instance
image_reference_value2 = storage_profile_value2.get('imageReference', None)
if image_reference_value2 is not None:
image_reference_instance = ImageReference()
storage_profile_instance.image_reference = image_reference_instance
publisher_value2 = image_reference_value2.get('publisher', None)
if publisher_value2 is not None:
publisher_instance2 = publisher_value2
image_reference_instance.publisher = publisher_instance2
offer_value = image_reference_value2.get('offer', None)
if offer_value is not None:
offer_instance = offer_value
image_reference_instance.offer = offer_instance
sku_value = image_reference_value2.get('sku', None)
if sku_value is not None:
sku_instance = sku_value
image_reference_instance.sku = sku_instance
version_value = image_reference_value2.get('version', None)
if version_value is not None:
version_instance = version_value
image_reference_instance.version = version_instance
source_image_value2 = storage_profile_value2.get('sourceImage', None)
if source_image_value2 is not None:
source_image_instance = SourceImageReference()
storage_profile_instance.source_image = source_image_instance
id_value = source_image_value2.get('id', None)
if id_value is not None:
id_instance = id_value
source_image_instance.reference_uri = id_instance
os_disk_value2 = storage_profile_value2.get('osDisk', None)
if os_disk_value2 is not None:
os_disk_instance = OSDisk()
storage_profile_instance.os_disk = os_disk_instance
os_type_value = os_disk_value2.get('osType', None)
if os_type_value is not None:
os_type_instance = os_type_value
os_disk_instance.operating_system_type = os_type_instance
name_value2 = os_disk_value2.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
os_disk_instance.name = name_instance2
vhd_value3 = os_disk_value2.get('vhd', None)
if vhd_value3 is not None:
vhd_instance = VirtualHardDisk()
os_disk_instance.virtual_hard_disk = vhd_instance
uri_value = vhd_value3.get('uri', None)
if uri_value is not None:
uri_instance = uri_value
vhd_instance.uri = uri_instance
image_value3 = os_disk_value2.get('image', None)
if image_value3 is not None:
image_instance = VirtualHardDisk()
os_disk_instance.source_image = image_instance
uri_value2 = image_value3.get('uri', None)
if uri_value2 is not None:
uri_instance2 = uri_value2
image_instance.uri = uri_instance2
caching_value = os_disk_value2.get('caching', None)
if caching_value is not None:
caching_instance = caching_value
os_disk_instance.caching = caching_instance
create_option_value = os_disk_value2.get('createOption', None)
if create_option_value is not None:
create_option_instance = create_option_value
os_disk_instance.create_option = create_option_instance
data_disks_array2 = storage_profile_value2.get('dataDisks', None)
if data_disks_array2 is not None:
for data_disks_value in data_disks_array2:
data_disk_instance = DataDisk()
storage_profile_instance.data_disks.append(data_disk_instance)
lun_value = data_disks_value.get('lun', None)
if lun_value is not None:
lun_instance = lun_value
data_disk_instance.lun = lun_instance
disk_size_gb_value = data_disks_value.get('diskSizeGB', None)
if disk_size_gb_value is not None:
disk_size_gb_instance = disk_size_gb_value
data_disk_instance.disk_size_gb = disk_size_gb_instance
name_value3 = data_disks_value.get('name', None)
if name_value3 is not None:
name_instance3 = name_value3
data_disk_instance.name = name_instance3
vhd_value4 = data_disks_value.get('vhd', None)
if vhd_value4 is not None:
vhd_instance2 = VirtualHardDisk()
data_disk_instance.virtual_hard_disk = vhd_instance2
uri_value3 = vhd_value4.get('uri', None)
if uri_value3 is not None:
uri_instance3 = uri_value3
vhd_instance2.uri = uri_instance3
image_value4 = data_disks_value.get('image', None)
if image_value4 is not None:
image_instance2 = VirtualHardDisk()
data_disk_instance.source_image = image_instance2
uri_value4 = image_value4.get('uri', None)
if uri_value4 is not None:
uri_instance4 = uri_value4
image_instance2.uri = uri_instance4
caching_value2 = data_disks_value.get('caching', None)
if caching_value2 is not None:
caching_instance2 = caching_value2
data_disk_instance.caching = caching_instance2
create_option_value2 = data_disks_value.get('createOption', None)
if create_option_value2 is not None:
create_option_instance2 = create_option_value2
data_disk_instance.create_option = create_option_instance2
os_profile_value2 = properties_value4.get('osProfile', None)
if os_profile_value2 is not None:
os_profile_instance = OSProfile(secrets=[])
virtual_machine_instance.os_profile = os_profile_instance
computer_name_value = os_profile_value2.get('computerName', None)
if computer_name_value is not None:
computer_name_instance = computer_name_value
os_profile_instance.computer_name = computer_name_instance
admin_username_value = os_profile_value2.get('adminUsername', None)
if admin_username_value is not None:
admin_username_instance = admin_username_value
os_profile_instance.admin_username = admin_username_instance
admin_password_value = os_profile_value2.get('adminPassword', None)
if admin_password_value is not None:
admin_password_instance = admin_password_value
os_profile_instance.admin_password = admin_password_instance
custom_data_value = os_profile_value2.get('customData', None)
if custom_data_value is not None:
custom_data_instance = custom_data_value
os_profile_instance.custom_data = custom_data_instance
windows_configuration_value2 = os_profile_value2.get('windowsConfiguration', None)
if windows_configuration_value2 is not None:
windows_configuration_instance = WindowsConfiguration(additional_unattend_contents=[])
os_profile_instance.windows_configuration = windows_configuration_instance
provision_vm_agent_value = windows_configuration_value2.get('provisionVMAgent', None)
if provision_vm_agent_value is not None:
provision_vm_agent_instance = provision_vm_agent_value
windows_configuration_instance.provision_vm_agent = provision_vm_agent_instance
enable_automatic_updates_value = windows_configuration_value2.get('enableAutomaticUpdates', None)
if enable_automatic_updates_value is not None:
enable_automatic_updates_instance = enable_automatic_updates_value
windows_configuration_instance.enable_automatic_updates = enable_automatic_updates_instance
time_zone_value = windows_configuration_value2.get('timeZone', None)
if time_zone_value is not None:
time_zone_instance = time_zone_value
windows_configuration_instance.time_zone = time_zone_instance
additional_unattend_content_array2 = windows_configuration_value2.get('additionalUnattendContent', None)
if additional_unattend_content_array2 is not None:
for additional_unattend_content_value2 in additional_unattend_content_array2:
additional_unattend_content_instance = AdditionalUnattendContent()
windows_configuration_instance.additional_unattend_contents.append(additional_unattend_content_instance)
pass_name_value = additional_unattend_content_value2.get('passName', None)
if pass_name_value is not None:
pass_name_instance = pass_name_value
additional_unattend_content_instance.pass_name = pass_name_instance
component_name_value = additional_unattend_content_value2.get('componentName', None)
if component_name_value is not None:
component_name_instance = component_name_value
additional_unattend_content_instance.component_name = component_name_instance
setting_name_value = additional_unattend_content_value2.get('settingName', None)
if setting_name_value is not None:
setting_name_instance = setting_name_value
additional_unattend_content_instance.setting_name = setting_name_instance
content_value = additional_unattend_content_value2.get('content', None)
if content_value is not None:
content_instance = content_value
additional_unattend_content_instance.content = content_instance
win_rm_value2 = windows_configuration_value2.get('winRM', None)
if win_rm_value2 is not None:
win_rm_instance = WinRMConfiguration(listeners=[])
windows_configuration_instance.win_rm_configuration = win_rm_instance
listeners_array2 = win_rm_value2.get('listeners', None)
if listeners_array2 is not None:
for listeners_value in listeners_array2:
win_rm_listener_instance = WinRMListener()
win_rm_instance.listeners.append(win_rm_listener_instance)
protocol_value = listeners_value.get('protocol', None)
if protocol_value is not None:
protocol_instance = protocol_value
win_rm_listener_instance.protocol = protocol_instance
certificate_url_value = listeners_value.get('certificateUrl', None)
if certificate_url_value is not None:
certificate_url_instance = certificate_url_value
win_rm_listener_instance.certificate_url = certificate_url_instance
linux_configuration_value2 = os_profile_value2.get('linuxConfiguration', None)
if linux_configuration_value2 is not None:
linux_configuration_instance = LinuxConfiguration()
os_profile_instance.linux_configuration = linux_configuration_instance
disable_password_authentication_value = linux_configuration_value2.get('disablePasswordAuthentication', None)
if disable_password_authentication_value is not None:
disable_password_authentication_instance = disable_password_authentication_value
linux_configuration_instance.disable_password_authentication = disable_password_authentication_instance
ssh_value2 = linux_configuration_value2.get('ssh', None)
if ssh_value2 is not None:
ssh_instance = SshConfiguration(public_keys=[])
linux_configuration_instance.ssh_configuration = ssh_instance
public_keys_array2 = ssh_value2.get('publicKeys', None)
if public_keys_array2 is not None:
for public_keys_value in public_keys_array2:
ssh_public_key_instance = SshPublicKey()
ssh_instance.public_keys.append(ssh_public_key_instance)
path_value = public_keys_value.get('path', None)
if path_value is not None:
path_instance = path_value
ssh_public_key_instance.path = path_instance
key_data_value = public_keys_value.get('keyData', None)
if key_data_value is not None:
key_data_instance = key_data_value
ssh_public_key_instance.key_data = key_data_instance
secrets_array2 = os_profile_value2.get('secrets', None)
if secrets_array2 is not None:
for secrets_value in secrets_array2:
vault_secret_group_instance = VaultSecretGroup(vault_certificates=[])
os_profile_instance.secrets.append(vault_secret_group_instance)
source_vault_value2 = secrets_value.get('sourceVault', None)
if source_vault_value2 is not None:
source_vault_instance = SourceVaultReference()
vault_secret_group_instance.source_vault = source_vault_instance
id_value2 = source_vault_value2.get('id', None)
if id_value2 is not None:
id_instance2 = id_value2
source_vault_instance.reference_uri = id_instance2
vault_certificates_array2 = secrets_value.get('vaultCertificates', None)
if vault_certificates_array2 is not None:
for vault_certificates_value in vault_certificates_array2:
vault_certificate_instance = VaultCertificate()
vault_secret_group_instance.vault_certificates.append(vault_certificate_instance)
certificate_url_value2 = vault_certificates_value.get('certificateUrl', None)
if certificate_url_value2 is not None:
certificate_url_instance2 = certificate_url_value2
vault_certificate_instance.certificate_url = certificate_url_instance2
certificate_store_value = vault_certificates_value.get('certificateStore', None)
if certificate_store_value is not None:
certificate_store_instance = certificate_store_value
vault_certificate_instance.certificate_store = certificate_store_instance
network_profile_value2 = properties_value4.get('networkProfile', None)
if network_profile_value2 is not None:
network_profile_instance = NetworkProfile(network_interfaces=[])
virtual_machine_instance.network_profile = network_profile_instance
network_interfaces_array2 = network_profile_value2.get('networkInterfaces', None)
if network_interfaces_array2 is not None:
for network_interfaces_value in network_interfaces_array2:
network_interface_reference_json_instance = NetworkInterfaceReference()
network_profile_instance.network_interfaces.append(network_interface_reference_json_instance)
properties_value5 = network_interfaces_value.get('properties', None)
if properties_value5 is not None:
primary_value = properties_value5.get('primary', None)
if primary_value is not None:
primary_instance = primary_value
network_interface_reference_json_instance.primary = primary_instance
id_value3 = network_interfaces_value.get('id', None)
if id_value3 is not None:
id_instance3 = id_value3
network_interface_reference_json_instance.reference_uri = id_instance3
availability_set_value2 = properties_value4.get('availabilitySet', None)
if availability_set_value2 is not None:
availability_set_instance = AvailabilitySetReference()
virtual_machine_instance.availability_set_reference = availability_set_instance
id_value4 = availability_set_value2.get('id', None)
if id_value4 is not None:
id_instance4 = id_value4
availability_set_instance.reference_uri = id_instance4
provisioning_state_value = properties_value4.get('provisioningState', None)
if provisioning_state_value is not None:
provisioning_state_instance = provisioning_state_value
virtual_machine_instance.provisioning_state = provisioning_state_instance
instance_view_value3 = properties_value4.get('instanceView', None)
if instance_view_value3 is not None:
instance_view_instance = VirtualMachineInstanceView(disks=[], extensions=[], statuses=[])
virtual_machine_instance.instance_view = instance_view_instance
platform_update_domain_value = instance_view_value3.get('platformUpdateDomain', None)
if platform_update_domain_value is not None:
platform_update_domain_instance = platform_update_domain_value
instance_view_instance.platform_update_domain = platform_update_domain_instance
platform_fault_domain_value = instance_view_value3.get('platformFaultDomain', None)
if platform_fault_domain_value is not None:
platform_fault_domain_instance = platform_fault_domain_value
instance_view_instance.platform_fault_domain = platform_fault_domain_instance
rdp_thumb_print_value = instance_view_value3.get('rdpThumbPrint', None)
if rdp_thumb_print_value is not None:
rdp_thumb_print_instance = rdp_thumb_print_value
instance_view_instance.remote_desktop_thumbprint = rdp_thumb_print_instance
vm_agent_value2 = instance_view_value3.get('vmAgent', None)
if vm_agent_value2 is not None:
vm_agent_instance = VirtualMachineAgentInstanceView(extension_handlers=[], statuses=[])
instance_view_instance.vm_agent = vm_agent_instance
vm_agent_version_value = vm_agent_value2.get('vmAgentVersion', None)
if vm_agent_version_value is not None:
vm_agent_version_instance = vm_agent_version_value
vm_agent_instance.vm_agent_version = vm_agent_version_instance
extension_handlers_array2 = vm_agent_value2.get('extensionHandlers', None)
if extension_handlers_array2 is not None:
for extension_handlers_value in extension_handlers_array2:
virtual_machine_extension_handler_instance_view_instance = VirtualMachineExtensionHandlerInstanceView()
vm_agent_instance.extension_handlers.append(virtual_machine_extension_handler_instance_view_instance)
type_value = extension_handlers_value.get('type', None)
if type_value is not None:
type_instance = type_value
virtual_machine_extension_handler_instance_view_instance.type = type_instance
type_handler_version_value = extension_handlers_value.get('typeHandlerVersion', None)
if type_handler_version_value is not None:
type_handler_version_instance = type_handler_version_value
virtual_machine_extension_handler_instance_view_instance.type_handler_version = type_handler_version_instance
status_value2 = extension_handlers_value.get('status', None)
if status_value2 is not None:
status_instance = InstanceViewStatus()
virtual_machine_extension_handler_instance_view_instance.status = status_instance
code_value = status_value2.get('code', None)
if code_value is not None:
code_instance = code_value
status_instance.code = code_instance
level_value = status_value2.get('level', None)
if level_value is not None:
level_instance = level_value
status_instance.level = level_instance
display_status_value = status_value2.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
status_instance.display_status = display_status_instance
message_value = status_value2.get('message', None)
if message_value is not None:
message_instance = message_value
status_instance.message = message_instance
time_value = status_value2.get('time', None)
if time_value is not None:
time_instance = time_value
status_instance.time = time_instance
statuses_array6 = vm_agent_value2.get('statuses', None)
if statuses_array6 is not None:
for statuses_value in statuses_array6:
instance_view_status_instance = InstanceViewStatus()
vm_agent_instance.statuses.append(instance_view_status_instance)
code_value2 = statuses_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
instance_view_status_instance.code = code_instance2
level_value2 = statuses_value.get('level', None)
if level_value2 is not None:
level_instance2 = level_value2
instance_view_status_instance.level = level_instance2
display_status_value2 = statuses_value.get('displayStatus', None)
if display_status_value2 is not None:
display_status_instance2 = display_status_value2
instance_view_status_instance.display_status = display_status_instance2
message_value2 = statuses_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
instance_view_status_instance.message = message_instance2
time_value2 = statuses_value.get('time', None)
if time_value2 is not None:
time_instance2 = time_value2
instance_view_status_instance.time = time_instance2
disks_array2 = instance_view_value3.get('disks', None)
if disks_array2 is not None:
for disks_value in disks_array2:
disk_instance_view_instance = DiskInstanceView(statuses=[])
instance_view_instance.disks.append(disk_instance_view_instance)
name_value4 = disks_value.get('name', None)
if name_value4 is not None:
name_instance4 = name_value4
disk_instance_view_instance.name = name_instance4
statuses_array7 = disks_value.get('statuses', None)
if statuses_array7 is not None:
for statuses_value2 in statuses_array7:
instance_view_status_instance2 = InstanceViewStatus()
disk_instance_view_instance.statuses.append(instance_view_status_instance2)
code_value3 = statuses_value2.get('code', None)
if code_value3 is not None:
code_instance3 = code_value3
instance_view_status_instance2.code = code_instance3
level_value3 = statuses_value2.get('level', None)
if level_value3 is not None:
level_instance3 = level_value3
instance_view_status_instance2.level = level_instance3
display_status_value3 = statuses_value2.get('displayStatus', None)
if display_status_value3 is not None:
display_status_instance3 = display_status_value3
instance_view_status_instance2.display_status = display_status_instance3
message_value3 = statuses_value2.get('message', None)
if message_value3 is not None:
message_instance3 = message_value3
instance_view_status_instance2.message = message_instance3
time_value3 = statuses_value2.get('time', None)
if time_value3 is not None:
time_instance3 = time_value3
instance_view_status_instance2.time = time_instance3
extensions_array2 = instance_view_value3.get('extensions', None)
if extensions_array2 is not None:
for extensions_value in extensions_array2:
virtual_machine_extension_instance_view_instance = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
instance_view_instance.extensions.append(virtual_machine_extension_instance_view_instance)
name_value5 = extensions_value.get('name', None)
if name_value5 is not None:
name_instance5 = name_value5
virtual_machine_extension_instance_view_instance.name = name_instance5
type_value2 = extensions_value.get('type', None)
if type_value2 is not None:
type_instance2 = type_value2
virtual_machine_extension_instance_view_instance.extension_type = type_instance2
type_handler_version_value2 = extensions_value.get('typeHandlerVersion', None)
if type_handler_version_value2 is not None:
type_handler_version_instance2 = type_handler_version_value2
virtual_machine_extension_instance_view_instance.type_handler_version = type_handler_version_instance2
substatuses_array3 = extensions_value.get('substatuses', None)
if substatuses_array3 is not None:
for substatuses_value in substatuses_array3:
instance_view_status_instance3 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.sub_statuses.append(instance_view_status_instance3)
code_value4 = substatuses_value.get('code', None)
if code_value4 is not None:
code_instance4 = code_value4
instance_view_status_instance3.code = code_instance4
level_value4 = substatuses_value.get('level', None)
if level_value4 is not None:
level_instance4 = level_value4
instance_view_status_instance3.level = level_instance4
display_status_value4 = substatuses_value.get('displayStatus', None)
if display_status_value4 is not None:
display_status_instance4 = display_status_value4
instance_view_status_instance3.display_status = display_status_instance4
message_value4 = substatuses_value.get('message', None)
if message_value4 is not None:
message_instance4 = message_value4
instance_view_status_instance3.message = message_instance4
time_value4 = substatuses_value.get('time', None)
if time_value4 is not None:
time_instance4 = time_value4
instance_view_status_instance3.time = time_instance4
statuses_array8 = extensions_value.get('statuses', None)
if statuses_array8 is not None:
for statuses_value3 in statuses_array8:
instance_view_status_instance4 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.statuses.append(instance_view_status_instance4)
code_value5 = statuses_value3.get('code', None)
if code_value5 is not None:
code_instance5 = code_value5
instance_view_status_instance4.code = code_instance5
level_value5 = statuses_value3.get('level', None)
if level_value5 is not None:
level_instance5 = level_value5
instance_view_status_instance4.level = level_instance5
display_status_value5 = statuses_value3.get('displayStatus', None)
if display_status_value5 is not None:
display_status_instance5 = display_status_value5
instance_view_status_instance4.display_status = display_status_instance5
message_value5 = statuses_value3.get('message', None)
if message_value5 is not None:
message_instance5 = message_value5
instance_view_status_instance4.message = message_instance5
time_value5 = statuses_value3.get('time', None)
if time_value5 is not None:
time_instance5 = time_value5
instance_view_status_instance4.time = time_instance5
statuses_array9 = instance_view_value3.get('statuses', None)
if statuses_array9 is not None:
for statuses_value4 in statuses_array9:
instance_view_status_instance5 = InstanceViewStatus()
instance_view_instance.statuses.append(instance_view_status_instance5)
code_value6 = statuses_value4.get('code', None)
if code_value6 is not None:
code_instance6 = code_value6
instance_view_status_instance5.code = code_instance6
level_value6 = statuses_value4.get('level', None)
if level_value6 is not None:
level_instance6 = level_value6
instance_view_status_instance5.level = level_instance6
display_status_value6 = statuses_value4.get('displayStatus', None)
if display_status_value6 is not None:
display_status_instance6 = display_status_value6
instance_view_status_instance5.display_status = display_status_instance6
message_value6 = statuses_value4.get('message', None)
if message_value6 is not None:
message_instance6 = message_value6
instance_view_status_instance5.message = message_instance6
time_value6 = statuses_value4.get('time', None)
if time_value6 is not None:
time_instance6 = time_value6
instance_view_status_instance5.time = time_instance6
resources_array2 = response_doc.get('resources', None)
if resources_array2 is not None:
virtual_machine_instance.extensions = []
for resources_value in resources_array2:
virtual_machine_extension_json_instance = VirtualMachineExtension(tags={})
virtual_machine_instance.extensions.append(virtual_machine_extension_json_instance)
properties_value6 = resources_value.get('properties', None)
if properties_value6 is not None:
publisher_value3 = properties_value6.get('publisher', None)
if publisher_value3 is not None:
publisher_instance3 = publisher_value3
virtual_machine_extension_json_instance.publisher = publisher_instance3
type_value3 = properties_value6.get('type', None)
if type_value3 is not None:
type_instance3 = type_value3
virtual_machine_extension_json_instance.extension_type = type_instance3
type_handler_version_value3 = properties_value6.get('typeHandlerVersion', None)
if type_handler_version_value3 is not None:
type_handler_version_instance3 = type_handler_version_value3
virtual_machine_extension_json_instance.type_handler_version = type_handler_version_instance3
auto_upgrade_minor_version_value = properties_value6.get('autoUpgradeMinorVersion', None)
if auto_upgrade_minor_version_value is not None:
auto_upgrade_minor_version_instance = auto_upgrade_minor_version_value
virtual_machine_extension_json_instance.auto_upgrade_minor_version = auto_upgrade_minor_version_instance
settings_value = properties_value6.get('settings', None)
if settings_value is not None:
settings_instance = json.dumps(settings_value)
virtual_machine_extension_json_instance.settings = settings_instance
protected_settings_value = properties_value6.get('protectedSettings', None)
if protected_settings_value is not None:
protected_settings_instance = json.dumps(protected_settings_value)
virtual_machine_extension_json_instance.protected_settings = protected_settings_instance
provisioning_state_value2 = properties_value6.get('provisioningState', None)
if provisioning_state_value2 is not None:
provisioning_state_instance2 = provisioning_state_value2
virtual_machine_extension_json_instance.provisioning_state = provisioning_state_instance2
instance_view_value4 = properties_value6.get('instanceView', None)
if instance_view_value4 is not None:
instance_view_instance2 = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
virtual_machine_extension_json_instance.instance_view = instance_view_instance2
name_value6 = instance_view_value4.get('name', None)
if name_value6 is not None:
name_instance6 = name_value6
instance_view_instance2.name = name_instance6
type_value4 = instance_view_value4.get('type', None)
if type_value4 is not None:
type_instance4 = type_value4
instance_view_instance2.extension_type = type_instance4
type_handler_version_value4 = instance_view_value4.get('typeHandlerVersion', None)
if type_handler_version_value4 is not None:
type_handler_version_instance4 = type_handler_version_value4
instance_view_instance2.type_handler_version = type_handler_version_instance4
substatuses_array4 = instance_view_value4.get('substatuses', None)
if substatuses_array4 is not None:
for substatuses_value2 in substatuses_array4:
instance_view_status_instance6 = InstanceViewStatus()
instance_view_instance2.sub_statuses.append(instance_view_status_instance6)
code_value7 = substatuses_value2.get('code', None)
if code_value7 is not None:
code_instance7 = code_value7
instance_view_status_instance6.code = code_instance7
level_value7 = substatuses_value2.get('level', None)
if level_value7 is not None:
level_instance7 = level_value7
instance_view_status_instance6.level = level_instance7
display_status_value7 = substatuses_value2.get('displayStatus', None)
if display_status_value7 is not None:
display_status_instance7 = display_status_value7
instance_view_status_instance6.display_status = display_status_instance7
message_value7 = substatuses_value2.get('message', None)
if message_value7 is not None:
message_instance7 = message_value7
instance_view_status_instance6.message = message_instance7
time_value7 = substatuses_value2.get('time', None)
if time_value7 is not None:
time_instance7 = time_value7
instance_view_status_instance6.time = time_instance7
statuses_array10 = instance_view_value4.get('statuses', None)
if statuses_array10 is not None:
for statuses_value5 in statuses_array10:
instance_view_status_instance7 = InstanceViewStatus()
instance_view_instance2.statuses.append(instance_view_status_instance7)
code_value8 = statuses_value5.get('code', None)
if code_value8 is not None:
code_instance8 = code_value8
instance_view_status_instance7.code = code_instance8
level_value8 = statuses_value5.get('level', None)
if level_value8 is not None:
level_instance8 = level_value8
instance_view_status_instance7.level = level_instance8
display_status_value8 = statuses_value5.get('displayStatus', None)
if display_status_value8 is not None:
display_status_instance8 = display_status_value8
instance_view_status_instance7.display_status = display_status_instance8
message_value8 = statuses_value5.get('message', None)
if message_value8 is not None:
message_instance8 = message_value8
instance_view_status_instance7.message = message_instance8
time_value8 = statuses_value5.get('time', None)
if time_value8 is not None:
time_instance8 = time_value8
instance_view_status_instance7.time = time_instance8
id_value5 = resources_value.get('id', None)
if id_value5 is not None:
id_instance5 = id_value5
virtual_machine_extension_json_instance.id = id_instance5
name_value7 = resources_value.get('name', None)
if name_value7 is not None:
name_instance7 = name_value7
virtual_machine_extension_json_instance.name = name_instance7
type_value5 = resources_value.get('type', None)
if type_value5 is not None:
type_instance5 = type_value5
virtual_machine_extension_json_instance.type = type_instance5
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_json_instance.location = location_instance
tags_sequence_element = resources_value.get('tags', None)
if tags_sequence_element is not None:
for property in tags_sequence_element:
tags_key3 = property
tags_value3 = tags_sequence_element[property]
virtual_machine_extension_json_instance.tags[tags_key3] = tags_value3
id_value6 = response_doc.get('id', None)
if id_value6 is not None:
id_instance6 = id_value6
virtual_machine_instance.id = id_instance6
name_value8 = response_doc.get('name', None)
if name_value8 is not None:
name_instance8 = name_value8
virtual_machine_instance.name = name_instance8
type_value6 = response_doc.get('type', None)
if type_value6 is not None:
type_instance6 = type_value6
virtual_machine_instance.type = type_instance6
location_value2 = response_doc.get('location', None)
if location_value2 is not None:
location_instance2 = location_value2
virtual_machine_instance.location = location_instance2
tags_sequence_element2 = response_doc.get('tags', None)
if tags_sequence_element2 is not None:
for property2 in tags_sequence_element2:
tags_key4 = property2
tags_value4 = tags_sequence_element2[property2]
virtual_machine_instance.tags[tags_key4] = tags_value4
result.status_code = status_code
result.azure_async_operation = response.headers.get('azure-asyncoperation')
result.request_id = response.headers.get('x-ms-request-id')
return result
def begin_deallocating(self, resource_group_name, vm_name):
"""
        Shuts down the virtual machine and releases its compute resources.
        You are not billed for the compute resources of a deallocated
        virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeOperationResponse: The compute long running operation response.
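        Examples:
            A minimal usage sketch; ``compute_client`` is assumed to be an
            already-authenticated compute management client, and the
            resource names are illustrative:
            >>> response = compute_client.virtual_machines.begin_deallocating(
            >>>     'my-resource-group', 'my-vm')
            >>> print(response.status_code)  # 202 (Accepted) is expected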
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/deallocate'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
        if base_url[-1] == '/':
            base_url = base_url[:-1]
        if url[0] == '/':
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'POST'
# Set Headers
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 202:
error = AzureHttpError(body, response.status_code)
raise error
        # Create and deserialize result
        result = ComputeOperationResponse()
result.status_code = status_code
result.azure_async_operation = response.headers.get('azure-asyncoperation')
result.request_id = response.headers.get('x-ms-request-id')
return result
def begin_deleting(self, resource_group_name, vm_name):
"""
        Deletes the specified virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeOperationResponse: The compute long running operation response.
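        Examples:
            A minimal usage sketch (illustrative names; assumes an
            authenticated ``compute_client``):
            >>> response = compute_client.virtual_machines.begin_deleting(
            >>>     'my-resource-group', 'my-vm')
            >>> print(response.status_code)  # 202 (Accepted) is expected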
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
        if base_url[-1] == '/':
            base_url = base_url[:-1]
        if url[0] == '/':
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'DELETE'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 202:
error = AzureHttpError(body, response.status_code)
raise error
        # Create and deserialize result
        result = ComputeOperationResponse()
result.status_code = status_code
result.azure_async_operation = response.headers.get('azure-asyncoperation')
result.request_id = response.headers.get('x-ms-request-id')
return result
def begin_powering_off(self, resource_group_name, vm_name):
"""
        Powers off (stops) the specified virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeOperationResponse: The compute long running operation response.
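        Examples:
            A minimal usage sketch (illustrative names; assumes an
            authenticated ``compute_client``):
            >>> response = compute_client.virtual_machines.begin_powering_off(
            >>>     'my-resource-group', 'my-vm')
            >>> print(response.status_code)  # 202 (Accepted) is expected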
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/powerOff'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
        if base_url[-1] == '/':
            base_url = base_url[:-1]
        if url[0] == '/':
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'POST'
# Set Headers
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 202:
error = AzureHttpError(body, response.status_code)
raise error
        # Create and deserialize result
        result = ComputeOperationResponse()
result.status_code = status_code
result.azure_async_operation = response.headers.get('azure-asyncoperation')
result.request_id = response.headers.get('x-ms-request-id')
return result
def begin_restarting(self, resource_group_name, vm_name):
"""
        Restarts the specified virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeOperationResponse: The compute long running operation response.
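        Examples:
            A minimal usage sketch (illustrative names; assumes an
            authenticated ``compute_client``):
            >>> response = compute_client.virtual_machines.begin_restarting(
            >>>     'my-resource-group', 'my-vm')
            >>> print(response.status_code)  # 202 (Accepted) is expected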
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/restart'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
        if base_url[-1] == '/':
            base_url = base_url[:-1]
        if url[0] == '/':
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'POST'
# Set Headers
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 202:
error = AzureHttpError(body, response.status_code)
raise error
        # Create and deserialize result
        result = ComputeOperationResponse()
result.status_code = status_code
result.azure_async_operation = response.headers.get('azure-asyncoperation')
result.request_id = response.headers.get('x-ms-request-id')
return result
def begin_starting(self, resource_group_name, vm_name):
"""
        Starts the specified virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeOperationResponse: The compute long running operation response.
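        Examples:
            A minimal usage sketch (illustrative names; assumes an
            authenticated ``compute_client``):
            >>> response = compute_client.virtual_machines.begin_starting(
            >>>     'my-resource-group', 'my-vm')
            >>> print(response.status_code)  # 202 (Accepted) is expected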
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/start'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
        if base_url[-1] == '/':
            base_url = base_url[:-1]
        if url[0] == '/':
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'POST'
# Set Headers
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 202:
error = AzureHttpError(body, response.status_code)
raise error
        # Create and deserialize result
        result = ComputeOperationResponse()
result.status_code = status_code
result.azure_async_operation = response.headers.get('azure-asyncoperation')
result.request_id = response.headers.get('x-ms-request-id')
return result
def capture(self, resource_group_name, vm_name, parameters):
"""
        Captures the VM by copying its virtual hard disks and outputs a
        template that can be used to create similar VMs.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
parameters (VirtualMachineCaptureParameters): Parameters supplied to
the Capture Virtual Machine operation.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
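        Examples:
            A minimal usage sketch; the capture parameters object is a bare
            placeholder whose required fields must be populated per the
            service documentation (all names are illustrative):
            >>> params = VirtualMachineCaptureParameters()
            >>> result = compute_client.virtual_machines.capture(
            >>>     'my-resource-group', 'my-vm', params)
            >>> print(result.status)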
"""
client2 = self.client
response = client2.virtual_machines.begin_capturing(resource_group_name, vm_name, parameters)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_initial_timeout >= 0:
delay_in_seconds = client2.long_running_operation_initial_timeout
        while result.status == ComputeOperationStatus.in_progress:
time.sleep(delay_in_seconds)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_retry_timeout >= 0:
delay_in_seconds = client2.long_running_operation_retry_timeout
return result
def create_or_update(self, resource_group_name, parameters):
"""
        Creates or updates a virtual machine.
Args:
resource_group_name (string): The name of the resource group.
parameters (VirtualMachine): Parameters supplied to the Create Virtual
Machine operation.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
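        Examples:
            A minimal usage sketch; the VirtualMachine payload below is a
            bare placeholder (its name, hardware, storage, and OS profiles
            must be populated before a real call), and the names shown are
            illustrative:
            >>> vm = VirtualMachine(extensions=[], tags={})
            >>> vm.location = 'westus'  # illustrative region
            >>> result = compute_client.virtual_machines.create_or_update(
            >>>     'my-resource-group', vm)
            >>> print(result.status)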
"""
client2 = self.client
response = client2.virtual_machines.begin_creating_or_updating(resource_group_name, parameters)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_initial_timeout >= 0:
delay_in_seconds = client2.long_running_operation_initial_timeout
        while result.status == ComputeOperationStatus.in_progress:
time.sleep(delay_in_seconds)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_retry_timeout >= 0:
delay_in_seconds = client2.long_running_operation_retry_timeout
return result
def deallocate(self, resource_group_name, vm_name):
"""
        Shuts down the virtual machine and releases its compute resources.
        You are not billed for the compute resources of a deallocated
        virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
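        Examples:
            A minimal usage sketch (illustrative names); this call blocks,
            polling until the operation leaves the in-progress state:
            >>> result = compute_client.virtual_machines.deallocate(
            >>>     'my-resource-group', 'my-vm')
            >>> print(result.status)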
"""
client2 = self.client
response = client2.virtual_machines.begin_deallocating(resource_group_name, vm_name)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_initial_timeout >= 0:
delay_in_seconds = client2.long_running_operation_initial_timeout
        while result.status == ComputeOperationStatus.in_progress:
time.sleep(delay_in_seconds)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_retry_timeout >= 0:
delay_in_seconds = client2.long_running_operation_retry_timeout
return result
def delete(self, resource_group_name, vm_name):
"""
        Deletes the specified virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
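        Examples:
            A minimal usage sketch (illustrative names); blocks until the
            long-running delete completes:
            >>> result = compute_client.virtual_machines.delete(
            >>>     'my-resource-group', 'my-vm')
            >>> print(result.status)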
"""
client2 = self.client
response = client2.virtual_machines.begin_deleting(resource_group_name, vm_name)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_initial_timeout >= 0:
delay_in_seconds = client2.long_running_operation_initial_timeout
        while result.status == ComputeOperationStatus.in_progress:
time.sleep(delay_in_seconds)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_retry_timeout >= 0:
delay_in_seconds = client2.long_running_operation_retry_timeout
return result
def generalize(self, resource_group_name, vm_name):
"""
        Sets the state of the virtual machine to Generalized.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
AzureOperationResponse: A standard service response including an HTTP
status code and request ID.
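        Examples:
            A minimal usage sketch (illustrative names); the guest OS is
            assumed to have been generalized beforehand:
            >>> result = compute_client.virtual_machines.generalize(
            >>>     'my-resource-group', 'my-vm')
            >>> print(result.status_code)  # 200 is expected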
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/generalize'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
        if base_url[-1] == '/':
            base_url = base_url[:-1]
        if url[0] == '/':
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'POST'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
        # Create and deserialize result
        result = AzureOperationResponse()
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def get(self, resource_group_name, vm_name):
"""
        Gets information about the specified virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
VirtualMachineGetResponse: The GetVM operation response.
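        Examples:
            A minimal usage sketch (illustrative names; assumes an
            authenticated ``compute_client``):
            >>> result = compute_client.virtual_machines.get(
            >>>     'my-resource-group', 'my-vm')
            >>> vm = result.virtual_machine
            >>> print(vm.name, vm.location)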
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
        if base_url[-1] == '/':
            base_url = base_url[:-1]
        if url[0] == '/':
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineGetResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
virtual_machine_instance = VirtualMachine(extensions=[], tags={})
result.virtual_machine = virtual_machine_instance
plan_value = response_doc.get('plan', None)
if plan_value is not None:
plan_instance = Plan()
virtual_machine_instance.plan = plan_instance
name_value = plan_value.get('name', None)
if name_value is not None:
name_instance = name_value
plan_instance.name = name_instance
publisher_value = plan_value.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
plan_instance.publisher = publisher_instance
product_value = plan_value.get('product', None)
if product_value is not None:
product_instance = product_value
plan_instance.product = product_instance
promotion_code_value = plan_value.get('promotionCode', None)
if promotion_code_value is not None:
promotion_code_instance = promotion_code_value
plan_instance.promotion_code = promotion_code_instance
properties_value = response_doc.get('properties', None)
if properties_value is not None:
hardware_profile_value = properties_value.get('hardwareProfile', None)
if hardware_profile_value is not None:
hardware_profile_instance = HardwareProfile()
virtual_machine_instance.hardware_profile = hardware_profile_instance
vm_size_value = hardware_profile_value.get('vmSize', None)
if vm_size_value is not None:
vm_size_instance = vm_size_value
hardware_profile_instance.virtual_machine_size = vm_size_instance
storage_profile_value = properties_value.get('storageProfile', None)
if storage_profile_value is not None:
storage_profile_instance = StorageProfile(data_disks=[])
virtual_machine_instance.storage_profile = storage_profile_instance
image_reference_value = storage_profile_value.get('imageReference', None)
if image_reference_value is not None:
image_reference_instance = ImageReference()
storage_profile_instance.image_reference = image_reference_instance
publisher_value2 = image_reference_value.get('publisher', None)
if publisher_value2 is not None:
publisher_instance2 = publisher_value2
image_reference_instance.publisher = publisher_instance2
offer_value = image_reference_value.get('offer', None)
if offer_value is not None:
offer_instance = offer_value
image_reference_instance.offer = offer_instance
sku_value = image_reference_value.get('sku', None)
if sku_value is not None:
sku_instance = sku_value
image_reference_instance.sku = sku_instance
version_value = image_reference_value.get('version', None)
if version_value is not None:
version_instance = version_value
image_reference_instance.version = version_instance
source_image_value = storage_profile_value.get('sourceImage', None)
if source_image_value is not None:
source_image_instance = SourceImageReference()
storage_profile_instance.source_image = source_image_instance
id_value = source_image_value.get('id', None)
if id_value is not None:
id_instance = id_value
source_image_instance.reference_uri = id_instance
os_disk_value = storage_profile_value.get('osDisk', None)
if os_disk_value is not None:
os_disk_instance = OSDisk()
storage_profile_instance.os_disk = os_disk_instance
os_type_value = os_disk_value.get('osType', None)
if os_type_value is not None:
os_type_instance = os_type_value
os_disk_instance.operating_system_type = os_type_instance
name_value2 = os_disk_value.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
os_disk_instance.name = name_instance2
vhd_value = os_disk_value.get('vhd', None)
if vhd_value is not None:
vhd_instance = VirtualHardDisk()
os_disk_instance.virtual_hard_disk = vhd_instance
uri_value = vhd_value.get('uri', None)
if uri_value is not None:
uri_instance = uri_value
vhd_instance.uri = uri_instance
image_value = os_disk_value.get('image', None)
if image_value is not None:
image_instance = VirtualHardDisk()
os_disk_instance.source_image = image_instance
uri_value2 = image_value.get('uri', None)
if uri_value2 is not None:
uri_instance2 = uri_value2
image_instance.uri = uri_instance2
caching_value = os_disk_value.get('caching', None)
if caching_value is not None:
caching_instance = caching_value
os_disk_instance.caching = caching_instance
create_option_value = os_disk_value.get('createOption', None)
if create_option_value is not None:
create_option_instance = create_option_value
os_disk_instance.create_option = create_option_instance
data_disks_array = storage_profile_value.get('dataDisks', None)
if data_disks_array is not None:
for data_disks_value in data_disks_array:
data_disk_instance = DataDisk()
storage_profile_instance.data_disks.append(data_disk_instance)
lun_value = data_disks_value.get('lun', None)
if lun_value is not None:
lun_instance = lun_value
data_disk_instance.lun = lun_instance
disk_size_gb_value = data_disks_value.get('diskSizeGB', None)
if disk_size_gb_value is not None:
disk_size_gb_instance = disk_size_gb_value
data_disk_instance.disk_size_gb = disk_size_gb_instance
name_value3 = data_disks_value.get('name', None)
if name_value3 is not None:
name_instance3 = name_value3
data_disk_instance.name = name_instance3
vhd_value2 = data_disks_value.get('vhd', None)
if vhd_value2 is not None:
vhd_instance2 = VirtualHardDisk()
data_disk_instance.virtual_hard_disk = vhd_instance2
uri_value3 = vhd_value2.get('uri', None)
if uri_value3 is not None:
uri_instance3 = uri_value3
vhd_instance2.uri = uri_instance3
image_value2 = data_disks_value.get('image', None)
if image_value2 is not None:
image_instance2 = VirtualHardDisk()
data_disk_instance.source_image = image_instance2
uri_value4 = image_value2.get('uri', None)
if uri_value4 is not None:
uri_instance4 = uri_value4
image_instance2.uri = uri_instance4
caching_value2 = data_disks_value.get('caching', None)
if caching_value2 is not None:
caching_instance2 = caching_value2
data_disk_instance.caching = caching_instance2
create_option_value2 = data_disks_value.get('createOption', None)
if create_option_value2 is not None:
create_option_instance2 = create_option_value2
data_disk_instance.create_option = create_option_instance2
os_profile_value = properties_value.get('osProfile', None)
if os_profile_value is not None:
os_profile_instance = OSProfile(secrets=[])
virtual_machine_instance.os_profile = os_profile_instance
computer_name_value = os_profile_value.get('computerName', None)
if computer_name_value is not None:
computer_name_instance = computer_name_value
os_profile_instance.computer_name = computer_name_instance
admin_username_value = os_profile_value.get('adminUsername', None)
if admin_username_value is not None:
admin_username_instance = admin_username_value
os_profile_instance.admin_username = admin_username_instance
admin_password_value = os_profile_value.get('adminPassword', None)
if admin_password_value is not None:
admin_password_instance = admin_password_value
os_profile_instance.admin_password = admin_password_instance
custom_data_value = os_profile_value.get('customData', None)
if custom_data_value is not None:
custom_data_instance = custom_data_value
os_profile_instance.custom_data = custom_data_instance
windows_configuration_value = os_profile_value.get('windowsConfiguration', None)
if windows_configuration_value is not None:
windows_configuration_instance = WindowsConfiguration(additional_unattend_contents=[])
os_profile_instance.windows_configuration = windows_configuration_instance
provision_vm_agent_value = windows_configuration_value.get('provisionVMAgent', None)
if provision_vm_agent_value is not None:
provision_vm_agent_instance = provision_vm_agent_value
windows_configuration_instance.provision_vm_agent = provision_vm_agent_instance
enable_automatic_updates_value = windows_configuration_value.get('enableAutomaticUpdates', None)
if enable_automatic_updates_value is not None:
enable_automatic_updates_instance = enable_automatic_updates_value
windows_configuration_instance.enable_automatic_updates = enable_automatic_updates_instance
time_zone_value = windows_configuration_value.get('timeZone', None)
if time_zone_value is not None:
time_zone_instance = time_zone_value
windows_configuration_instance.time_zone = time_zone_instance
additional_unattend_content_array = windows_configuration_value.get('additionalUnattendContent', None)
if additional_unattend_content_array is not None:
for additional_unattend_content_value in additional_unattend_content_array:
additional_unattend_content_instance = AdditionalUnattendContent()
windows_configuration_instance.additional_unattend_contents.append(additional_unattend_content_instance)
pass_name_value = additional_unattend_content_value.get('passName', None)
if pass_name_value is not None:
pass_name_instance = pass_name_value
additional_unattend_content_instance.pass_name = pass_name_instance
component_name_value = additional_unattend_content_value.get('componentName', None)
if component_name_value is not None:
component_name_instance = component_name_value
additional_unattend_content_instance.component_name = component_name_instance
setting_name_value = additional_unattend_content_value.get('settingName', None)
if setting_name_value is not None:
setting_name_instance = setting_name_value
additional_unattend_content_instance.setting_name = setting_name_instance
content_value = additional_unattend_content_value.get('content', None)
if content_value is not None:
content_instance = content_value
additional_unattend_content_instance.content = content_instance
win_rm_value = windows_configuration_value.get('winRM', None)
if win_rm_value is not None:
win_rm_instance = WinRMConfiguration(listeners=[])
windows_configuration_instance.win_rm_configuration = win_rm_instance
listeners_array = win_rm_value.get('listeners', None)
if listeners_array is not None:
for listeners_value in listeners_array:
win_rm_listener_instance = WinRMListener()
win_rm_instance.listeners.append(win_rm_listener_instance)
protocol_value = listeners_value.get('protocol', None)
if protocol_value is not None:
protocol_instance = protocol_value
win_rm_listener_instance.protocol = protocol_instance
certificate_url_value = listeners_value.get('certificateUrl', None)
if certificate_url_value is not None:
certificate_url_instance = certificate_url_value
win_rm_listener_instance.certificate_url = certificate_url_instance
linux_configuration_value = os_profile_value.get('linuxConfiguration', None)
if linux_configuration_value is not None:
linux_configuration_instance = LinuxConfiguration()
os_profile_instance.linux_configuration = linux_configuration_instance
disable_password_authentication_value = linux_configuration_value.get('disablePasswordAuthentication', None)
if disable_password_authentication_value is not None:
disable_password_authentication_instance = disable_password_authentication_value
linux_configuration_instance.disable_password_authentication = disable_password_authentication_instance
ssh_value = linux_configuration_value.get('ssh', None)
if ssh_value is not None:
ssh_instance = SshConfiguration(public_keys=[])
linux_configuration_instance.ssh_configuration = ssh_instance
public_keys_array = ssh_value.get('publicKeys', None)
if public_keys_array is not None:
for public_keys_value in public_keys_array:
ssh_public_key_instance = SshPublicKey()
ssh_instance.public_keys.append(ssh_public_key_instance)
path_value = public_keys_value.get('path', None)
if path_value is not None:
path_instance = path_value
ssh_public_key_instance.path = path_instance
key_data_value = public_keys_value.get('keyData', None)
if key_data_value is not None:
key_data_instance = key_data_value
ssh_public_key_instance.key_data = key_data_instance
secrets_array = os_profile_value.get('secrets', None)
if secrets_array is not None:
for secrets_value in secrets_array:
vault_secret_group_instance = VaultSecretGroup(vault_certificates=[])
os_profile_instance.secrets.append(vault_secret_group_instance)
source_vault_value = secrets_value.get('sourceVault', None)
if source_vault_value is not None:
source_vault_instance = SourceVaultReference()
vault_secret_group_instance.source_vault = source_vault_instance
id_value2 = source_vault_value.get('id', None)
if id_value2 is not None:
id_instance2 = id_value2
source_vault_instance.reference_uri = id_instance2
vault_certificates_array = secrets_value.get('vaultCertificates', None)
if vault_certificates_array is not None:
for vault_certificates_value in vault_certificates_array:
vault_certificate_instance = VaultCertificate()
vault_secret_group_instance.vault_certificates.append(vault_certificate_instance)
certificate_url_value2 = vault_certificates_value.get('certificateUrl', None)
if certificate_url_value2 is not None:
certificate_url_instance2 = certificate_url_value2
vault_certificate_instance.certificate_url = certificate_url_instance2
certificate_store_value = vault_certificates_value.get('certificateStore', None)
if certificate_store_value is not None:
certificate_store_instance = certificate_store_value
vault_certificate_instance.certificate_store = certificate_store_instance
network_profile_value = properties_value.get('networkProfile', None)
if network_profile_value is not None:
network_profile_instance = NetworkProfile(network_interfaces=[])
virtual_machine_instance.network_profile = network_profile_instance
network_interfaces_array = network_profile_value.get('networkInterfaces', None)
if network_interfaces_array is not None:
for network_interfaces_value in network_interfaces_array:
network_interface_reference_json_instance = NetworkInterfaceReference()
network_profile_instance.network_interfaces.append(network_interface_reference_json_instance)
properties_value2 = network_interfaces_value.get('properties', None)
if properties_value2 is not None:
primary_value = properties_value2.get('primary', None)
if primary_value is not None:
primary_instance = primary_value
network_interface_reference_json_instance.primary = primary_instance
id_value3 = network_interfaces_value.get('id', None)
if id_value3 is not None:
id_instance3 = id_value3
network_interface_reference_json_instance.reference_uri = id_instance3
availability_set_value = properties_value.get('availabilitySet', None)
if availability_set_value is not None:
availability_set_instance = AvailabilitySetReference()
virtual_machine_instance.availability_set_reference = availability_set_instance
id_value4 = availability_set_value.get('id', None)
if id_value4 is not None:
id_instance4 = id_value4
availability_set_instance.reference_uri = id_instance4
provisioning_state_value = properties_value.get('provisioningState', None)
if provisioning_state_value is not None:
provisioning_state_instance = provisioning_state_value
virtual_machine_instance.provisioning_state = provisioning_state_instance
instance_view_value = properties_value.get('instanceView', None)
if instance_view_value is not None:
instance_view_instance = VirtualMachineInstanceView(disks=[], extensions=[], statuses=[])
virtual_machine_instance.instance_view = instance_view_instance
platform_update_domain_value = instance_view_value.get('platformUpdateDomain', None)
if platform_update_domain_value is not None:
platform_update_domain_instance = platform_update_domain_value
instance_view_instance.platform_update_domain = platform_update_domain_instance
platform_fault_domain_value = instance_view_value.get('platformFaultDomain', None)
if platform_fault_domain_value is not None:
platform_fault_domain_instance = platform_fault_domain_value
instance_view_instance.platform_fault_domain = platform_fault_domain_instance
rdp_thumb_print_value = instance_view_value.get('rdpThumbPrint', None)
if rdp_thumb_print_value is not None:
rdp_thumb_print_instance = rdp_thumb_print_value
instance_view_instance.remote_desktop_thumbprint = rdp_thumb_print_instance
vm_agent_value = instance_view_value.get('vmAgent', None)
if vm_agent_value is not None:
vm_agent_instance = VirtualMachineAgentInstanceView(extension_handlers=[], statuses=[])
instance_view_instance.vm_agent = vm_agent_instance
vm_agent_version_value = vm_agent_value.get('vmAgentVersion', None)
if vm_agent_version_value is not None:
vm_agent_version_instance = vm_agent_version_value
vm_agent_instance.vm_agent_version = vm_agent_version_instance
extension_handlers_array = vm_agent_value.get('extensionHandlers', None)
if extension_handlers_array is not None:
for extension_handlers_value in extension_handlers_array:
virtual_machine_extension_handler_instance_view_instance = VirtualMachineExtensionHandlerInstanceView()
vm_agent_instance.extension_handlers.append(virtual_machine_extension_handler_instance_view_instance)
type_value = extension_handlers_value.get('type', None)
if type_value is not None:
type_instance = type_value
virtual_machine_extension_handler_instance_view_instance.type = type_instance
type_handler_version_value = extension_handlers_value.get('typeHandlerVersion', None)
if type_handler_version_value is not None:
type_handler_version_instance = type_handler_version_value
virtual_machine_extension_handler_instance_view_instance.type_handler_version = type_handler_version_instance
status_value = extension_handlers_value.get('status', None)
if status_value is not None:
status_instance = InstanceViewStatus()
virtual_machine_extension_handler_instance_view_instance.status = status_instance
code_value = status_value.get('code', None)
if code_value is not None:
code_instance = code_value
status_instance.code = code_instance
level_value = status_value.get('level', None)
if level_value is not None:
level_instance = level_value
status_instance.level = level_instance
display_status_value = status_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
status_instance.display_status = display_status_instance
message_value = status_value.get('message', None)
if message_value is not None:
message_instance = message_value
status_instance.message = message_instance
time_value = status_value.get('time', None)
if time_value is not None:
time_instance = time_value
status_instance.time = time_instance
statuses_array = vm_agent_value.get('statuses', None)
if statuses_array is not None:
for statuses_value in statuses_array:
instance_view_status_instance = InstanceViewStatus()
vm_agent_instance.statuses.append(instance_view_status_instance)
code_value2 = statuses_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
instance_view_status_instance.code = code_instance2
level_value2 = statuses_value.get('level', None)
if level_value2 is not None:
level_instance2 = level_value2
instance_view_status_instance.level = level_instance2
display_status_value2 = statuses_value.get('displayStatus', None)
if display_status_value2 is not None:
display_status_instance2 = display_status_value2
instance_view_status_instance.display_status = display_status_instance2
message_value2 = statuses_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
instance_view_status_instance.message = message_instance2
time_value2 = statuses_value.get('time', None)
if time_value2 is not None:
time_instance2 = time_value2
instance_view_status_instance.time = time_instance2
disks_array = instance_view_value.get('disks', None)
if disks_array is not None:
for disks_value in disks_array:
disk_instance_view_instance = DiskInstanceView(statuses=[])
instance_view_instance.disks.append(disk_instance_view_instance)
name_value4 = disks_value.get('name', None)
if name_value4 is not None:
name_instance4 = name_value4
disk_instance_view_instance.name = name_instance4
statuses_array2 = disks_value.get('statuses', None)
if statuses_array2 is not None:
for statuses_value2 in statuses_array2:
instance_view_status_instance2 = InstanceViewStatus()
disk_instance_view_instance.statuses.append(instance_view_status_instance2)
code_value3 = statuses_value2.get('code', None)
if code_value3 is not None:
code_instance3 = code_value3
instance_view_status_instance2.code = code_instance3
level_value3 = statuses_value2.get('level', None)
if level_value3 is not None:
level_instance3 = level_value3
instance_view_status_instance2.level = level_instance3
display_status_value3 = statuses_value2.get('displayStatus', None)
if display_status_value3 is not None:
display_status_instance3 = display_status_value3
instance_view_status_instance2.display_status = display_status_instance3
message_value3 = statuses_value2.get('message', None)
if message_value3 is not None:
message_instance3 = message_value3
instance_view_status_instance2.message = message_instance3
time_value3 = statuses_value2.get('time', None)
if time_value3 is not None:
time_instance3 = time_value3
instance_view_status_instance2.time = time_instance3
extensions_array = instance_view_value.get('extensions', None)
if extensions_array is not None:
for extensions_value in extensions_array:
virtual_machine_extension_instance_view_instance = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
instance_view_instance.extensions.append(virtual_machine_extension_instance_view_instance)
name_value5 = extensions_value.get('name', None)
if name_value5 is not None:
name_instance5 = name_value5
virtual_machine_extension_instance_view_instance.name = name_instance5
type_value2 = extensions_value.get('type', None)
if type_value2 is not None:
type_instance2 = type_value2
virtual_machine_extension_instance_view_instance.extension_type = type_instance2
type_handler_version_value2 = extensions_value.get('typeHandlerVersion', None)
if type_handler_version_value2 is not None:
type_handler_version_instance2 = type_handler_version_value2
virtual_machine_extension_instance_view_instance.type_handler_version = type_handler_version_instance2
substatuses_array = extensions_value.get('substatuses', None)
if substatuses_array is not None:
for substatuses_value in substatuses_array:
instance_view_status_instance3 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.sub_statuses.append(instance_view_status_instance3)
code_value4 = substatuses_value.get('code', None)
if code_value4 is not None:
code_instance4 = code_value4
instance_view_status_instance3.code = code_instance4
level_value4 = substatuses_value.get('level', None)
if level_value4 is not None:
level_instance4 = level_value4
instance_view_status_instance3.level = level_instance4
display_status_value4 = substatuses_value.get('displayStatus', None)
if display_status_value4 is not None:
display_status_instance4 = display_status_value4
instance_view_status_instance3.display_status = display_status_instance4
message_value4 = substatuses_value.get('message', None)
if message_value4 is not None:
message_instance4 = message_value4
instance_view_status_instance3.message = message_instance4
time_value4 = substatuses_value.get('time', None)
if time_value4 is not None:
time_instance4 = time_value4
instance_view_status_instance3.time = time_instance4
statuses_array3 = extensions_value.get('statuses', None)
if statuses_array3 is not None:
for statuses_value3 in statuses_array3:
instance_view_status_instance4 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.statuses.append(instance_view_status_instance4)
code_value5 = statuses_value3.get('code', None)
if code_value5 is not None:
code_instance5 = code_value5
instance_view_status_instance4.code = code_instance5
level_value5 = statuses_value3.get('level', None)
if level_value5 is not None:
level_instance5 = level_value5
instance_view_status_instance4.level = level_instance5
display_status_value5 = statuses_value3.get('displayStatus', None)
if display_status_value5 is not None:
display_status_instance5 = display_status_value5
instance_view_status_instance4.display_status = display_status_instance5
message_value5 = statuses_value3.get('message', None)
if message_value5 is not None:
message_instance5 = message_value5
instance_view_status_instance4.message = message_instance5
time_value5 = statuses_value3.get('time', None)
if time_value5 is not None:
time_instance5 = time_value5
instance_view_status_instance4.time = time_instance5
statuses_array4 = instance_view_value.get('statuses', None)
if statuses_array4 is not None:
for statuses_value4 in statuses_array4:
instance_view_status_instance5 = InstanceViewStatus()
instance_view_instance.statuses.append(instance_view_status_instance5)
code_value6 = statuses_value4.get('code', None)
if code_value6 is not None:
code_instance6 = code_value6
instance_view_status_instance5.code = code_instance6
level_value6 = statuses_value4.get('level', None)
if level_value6 is not None:
level_instance6 = level_value6
instance_view_status_instance5.level = level_instance6
display_status_value6 = statuses_value4.get('displayStatus', None)
if display_status_value6 is not None:
display_status_instance6 = display_status_value6
instance_view_status_instance5.display_status = display_status_instance6
message_value6 = statuses_value4.get('message', None)
if message_value6 is not None:
message_instance6 = message_value6
instance_view_status_instance5.message = message_instance6
time_value6 = statuses_value4.get('time', None)
if time_value6 is not None:
time_instance6 = time_value6
instance_view_status_instance5.time = time_instance6
resources_array = response_doc.get('resources', None)
if resources_array is not None:
virtual_machine_instance.extensions = []
for resources_value in resources_array:
virtual_machine_extension_json_instance = VirtualMachineExtension(tags={})
virtual_machine_instance.extensions.append(virtual_machine_extension_json_instance)
properties_value3 = resources_value.get('properties', None)
if properties_value3 is not None:
publisher_value3 = properties_value3.get('publisher', None)
if publisher_value3 is not None:
publisher_instance3 = publisher_value3
virtual_machine_extension_json_instance.publisher = publisher_instance3
type_value3 = properties_value3.get('type', None)
if type_value3 is not None:
type_instance3 = type_value3
virtual_machine_extension_json_instance.extension_type = type_instance3
type_handler_version_value3 = properties_value3.get('typeHandlerVersion', None)
if type_handler_version_value3 is not None:
type_handler_version_instance3 = type_handler_version_value3
virtual_machine_extension_json_instance.type_handler_version = type_handler_version_instance3
auto_upgrade_minor_version_value = properties_value3.get('autoUpgradeMinorVersion', None)
if auto_upgrade_minor_version_value is not None:
auto_upgrade_minor_version_instance = auto_upgrade_minor_version_value
virtual_machine_extension_json_instance.auto_upgrade_minor_version = auto_upgrade_minor_version_instance
settings_value = properties_value3.get('settings', None)
if settings_value is not None:
settings_instance = json.dumps(settings_value)
virtual_machine_extension_json_instance.settings = settings_instance
protected_settings_value = properties_value3.get('protectedSettings', None)
if protected_settings_value is not None:
protected_settings_instance = json.dumps(protected_settings_value)
virtual_machine_extension_json_instance.protected_settings = protected_settings_instance
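                            # The extension 'settings'/'protectedSettings' fields arrive
                            # as JSON objects; they are stored on the model re-serialized
                            # as JSON strings via json.dumps.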
provisioning_state_value2 = properties_value3.get('provisioningState', None)
if provisioning_state_value2 is not None:
provisioning_state_instance2 = provisioning_state_value2
virtual_machine_extension_json_instance.provisioning_state = provisioning_state_instance2
instance_view_value2 = properties_value3.get('instanceView', None)
if instance_view_value2 is not None:
instance_view_instance2 = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
virtual_machine_extension_json_instance.instance_view = instance_view_instance2
name_value6 = instance_view_value2.get('name', None)
if name_value6 is not None:
name_instance6 = name_value6
instance_view_instance2.name = name_instance6
type_value4 = instance_view_value2.get('type', None)
if type_value4 is not None:
type_instance4 = type_value4
instance_view_instance2.extension_type = type_instance4
type_handler_version_value4 = instance_view_value2.get('typeHandlerVersion', None)
if type_handler_version_value4 is not None:
type_handler_version_instance4 = type_handler_version_value4
instance_view_instance2.type_handler_version = type_handler_version_instance4
substatuses_array2 = instance_view_value2.get('substatuses', None)
if substatuses_array2 is not None:
for substatuses_value2 in substatuses_array2:
instance_view_status_instance6 = InstanceViewStatus()
instance_view_instance2.sub_statuses.append(instance_view_status_instance6)
code_value7 = substatuses_value2.get('code', None)
if code_value7 is not None:
code_instance7 = code_value7
instance_view_status_instance6.code = code_instance7
level_value7 = substatuses_value2.get('level', None)
if level_value7 is not None:
level_instance7 = level_value7
instance_view_status_instance6.level = level_instance7
display_status_value7 = substatuses_value2.get('displayStatus', None)
if display_status_value7 is not None:
display_status_instance7 = display_status_value7
instance_view_status_instance6.display_status = display_status_instance7
message_value7 = substatuses_value2.get('message', None)
if message_value7 is not None:
message_instance7 = message_value7
instance_view_status_instance6.message = message_instance7
time_value7 = substatuses_value2.get('time', None)
if time_value7 is not None:
time_instance7 = time_value7
instance_view_status_instance6.time = time_instance7
statuses_array5 = instance_view_value2.get('statuses', None)
if statuses_array5 is not None:
for statuses_value5 in statuses_array5:
instance_view_status_instance7 = InstanceViewStatus()
instance_view_instance2.statuses.append(instance_view_status_instance7)
code_value8 = statuses_value5.get('code', None)
if code_value8 is not None:
code_instance8 = code_value8
instance_view_status_instance7.code = code_instance8
level_value8 = statuses_value5.get('level', None)
if level_value8 is not None:
level_instance8 = level_value8
instance_view_status_instance7.level = level_instance8
display_status_value8 = statuses_value5.get('displayStatus', None)
if display_status_value8 is not None:
display_status_instance8 = display_status_value8
instance_view_status_instance7.display_status = display_status_instance8
message_value8 = statuses_value5.get('message', None)
if message_value8 is not None:
message_instance8 = message_value8
instance_view_status_instance7.message = message_instance8
time_value8 = statuses_value5.get('time', None)
if time_value8 is not None:
time_instance8 = time_value8
instance_view_status_instance7.time = time_instance8
id_value5 = resources_value.get('id', None)
if id_value5 is not None:
id_instance5 = id_value5
virtual_machine_extension_json_instance.id = id_instance5
name_value7 = resources_value.get('name', None)
if name_value7 is not None:
name_instance7 = name_value7
virtual_machine_extension_json_instance.name = name_instance7
type_value5 = resources_value.get('type', None)
if type_value5 is not None:
type_instance5 = type_value5
virtual_machine_extension_json_instance.type = type_instance5
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_json_instance.location = location_instance
tags_sequence_element = resources_value.get('tags', None)
if tags_sequence_element is not None:
                            for tags_key, tags_value in tags_sequence_element.items():
                                virtual_machine_extension_json_instance.tags[tags_key] = tags_value
id_value6 = response_doc.get('id', None)
if id_value6 is not None:
id_instance6 = id_value6
virtual_machine_instance.id = id_instance6
name_value8 = response_doc.get('name', None)
if name_value8 is not None:
name_instance8 = name_value8
virtual_machine_instance.name = name_instance8
type_value6 = response_doc.get('type', None)
if type_value6 is not None:
type_instance6 = type_value6
virtual_machine_instance.type = type_instance6
location_value2 = response_doc.get('location', None)
if location_value2 is not None:
location_instance2 = location_value2
virtual_machine_instance.location = location_instance2
tags_sequence_element2 = response_doc.get('tags', None)
if tags_sequence_element2 is not None:
                    for tags_key2, tags_value2 in tags_sequence_element2.items():
                        virtual_machine_instance.tags[tags_key2] = tags_value2
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def get_with_instance_view(self, resource_group_name, vm_name):
"""
The operation to get a virtual machine along with its instance view.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
VirtualMachineGetResponse: The GetVM operation response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
query_parameters = []
query_parameters.append('$expand=instanceView')
query_parameters.append('api-version=2015-05-01-preview')
        if query_parameters:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
        # Trim the '/' character from the end of base_url and the beginning of url.
        if base_url.endswith('/'):
            base_url = base_url[:-1]
        if url.startswith('/'):
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
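        # Resulting URL shape, assuming the usual ARM endpoint as base_uri
        # (illustrative values):
        #   https://management.azure.com/subscriptions/{subscription-id}
        #     /resourceGroups/{resource-group}/providers/Microsoft.Compute
        #     /virtualMachines/{vm-name}?$expand=instanceView&api-version=2015-05-01-preview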
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
            raise AzureHttpError(body, status_code)
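        # (Non-200 responses never reach this point: they are raised above as
        # AzureHttpError carrying the raw response body and HTTP status code.)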
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineGetResponse()
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
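            # The deserialization below uses one guarded-copy pattern throughout:
            # every JSON field is read with dict.get(key, None) and copied onto the
            # model only when present, so absent fields keep the model's defaults.
            # The recurring shape, sketched:
            #
            #     value = doc.get('someField', None)
            #     if value is not None:
            #         model.some_field = value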
if response_doc is not None:
virtual_machine_instance = VirtualMachine(extensions=[], tags={})
result.virtual_machine = virtual_machine_instance
plan_value = response_doc.get('plan', None)
if plan_value is not None:
plan_instance = Plan()
virtual_machine_instance.plan = plan_instance
name_value = plan_value.get('name', None)
if name_value is not None:
name_instance = name_value
plan_instance.name = name_instance
publisher_value = plan_value.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
plan_instance.publisher = publisher_instance
product_value = plan_value.get('product', None)
if product_value is not None:
product_instance = product_value
plan_instance.product = product_instance
promotion_code_value = plan_value.get('promotionCode', None)
if promotion_code_value is not None:
promotion_code_instance = promotion_code_value
plan_instance.promotion_code = promotion_code_instance
properties_value = response_doc.get('properties', None)
if properties_value is not None:
hardware_profile_value = properties_value.get('hardwareProfile', None)
if hardware_profile_value is not None:
hardware_profile_instance = HardwareProfile()
virtual_machine_instance.hardware_profile = hardware_profile_instance
vm_size_value = hardware_profile_value.get('vmSize', None)
if vm_size_value is not None:
vm_size_instance = vm_size_value
hardware_profile_instance.virtual_machine_size = vm_size_instance
storage_profile_value = properties_value.get('storageProfile', None)
if storage_profile_value is not None:
storage_profile_instance = StorageProfile(data_disks=[])
virtual_machine_instance.storage_profile = storage_profile_instance
image_reference_value = storage_profile_value.get('imageReference', None)
if image_reference_value is not None:
image_reference_instance = ImageReference()
storage_profile_instance.image_reference = image_reference_instance
publisher_value2 = image_reference_value.get('publisher', None)
if publisher_value2 is not None:
publisher_instance2 = publisher_value2
image_reference_instance.publisher = publisher_instance2
offer_value = image_reference_value.get('offer', None)
if offer_value is not None:
offer_instance = offer_value
image_reference_instance.offer = offer_instance
sku_value = image_reference_value.get('sku', None)
if sku_value is not None:
sku_instance = sku_value
image_reference_instance.sku = sku_instance
version_value = image_reference_value.get('version', None)
if version_value is not None:
version_instance = version_value
image_reference_instance.version = version_instance
source_image_value = storage_profile_value.get('sourceImage', None)
if source_image_value is not None:
source_image_instance = SourceImageReference()
storage_profile_instance.source_image = source_image_instance
id_value = source_image_value.get('id', None)
if id_value is not None:
id_instance = id_value
source_image_instance.reference_uri = id_instance
os_disk_value = storage_profile_value.get('osDisk', None)
if os_disk_value is not None:
os_disk_instance = OSDisk()
storage_profile_instance.os_disk = os_disk_instance
os_type_value = os_disk_value.get('osType', None)
if os_type_value is not None:
os_type_instance = os_type_value
os_disk_instance.operating_system_type = os_type_instance
name_value2 = os_disk_value.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
os_disk_instance.name = name_instance2
vhd_value = os_disk_value.get('vhd', None)
if vhd_value is not None:
vhd_instance = VirtualHardDisk()
os_disk_instance.virtual_hard_disk = vhd_instance
uri_value = vhd_value.get('uri', None)
if uri_value is not None:
uri_instance = uri_value
vhd_instance.uri = uri_instance
image_value = os_disk_value.get('image', None)
if image_value is not None:
image_instance = VirtualHardDisk()
os_disk_instance.source_image = image_instance
uri_value2 = image_value.get('uri', None)
if uri_value2 is not None:
uri_instance2 = uri_value2
image_instance.uri = uri_instance2
caching_value = os_disk_value.get('caching', None)
if caching_value is not None:
caching_instance = caching_value
os_disk_instance.caching = caching_instance
create_option_value = os_disk_value.get('createOption', None)
if create_option_value is not None:
create_option_instance = create_option_value
os_disk_instance.create_option = create_option_instance
data_disks_array = storage_profile_value.get('dataDisks', None)
if data_disks_array is not None:
for data_disks_value in data_disks_array:
data_disk_instance = DataDisk()
storage_profile_instance.data_disks.append(data_disk_instance)
lun_value = data_disks_value.get('lun', None)
if lun_value is not None:
lun_instance = lun_value
data_disk_instance.lun = lun_instance
disk_size_gb_value = data_disks_value.get('diskSizeGB', None)
if disk_size_gb_value is not None:
disk_size_gb_instance = disk_size_gb_value
data_disk_instance.disk_size_gb = disk_size_gb_instance
name_value3 = data_disks_value.get('name', None)
if name_value3 is not None:
name_instance3 = name_value3
data_disk_instance.name = name_instance3
vhd_value2 = data_disks_value.get('vhd', None)
if vhd_value2 is not None:
vhd_instance2 = VirtualHardDisk()
data_disk_instance.virtual_hard_disk = vhd_instance2
uri_value3 = vhd_value2.get('uri', None)
if uri_value3 is not None:
uri_instance3 = uri_value3
vhd_instance2.uri = uri_instance3
image_value2 = data_disks_value.get('image', None)
if image_value2 is not None:
image_instance2 = VirtualHardDisk()
data_disk_instance.source_image = image_instance2
uri_value4 = image_value2.get('uri', None)
if uri_value4 is not None:
uri_instance4 = uri_value4
image_instance2.uri = uri_instance4
caching_value2 = data_disks_value.get('caching', None)
if caching_value2 is not None:
caching_instance2 = caching_value2
data_disk_instance.caching = caching_instance2
create_option_value2 = data_disks_value.get('createOption', None)
if create_option_value2 is not None:
create_option_instance2 = create_option_value2
data_disk_instance.create_option = create_option_instance2
os_profile_value = properties_value.get('osProfile', None)
if os_profile_value is not None:
os_profile_instance = OSProfile(secrets=[])
virtual_machine_instance.os_profile = os_profile_instance
computer_name_value = os_profile_value.get('computerName', None)
if computer_name_value is not None:
computer_name_instance = computer_name_value
os_profile_instance.computer_name = computer_name_instance
admin_username_value = os_profile_value.get('adminUsername', None)
if admin_username_value is not None:
admin_username_instance = admin_username_value
os_profile_instance.admin_username = admin_username_instance
admin_password_value = os_profile_value.get('adminPassword', None)
if admin_password_value is not None:
admin_password_instance = admin_password_value
os_profile_instance.admin_password = admin_password_instance
custom_data_value = os_profile_value.get('customData', None)
if custom_data_value is not None:
custom_data_instance = custom_data_value
os_profile_instance.custom_data = custom_data_instance
windows_configuration_value = os_profile_value.get('windowsConfiguration', None)
if windows_configuration_value is not None:
windows_configuration_instance = WindowsConfiguration(additional_unattend_contents=[])
os_profile_instance.windows_configuration = windows_configuration_instance
provision_vm_agent_value = windows_configuration_value.get('provisionVMAgent', None)
if provision_vm_agent_value is not None:
provision_vm_agent_instance = provision_vm_agent_value
windows_configuration_instance.provision_vm_agent = provision_vm_agent_instance
enable_automatic_updates_value = windows_configuration_value.get('enableAutomaticUpdates', None)
if enable_automatic_updates_value is not None:
enable_automatic_updates_instance = enable_automatic_updates_value
windows_configuration_instance.enable_automatic_updates = enable_automatic_updates_instance
time_zone_value = windows_configuration_value.get('timeZone', None)
if time_zone_value is not None:
time_zone_instance = time_zone_value
windows_configuration_instance.time_zone = time_zone_instance
additional_unattend_content_array = windows_configuration_value.get('additionalUnattendContent', None)
if additional_unattend_content_array is not None:
for additional_unattend_content_value in additional_unattend_content_array:
additional_unattend_content_instance = AdditionalUnattendContent()
windows_configuration_instance.additional_unattend_contents.append(additional_unattend_content_instance)
pass_name_value = additional_unattend_content_value.get('passName', None)
if pass_name_value is not None:
pass_name_instance = pass_name_value
additional_unattend_content_instance.pass_name = pass_name_instance
component_name_value = additional_unattend_content_value.get('componentName', None)
if component_name_value is not None:
component_name_instance = component_name_value
additional_unattend_content_instance.component_name = component_name_instance
setting_name_value = additional_unattend_content_value.get('settingName', None)
if setting_name_value is not None:
setting_name_instance = setting_name_value
additional_unattend_content_instance.setting_name = setting_name_instance
content_value = additional_unattend_content_value.get('content', None)
if content_value is not None:
content_instance = content_value
additional_unattend_content_instance.content = content_instance
win_rm_value = windows_configuration_value.get('winRM', None)
if win_rm_value is not None:
win_rm_instance = WinRMConfiguration(listeners=[])
windows_configuration_instance.win_rm_configuration = win_rm_instance
listeners_array = win_rm_value.get('listeners', None)
if listeners_array is not None:
for listeners_value in listeners_array:
win_rm_listener_instance = WinRMListener()
win_rm_instance.listeners.append(win_rm_listener_instance)
protocol_value = listeners_value.get('protocol', None)
if protocol_value is not None:
protocol_instance = protocol_value
win_rm_listener_instance.protocol = protocol_instance
certificate_url_value = listeners_value.get('certificateUrl', None)
if certificate_url_value is not None:
certificate_url_instance = certificate_url_value
win_rm_listener_instance.certificate_url = certificate_url_instance
linux_configuration_value = os_profile_value.get('linuxConfiguration', None)
if linux_configuration_value is not None:
linux_configuration_instance = LinuxConfiguration()
os_profile_instance.linux_configuration = linux_configuration_instance
disable_password_authentication_value = linux_configuration_value.get('disablePasswordAuthentication', None)
if disable_password_authentication_value is not None:
disable_password_authentication_instance = disable_password_authentication_value
linux_configuration_instance.disable_password_authentication = disable_password_authentication_instance
ssh_value = linux_configuration_value.get('ssh', None)
if ssh_value is not None:
ssh_instance = SshConfiguration(public_keys=[])
linux_configuration_instance.ssh_configuration = ssh_instance
public_keys_array = ssh_value.get('publicKeys', None)
if public_keys_array is not None:
for public_keys_value in public_keys_array:
ssh_public_key_instance = SshPublicKey()
ssh_instance.public_keys.append(ssh_public_key_instance)
path_value = public_keys_value.get('path', None)
if path_value is not None:
path_instance = path_value
ssh_public_key_instance.path = path_instance
key_data_value = public_keys_value.get('keyData', None)
if key_data_value is not None:
key_data_instance = key_data_value
ssh_public_key_instance.key_data = key_data_instance
secrets_array = os_profile_value.get('secrets', None)
if secrets_array is not None:
for secrets_value in secrets_array:
vault_secret_group_instance = VaultSecretGroup(vault_certificates=[])
os_profile_instance.secrets.append(vault_secret_group_instance)
source_vault_value = secrets_value.get('sourceVault', None)
if source_vault_value is not None:
source_vault_instance = SourceVaultReference()
vault_secret_group_instance.source_vault = source_vault_instance
id_value2 = source_vault_value.get('id', None)
if id_value2 is not None:
id_instance2 = id_value2
source_vault_instance.reference_uri = id_instance2
vault_certificates_array = secrets_value.get('vaultCertificates', None)
if vault_certificates_array is not None:
for vault_certificates_value in vault_certificates_array:
vault_certificate_instance = VaultCertificate()
vault_secret_group_instance.vault_certificates.append(vault_certificate_instance)
certificate_url_value2 = vault_certificates_value.get('certificateUrl', None)
if certificate_url_value2 is not None:
certificate_url_instance2 = certificate_url_value2
vault_certificate_instance.certificate_url = certificate_url_instance2
certificate_store_value = vault_certificates_value.get('certificateStore', None)
if certificate_store_value is not None:
certificate_store_instance = certificate_store_value
vault_certificate_instance.certificate_store = certificate_store_instance
network_profile_value = properties_value.get('networkProfile', None)
if network_profile_value is not None:
network_profile_instance = NetworkProfile(network_interfaces=[])
virtual_machine_instance.network_profile = network_profile_instance
network_interfaces_array = network_profile_value.get('networkInterfaces', None)
if network_interfaces_array is not None:
for network_interfaces_value in network_interfaces_array:
network_interface_reference_json_instance = NetworkInterfaceReference()
network_profile_instance.network_interfaces.append(network_interface_reference_json_instance)
properties_value2 = network_interfaces_value.get('properties', None)
if properties_value2 is not None:
primary_value = properties_value2.get('primary', None)
if primary_value is not None:
primary_instance = primary_value
network_interface_reference_json_instance.primary = primary_instance
id_value3 = network_interfaces_value.get('id', None)
if id_value3 is not None:
id_instance3 = id_value3
network_interface_reference_json_instance.reference_uri = id_instance3
availability_set_value = properties_value.get('availabilitySet', None)
if availability_set_value is not None:
availability_set_instance = AvailabilitySetReference()
virtual_machine_instance.availability_set_reference = availability_set_instance
id_value4 = availability_set_value.get('id', None)
if id_value4 is not None:
id_instance4 = id_value4
availability_set_instance.reference_uri = id_instance4
provisioning_state_value = properties_value.get('provisioningState', None)
if provisioning_state_value is not None:
provisioning_state_instance = provisioning_state_value
virtual_machine_instance.provisioning_state = provisioning_state_instance
instance_view_value = properties_value.get('instanceView', None)
if instance_view_value is not None:
instance_view_instance = VirtualMachineInstanceView(disks=[], extensions=[], statuses=[])
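                        # List-valued attributes are passed as fresh empty lists at
                        # construction time, presumably so each model instance owns its
                        # own lists instead of sharing mutable defaults.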
virtual_machine_instance.instance_view = instance_view_instance
platform_update_domain_value = instance_view_value.get('platformUpdateDomain', None)
if platform_update_domain_value is not None:
platform_update_domain_instance = platform_update_domain_value
instance_view_instance.platform_update_domain = platform_update_domain_instance
platform_fault_domain_value = instance_view_value.get('platformFaultDomain', None)
if platform_fault_domain_value is not None:
platform_fault_domain_instance = platform_fault_domain_value
instance_view_instance.platform_fault_domain = platform_fault_domain_instance
rdp_thumb_print_value = instance_view_value.get('rdpThumbPrint', None)
if rdp_thumb_print_value is not None:
rdp_thumb_print_instance = rdp_thumb_print_value
instance_view_instance.remote_desktop_thumbprint = rdp_thumb_print_instance
vm_agent_value = instance_view_value.get('vmAgent', None)
if vm_agent_value is not None:
vm_agent_instance = VirtualMachineAgentInstanceView(extension_handlers=[], statuses=[])
instance_view_instance.vm_agent = vm_agent_instance
vm_agent_version_value = vm_agent_value.get('vmAgentVersion', None)
if vm_agent_version_value is not None:
vm_agent_version_instance = vm_agent_version_value
vm_agent_instance.vm_agent_version = vm_agent_version_instance
extension_handlers_array = vm_agent_value.get('extensionHandlers', None)
if extension_handlers_array is not None:
for extension_handlers_value in extension_handlers_array:
virtual_machine_extension_handler_instance_view_instance = VirtualMachineExtensionHandlerInstanceView()
vm_agent_instance.extension_handlers.append(virtual_machine_extension_handler_instance_view_instance)
type_value = extension_handlers_value.get('type', None)
if type_value is not None:
type_instance = type_value
virtual_machine_extension_handler_instance_view_instance.type = type_instance
type_handler_version_value = extension_handlers_value.get('typeHandlerVersion', None)
if type_handler_version_value is not None:
type_handler_version_instance = type_handler_version_value
virtual_machine_extension_handler_instance_view_instance.type_handler_version = type_handler_version_instance
status_value = extension_handlers_value.get('status', None)
if status_value is not None:
status_instance = InstanceViewStatus()
virtual_machine_extension_handler_instance_view_instance.status = status_instance
code_value = status_value.get('code', None)
if code_value is not None:
code_instance = code_value
status_instance.code = code_instance
level_value = status_value.get('level', None)
if level_value is not None:
level_instance = level_value
status_instance.level = level_instance
display_status_value = status_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
status_instance.display_status = display_status_instance
message_value = status_value.get('message', None)
if message_value is not None:
message_instance = message_value
status_instance.message = message_instance
time_value = status_value.get('time', None)
if time_value is not None:
time_instance = time_value
status_instance.time = time_instance
statuses_array = vm_agent_value.get('statuses', None)
if statuses_array is not None:
for statuses_value in statuses_array:
instance_view_status_instance = InstanceViewStatus()
vm_agent_instance.statuses.append(instance_view_status_instance)
code_value2 = statuses_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
instance_view_status_instance.code = code_instance2
level_value2 = statuses_value.get('level', None)
if level_value2 is not None:
level_instance2 = level_value2
instance_view_status_instance.level = level_instance2
display_status_value2 = statuses_value.get('displayStatus', None)
if display_status_value2 is not None:
display_status_instance2 = display_status_value2
instance_view_status_instance.display_status = display_status_instance2
message_value2 = statuses_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
instance_view_status_instance.message = message_instance2
time_value2 = statuses_value.get('time', None)
if time_value2 is not None:
time_instance2 = time_value2
instance_view_status_instance.time = time_instance2
disks_array = instance_view_value.get('disks', None)
if disks_array is not None:
for disks_value in disks_array:
disk_instance_view_instance = DiskInstanceView(statuses=[])
instance_view_instance.disks.append(disk_instance_view_instance)
name_value4 = disks_value.get('name', None)
if name_value4 is not None:
name_instance4 = name_value4
disk_instance_view_instance.name = name_instance4
statuses_array2 = disks_value.get('statuses', None)
if statuses_array2 is not None:
for statuses_value2 in statuses_array2:
instance_view_status_instance2 = InstanceViewStatus()
disk_instance_view_instance.statuses.append(instance_view_status_instance2)
code_value3 = statuses_value2.get('code', None)
if code_value3 is not None:
code_instance3 = code_value3
instance_view_status_instance2.code = code_instance3
level_value3 = statuses_value2.get('level', None)
if level_value3 is not None:
level_instance3 = level_value3
instance_view_status_instance2.level = level_instance3
display_status_value3 = statuses_value2.get('displayStatus', None)
if display_status_value3 is not None:
display_status_instance3 = display_status_value3
instance_view_status_instance2.display_status = display_status_instance3
message_value3 = statuses_value2.get('message', None)
if message_value3 is not None:
message_instance3 = message_value3
instance_view_status_instance2.message = message_instance3
time_value3 = statuses_value2.get('time', None)
if time_value3 is not None:
time_instance3 = time_value3
instance_view_status_instance2.time = time_instance3
extensions_array = instance_view_value.get('extensions', None)
if extensions_array is not None:
for extensions_value in extensions_array:
virtual_machine_extension_instance_view_instance = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
instance_view_instance.extensions.append(virtual_machine_extension_instance_view_instance)
name_value5 = extensions_value.get('name', None)
if name_value5 is not None:
name_instance5 = name_value5
virtual_machine_extension_instance_view_instance.name = name_instance5
type_value2 = extensions_value.get('type', None)
if type_value2 is not None:
type_instance2 = type_value2
virtual_machine_extension_instance_view_instance.extension_type = type_instance2
type_handler_version_value2 = extensions_value.get('typeHandlerVersion', None)
if type_handler_version_value2 is not None:
type_handler_version_instance2 = type_handler_version_value2
virtual_machine_extension_instance_view_instance.type_handler_version = type_handler_version_instance2
substatuses_array = extensions_value.get('substatuses', None)
if substatuses_array is not None:
for substatuses_value in substatuses_array:
instance_view_status_instance3 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.sub_statuses.append(instance_view_status_instance3)
code_value4 = substatuses_value.get('code', None)
if code_value4 is not None:
code_instance4 = code_value4
instance_view_status_instance3.code = code_instance4
level_value4 = substatuses_value.get('level', None)
if level_value4 is not None:
level_instance4 = level_value4
instance_view_status_instance3.level = level_instance4
display_status_value4 = substatuses_value.get('displayStatus', None)
if display_status_value4 is not None:
display_status_instance4 = display_status_value4
instance_view_status_instance3.display_status = display_status_instance4
message_value4 = substatuses_value.get('message', None)
if message_value4 is not None:
message_instance4 = message_value4
instance_view_status_instance3.message = message_instance4
time_value4 = substatuses_value.get('time', None)
if time_value4 is not None:
time_instance4 = time_value4
instance_view_status_instance3.time = time_instance4
statuses_array3 = extensions_value.get('statuses', None)
if statuses_array3 is not None:
for statuses_value3 in statuses_array3:
instance_view_status_instance4 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.statuses.append(instance_view_status_instance4)
code_value5 = statuses_value3.get('code', None)
if code_value5 is not None:
code_instance5 = code_value5
instance_view_status_instance4.code = code_instance5
level_value5 = statuses_value3.get('level', None)
if level_value5 is not None:
level_instance5 = level_value5
instance_view_status_instance4.level = level_instance5
display_status_value5 = statuses_value3.get('displayStatus', None)
if display_status_value5 is not None:
display_status_instance5 = display_status_value5
instance_view_status_instance4.display_status = display_status_instance5
message_value5 = statuses_value3.get('message', None)
if message_value5 is not None:
message_instance5 = message_value5
instance_view_status_instance4.message = message_instance5
time_value5 = statuses_value3.get('time', None)
if time_value5 is not None:
time_instance5 = time_value5
instance_view_status_instance4.time = time_instance5
statuses_array4 = instance_view_value.get('statuses', None)
if statuses_array4 is not None:
for statuses_value4 in statuses_array4:
instance_view_status_instance5 = InstanceViewStatus()
instance_view_instance.statuses.append(instance_view_status_instance5)
code_value6 = statuses_value4.get('code', None)
if code_value6 is not None:
code_instance6 = code_value6
instance_view_status_instance5.code = code_instance6
level_value6 = statuses_value4.get('level', None)
if level_value6 is not None:
level_instance6 = level_value6
instance_view_status_instance5.level = level_instance6
display_status_value6 = statuses_value4.get('displayStatus', None)
if display_status_value6 is not None:
display_status_instance6 = display_status_value6
instance_view_status_instance5.display_status = display_status_instance6
message_value6 = statuses_value4.get('message', None)
if message_value6 is not None:
message_instance6 = message_value6
instance_view_status_instance5.message = message_instance6
time_value6 = statuses_value4.get('time', None)
if time_value6 is not None:
time_instance6 = time_value6
instance_view_status_instance5.time = time_instance6
resources_array = response_doc.get('resources', None)
if resources_array is not None:
virtual_machine_instance.extensions = []
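                    # VM extensions arrive as child resources in the top-level
                    # 'resources' array and are re-exposed on the model's
                    # extensions list.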
for resources_value in resources_array:
virtual_machine_extension_json_instance = VirtualMachineExtension(tags={})
virtual_machine_instance.extensions.append(virtual_machine_extension_json_instance)
properties_value3 = resources_value.get('properties', None)
if properties_value3 is not None:
publisher_value3 = properties_value3.get('publisher', None)
if publisher_value3 is not None:
publisher_instance3 = publisher_value3
virtual_machine_extension_json_instance.publisher = publisher_instance3
type_value3 = properties_value3.get('type', None)
if type_value3 is not None:
type_instance3 = type_value3
virtual_machine_extension_json_instance.extension_type = type_instance3
type_handler_version_value3 = properties_value3.get('typeHandlerVersion', None)
if type_handler_version_value3 is not None:
type_handler_version_instance3 = type_handler_version_value3
virtual_machine_extension_json_instance.type_handler_version = type_handler_version_instance3
auto_upgrade_minor_version_value = properties_value3.get('autoUpgradeMinorVersion', None)
if auto_upgrade_minor_version_value is not None:
auto_upgrade_minor_version_instance = auto_upgrade_minor_version_value
virtual_machine_extension_json_instance.auto_upgrade_minor_version = auto_upgrade_minor_version_instance
settings_value = properties_value3.get('settings', None)
if settings_value is not None:
settings_instance = json.dumps(settings_value)
virtual_machine_extension_json_instance.settings = settings_instance
protected_settings_value = properties_value3.get('protectedSettings', None)
if protected_settings_value is not None:
protected_settings_instance = json.dumps(protected_settings_value)
virtual_machine_extension_json_instance.protected_settings = protected_settings_instance
provisioning_state_value2 = properties_value3.get('provisioningState', None)
if provisioning_state_value2 is not None:
provisioning_state_instance2 = provisioning_state_value2
virtual_machine_extension_json_instance.provisioning_state = provisioning_state_instance2
instance_view_value2 = properties_value3.get('instanceView', None)
if instance_view_value2 is not None:
instance_view_instance2 = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
virtual_machine_extension_json_instance.instance_view = instance_view_instance2
name_value6 = instance_view_value2.get('name', None)
if name_value6 is not None:
name_instance6 = name_value6
instance_view_instance2.name = name_instance6
type_value4 = instance_view_value2.get('type', None)
if type_value4 is not None:
type_instance4 = type_value4
instance_view_instance2.extension_type = type_instance4
type_handler_version_value4 = instance_view_value2.get('typeHandlerVersion', None)
if type_handler_version_value4 is not None:
type_handler_version_instance4 = type_handler_version_value4
instance_view_instance2.type_handler_version = type_handler_version_instance4
substatuses_array2 = instance_view_value2.get('substatuses', None)
if substatuses_array2 is not None:
for substatuses_value2 in substatuses_array2:
instance_view_status_instance6 = InstanceViewStatus()
instance_view_instance2.sub_statuses.append(instance_view_status_instance6)
code_value7 = substatuses_value2.get('code', None)
if code_value7 is not None:
code_instance7 = code_value7
instance_view_status_instance6.code = code_instance7
level_value7 = substatuses_value2.get('level', None)
if level_value7 is not None:
level_instance7 = level_value7
instance_view_status_instance6.level = level_instance7
display_status_value7 = substatuses_value2.get('displayStatus', None)
if display_status_value7 is not None:
display_status_instance7 = display_status_value7
instance_view_status_instance6.display_status = display_status_instance7
message_value7 = substatuses_value2.get('message', None)
if message_value7 is not None:
message_instance7 = message_value7
instance_view_status_instance6.message = message_instance7
time_value7 = substatuses_value2.get('time', None)
if time_value7 is not None:
time_instance7 = time_value7
instance_view_status_instance6.time = time_instance7
statuses_array5 = instance_view_value2.get('statuses', None)
if statuses_array5 is not None:
for statuses_value5 in statuses_array5:
instance_view_status_instance7 = InstanceViewStatus()
instance_view_instance2.statuses.append(instance_view_status_instance7)
code_value8 = statuses_value5.get('code', None)
if code_value8 is not None:
code_instance8 = code_value8
instance_view_status_instance7.code = code_instance8
level_value8 = statuses_value5.get('level', None)
if level_value8 is not None:
level_instance8 = level_value8
instance_view_status_instance7.level = level_instance8
display_status_value8 = statuses_value5.get('displayStatus', None)
if display_status_value8 is not None:
display_status_instance8 = display_status_value8
instance_view_status_instance7.display_status = display_status_instance8
message_value8 = statuses_value5.get('message', None)
if message_value8 is not None:
message_instance8 = message_value8
instance_view_status_instance7.message = message_instance8
time_value8 = statuses_value5.get('time', None)
if time_value8 is not None:
time_instance8 = time_value8
instance_view_status_instance7.time = time_instance8
id_value5 = resources_value.get('id', None)
if id_value5 is not None:
id_instance5 = id_value5
virtual_machine_extension_json_instance.id = id_instance5
name_value7 = resources_value.get('name', None)
if name_value7 is not None:
name_instance7 = name_value7
virtual_machine_extension_json_instance.name = name_instance7
type_value5 = resources_value.get('type', None)
if type_value5 is not None:
type_instance5 = type_value5
virtual_machine_extension_json_instance.type = type_instance5
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_json_instance.location = location_instance
tags_sequence_element = resources_value.get('tags', None)
if tags_sequence_element is not None:
                            for tags_key, tags_value in tags_sequence_element.items():
                                virtual_machine_extension_json_instance.tags[tags_key] = tags_value
id_value6 = response_doc.get('id', None)
if id_value6 is not None:
id_instance6 = id_value6
virtual_machine_instance.id = id_instance6
name_value8 = response_doc.get('name', None)
if name_value8 is not None:
name_instance8 = name_value8
virtual_machine_instance.name = name_instance8
type_value6 = response_doc.get('type', None)
if type_value6 is not None:
type_instance6 = type_value6
virtual_machine_instance.type = type_instance6
location_value2 = response_doc.get('location', None)
if location_value2 is not None:
location_instance2 = location_value2
virtual_machine_instance.location = location_instance2
tags_sequence_element2 = response_doc.get('tags', None)
if tags_sequence_element2 is not None:
                    for tags_key2, tags_value2 in tags_sequence_element2.items():
                        virtual_machine_instance.tags[tags_key2] = tags_value2
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list(self, resource_group_name):
"""
The operation to list virtual machines under a resource group.
Args:
resource_group_name (string): The name of the resource group.
Returns:
VirtualMachineListResponse: The List Virtual Machine operation
response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
        if query_parameters:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
        # Trim the '/' character from the end of base_url and the beginning of url.
        if base_url.endswith('/'):
            base_url = base_url[:-1]
        if url.startswith('/'):
            url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
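        # Resulting URL shape (illustrative values):
        #   {base_uri}/subscriptions/{subscription-id}/resourceGroups/{resource-group}
        #     /providers/Microsoft.Compute/virtualMachines?api-version=2015-05-01-preview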
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
            raise AzureHttpError(body, status_code)
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineListResponse(virtual_machines=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
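            # ARM list responses wrap their results in a {'value': [...]} envelope;
            # each element is deserialized into a VirtualMachine model below.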
if response_doc is not None:
value_array = response_doc.get('value', None)
if value_array is not None:
for value_value in value_array:
virtual_machine_json_instance = VirtualMachine(extensions=[], tags={})
result.virtual_machines.append(virtual_machine_json_instance)
plan_value = value_value.get('plan', None)
if plan_value is not None:
plan_instance = Plan()
virtual_machine_json_instance.plan = plan_instance
name_value = plan_value.get('name', None)
if name_value is not None:
name_instance = name_value
plan_instance.name = name_instance
publisher_value = plan_value.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
plan_instance.publisher = publisher_instance
product_value = plan_value.get('product', None)
if product_value is not None:
product_instance = product_value
plan_instance.product = product_instance
promotion_code_value = plan_value.get('promotionCode', None)
if promotion_code_value is not None:
promotion_code_instance = promotion_code_value
plan_instance.promotion_code = promotion_code_instance
properties_value = value_value.get('properties', None)
if properties_value is not None:
hardware_profile_value = properties_value.get('hardwareProfile', None)
if hardware_profile_value is not None:
hardware_profile_instance = HardwareProfile()
virtual_machine_json_instance.hardware_profile = hardware_profile_instance
vm_size_value = hardware_profile_value.get('vmSize', None)
if vm_size_value is not None:
vm_size_instance = vm_size_value
hardware_profile_instance.virtual_machine_size = vm_size_instance
storage_profile_value = properties_value.get('storageProfile', None)
if storage_profile_value is not None:
storage_profile_instance = StorageProfile(data_disks=[])
virtual_machine_json_instance.storage_profile = storage_profile_instance
image_reference_value = storage_profile_value.get('imageReference', None)
if image_reference_value is not None:
image_reference_instance = ImageReference()
storage_profile_instance.image_reference = image_reference_instance
publisher_value2 = image_reference_value.get('publisher', None)
if publisher_value2 is not None:
publisher_instance2 = publisher_value2
image_reference_instance.publisher = publisher_instance2
offer_value = image_reference_value.get('offer', None)
if offer_value is not None:
offer_instance = offer_value
image_reference_instance.offer = offer_instance
sku_value = image_reference_value.get('sku', None)
if sku_value is not None:
sku_instance = sku_value
image_reference_instance.sku = sku_instance
version_value = image_reference_value.get('version', None)
if version_value is not None:
version_instance = version_value
image_reference_instance.version = version_instance
source_image_value = storage_profile_value.get('sourceImage', None)
if source_image_value is not None:
source_image_instance = SourceImageReference()
storage_profile_instance.source_image = source_image_instance
id_value = source_image_value.get('id', None)
if id_value is not None:
id_instance = id_value
source_image_instance.reference_uri = id_instance
os_disk_value = storage_profile_value.get('osDisk', None)
if os_disk_value is not None:
os_disk_instance = OSDisk()
storage_profile_instance.os_disk = os_disk_instance
os_type_value = os_disk_value.get('osType', None)
if os_type_value is not None:
os_type_instance = os_type_value
os_disk_instance.operating_system_type = os_type_instance
name_value2 = os_disk_value.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
os_disk_instance.name = name_instance2
vhd_value = os_disk_value.get('vhd', None)
if vhd_value is not None:
vhd_instance = VirtualHardDisk()
os_disk_instance.virtual_hard_disk = vhd_instance
uri_value = vhd_value.get('uri', None)
if uri_value is not None:
uri_instance = uri_value
vhd_instance.uri = uri_instance
image_value = os_disk_value.get('image', None)
if image_value is not None:
image_instance = VirtualHardDisk()
os_disk_instance.source_image = image_instance
uri_value2 = image_value.get('uri', None)
if uri_value2 is not None:
uri_instance2 = uri_value2
image_instance.uri = uri_instance2
caching_value = os_disk_value.get('caching', None)
if caching_value is not None:
caching_instance = caching_value
os_disk_instance.caching = caching_instance
create_option_value = os_disk_value.get('createOption', None)
if create_option_value is not None:
create_option_instance = create_option_value
os_disk_instance.create_option = create_option_instance
data_disks_array = storage_profile_value.get('dataDisks', None)
if data_disks_array is not None:
for data_disks_value in data_disks_array:
data_disk_instance = DataDisk()
storage_profile_instance.data_disks.append(data_disk_instance)
lun_value = data_disks_value.get('lun', None)
if lun_value is not None:
lun_instance = lun_value
data_disk_instance.lun = lun_instance
disk_size_gb_value = data_disks_value.get('diskSizeGB', None)
if disk_size_gb_value is not None:
disk_size_gb_instance = disk_size_gb_value
data_disk_instance.disk_size_gb = disk_size_gb_instance
name_value3 = data_disks_value.get('name', None)
if name_value3 is not None:
name_instance3 = name_value3
data_disk_instance.name = name_instance3
vhd_value2 = data_disks_value.get('vhd', None)
if vhd_value2 is not None:
vhd_instance2 = VirtualHardDisk()
data_disk_instance.virtual_hard_disk = vhd_instance2
uri_value3 = vhd_value2.get('uri', None)
if uri_value3 is not None:
uri_instance3 = uri_value3
vhd_instance2.uri = uri_instance3
image_value2 = data_disks_value.get('image', None)
if image_value2 is not None:
image_instance2 = VirtualHardDisk()
data_disk_instance.source_image = image_instance2
uri_value4 = image_value2.get('uri', None)
if uri_value4 is not None:
uri_instance4 = uri_value4
image_instance2.uri = uri_instance4
caching_value2 = data_disks_value.get('caching', None)
if caching_value2 is not None:
caching_instance2 = caching_value2
data_disk_instance.caching = caching_instance2
create_option_value2 = data_disks_value.get('createOption', None)
if create_option_value2 is not None:
create_option_instance2 = create_option_value2
data_disk_instance.create_option = create_option_instance2
os_profile_value = properties_value.get('osProfile', None)
if os_profile_value is not None:
os_profile_instance = OSProfile(secrets=[])
virtual_machine_json_instance.os_profile = os_profile_instance
computer_name_value = os_profile_value.get('computerName', None)
if computer_name_value is not None:
computer_name_instance = computer_name_value
os_profile_instance.computer_name = computer_name_instance
admin_username_value = os_profile_value.get('adminUsername', None)
if admin_username_value is not None:
admin_username_instance = admin_username_value
os_profile_instance.admin_username = admin_username_instance
admin_password_value = os_profile_value.get('adminPassword', None)
if admin_password_value is not None:
admin_password_instance = admin_password_value
os_profile_instance.admin_password = admin_password_instance
custom_data_value = os_profile_value.get('customData', None)
if custom_data_value is not None:
custom_data_instance = custom_data_value
os_profile_instance.custom_data = custom_data_instance
windows_configuration_value = os_profile_value.get('windowsConfiguration', None)
if windows_configuration_value is not None:
windows_configuration_instance = WindowsConfiguration(additional_unattend_contents=[])
os_profile_instance.windows_configuration = windows_configuration_instance
provision_vm_agent_value = windows_configuration_value.get('provisionVMAgent', None)
if provision_vm_agent_value is not None:
provision_vm_agent_instance = provision_vm_agent_value
windows_configuration_instance.provision_vm_agent = provision_vm_agent_instance
enable_automatic_updates_value = windows_configuration_value.get('enableAutomaticUpdates', None)
if enable_automatic_updates_value is not None:
enable_automatic_updates_instance = enable_automatic_updates_value
windows_configuration_instance.enable_automatic_updates = enable_automatic_updates_instance
time_zone_value = windows_configuration_value.get('timeZone', None)
if time_zone_value is not None:
time_zone_instance = time_zone_value
windows_configuration_instance.time_zone = time_zone_instance
additional_unattend_content_array = windows_configuration_value.get('additionalUnattendContent', None)
if additional_unattend_content_array is not None:
for additional_unattend_content_value in additional_unattend_content_array:
additional_unattend_content_instance = AdditionalUnattendContent()
windows_configuration_instance.additional_unattend_contents.append(additional_unattend_content_instance)
pass_name_value = additional_unattend_content_value.get('passName', None)
if pass_name_value is not None:
pass_name_instance = pass_name_value
additional_unattend_content_instance.pass_name = pass_name_instance
component_name_value = additional_unattend_content_value.get('componentName', None)
if component_name_value is not None:
component_name_instance = component_name_value
additional_unattend_content_instance.component_name = component_name_instance
setting_name_value = additional_unattend_content_value.get('settingName', None)
if setting_name_value is not None:
setting_name_instance = setting_name_value
additional_unattend_content_instance.setting_name = setting_name_instance
content_value = additional_unattend_content_value.get('content', None)
if content_value is not None:
content_instance = content_value
additional_unattend_content_instance.content = content_instance
win_rm_value = windows_configuration_value.get('winRM', None)
if win_rm_value is not None:
win_rm_instance = WinRMConfiguration(listeners=[])
windows_configuration_instance.win_rm_configuration = win_rm_instance
listeners_array = win_rm_value.get('listeners', None)
if listeners_array is not None:
for listeners_value in listeners_array:
win_rm_listener_instance = WinRMListener()
win_rm_instance.listeners.append(win_rm_listener_instance)
protocol_value = listeners_value.get('protocol', None)
if protocol_value is not None:
protocol_instance = protocol_value
win_rm_listener_instance.protocol = protocol_instance
certificate_url_value = listeners_value.get('certificateUrl', None)
if certificate_url_value is not None:
certificate_url_instance = certificate_url_value
win_rm_listener_instance.certificate_url = certificate_url_instance
linux_configuration_value = os_profile_value.get('linuxConfiguration', None)
if linux_configuration_value is not None:
linux_configuration_instance = LinuxConfiguration()
os_profile_instance.linux_configuration = linux_configuration_instance
disable_password_authentication_value = linux_configuration_value.get('disablePasswordAuthentication', None)
if disable_password_authentication_value is not None:
disable_password_authentication_instance = disable_password_authentication_value
linux_configuration_instance.disable_password_authentication = disable_password_authentication_instance
ssh_value = linux_configuration_value.get('ssh', None)
if ssh_value is not None:
ssh_instance = SshConfiguration(public_keys=[])
linux_configuration_instance.ssh_configuration = ssh_instance
public_keys_array = ssh_value.get('publicKeys', None)
if public_keys_array is not None:
for public_keys_value in public_keys_array:
ssh_public_key_instance = SshPublicKey()
ssh_instance.public_keys.append(ssh_public_key_instance)
path_value = public_keys_value.get('path', None)
if path_value is not None:
path_instance = path_value
ssh_public_key_instance.path = path_instance
key_data_value = public_keys_value.get('keyData', None)
if key_data_value is not None:
key_data_instance = key_data_value
ssh_public_key_instance.key_data = key_data_instance
secrets_array = os_profile_value.get('secrets', None)
if secrets_array is not None:
for secrets_value in secrets_array:
vault_secret_group_instance = VaultSecretGroup(vault_certificates=[])
os_profile_instance.secrets.append(vault_secret_group_instance)
source_vault_value = secrets_value.get('sourceVault', None)
if source_vault_value is not None:
source_vault_instance = SourceVaultReference()
vault_secret_group_instance.source_vault = source_vault_instance
id_value2 = source_vault_value.get('id', None)
if id_value2 is not None:
id_instance2 = id_value2
source_vault_instance.reference_uri = id_instance2
vault_certificates_array = secrets_value.get('vaultCertificates', None)
if vault_certificates_array is not None:
for vault_certificates_value in vault_certificates_array:
vault_certificate_instance = VaultCertificate()
vault_secret_group_instance.vault_certificates.append(vault_certificate_instance)
certificate_url_value2 = vault_certificates_value.get('certificateUrl', None)
if certificate_url_value2 is not None:
certificate_url_instance2 = certificate_url_value2
vault_certificate_instance.certificate_url = certificate_url_instance2
certificate_store_value = vault_certificates_value.get('certificateStore', None)
if certificate_store_value is not None:
certificate_store_instance = certificate_store_value
vault_certificate_instance.certificate_store = certificate_store_instance
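# Deserialize the network profile: NIC references by Azure resource id,
# including the 'primary' flag on each interface.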
network_profile_value = properties_value.get('networkProfile', None)
if network_profile_value is not None:
network_profile_instance = NetworkProfile(network_interfaces=[])
virtual_machine_json_instance.network_profile = network_profile_instance
network_interfaces_array = network_profile_value.get('networkInterfaces', None)
if network_interfaces_array is not None:
for network_interfaces_value in network_interfaces_array:
network_interface_reference_json_instance = NetworkInterfaceReference()
network_profile_instance.network_interfaces.append(network_interface_reference_json_instance)
properties_value2 = network_interfaces_value.get('properties', None)
if properties_value2 is not None:
primary_value = properties_value2.get('primary', None)
if primary_value is not None:
primary_instance = primary_value
network_interface_reference_json_instance.primary = primary_instance
id_value3 = network_interfaces_value.get('id', None)
if id_value3 is not None:
id_instance3 = id_value3
network_interface_reference_json_instance.reference_uri = id_instance3
availability_set_value = properties_value.get('availabilitySet', None)
if availability_set_value is not None:
availability_set_instance = AvailabilitySetReference()
virtual_machine_json_instance.availability_set_reference = availability_set_instance
id_value4 = availability_set_value.get('id', None)
if id_value4 is not None:
id_instance4 = id_value4
availability_set_instance.reference_uri = id_instance4
provisioning_state_value = properties_value.get('provisioningState', None)
if provisioning_state_value is not None:
provisioning_state_instance = provisioning_state_value
virtual_machine_json_instance.provisioning_state = provisioning_state_instance
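# Deserialize the instance view: runtime status of the VM, its agent,
# disks, and installed extensions.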
instance_view_value = properties_value.get('instanceView', None)
if instance_view_value is not None:
instance_view_instance = VirtualMachineInstanceView(disks=[], extensions=[], statuses=[])
virtual_machine_json_instance.instance_view = instance_view_instance
platform_update_domain_value = instance_view_value.get('platformUpdateDomain', None)
if platform_update_domain_value is not None:
platform_update_domain_instance = platform_update_domain_value
instance_view_instance.platform_update_domain = platform_update_domain_instance
platform_fault_domain_value = instance_view_value.get('platformFaultDomain', None)
if platform_fault_domain_value is not None:
platform_fault_domain_instance = platform_fault_domain_value
instance_view_instance.platform_fault_domain = platform_fault_domain_instance
rdp_thumb_print_value = instance_view_value.get('rdpThumbPrint', None)
if rdp_thumb_print_value is not None:
rdp_thumb_print_instance = rdp_thumb_print_value
instance_view_instance.remote_desktop_thumbprint = rdp_thumb_print_instance
vm_agent_value = instance_view_value.get('vmAgent', None)
if vm_agent_value is not None:
vm_agent_instance = VirtualMachineAgentInstanceView(extension_handlers=[], statuses=[])
instance_view_instance.vm_agent = vm_agent_instance
vm_agent_version_value = vm_agent_value.get('vmAgentVersion', None)
if vm_agent_version_value is not None:
vm_agent_version_instance = vm_agent_version_value
vm_agent_instance.vm_agent_version = vm_agent_version_instance
extension_handlers_array = vm_agent_value.get('extensionHandlers', None)
if extension_handlers_array is not None:
for extension_handlers_value in extension_handlers_array:
virtual_machine_extension_handler_instance_view_instance = VirtualMachineExtensionHandlerInstanceView()
vm_agent_instance.extension_handlers.append(virtual_machine_extension_handler_instance_view_instance)
type_value = extension_handlers_value.get('type', None)
if type_value is not None:
type_instance = type_value
virtual_machine_extension_handler_instance_view_instance.type = type_instance
type_handler_version_value = extension_handlers_value.get('typeHandlerVersion', None)
if type_handler_version_value is not None:
type_handler_version_instance = type_handler_version_value
virtual_machine_extension_handler_instance_view_instance.type_handler_version = type_handler_version_instance
status_value = extension_handlers_value.get('status', None)
if status_value is not None:
status_instance = InstanceViewStatus()
virtual_machine_extension_handler_instance_view_instance.status = status_instance
code_value = status_value.get('code', None)
if code_value is not None:
code_instance = code_value
status_instance.code = code_instance
level_value = status_value.get('level', None)
if level_value is not None:
level_instance = level_value
status_instance.level = level_instance
display_status_value = status_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
status_instance.display_status = display_status_instance
message_value = status_value.get('message', None)
if message_value is not None:
message_instance = message_value
status_instance.message = message_instance
time_value = status_value.get('time', None)
if time_value is not None:
time_instance = time_value
status_instance.time = time_instance
statuses_array = vm_agent_value.get('statuses', None)
if statuses_array is not None:
for statuses_value in statuses_array:
instance_view_status_instance = InstanceViewStatus()
vm_agent_instance.statuses.append(instance_view_status_instance)
code_value2 = statuses_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
instance_view_status_instance.code = code_instance2
level_value2 = statuses_value.get('level', None)
if level_value2 is not None:
level_instance2 = level_value2
instance_view_status_instance.level = level_instance2
display_status_value2 = statuses_value.get('displayStatus', None)
if display_status_value2 is not None:
display_status_instance2 = display_status_value2
instance_view_status_instance.display_status = display_status_instance2
message_value2 = statuses_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
instance_view_status_instance.message = message_instance2
time_value2 = statuses_value.get('time', None)
if time_value2 is not None:
time_instance2 = time_value2
instance_view_status_instance.time = time_instance2
disks_array = instance_view_value.get('disks', None)
if disks_array is not None:
for disks_value in disks_array:
disk_instance_view_instance = DiskInstanceView(statuses=[])
instance_view_instance.disks.append(disk_instance_view_instance)
name_value4 = disks_value.get('name', None)
if name_value4 is not None:
name_instance4 = name_value4
disk_instance_view_instance.name = name_instance4
statuses_array2 = disks_value.get('statuses', None)
if statuses_array2 is not None:
for statuses_value2 in statuses_array2:
instance_view_status_instance2 = InstanceViewStatus()
disk_instance_view_instance.statuses.append(instance_view_status_instance2)
code_value3 = statuses_value2.get('code', None)
if code_value3 is not None:
code_instance3 = code_value3
instance_view_status_instance2.code = code_instance3
level_value3 = statuses_value2.get('level', None)
if level_value3 is not None:
level_instance3 = level_value3
instance_view_status_instance2.level = level_instance3
display_status_value3 = statuses_value2.get('displayStatus', None)
if display_status_value3 is not None:
display_status_instance3 = display_status_value3
instance_view_status_instance2.display_status = display_status_instance3
message_value3 = statuses_value2.get('message', None)
if message_value3 is not None:
message_instance3 = message_value3
instance_view_status_instance2.message = message_instance3
time_value3 = statuses_value2.get('time', None)
if time_value3 is not None:
time_instance3 = time_value3
instance_view_status_instance2.time = time_instance3
extensions_array = instance_view_value.get('extensions', None)
if extensions_array is not None:
for extensions_value in extensions_array:
virtual_machine_extension_instance_view_instance = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
instance_view_instance.extensions.append(virtual_machine_extension_instance_view_instance)
name_value5 = extensions_value.get('name', None)
if name_value5 is not None:
name_instance5 = name_value5
virtual_machine_extension_instance_view_instance.name = name_instance5
type_value2 = extensions_value.get('type', None)
if type_value2 is not None:
type_instance2 = type_value2
virtual_machine_extension_instance_view_instance.extension_type = type_instance2
type_handler_version_value2 = extensions_value.get('typeHandlerVersion', None)
if type_handler_version_value2 is not None:
type_handler_version_instance2 = type_handler_version_value2
virtual_machine_extension_instance_view_instance.type_handler_version = type_handler_version_instance2
substatuses_array = extensions_value.get('substatuses', None)
if substatuses_array is not None:
for substatuses_value in substatuses_array:
instance_view_status_instance3 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.sub_statuses.append(instance_view_status_instance3)
code_value4 = substatuses_value.get('code', None)
if code_value4 is not None:
code_instance4 = code_value4
instance_view_status_instance3.code = code_instance4
level_value4 = substatuses_value.get('level', None)
if level_value4 is not None:
level_instance4 = level_value4
instance_view_status_instance3.level = level_instance4
display_status_value4 = substatuses_value.get('displayStatus', None)
if display_status_value4 is not None:
display_status_instance4 = display_status_value4
instance_view_status_instance3.display_status = display_status_instance4
message_value4 = substatuses_value.get('message', None)
if message_value4 is not None:
message_instance4 = message_value4
instance_view_status_instance3.message = message_instance4
time_value4 = substatuses_value.get('time', None)
if time_value4 is not None:
time_instance4 = time_value4
instance_view_status_instance3.time = time_instance4
statuses_array3 = extensions_value.get('statuses', None)
if statuses_array3 is not None:
for statuses_value3 in statuses_array3:
instance_view_status_instance4 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.statuses.append(instance_view_status_instance4)
code_value5 = statuses_value3.get('code', None)
if code_value5 is not None:
code_instance5 = code_value5
instance_view_status_instance4.code = code_instance5
level_value5 = statuses_value3.get('level', None)
if level_value5 is not None:
level_instance5 = level_value5
instance_view_status_instance4.level = level_instance5
display_status_value5 = statuses_value3.get('displayStatus', None)
if display_status_value5 is not None:
display_status_instance5 = display_status_value5
instance_view_status_instance4.display_status = display_status_instance5
message_value5 = statuses_value3.get('message', None)
if message_value5 is not None:
message_instance5 = message_value5
instance_view_status_instance4.message = message_instance5
time_value5 = statuses_value3.get('time', None)
if time_value5 is not None:
time_instance5 = time_value5
instance_view_status_instance4.time = time_instance5
statuses_array4 = instance_view_value.get('statuses', None)
if statuses_array4 is not None:
for statuses_value4 in statuses_array4:
instance_view_status_instance5 = InstanceViewStatus()
instance_view_instance.statuses.append(instance_view_status_instance5)
code_value6 = statuses_value4.get('code', None)
if code_value6 is not None:
code_instance6 = code_value6
instance_view_status_instance5.code = code_instance6
level_value6 = statuses_value4.get('level', None)
if level_value6 is not None:
level_instance6 = level_value6
instance_view_status_instance5.level = level_instance6
display_status_value6 = statuses_value4.get('displayStatus', None)
if display_status_value6 is not None:
display_status_instance6 = display_status_value6
instance_view_status_instance5.display_status = display_status_instance6
message_value6 = statuses_value4.get('message', None)
if message_value6 is not None:
message_instance6 = message_value6
instance_view_status_instance5.message = message_instance6
time_value6 = statuses_value4.get('time', None)
if time_value6 is not None:
time_instance6 = time_value6
instance_view_status_instance5.time = time_instance6
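# 'resources' carries the VM's extension child resources returned inline
# with the virtual machine.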
resources_array = value_value.get('resources', None)
if resources_array is not None:
virtual_machine_json_instance.extensions = []
for resources_value in resources_array:
virtual_machine_extension_json_instance = VirtualMachineExtension(tags={})
virtual_machine_json_instance.extensions.append(virtual_machine_extension_json_instance)
properties_value3 = resources_value.get('properties', None)
if properties_value3 is not None:
publisher_value3 = properties_value3.get('publisher', None)
if publisher_value3 is not None:
publisher_instance3 = publisher_value3
virtual_machine_extension_json_instance.publisher = publisher_instance3
type_value3 = properties_value3.get('type', None)
if type_value3 is not None:
type_instance3 = type_value3
virtual_machine_extension_json_instance.extension_type = type_instance3
type_handler_version_value3 = properties_value3.get('typeHandlerVersion', None)
if type_handler_version_value3 is not None:
type_handler_version_instance3 = type_handler_version_value3
virtual_machine_extension_json_instance.type_handler_version = type_handler_version_instance3
auto_upgrade_minor_version_value = properties_value3.get('autoUpgradeMinorVersion', None)
if auto_upgrade_minor_version_value is not None:
auto_upgrade_minor_version_instance = auto_upgrade_minor_version_value
virtual_machine_extension_json_instance.auto_upgrade_minor_version = auto_upgrade_minor_version_instance
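# settings/protectedSettings arrive as JSON objects; the model stores them
# re-serialized as JSON strings.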
settings_value = properties_value3.get('settings', None)
if settings_value is not None:
settings_instance = json.dumps(settings_value)
virtual_machine_extension_json_instance.settings = settings_instance
protected_settings_value = properties_value3.get('protectedSettings', None)
if protected_settings_value is not None:
protected_settings_instance = json.dumps(protected_settings_value)
virtual_machine_extension_json_instance.protected_settings = protected_settings_instance
provisioning_state_value2 = properties_value3.get('provisioningState', None)
if provisioning_state_value2 is not None:
provisioning_state_instance2 = provisioning_state_value2
virtual_machine_extension_json_instance.provisioning_state = provisioning_state_instance2
instance_view_value2 = properties_value3.get('instanceView', None)
if instance_view_value2 is not None:
instance_view_instance2 = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
virtual_machine_extension_json_instance.instance_view = instance_view_instance2
name_value6 = instance_view_value2.get('name', None)
if name_value6 is not None:
name_instance6 = name_value6
instance_view_instance2.name = name_instance6
type_value4 = instance_view_value2.get('type', None)
if type_value4 is not None:
type_instance4 = type_value4
instance_view_instance2.extension_type = type_instance4
type_handler_version_value4 = instance_view_value2.get('typeHandlerVersion', None)
if type_handler_version_value4 is not None:
type_handler_version_instance4 = type_handler_version_value4
instance_view_instance2.type_handler_version = type_handler_version_instance4
substatuses_array2 = instance_view_value2.get('substatuses', None)
if substatuses_array2 is not None:
for substatuses_value2 in substatuses_array2:
instance_view_status_instance6 = InstanceViewStatus()
instance_view_instance2.sub_statuses.append(instance_view_status_instance6)
code_value7 = substatuses_value2.get('code', None)
if code_value7 is not None:
code_instance7 = code_value7
instance_view_status_instance6.code = code_instance7
level_value7 = substatuses_value2.get('level', None)
if level_value7 is not None:
level_instance7 = level_value7
instance_view_status_instance6.level = level_instance7
display_status_value7 = substatuses_value2.get('displayStatus', None)
if display_status_value7 is not None:
display_status_instance7 = display_status_value7
instance_view_status_instance6.display_status = display_status_instance7
message_value7 = substatuses_value2.get('message', None)
if message_value7 is not None:
message_instance7 = message_value7
instance_view_status_instance6.message = message_instance7
time_value7 = substatuses_value2.get('time', None)
if time_value7 is not None:
time_instance7 = time_value7
instance_view_status_instance6.time = time_instance7
statuses_array5 = instance_view_value2.get('statuses', None)
if statuses_array5 is not None:
for statuses_value5 in statuses_array5:
instance_view_status_instance7 = InstanceViewStatus()
instance_view_instance2.statuses.append(instance_view_status_instance7)
code_value8 = statuses_value5.get('code', None)
if code_value8 is not None:
code_instance8 = code_value8
instance_view_status_instance7.code = code_instance8
level_value8 = statuses_value5.get('level', None)
if level_value8 is not None:
level_instance8 = level_value8
instance_view_status_instance7.level = level_instance8
display_status_value8 = statuses_value5.get('displayStatus', None)
if display_status_value8 is not None:
display_status_instance8 = display_status_value8
instance_view_status_instance7.display_status = display_status_instance8
message_value8 = statuses_value5.get('message', None)
if message_value8 is not None:
message_instance8 = message_value8
instance_view_status_instance7.message = message_instance8
time_value8 = statuses_value5.get('time', None)
if time_value8 is not None:
time_instance8 = time_value8
instance_view_status_instance7.time = time_instance8
id_value5 = resources_value.get('id', None)
if id_value5 is not None:
id_instance5 = id_value5
virtual_machine_extension_json_instance.id = id_instance5
name_value7 = resources_value.get('name', None)
if name_value7 is not None:
name_instance7 = name_value7
virtual_machine_extension_json_instance.name = name_instance7
type_value5 = resources_value.get('type', None)
if type_value5 is not None:
type_instance5 = type_value5
virtual_machine_extension_json_instance.type = type_instance5
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_json_instance.location = location_instance
tags_sequence_element = resources_value.get('tags', None)
if tags_sequence_element is not None:
for tags_key, tags_value in tags_sequence_element.items():
virtual_machine_extension_json_instance.tags[tags_key] = tags_value
id_value6 = value_value.get('id', None)
if id_value6 is not None:
id_instance6 = id_value6
virtual_machine_json_instance.id = id_instance6
name_value8 = value_value.get('name', None)
if name_value8 is not None:
name_instance8 = name_value8
virtual_machine_json_instance.name = name_instance8
type_value6 = value_value.get('type', None)
if type_value6 is not None:
type_instance6 = type_value6
virtual_machine_json_instance.type = type_instance6
location_value2 = value_value.get('location', None)
if location_value2 is not None:
location_instance2 = location_value2
virtual_machine_json_instance.location = location_instance2
tags_sequence_element2 = value_value.get('tags', None)
if tags_sequence_element2 is not None:
for tags_key2, tags_value2 in tags_sequence_element2.items():
virtual_machine_json_instance.tags[tags_key2] = tags_value2
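# Propagate @odata.nextLink so callers can page through the remaining results.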
odatanext_link_value = response_doc.get('@odata.nextLink', None)
if odatanext_link_value is not None:
odatanext_link_instance = odatanext_link_value
result.next_link = odatanext_link_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list_all(self, parameters):
"""
Gets the list of Virtual Machines in the subscription. Use the nextLink
property in the response to get the next page of Virtual Machines;
repeat until nextLink is null to fetch all the Virtual Machines.
Args:
parameters (ListParameters): Parameters for the List operation
(not referenced by this implementation).
Returns:
VirtualMachineListResponse: The List Virtual Machine operation
response.
"""
# Validate
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if query_parameters:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url.endswith('/'):
base_url = base_url[:-1]
if url.startswith('/'):
url = url[1:]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
raise AzureHttpError(body, status_code)
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineListResponse(virtual_machines=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
value_array = response_doc.get('value', None)
if value_array is not None:
for value_value in value_array:
virtual_machine_json_instance = VirtualMachine(extensions=[], tags={})
result.virtual_machines.append(virtual_machine_json_instance)
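# Marketplace purchase plan, present when the VM was created from a
# Marketplace image.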
plan_value = value_value.get('plan', None)
if plan_value is not None:
plan_instance = Plan()
virtual_machine_json_instance.plan = plan_instance
name_value = plan_value.get('name', None)
if name_value is not None:
name_instance = name_value
plan_instance.name = name_instance
publisher_value = plan_value.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
plan_instance.publisher = publisher_instance
product_value = plan_value.get('product', None)
if product_value is not None:
product_instance = product_value
plan_instance.product = product_instance
promotion_code_value = plan_value.get('promotionCode', None)
if promotion_code_value is not None:
promotion_code_instance = promotion_code_value
plan_instance.promotion_code = promotion_code_instance
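# 'properties' nests the hardware, storage, OS, and network profiles plus
# provisioning state and instance view.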
properties_value = value_value.get('properties', None)
if properties_value is not None:
hardware_profile_value = properties_value.get('hardwareProfile', None)
if hardware_profile_value is not None:
hardware_profile_instance = HardwareProfile()
virtual_machine_json_instance.hardware_profile = hardware_profile_instance
vm_size_value = hardware_profile_value.get('vmSize', None)
if vm_size_value is not None:
vm_size_instance = vm_size_value
hardware_profile_instance.virtual_machine_size = vm_size_instance
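# Storage profile: image reference, source image, OS disk, and data disks.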
storage_profile_value = properties_value.get('storageProfile', None)
if storage_profile_value is not None:
storage_profile_instance = StorageProfile(data_disks=[])
virtual_machine_json_instance.storage_profile = storage_profile_instance
image_reference_value = storage_profile_value.get('imageReference', None)
if image_reference_value is not None:
image_reference_instance = ImageReference()
storage_profile_instance.image_reference = image_reference_instance
publisher_value2 = image_reference_value.get('publisher', None)
if publisher_value2 is not None:
publisher_instance2 = publisher_value2
image_reference_instance.publisher = publisher_instance2
offer_value = image_reference_value.get('offer', None)
if offer_value is not None:
offer_instance = offer_value
image_reference_instance.offer = offer_instance
sku_value = image_reference_value.get('sku', None)
if sku_value is not None:
sku_instance = sku_value
image_reference_instance.sku = sku_instance
version_value = image_reference_value.get('version', None)
if version_value is not None:
version_instance = version_value
image_reference_instance.version = version_instance
source_image_value = storage_profile_value.get('sourceImage', None)
if source_image_value is not None:
source_image_instance = SourceImageReference()
storage_profile_instance.source_image = source_image_instance
id_value = source_image_value.get('id', None)
if id_value is not None:
id_instance = id_value
source_image_instance.reference_uri = id_instance
os_disk_value = storage_profile_value.get('osDisk', None)
if os_disk_value is not None:
os_disk_instance = OSDisk()
storage_profile_instance.os_disk = os_disk_instance
os_type_value = os_disk_value.get('osType', None)
if os_type_value is not None:
os_type_instance = os_type_value
os_disk_instance.operating_system_type = os_type_instance
name_value2 = os_disk_value.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
os_disk_instance.name = name_instance2
vhd_value = os_disk_value.get('vhd', None)
if vhd_value is not None:
vhd_instance = VirtualHardDisk()
os_disk_instance.virtual_hard_disk = vhd_instance
uri_value = vhd_value.get('uri', None)
if uri_value is not None:
uri_instance = uri_value
vhd_instance.uri = uri_instance
image_value = os_disk_value.get('image', None)
if image_value is not None:
image_instance = VirtualHardDisk()
os_disk_instance.source_image = image_instance
uri_value2 = image_value.get('uri', None)
if uri_value2 is not None:
uri_instance2 = uri_value2
image_instance.uri = uri_instance2
caching_value = os_disk_value.get('caching', None)
if caching_value is not None:
caching_instance = caching_value
os_disk_instance.caching = caching_instance
create_option_value = os_disk_value.get('createOption', None)
if create_option_value is not None:
create_option_instance = create_option_value
os_disk_instance.create_option = create_option_instance
data_disks_array = storage_profile_value.get('dataDisks', None)
if data_disks_array is not None:
for data_disks_value in data_disks_array:
data_disk_instance = DataDisk()
storage_profile_instance.data_disks.append(data_disk_instance)
lun_value = data_disks_value.get('lun', None)
if lun_value is not None:
lun_instance = lun_value
data_disk_instance.lun = lun_instance
disk_size_gb_value = data_disks_value.get('diskSizeGB', None)
if disk_size_gb_value is not None:
disk_size_gb_instance = disk_size_gb_value
data_disk_instance.disk_size_gb = disk_size_gb_instance
name_value3 = data_disks_value.get('name', None)
if name_value3 is not None:
name_instance3 = name_value3
data_disk_instance.name = name_instance3
vhd_value2 = data_disks_value.get('vhd', None)
if vhd_value2 is not None:
vhd_instance2 = VirtualHardDisk()
data_disk_instance.virtual_hard_disk = vhd_instance2
uri_value3 = vhd_value2.get('uri', None)
if uri_value3 is not None:
uri_instance3 = uri_value3
vhd_instance2.uri = uri_instance3
image_value2 = data_disks_value.get('image', None)
if image_value2 is not None:
image_instance2 = VirtualHardDisk()
data_disk_instance.source_image = image_instance2
uri_value4 = image_value2.get('uri', None)
if uri_value4 is not None:
uri_instance4 = uri_value4
image_instance2.uri = uri_instance4
caching_value2 = data_disks_value.get('caching', None)
if caching_value2 is not None:
caching_instance2 = caching_value2
data_disk_instance.caching = caching_instance2
create_option_value2 = data_disks_value.get('createOption', None)
if create_option_value2 is not None:
create_option_instance2 = create_option_value2
data_disk_instance.create_option = create_option_instance2
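# OS profile: computer name, admin credentials, custom data, per-OS
# configuration, and vault secrets.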
os_profile_value = properties_value.get('osProfile', None)
if os_profile_value is not None:
os_profile_instance = OSProfile(secrets=[])
virtual_machine_json_instance.os_profile = os_profile_instance
computer_name_value = os_profile_value.get('computerName', None)
if computer_name_value is not None:
computer_name_instance = computer_name_value
os_profile_instance.computer_name = computer_name_instance
admin_username_value = os_profile_value.get('adminUsername', None)
if admin_username_value is not None:
admin_username_instance = admin_username_value
os_profile_instance.admin_username = admin_username_instance
admin_password_value = os_profile_value.get('adminPassword', None)
if admin_password_value is not None:
admin_password_instance = admin_password_value
os_profile_instance.admin_password = admin_password_instance
custom_data_value = os_profile_value.get('customData', None)
if custom_data_value is not None:
custom_data_instance = custom_data_value
os_profile_instance.custom_data = custom_data_instance
windows_configuration_value = os_profile_value.get('windowsConfiguration', None)
if windows_configuration_value is not None:
windows_configuration_instance = WindowsConfiguration(additional_unattend_contents=[])
os_profile_instance.windows_configuration = windows_configuration_instance
provision_vm_agent_value = windows_configuration_value.get('provisionVMAgent', None)
if provision_vm_agent_value is not None:
provision_vm_agent_instance = provision_vm_agent_value
windows_configuration_instance.provision_vm_agent = provision_vm_agent_instance
enable_automatic_updates_value = windows_configuration_value.get('enableAutomaticUpdates', None)
if enable_automatic_updates_value is not None:
enable_automatic_updates_instance = enable_automatic_updates_value
windows_configuration_instance.enable_automatic_updates = enable_automatic_updates_instance
time_zone_value = windows_configuration_value.get('timeZone', None)
if time_zone_value is not None:
time_zone_instance = time_zone_value
windows_configuration_instance.time_zone = time_zone_instance
additional_unattend_content_array = windows_configuration_value.get('additionalUnattendContent', None)
if additional_unattend_content_array is not None:
for additional_unattend_content_value in additional_unattend_content_array:
additional_unattend_content_instance = AdditionalUnattendContent()
windows_configuration_instance.additional_unattend_contents.append(additional_unattend_content_instance)
pass_name_value = additional_unattend_content_value.get('passName', None)
if pass_name_value is not None:
pass_name_instance = pass_name_value
additional_unattend_content_instance.pass_name = pass_name_instance
component_name_value = additional_unattend_content_value.get('componentName', None)
if component_name_value is not None:
component_name_instance = component_name_value
additional_unattend_content_instance.component_name = component_name_instance
setting_name_value = additional_unattend_content_value.get('settingName', None)
if setting_name_value is not None:
setting_name_instance = setting_name_value
additional_unattend_content_instance.setting_name = setting_name_instance
content_value = additional_unattend_content_value.get('content', None)
if content_value is not None:
content_instance = content_value
additional_unattend_content_instance.content = content_instance
win_rm_value = windows_configuration_value.get('winRM', None)
if win_rm_value is not None:
win_rm_instance = WinRMConfiguration(listeners=[])
windows_configuration_instance.win_rm_configuration = win_rm_instance
listeners_array = win_rm_value.get('listeners', None)
if listeners_array is not None:
for listeners_value in listeners_array:
win_rm_listener_instance = WinRMListener()
win_rm_instance.listeners.append(win_rm_listener_instance)
protocol_value = listeners_value.get('protocol', None)
if protocol_value is not None:
protocol_instance = protocol_value
win_rm_listener_instance.protocol = protocol_instance
certificate_url_value = listeners_value.get('certificateUrl', None)
if certificate_url_value is not None:
certificate_url_instance = certificate_url_value
win_rm_listener_instance.certificate_url = certificate_url_instance
linux_configuration_value = os_profile_value.get('linuxConfiguration', None)
if linux_configuration_value is not None:
linux_configuration_instance = LinuxConfiguration()
os_profile_instance.linux_configuration = linux_configuration_instance
disable_password_authentication_value = linux_configuration_value.get('disablePasswordAuthentication', None)
if disable_password_authentication_value is not None:
disable_password_authentication_instance = disable_password_authentication_value
linux_configuration_instance.disable_password_authentication = disable_password_authentication_instance
ssh_value = linux_configuration_value.get('ssh', None)
if ssh_value is not None:
ssh_instance = SshConfiguration(public_keys=[])
linux_configuration_instance.ssh_configuration = ssh_instance
public_keys_array = ssh_value.get('publicKeys', None)
if public_keys_array is not None:
for public_keys_value in public_keys_array:
ssh_public_key_instance = SshPublicKey()
ssh_instance.public_keys.append(ssh_public_key_instance)
path_value = public_keys_value.get('path', None)
if path_value is not None:
path_instance = path_value
ssh_public_key_instance.path = path_instance
key_data_value = public_keys_value.get('keyData', None)
if key_data_value is not None:
key_data_instance = key_data_value
ssh_public_key_instance.key_data = key_data_instance
secrets_array = os_profile_value.get('secrets', None)
if secrets_array is not None:
for secrets_value in secrets_array:
vault_secret_group_instance = VaultSecretGroup(vault_certificates=[])
os_profile_instance.secrets.append(vault_secret_group_instance)
source_vault_value = secrets_value.get('sourceVault', None)
if source_vault_value is not None:
source_vault_instance = SourceVaultReference()
vault_secret_group_instance.source_vault = source_vault_instance
id_value2 = source_vault_value.get('id', None)
if id_value2 is not None:
id_instance2 = id_value2
source_vault_instance.reference_uri = id_instance2
vault_certificates_array = secrets_value.get('vaultCertificates', None)
if vault_certificates_array is not None:
for vault_certificates_value in vault_certificates_array:
vault_certificate_instance = VaultCertificate()
vault_secret_group_instance.vault_certificates.append(vault_certificate_instance)
certificate_url_value2 = vault_certificates_value.get('certificateUrl', None)
if certificate_url_value2 is not None:
certificate_url_instance2 = certificate_url_value2
vault_certificate_instance.certificate_url = certificate_url_instance2
certificate_store_value = vault_certificates_value.get('certificateStore', None)
if certificate_store_value is not None:
certificate_store_instance = certificate_store_value
vault_certificate_instance.certificate_store = certificate_store_instance
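# Network profile: network interface references by resource id.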
network_profile_value = properties_value.get('networkProfile', None)
if network_profile_value is not None:
network_profile_instance = NetworkProfile(network_interfaces=[])
virtual_machine_json_instance.network_profile = network_profile_instance
network_interfaces_array = network_profile_value.get('networkInterfaces', None)
if network_interfaces_array is not None:
for network_interfaces_value in network_interfaces_array:
network_interface_reference_json_instance = NetworkInterfaceReference()
network_profile_instance.network_interfaces.append(network_interface_reference_json_instance)
properties_value2 = network_interfaces_value.get('properties', None)
if properties_value2 is not None:
primary_value = properties_value2.get('primary', None)
if primary_value is not None:
primary_instance = primary_value
network_interface_reference_json_instance.primary = primary_instance
id_value3 = network_interfaces_value.get('id', None)
if id_value3 is not None:
id_instance3 = id_value3
network_interface_reference_json_instance.reference_uri = id_instance3
availability_set_value = properties_value.get('availabilitySet', None)
if availability_set_value is not None:
availability_set_instance = AvailabilitySetReference()
virtual_machine_json_instance.availability_set_reference = availability_set_instance
id_value4 = availability_set_value.get('id', None)
if id_value4 is not None:
id_instance4 = id_value4
availability_set_instance.reference_uri = id_instance4
provisioning_state_value = properties_value.get('provisioningState', None)
if provisioning_state_value is not None:
provisioning_state_instance = provisioning_state_value
virtual_machine_json_instance.provisioning_state = provisioning_state_instance
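# Instance view: runtime status of the VM, agent, disks, and extensions.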
instance_view_value = properties_value.get('instanceView', None)
if instance_view_value is not None:
instance_view_instance = VirtualMachineInstanceView(disks=[], extensions=[], statuses=[])
virtual_machine_json_instance.instance_view = instance_view_instance
platform_update_domain_value = instance_view_value.get('platformUpdateDomain', None)
if platform_update_domain_value is not None:
platform_update_domain_instance = platform_update_domain_value
instance_view_instance.platform_update_domain = platform_update_domain_instance
platform_fault_domain_value = instance_view_value.get('platformFaultDomain', None)
if platform_fault_domain_value is not None:
platform_fault_domain_instance = platform_fault_domain_value
instance_view_instance.platform_fault_domain = platform_fault_domain_instance
rdp_thumb_print_value = instance_view_value.get('rdpThumbPrint', None)
if rdp_thumb_print_value is not None:
rdp_thumb_print_instance = rdp_thumb_print_value
instance_view_instance.remote_desktop_thumbprint = rdp_thumb_print_instance
vm_agent_value = instance_view_value.get('vmAgent', None)
if vm_agent_value is not None:
vm_agent_instance = VirtualMachineAgentInstanceView(extension_handlers=[], statuses=[])
instance_view_instance.vm_agent = vm_agent_instance
vm_agent_version_value = vm_agent_value.get('vmAgentVersion', None)
if vm_agent_version_value is not None:
vm_agent_version_instance = vm_agent_version_value
vm_agent_instance.vm_agent_version = vm_agent_version_instance
extension_handlers_array = vm_agent_value.get('extensionHandlers', None)
if extension_handlers_array is not None:
for extension_handlers_value in extension_handlers_array:
virtual_machine_extension_handler_instance_view_instance = VirtualMachineExtensionHandlerInstanceView()
vm_agent_instance.extension_handlers.append(virtual_machine_extension_handler_instance_view_instance)
type_value = extension_handlers_value.get('type', None)
if type_value is not None:
type_instance = type_value
virtual_machine_extension_handler_instance_view_instance.type = type_instance
type_handler_version_value = extension_handlers_value.get('typeHandlerVersion', None)
if type_handler_version_value is not None:
type_handler_version_instance = type_handler_version_value
virtual_machine_extension_handler_instance_view_instance.type_handler_version = type_handler_version_instance
status_value = extension_handlers_value.get('status', None)
if status_value is not None:
status_instance = InstanceViewStatus()
virtual_machine_extension_handler_instance_view_instance.status = status_instance
code_value = status_value.get('code', None)
if code_value is not None:
code_instance = code_value
status_instance.code = code_instance
level_value = status_value.get('level', None)
if level_value is not None:
level_instance = level_value
status_instance.level = level_instance
display_status_value = status_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
status_instance.display_status = display_status_instance
message_value = status_value.get('message', None)
if message_value is not None:
message_instance = message_value
status_instance.message = message_instance
time_value = status_value.get('time', None)
if time_value is not None:
time_instance = time_value
status_instance.time = time_instance
statuses_array = vm_agent_value.get('statuses', None)
if statuses_array is not None:
for statuses_value in statuses_array:
instance_view_status_instance = InstanceViewStatus()
vm_agent_instance.statuses.append(instance_view_status_instance)
code_value2 = statuses_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
instance_view_status_instance.code = code_instance2
level_value2 = statuses_value.get('level', None)
if level_value2 is not None:
level_instance2 = level_value2
instance_view_status_instance.level = level_instance2
display_status_value2 = statuses_value.get('displayStatus', None)
if display_status_value2 is not None:
display_status_instance2 = display_status_value2
instance_view_status_instance.display_status = display_status_instance2
message_value2 = statuses_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
instance_view_status_instance.message = message_instance2
time_value2 = statuses_value.get('time', None)
if time_value2 is not None:
time_instance2 = time_value2
instance_view_status_instance.time = time_instance2
disks_array = instance_view_value.get('disks', None)
if disks_array is not None:
for disks_value in disks_array:
disk_instance_view_instance = DiskInstanceView(statuses=[])
instance_view_instance.disks.append(disk_instance_view_instance)
name_value4 = disks_value.get('name', None)
if name_value4 is not None:
name_instance4 = name_value4
disk_instance_view_instance.name = name_instance4
statuses_array2 = disks_value.get('statuses', None)
if statuses_array2 is not None:
for statuses_value2 in statuses_array2:
instance_view_status_instance2 = InstanceViewStatus()
disk_instance_view_instance.statuses.append(instance_view_status_instance2)
code_value3 = statuses_value2.get('code', None)
if code_value3 is not None:
code_instance3 = code_value3
instance_view_status_instance2.code = code_instance3
level_value3 = statuses_value2.get('level', None)
if level_value3 is not None:
level_instance3 = level_value3
instance_view_status_instance2.level = level_instance3
display_status_value3 = statuses_value2.get('displayStatus', None)
if display_status_value3 is not None:
display_status_instance3 = display_status_value3
instance_view_status_instance2.display_status = display_status_instance3
message_value3 = statuses_value2.get('message', None)
if message_value3 is not None:
message_instance3 = message_value3
instance_view_status_instance2.message = message_instance3
time_value3 = statuses_value2.get('time', None)
if time_value3 is not None:
time_instance3 = time_value3
instance_view_status_instance2.time = time_instance3
extensions_array = instance_view_value.get('extensions', None)
if extensions_array is not None:
for extensions_value in extensions_array:
virtual_machine_extension_instance_view_instance = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
instance_view_instance.extensions.append(virtual_machine_extension_instance_view_instance)
name_value5 = extensions_value.get('name', None)
if name_value5 is not None:
name_instance5 = name_value5
virtual_machine_extension_instance_view_instance.name = name_instance5
type_value2 = extensions_value.get('type', None)
if type_value2 is not None:
type_instance2 = type_value2
virtual_machine_extension_instance_view_instance.extension_type = type_instance2
type_handler_version_value2 = extensions_value.get('typeHandlerVersion', None)
if type_handler_version_value2 is not None:
type_handler_version_instance2 = type_handler_version_value2
virtual_machine_extension_instance_view_instance.type_handler_version = type_handler_version_instance2
substatuses_array = extensions_value.get('substatuses', None)
if substatuses_array is not None:
for substatuses_value in substatuses_array:
instance_view_status_instance3 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.sub_statuses.append(instance_view_status_instance3)
code_value4 = substatuses_value.get('code', None)
if code_value4 is not None:
code_instance4 = code_value4
instance_view_status_instance3.code = code_instance4
level_value4 = substatuses_value.get('level', None)
if level_value4 is not None:
level_instance4 = level_value4
instance_view_status_instance3.level = level_instance4
display_status_value4 = substatuses_value.get('displayStatus', None)
if display_status_value4 is not None:
display_status_instance4 = display_status_value4
instance_view_status_instance3.display_status = display_status_instance4
message_value4 = substatuses_value.get('message', None)
if message_value4 is not None:
message_instance4 = message_value4
instance_view_status_instance3.message = message_instance4
time_value4 = substatuses_value.get('time', None)
if time_value4 is not None:
time_instance4 = time_value4
instance_view_status_instance3.time = time_instance4
statuses_array3 = extensions_value.get('statuses', None)
if statuses_array3 is not None:
for statuses_value3 in statuses_array3:
instance_view_status_instance4 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.statuses.append(instance_view_status_instance4)
code_value5 = statuses_value3.get('code', None)
if code_value5 is not None:
code_instance5 = code_value5
instance_view_status_instance4.code = code_instance5
level_value5 = statuses_value3.get('level', None)
if level_value5 is not None:
level_instance5 = level_value5
instance_view_status_instance4.level = level_instance5
display_status_value5 = statuses_value3.get('displayStatus', None)
if display_status_value5 is not None:
display_status_instance5 = display_status_value5
instance_view_status_instance4.display_status = display_status_instance5
message_value5 = statuses_value3.get('message', None)
if message_value5 is not None:
message_instance5 = message_value5
instance_view_status_instance4.message = message_instance5
time_value5 = statuses_value3.get('time', None)
if time_value5 is not None:
time_instance5 = time_value5
instance_view_status_instance4.time = time_instance5
statuses_array4 = instance_view_value.get('statuses', None)
if statuses_array4 is not None:
for statuses_value4 in statuses_array4:
instance_view_status_instance5 = InstanceViewStatus()
instance_view_instance.statuses.append(instance_view_status_instance5)
code_value6 = statuses_value4.get('code', None)
if code_value6 is not None:
code_instance6 = code_value6
instance_view_status_instance5.code = code_instance6
level_value6 = statuses_value4.get('level', None)
if level_value6 is not None:
level_instance6 = level_value6
instance_view_status_instance5.level = level_instance6
display_status_value6 = statuses_value4.get('displayStatus', None)
if display_status_value6 is not None:
display_status_instance6 = display_status_value6
instance_view_status_instance5.display_status = display_status_instance6
message_value6 = statuses_value4.get('message', None)
if message_value6 is not None:
message_instance6 = message_value6
instance_view_status_instance5.message = message_instance6
time_value6 = statuses_value4.get('time', None)
if time_value6 is not None:
time_instance6 = time_value6
instance_view_status_instance5.time = time_instance6
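# 'resources' holds the VM's extension child resources.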
resources_array = value_value.get('resources', None)
if resources_array is not None:
virtual_machine_json_instance.extensions = []
for resources_value in resources_array:
virtual_machine_extension_json_instance = VirtualMachineExtension(tags={})
virtual_machine_json_instance.extensions.append(virtual_machine_extension_json_instance)
properties_value3 = resources_value.get('properties', None)
if properties_value3 is not None:
publisher_value3 = properties_value3.get('publisher', None)
if publisher_value3 is not None:
publisher_instance3 = publisher_value3
virtual_machine_extension_json_instance.publisher = publisher_instance3
type_value3 = properties_value3.get('type', None)
if type_value3 is not None:
type_instance3 = type_value3
virtual_machine_extension_json_instance.extension_type = type_instance3
type_handler_version_value3 = properties_value3.get('typeHandlerVersion', None)
if type_handler_version_value3 is not None:
type_handler_version_instance3 = type_handler_version_value3
virtual_machine_extension_json_instance.type_handler_version = type_handler_version_instance3
auto_upgrade_minor_version_value = properties_value3.get('autoUpgradeMinorVersion', None)
if auto_upgrade_minor_version_value is not None:
auto_upgrade_minor_version_instance = auto_upgrade_minor_version_value
virtual_machine_extension_json_instance.auto_upgrade_minor_version = auto_upgrade_minor_version_instance
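# Extension settings are kept as JSON strings on the model, hence json.dumps.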
settings_value = properties_value3.get('settings', None)
if settings_value is not None:
settings_instance = json.dumps(settings_value)
virtual_machine_extension_json_instance.settings = settings_instance
protected_settings_value = properties_value3.get('protectedSettings', None)
if protected_settings_value is not None:
protected_settings_instance = json.dumps(protected_settings_value)
virtual_machine_extension_json_instance.protected_settings = protected_settings_instance
provisioning_state_value2 = properties_value3.get('provisioningState', None)
if provisioning_state_value2 is not None:
provisioning_state_instance2 = provisioning_state_value2
virtual_machine_extension_json_instance.provisioning_state = provisioning_state_instance2
instance_view_value2 = properties_value3.get('instanceView', None)
if instance_view_value2 is not None:
instance_view_instance2 = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
virtual_machine_extension_json_instance.instance_view = instance_view_instance2
name_value6 = instance_view_value2.get('name', None)
if name_value6 is not None:
name_instance6 = name_value6
instance_view_instance2.name = name_instance6
type_value4 = instance_view_value2.get('type', None)
if type_value4 is not None:
type_instance4 = type_value4
instance_view_instance2.extension_type = type_instance4
type_handler_version_value4 = instance_view_value2.get('typeHandlerVersion', None)
if type_handler_version_value4 is not None:
type_handler_version_instance4 = type_handler_version_value4
instance_view_instance2.type_handler_version = type_handler_version_instance4
substatuses_array2 = instance_view_value2.get('substatuses', None)
if substatuses_array2 is not None:
for substatuses_value2 in substatuses_array2:
instance_view_status_instance6 = InstanceViewStatus()
instance_view_instance2.sub_statuses.append(instance_view_status_instance6)
code_value7 = substatuses_value2.get('code', None)
if code_value7 is not None:
code_instance7 = code_value7
instance_view_status_instance6.code = code_instance7
level_value7 = substatuses_value2.get('level', None)
if level_value7 is not None:
level_instance7 = level_value7
instance_view_status_instance6.level = level_instance7
display_status_value7 = substatuses_value2.get('displayStatus', None)
if display_status_value7 is not None:
display_status_instance7 = display_status_value7
instance_view_status_instance6.display_status = display_status_instance7
message_value7 = substatuses_value2.get('message', None)
if message_value7 is not None:
message_instance7 = message_value7
instance_view_status_instance6.message = message_instance7
time_value7 = substatuses_value2.get('time', None)
if time_value7 is not None:
time_instance7 = time_value7
instance_view_status_instance6.time = time_instance7
statuses_array5 = instance_view_value2.get('statuses', None)
if statuses_array5 is not None:
for statuses_value5 in statuses_array5:
instance_view_status_instance7 = InstanceViewStatus()
instance_view_instance2.statuses.append(instance_view_status_instance7)
code_value8 = statuses_value5.get('code', None)
if code_value8 is not None:
code_instance8 = code_value8
instance_view_status_instance7.code = code_instance8
level_value8 = statuses_value5.get('level', None)
if level_value8 is not None:
level_instance8 = level_value8
instance_view_status_instance7.level = level_instance8
display_status_value8 = statuses_value5.get('displayStatus', None)
if display_status_value8 is not None:
display_status_instance8 = display_status_value8
instance_view_status_instance7.display_status = display_status_instance8
message_value8 = statuses_value5.get('message', None)
if message_value8 is not None:
message_instance8 = message_value8
instance_view_status_instance7.message = message_instance8
time_value8 = statuses_value5.get('time', None)
if time_value8 is not None:
time_instance8 = time_value8
instance_view_status_instance7.time = time_instance8
id_value5 = resources_value.get('id', None)
if id_value5 is not None:
id_instance5 = id_value5
virtual_machine_extension_json_instance.id = id_instance5
name_value7 = resources_value.get('name', None)
if name_value7 is not None:
name_instance7 = name_value7
virtual_machine_extension_json_instance.name = name_instance7
type_value5 = resources_value.get('type', None)
if type_value5 is not None:
type_instance5 = type_value5
virtual_machine_extension_json_instance.type = type_instance5
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_json_instance.location = location_instance
tags_sequence_element = resources_value.get('tags', None)
if tags_sequence_element is not None:
for property in tags_sequence_element:
tags_key = property
tags_value = tags_sequence_element[property]
virtual_machine_extension_json_instance.tags[tags_key] = tags_value
id_value6 = value_value.get('id', None)
if id_value6 is not None:
id_instance6 = id_value6
virtual_machine_json_instance.id = id_instance6
name_value8 = value_value.get('name', None)
if name_value8 is not None:
name_instance8 = name_value8
virtual_machine_json_instance.name = name_instance8
type_value6 = value_value.get('type', None)
if type_value6 is not None:
type_instance6 = type_value6
virtual_machine_json_instance.type = type_instance6
location_value2 = value_value.get('location', None)
if location_value2 is not None:
location_instance2 = location_value2
virtual_machine_json_instance.location = location_instance2
tags_sequence_element2 = value_value.get('tags', None)
if tags_sequence_element2 is not None:
for property2 in tags_sequence_element2:
tags_key2 = property2
tags_value2 = tags_sequence_element2[property2]
virtual_machine_json_instance.tags[tags_key2] = tags_value2
odatanext_link_value = response_doc.get('@odata.nextLink', None)
if odatanext_link_value is not None:
odatanext_link_instance = odatanext_link_value
result.next_link = odatanext_link_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list_available_sizes(self, resource_group_name, vm_name):
"""
        Lists the virtual machine sizes that are available for a virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
VirtualMachineSizeListResponse: The List Virtual Machine operation
response.
"""
# Validate
if resource_group_name is None:
raise ValueError('resource_group_name cannot be None.')
if vm_name is None:
raise ValueError('vm_name cannot be None.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/resourceGroups/'
url = url + quote(resource_group_name)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/'
url = url + 'virtualMachines'
url = url + '/'
url = url + quote(vm_name)
url = url + '/vmSizes'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url[len(base_url) - 1] == '/':
base_url = base_url[0 : len(base_url) - 1]
if url[0] == '/':
url = url[1 : ]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineSizeListResponse(virtual_machine_sizes=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
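            # Auto-generated deserialization: copy each field that is present in
            # the JSON document onto the corresponding model attribute.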
if response_doc is not None:
value_array = response_doc.get('value', None)
if value_array is not None:
for value_value in value_array:
virtual_machine_size_instance = VirtualMachineSize()
result.virtual_machine_sizes.append(virtual_machine_size_instance)
name_value = value_value.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_size_instance.name = name_instance
number_of_cores_value = value_value.get('numberOfCores', None)
if number_of_cores_value is not None:
number_of_cores_instance = number_of_cores_value
virtual_machine_size_instance.number_of_cores = number_of_cores_instance
os_disk_size_in_mb_value = value_value.get('osDiskSizeInMB', None)
if os_disk_size_in_mb_value is not None:
os_disk_size_in_mb_instance = os_disk_size_in_mb_value
virtual_machine_size_instance.os_disk_size_in_mb = os_disk_size_in_mb_instance
resource_disk_size_in_mb_value = value_value.get('resourceDiskSizeInMB', None)
if resource_disk_size_in_mb_value is not None:
resource_disk_size_in_mb_instance = resource_disk_size_in_mb_value
virtual_machine_size_instance.resource_disk_size_in_mb = resource_disk_size_in_mb_instance
memory_in_mb_value = value_value.get('memoryInMB', None)
if memory_in_mb_value is not None:
memory_in_mb_instance = memory_in_mb_value
virtual_machine_size_instance.memory_in_mb = memory_in_mb_instance
max_data_disk_count_value = value_value.get('maxDataDiskCount', None)
if max_data_disk_count_value is not None:
max_data_disk_count_instance = max_data_disk_count_value
virtual_machine_size_instance.max_data_disk_count = max_data_disk_count_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def list_next(self, next_link):
"""
Gets the next page of Virtual Machines. NextLink is obtained by making
        a ListAll() call which fetches the first page of Virtual Machines and
a link to fetch the next page.
Args:
next_link (string): NextLink from the previous successful call to
ListVirtualMachines operation.
Returns:
VirtualMachineListResponse: The List Virtual Machine operation
response.
"""
# Validate
if next_link is None:
raise ValueError('next_link cannot be None.')
# Tracing
# Construct URL
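        # next_link is the absolute URL returned with the previous page, so it
        # is used directly rather than being joined with the client's base_uri.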
url = ''
url = url + next_link
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json; charset=utf-8'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineListResponse(virtual_machines=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
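            # Auto-generated deserialization: walk the JSON document and copy
            # each present field onto the corresponding model object.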
if response_doc is not None:
value_array = response_doc.get('value', None)
if value_array is not None:
for value_value in value_array:
virtual_machine_json_instance = VirtualMachine(extensions=[], tags={})
result.virtual_machines.append(virtual_machine_json_instance)
plan_value = value_value.get('plan', None)
if plan_value is not None:
plan_instance = Plan()
virtual_machine_json_instance.plan = plan_instance
name_value = plan_value.get('name', None)
if name_value is not None:
name_instance = name_value
plan_instance.name = name_instance
publisher_value = plan_value.get('publisher', None)
if publisher_value is not None:
publisher_instance = publisher_value
plan_instance.publisher = publisher_instance
product_value = plan_value.get('product', None)
if product_value is not None:
product_instance = product_value
plan_instance.product = product_instance
promotion_code_value = plan_value.get('promotionCode', None)
if promotion_code_value is not None:
promotion_code_instance = promotion_code_value
plan_instance.promotion_code = promotion_code_instance
properties_value = value_value.get('properties', None)
if properties_value is not None:
hardware_profile_value = properties_value.get('hardwareProfile', None)
if hardware_profile_value is not None:
hardware_profile_instance = HardwareProfile()
virtual_machine_json_instance.hardware_profile = hardware_profile_instance
vm_size_value = hardware_profile_value.get('vmSize', None)
if vm_size_value is not None:
vm_size_instance = vm_size_value
hardware_profile_instance.virtual_machine_size = vm_size_instance
storage_profile_value = properties_value.get('storageProfile', None)
if storage_profile_value is not None:
storage_profile_instance = StorageProfile(data_disks=[])
virtual_machine_json_instance.storage_profile = storage_profile_instance
image_reference_value = storage_profile_value.get('imageReference', None)
if image_reference_value is not None:
image_reference_instance = ImageReference()
storage_profile_instance.image_reference = image_reference_instance
publisher_value2 = image_reference_value.get('publisher', None)
if publisher_value2 is not None:
publisher_instance2 = publisher_value2
image_reference_instance.publisher = publisher_instance2
offer_value = image_reference_value.get('offer', None)
if offer_value is not None:
offer_instance = offer_value
image_reference_instance.offer = offer_instance
sku_value = image_reference_value.get('sku', None)
if sku_value is not None:
sku_instance = sku_value
image_reference_instance.sku = sku_instance
version_value = image_reference_value.get('version', None)
if version_value is not None:
version_instance = version_value
image_reference_instance.version = version_instance
source_image_value = storage_profile_value.get('sourceImage', None)
if source_image_value is not None:
source_image_instance = SourceImageReference()
storage_profile_instance.source_image = source_image_instance
id_value = source_image_value.get('id', None)
if id_value is not None:
id_instance = id_value
source_image_instance.reference_uri = id_instance
os_disk_value = storage_profile_value.get('osDisk', None)
if os_disk_value is not None:
os_disk_instance = OSDisk()
storage_profile_instance.os_disk = os_disk_instance
os_type_value = os_disk_value.get('osType', None)
if os_type_value is not None:
os_type_instance = os_type_value
os_disk_instance.operating_system_type = os_type_instance
name_value2 = os_disk_value.get('name', None)
if name_value2 is not None:
name_instance2 = name_value2
os_disk_instance.name = name_instance2
vhd_value = os_disk_value.get('vhd', None)
if vhd_value is not None:
vhd_instance = VirtualHardDisk()
os_disk_instance.virtual_hard_disk = vhd_instance
uri_value = vhd_value.get('uri', None)
if uri_value is not None:
uri_instance = uri_value
vhd_instance.uri = uri_instance
image_value = os_disk_value.get('image', None)
if image_value is not None:
image_instance = VirtualHardDisk()
os_disk_instance.source_image = image_instance
uri_value2 = image_value.get('uri', None)
if uri_value2 is not None:
uri_instance2 = uri_value2
image_instance.uri = uri_instance2
caching_value = os_disk_value.get('caching', None)
if caching_value is not None:
caching_instance = caching_value
os_disk_instance.caching = caching_instance
create_option_value = os_disk_value.get('createOption', None)
if create_option_value is not None:
create_option_instance = create_option_value
os_disk_instance.create_option = create_option_instance
data_disks_array = storage_profile_value.get('dataDisks', None)
if data_disks_array is not None:
for data_disks_value in data_disks_array:
data_disk_instance = DataDisk()
storage_profile_instance.data_disks.append(data_disk_instance)
lun_value = data_disks_value.get('lun', None)
if lun_value is not None:
lun_instance = lun_value
data_disk_instance.lun = lun_instance
disk_size_gb_value = data_disks_value.get('diskSizeGB', None)
if disk_size_gb_value is not None:
disk_size_gb_instance = disk_size_gb_value
data_disk_instance.disk_size_gb = disk_size_gb_instance
name_value3 = data_disks_value.get('name', None)
if name_value3 is not None:
name_instance3 = name_value3
data_disk_instance.name = name_instance3
vhd_value2 = data_disks_value.get('vhd', None)
if vhd_value2 is not None:
vhd_instance2 = VirtualHardDisk()
data_disk_instance.virtual_hard_disk = vhd_instance2
uri_value3 = vhd_value2.get('uri', None)
if uri_value3 is not None:
uri_instance3 = uri_value3
vhd_instance2.uri = uri_instance3
image_value2 = data_disks_value.get('image', None)
if image_value2 is not None:
image_instance2 = VirtualHardDisk()
data_disk_instance.source_image = image_instance2
uri_value4 = image_value2.get('uri', None)
if uri_value4 is not None:
uri_instance4 = uri_value4
image_instance2.uri = uri_instance4
caching_value2 = data_disks_value.get('caching', None)
if caching_value2 is not None:
caching_instance2 = caching_value2
data_disk_instance.caching = caching_instance2
create_option_value2 = data_disks_value.get('createOption', None)
if create_option_value2 is not None:
create_option_instance2 = create_option_value2
data_disk_instance.create_option = create_option_instance2
os_profile_value = properties_value.get('osProfile', None)
if os_profile_value is not None:
os_profile_instance = OSProfile(secrets=[])
virtual_machine_json_instance.os_profile = os_profile_instance
computer_name_value = os_profile_value.get('computerName', None)
if computer_name_value is not None:
computer_name_instance = computer_name_value
os_profile_instance.computer_name = computer_name_instance
admin_username_value = os_profile_value.get('adminUsername', None)
if admin_username_value is not None:
admin_username_instance = admin_username_value
os_profile_instance.admin_username = admin_username_instance
admin_password_value = os_profile_value.get('adminPassword', None)
if admin_password_value is not None:
admin_password_instance = admin_password_value
os_profile_instance.admin_password = admin_password_instance
custom_data_value = os_profile_value.get('customData', None)
if custom_data_value is not None:
custom_data_instance = custom_data_value
os_profile_instance.custom_data = custom_data_instance
windows_configuration_value = os_profile_value.get('windowsConfiguration', None)
if windows_configuration_value is not None:
windows_configuration_instance = WindowsConfiguration(additional_unattend_contents=[])
os_profile_instance.windows_configuration = windows_configuration_instance
provision_vm_agent_value = windows_configuration_value.get('provisionVMAgent', None)
if provision_vm_agent_value is not None:
provision_vm_agent_instance = provision_vm_agent_value
windows_configuration_instance.provision_vm_agent = provision_vm_agent_instance
enable_automatic_updates_value = windows_configuration_value.get('enableAutomaticUpdates', None)
if enable_automatic_updates_value is not None:
enable_automatic_updates_instance = enable_automatic_updates_value
windows_configuration_instance.enable_automatic_updates = enable_automatic_updates_instance
time_zone_value = windows_configuration_value.get('timeZone', None)
if time_zone_value is not None:
time_zone_instance = time_zone_value
windows_configuration_instance.time_zone = time_zone_instance
additional_unattend_content_array = windows_configuration_value.get('additionalUnattendContent', None)
if additional_unattend_content_array is not None:
for additional_unattend_content_value in additional_unattend_content_array:
additional_unattend_content_instance = AdditionalUnattendContent()
windows_configuration_instance.additional_unattend_contents.append(additional_unattend_content_instance)
pass_name_value = additional_unattend_content_value.get('passName', None)
if pass_name_value is not None:
pass_name_instance = pass_name_value
additional_unattend_content_instance.pass_name = pass_name_instance
component_name_value = additional_unattend_content_value.get('componentName', None)
if component_name_value is not None:
component_name_instance = component_name_value
additional_unattend_content_instance.component_name = component_name_instance
setting_name_value = additional_unattend_content_value.get('settingName', None)
if setting_name_value is not None:
setting_name_instance = setting_name_value
additional_unattend_content_instance.setting_name = setting_name_instance
content_value = additional_unattend_content_value.get('content', None)
if content_value is not None:
content_instance = content_value
additional_unattend_content_instance.content = content_instance
win_rm_value = windows_configuration_value.get('winRM', None)
if win_rm_value is not None:
win_rm_instance = WinRMConfiguration(listeners=[])
windows_configuration_instance.win_rm_configuration = win_rm_instance
listeners_array = win_rm_value.get('listeners', None)
if listeners_array is not None:
for listeners_value in listeners_array:
win_rm_listener_instance = WinRMListener()
win_rm_instance.listeners.append(win_rm_listener_instance)
protocol_value = listeners_value.get('protocol', None)
if protocol_value is not None:
protocol_instance = protocol_value
win_rm_listener_instance.protocol = protocol_instance
certificate_url_value = listeners_value.get('certificateUrl', None)
if certificate_url_value is not None:
certificate_url_instance = certificate_url_value
win_rm_listener_instance.certificate_url = certificate_url_instance
linux_configuration_value = os_profile_value.get('linuxConfiguration', None)
if linux_configuration_value is not None:
linux_configuration_instance = LinuxConfiguration()
os_profile_instance.linux_configuration = linux_configuration_instance
disable_password_authentication_value = linux_configuration_value.get('disablePasswordAuthentication', None)
if disable_password_authentication_value is not None:
disable_password_authentication_instance = disable_password_authentication_value
linux_configuration_instance.disable_password_authentication = disable_password_authentication_instance
ssh_value = linux_configuration_value.get('ssh', None)
if ssh_value is not None:
ssh_instance = SshConfiguration(public_keys=[])
linux_configuration_instance.ssh_configuration = ssh_instance
public_keys_array = ssh_value.get('publicKeys', None)
if public_keys_array is not None:
for public_keys_value in public_keys_array:
ssh_public_key_instance = SshPublicKey()
ssh_instance.public_keys.append(ssh_public_key_instance)
path_value = public_keys_value.get('path', None)
if path_value is not None:
path_instance = path_value
ssh_public_key_instance.path = path_instance
key_data_value = public_keys_value.get('keyData', None)
if key_data_value is not None:
key_data_instance = key_data_value
ssh_public_key_instance.key_data = key_data_instance
secrets_array = os_profile_value.get('secrets', None)
if secrets_array is not None:
for secrets_value in secrets_array:
vault_secret_group_instance = VaultSecretGroup(vault_certificates=[])
os_profile_instance.secrets.append(vault_secret_group_instance)
source_vault_value = secrets_value.get('sourceVault', None)
if source_vault_value is not None:
source_vault_instance = SourceVaultReference()
vault_secret_group_instance.source_vault = source_vault_instance
id_value2 = source_vault_value.get('id', None)
if id_value2 is not None:
id_instance2 = id_value2
source_vault_instance.reference_uri = id_instance2
vault_certificates_array = secrets_value.get('vaultCertificates', None)
if vault_certificates_array is not None:
for vault_certificates_value in vault_certificates_array:
vault_certificate_instance = VaultCertificate()
vault_secret_group_instance.vault_certificates.append(vault_certificate_instance)
certificate_url_value2 = vault_certificates_value.get('certificateUrl', None)
if certificate_url_value2 is not None:
certificate_url_instance2 = certificate_url_value2
vault_certificate_instance.certificate_url = certificate_url_instance2
certificate_store_value = vault_certificates_value.get('certificateStore', None)
if certificate_store_value is not None:
certificate_store_instance = certificate_store_value
vault_certificate_instance.certificate_store = certificate_store_instance
network_profile_value = properties_value.get('networkProfile', None)
if network_profile_value is not None:
network_profile_instance = NetworkProfile(network_interfaces=[])
virtual_machine_json_instance.network_profile = network_profile_instance
network_interfaces_array = network_profile_value.get('networkInterfaces', None)
if network_interfaces_array is not None:
for network_interfaces_value in network_interfaces_array:
network_interface_reference_json_instance = NetworkInterfaceReference()
network_profile_instance.network_interfaces.append(network_interface_reference_json_instance)
properties_value2 = network_interfaces_value.get('properties', None)
if properties_value2 is not None:
primary_value = properties_value2.get('primary', None)
if primary_value is not None:
primary_instance = primary_value
network_interface_reference_json_instance.primary = primary_instance
id_value3 = network_interfaces_value.get('id', None)
if id_value3 is not None:
id_instance3 = id_value3
network_interface_reference_json_instance.reference_uri = id_instance3
availability_set_value = properties_value.get('availabilitySet', None)
if availability_set_value is not None:
availability_set_instance = AvailabilitySetReference()
virtual_machine_json_instance.availability_set_reference = availability_set_instance
id_value4 = availability_set_value.get('id', None)
if id_value4 is not None:
id_instance4 = id_value4
availability_set_instance.reference_uri = id_instance4
provisioning_state_value = properties_value.get('provisioningState', None)
if provisioning_state_value is not None:
provisioning_state_instance = provisioning_state_value
virtual_machine_json_instance.provisioning_state = provisioning_state_instance
instance_view_value = properties_value.get('instanceView', None)
if instance_view_value is not None:
instance_view_instance = VirtualMachineInstanceView(disks=[], extensions=[], statuses=[])
virtual_machine_json_instance.instance_view = instance_view_instance
platform_update_domain_value = instance_view_value.get('platformUpdateDomain', None)
if platform_update_domain_value is not None:
platform_update_domain_instance = platform_update_domain_value
instance_view_instance.platform_update_domain = platform_update_domain_instance
platform_fault_domain_value = instance_view_value.get('platformFaultDomain', None)
if platform_fault_domain_value is not None:
platform_fault_domain_instance = platform_fault_domain_value
instance_view_instance.platform_fault_domain = platform_fault_domain_instance
rdp_thumb_print_value = instance_view_value.get('rdpThumbPrint', None)
if rdp_thumb_print_value is not None:
rdp_thumb_print_instance = rdp_thumb_print_value
instance_view_instance.remote_desktop_thumbprint = rdp_thumb_print_instance
vm_agent_value = instance_view_value.get('vmAgent', None)
if vm_agent_value is not None:
vm_agent_instance = VirtualMachineAgentInstanceView(extension_handlers=[], statuses=[])
instance_view_instance.vm_agent = vm_agent_instance
vm_agent_version_value = vm_agent_value.get('vmAgentVersion', None)
if vm_agent_version_value is not None:
vm_agent_version_instance = vm_agent_version_value
vm_agent_instance.vm_agent_version = vm_agent_version_instance
extension_handlers_array = vm_agent_value.get('extensionHandlers', None)
if extension_handlers_array is not None:
for extension_handlers_value in extension_handlers_array:
virtual_machine_extension_handler_instance_view_instance = VirtualMachineExtensionHandlerInstanceView()
vm_agent_instance.extension_handlers.append(virtual_machine_extension_handler_instance_view_instance)
type_value = extension_handlers_value.get('type', None)
if type_value is not None:
type_instance = type_value
virtual_machine_extension_handler_instance_view_instance.type = type_instance
type_handler_version_value = extension_handlers_value.get('typeHandlerVersion', None)
if type_handler_version_value is not None:
type_handler_version_instance = type_handler_version_value
virtual_machine_extension_handler_instance_view_instance.type_handler_version = type_handler_version_instance
status_value = extension_handlers_value.get('status', None)
if status_value is not None:
status_instance = InstanceViewStatus()
virtual_machine_extension_handler_instance_view_instance.status = status_instance
code_value = status_value.get('code', None)
if code_value is not None:
code_instance = code_value
status_instance.code = code_instance
level_value = status_value.get('level', None)
if level_value is not None:
level_instance = level_value
status_instance.level = level_instance
display_status_value = status_value.get('displayStatus', None)
if display_status_value is not None:
display_status_instance = display_status_value
status_instance.display_status = display_status_instance
message_value = status_value.get('message', None)
if message_value is not None:
message_instance = message_value
status_instance.message = message_instance
time_value = status_value.get('time', None)
if time_value is not None:
time_instance = time_value
status_instance.time = time_instance
statuses_array = vm_agent_value.get('statuses', None)
if statuses_array is not None:
for statuses_value in statuses_array:
instance_view_status_instance = InstanceViewStatus()
vm_agent_instance.statuses.append(instance_view_status_instance)
code_value2 = statuses_value.get('code', None)
if code_value2 is not None:
code_instance2 = code_value2
instance_view_status_instance.code = code_instance2
level_value2 = statuses_value.get('level', None)
if level_value2 is not None:
level_instance2 = level_value2
instance_view_status_instance.level = level_instance2
display_status_value2 = statuses_value.get('displayStatus', None)
if display_status_value2 is not None:
display_status_instance2 = display_status_value2
instance_view_status_instance.display_status = display_status_instance2
message_value2 = statuses_value.get('message', None)
if message_value2 is not None:
message_instance2 = message_value2
instance_view_status_instance.message = message_instance2
time_value2 = statuses_value.get('time', None)
if time_value2 is not None:
time_instance2 = time_value2
instance_view_status_instance.time = time_instance2
disks_array = instance_view_value.get('disks', None)
if disks_array is not None:
for disks_value in disks_array:
disk_instance_view_instance = DiskInstanceView(statuses=[])
instance_view_instance.disks.append(disk_instance_view_instance)
name_value4 = disks_value.get('name', None)
if name_value4 is not None:
name_instance4 = name_value4
disk_instance_view_instance.name = name_instance4
statuses_array2 = disks_value.get('statuses', None)
if statuses_array2 is not None:
for statuses_value2 in statuses_array2:
instance_view_status_instance2 = InstanceViewStatus()
disk_instance_view_instance.statuses.append(instance_view_status_instance2)
code_value3 = statuses_value2.get('code', None)
if code_value3 is not None:
code_instance3 = code_value3
instance_view_status_instance2.code = code_instance3
level_value3 = statuses_value2.get('level', None)
if level_value3 is not None:
level_instance3 = level_value3
instance_view_status_instance2.level = level_instance3
display_status_value3 = statuses_value2.get('displayStatus', None)
if display_status_value3 is not None:
display_status_instance3 = display_status_value3
instance_view_status_instance2.display_status = display_status_instance3
message_value3 = statuses_value2.get('message', None)
if message_value3 is not None:
message_instance3 = message_value3
instance_view_status_instance2.message = message_instance3
time_value3 = statuses_value2.get('time', None)
if time_value3 is not None:
time_instance3 = time_value3
instance_view_status_instance2.time = time_instance3
extensions_array = instance_view_value.get('extensions', None)
if extensions_array is not None:
for extensions_value in extensions_array:
virtual_machine_extension_instance_view_instance = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
instance_view_instance.extensions.append(virtual_machine_extension_instance_view_instance)
name_value5 = extensions_value.get('name', None)
if name_value5 is not None:
name_instance5 = name_value5
virtual_machine_extension_instance_view_instance.name = name_instance5
type_value2 = extensions_value.get('type', None)
if type_value2 is not None:
type_instance2 = type_value2
virtual_machine_extension_instance_view_instance.extension_type = type_instance2
type_handler_version_value2 = extensions_value.get('typeHandlerVersion', None)
if type_handler_version_value2 is not None:
type_handler_version_instance2 = type_handler_version_value2
virtual_machine_extension_instance_view_instance.type_handler_version = type_handler_version_instance2
substatuses_array = extensions_value.get('substatuses', None)
if substatuses_array is not None:
for substatuses_value in substatuses_array:
instance_view_status_instance3 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.sub_statuses.append(instance_view_status_instance3)
code_value4 = substatuses_value.get('code', None)
if code_value4 is not None:
code_instance4 = code_value4
instance_view_status_instance3.code = code_instance4
level_value4 = substatuses_value.get('level', None)
if level_value4 is not None:
level_instance4 = level_value4
instance_view_status_instance3.level = level_instance4
display_status_value4 = substatuses_value.get('displayStatus', None)
if display_status_value4 is not None:
display_status_instance4 = display_status_value4
instance_view_status_instance3.display_status = display_status_instance4
message_value4 = substatuses_value.get('message', None)
if message_value4 is not None:
message_instance4 = message_value4
instance_view_status_instance3.message = message_instance4
time_value4 = substatuses_value.get('time', None)
if time_value4 is not None:
time_instance4 = time_value4
instance_view_status_instance3.time = time_instance4
statuses_array3 = extensions_value.get('statuses', None)
if statuses_array3 is not None:
for statuses_value3 in statuses_array3:
instance_view_status_instance4 = InstanceViewStatus()
virtual_machine_extension_instance_view_instance.statuses.append(instance_view_status_instance4)
code_value5 = statuses_value3.get('code', None)
if code_value5 is not None:
code_instance5 = code_value5
instance_view_status_instance4.code = code_instance5
level_value5 = statuses_value3.get('level', None)
if level_value5 is not None:
level_instance5 = level_value5
instance_view_status_instance4.level = level_instance5
display_status_value5 = statuses_value3.get('displayStatus', None)
if display_status_value5 is not None:
display_status_instance5 = display_status_value5
instance_view_status_instance4.display_status = display_status_instance5
message_value5 = statuses_value3.get('message', None)
if message_value5 is not None:
message_instance5 = message_value5
instance_view_status_instance4.message = message_instance5
time_value5 = statuses_value3.get('time', None)
if time_value5 is not None:
time_instance5 = time_value5
instance_view_status_instance4.time = time_instance5
statuses_array4 = instance_view_value.get('statuses', None)
if statuses_array4 is not None:
for statuses_value4 in statuses_array4:
instance_view_status_instance5 = InstanceViewStatus()
instance_view_instance.statuses.append(instance_view_status_instance5)
code_value6 = statuses_value4.get('code', None)
if code_value6 is not None:
code_instance6 = code_value6
instance_view_status_instance5.code = code_instance6
level_value6 = statuses_value4.get('level', None)
if level_value6 is not None:
level_instance6 = level_value6
instance_view_status_instance5.level = level_instance6
display_status_value6 = statuses_value4.get('displayStatus', None)
if display_status_value6 is not None:
display_status_instance6 = display_status_value6
instance_view_status_instance5.display_status = display_status_instance6
message_value6 = statuses_value4.get('message', None)
if message_value6 is not None:
message_instance6 = message_value6
instance_view_status_instance5.message = message_instance6
time_value6 = statuses_value4.get('time', None)
if time_value6 is not None:
time_instance6 = time_value6
instance_view_status_instance5.time = time_instance6
resources_array = value_value.get('resources', None)
if resources_array is not None:
virtual_machine_json_instance.extensions = []
for resources_value in resources_array:
virtual_machine_extension_json_instance = VirtualMachineExtension(tags={})
virtual_machine_json_instance.extensions.append(virtual_machine_extension_json_instance)
properties_value3 = resources_value.get('properties', None)
if properties_value3 is not None:
publisher_value3 = properties_value3.get('publisher', None)
if publisher_value3 is not None:
publisher_instance3 = publisher_value3
virtual_machine_extension_json_instance.publisher = publisher_instance3
type_value3 = properties_value3.get('type', None)
if type_value3 is not None:
type_instance3 = type_value3
virtual_machine_extension_json_instance.extension_type = type_instance3
type_handler_version_value3 = properties_value3.get('typeHandlerVersion', None)
if type_handler_version_value3 is not None:
type_handler_version_instance3 = type_handler_version_value3
virtual_machine_extension_json_instance.type_handler_version = type_handler_version_instance3
auto_upgrade_minor_version_value = properties_value3.get('autoUpgradeMinorVersion', None)
if auto_upgrade_minor_version_value is not None:
auto_upgrade_minor_version_instance = auto_upgrade_minor_version_value
virtual_machine_extension_json_instance.auto_upgrade_minor_version = auto_upgrade_minor_version_instance
settings_value = properties_value3.get('settings', None)
if settings_value is not None:
settings_instance = json.dumps(settings_value)
virtual_machine_extension_json_instance.settings = settings_instance
protected_settings_value = properties_value3.get('protectedSettings', None)
if protected_settings_value is not None:
protected_settings_instance = json.dumps(protected_settings_value)
virtual_machine_extension_json_instance.protected_settings = protected_settings_instance
provisioning_state_value2 = properties_value3.get('provisioningState', None)
if provisioning_state_value2 is not None:
provisioning_state_instance2 = provisioning_state_value2
virtual_machine_extension_json_instance.provisioning_state = provisioning_state_instance2
instance_view_value2 = properties_value3.get('instanceView', None)
if instance_view_value2 is not None:
instance_view_instance2 = VirtualMachineExtensionInstanceView(statuses=[], sub_statuses=[])
virtual_machine_extension_json_instance.instance_view = instance_view_instance2
name_value6 = instance_view_value2.get('name', None)
if name_value6 is not None:
name_instance6 = name_value6
instance_view_instance2.name = name_instance6
type_value4 = instance_view_value2.get('type', None)
if type_value4 is not None:
type_instance4 = type_value4
instance_view_instance2.extension_type = type_instance4
type_handler_version_value4 = instance_view_value2.get('typeHandlerVersion', None)
if type_handler_version_value4 is not None:
type_handler_version_instance4 = type_handler_version_value4
instance_view_instance2.type_handler_version = type_handler_version_instance4
substatuses_array2 = instance_view_value2.get('substatuses', None)
if substatuses_array2 is not None:
for substatuses_value2 in substatuses_array2:
instance_view_status_instance6 = InstanceViewStatus()
instance_view_instance2.sub_statuses.append(instance_view_status_instance6)
code_value7 = substatuses_value2.get('code', None)
if code_value7 is not None:
code_instance7 = code_value7
instance_view_status_instance6.code = code_instance7
level_value7 = substatuses_value2.get('level', None)
if level_value7 is not None:
level_instance7 = level_value7
instance_view_status_instance6.level = level_instance7
display_status_value7 = substatuses_value2.get('displayStatus', None)
if display_status_value7 is not None:
display_status_instance7 = display_status_value7
instance_view_status_instance6.display_status = display_status_instance7
message_value7 = substatuses_value2.get('message', None)
if message_value7 is not None:
message_instance7 = message_value7
instance_view_status_instance6.message = message_instance7
time_value7 = substatuses_value2.get('time', None)
if time_value7 is not None:
time_instance7 = time_value7
instance_view_status_instance6.time = time_instance7
statuses_array5 = instance_view_value2.get('statuses', None)
if statuses_array5 is not None:
for statuses_value5 in statuses_array5:
instance_view_status_instance7 = InstanceViewStatus()
instance_view_instance2.statuses.append(instance_view_status_instance7)
code_value8 = statuses_value5.get('code', None)
if code_value8 is not None:
code_instance8 = code_value8
instance_view_status_instance7.code = code_instance8
level_value8 = statuses_value5.get('level', None)
if level_value8 is not None:
level_instance8 = level_value8
instance_view_status_instance7.level = level_instance8
display_status_value8 = statuses_value5.get('displayStatus', None)
if display_status_value8 is not None:
display_status_instance8 = display_status_value8
instance_view_status_instance7.display_status = display_status_instance8
message_value8 = statuses_value5.get('message', None)
if message_value8 is not None:
message_instance8 = message_value8
instance_view_status_instance7.message = message_instance8
time_value8 = statuses_value5.get('time', None)
if time_value8 is not None:
time_instance8 = time_value8
instance_view_status_instance7.time = time_instance8
id_value5 = resources_value.get('id', None)
if id_value5 is not None:
id_instance5 = id_value5
virtual_machine_extension_json_instance.id = id_instance5
name_value7 = resources_value.get('name', None)
if name_value7 is not None:
name_instance7 = name_value7
virtual_machine_extension_json_instance.name = name_instance7
type_value5 = resources_value.get('type', None)
if type_value5 is not None:
type_instance5 = type_value5
virtual_machine_extension_json_instance.type = type_instance5
location_value = resources_value.get('location', None)
if location_value is not None:
location_instance = location_value
virtual_machine_extension_json_instance.location = location_instance
tags_sequence_element = resources_value.get('tags', None)
if tags_sequence_element is not None:
for property in tags_sequence_element:
tags_key = property
tags_value = tags_sequence_element[property]
virtual_machine_extension_json_instance.tags[tags_key] = tags_value
id_value6 = value_value.get('id', None)
if id_value6 is not None:
id_instance6 = id_value6
virtual_machine_json_instance.id = id_instance6
name_value8 = value_value.get('name', None)
if name_value8 is not None:
name_instance8 = name_value8
virtual_machine_json_instance.name = name_instance8
type_value6 = value_value.get('type', None)
if type_value6 is not None:
type_instance6 = type_value6
virtual_machine_json_instance.type = type_instance6
location_value2 = value_value.get('location', None)
if location_value2 is not None:
location_instance2 = location_value2
virtual_machine_json_instance.location = location_instance2
tags_sequence_element2 = value_value.get('tags', None)
if tags_sequence_element2 is not None:
for property2 in tags_sequence_element2:
tags_key2 = property2
tags_value2 = tags_sequence_element2[property2]
virtual_machine_json_instance.tags[tags_key2] = tags_value2
odatanext_link_value = response_doc.get('@odata.nextLink', None)
if odatanext_link_value is not None:
odatanext_link_instance = odatanext_link_value
result.next_link = odatanext_link_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
def power_off(self, resource_group_name, vm_name):
"""
The operation to power off (stop) a virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
"""
client2 = self.client
response = client2.virtual_machines.begin_powering_off(resource_group_name, vm_name)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_initial_timeout >= 0:
delay_in_seconds = client2.long_running_operation_initial_timeout
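        # Poll the asynchronous operation, sleeping between checks, until it
        # leaves the InProgress state.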
        while result.status == ComputeOperationStatus.in_progress:
time.sleep(delay_in_seconds)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_retry_timeout >= 0:
delay_in_seconds = client2.long_running_operation_retry_timeout
return result
def restart(self, resource_group_name, vm_name):
"""
The operation to restart a virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
"""
client2 = self.client
response = client2.virtual_machines.begin_restarting(resource_group_name, vm_name)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_initial_timeout >= 0:
delay_in_seconds = client2.long_running_operation_initial_timeout
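        # Poll until the operation leaves the InProgress state.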
        while result.status == ComputeOperationStatus.in_progress:
time.sleep(delay_in_seconds)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_retry_timeout >= 0:
delay_in_seconds = client2.long_running_operation_retry_timeout
return result
def start(self, resource_group_name, vm_name):
"""
The operation to start a virtual machine.
Args:
resource_group_name (string): The name of the resource group.
vm_name (string): The name of the virtual machine.
Returns:
ComputeLongRunningOperationResponse: The Compute service response for
long-running operations.
"""
client2 = self.client
response = client2.virtual_machines.begin_starting(resource_group_name, vm_name)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_initial_timeout >= 0:
delay_in_seconds = client2.long_running_operation_initial_timeout
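        # Poll until the operation leaves the InProgress state.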
        while result.status == ComputeOperationStatus.in_progress:
time.sleep(delay_in_seconds)
result = client2.get_long_running_operation_status(response.azure_async_operation)
delay_in_seconds = 30
if client2.long_running_operation_retry_timeout >= 0:
delay_in_seconds = client2.long_running_operation_retry_timeout
return result
class VirtualMachineSizeOperations(object):
"""
Operations for listing virtual machine sizes available in a region.
    __NOTE__: An instance of this class is automatically created for an
    instance of the ComputeManagementClient.
"""
def __init__(self, client):
self._client = client
@property
def client(self):
"""
Gets a reference to the
Microsoft.Azure.Management.Compute.ComputeManagementClient.
"""
return self._client
def list(self, location):
"""
        Lists the virtual machine sizes available in a location for a subscription.
        Args:
            location (string): The location for which the available virtual
            machine sizes are queried.
Returns:
VirtualMachineSizeListResponse: The List Virtual Machine operation
response.
"""
# Validate
if location is None:
raise ValueError('location cannot be None.')
if location is not None and len(location) > 1000:
raise IndexError('location is outside the valid range.')
        if re.search('^[-\\w\\._]+$', location) is None:
raise IndexError('location is outside the valid range.')
# Tracing
# Construct URL
url = ''
url = url + '/subscriptions/'
if self.client.credentials.subscription_id is not None:
url = url + quote(self.client.credentials.subscription_id)
url = url + '/providers/'
url = url + 'Microsoft.Compute'
url = url + '/locations/'
url = url + quote(location)
url = url + '/vmSizes'
query_parameters = []
query_parameters.append('api-version=2015-05-01-preview')
if len(query_parameters) > 0:
url = url + '?' + '&'.join(query_parameters)
base_url = self.client.base_uri
# Trim '/' character from the end of baseUrl and beginning of url.
if base_url[len(base_url) - 1] == '/':
base_url = base_url[0 : len(base_url) - 1]
if url[0] == '/':
url = url[1 : ]
url = base_url + '/' + url
url = url.replace(' ', '%20')
# Create HTTP transport objects
http_request = Request()
http_request.url = url
http_request.method = 'GET'
# Set Headers
http_request.headers['Content-Type'] = 'application/json'
# Send Request
response = self.client.send_request(http_request)
body = response.content
status_code = response.status_code
if status_code != 200:
error = AzureHttpError(body, response.status_code)
raise error
# Create Result
result = None
# Deserialize Response
if status_code == 200:
response_content = body
result = VirtualMachineSizeListResponse(virtual_machine_sizes=[])
response_doc = None
if response_content:
response_doc = json.loads(response_content.decode())
if response_doc is not None:
value_array = response_doc.get('value', None)
if value_array is not None:
for value_value in value_array:
virtual_machine_size_instance = VirtualMachineSize()
result.virtual_machine_sizes.append(virtual_machine_size_instance)
name_value = value_value.get('name', None)
if name_value is not None:
name_instance = name_value
virtual_machine_size_instance.name = name_instance
number_of_cores_value = value_value.get('numberOfCores', None)
if number_of_cores_value is not None:
number_of_cores_instance = number_of_cores_value
virtual_machine_size_instance.number_of_cores = number_of_cores_instance
os_disk_size_in_mb_value = value_value.get('osDiskSizeInMB', None)
if os_disk_size_in_mb_value is not None:
os_disk_size_in_mb_instance = os_disk_size_in_mb_value
virtual_machine_size_instance.os_disk_size_in_mb = os_disk_size_in_mb_instance
resource_disk_size_in_mb_value = value_value.get('resourceDiskSizeInMB', None)
if resource_disk_size_in_mb_value is not None:
resource_disk_size_in_mb_instance = resource_disk_size_in_mb_value
virtual_machine_size_instance.resource_disk_size_in_mb = resource_disk_size_in_mb_instance
memory_in_mb_value = value_value.get('memoryInMB', None)
if memory_in_mb_value is not None:
memory_in_mb_instance = memory_in_mb_value
virtual_machine_size_instance.memory_in_mb = memory_in_mb_instance
max_data_disk_count_value = value_value.get('maxDataDiskCount', None)
if max_data_disk_count_value is not None:
max_data_disk_count_instance = max_data_disk_count_value
virtual_machine_size_instance.max_data_disk_count = max_data_disk_count_instance
result.status_code = status_code
result.request_id = response.headers.get('x-ms-request-id')
return result
|
{
"content_hash": "bead382f2b2f9717ac7d31f011434529",
"timestamp": "",
"source": "github",
"line_count": 13111,
"max_line_length": 157,
"avg_line_length": 53.52375867592098,
"alnum_prop": 0.46596508728179553,
"repo_name": "rjhunter8285/nsc-cloudproject-s22016",
"id": "ebf3cda622a13bcff6d732bea1ca58243c6bf9a0",
"size": "701770",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "api/FlaskApp/FlaskApp/python_modules/azure/mgmt/compute/computemanagement.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5101"
},
{
"name": "HTML",
"bytes": "50636"
},
{
"name": "Java",
"bytes": "48035"
},
{
"name": "JavaScript",
"bytes": "1359974"
},
{
"name": "PHP",
"bytes": "2175"
},
{
"name": "Python",
"bytes": "13485797"
},
{
"name": "Shell",
"bytes": "6524"
}
],
"symlink_target": ""
}
|
from dragon.core.tensor import Tensor
import dragon.ops as ops
def grad(cost, wrt, **kwargs):
"""Compute the gradients for variables with respect to the cost.
Parameters
----------
cost : Tensor
The cost.
wrt : Tensor or list of Tensor
        The variables to differentiate the cost with respect to.
Returns
-------
Tensor or list of Tensor
The gradients of variables.
Examples
--------
>>> x = Tensor('x').Variable()
>>> y = x * 2
>>> dx = grad(y, x)
>>> z = Tensor('z').Variable()
>>> y = x + z
>>> dx, dz = grad(y, [x, z])
"""
grads = []
if not isinstance(wrt, list): wrt = [wrt]
for w in wrt:
cost.grad_wrts.append(w.name)
w.grad_objs.append(cost)
w_grad = Tensor(w.name + '_grad')
w_grad.extra_targets.add(cost.name)
w_grad.expressions = cost.expressions
w_grad.grad_wrts.append(w.name)
grads.append(w_grad)
if len(grads) == 1: return grads[0]
return grads
def disconnected_grad(x):
"""Return the identity of input with truncated gradient flow.
The expression itself is unaffected, but the gradient is stopped.
Parameters
----------
x : Tensor
The input tensor.
Returns
-------
Tensor
The identity of input.
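    Examples
    --------
    >>> x = Tensor('x').Variable()
    >>> y = disconnected_grad(x) * 2  # y depends on x, but no gradient reaches x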
"""
return ops.StopGradient(x)
|
{
"content_hash": "1cb25498d0021c8928aa3e23e531b793",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 69,
"avg_line_length": 22.433333333333334,
"alnum_prop": 0.5601783060921248,
"repo_name": "neopenx/Dragon",
"id": "d08c4d05bdceb50b2ef7780ce81e9c21d19ebee2",
"size": "1535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Dragon/python/dragon/vm/theano/gradient.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7082"
},
{
"name": "C++",
"bytes": "1024612"
},
{
"name": "CMake",
"bytes": "7849"
},
{
"name": "Cuda",
"bytes": "246400"
},
{
"name": "Makefile",
"bytes": "7409"
},
{
"name": "Python",
"bytes": "552459"
}
],
"symlink_target": ""
}
|
"""This module contains logic for a 2048 board that doesnt calculate moves."""
from tfe_engine import MappedGameBoard
class MappedTwentyFortyEight(MappedGameBoard):
"""This class adds 2048 specific logic to the MappedGameBoard class."""
|
{
"content_hash": "399c2225cc90460efbaf840b9b77aeb4",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 78,
"avg_line_length": 40.5,
"alnum_prop": 0.7860082304526749,
"repo_name": "Kautenja/tfe_engine",
"id": "c2ce4c73440ba6a9468b8ad4be573d8218a051ab",
"size": "243",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tfe_engine/_tfe_gameboard/mapped_tfe_gameboard.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "507"
},
{
"name": "Python",
"bytes": "121613"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, print_function
import os
import jinja2
import plyxproto.parser as plyxproto
import yaml
from colorama import Fore
import sys
from . import jinja2_extensions
from .proto2xproto import Proto2XProto
from .xos2jinja import XOS2Jinja
from .validator import XProtoValidator
loader = jinja2.PackageLoader(__name__, "templates")
env = jinja2.Environment(loader=loader)
class XOSProcessorArgs:
""" Helper class for use cases that want to call XOSProcessor directly, rather than executing xosgenx from the
command line.
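        Example (file and target names are illustrative):
            args = XOSProcessorArgs(files=["core.xproto"], target="django.xtarget")
            rendered = XOSProcessor.process(args)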
"""
default_rev = False
default_output = None
default_attic = None
default_kvpairs = None
default_write_to_file = None
default_dest_file = None
default_dest_extension = None
default_target = None
default_checkers = None
    # Higher numbers = more verbosity, lower numbers = less verbosity
    default_verbosity = 0
    # If neither include_models nor include_apps is specified, then all models
    # will be included.
    default_include_models = []
    default_include_apps = []
default_strict_validation = False
default_lint = False
def __init__(self, **kwargs):
# set defaults
self.rev = XOSProcessorArgs.default_rev
self.output = XOSProcessorArgs.default_output
self.attic = XOSProcessorArgs.default_attic
self.kvpairs = XOSProcessorArgs.default_kvpairs
self.verbosity = XOSProcessorArgs.default_verbosity
self.write_to_file = XOSProcessorArgs.default_write_to_file
self.default_dest_file = XOSProcessorArgs.default_dest_file
self.default_dest_extension = XOSProcessorArgs.default_dest_extension
self.default_target = XOSProcessorArgs.default_target
        self.default_checkers = XOSProcessorArgs.default_checkers
self.include_models = XOSProcessorArgs.default_include_models
self.include_apps = XOSProcessorArgs.default_include_apps
self.strict_validation = XOSProcessorArgs.default_strict_validation
self.lint = XOSProcessorArgs.default_lint
# override defaults with kwargs
for (k, v) in kwargs.items():
setattr(self, k, v)
class XOSProcessor:
@staticmethod
def _read_input_from_files(files):
""" Read the files and return the combined text read.
Also returns a list of (line_number, filename) tuples that tell which
starting line corresponds to each file.
"""
line_map = []
input = ""
for fname in files:
with open(fname) as infile:
line_map.append((len(input.split("\n")), fname))
input += infile.read()
return (input, line_map)
@staticmethod
def _attach_parser(ast, args):
if hasattr(args, "rev") and args.rev:
v = Proto2XProto()
ast.accept(v)
v = XOS2Jinja(args)
ast.accept(v)
return v
@staticmethod
def _get_template(target):
if not os.path.isabs(target):
return os.path.abspath(
os.path.dirname(os.path.realpath(__file__)) + "/targets/" + target
)
return target
@staticmethod
def _file_exists(attic):
# NOTE this method can be used in the jinja template
def file_exists2(name):
if attic is not None:
path = attic + "/" + name
else:
path = name
return os.path.exists(path)
return file_exists2
@staticmethod
def _include_file(attic):
# NOTE this method can be used in the jinja template
def include_file2(name):
if attic is not None:
path = attic + "/" + name
else:
path = name
return open(path).read()
return include_file2
@staticmethod
def _load_jinja2_extensions(os_template_env, attic):
os_template_env.globals["include_file"] = XOSProcessor._include_file(
attic
) # Generates a function
os_template_env.globals["file_exists"] = XOSProcessor._file_exists(
attic
) # Generates a function
os_template_env.filters["yaml"] = yaml.dump
for f in dir(jinja2_extensions):
if f.startswith("xproto"):
os_template_env.globals[f] = getattr(jinja2_extensions, f)
return os_template_env
@staticmethod
def _add_context(args):
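        # args.kv is a comma-separated list of "key:value" pairs; parse it into
        # a dict that is passed to the templates as extra context.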
if not hasattr(args, "kv") or not args.kv:
return
try:
context = {}
for s in args.kv.split(","):
k, val = s.split(":")
context[k] = val
return context
except Exception as e:
print(e)
@staticmethod
def _write_single_file(rendered, dir, dest_file, quiet):
file_name = "%s/%s" % (dir, dest_file)
file = open(file_name, "w")
file.write(rendered)
file.close()
if not quiet:
print("Saved: %s" % file_name)
@staticmethod
def _write_split_target(rendered, dir, quiet):
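        # Lines beginning with "+++ <filename>" act as markers: everything
        # buffered since the previous marker is written to the named file.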
lines = rendered.splitlines()
current_buffer = []
for line in lines:
if line.startswith("+++"):
if dir:
path = dir + "/" + line[4:].lower()
fil = open(path, "w")
buf = "\n".join(current_buffer)
obuf = buf
fil.write(obuf)
fil.close()
if not quiet:
print("Save file to: %s" % path)
current_buffer = []
else:
current_buffer.append(line)
@staticmethod
def _find_message_by_model_name(messages, model):
return next((x for x in messages if x["name"] == model), None)
@staticmethod
def _find_last_nonempty_line(text, pointer):
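        # Scan backwards from `pointer` to the start of the last non-empty
        # line, so parse errors can be reported with surrounding context.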
ne_pointer = pointer
found = False
while ne_pointer != 0 and not found:
ne_pointer = text[: (ne_pointer - 1)].rfind("\n")
if ne_pointer < 0:
ne_pointer = 0
if text[ne_pointer - 1] != "\n":
found = True
return ne_pointer
@staticmethod
def process(args, operator=None):
# Setting defaults
if not hasattr(args, "attic"):
args.attic = None
if not hasattr(args, "write_to_file"):
args.write_to_file = None
if not hasattr(args, "dest_file"):
args.dest_file = None
if not hasattr(args, "dest_extension"):
args.dest_extension = None
if not hasattr(args, "output"):
args.output = None
if not hasattr(args, "quiet"):
args.quiet = True
# Validating
if args.write_to_file == "single" and args.dest_file is None:
raise Exception(
"[XosGenX] write_to_file option is specified as 'single' but no dest_file is provided"
)
if args.write_to_file == "model" and (args.dest_extension is None):
raise Exception(
"[XosGenX] write_to_file option is specified as 'model' but no dest_extension is provided"
)
if args.output is not None and not os.path.isabs(args.output):
raise Exception("[XosGenX] The output dir (%s) must be an absolute path!" % args.output)
if args.output is not None and not os.path.isdir(args.output):
raise Exception("[XosGenX] The output dir (%s) must be a directory!" % args.output)
if hasattr(args, "files"):
(inputs, line_map) = XOSProcessor._read_input_from_files(args.files)
elif hasattr(args, "inputs"):
inputs = args.inputs
line_map = []
else:
raise Exception("[XosGenX] No inputs provided!")
context = XOSProcessor._add_context(args)
parser = plyxproto.ProtobufAnalyzer()
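        # Parse the combined xproto input; on failure, print the offending line
        # with the error span highlighted, then exit.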
try:
ast = parser.parse_string(inputs, debug=0)
except plyxproto.ParsingError as e:
if e.message:
error = e.message
else:
error = "xproto parsing error"
if e.error_range is None:
# No line number information
print(error + "\n")
else:
line, start, end = e.error_range
ptr = XOSProcessor._find_last_nonempty_line(inputs, start)
if start == 0:
beginning = ""
else:
beginning = inputs[ptr: start - 1]
                line_end_char = inputs[start + end:].find("\n")
                line_end = inputs[start + end: start + end + line_end_char]
print(error + "\n" + Fore.YELLOW + "Line %d:" % line + Fore.WHITE)
print(
beginning
+ Fore.YELLOW
+ inputs[start - 1: start + end]
+ Fore.WHITE
+ line_end
)
exit(1)
v = XOSProcessor._attach_parser(ast, args)
if args.include_models or args.include_apps:
for message in v.messages:
message["is_included"] = False
if message["name"] in args.include_models:
message["is_included"] = True
else:
app_label = (
message.get("options", {})
.get("app_label")
.strip('"')
)
if app_label in args.include_apps:
message["is_included"] = True
else:
for message in v.messages:
message["is_included"] = True
validator = XProtoValidator(v.models, line_map)
validator.validate()
if validator.errors:
if args.strict_validation or (args.verbosity >= 0):
validator.print_errors()
fatal_errors = [x for x in validator.errors if x["severity"] == "ERROR"]
if fatal_errors and args.strict_validation:
sys.exit(-1)
if args.lint:
return ""
if not operator:
operator = args.target
template_path = XOSProcessor._get_template(operator)
else:
template_path = operator
[template_folder, template_name] = os.path.split(template_path)
os_template_loader = jinja2.FileSystemLoader(searchpath=[template_folder])
os_template_env = jinja2.Environment(loader=os_template_loader)
os_template_env = XOSProcessor._load_jinja2_extensions(
os_template_env, args.attic
)
template = os_template_env.get_template(template_name)
if args.output is not None and args.write_to_file == "model":
# Handle the case where each model is written to a separate python file.
rendered = {}
for i, model in enumerate(v.models):
model_dict = v.models[model]
messages = [XOSProcessor._find_message_by_model_name(v.messages, model)]
rendered[model] = template.render(
{
"proto": {
"message_table": {model: model_dict},
"messages": messages,
"policies": v.policies,
"message_names": [m["name"] for m in v.messages],
},
"context": context,
"options": v.options,
}
)
if not rendered[model]:
print("Not saving model %s as it is empty" % model, file=sys.stderr)
else:
legacy = jinja2_extensions.base.xproto_list_evaluates_true(
[model_dict.get("options", {}).get("custom_python", None),
model_dict.get("options", {}).get("legacy", None),
v.options.get("custom_python", None),
v.options.get("legacy", None)])
if legacy:
file_name = "%s/%s_decl.%s" % (args.output, model.lower(), args.dest_extension)
else:
file_name = "%s/%s.%s" % (args.output, model.lower(), args.dest_extension)
file = open(file_name, "w")
file.write(rendered[model])
file.close()
if not args.quiet:
print("Saved: %s" % file_name, file=sys.stderr)
else:
# Handle the case where all models are written to the same python file.
rendered = template.render(
{
"proto": {
"message_table": v.models,
"messages": v.messages,
"policies": v.policies,
"message_names": [m["name"] for m in v.messages],
},
"context": context,
"options": v.options,
}
)
if args.output is not None and args.write_to_file == "target":
XOSProcessor._write_split_target(rendered, args.output, args.quiet)
elif args.output is not None and args.write_to_file == "single":
XOSProcessor._write_single_file(
rendered, args.output, args.dest_file, args.quiet
)
return rendered
|
{
"content_hash": "7d55abbce0322d5f7f42b67c6976c59f",
"timestamp": "",
"source": "github",
"line_count": 386,
"max_line_length": 114,
"avg_line_length": 35.33678756476684,
"alnum_prop": 0.5296920821114369,
"repo_name": "open-cloud/xos",
"id": "d354f9a1336b1e16881c545b0ac07bc950bc669e",
"size": "14235",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/xos-genx/xosgenx/generator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "5024"
},
{
"name": "Makefile",
"bytes": "13624"
},
{
"name": "Python",
"bytes": "1329912"
},
{
"name": "Shell",
"bytes": "57651"
},
{
"name": "Smarty",
"bytes": "3161"
}
],
"symlink_target": ""
}
|
from unittest import mock
from neutron_lib import constants as common_constants
from neutron_lib import context
from neutron_lib.db import constants as db_consts
from neutron_lib.plugins.ml2 import ovs_constants
from neutron_lib.services.qos import constants as qos_consts
from oslo_utils import uuidutils
from neutron.agent.l2.extensions import qos
from neutron.agent.l2.extensions import qos_linux
from neutron.api.rpc.callbacks.consumer import registry
from neutron.api.rpc.callbacks import events
from neutron.api.rpc.callbacks import resources
from neutron.api.rpc.handlers import resources_rpc
from neutron import manager
from neutron.objects.qos import policy
from neutron.objects.qos import rule
from neutron.plugins.ml2.drivers.openvswitch.agent import (
ovs_agent_extension_api as ovs_ext_api)
from neutron.plugins.ml2.drivers.openvswitch.agent.openflow.native import (
ovs_bridge)
from neutron.tests import base
BASE_TEST_POLICY = {'context': None,
'name': 'test1',
'id': uuidutils.generate_uuid()}
TEST_POLICY = policy.QosPolicy(**BASE_TEST_POLICY)
TEST_POLICY_DESCR = policy.QosPolicy(description='fake_descr',
**BASE_TEST_POLICY)
TEST_POLICY2 = policy.QosPolicy(context=None,
name='test2', id=uuidutils.generate_uuid())
TEST_PORT = {'port_id': 'test_port_id',
'qos_policy_id': TEST_POLICY.id}
TEST_PORT2 = {'port_id': 'test_port_id_2',
'qos_policy_id': TEST_POLICY2.id}
FAKE_RULE_ID = uuidutils.generate_uuid()
FAKE_RULE_ID_2 = uuidutils.generate_uuid()
REALLY_FAKE_RULE_ID = uuidutils.generate_uuid()
class FakeDriver(qos_linux.QosLinuxAgentDriver):
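    # SUPPORTED_RULES maps each QoS rule type to its parameter constraints;
    # 'type:range' gives numeric bounds and 'type:values' enumerates the
    # allowed values, mirroring the validation format agent drivers use.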
SUPPORTED_RULES = {
qos_consts.RULE_TYPE_BANDWIDTH_LIMIT: {
qos_consts.MAX_KBPS: {
'type:range': [0, db_consts.DB_INTEGER_MAX_VALUE]},
qos_consts.MAX_BURST: {
'type:range': [0, db_consts.DB_INTEGER_MAX_VALUE]},
qos_consts.DIRECTION: {
'type:values': [common_constants.EGRESS_DIRECTION,
common_constants.INGRESS_DIRECTION]}
},
}
def __init__(self):
super(FakeDriver, self).__init__()
self.create_bandwidth_limit = mock.Mock()
self.update_bandwidth_limit = mock.Mock()
self.delete_bandwidth_limit = mock.Mock()
self.delete_bandwidth_limit_ingress = mock.Mock()
def initialize(self):
pass
class QosFakeRule(rule.QosRule):
rule_type = 'fake_type'
class QosAgentDriverTestCase(base.BaseTestCase):
def setUp(self):
super(QosAgentDriverTestCase, self).setUp()
self.driver = FakeDriver()
self.policy = TEST_POLICY
self.egress_bandwidth_limit_rule = (
rule.QosBandwidthLimitRule(
context=None, id=FAKE_RULE_ID,
qos_policy_id=self.policy.id,
max_kbps=100, max_burst_kbps=200,
direction=common_constants.EGRESS_DIRECTION))
self.ingress_bandwidth_limit_rule = (
rule.QosBandwidthLimitRule(
context=None, id=FAKE_RULE_ID_2,
qos_policy_id=self.policy.id,
max_kbps=100, max_burst_kbps=200,
direction=common_constants.INGRESS_DIRECTION))
self.policy.rules = [self.egress_bandwidth_limit_rule,
self.ingress_bandwidth_limit_rule]
self.port = {'qos_policy_id': None, 'qos_network_policy_id': None,
'device_owner': 'random-device-owner'}
self.fake_rule = QosFakeRule(context=None, id=REALLY_FAKE_RULE_ID,
qos_policy_id=self.policy.id)
def test_create(self):
self.driver.create(self.port, self.policy)
self.driver.create_bandwidth_limit.assert_has_calls([
mock.call(self.port, self.egress_bandwidth_limit_rule),
mock.call(self.port, self.ingress_bandwidth_limit_rule)
])
def test_update(self):
self.driver.update(self.port, self.policy)
self.driver.update_bandwidth_limit.assert_has_calls([
mock.call(self.port, self.egress_bandwidth_limit_rule),
mock.call(self.port, self.ingress_bandwidth_limit_rule)
])
def test_delete(self):
self.driver.delete(self.port, self.policy)
self.driver.delete_bandwidth_limit.assert_called_with(self.port)
self.driver.delete_bandwidth_limit_ingress.assert_called_with(
self.port)
def test_delete_no_policy(self):
self.driver.delete(self.port, qos_policy=None)
self.driver.delete_bandwidth_limit.assert_called_with(self.port)
self.driver.delete_bandwidth_limit_ingress.assert_called_with(
self.port)
def test__iterate_rules_with_unknown_rule_type(self):
self.policy.rules.append(self.fake_rule)
rules = list(self.driver._iterate_rules(self.policy.rules))
self.assertEqual(2, len(rules))
self.assertIsInstance(rules[0], rule.QosBandwidthLimitRule)
self.assertIsInstance(rules[1], rule.QosBandwidthLimitRule)
def test__handle_update_create_rules_checks_should_apply_to_port(self):
self.egress_bandwidth_limit_rule.should_apply_to_port = mock.Mock(
return_value=False)
self.ingress_bandwidth_limit_rule.should_apply_to_port = mock.Mock(
return_value=False)
self.driver.create(self.port, self.policy)
self.assertFalse(self.driver.create_bandwidth_limit.called)
self.egress_bandwidth_limit_rule.should_apply_to_port = mock.Mock(
return_value=True)
self.ingress_bandwidth_limit_rule.should_apply_to_port = mock.Mock(
return_value=True)
self.driver.create(self.port, self.policy)
self.assertTrue(self.driver.create_bandwidth_limit.called)
def test__get_max_burst_value(self):
rule = self.egress_bandwidth_limit_rule
rule.max_burst_kbps = 0
expected_burst = rule.max_kbps * qos_consts.DEFAULT_BURST_RATE
self.assertEqual(
expected_burst, self.driver._get_egress_burst_value(rule)
)
def test__rule_type_has_ingress_direction(self):
self.assertTrue(
self.driver._rule_type_has_ingress_direction(
qos_consts.RULE_TYPE_BANDWIDTH_LIMIT))
# Should return False for rule type other than
# RULE_TYPE_BANDWIDTH_LIMIT
supported_rules = {
qos_consts.RULE_TYPE_DSCP_MARKING: {
qos_consts.DSCP_MARK: {
'type:values': common_constants.VALID_DSCP_MARKS}
}
}
with mock.patch.dict(self.driver.SUPPORTED_RULES, supported_rules):
self.assertFalse(
self.driver._rule_type_has_ingress_direction(
qos_consts.RULE_TYPE_DSCP_MARKING))
# Should return False for rule type RULE_TYPE_BANDWIDTH_LIMIT but
# without INGRESS_DIRECTION in supported values
supported_rules = {
qos_consts.RULE_TYPE_BANDWIDTH_LIMIT: {
'type:values': [common_constants.EGRESS_DIRECTION]
}
}
with mock.patch.dict(self.driver.SUPPORTED_RULES, supported_rules):
self.assertFalse(
self.driver._rule_type_has_ingress_direction(
qos_consts.RULE_TYPE_BANDWIDTH_LIMIT))
def test__rule_is_ingress_direction(self):
self.assertFalse(
self.driver._rule_is_ingress_direction(
self.egress_bandwidth_limit_rule))
self.assertFalse(
self.driver._rule_is_ingress_direction(
self.fake_rule))
self.assertTrue(
self.driver._rule_is_ingress_direction(
self.ingress_bandwidth_limit_rule))
class QosExtensionBaseTestCase(base.BaseTestCase):
def setUp(self):
super(QosExtensionBaseTestCase, self).setUp()
conn_patcher = mock.patch(
'neutron.agent.ovsdb.impl_idl._connection')
conn_patcher.start()
self.addCleanup(conn_patcher.stop)
self.qos_ext = qos.QosAgentExtension()
self.context = context.get_admin_context()
self.connection = mock.Mock()
os_ken_app = mock.Mock()
self.agent_api = ovs_ext_api.OVSAgentExtensionAPI(
ovs_bridge.OVSAgentBridge(
'br-int', os_ken_app=os_ken_app),
ovs_bridge.OVSAgentBridge(
'br-tun', os_ken_app=os_ken_app),
{'phynet1': ovs_bridge.OVSAgentBridge(
'br-phynet1', os_ken_app=os_ken_app)})
self.qos_ext.consume_api(self.agent_api)
# Don't rely on used driver
mock.patch.object(
manager.NeutronManager, 'load_class_for_provider',
return_value=lambda: mock.Mock(
spec=qos_linux.QosLinuxAgentDriver)).start()
setattr(TEST_POLICY, 'rules', [])
class QosExtensionRpcTestCase(QosExtensionBaseTestCase):
def setUp(self):
super(QosExtensionRpcTestCase, self).setUp()
self.qos_ext.initialize(
self.connection, ovs_constants.EXTENSION_DRIVER_TYPE)
self.pull_mock = mock.patch.object(
self.qos_ext.resource_rpc, 'pull',
return_value=TEST_POLICY).start()
def _create_test_port_dict(self, qos_policy_id=None):
return {'port_id': uuidutils.generate_uuid(),
'qos_policy_id': qos_policy_id or TEST_POLICY.id}
def test_handle_port_with_no_policy(self):
port = self._create_test_port_dict()
del port['qos_policy_id']
self.qos_ext._process_reset_port = mock.Mock()
self.qos_ext.handle_port(self.context, port)
self.qos_ext._process_reset_port.assert_called_with(port)
def test_handle_unknown_port(self):
port = self._create_test_port_dict()
qos_policy_id = port['qos_policy_id']
port_id = port['port_id']
TEST_POLICY.rules = [rule.QosBandwidthLimitRule(
context=None, id=FAKE_RULE_ID,
qos_policy_id=TEST_POLICY.id,
max_kbps=100, max_burst_kbps=200,
direction=common_constants.EGRESS_DIRECTION)]
self.qos_ext.handle_port(self.context, port)
# we make sure the underlying qos driver is called with the
# right parameters
self.qos_ext.qos_driver.create.assert_called_once_with(
port, TEST_POLICY)
self.assertEqual(port,
self.qos_ext.policy_map.qos_policy_ports[qos_policy_id][port_id])
self.assertIn(port_id, self.qos_ext.policy_map.port_policies)
self.assertEqual(TEST_POLICY,
self.qos_ext.policy_map.known_policies[qos_policy_id])
def test_handle_unknown_port_with_no_rules(self):
test_policy_with_rules = {'context': None,
'name': 'test1',
'id': uuidutils.generate_uuid()}
test_policy = policy.QosPolicy(**test_policy_with_rules)
test_policy.rules = []
port = self._create_test_port_dict(test_policy.id)
qos_policy_id = port['qos_policy_id']
port_id = port['port_id']
self.pull_mock.return_value = test_policy
self.qos_ext.handle_port(self.context, port)
# we make sure the underlying qos driver is called with the
# right parameters
self.qos_ext.qos_driver.delete.assert_called_once_with(port, None)
self.assertEqual(port,
self.qos_ext.policy_map.qos_policy_ports[qos_policy_id][port_id])
self.assertIn(port_id, self.qos_ext.policy_map.port_policies)
self.assertEqual(test_policy,
self.qos_ext.policy_map.known_policies[qos_policy_id])
def test_handle_known_port(self):
port_obj1 = self._create_test_port_dict()
port_obj2 = dict(port_obj1)
self.qos_ext.handle_port(self.context, port_obj1)
self.qos_ext.qos_driver.reset_mock()
self.qos_ext.handle_port(self.context, port_obj2)
self.assertFalse(self.qos_ext.qos_driver.create.called)
def test_handle_known_port_change_policy_id(self):
port = self._create_test_port_dict()
self.qos_ext.handle_port(self.context, port)
self.qos_ext.resource_rpc.pull.reset_mock()
test_policy_with_rules = {'context': None,
'name': 'test1',
'id': uuidutils.generate_uuid()}
test_policy = policy.QosPolicy(**test_policy_with_rules)
setattr(test_policy, 'rules', [])
self.pull_mock.return_value = test_policy
port['qos_policy_id'] = test_policy.id
self.qos_ext.handle_port(self.context, port)
self.pull_mock.assert_called_once_with(
self.context, resources.QOS_POLICY,
port['qos_policy_id'])
def test_handle_diff_ports_same_policy_id(self):
port_obj1 = self._create_test_port_dict()
port_obj2 = self._create_test_port_dict()
self.qos_ext.handle_port(self.context, port_obj1)
self.pull_mock.assert_called_once_with(
self.context, resources.QOS_POLICY,
port_obj1['qos_policy_id'])
self.assertIsNotNone(
self.qos_ext.policy_map.get_port_policy(port_obj1))
self.assertIsNone(
self.qos_ext.policy_map.get_port_policy(port_obj2))
self.qos_ext.resource_rpc.pull.reset_mock()
self.qos_ext.handle_port(self.context, port_obj2)
self.assertFalse(self.pull_mock.called)
self.assertIsNotNone(
self.qos_ext.policy_map.get_port_policy(port_obj2))
self.assertEqual(
self.qos_ext.policy_map.get_port_policy(port_obj1),
self.qos_ext.policy_map.get_port_policy(port_obj2))
def test_delete_known_port(self):
port = self._create_test_port_dict()
self.qos_ext.handle_port(self.context, port)
self.qos_ext.qos_driver.reset_mock()
self.qos_ext.delete_port(self.context, port)
self.qos_ext.qos_driver.delete.assert_called_with(port)
self.assertIsNone(self.qos_ext.policy_map.get_port_policy(port))
def test_delete_unknown_port(self):
port = self._create_test_port_dict()
self.qos_ext.delete_port(self.context, port)
self.assertTrue(self.qos_ext.qos_driver.delete.called)
self.assertIsNone(self.qos_ext.policy_map.get_port_policy(port))
def test__handle_notification_ignores_all_event_types_except_updated(self):
with mock.patch.object(self.qos_ext,
'_process_update_policy') as update_mock:
for event_type in set(events.VALID) - {events.UPDATED}:
self.qos_ext._handle_notification(mock.Mock(), 'QOS',
object(), event_type)
self.assertFalse(update_mock.called)
def test__handle_notification_passes_update_events(self):
with mock.patch.object(self.qos_ext,
'_process_update_policy') as update_mock:
policy_obj = mock.Mock()
self.qos_ext._handle_notification(mock.Mock(), 'QOS',
[policy_obj], events.UPDATED)
update_mock.assert_called_with(policy_obj)
def test__process_update_policy(self):
port1 = self._create_test_port_dict(qos_policy_id=TEST_POLICY.id)
port2 = self._create_test_port_dict(qos_policy_id=TEST_POLICY2.id)
self.qos_ext.policy_map.set_port_policy(port1, TEST_POLICY)
self.qos_ext.policy_map.set_port_policy(port2, TEST_POLICY2)
self.qos_ext._policy_rules_modified = mock.Mock(return_value=True)
policy_obj = mock.Mock()
policy_obj.id = port1['qos_policy_id']
self.qos_ext._process_update_policy(policy_obj)
self.qos_ext.qos_driver.update.assert_called_with(port1, policy_obj)
self.qos_ext.qos_driver.update.reset_mock()
policy_obj.id = port2['qos_policy_id']
self.qos_ext._process_update_policy(policy_obj)
self.qos_ext.qos_driver.update.assert_called_with(port2, policy_obj)
def test__process_update_policy_descr_not_propagated_into_driver(self):
port = self._create_test_port_dict(qos_policy_id=TEST_POLICY.id)
self.qos_ext.policy_map.set_port_policy(port, TEST_POLICY)
self.qos_ext._policy_rules_modified = mock.Mock(return_value=False)
self.qos_ext._process_update_policy(TEST_POLICY_DESCR)
self.qos_ext._policy_rules_modified.assert_called_with(TEST_POLICY,
TEST_POLICY_DESCR)
self.assertFalse(self.qos_ext.qos_driver.delete.called)
self.assertFalse(self.qos_ext.qos_driver.update.called)
self.assertEqual(TEST_POLICY_DESCR,
self.qos_ext.policy_map.get_policy(TEST_POLICY.id))
def test__process_update_policy_not_known(self):
self.qos_ext._policy_rules_modified = mock.Mock()
self.qos_ext._process_update_policy(TEST_POLICY_DESCR)
self.assertFalse(self.qos_ext._policy_rules_modified.called)
self.assertFalse(self.qos_ext.qos_driver.delete.called)
self.assertFalse(self.qos_ext.qos_driver.update.called)
self.assertIsNone(self.qos_ext.policy_map.get_policy(
TEST_POLICY_DESCR.id))
def test__process_reset_port(self):
port1 = self._create_test_port_dict(qos_policy_id=TEST_POLICY.id)
port2 = self._create_test_port_dict(qos_policy_id=TEST_POLICY2.id)
self.qos_ext.policy_map.set_port_policy(port1, TEST_POLICY)
self.qos_ext.policy_map.set_port_policy(port2, TEST_POLICY2)
self.qos_ext._process_reset_port(port1)
self.qos_ext.qos_driver.delete.assert_called_with(port1)
self.assertIsNone(self.qos_ext.policy_map.get_port_policy(port1))
self.assertIsNotNone(self.qos_ext.policy_map.get_port_policy(port2))
self.qos_ext.qos_driver.delete.reset_mock()
self.qos_ext._process_reset_port(port2)
self.qos_ext.qos_driver.delete.assert_called_with(port2)
self.assertIsNone(self.qos_ext.policy_map.get_port_policy(port2))
class QosExtensionInitializeTestCase(QosExtensionBaseTestCase):
@mock.patch.object(registry, 'register')
@mock.patch.object(resources_rpc, 'ResourcesPushRpcCallback')
def test_initialize_subscribed_to_rpc(self, rpc_mock, subscribe_mock):
self.qos_ext.initialize(
self.connection, ovs_constants.EXTENSION_DRIVER_TYPE)
self.connection.create_consumer.assert_has_calls(
[mock.call(
resources_rpc.resource_type_versioned_topic(resource_type),
[rpc_mock()],
fanout=True)
for resource_type in self.qos_ext.SUPPORTED_RESOURCE_TYPES]
)
subscribe_mock.assert_called_with(mock.ANY, resources.QOS_POLICY)
class QosExtensionReflushRulesTestCase(QosExtensionBaseTestCase):
def setUp(self):
super(QosExtensionReflushRulesTestCase, self).setUp()
self.qos_ext.initialize(
self.connection, ovs_constants.EXTENSION_DRIVER_TYPE)
self.pull_mock = mock.patch.object(
self.qos_ext.resource_rpc, 'pull',
return_value=TEST_POLICY).start()
self.policy = policy.QosPolicy(**BASE_TEST_POLICY)
self.rule = (
rule.QosBandwidthLimitRule(context=None, id=FAKE_RULE_ID,
qos_policy_id=self.policy.id,
max_kbps=100, max_burst_kbps=10))
self.policy.rules = [self.rule]
self.port = {'port_id': uuidutils.generate_uuid(),
'qos_policy_id': TEST_POLICY.id}
self.new_policy = policy.QosPolicy(description='descr',
**BASE_TEST_POLICY)
def test_is_reflush_required_change_policy_descr(self):
self.qos_ext.policy_map.set_port_policy(self.port, self.policy)
self.new_policy.rules = [self.rule]
self.assertFalse(self.qos_ext._policy_rules_modified(self.policy,
self.new_policy))
def test_is_reflush_required_change_policy_rule(self):
self.qos_ext.policy_map.set_port_policy(self.port, self.policy)
updated_rule = (rule.QosBandwidthLimitRule(context=None,
id=FAKE_RULE_ID,
qos_policy_id=self.policy.id,
max_kbps=200,
max_burst_kbps=20))
self.new_policy.rules = [updated_rule]
self.assertTrue(self.qos_ext._policy_rules_modified(self.policy,
self.new_policy))
def test_is_reflush_required_remove_rules(self):
self.qos_ext.policy_map.set_port_policy(self.port, self.policy)
self.new_policy.rules = []
self.assertTrue(self.qos_ext._policy_rules_modified(self.policy,
self.new_policy))
def test_is_reflush_required_add_rules(self):
self.qos_ext.policy_map.set_port_policy(self.port, self.policy)
self.new_policy.rules = [self.rule]
fake_rule = QosFakeRule(context=None, id=REALLY_FAKE_RULE_ID,
qos_policy_id=self.policy.id)
self.new_policy.rules.append(fake_rule)
self.assertTrue(self.qos_ext._policy_rules_modified(self.policy,
self.new_policy))
class PortPolicyMapTestCase(base.BaseTestCase):
def setUp(self):
super(PortPolicyMapTestCase, self).setUp()
self.policy_map = qos.PortPolicyMap()
def test_update_policy(self):
self.policy_map.update_policy(TEST_POLICY)
self.assertEqual(TEST_POLICY,
self.policy_map.known_policies[TEST_POLICY.id])
def _set_ports(self):
self.policy_map.set_port_policy(TEST_PORT, TEST_POLICY)
self.policy_map.set_port_policy(TEST_PORT2, TEST_POLICY2)
def test_set_port_policy(self):
self._set_ports()
self.assertEqual(TEST_POLICY,
self.policy_map.known_policies[TEST_POLICY.id])
self.assertIn(TEST_PORT['port_id'],
self.policy_map.qos_policy_ports[TEST_POLICY.id])
def test_get_port_policy(self):
self._set_ports()
self.assertEqual(TEST_POLICY,
self.policy_map.get_port_policy(TEST_PORT))
self.assertEqual(TEST_POLICY2,
self.policy_map.get_port_policy(TEST_PORT2))
def test_get_ports(self):
self._set_ports()
self.assertEqual([TEST_PORT],
list(self.policy_map.get_ports(TEST_POLICY)))
self.assertEqual([TEST_PORT2],
list(self.policy_map.get_ports(TEST_POLICY2)))
def test_clean_by_port(self):
self._set_ports()
self.policy_map.clean_by_port(TEST_PORT)
self.assertNotIn(TEST_POLICY.id, self.policy_map.known_policies)
self.assertNotIn(TEST_PORT['port_id'], self.policy_map.port_policies)
self.assertIn(TEST_POLICY2.id, self.policy_map.known_policies)
def test_clean_by_port_for_unknown_port(self):
self.policy_map._clean_policy_info = mock.Mock()
self.policy_map.clean_by_port(TEST_PORT)
self.policy_map._clean_policy_info.assert_not_called()
def test_has_policy_changed(self):
self._set_ports()
self.assertTrue(
self.policy_map.has_policy_changed(TEST_PORT, 'a_new_policy_id'))
self.assertFalse(
self.policy_map.has_policy_changed(TEST_PORT, TEST_POLICY.id))
|
{
"content_hash": "06b90439e408a980c8e2ec7123e5e72b",
"timestamp": "",
"source": "github",
"line_count": 559,
"max_line_length": 79,
"avg_line_length": 43.0304114490161,
"alnum_prop": 0.6166541947285274,
"repo_name": "openstack/neutron",
"id": "d5c6d4d52680c19753a839463939d4172e26678c",
"size": "24700",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/agent/l2/extensions/test_qos.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "2773"
},
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "15932611"
},
{
"name": "Ruby",
"bytes": "1257"
},
{
"name": "Shell",
"bytes": "83270"
}
],
"symlink_target": ""
}
|
"""
Takes m8 blast files and generates a table of taxon hit counts for the
given rank. Columns are input files and rows are taxa. If multiple ranks
given (the default), multiple output files are produced, each with the
rank name appended to the output file name.
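
A typical invocation might look like (file names illustrative):
    count_taxa.py -r genus -r family -o counts.tsv sample1.m8 sample2.m8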
"""
import sys
import argparse
import logging
from urllib.parse import unquote_plus
from edl.taxon import ranks, getAncestorClosestToRank
from edl.hits import add_count_arguments, add_weight_arguments, \
loadSequenceWeights, add_taxon_arguments, readMaps, \
countIterHits, parseM8FileIter, FilterParams, getHitTranslator, \
ACCS
from edl.util import add_universal_arguments, setup_logging, \
checkNoneOption, passThrough
ORG_RANK = 'organism'
def main():
"""" Set up the CLI """
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("input_files", nargs="+",
default=[],
metavar="INFILE",
help="List of hit tables to process")
parser.add_argument("-o", "--outfile", dest="outfile",
metavar="OUTFILE",
help="Write count table to OUTFILE")
parser.add_argument("-r", "--rank", dest="ranks", default=None,
metavar="RANK", action="append",
help=""" Rank(s) to collect counts on. Use flag
multiple
times to specify multiple ranks. If multiple values
given, one table produced for each with rank name
appended to file name. Defaults to all major ranks
between phylum and species. Corresponds to rank names
in nodes.dmp. To see list run:
'cut -f5 nodes.dmp | uniq | sort | uniq'
in ncbi tax dir. Will also accept 'organism' to mean
no rank (ie, just the organism name).""")
parser.add_argument(
"-s",
"--collapseToDomain",
default=False,
action="store_true",
help="Collapse all taxa below given rank down to "
"superkingdom/domain. EG: in the genus output, anything "
"assigned to Cyanobactia, will be lumped in with all "
"other bacteria")
parser.add_argument(
"--proportional",
dest="proportional",
default=False,
action="store_true",
help="""When using tophit or toporg, redistribute proportionally
instead of winner take all""")
parser.add_argument(
"-R",
"--printRank",
dest="printRanks",
action="append",
help="Include indeicated rank(s) in lineage of printed taxa. "
"Will be ignored if beyond the rank of the taxa "
"(IE We can't include species if the taxon being counted "
"is genus)")
# option for deconvoluting clusters or assemblies
add_weight_arguments(parser, multiple=True)
# cutoff options
add_count_arguments(parser)
# format, tax dir, and more
add_taxon_arguments(
parser,
choices={
'countMethod': (
'LCA',
'all',
'first',
'most',
'tophit',
'toporg',
'consensus')})
# log level and help
add_universal_arguments(parser)
arguments = parser.parse_args()
setup_logging(arguments)
if arguments.proportional and \
arguments.countMethod not in ['tophit', 'toporg']:
parser.error("--proportinal only has meaning "
"if using tophit or toporg")
if len(arguments.input_files) == 0:
parser.error("Must supply at least one m8 file to parse")
# Handle the case where Galaxy tries to set None as a string
arguments.ranks = checkNoneOption(arguments.ranks)
arguments.printRanks = checkNoneOption(arguments.printRanks)
logging.info("Printing out ranks: %r", arguments.ranks)
# Set defaults and check for some conflicts
if arguments.ranks is None and arguments.taxdir is None:
# using hit names only
arguments.ranks = [ORG_RANK]
if arguments.printRanks is not None:
parser.error("Display ranks are not used without taxonomic info")
else:
if arguments.taxdir is None:
parser.error("Cannot select ranks without a taxonomy")
if arguments.ranks is None:
# set a default
arguments.ranks = [
'phylum',
'class',
'order',
'family',
'genus',
'species']
try:
# Make sure the rank lists make sense
arguments.ranks = cleanRanks(arguments.ranks)
if arguments.printRanks is not None:
arguments.printRanks = cleanRanks(arguments.printRanks)
except Exception as e:
parser.error(str(e))
# load weights file
sequenceWeights = loadSequenceWeights(arguments.weights)
# only print to stdout if there is a single rank
if len(arguments.ranks) > 1 and arguments.outfile is None:
parser.error("STDOUT only works if a single rank is chosen!")
# Because rank is used in parsing hits, we can only do multiple ranks for
# certain kinds of count methods
if len(arguments.ranks) > 1:
rank = None
if arguments.countMethod in ['consensus', 'most']:
parser.error(
"Using multiple ranks does not work with the 'consensus' "
"or 'most' counting methods. LCA should give the same "
"results as consensus. If you really want to do this, "
"use a bash loop:'for rank in phylum order genus; do "
"COMMAND -r ${rank}; done'")
else:
rank = arguments.ranks[0]
# load necessary maps
(taxonomy, hitStringMap) = readMaps(arguments)
# parse input files
fileCounts = {}
totals = {}
fileLabels = {}
sortedLabels = []
# Allow for file names to be preceded with TAG=
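    # e.g. "sampleA=hits/runA.m8" labels that column "sampleA" (names
    # illustrative); without a tag, the file name itself is the label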
for filename in arguments.input_files:
bits = filename.split("=", 1)
if len(bits) > 1:
(filetag, filename) = bits
else:
filetag = filename
fileLabels[filename] = filetag
# keep order so that column order matches arguments
sortedLabels.append(filetag)
fileCounts[filetag] = {}
totals[filetag] = 0
params = FilterParams.create_from_arguments(arguments)
if arguments.countMethod == 'tophit' or arguments.countMethod == 'toporg':
# Process all files at once and use overall abundance to pick best hits
from edl import redistribute
multifile = redistribute.multipleFileWrapper(fileLabels.keys())
if arguments.countMethod == 'tophit':
# don't give any taxonomy, just map to accessions for
# redistribution
readHits = redistribute.pickBestHitByAbundance(
multifile,
filterParams=params,
return_lines=False,
winnerTakeAll=not arguments.proportional,
parseStyle=arguments.parseStyle,
sequenceWeights=sequenceWeights)
            # define method to turn Hits into organisms
hitTranslator = getHitTranslator(parseStyle=arguments.parseStyle,
taxonomy=taxonomy,
hitStringMap=hitStringMap)
translateHit = lambda hit: hitTranslator.translateHit(hit=hit)[0]
else:
# translate to organism before finding most abundant
readHits = redistribute.pickBestHitByAbundance(
multifile,
filterParams=params,
return_lines=False,
return_translations=True,
winnerTakeAll=not arguments.proportional,
taxonomy=taxonomy,
hitStringMap=hitStringMap,
parseStyle=ACCS)
# Organisms will be returned, make translator trivial:
translateHit = passThrough
# use read->file mapping and hit translator to get file based counts
# from returned (read,Hit) pairs
increment = 1
for (read_name, hit) in readHits:
file_name, read_name = read_name.split("/", 1)
file_tag = fileLabels[unquote_plus(file_name)]
taxon = translateHit(hit)
taxcount = fileCounts[file_tag].setdefault(taxon, 0)
if sequenceWeights is not None:
increment = sequenceWeights.get(read_name, 1)
fileCounts[file_tag][taxon] = taxcount + increment
totals[file_tag] += increment
logging.debug(str(totals))
else:
# Original way, just process each file separately
for (filename, filetag) in fileLabels.items():
            infile = open(filename, 'r')  # universal newlines is the default in Python 3
hitIter = parseM8FileIter(infile,
hitStringMap,
params,
arguments.parseStyle,
arguments.countMethod,
taxonomy=taxonomy,
rank=rank)
(total, counts, hitMap) = \
countIterHits(hitIter,
allMethod=arguments.allMethod,
weights=sequenceWeights)
fileCounts[filetag] = counts
totals[filetag] = total
logging.info(
"parsed %d hits (%d unique) for %d reads from %s",
total, len(counts), len(hitMap), filename)
infile.close()
printCountTablesByRank(fileCounts, totals, sortedLabels, arguments)
def cleanRanks(rankList):
if ORG_RANK not in ranks:
ranks.insert(0, ORG_RANK)
# don't allow duplicates
rankList = list(set(rankList))
# translate domain to superkingdom
if 'domain' in rankList:
rankList.remove('domain')
rankList.append('superkingdom')
# make sure the ranks are real
badRanks = []
for rank in rankList:
if rank not in ranks:
badRanks.append(rank)
if len(badRanks) > 0:
raise Exception("Unknown rank(s): %s" % (badRanks))
# return ranks in proper order
return sorted(rankList, key=ranks.index, reverse=True)
def printCountTablesByRank(fileCounts, totals, fileNames, options):
"""
    Create a new file for each rank with a tab-separated table of counts
"""
cutoff = options.cutoff
# create an output table for each requested rank
for rank in options.ranks:
# For each rank, try to force all counts to be at that rank
fileRankTotals = {}
rankCounts = {}
rankTaxa = {}
thresholds = {}
for (filename, counts) in fileCounts.items():
fileRankTotals[filename] = 0
thresholds[filename] = totals[filename] * cutoff
fileRankCounts = rankCounts.setdefault(filename, {})
fileTotal = 0
for taxon in counts.keys():
# get the counts from this node
taxonCount = counts[taxon]
fileTotal += taxonCount
# get parent taxon at the given rank
if taxon is None:
ranked = None
elif rank is None or rank == ORG_RANK:
ranked = taxon
else:
if options.collapseToDomain:
# If we are beyond this rank already, fall back to SK
fallback = taxon.getAncestorAtRank('superkingdom')
if fallback is None:
fallback = taxon.getRootNode()
else:
fallback = taxon
ranked = getAncestorClosestToRank(
taxon,
rank,
default=fallback,
useChildOfFirstRankedAncestor=not(
options.collapseToDomain))
if ranked is None:
# This shouldn't happen...
logging.warning(
"getAncestorClosestRoRank return None!")
# ...but if it doesn, leave unchanged
ranked = taxon
# update counts
fileRankCounts[ranked] = fileRankCounts.get(
ranked, 0) + taxonCount
rankTaxa[ranked] = True
logging.debug(
"File %s has %d hits (had %d)",
filename, fileTotal, totals[filename])
# logging.debug(repr(rankTaxa))
# logging.debug(repr(rankCounts))
if logging.getLogger().level <= logging.DEBUG:
for (filename, counts) in rankCounts.items():
logging.debug("File %s hs %d ranked counts",
filename, sum(counts.values()))
# apply cutoff
for taxon in list(rankTaxa.keys()):
# check to see if taxon is over cutoff in any file
over = False
for (filename, fileRankCount) in rankCounts.items():
frTaxonCount = fileRankCount.get(taxon, 0)
fileRankTotals[filename] += frTaxonCount
if frTaxonCount > thresholds[filename]:
over = True
if not over:
# this taxon is not over the cutoff for any file
rankTaxa.pop(taxon)
if taxon is not None:
if options.taxdir is None:
other = 'Other'
else:
other = taxon.getAncestorAtRank('superkingdom')
if other is None:
other = taxon.getRootNode()
else:
other = None
rankTaxa[other] = True
for (filename, fileRankCount) in rankCounts.items():
fileRankCount[other] = fileRankCount.get(
other, 0) + fileRankCount.pop(taxon, 0)
if logging.getLogger().level <= logging.DEBUG:
for (filename, counts) in rankCounts.items():
logging.debug("File %s hs %d ranked counts",
filename, sum(counts.values()))
missed = False
for taxa in counts.keys():
if taxa not in rankTaxa:
missed = True
logging.debug(
"Missing taxon %s has %d counts for %s",
taxa, counts[taxa], filename)
if not missed:
logging.debug(
"There are no missing taxa from %s",
filename)
logging.debug("Final file counts: %r", fileRankTotals)
# output file
if options.outfile is None:
outs = sys.stdout
else:
if len(options.ranks) > 1:
outfile = "%s.%s" % (options.outfile, rank)
else:
outfile = options.outfile
outs = open(outfile, 'w')
# write to file(s?)
# header
outs.write("Taxon\t%s\n" % ('\t'.join(fileNames)))
taxonFormatter = getTaxonFormatter(options.printRanks, rank)
for taxon in sorted(rankTaxa.keys(), key=taxonFormatter):
outs.write(taxonFormatter(taxon))
for filename in fileNames:
outs.write("\t")
outs.write(str(rankCounts[filename].get(taxon, 0)))
outs.write("\n")
# close out stream
if options.outfile is not None:
outs.close()
def getTaxonFormatter(displayedRanks, leafRank):
if displayedRanks is None:
return str
else:
return lambda t: formatTaxon(t, displayedRanks, leafRank)
def formatTaxon(taxon, displayedRanks, leafRank, delim=';'):
"""
Generates lineage using all display ranks that are less than the
    leaf rank. This is probably inefficient, as we have to figure out
which ranks to display for every item.
This method is also used by assign_taxa.py!
"""
if isinstance(taxon, list):
if len(taxon) == 0:
taxon = None
elif len(taxon) == 1:
taxon = taxon[0]
else:
raise Exception("taxon should not be a list:\n{}"
.format(repr(taxon)))
if taxon is None:
logging.debug("Taxon is None")
return 'None'
if taxon is taxon.getRootNode():
return str(taxon)
lineage = ""
logging.debug(
"Creating lineage with: %s, %s, %s",
taxon, displayedRanks, leafRank)
for rank in displayedRanks:
if ranks.index(rank) <= ranks.index(leafRank):
logging.debug(
"Rank of %s (%d) is less than %s (%d)",
rank, ranks.index(rank), leafRank, ranks.index(leafRank))
break
ancestor = taxon.getAncestorAtRank(rank)
if ancestor is taxon:
logging.debug(
"ancestor at %s of %s is %s",
taxon, rank, ancestor)
break
if ancestor is None:
ancestor = ""
lineage += str(ancestor) + delim
logging.debug("Lineage: %s", lineage)
lineage += str(taxon)
logging.debug("Lineage: %s", lineage)
return lineage
if __name__ == '__main__':
main()
|
{
"content_hash": "043239f28a0c324ddfa3e6fececaeb5c",
"timestamp": "",
"source": "github",
"line_count": 477,
"max_line_length": 79,
"avg_line_length": 37.43186582809224,
"alnum_prop": 0.5451134136096332,
"repo_name": "jmeppley/py-metagenomics",
"id": "2024894fd5392d9785ae2c8cbf8ea6ca3da230a9",
"size": "17878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "count_taxa.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "28823"
},
{
"name": "Perl",
"bytes": "4389"
},
{
"name": "Python",
"bytes": "426256"
},
{
"name": "Roff",
"bytes": "4605"
},
{
"name": "Shell",
"bytes": "66740"
}
],
"symlink_target": ""
}
|
"""
Inventory Management
A module to record inventories of items at a locations (sites),
including Warehouses, Offices, Shelters & Hospitals
"""
module = request.controller
resourcename = request.function
if not settings.has_module(module):
raise HTTP(404, body="Module disabled: %s" % module)
# -----------------------------------------------------------------------------
def index():
""" Module's Home Page """
return settings.customise_home(module, alt_function="index_alt")
# -----------------------------------------------------------------------------
def index_alt():
"""
Module homepage for non-Admin users when no CMS content found
"""
# Just redirect to the Warehouse Summary View
s3_redirect_default(URL(f="warehouse", args="summary"))
# -----------------------------------------------------------------------------
def index2():
"""
Alternative Application Home page
- custom View
"""
# Need CRUD String
table = s3db.table("cr_shelter", None)
module_name = settings.modules[module].name_nice
response.title = module_name
response.view = "inv/index.html"
if s3.debug:
# Start of TEST CODE for multiple dataTables,
        # this also required views/inv/index.html to be modified
from s3.s3data import S3DataTable
representation = request.extension
if representation == "html" or get_vars.id == "warehouse_list_1":
resource = s3db.resource("inv_warehouse")
totalrows = resource.count()
list_fields = ["id",
"name",
"organisation_id",
]
orderby = "inv_warehouse.name asc"
if representation == "aadata":
query, orderby, left = resource.datatable_filter(list_fields, get_vars)
if orderby is None:
orderby = default_orderby
start = int(get_vars.displayStart) if get_vars.displayStart else 0
limit = int(get_vars.pageLength) if get_vars.pageLength else s3.ROWSPERPAGE
data = resource.select(list_fields,
start=start,
limit=limit,
orderby=orderby,
count=True,
represent=True)
filteredrows = data["numrows"]
if totalrows is None:
totalrows = filteredrows
rfields = data["rfields"]
rows = data["rows"]
dt = S3DataTable(rfields, rows)
dt.defaultActionButtons(resource)
if representation == "html":
warehouses = dt.html(totalrows,
filteredrows,
"warehouse_list_1",
dt_ajax_url=URL(c="inv",
f="index2",
extension="aadata",
vars={"id":"warehouse_list_1"},
),
dt_group=2,
dt_searching="true",
)
else:
warehouse = dt.json(totalrows,
filteredrows,
"warehouse_list_1",
int(get_vars.draw),
)
return warehouse
# Second Table
if representation == "html" or get_vars.id == "inventory_list_1":
if "Adjust" in request.post_vars:
if request.post_vars.selected == "":
inventory = "Well you could have selected something :("
else:
inventory = "Adjustment not currently supported... :-) you selected the following items: %s" % request.post_vars.selected
else:
resource = s3db.resource("inv_inv_item")
totalrows = resource.count()
table = resource.table
stable = s3db.supply_item
list_fields = ["id",
"site_id",
"item_id$name",
"quantity",
"pack_value",
"total_value",
]
orderby = "inv_inv_item.site_id asc"
if representation == "aadata":
query, orderby, left = resource.datatable_filter(list_fields, get_vars)
if orderby is None:
orderby = default_orderby
site_list = {}
data = resource.select(list_fields,
limit=None,
orderby=orderby,
count=True)
filteredrows = data["numrows"]
if totalrows is None:
totalrows = filteredrows
rows = data["rows"]
for row in rows:
site_id = row["inv_inv_item.site_id"]
if site_id not in site_list:
site_list[site_id] = 1
else:
site_list[site_id] += 1
formatted_site_list = {}
repr = table.site_id.represent
for (key,value) in site_list.items():
formatted_site_list[str(repr(key))] = value
if isinstance(orderby, bool):
orderby = [table.site_id, stable.name, ~table.quantity]
start = int(get_vars.displayStart) if get_vars.displayStart else 0
limit = int(get_vars.pageLength) if get_vars.pageLength else s3.ROWSPERPAGE
data = resource.select(list_fields,
orderby=orderby,
start=start,
limit=limit,
represent=True)
rfields = data["rfields"]
rows = data["rows"]
dt = S3DataTable(rfields,
rows,
orderby=orderby,
)
custom_actions = [dict(label=str(T("Warehouse")),
_class="action-icon",
img="/%s/static/img/markers/gis_marker.image.Agri_Commercial_Food_Distribution_Center_S1.png" % appname,
url=URL(c="inv", f="warehouse",
args=["[id]", "update"]
)
),
]
dt.defaultActionButtons(resource, custom_actions)
if representation == "html":
rows = current.db(table.quantity<100.0).select(table.id, table.quantity)
errorList = []
warningList = []
alertList = []
for row in rows:
if row.quantity < 0.0:
errorList.append(row.id)
elif row.quantity == 0.0:
warningList.append(row.id)
else:
alertList.append(row.id)
inventory = dt.html(totalrows,
filteredrows,
"inventory_list_1",
dt_action_col=-1,
dt_ajax_url=URL(c="inv",
f="index2",
extension="aadata",
vars={"id":"inventory_list_1"},
),
dt_bulk_actions = "Adjust",
dt_group=[1,2],
dt_group_totals=[formatted_site_list],
dt_searching="true",
dt_styles = {"dtdisable": errorList,
"dtwarning": warningList,
"dtalert": alertList,
},
#dt_text_maximum_len = 10,
#dt_text_condense_len = 8,
#dt_group_space = "true",
dt_shrink_groups = "accordion",
#dt_shrink_groups = "individual",
)
s3.actions = None
elif representation == "aadata":
inventory = dt.json(totalrows,
filteredrows,
"inventory_list_1",
int(get_vars.draw),
dt_action_col=-1,
dt_bulk_actions = "Adjust",
dt_group_totals=[formatted_site_list],
)
return inventory
else:
# Probably not the way to do it.... but
s3db.configure("inv_inv_item",
list_fields=list_fields,
report_groupby="site_id",
pdf_groupby="site_id",
)
s3.filter = filter
r = s3_request("inv", "inv_item",
vars={"orderby" : orderby})
r.resource = resource
output = r(
pdf_groupby='site_id',
dt_group=1,
)
return output
# Third table
if representation == "html" or get_vars.id == "supply_list_1":
resource = s3db.resource("supply_item")
list_fields = ["id",
"name",
"um",
"model",
]
orderby = "inv_inv_item.site_id asc"
if representation == "aadata":
query, orderby, left = resource.datatable_filter(list_fields, get_vars)
if orderby is None:
orderby = default_orderby
data = resource.select(list_fields,
limit=None,
orderby=orderby,
count=True,
represent=True)
rows = data["rows"]
rfields = data["rfields"]
numrows = data["numrows"]
dt = S3DataTable(rfields, rows)
dt.defaultActionButtons(resource)
if representation == "html":
supply_items = dt.html(numrows,
numrows,
"supply_list_1",
dt_action_col=1,
dt_ajax_url=URL(c="inv",
f="index2",
extension="aadata",
vars={"id": "supply_list_1"},
),
dt_pageLength=10,
)
else:
supply_items = dt.json(numrows,
numrows,
"supply_list_1",
int(get_vars.draw),
dt_action_col=1,
)
return supply_items
r = s3_request(prefix = "inv", name = "inv_item")
return dict(module_name=module_name,
warehouses = warehouses,
inventory = inventory,
supply_items = supply_items,
r = r,
)
# End of TEST CODE
return dict(module_name=module_name)
# -----------------------------------------------------------------------------
def warehouse():
"""
RESTful CRUD controller
"""
request_args = request.args
if "viewing" in get_vars:
viewing = get_vars.viewing
tn, id = viewing.split(".", 1)
if tn == "inv_warehouse":
request_args.insert(0, id)
# CRUD pre-process
def prep(r):
if r.component:
component_name = r.component_name
if component_name == "inv_item":
# Filter out items which are already in this inventory
s3db.inv_prep(r)
# Remove the Warehouse Name from the list_fields
list_fields = s3db.get_config("inv_inv_item", "list_fields")
try:
list_fields.remove("site_id")
s3db.configure("inv_inv_item",
list_fields = list_fields,
)
except:
pass
elif component_name == "recv":
# Filter out items which are already in this inventory
s3db.inv_prep(r)
# Configure which fields in inv_recv are readable/writable
# depending on status
recvtable = s3db.inv_recv
if r.component_id:
record = db(recvtable.id == r.component_id).select(recvtable.status,
limitby=(0, 1)
).first()
set_recv_attr(record.status)
else:
set_recv_attr(s3db.inv_ship_status["IN_PROCESS"])
recvtable.recv_ref.readable = False
if r.method and r.method != "read":
# Don't want to see in Create forms
recvtable.status.readable = False
elif component_name == "send":
# Filter out items which are already in this inventory
s3db.inv_prep(r)
elif component_name == "human_resource":
s3db.org_site_staff_config(r)
elif component_name == "req":
s3db.req_prep(r)
if r.method != "update" and r.method != "read":
# Hide fields which don't make sense in a Create form
# inc list_create (list_fields over-rides)
s3db.req_create_form_mods()
elif component_name == "asset":
# Default/Hide the Organisation & Site fields
record = r.record
atable = s3db.asset_asset
field = atable.organisation_id
field.default = record.organisation_id
field.readable = field.writable = False
field = atable.site_id
field.default = record.site_id
field.readable = field.writable = False
# Stay within Warehouse tab
s3db.configure("asset_asset",
create_next = None,
)
elif r.id:
r.table.obsolete.readable = r.table.obsolete.writable = True
# "show_obsolete" var option can be added (btn?) later to
# disable this filter
if r.method in [None, "list"] and \
not r.vars.get("show_obsolete", False):
r.resource.add_filter(db.inv_warehouse.obsolete != True)
return True
s3.prep = prep
# CRUD post-process
def postp(r, output):
if r.interactive and not r.component and r.method != "import":
if auth.s3_has_permission("read", "inv_inv_item"):
# Change Action buttons to open Stock Tab by default
read_url = URL(f="warehouse", args=["[id]", "inv_item"])
update_url = URL(f="warehouse", args=["[id]", "inv_item"])
s3_action_buttons(r,
read_url=read_url,
update_url=update_url)
else:
cname = r.component_name
if cname == "human_resource":
# Modify action button to open staff instead of human_resource
read_url = URL(c="hrm", f="staff", args=["[id]"])
update_url = URL(c="hrm", f="staff", args=["[id]", "update"])
s3_action_buttons(r, read_url=read_url,
#delete_url=delete_url,
update_url=update_url)
if "add_btn" in output:
del output["add_btn"]
return output
s3.postp = postp
if "extra_data" in get_vars:
resourcename = "inv_item"
else:
resourcename = "warehouse"
csv_stylesheet = "%s.xsl" % resourcename
if len(request_args) > 1 and request_args[1] in ("req", "send", "recv"):
# Sends/Receives should break out of Component Tabs
# To allow access to action buttons in inv_recv rheader
native = True
else:
native = False
output = s3_rest_controller(module, resourcename,
#hide_filter = {"inv_item": False,
# "_default": True,
# },
# Extra fields for CSV uploads:
#csv_extra_fields = [
# dict(label="Organisation",
# field=s3db.org_organisation_id(comment=None))
#]
csv_stylesheet = csv_stylesheet,
csv_template = resourcename,
native = native,
rheader = s3db.inv_rheader,
)
return output
# -----------------------------------------------------------------------------
def warehouse_type():
"""
RESTful CRUD controller
"""
return s3_rest_controller()
# -----------------------------------------------------------------------------
def supplier():
"""
Filtered version of the organisation() REST controller
"""
get_vars["organisation_type.name"] = "Supplier"
# Load model
table = s3db.org_organisation
# Modify CRUD Strings
s3.crud_strings.org_organisation = Storage(
label_create = T("Create Supplier"),
title_display = T("Supplier Details"),
title_list = T("Suppliers"),
title_update = T("Edit Supplier"),
title_upload = T("Import Suppliers"),
label_list_button = T("List Suppliers"),
label_delete_button = T("Delete Supplier"),
msg_record_created = T("Supplier added"),
msg_record_modified = T("Supplier updated"),
msg_record_deleted = T("Supplier deleted"),
msg_list_empty = T("No Suppliers currently registered")
)
# Open record in this controller after creation
s3db.configure("org_organisation",
create_next = URL(c="inv", f="supplier",
args = ["[id]", "read"]),
)
return s3db.org_organisation_controller()
# =============================================================================
def inv_item():
""" REST Controller """
# If this url has a viewing track items then redirect to track_movement
viewing = get_vars.get("viewing", None)
if viewing:
tn, id = viewing.split(".", 1)
if tn == "inv_track_item":
table = s3db.inv_track_item
record = db(table.id == id).select(table.item_id,
limitby=(0, 1)).first()
redirect(URL(c = "inv",
f = "track_movement",
args = [],
vars = {"viewing" : "%s.%s" % ("inv_inv_item", record.item_id)}
))
tablename = "inv_inv_item"
# Load model to be able to override CRUD string(s)
table = s3db[tablename]
# Limit site_id to sites the user has permissions for
auth.permitted_facilities(table=table,
error_msg=T("You do not have permission for any site to add an inventory item."))
s3.crud_strings[tablename].msg_list_empty = T("No Stock currently registered")
report = get_vars.get("report")
if report == "mon":
s3.crud_strings[tablename].update(dict(
title_list = T("Monetization Report"),
subtitle_list = T("Monetization Details"),
#msg_list_empty = T("No Stock currently registered"),
))
s3db.configure(tablename,
list_fields = ["id",
(T("Donor"), "supply_org_id"),
(T("Items/Description"), "item_id"),
(T("Quantity"), "quantity"),
(T("Unit"), "item_pack_id"),
(T("Unit Value"), "pack_value"),
(T("Total Value"), "total_value"),
(T("Remarks"), "comments"),
"status",
]
)
else:
s3db.configure(tablename,
insertable = settings.get_inv_direct_stock_edits(),
list_fields = ["id",
"site_id",
"item_id",
"item_id$code",
"item_id$item_category_id",
"quantity",
"pack_value",
#(T("Total Value"), "total_value"),
]
)
if len(request.args) > 1 and request.args[1] == "track_item":
# remove CRUD generated buttons in the tabs
s3db.configure("inv_track_item",
create=False,
listadd=False,
editable=False,
deletable=False,
)
else:
s3.filter = (table.quantity != 0)
def prep(r):
if r.method != "report":
s3.dataTable_group = 1
return True
s3.prep = prep
# Import pre-process
def import_prep(data):
"""
Deletes all Stock records of the organisation/branch
before processing a new data import
"""
resource, tree = data
xml = current.xml
tag = xml.TAG
att = xml.ATTRIBUTE
if s3.importerReplace:
if tree is not None:
root = tree.getroot()
expr = "/%s/%s[@%s='org_organisation']/%s[@%s='name']" % \
(tag.root, tag.resource, att.name, tag.data, att.field)
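                # With the standard S3XML tag names this resolves to
                # something like (assumed, for illustration):
                #   /s3xml/resource[@name='org_organisation']/data[@field='name']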
orgs = root.xpath(expr)
otable = s3db.org_organisation
stable = s3db.org_site
itable = s3db.inv_inv_item
for org in orgs:
org_name = org.get("value", None) or org.text
if org_name:
try:
org_name = json.loads(xml.xml_decode(org_name))
except:
pass
if org_name:
query = (otable.name == org_name) & \
(stable.organisation_id == otable.id) & \
(itable.site_id == stable.id)
resource = s3db.resource("inv_inv_item", filter=query)
# Use cascade=True so that the deletion gets
# rolled back if the import fails:
resource.delete(format="xml", cascade=True)
resource.skip_import = True
s3.import_prep = import_prep
# Upload for configuration (add replace option)
s3.importerPrep = lambda: dict(ReplaceOption=T("Remove existing data before import"))
output = s3_rest_controller(#csv_extra_fields = [dict(label="Organisation",
# field=s3db.org_organisation_id(comment=None))
# ],
pdf_paper_alignment = "Landscape",
pdf_table_autogrow = "B",
pdf_groupby = "site_id, item_id",
pdf_orderby = "expiry_date, supply_org_id",
rheader=s3db.inv_rheader,
)
if "add_btn" in output and not settings.get_inv_direct_stock_edits():
del output["add_btn"]
return output
# -----------------------------------------------------------------------------
def track_movement():
""" REST Controller """
table = s3db.inv_track_item
s3db.configure("inv_track_item",
create = False,
deletable = False,
editable = False,
listadd = False,
)
def prep(r):
if r.interactive:
if "viewing" in get_vars:
dummy, item_id = get_vars.viewing.split(".")
if item_id != "None":
query = (table.send_inv_item_id == item_id ) | \
(table.recv_inv_item_id == item_id)
r.resource.add_filter(query)
return True
s3.prep = prep
output = s3_rest_controller("inv", "track_item",
rheader = s3db.inv_rheader,
)
if "add_btn" in output:
del output["add_btn"]
return output
# -----------------------------------------------------------------------------
def inv_item_quantity():
"""
"""
try:
item_id = request.args[0]
except:
raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))
table = s3db.inv_inv_item
ptable = db.supply_item_pack
query = (table.id == item_id) & \
(table.item_pack_id == ptable.id)
record = db(query).select(table.quantity,
ptable.quantity,
limitby=(0, 1)).first()
d = {"iquantity" : record.inv_inv_item.quantity,
"pquantity" : record.supply_item_pack.quantity,
}
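    # e.g. {"iquantity": 40.0, "pquantity": 12.0} (values illustrative)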
output = json.dumps(d)
response.headers["Content-Type"] = "application/json"
return output
# -----------------------------------------------------------------------------
def inv_item_packs():
"""
Called by S3OptionsFilter to provide the pack options for a
particular Item
"""
try:
item_id = request.args[0]
except:
raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))
table = s3db.inv_inv_item
ptable = db.supply_item_pack
query = (table.id == item_id) & \
(table.item_id == ptable.item_id)
records = db(query).select(ptable.id,
ptable.name,
ptable.quantity)
output = records.json()
response.headers["Content-Type"] = "application/json"
return output
# =============================================================================
def send():
""" RESTful CRUD controller """
return s3db.inv_send_controller()
# ==============================================================================
def send_commit():
"""
Send a Shipment containing all items in a Commitment
"""
return s3db.req_send_commit()
# -----------------------------------------------------------------------------
def send_process():
""" Process a Shipment """
return s3db.inv_send_process()
# -----------------------------------------------------------------------------
def send_returns():
"""
This will cancel a shipment that has been sent
@todo need to roll back commitments
"""
try:
send_id = request.args[0]
except:
redirect(f="send")
stable = s3db.inv_send
if not auth.s3_has_permission("update", stable, record_id=send_id):
session.error = T("You do not have permission to return this sent shipment.")
send_record = db(stable.id == send_id).select(stable.status,
limitby=(0, 1)
).first()
inv_ship_status = s3db.inv_ship_status
if send_record.status == inv_ship_status["IN_PROCESS"]:
session.error = T("This shipment has not been sent - it cannot be returned because it can still be edited.")
if session.error:
redirect(URL(c="inv", f="send",
args=[send_id]))
rtable = s3db.inv_recv
tracktable = s3db.inv_track_item
# Okay no error so far, change the status to Returning
stable[send_id] = dict(status = inv_ship_status["RETURNING"],
owned_by_user = None,
owned_by_group = ADMIN)
recv_row = db(tracktable.send_id == send_id).select(tracktable.recv_id,
limitby = (0, 1)).first()
if recv_row:
recv_id = recv_row.recv_id
rtable[recv_id] = dict(date = request.utcnow,
status = inv_ship_status["RETURNING"],
owned_by_user = None,
owned_by_group = ADMIN)
# Set all track items to status of returning
db(tracktable.send_id == send_id).update(status = s3db.inv_tracking_status["RETURNING"])
session.confirmation = T("Sent Shipment has returned, indicate how many items will be returned to Warehouse.")
redirect(URL(c="inv", f="send",
args=[send_id, "track_item"]))
# -----------------------------------------------------------------------------
def return_process():
"""
Return some stock from a shipment back into the warehouse
"""
try:
send_id = request.args[0]
except:
redirect(f="send")
stable = s3db.inv_send
if not auth.s3_has_permission("update", stable, record_id=send_id):
session.error = T("You do not have permission to return this sent shipment.")
send_record = db(stable.id == send_id).select(stable.status,
limitby=(0, 1)
).first()
inv_ship_status = s3db.inv_ship_status
if send_record.status != inv_ship_status["RETURNING"]:
session.error = T("This shipment has not been returned.")
if session.error:
redirect(URL(c="inv", f="send",
args=[send_id]))
invtable = s3db.inv_inv_item
rtable = s3db.inv_recv
tracktable = s3db.inv_track_item
# Okay no error so far, let's move the goods back into the warehouse
# and then change the status to received
# Update Receive record & lock for editing
# Move each item to the site
track_rows = db(tracktable.send_id == send_id).select(tracktable.id,
tracktable.quantity,
tracktable.return_quantity,
tracktable.send_inv_item_id,
)
for track_item in track_rows:
send_inv_id = track_item.send_inv_item_id
return_qnty = track_item.return_quantity
if return_qnty is None:
return_qnty = 0
# update the receive quantity in the tracking record
tracktable[track_item.id] = dict(recv_quantity = track_item.quantity - return_qnty)
if return_qnty:
db(invtable.id == send_inv_id).update(quantity = invtable.quantity + return_qnty)
stable[send_id] = dict(status = inv_ship_status["RECEIVED"],
owned_by_user = None,
owned_by_group = ADMIN)
recv_row = db(tracktable.send_id == send_id).select(tracktable.recv_id,
limitby = (0, 1)).first()
if recv_row:
recv_id = recv_row.recv_id
rtable[recv_id] = dict(date = request.utcnow,
status = inv_ship_status["RECEIVED"],
owned_by_user = None,
owned_by_group = ADMIN)
# Change the status for all track items in this shipment to Received
db(tracktable.send_id == send_id).update(status = s3db.inv_tracking_status["RECEIVED"])
redirect(URL(f = "send",
args = [send_id]))
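# Worked example of the accounting above (illustrative numbers only):
# a track item sent with quantity=10 and return_quantity=3 ends up with
# recv_quantity=7, while the sending warehouse's inv_item quantity is
# incremented by the 3 returned units.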
# -----------------------------------------------------------------------------
def send_cancel():
"""
This will cancel a shipment that has been sent
@todo need to roll back commitments
"""
try:
send_id = request.args[0]
except:
redirect(f="send")
stable = s3db.inv_send
if not auth.s3_has_permission("delete", stable, record_id=send_id):
session.error = T("You do not have permission to cancel this sent shipment.")
send_record = db(stable.id == send_id).select(stable.status,
limitby=(0, 1)).first()
inv_ship_status = s3db.inv_ship_status
if send_record.status != inv_ship_status["SENT"]:
session.error = T("This shipment has not been sent - it has NOT been canceled because it can still be edited.")
if session.error:
redirect(URL(c="inv", f="send",
args=[send_id]))
rtable = s3db.inv_recv
tracktable = s3db.inv_track_item
# Okay no error so far, let's cancel the shipment
# Change the send and recv status to cancelled
db(stable.id == send_id).update(status = inv_ship_status["CANCEL"],
owned_by_user = None,
owned_by_group = ADMIN)
recv_row = db(tracktable.send_id == send_id).select(tracktable.recv_id,
limitby = (0, 1)).first()
if recv_row:
recv_id = recv_row.recv_id
db(rtable.id == recv_id).update(date = request.utcnow,
status = inv_ship_status["CANCEL"],
owned_by_user = None,
owned_by_group = ADMIN)
# Change the track items status to canceled and then delete them
# If they are linked to a request then the in transit total will also be reduced
# Records can only be deleted if the status is In Process (or preparing)
# so change the status before we delete
db(tracktable.send_id == send_id).update(status = s3db.inv_tracking_status["IN_PROCESS"])
track_rows = db(tracktable.send_id == send_id).select(tracktable.id)
for track_item in track_rows:
s3.inv_track_item_deleting(track_item.id)
# Now change the status to (cancelled)
db(tracktable.send_id == send_id).update(status = s3db.inv_tracking_status["CANCEL"])
session.confirmation = T("Sent Shipment canceled and items returned to Warehouse")
redirect(URL(f = "send",
args = [send_id]))
# =============================================================================
def set_recv_attr(status):
"""
Set field attributes for inv_recv table
"""
recvtable = s3db.inv_recv
ship_status = s3db.inv_ship_status
recvtable.sender_id.readable = recvtable.sender_id.writable = False
recvtable.grn_status.readable = recvtable.grn_status.writable = False
recvtable.cert_status.readable = recvtable.cert_status.writable = False
recvtable.eta.readable = False
recvtable.req_ref.writable = True
if status == ship_status["IN_PROCESS"]:
recvtable.send_ref.writable = True
recvtable.recv_ref.readable = False
recvtable.sender_id.readable = False
else:
# Make all fields writable False
for field in recvtable.fields:
recvtable[field].writable = False
if status == ship_status["SENT"]:
recvtable.date.writable = True
recvtable.recipient_id.readable = recvtable.recipient_id.writable = True
recvtable.comments.writable = True
# -----------------------------------------------------------------------------
def recv():
""" RESTful CRUD controller """
recvtable = s3db.inv_recv
# Limit site_id to sites the user has permissions for
if settings.get_inv_shipment_name() == "order":
error_msg = T("You do not have permission for any facility to add an order.")
else:
error_msg = T("You do not have permission for any facility to receive a shipment.")
auth.permitted_facilities(table=recvtable, error_msg=error_msg)
tracktable = s3db.inv_track_item
atable = s3db.inv_adj_item
# The inv_recv record might be created when the shipment is sent and so it
# might not have the recipient identified. If it is null then set it to
# the person who is logged in (the default)
id = request.args(0)
if id and id.isdigit():
record = db(recvtable.id == id).select(recvtable.recipient_id,
limitby=(0, 1)).first()
try:
if record.recipient_id is None:
db(recvtable.id == id).update(recipient_id=auth.s3_logged_in_person())
except:
pass
status = s3db.inv_ship_status
SHIP_STATUS_IN_PROCESS = status["IN_PROCESS"]
SHIP_STATUS_SENT = status["SENT"]
SHIP_STATUS_RECEIVED = status["RECEIVED"]
SHIP_STATUS_CANCEL = status["CANCEL"]
status = s3db.inv_tracking_status
TRACK_STATUS_UNKNOWN = status["UNKNOWN"]
TRACK_STATUS_PREPARING = status["IN_PROCESS"]
TRACK_STATUS_TRANSIT = status["SENT"]
TRACK_STATUS_UNLOADING = status["UNLOADING"]
TRACK_STATUS_ARRIVED = status["RECEIVED"]
TRACK_STATUS_CANCELED = status["CANCEL"]
def set_track_attr(status):
# By default Make all fields writable False
for field in tracktable.fields:
tracktable[field].writable = False
# Hide some fields
tracktable.send_id.readable = False
tracktable.recv_id.readable = False
tracktable.bin.readable = False
tracktable.adj_item_id.readable = False
tracktable.recv_quantity.readable = True
if status == TRACK_STATUS_PREPARING:
# Show some fields
tracktable.item_source_no.writable = True
tracktable.item_id.writable = True
tracktable.item_pack_id.writable = True
tracktable.quantity.writable = True
tracktable.currency.writable = True
tracktable.pack_value.writable = True
tracktable.expiry_date.writable = True
tracktable.recv_bin.writable = True
tracktable.owner_org_id.writable = True
tracktable.supply_org_id.writable = True
tracktable.inv_item_status.writable = True
tracktable.comments.writable = True
tracktable.recv_quantity.readable = False
# Hide some fields
tracktable.send_inv_item_id.readable = False
# Change some labels - NO - use consistent labels
#tracktable.quantity.label = T("Quantity Delivered")
tracktable.recv_bin.label = T("Bin")
elif status == TRACK_STATUS_TRANSIT:
# Hide the values that will be copied from the inv_inv_item record
tracktable.send_inv_item_id.readable = False
tracktable.send_inv_item_id.writable = False
tracktable.item_source_no.readable = True
tracktable.item_source_no.writable = False
# Display the values that can only be entered on create
tracktable.recv_quantity.writable = True
tracktable.recv_bin.readable = True
tracktable.recv_bin.writable = True
tracktable.comments.writable = True
# This is a received purchase so change the label to reflect this - NO - use consistent labels
#tracktable.quantity.label = T("Quantity Delivered")
elif status == TRACK_STATUS_ARRIVED:
tracktable.item_source_no.readable = True
tracktable.item_source_no.writable = False
tracktable.item_id.writable = False
tracktable.send_inv_item_id.writable = False
tracktable.item_pack_id.writable = False
tracktable.quantity.writable = False
tracktable.currency.writable = False
tracktable.pack_value.writable = False
tracktable.expiry_date.writable = False
tracktable.owner_org_id.writable = False
tracktable.supply_org_id.writable = False
tracktable.recv_bin.readable = True
tracktable.recv_bin.writable = True
def prep(r):
record = r.record
if record and \
record.status not in (SHIP_STATUS_IN_PROCESS, SHIP_STATUS_SENT):
# Now that the shipment has been sent
# lock the record so that it can't be meddled with
s3db.configure("inv_recv",
create = False,
deletable = False,
editable = False,
listadd = False,
)
component = r.component
if record and component and component.name == "track_item":
# Can only create or delete track items for a recv record
# if the status is preparing:
if r.method == "create" or r.method == "delete":
if record.status != SHIP_STATUS_IN_PROCESS:
return False
# Configure which fields in track_item are readable/writable
# depending on status:
if r.component_id:
track_record = db(tracktable.id == r.component_id).select(tracktable.status,
limitby=(0, 1)
).first()
set_track_attr(track_record.status)
else:
set_track_attr(TRACK_STATUS_PREPARING)
tracktable.status.readable = False
# Adjust CRUD strings
if record.status == SHIP_STATUS_IN_PROCESS:
s3.crud_strings.inv_recv.title_update = \
s3.crud_strings.inv_recv.title_display = T("Process Received Shipment")
# Default the Supplier/Donor to the Org sending the shipment
tracktable.supply_org_id.default = record.organisation_id
else:
# Configure which fields in inv_recv are readable/writable
# depending on status
if r.id:
record = db(recvtable.id == r.id).select(recvtable.status,
limitby=(0, 1)
).first()
set_recv_attr(record.status)
else:
set_recv_attr(SHIP_STATUS_IN_PROCESS)
recvtable.recv_ref.readable = False
if r.method and r.method != "read":
# Don't want to see in Create forms
recvtable.status.readable = False
return True
s3.prep = prep
if len(request.args) > 1 and request.args[1] == "track_item":
record = db(recvtable.id == request.args[0]).select(recvtable.status,
limitby=(0, 1)
).first()
status = record.status if record else None
if status == SHIP_STATUS_SENT:
list_fields = ["id",
"status",
"item_id",
"item_pack_id",
"quantity",
"currency",
"pack_value",
"recv_quantity",
"recv_bin",
"owner_org_id",
"supply_org_id",
]
s3db.configure("inv_track_item",
# Remove CRUD generated buttons in the tabs
create = False,
deletable = False,
editable = True,
listadd = False,
list_fields = list_fields,
)
elif status:
# Remove CRUD generated buttons in the tabs
s3db.configure("inv_track_item",
create = False,
deletable = False,
editable = False,
listadd = False,
)
output = s3_rest_controller(rheader = s3db.inv_recv_rheader)
return output
# -----------------------------------------------------------------------------
def req_items_for_inv(site_id, quantity_type):
"""
used by recv_process & send_process
returns a dict of unique req items (with min db.req_req.date_required | db.req_req.date)
key = item_id
@param site_id: The inventory to find the req_items from
@param quantity_type: str ("commit", "transit" or "fulfil") The
quantity type which will be used to determine if this item is still outstanding
"""
if not settings.has_module("req"):
return Storage()
table = s3db.req_req
itable = s3db.req_req_item
query = (table.site_id == site_id) & \
(table.id == itable.req_id) & \
(itable.item_pack_id != None) & \
(itable["quantity_%s" % quantity_type] < itable.quantity) & \
(table.cancel == False) & \
(table.deleted == False) & \
(itable.deleted == False)
req_items = db(query).select(itable.id,
itable.req_id,
itable.item_id,
itable.quantity,
itable["quantity_%s" % quantity_type],
itable.item_pack_id,
orderby = table.date_required | table.date,
#groupby = itable.item_id
)
# Because groupby doesn't follow the orderby, this will remove any
# duplicate req_item, using the first record according to the orderby
# req_items = req_items.as_dict( key = "req_req_item.item_id") <- doesn't work
# @todo: web2py Rows.as_dict function could be extended to enable this functionality instead
req_item_ids = []
unique_req_items = Storage()
for req_item in req_items:
if req_item.item_id not in req_item_ids:
# This item is not already in the dict
unique_req_items[req_item.item_id] = Storage( req_item.as_dict() )
req_item_ids.append(req_item.item_id)
return unique_req_items
# -----------------------------------------------------------------------------
def req_item_in_shipment(shipment_item,
shipment_type,
req_items,
):
"""
Checks if a shipment item is in a request and updates req_item
and the shipment.
"""
shipment_item_table = "inv_%s_item" % shipment_type
try:
item_id = shipment_item[shipment_item_table].item_id
except:
item_id = shipment_item.inv_inv_item.item_id
# Check for req_items
if item_id in req_items:
shipment_to_req_type = dict(recv = "fulfil",
send = "transit")
quantity_req_type = "quantity_%s" % shipment_to_req_type[shipment_type]
# This item has been requested from this inv
req_item = req_items[item_id]
req_item_id = req_item.id
# Update the req quantity
# convert the shipment item's quantity into the req_item's quantity (transit or fulfil, according to pack)
quantity = req_item[quantity_req_type] + \
(shipment_item[shipment_item_table].pack_quantity / \
req_item.pack_quantity) * \
shipment_item[shipment_item_table].quantity
quantity = min(quantity, req_item.quantity) #Cap at req. quantity
s3db.req_req_item[req_item_id] = {quantity_req_type: quantity}
# Link the shipment_item to the req_item
s3db[shipment_item_table][shipment_item[shipment_item_table].id] = \
dict(req_item_id = req_item_id)
# Flag req record to update status_fulfil
return req_item.req_id, req_item.id
else:
return None, None
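# Worked example of the pack conversion above (illustrative numbers only):
# a request counted in boxes-of-10 (req pack_quantity=10) matched against a
# shipment of 40 single pieces (shipment pack_quantity=1, quantity=40) adds
# (1 / 10) * 40 = 4 boxes to the request's transit/fulfil quantity,
# capped at the originally requested quantity.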
# -----------------------------------------------------------------------------
def recv_process():
""" Receive a Shipment """
try:
recv_id = request.args[0]
except:
redirect(URL(f="recv"))
rtable = s3db.inv_recv
if not auth.s3_has_permission("update", rtable, record_id=recv_id):
session.error = T("You do not have permission to receive this shipment.")
redirect(URL(c="inv", f="recv", args=[recv_id]))
recv_record = db(rtable.id == recv_id).select(rtable.date,
rtable.status,
rtable.site_id,
limitby=(0, 1)
).first()
status = recv_record.status
inv_ship_status = s3db.inv_ship_status
if status == inv_ship_status["RECEIVED"]:
session.error = T("This shipment has already been received.")
redirect(URL(c="inv", f="recv", args=[recv_id]))
elif status == inv_ship_status["CANCEL"]:
session.error = T("This shipment has already been received & subsequently canceled.")
redirect(URL(c="inv", f="recv", args=[recv_id]))
# Update Receive record & lock for editing
code = s3db.supply_get_shipping_code(settings.get_inv_recv_shortname(),
recv_record.site_id,
s3db.inv_recv.recv_ref)
data = dict(recv_ref = code,
status = inv_ship_status["RECEIVED"],
owned_by_user = None,
owned_by_group = ADMIN,
)
if not recv_record.date:
data["date"] = request.utcnow
db(rtable.id == recv_id).update(**data)
stable = db.inv_send
tracktable = db.inv_track_item
send_row = db(tracktable.recv_id == recv_id).select(tracktable.send_id,
limitby=(0, 1)).first()
if send_row:
send_id = send_row.send_id
db(stable.id == send_id).update(status = inv_ship_status["RECEIVED"],
owned_by_user = None,
owned_by_group = ADMIN,
)
# Change the status for all track items in this shipment to Unloading;
# the onaccept will then move the values into the site, update any request
# record, create any adjustment if needed, and change the status to Arrived
db(tracktable.recv_id == recv_id).update(status = s3db.inv_tracking_status["UNLOADING"])
# Move each item to the site
track_rows = db(tracktable.recv_id == recv_id).select()
for track_item in track_rows:
row = Storage(track_item)
s3db.inv_track_item_onaccept(Storage(vars=Storage(id=row.id),
record = row,
))
session.confirmation = T("Shipment Items Received")
redirect(URL(c="inv", f="recv",
args=[recv_id]))
# -----------------------------------------------------------------------------
def recv_cancel():
"""
Cancel a Received Shipment
@todo what to do if the quantity cancelled doesn't exist?
"""
try:
recv_id = request.args[0]
except:
redirect(URL(f="recv"))
rtable = s3db.inv_recv
if not auth.s3_has_permission("delete", rtable, record_id=recv_id):
session.error = T("You do not have permission to cancel this received shipment.")
redirect(URL(c="inv", f="recv", args=[recv_id]))
recv_record = db(rtable.id == recv_id).select(rtable.status,
limitby=(0, 1)).first()
inv_ship_status = s3db.inv_ship_status
if recv_record.status != inv_ship_status["RECEIVED"]:
session.error = T("This shipment has not been received - it has NOT been canceled because it can still be edited.")
redirect(URL(c="inv", f="recv", args=[recv_id]))
stable = s3db.inv_send
tracktable = s3db.inv_track_item
inv_item_table = s3db.inv_inv_item
ritable = s3db.req_req_item
siptable = s3db.supply_item_pack
# Go through each item in the shipment remove them from the site store
# and put them back in the track item record
query = (tracktable.recv_id == recv_id) & \
(tracktable.deleted == False)
recv_items = db(query).select(tracktable.recv_inv_item_id,
tracktable.recv_quantity,
tracktable.send_id,
)
send_id = None
for recv_item in recv_items:
inv_item_id = recv_item.recv_inv_item_id
# This assumes that the inv_item still holds the received quantity
inv_item = db(inv_item_table.id == inv_item_id).select(inv_item_table.quantity,
                                                       limitby=(0, 1)).first()
quantity = inv_item.quantity - recv_item.recv_quantity
if quantity == 0:
db(inv_item_table.id == inv_item_id).delete()
else:
db(inv_item_table.id == inv_item_id).update(quantity = quantity)
db(tracktable.recv_id == recv_id).update(status = s3db.inv_tracking_status["SENT"]) # In transit
# @todo potential problem in that the send id should be the same for all track items but is not explicitly checked
if send_id is None and recv_item.send_id is not None:
send_id = recv_item.send_id
track_rows = db(tracktable.recv_id == recv_id).select(tracktable.req_item_id,
tracktable.item_pack_id,
tracktable.recv_quantity,
)
for track_item in track_rows:
# If this is linked to a request
# then remove these items from the quantity in fulfil
if track_item.req_item_id:
req_id = track_item.req_item_id
req_item = db(ritable.id == req_id).select(ritable.quantity_fulfil,
ritable.item_pack_id,
limitby=(0, 1)).first()
req_quantity = req_item.quantity_fulfil
# @ToDo: Optimise by reading these 2 in a single DB query
req_pack_quantity = db(siptable.id == req_item.item_pack_id).select(siptable.quantity,
limitby=(0, 1)
).first().quantity
track_pack_quantity = db(siptable.id == track_item.item_pack_id).select(siptable.quantity,
limitby=(0, 1)
).first().quantity
quantity_fulfil = s3db.supply_item_add(req_quantity,
req_pack_quantity,
- track_item.recv_quantity,
track_pack_quantity
)
db(ritable.id == req_id).update(quantity_fulfil = quantity_fulfil)
s3db.req_update_status(req_id)
# Now set the recv record to cancelled and the send record to sent
db(rtable.id == recv_id).update(date = request.utcnow,
status = inv_ship_status["CANCEL"],
owned_by_user = None,
owned_by_group = ADMIN)
if send_id is not None:
# The sent record is now set back to SENT so the source warehouse can
# now cancel this record to get the stock back into their warehouse.
# IMPORTANT reports need to locate this record otherwise it can be
# a mechanism to circumvent the auditing of stock
db(stable.id == send_id).update(status = inv_ship_status["SENT"],
owned_by_user = None,
owned_by_group = ADMIN)
redirect(URL(c="inv", f="recv",
args=[recv_id]))
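# Illustration of the rollback above, assuming supply_item_add(q1, p1, q2, p2)
# combines the two (quantity, pack-size) pairs as (q1*p1 + q2*p2) / p1,
# expressed in the first pack: 6 boxes-of-10 fulfilled (q1=6, p1=10) minus a
# cancelled receipt of 20 single pieces (q2=-20, p2=1) leaves
# (60 - 20) / 10 = 4 boxes fulfilled.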
# =============================================================================
def track_item():
""" RESTful CRUD controller """
table = s3db.inv_track_item
s3db.configure("inv_track_item",
create = False,
deletable = False,
editable = False,
insertable = False,
listadd = False,
)
report = get_vars.get("report")
if report == "rel":
# Summary of Releases
s3.crud_strings["inv_track_item"] = Storage(title_list = T("Summary of Releases"),
subtitle_list = T("Summary Details"),
)
s3db.configure("inv_track_item",
list_fields = ["id",
#"send_id",
#"req_item_id",
(T("Date Released"), "send_id$date"),
(T("Beneficiary"), "send_id$site_id"),
(settings.get_inv_send_shortname(), "send_id$send_ref"),
(settings.get_req_shortname(), "send_id$req_ref"),
(T("Items/Description"), "item_id"),
(T("Source"), "supply_org_id"),
(T("Unit"), "item_pack_id"),
(T("Quantity"), "quantity"),
(T("Unit Cost"), "pack_value"),
(T("Total Cost"), "total_value"),
],
orderby = "inv_send.site_id",
sort = True
)
s3.filter = (FS("send_id") != None)
elif report == "inc":
# Summary of Incoming Supplies
s3.crud_strings["inv_track_item"] = Storage(title_list = T("Summary of Incoming Supplies"),
subtitle_list = T("Summary Details"),
)
s3db.configure("inv_track_item",
list_fields = ["id",
(T("Date Received"), "recv_id$date"),
(T("Received By"), "recv_id$recipient_id"),
(settings.get_inv_send_shortname(), "recv_id$send_ref"),
(settings.get_inv_recv_shortname(), "recv_id$recv_ref"),
(settings.get_proc_shortname(), "recv_id$purchase_ref"),
(T("Item/Description"), "item_id"),
(T("Unit"), "item_pack_id"),
(T("Quantity"), "quantity"),
(T("Unit Cost"), "pack_value"),
(T("Total Cost"), "total_value"),
(T("Source"), "supply_org_id"),
(T("Remarks"), "comments"),
],
orderby = "inv_recv.recipient_id",
)
s3.filter = (FS("recv_id") != None)
elif report == "util":
# Utilization Report
s3.crud_strings["inv_track_item"] = Storage(title_list = T("Utilization Report"),
subtitle_list = T("Utilization Details"),
)
s3db.configure("inv_track_item",
list_fields = ["id",
(T("Item/Description"), "item_id$name"),
(T("Beneficiary"), "send_id$site_id"),
(settings.get_inv_send_shortname(), "send_id$send_ref"),
(settings.get_req_shortname(), "send_id$req_ref"),
(T("Items/Description"), "item_id"),
(T("Source"), "supply_org_id"),
(T("Unit"), "item_pack_id"),
(T("Quantity"), "quantity"),
(T("Unit Cost"), "pack_value"),
(T("Total Cost"), "total_value"),
]
)
s3.filter = (FS("item_id") != None)
elif report == "exp":
# Expiration Report
s3.crud_strings["inv_track_item"] = Storage(title_list = T("Expiration Report"),
subtitle_list = T("Expiration Details"),
)
s3db.configure("inv_track_item",
list_fields = ["id",
"recv_inv_item_id$site_id",
(T("Item/Description"), "item_id"),
(T("Expiration Date"), "expiry_date"),
(T("Source"), "supply_org_id"),
(T("Unit"), "item_pack_id"),
(T("Quantity"), "quantity"),
(T("Unit Cost"), "pack_value"),
(T("Total Cost"), "total_value"),
]
)
s3.filter = (FS("expiry_date") != None)
output = s3_rest_controller(rheader = s3db.inv_rheader)
return output
# =============================================================================
def adj():
""" RESTful CRUD controller """
table = s3db.inv_adj
# Limit site_id to sites the user has permissions for
error_msg = T("You do not have permission to adjust the stock level in this warehouse.")
auth.permitted_facilities(table=table, error_msg=error_msg)
def prep(r):
if r.interactive:
if r.component:
if r.component_name == "adj_item":
if r.component_id:
aitable = s3db.inv_adj_item
if r.record.status == 0:
aitable.reason.writable = True
record = db(aitable.id == r.component_id).select(aitable.inv_item_id,
limitby=(0, 1)
).first()
if record.inv_item_id:
aitable.item_id.writable = False
aitable.item_id.comment = None
aitable.item_pack_id.writable = False
elif r.component_name == "image":
doc_table = s3db.doc_image
doc_table.organisation_id.readable = doc_table.organisation_id.writable = False
doc_table.person_id.readable = doc_table.person_id.writable = False
doc_table.location_id.readable = doc_table.location_id.writable = False
else:
# if an adjustment has been selected and it has been completed
# then make the fields read only
if r.record and r.record.status:
table.adjuster_id.writable = False
table.site_id.writable = False
table.comments.writable = False
else:
if "item" in get_vars and "site" in get_vars:
# Create an adj record with a single adj_item record
adj_id = table.insert(adjuster_id = auth.s3_logged_in_person(),
site_id = get_vars.site,
adjustment_date = request.utcnow,
status = 0,
category = 1,
comments = "Single item adjustment"
)
inv_item_table = s3db.inv_inv_item
inv_item = inv_item_table[get_vars.item]
adjitemtable = s3db.inv_adj_item
adj_item_id = adjitemtable.insert(reason = 0,
adj_id = adj_id,
inv_item_id = inv_item.id, # original source inv_item
item_id = inv_item.item_id, # the supply item
item_pack_id = inv_item.item_pack_id,
old_quantity = inv_item.quantity,
currency = inv_item.currency,
old_status = inv_item.status,
new_status = inv_item.status,
old_pack_value = inv_item.pack_value,
new_pack_value = inv_item.pack_value,
expiry_date = inv_item.expiry_date,
bin = inv_item.bin,
old_owner_org_id = inv_item.owner_org_id,
new_owner_org_id = inv_item.owner_org_id,
)
redirect(URL(c = "inv",
f = "adj",
args = [adj_id,
"adj_item",
adj_item_id,
"update"]))
else:
table.comments.default = "Complete Stock Adjustment"
if "site" in get_vars:
table.site_id.writable = True
table.site_id.default = get_vars.site
return True
s3.prep = prep
def postp(r, output):
if r.interactive:
s3_action_buttons(r, deletable=False)
return output
s3.postp = postp
args = request.args
if len(args) > 1 and args[1] == "adj_item" and \
table[args[0]].status:
# remove CRUD generated buttons in the tabs
s3db.configure("inv_adj_item",
create = False,
deletable = False,
editable = False,
listadd = False,
)
output = s3_rest_controller(rheader = s3db.inv_adj_rheader)
return output
# -----------------------------------------------------------------------------
def adj_close():
""" RESTful CRUD controller """
try:
adj_id = request.args[0]
except:
redirect(URL(f="adj"))
atable = s3db.inv_adj
# Limit site_id to sites the user has permissions for
error_msg = T("You do not have permission to adjust the stock level in this warehouse.")
auth.permitted_facilities(table=atable, error_msg=error_msg)
adj_rec = db(atable.id == adj_id).select(atable.status,
atable.site_id,
limitby=(0, 1)).first()
if adj_rec.status != 0:
session.error = T("This adjustment has already been closed.")
if session.error:
redirect(URL(c="inv", f="adj",
args=[adj_id]))
aitable = s3db.inv_adj_item
inv_item_table = s3db.inv_inv_item
site_id = adj_rec.site_id
# Go through all the adj_items
query = (aitable.adj_id == adj_id) & \
(aitable.deleted == False)
adj_items = db(query).select()
for adj_item in adj_items:
if adj_item.inv_item_id is None:
# Create a new stock item
inv_item_id = inv_item_table.insert(site_id = site_id,
item_id = adj_item.item_id,
item_pack_id = adj_item.item_pack_id,
currency = adj_item.currency,
bin = adj_item.bin,
pack_value = adj_item.old_pack_value,
expiry_date = adj_item.expiry_date,
quantity = adj_item.new_quantity,
owner_org_id = adj_item.old_owner_org_id,
)
# Add the inventory item id to the adjustment record
db(aitable.id == adj_item.id).update(inv_item_id = inv_item_id)
elif adj_item.new_quantity is not None:
# Update the existing stock item
db(inv_item_table.id == adj_item.inv_item_id).update(item_pack_id = adj_item.item_pack_id,
bin = adj_item.bin,
pack_value = adj_item.old_pack_value,
expiry_date = adj_item.expiry_date,
quantity = adj_item.new_quantity,
owner_org_id = adj_item.new_owner_org_id,
status = adj_item.new_status,
)
# Change the status of the adj record to Complete
db(atable.id == adj_id).update(status=1)
# Go to the Inventory of the Site which has adjusted these items
(prefix, resourcename, id) = s3db.get_instance(s3db.org_site,
site_id)
redirect(URL(c = prefix,
f = resourcename,
args = [id, "inv_item"]))
# =============================================================================
def recv_item_json():
"""
"""
try:
item_id = request.args[0]
except:
raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))
stable = s3db.org_site
rtable = s3db.inv_recv
ittable = s3db.inv_track_item
rtable.date.represent = lambda dt: dt[:10]
query = (ittable.req_item_id == item_id) & \
(rtable.id == ittable.recv_id) & \
(rtable.site_id == stable.id) & \
(rtable.status == s3db.inv_ship_status["RECEIVED"]) & \
(ittable.deleted == False)
records = db(query).select(rtable.id,
rtable.date,
stable.name,
ittable.quantity)
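# Splice a header row into the JSON array: records.json() yields a string
# of the form "[{...}, ...]", so dropping its leading "[" and prepending
# the header object keeps the result a single well-formed JSON array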
output = "[%s,%s" % (json.dumps(dict(id = str(T("Received")),
quantity = "#"
)),
records.json()[1:])
response.headers["Content-Type"] = "application/json"
return output
# -----------------------------------------------------------------------------
def send_item_json():
"""
"""
try:
item_id = request.args[0]
except:
raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))
stable = s3db.org_site
istable = s3db.inv_send
ittable = s3db.inv_track_item
inv_ship_status = s3db.inv_ship_status
istable.date.represent = lambda dt: dt[:10]
query = (ittable.req_item_id == item_id) & \
(istable.id == ittable.send_id) & \
(istable.site_id == stable.id) & \
((istable.status == inv_ship_status["SENT"]) | \
(istable.status == inv_ship_status["RECEIVED"])) & \
(ittable.deleted == False)
records = db(query).select(istable.id,
istable.date,
stable.name,
ittable.quantity)
output = "[%s,%s" % (json.dumps(dict(id = str(T("Sent")),
quantity = "#"
)),
records.json()[1:])
response.headers["Content-Type"] = "application/json"
return output
# -----------------------------------------------------------------------------
def kitting():
return s3_rest_controller(rheader = s3db.inv_rheader,
)
# -----------------------------------------------------------------------------
def facility():
# Open record in this controller after creation
s3db.configure("org_facility",
create_next = URL(c="inv", f="facility",
args = ["[id]", "read"]),
)
return s3db.org_facility_controller()
# -----------------------------------------------------------------------------
def facility_type():
return s3_rest_controller("org")
# -----------------------------------------------------------------------------
def incoming():
"""
Incoming Shipments for Sites
Used from Requests rheader when looking at Transport Status
"""
# @ToDo: Create this function!
return s3db.inv_incoming()
# -----------------------------------------------------------------------------
def req_match():
""" Match Requests """
return s3db.req_match()
# END =========================================================================
|
{
"content_hash": "ff6477d626ec7339cd1b61c3ba75304b",
"timestamp": "",
"source": "github",
"line_count": 1738,
"max_line_length": 141,
"avg_line_length": 44.03912543153049,
"alnum_prop": 0.44690357982754114,
"repo_name": "ScottBuchanan/eden",
"id": "3721db8d8df0806712199f899cb27a8c470acb96",
"size": "76565",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "controllers/inv.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "727"
},
{
"name": "CSS",
"bytes": "2347001"
},
{
"name": "HTML",
"bytes": "1318813"
},
{
"name": "JavaScript",
"bytes": "19268377"
},
{
"name": "NSIS",
"bytes": "3934"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "29167481"
},
{
"name": "Ruby",
"bytes": "3611"
},
{
"name": "Shell",
"bytes": "4133"
},
{
"name": "XSLT",
"bytes": "2800187"
}
],
"symlink_target": ""
}
|
from flask import Flask, request, render_template
import plivohelper
import os
response_server = Flask("ResponseServer")
response_server.debug = True
@response_server.errorhandler(404)
def page_not_found(error):
"""error page"""
print "404 page not found"
return 'This URL does not exist', 404
@response_server.route('/ringing/', methods=['GET', 'POST'])
def ringing():
"""ringing URL"""
# Post params- 'to': ringing number, 'request_uuid': request id given at the time of api call
print "We got a ringing notification"
return "OK"
@response_server.route('/hangup/', methods=['GET', 'POST'])
def hangup():
"""hangup URL"""
# Post params- 'request_uuid': request id given at the time of api call,
# 'CallUUID': unique id of call, 'reason': reason of hangup
print "We got a hangup notification"
return "OK"
@response_server.route('/dialed/', methods=['GET', 'POST'])
def dialed():
if request.method == 'POST':
print request.form.items()
else:
print request.args.items()
r = plivohelper.Response()
r.addSpeak("Dial done")
print "RESTXML Response => %s" % r
return render_template('response_template.xml', response=r)
@response_server.route('/dialmusic/', methods=['GET', 'POST'])
def dialmusic():
r = plivohelper.Response()
r.addSpeak("Calling now", loop=1)
r.addPlay("http://127.0.0.1:5000/static/duck.mp3", loop=1)
r.addPlay("http://127.0.0.1:5000/static/duck.mp3", loop=1)
print "RESTXML Response => %s" % r
return render_template('response_template.xml', response=r)
@response_server.route('/confirm/', methods=['GET', 'POST'])
def confirm():
r = plivohelper.Response()
r.addSpeak("Confirm by pressing 9", loop=1)
r.addPlay("http://127.0.0.1:5000/static/duck.mp3", loop=1)
r.addPlay("http://127.0.0.1:5000/static/duck.mp3", loop=1)
print "RESTXML Response => %s" % r
return render_template('response_template.xml', response=r)
@response_server.route('/answered/', methods=['GET', 'POST'])
def answered():
# Post params- 'CallUUID': unique id of call, 'Direction': direction of call,
# 'To': Number which was called, 'From': calling number,
# If Direction is outbound then 2 additional params:
# 'ALegUUID': Unique Id for first leg,
# 'ALegRequestUUID': request id given at the time of api call
if request.method == 'POST':
try:
print "CallUUID: %s" % request.form['CallUUID']
except:
pass
else:
try:
print "CallUUID: %s" % request.args['CallUUID']
except:
pass
r = plivohelper.Response()
r.addSpeak("Dial Test")
d = r.addDial(action="http://127.0.0.1:5000/dialed/",
hangupOnStar=True, timeLimit=60,
dialMusic="http://127.0.0.1:5000/dialmusic/",
confirmSound="http://127.0.0.1:5000/confirm/",
confirmKey="9")
d.addNumber("4871", gateways="sofia/gateway/pstn/", gatewayTimeouts="30")
d.addNumber("1749", gateways="sofia/gateway/pstn", gatewayTimeouts="30")
print "RESTXML Response => %s" % r
return render_template('response_template.xml', response=r)
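# How the handlers above chain together (as the parameter names suggest):
# /answered/ returns a Dial whose dialMusic URL (/dialmusic/) is played to
# the caller while the outbound legs ring, and whose confirmSound URL
# (/confirm/) asks the callee to press confirmKey (9) before the call is
# bridged; /dialed/ is requested once the Dial completes.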
if __name__ == '__main__':
if not os.path.isfile("templates/response_template.xml"):
print "Error : Can't find the XML template : templates/response_template.xml"
else:
response_server.run(host='127.0.0.1', port=5000)
|
{
"content_hash": "71462f2a983fe76aed47ae27897244ad",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 97,
"avg_line_length": 36.677083333333336,
"alnum_prop": 0.6219823913660891,
"repo_name": "plivo/plivohelper-python",
"id": "fa0f5d7458cc20332358ccb7556cb0ac5342d8b4",
"size": "3521",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/dial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "100302"
}
],
"symlink_target": ""
}
|
from __future__ import division
import numpy as np
from pgmpy.factors.distributions import BaseDistribution
from pgmpy.factors.distributions import GaussianDistribution
class CanonicalDistribution(BaseDistribution):
u"""
The intermediate factors in a Gaussian network can be described
compactly using a simple parametric representation called the
canonical form. This representation is closed under the basic
operations used in inference: factor product, factor division,
factor reduction, and marginalization. Thus, we define this
CanonicalDistribution class that allows the inference process to be
performed on joint Gaussian networks.
A canonical form C (X; K,h,g) is defined as
C (X; K,h,g) = exp( ((-1/2) * X.T * K * X) + (h.T * X) + g)
Reference
---------
Probabilistic Graphical Models, Principles and Techniques,
Daphne Koller and Nir Friedman, Section 14.2, Chapter 14.
"""
def __init__(self, variables, K, h, g):
"""
Parameters
----------
variables: list or array-like
The variables for which the distribution is defined.
K: n x n, 2-d array-like
h : n x 1, array-like
g : int, float
pdf: function
The probability density function of the distribution.
The terms K, h and g are defined parameters for canonical
factors representation.
Examples
--------
>>> from pgmpy.factors.continuous import CanonicalDistribution
>>> phi = CanonicalDistribution(['X', 'Y'], np.array([[1, -1], [-1, 1]]),
np.array([[1], [-1]]), -3)
>>> phi.variables
['X', 'Y']
>>> phi.K
array([[1, -1],
[-1, 1]])
>>> phi.h
array([[1],
[-1]])
>>> phi.g
-3
"""
no_of_var = len(variables)
if len(h) != no_of_var:
raise ValueError("Length of h parameter vector must be equal to "
"the number of variables.")
self.variables = variables
self.h = np.asarray(np.reshape(h, (no_of_var, 1)), dtype=float)
self.g = g
self.K = np.asarray(K, dtype=float)
if self.K.shape != (no_of_var, no_of_var):
raise ValueError("The K matrix should be a square matrix with "
"order equal to the number of variables. Got: "
"{got_shape}, Expected: {exp_shape}".format(
got_shape=self.K.shape,
exp_shape=(no_of_var, no_of_var)))
@property
def pdf(self):
def fun(*args):
x = np.array(args)
return np.exp(self.g + np.dot(x, self.h)[0] - 0.5 * np.dot(x.T, np.dot(self.K, x)))
return fun
def assignment(self, *x):
"""
Returns the probability value of the PDF at the given parameter values.
Parameters
----------
*x: values of all variables of this distribution,
collectively defining a point at which the probability value is to be computed.
Returns
-------
float: The probability value at the point.
Examples
--------
>>> from pgmpy.factors.distributions import GaussianDistribution
>>> dist = GaussianDistribution(variables=['x1', 'x2'],
... mean=[[0], [0]],
... covariance=[[1, 0], [0, 1]])
>>> dist.assignment(0, 0)
0.15915494309189535
"""
return self.pdf(*x)
def copy(self):
"""
Makes a copy of the factor.
Returns
-------
CanonicalDistribution object: Copy of the factor
Examples
--------
>>> from pgmpy.factors.continuous import CanonicalDistribution
>>> phi = CanonicalDistribution(['X', 'Y'], np.array([[1, -1], [-1, 1]]),
np.array([[1], [-1]]), -3)
>>> phi.variables
['X', 'Y']
>>> phi.K
array([[1, -1],
[-1, 1]])
>>> phi.h
array([[1],
[-1]])
>>> phi.g
-3
>>> phi2 = phi.copy()
>>> phi2.variables
['X', 'Y']
>>> phi2.K
array([[1, -1],
[-1, 1]])
>>> phi2.h
array([[1],
[-1]])
>>> phi2.g
-3
"""
copy_factor = CanonicalDistribution(self.variables, self.K.copy(),
self.h.copy(), self.g)
return copy_factor
def to_joint_gaussian(self):
"""
Return an equivalent Joint Gaussian Distribution.
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors.continuous import CanonicalDistribution
>>> phi = CanonicalDistribution(['x1', 'x2'], np.array([[3, -2], [-2, 4]]),
np.array([[5], [-1]]), 1)
>>> jgd = phi.to_joint_gaussian()
>>> jgd.variables
['x1', 'x2']
>>> jgd.covariance
array([[ 0.5 , 0.25 ],
[ 0.25 , 0.375]])
>>> jgd.mean
array([[ 2.25 ],
[ 0.875]])
"""
covariance = np.linalg.inv(self.K)
mean = np.dot(covariance, self.h)
return GaussianDistribution(self.variables, mean, covariance)
def reduce(self, values, inplace=True):
"""
Reduces the distribution to the context of the given variable values.
Let C(X,Y ; K, h, g) be some canonical form over X,Y where,
K = [[K_XX, K_XY],        h = [[h_X],
     [K_YX, K_YY]]             [h_Y]]
The formula for the obtained conditional distribution for setting
Y = y is given by,
.. math:: K' = K_{XX}
.. math:: h' = h_X - K_{XY} * y
.. math:: g' = g + {h^T}_Y * y - 0.5 * y^T * K_{YY} * y
Parameters
----------
values: list, array-like
A list of tuples of the form (variable name, variable value).
inplace: boolean
If inplace=True it will modify the factor itself, else would return
a new CanonicalDistribution object.
Returns
-------
CanonicalDistribution or None:
if inplace=True (default) returns None
if inplace=False returns a new CanonicalDistribution instance.
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors.continuous import CanonicalDistribution
>>> phi = CanonicalDistribution(['X1', 'X2', 'X3'],
... np.array([[1, -1, 0], [-1, 4, -2], [0, -2, 4]]),
... np.array([[1], [4], [-1]]), -2)
>>> phi.variables
['X1', 'X2', 'X3']
>>> phi.K
array([[ 1., -1.,  0.],
       [-1.,  4., -2.],
       [ 0., -2.,  4.]])
>>> phi.h
array([[ 1.],
       [ 4.],
       [-1.]])
>>> phi.g
-2
>>> phi.reduce([('X3', 0.25)])
>>> phi.variables
['X1', 'X2']
>>> phi.K
array([[ 1, -1],
[-1, 4]])
>>> phi.h
array([[ 1. ],
[ 4.5]])
>>> phi.g
-2.375
"""
if not isinstance(values, (list, tuple, np.ndarray)):
raise TypeError("variables: Expected type list or array-like, "
"got type {var_type}".format(var_type=type(values)))
if not all([var in self.variables for var, value in values]):
raise ValueError("Variable not in scope.")
phi = self if inplace else self.copy()
var_to_reduce = [var for var, value in values]
# index_to_keep -> j vector
index_to_keep = [self.variables.index(var) for var in self.variables
if var not in var_to_reduce]
# index_to_reduce -> i vector
index_to_reduce = [self.variables.index(var) for var in var_to_reduce]
K_i_i = self.K[np.ix_(index_to_keep, index_to_keep)]
K_i_j = self.K[np.ix_(index_to_keep, index_to_reduce)]
K_j_j = self.K[np.ix_(index_to_reduce, index_to_reduce)]
h_i = self.h[index_to_keep]
h_j = self.h[index_to_reduce]
# The values for the reduced variables.
y = np.array([value for var, value in values]).reshape(len(index_to_reduce), 1)
phi.variables = [self.variables[index] for index in index_to_keep]
phi.K = K_i_i
phi.h = h_i - np.dot(K_i_j, y)
phi.g = self.g + (np.dot(h_j.T, y) - (0.5 * np.dot(np.dot(y.T, K_j_j), y)))[0][0]
if not inplace:
return phi
def marginalize(self, variables, inplace=True):
u"""
Modifies the factor with marginalized values.
Let C(X,Y ; K, h, g) be some canonical form over X,Y where,
K = [[K_XX, K_XY],        h = [[h_X],
     [K_YX, K_YY]]             [h_Y]]
In this case, the result of the integration operation is a canonical
from C (K', h', g') given by,
.. math:: K' = K_{XX} - K_{XY} * {K^{-1}}_{YY} * K_YX
.. math:: h' = h_X - K_{XY} * {K^{-1}}_{YY} * h_Y
.. math:: g' = g + 0.5 * (|Y| * log(2*pi) - log(|K_{YY}|) + {h^T}_Y * {K^{-1}}_{YY} * h_Y)
Parameters
----------
variables: list or array-like
List of variables over which to marginalize.
inplace: boolean
If inplace=True it will modify the distribution itself,
else would return a new distribution.
Returns
-------
CanonicalDistribution or None :
if inplace=True (default) returns None
if inplace=False return a new CanonicalDistribution instance
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors.continuous import CanonicalDistribution
>>> phi = CanonicalDistribution(['X1', 'X2', 'X3'],
... np.array([[1, -1, 0], [-1, 4, -2], [0, -2, 4]]),
... np.array([[1], [4], [-1]]), -2)
>>> phi.K
array([[ 1, -1, 0],
[-1, 4, -2],
[ 0, -2, 4]])
>>> phi.h
array([[ 1],
[ 4],
[-1]])
>>> phi.g
-2
>>> phi.marginalize(['X3'])
>>> phi.K
array([[ 1., -1.],
[-1., 3.]])
>>> phi.h
array([[ 1. ],
[ 3.5]])
>>> phi.g
-1.64920865
"""
if not isinstance(variables, (list, tuple, np.ndarray)):
raise TypeError("variables: Expected type list or array-like, "
"got type {var_type}".format(var_type=type(variables)))
if not all([var in self.variables for var in variables]):
raise ValueError("Variable not in scope.")
phi = self if inplace else self.copy()
# index_to_keep -> i vector
index_to_keep = [self.variables.index(var) for var in self.variables
if var not in variables]
# index_to_marginalize -> j vector
index_to_marginalize = [self.variables.index(var) for var in variables]
K_i_i = self.K[np.ix_(index_to_keep, index_to_keep)]
K_i_j = self.K[np.ix_(index_to_keep, index_to_marginalize)]
K_j_i = self.K[np.ix_(index_to_marginalize, index_to_keep)]
K_j_j = self.K[np.ix_(index_to_marginalize, index_to_marginalize)]
K_j_j_inv = np.linalg.inv(K_j_j)
h_i = self.h[index_to_keep]
h_j = self.h[index_to_marginalize]
phi.variables = [self.variables[index] for index in index_to_keep]
phi.K = K_i_i - np.dot(np.dot(K_i_j, K_j_j_inv), K_j_i)
phi.h = h_i - np.dot(np.dot(K_i_j, K_j_j_inv), h_j)
phi.g = self.g + 0.5 * (len(variables) * np.log(2 * np.pi) -
np.log(abs(np.linalg.det(K_j_j))) + np.dot(np.dot(h_j.T, K_j_j_inv), h_j))[0][0]
if not inplace:
return phi
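# Sanity check for the example above (X3 marginalized out): K_YY = [[4]],
# h_Y = [[-1]], so g' = -2 + 0.5 * (log(2*pi) - log(4) + (-1) * 0.25 * (-1))
# ~= -1.64920865, matching the docstring output.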
def _operate(self, other, operation, inplace=True):
"""
Gives the CanonicalDistribution operation (product or divide) with
the other factor.
The product of two canonical factors over the same scope
X is simply:
C(K1, h1, g1) * C(K2, h2, g2) = C(K1+K2, h1+h2, g1+g2)
The division of canonical forms is defined analogously:
C(K1, h1, g1) / C(K2, h2, g2) = C(K1-K2, h1-h2, g1- g2)
When we have two canonical factors over different scopes X and Y,
we simply extend the scope of both to make their scopes match and
then perform the operation of the above equation. The extension of
the scope is performed by simply adding zero entries to both the K
matrices and the h vectors.
Parameters
----------
other: CanonicalFactor
The CanonicalDistribution to be multiplied.
operation: String
'product' for multiplication operation and
'divide' for division operation.
Returns
-------
CanonicalDistribution or None:
if inplace=True (default) returns None
if inplace=False returns a new CanonicalDistribution instance.
Example
-------
>>> import numpy as np
>>> from pgmpy.factors.continuous import CanonicalDistribution
>>> phi1 = CanonicalDistribution(['x1', 'x2', 'x3'],
np.array([[1, -1, 0], [-1, 4, -2], [0, -2, 4]]),
np.array([[1], [4], [-1]]), -2)
>>> phi2 = CanonicalDistribution(['x1', 'x2'], np.array([[3, -2], [-2, 4]]),
np.array([[5], [-1]]), 1)
>>> phi3 = phi1 * phi2
>>> phi3.K
array([[ 4., -3., 0.],
[-3., 8., -2.],
[ 0., -2., 4.]])
>>> phi3.h
array([ 6., 3., -1.])
>>> phi3.g
-1
>>> phi4 = phi1 / phi2
>>> phi4.K
array([[-2., 1., 0.],
[ 1., 0., -2.],
[ 0., -2., 4.]])
>>> phi4.h
array([-4., 5., -1.])
>>> phi4.g
-3
"""
if not isinstance(other, CanonicalDistribution):
raise TypeError(
"CanonicalDistribution object can only be multiplied or divided "
"with an another CanonicalDistribution object. Got {other_type}, "
"expected CanonicalDistribution.".format(other_type=type(other)))
phi = self if inplace else self.copy()
all_vars = self.variables + [var for var in other.variables if var not in self.variables]
no_of_var = len(all_vars)
self_var_index = [all_vars.index(var) for var in self.variables]
other_var_index = [all_vars.index(var) for var in other.variables]
def _extend_K_scope(K, index):
ext_K = np.zeros([no_of_var, no_of_var])
ext_K[np.ix_(index, index)] = K
return ext_K
def _extend_h_scope(h, index):
ext_h = np.zeros(no_of_var).reshape(no_of_var, 1)
ext_h[index] = h
return ext_h
phi.variables = all_vars
if operation == 'product':
phi.K = _extend_K_scope(self.K, self_var_index) + _extend_K_scope(other.K, other_var_index)
phi.h = _extend_h_scope(self.h, self_var_index) + _extend_h_scope(other.h, other_var_index)
phi.g = self.g + other.g
else:
phi.K = _extend_K_scope(self.K, self_var_index) - _extend_K_scope(other.K, other_var_index)
phi.h = _extend_h_scope(self.h, self_var_index) - _extend_h_scope(other.h, other_var_index)
phi.g = self.g - other.g
if not inplace:
return phi
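# Scope-extension illustration (toy numbers): multiplying C(['x'], K=[[2]],
# h=[[1]], g=0) by C(['y'], K=[[3]], h=[[2]], g=1) zero-pads both factors to
# the joint scope ['x', 'y'] and sums, giving K=[[2, 0], [0, 3]],
# h=[[1], [2]], g=1.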
def product(self, other, inplace=True):
"""
Returns the product of two gaussian distributions.
Parameters
----------
other: CanonicalFactor
The GaussianDistribution to be multiplied.
inplace: boolean
If True, modifies the distribution itself, otherwise returns a new
CanonicalDistribution object.
Returns
-------
CanonicalDistribution or None:
if inplace=True (default) returns None.
if inplace=False returns a new CanonicalDistribution instance.
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors.distributions import GaussianDistribution as GD
>>> dis1 = GD(['x1', 'x2', 'x3'], np.array([[1], [-3], [4]]),
... np.array([[4, 2, -2], [2, 5, -5], [-2, -5, 8]]))
>>> dis2 = GD(['x3', 'x4'], [1, 2], [[2, 3], [5, 6]])
>>> dis3 = dis1.product(dis2, inplace=False)
>>> dis3.covariance
array([[ 3.6, 1. , -0.4, -0.6],
[ 1. , 2.5, -1. , -1.5],
[-0.4, -1. , 1.6, 2.4],
[-1. , -2.5, 4. , 4.5]])
>>> dis3.mean
array([[ 1.6],
[-1.5],
[ 1.6],
[ 3.5]])
"""
return self._operate(other, operation='product', inplace=inplace)
def divide(self, other, inplace=True):
"""
Returns the division of two gaussian distributions.
Parameters
----------
other: GaussianDistribution
The GaussianDistribution to be divided.
inplace: boolean
If True, modifies the distribution itself, otherwise returns a new
GaussianDistribution object.
Returns
-------
CanonicalDistribution or None:
if inplace=True (default) returns None.
if inplace=False returns a new CanonicalDistribution instance.
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors.distributions import GaussianDistribution as GD
>>> dis1 = GD(['x1', 'x2', 'x3'], np.array([[1], [-3], [4]]),
... np.array([[4, 2, -2], [2, 5, -5], [-2, -5, 8]]))
>>> dis2 = GD(['x3', 'x4'], [1, 2], [[2, 3], [5, 6]])
>>> dis3 = dis1.divide(dis2, inplace=False)
>>> dis3.covariance
array([[ 3.6, 1. , -0.4, -0.6],
[ 1. , 2.5, -1. , -1.5],
[-0.4, -1. , 1.6, 2.4],
[-1. , -2.5, 4. , 4.5]])
>>> dis3.mean
array([[ 1.6],
[-1.5],
[ 1.6],
[ 3.5]])
"""
return self._operate(other, operation='divide', inplace=inplace)
def __mul__(self, other):
return self.product(other, inplace=False)
def __rmul__(self, other):
return self.__mul__(other)
def __truediv__(self, other):
return self.divide(other, inplace=False)
__div__ = __truediv__
|
{
"content_hash": "fc110d586b4097fd6cd6f95aff45f2ef",
"timestamp": "",
"source": "github",
"line_count": 586,
"max_line_length": 108,
"avg_line_length": 32.60580204778157,
"alnum_prop": 0.48762233736327,
"repo_name": "jhonatanoliveira/pgmpy",
"id": "8c26b1639c8f4d413a8bf1200068a1fea660c6f0",
"size": "19136",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "pgmpy/factors/continuous/CanonicalDistribution.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1427372"
},
{
"name": "Shell",
"bytes": "1026"
}
],
"symlink_target": ""
}
|
from pants.testutil.pants_run_integration_test import PantsRunIntegrationTest
class NodeRunIntegrationTest(PantsRunIntegrationTest):
def test_run_simple(self):
command = [
"run",
"contrib/node/examples/src/node/web-component-button",
"--run-node-script-name=build",
]
pants_run = self.run_pants(command=command)
self.assert_success(pants_run)
def test_run_passthru_args(self):
command = [
"-q",
"run",
"contrib/node/examples/src/node/server-project",
"--run-node-script-name=checkarg",
"--",
]
pants_run = self.run_pants(command=command + ["incorrect"])
self.assert_failure(pants_run)
pants_run = self.run_pants(command=command + ["correct"])
self.assert_success(pants_run)
def test_run_yarnpkg(self):
command = [
"run",
"contrib/node/examples/src/node/hello:pantsbuild-hello-node",
"--run-node-script-name=start",
]
pants_run = self.run_pants(command=command)
self.assert_success(pants_run)
def test_run_yarnpkg_source_deps_with_workspaces(self):
command = [
"run",
"contrib/node/examples/src/node/yarn-workspaces",
"--run-node-script-name=test-adder",
]
pants_run = self.run_pants(command=command)
self.assert_success(pants_run)
|
{
"content_hash": "d8491581d398b94330ad895d4e0c7a10",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 77,
"avg_line_length": 31.891304347826086,
"alnum_prop": 0.580095432856169,
"repo_name": "wisechengyi/pants",
"id": "504fb336b7d3c0f8618a1f1d80803d5eedcdc025",
"size": "1599",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "contrib/node/tests/python/pants_test/contrib/node/tasks/test_node_run_integration.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "6634"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "44381"
},
{
"name": "Java",
"bytes": "507948"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "7608990"
},
{
"name": "Rust",
"bytes": "1005243"
},
{
"name": "Scala",
"bytes": "106520"
},
{
"name": "Shell",
"bytes": "105217"
},
{
"name": "Starlark",
"bytes": "489739"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
}
|
__author__ = 'JordSti'
from PyQt4 import QtGui, QtCore
import gui
import resource_widget
import resource_file
import os
class resource_file_widget(QtGui.QWidget, gui.Ui_resource_file_widget):
def __init__(self, res_file, parent=None):
super(resource_file_widget, self).__init__(parent)
self.res_file = res_file
self.setupUi(self)
self.resources_widget = []
self.__init__widget()
def __init__widget(self):
self.container = QtGui.QWidget()
self.setLayout(self.layout_main)
self.scroll_layout = QtGui.QVBoxLayout()
self.sa_resources.setWidget(self.container)
self.container.setLayout(self.scroll_layout)
self.btn_add_resource.clicked.connect(self.new_resource)
self.__fill_resources()
def new_resource(self):
filepath = str(QtGui.QFileDialog.getOpenFileName(self, "New resource", QtCore.QDir.homePath(), "Image (*.png);;All files (*.*)"))
if os.path.exists(filepath):
rname = os.path.basename(filepath).split('.')[0]
fp = open(filepath, 'rb')
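# Read the file in 1 KiB chunks; the loop stops at the first short
# (or empty) chunk, which marks end-of-file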
chunk = fp.read(1024)
data = chunk
while len(chunk) == 1024:
chunk = fp.read(1024)
data += chunk
fp.close()
res = resource_file.resource(rname, data)
self.res_file.resources.append(res)
self.__fill_resources()
def __fill_resources(self):
for rw in self.resources_widget:
rw.deleteLater()
#self.scroll_layout.removeWidget(rw)
self.resources_widget = []
for r in self.res_file.resources:
rw = resource_widget.resource_widget(r)
self.scroll_layout.addWidget(rw)
self.resources_widget.append(rw)
# TODO: move these pane methods (save/save_as/closing) into a base class
def save(self):
if self.res_file.path is not None:
self.res_file.write_file()
else:
self.save_as()
def save_as(self):
filepath = str(QtGui.QFileDialog.getSaveFileName(self, "Save as", QtCore.QDir.homePath(), "Resource File (*.res)"))
if len(filepath) > 0:
self.res_file.path = filepath
self.res_file.write_file()
def closing(self):
return True
|
{
"content_hash": "b0a2a1ca5f59b58ace5c8672037cd97f",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 137,
"avg_line_length": 27.011764705882353,
"alnum_prop": 0.5871080139372822,
"repo_name": "jordsti/stigame",
"id": "f6b2fe86f8f459e26ba0d19f84ac935b2f1c0daa",
"size": "2296",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/sprite-editor/resource_file_widget.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "631452"
},
{
"name": "CMake",
"bytes": "14021"
},
{
"name": "Python",
"bytes": "2171"
}
],
"symlink_target": ""
}
|
"""Print hex digits for seaborn colors"""
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import seaborn
# Print the HEX codes of seaborn colors
pal_hls = seaborn.hls_palette(12, l=.3, s=.8).as_hex()
import seaborn as sns
num_shades = 8
sns.palplot(sns.cubehelix_palette(num_shades))
plt.savefig('seaborn.png')
print(pal_hls)
|
{
"content_hash": "125c2c52cf68dec2139fc73754bca5bf",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 54,
"avg_line_length": 20.88235294117647,
"alnum_prop": 0.7436619718309859,
"repo_name": "dvklopfenstein/biocode",
"id": "d158bd80ddb3bcd9b4101eb3e85fe8c266a52d3a",
"size": "378",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/Tests/matplotlib/test_searborn.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2491"
},
{
"name": "Python",
"bytes": "6464545"
}
],
"symlink_target": ""
}
|
import logging
from django.conf import settings
from django.core.urlresolvers import reverse
from django.template import defaultfilters as filters
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from keystoneclient import exceptions as keystoneclient_exceptions
from horizon import exceptions
from horizon import messages
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard.dashboards.identity.domains import constants
LOG = logging.getLogger(__name__)
class UpdateUsersLink(tables.LinkAction):
name = "users"
verbose_name = _("Manage Members")
url = "horizon:identity:domains:update"
classes = ("ajax-modal",)
policy_rules = (("identity", "identity:update_domain"),)
def get_link_url(self, domain):
step = 'update_user_members'
base_url = reverse(self.url, args=[domain.id])
param = urlencode({"step": step})
return "?".join([base_url, param])
class UpdateGroupsLink(tables.LinkAction):
name = "groups"
verbose_name = _("Modify Groups")
url = "horizon:identity:domains:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("identity", "identity:update_domain"),)
def get_link_url(self, domain):
step = 'update_group_members'
base_url = reverse(self.url, args=[domain.id])
param = urlencode({"step": step})
return "?".join([base_url, param])
class CreateDomainLink(tables.LinkAction):
name = "create"
verbose_name = _("Create Domain")
url = constants.DOMAINS_CREATE_URL
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (('identity', 'identity:create_domain'),)
def allowed(self, request, domain):
return api.keystone.keystone_can_edit_domain()
class EditDomainLink(tables.LinkAction):
name = "edit"
verbose_name = _("Edit")
url = constants.DOMAINS_UPDATE_URL
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (('identity', 'identity:update_domain'),)
def allowed(self, request, domain):
return api.keystone.keystone_can_edit_domain()
class DeleteDomainsAction(tables.DeleteAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Domain",
u"Delete Domains",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Deleted Domain",
u"Deleted Domains",
count
)
name = "delete"
policy_rules = (('identity', 'identity:delete_domain'),)
def allowed(self, request, datum):
return api.keystone.keystone_can_edit_domain()
def delete(self, request, obj_id):
domain = self.table.get_object_by_id(obj_id)
if domain.enabled:
msg = _('Domain "%s" must be disabled before it can be deleted.') \
% domain.name
messages.error(request, msg)
raise keystoneclient_exceptions.ClientException(409, msg)
else:
LOG.info('Deleting domain "%s".', obj_id)
api.keystone.domain_delete(request, obj_id)
class DisableDomainsAction(tables.BatchAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Disable Domain",
u"Disable Domains",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Disabled Domain",
u"Disabled Domains",
count
)
name = "disable"
policy_rules = (('identity', 'identity:update_domain'),)
verbose_name = _("Disable Domains")
def allowed(self, request, datum):
return api.keystone.keystone_can_edit_domain() \
and (datum is None or datum.enabled)
def action(self, request, obj_id):
domain = self.table.get_object_by_id(obj_id)
if domain.enabled:
LOG.info('Disabling domain "%s".', obj_id)
try:
api.keystone.domain_update(request,
domain_id=domain.id,
name=domain.name,
description=domain.description,
enabled=False)
except Exception:
exceptions.handle(request, ignore=True)
return False
class EnableDomainsAction(tables.BatchAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Enable Domain",
u"Enable Domains",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Enabled Domain",
u"Enabled Domains",
count
)
name = "enable"
policy_rules = (('identity', 'identity:update_domain'),)
verbose_name = _("Enable Domains")
def allowed(self, request, datum):
return api.keystone.keystone_can_edit_domain() \
and (datum is None or not datum.enabled)
def action(self, request, obj_id):
domain = self.table.get_object_by_id(obj_id)
if not domain.enabled:
LOG.info('Enabling domain "%s".', obj_id)
try:
api.keystone.domain_update(request,
domain_id=domain.id,
name=domain.name,
description=domain.description,
enabled=True)
except Exception:
exceptions.handle(request, ignore=True)
return False
class DomainFilterAction(tables.FilterAction):
def allowed(self, request, datum):
multidomain_support = getattr(settings,
'OPENSTACK_KEYSTONE_MULTIDOMAIN_SUPPORT',
False)
return multidomain_support
def filter(self, table, domains, filter_string):
"""Naive case-insensitive search."""
q = filter_string.lower()
def comp(domain):
if q in domain.name.lower():
return True
return False
return filter(comp, domains)
class SetDomainContext(tables.Action):
name = "set_domain_context"
verbose_name = _("Set Domain Context")
url = constants.DOMAINS_INDEX_URL
preempt = True
policy_rules = (('identity', 'identity:update_domain'),)
def allowed(self, request, datum):
multidomain_support = getattr(settings,
'OPENSTACK_KEYSTONE_MULTIDOMAIN_SUPPORT',
False)
if not multidomain_support:
return False
ctx = request.session.get("domain_context", None)
if ctx and datum.id == ctx:
return False
return True
def single(self, table, request, obj_id):
if ('domain_context' not in request.session or
request.session['domain_context'] != obj_id):
try:
domain = api.keystone.domain_get(request, obj_id)
request.session['domain_context'] = obj_id
request.session['domain_context_name'] = domain.name
messages.success(request,
_('Domain Context updated to Domain %s.') %
domain.name)
except Exception:
messages.error(request,
_('Unable to set Domain Context.'))
class UnsetDomainContext(tables.Action):
name = "clear_domain_context"
verbose_name = _("Clear Domain Context")
url = constants.DOMAINS_INDEX_URL
preempt = True
requires_input = False
policy_rules = (('identity', 'identity:update_domain'),)
def allowed(self, request, datum):
ctx = request.session.get("domain_context", None)
return ctx is not None
def single(self, table, request, obj_id):
if 'domain_context' in request.session:
request.session.pop("domain_context")
request.session.pop("domain_context_name")
messages.success(request, _('Domain Context cleared.'))
class DomainsTable(tables.DataTable):
name = tables.WrappingColumn('name', verbose_name=_('Name'))
description = tables.Column(lambda obj: getattr(obj, 'description', None),
verbose_name=_('Description'))
id = tables.Column('id', verbose_name=_('Domain ID'))
enabled = tables.Column('enabled', verbose_name=_('Enabled'), status=True,
filters=(filters.yesno, filters.capfirst))
class Meta(object):
name = "domains"
verbose_name = _("Domains")
table_actions_menu = (EnableDomainsAction, DisableDomainsAction)
row_actions = (SetDomainContext, UpdateUsersLink, UpdateGroupsLink,
EditDomainLink, EnableDomainsAction,
DisableDomainsAction, DeleteDomainsAction)
table_actions = (DomainFilterAction, CreateDomainLink,
DeleteDomainsAction, UnsetDomainContext)
|
{
"content_hash": "3d167686ee0b0bb45e87d3c897e7b152",
"timestamp": "",
"source": "github",
"line_count": 280,
"max_line_length": 79,
"avg_line_length": 33.25,
"alnum_prop": 0.5818474758324382,
"repo_name": "yeming233/horizon",
"id": "d0b43db2e18728d6bfcf33cbef93813b5bf68cda",
"size": "9944",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/identity/domains/tables.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "105527"
},
{
"name": "HTML",
"bytes": "517093"
},
{
"name": "JavaScript",
"bytes": "953373"
},
{
"name": "Makefile",
"bytes": "588"
},
{
"name": "Python",
"bytes": "4845896"
},
{
"name": "Shell",
"bytes": "18658"
}
],
"symlink_target": ""
}
|
import pygtk
pygtk.require('2.0')
import gtk
import gobject
import fcntl
import functools
import os
import select
import signal
import subprocess
# Gleaned from <linux/cdrom.h>
CDROMEJECT = 0x5309
CDROMCLOSETRAY = 0x5319
CDROM_DRIVE_STATUS = 0x5326
CDROM_LOCKDOOR = 0x5329
class RootWindow(object):
icons = gtk.icon_theme_get_default()
def __init__(self):
def cmd(command, *args):
def l(w):
if self.player:
command(self.player, *args)
return l
self.player = None
self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.window.connect('delete_event', self.quit)
self.window.set_title('Media Player')
self.window.set_default_size(900, 300)
hbox = gtk.HBox()
self.window.add(hbox)
controls = gtk.VBox()
hbox.pack_start(controls, False, False, 0)
self.playlistControls = gtk.HBox(True)
primaryControls = gtk.VBox()
secondaryControls = gtk.VBox()
self.playlistControls.pack_start(primaryControls, True, True, 0)
self.playlistControls.pack_start(secondaryControls, True, True, 0)
controls.pack_start(self.playlistControls, True, True, 0)
extButton = gtk.Button('External Drive')
extButton.connect('clicked', lambda w: self.selectFile('/'))
folderButton = gtk.Button('Folder')
folderButton.connect('clicked', lambda w: self.selectFolder('/'))
ytButton = gtk.Button('YouTube Video')
ytButton.connect('clicked', lambda w: self.selectYouTube())
dvdButton = gtk.Button('DVD')
dvdButton.connect('clicked', lambda w: self.selectDVD())
removeButton = gtk.Button('Remove Selected')
removeButton.connect('clicked', lambda w: self.removeSelected())
removeAllButton = gtk.Button('Remove All')
removeAllButton.connect('clicked', lambda w: self.removeAll())
primaryControls.pack_start(extButton)
primaryControls.pack_start(folderButton)
primaryControls.pack_start(removeButton)
secondaryControls.pack_start(ytButton)
secondaryControls.pack_start(dvdButton)
secondaryControls.pack_start(removeAllButton)
player = gtk.VBox()
hbox.pack_start(player)
self.playlist = PlaylistWidget()
scroller = gtk.ScrolledWindow()
scroller.add(self.playlist.widget)
scroller.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
player.pack_start(scroller)
self.scrubber = gtk.HScale()
self.scrubber.set_draw_value(False)
self.scrubber.connect('adjust-bounds', lambda w, v: self.seek(v))
player.pack_end(self.scrubber, False, False, 0)
leftControls = gtk.VBox(True)
rightControls = gtk.VBox(True)
bigButtons = gtk.HBox(True)
controls.pack_start(bigButtons, True, True, 0)
bigButtons.pack_start(leftControls, True, True, 0)
bigButtons.pack_start(rightControls, True, True, 0)
        playButton = gtk.Button()
playButton.add(self._loadIcon('player_play'))
playButton.connect('clicked', self.play)
pauseButton = gtk.Button()
pauseButton.add(self._loadIcon('player_pause'))
pauseButton.connect('clicked', cmd(Control.togglePause))
stopButton = gtk.Button()
stopButton.add(self._loadIcon('player_stop'))
stopButton.connect('clicked', self.stop)
leftControls.pack_start(playButton, True, True, 0)
rightControls.pack_start(pauseButton, True, True, 0)
leftControls.pack_start(stopButton, True, True, 0)
prevButton = gtk.Button()
prevButton.add(self._loadIcon('player_start'))
prevButton.connect('clicked', cmd(Control.prev))
nextButton = gtk.Button()
nextButton.add(self._loadIcon('player_end'))
nextButton.connect('clicked', cmd(Control.next))
skipBox = gtk.HBox()
skipBox.pack_start(prevButton)
skipBox.pack_start(nextButton)
controls.pack_start(skipBox)
seekBox = gtk.HBox()
seekBackButton = gtk.Button()
seekBackButton.add(self._loadIcon('player_rew'))
seekBackButton.connect('clicked', cmd(Control.seekDelta, -10))
seekBox.pack_start(seekBackButton)
seekForwardButton = gtk.Button()
seekForwardButton.add(self._loadIcon('player_fwd'))
seekForwardButton.connect('clicked', cmd(Control.seekDelta, 10))
seekBox.pack_start(seekForwardButton)
controls.pack_start(seekBox)
dvdbox = gtk.Table(3, 3, True)
upButton = gtk.Button()
upButton.add(self._loadIconTiny('go-up'))
upButton.connect('clicked', cmd(Control.dvdControl, 'up'))
dvdbox.attach(upButton, 1, 2, 0, 1)
leftButton = gtk.Button()
leftButton.add(self._loadIconTiny('go-previous'))
leftButton.connect('clicked', cmd(Control.dvdControl, 'left'))
dvdbox.attach(leftButton, 0, 1, 1, 2)
downButton = gtk.Button()
downButton.add(self._loadIconTiny('go-down'))
downButton.connect('clicked', cmd(Control.dvdControl, 'down'))
dvdbox.attach(downButton, 1, 2, 2, 3)
rightButton = gtk.Button()
rightButton.add(self._loadIconTiny('go-next'))
rightButton.connect('clicked', cmd(Control.dvdControl, 'right'))
dvdbox.attach(rightButton, 2, 3, 1, 2)
selectButton = gtk.Button('OK')
selectButton.connect('clicked', cmd(Control.dvdControl, 'select'))
dvdbox.attach(selectButton, 1, 2, 1, 2)
lastChapterButton = gtk.Button()
lastChapterButton.add(self._loadIconTiny('go-first'))
lastChapterButton.connect('clicked', cmd(Control.seekChapter, -1))
dvdbox.attach(lastChapterButton, 0, 1, 0, 1)
nextChapterButton = gtk.Button()
nextChapterButton.add(self._loadIconTiny('go-last'))
nextChapterButton.connect('clicked', cmd(Control.seekChapter, 1))
dvdbox.attach(nextChapterButton, 2, 3, 0, 1)
menuButton = gtk.Button()
menuButton.add(self._loadIconTiny('undo'))
menuButton.connect('clicked', cmd(Control.dvdControl, 'menu'))
dvdbox.attach(menuButton, 0, 1, 2, 3)
ejectButton = gtk.Button()
ejectButton.add(self._loadIconTiny('player_eject'))
ejectButton.connect('clicked', lambda w: self.eject())
dvdbox.attach(ejectButton, 2, 3, 2, 3)
rightControls.pack_start(dvdbox, True, True, 0)
languageBox = gtk.HBox(True)
subsButton = gtk.Button('Subtitles')
subsButton.connect('clicked', cmd(Control.cycleSubs))
languageBox.pack_start(subsButton)
langButton = gtk.Button('Languages')
langButton.connect('clicked', cmd(Control.cycleLanguage))
languageBox.pack_start(langButton)
controls.pack_start(languageBox)
self.window.show_all()
@staticmethod
def _loadIcon(iconName):
return gtk.image_new_from_pixbuf(RootWindow.icons.load_icon(iconName, 96, 0))
@staticmethod
def _loadIconSmall(iconName):
return gtk.image_new_from_pixbuf(RootWindow.icons.load_icon(iconName, 48, 0))
@staticmethod
def _loadIconTiny(iconName):
return gtk.image_new_from_pixbuf(RootWindow.icons.load_icon(iconName, 32, 0))
def _setScrubberEnabled(self, enabled):
self.scrubber.set_sensitive(enabled)
if not enabled:
self.scrubber.set_value(0)
self._setPlaylistEnabled(not enabled)
def _setPlaylistEnabled(self, enabled):
self.playlist.widget.set_sensitive(enabled)
self.playlistControls.set_sensitive(enabled)
def removeSelected(self):
self.playlist.removeSelected()
def removeAll(self):
self.playlist.clear()
def selectFile(self, root):
f = gtk.FileChooserDialog('Select File', None, gtk.FILE_CHOOSER_ACTION_OPEN, (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK))
f.set_default_response(gtk.RESPONSE_CANCEL)
f.set_select_multiple(True)
f.set_current_folder(root)
response = f.run()
if response == gtk.RESPONSE_OK:
self.playlist.addItems([LocalFile(name) for name in f.get_filenames()])
f.destroy()
def selectFolder(self, root):
f = gtk.FileChooserDialog('Select Folder', None, gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER, (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK))
f.set_default_response(gtk.RESPONSE_CANCEL)
f.set_select_multiple(True)
f.set_create_folders(False)
f.set_current_folder(root)
response = f.run()
if response == gtk.RESPONSE_OK:
            # os.walk() returns a generator, so materialize each walk before concatenating
            paths = functools.reduce(list.__add__, [list(os.walk(name)) for name in f.get_filenames()])
            files = functools.reduce(list.__add__, [[os.path.join(root, file) for file in files] for root, dirs, files in paths])
files.sort()
self.playlist.addItems([LocalFile(name) for name in files])
f.destroy()
def selectYouTube(self):
ytWindow = gtk.MessageDialog(None, 0, gtk.MESSAGE_QUESTION, gtk.BUTTONS_OK_CANCEL, None)
ytWindow.set_markup('Enter URL')
ytWindow.connect('delete_event', lambda w,d: w.destroy())
ytWindow.set_default_response(gtk.RESPONSE_OK)
inputBox = gtk.Entry()
inputBox.set_activates_default(True)
ytWindow.vbox.pack_start(inputBox)
ytWindow.show_all()
response = ytWindow.run()
if response == gtk.RESPONSE_OK and inputBox.get_text():
try:
self.playlist.addItem(YouTubeMovie(inputBox.get_text()))
            except:
                # youtube-dl failed (bad URL or unavailable video); skip silently
                pass
ytWindow.destroy()
def selectDVD(self):
self.playlist.addItem(DVDMovie())
def update(self):
if self.player:
if self.player.ended():
self._setScrubberEnabled(False)
self.player = None
return False
try:
self._setScrubberEnabled(True)
self.scrubber.set_range(0, self.player.getDuration())
self.scrubber.set_value(self.player.getTime())
except:
self._setScrubberEnabled(False)
return not self.player.ended()
return True
return False
def play(self, widget):
if self.player and not self.player.ended():
if self.player.paused():
self.player.togglePause()
return
p = self.playlist.compile()
if p:
self.player = p.play()
self._setScrubberEnabled(True)
self.timer = gobject.timeout_add(500, self.update)
else:
error = gtk.MessageDialog(None, 0, gtk.MESSAGE_ERROR, gtk.BUTTONS_OK, None)
error.set_markup('Cannot play empty playlist!')
error.run()
error.destroy()
def seek(self, value):
if self.player:
duration = self.player.getDuration()
if not duration:
self.stop()
return
if value > duration - 6:
value = duration - 6
self.player.seek(value)
if value >= duration - 6:
self.player.pause()
def stop(self, widget=None):
if self.player:
self.player.quit()
self._setScrubberEnabled(False)
self.player = None
def eject(self):
cd = None
try:
cd = os.open('/dev/sr0', os.O_RDWR | os.O_NONBLOCK)
status = fcntl.ioctl(cd, CDROM_DRIVE_STATUS)
if status == 2:
fcntl.ioctl(cd, CDROMCLOSETRAY)
else:
if status == 4:
fcntl.ioctl(cd, CDROM_LOCKDOOR)
fcntl.ioctl(cd, CDROMEJECT)
except:
print('Could not eject CD')
finally:
if cd is not None:
os.close(cd)
def quit(self, widget, event, data=None):
gtk.main_quit()
return False
class LocalFile(object):
def __init__(self, filename):
self.filename = filename
def __repr__(self):
return self.filename
def uri(self):
return self.filename
@staticmethod
def type():
return 'File'
def name(self):
return self.filename
class YouTubeMovie(object):
def __init__(self, url):
if url.startswith('http://'):
self.url = url
else:
self.url = 'http://www.youtube.com/watch?v={0}'.format(url)
        self.download = self._pollProc(['-g']).rstrip()  # strip trailing newline from youtube-dl output
        self.title = self._pollProc(['-e']).rstrip()
def _pollProc(self, type):
command = ['youtube-dl']
command.extend(type)
command.append(self.url)
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
(out, err) = proc.communicate()
if proc.returncode:
raise subprocess.CalledProcessError(proc.returncode, ' '.join(command))
return out
def __repr__(self):
return 'YouTube: {0}'.format(self.url)
def uri(self):
return self.download
@staticmethod
def type():
return 'YouTube'
def name(self):
return self.title
class DVDMovie(object):
def __init__(self, dev='/dev/dvd'):
self.device = dev
command = ['blkid', '-o', 'value', '-s', 'LABEL']
command.append(self.device)
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
(out, err) = proc.communicate()
if proc.returncode:
self.label = 'Unknown DVD'
else:
self.label = out.rstrip()
def __repr__(self):
return 'DVD: {0}'.format(self.device)
def uri(self):
return 'dvdnav:///{0}'.format(self.device)
@staticmethod
def type():
return 'DVD'
def name(self):
return self.label
class PlaylistWidget(object):
def __init__(self):
def format_name(col, cell, model, iter):
obj = model.get_value(iter, 0)
cell.set_property('text', obj.name())
def format_type(col, cell, model, iter):
obj = model.get_value(iter, 0)
cell.set_property('text', obj.type())
self.listStore = gtk.ListStore(object)
nameText = gtk.CellRendererText()
nameCol = gtk.TreeViewColumn('Name', nameText)
nameCol.set_cell_data_func(nameText, format_name)
typeText = gtk.CellRendererText()
typeCol = gtk.TreeViewColumn('Type', typeText)
typeCol.set_cell_data_func(typeText, format_type)
self.widget = gtk.TreeView(self.listStore)
self.widget.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
self.widget.set_reorderable(True)
self.widget.append_column(nameCol)
self.widget.append_column(typeCol)
def addItem(self, item):
self.listStore.append([item])
def addItems(self, items):
for item in items:
self.addItem(item)
def removeSelected(self):
selected = self.widget.get_selection()
model, rows = selected.get_selected_rows()
iters = [model.get_iter(row) for row in rows]
for i in iters:
model.remove(i)
def clear(self):
self.listStore.clear()
def compile(self):
if not self.listStore.get_iter_root():
return None
playlist = Playlist()
self.listStore.foreach(lambda model, path, iter, user_data: playlist.items.append(model.get_value(iter, 0).uri()), None)
return playlist
class Control(object):
def __init__(self, fifo, proc):
self.file = fifo
self.fifo = open(fifo, 'w')
self.proc = proc
def __del__(self):
self.quit()
def quit(self):
try:
self.proc.terminate()
self.proc = None
except:
pass
try:
os.remove(self.file)
except:
pass
self.fifo.close()
def _write(self, command):
if self.ended():
return
self.fifo.write(command)
self.fifo.write('\n')
self.fifo.flush()
def _expect(self, answer, timeout=0):
def onTimeout():
raise RuntimeError('Timed out')
self.fifo.flush()
signal.signal(signal.SIGALRM, lambda no, fr: onTimeout())
if timeout:
signal.alarm(timeout)
while True:
if self.ended():
signal.alarm(0)
return None
line = self.proc.stdout.readline().rstrip()
command = line.split('=', 1)
if command[0] != answer:
continue
signal.alarm(0)
return command[1]
def ended(self):
return not self.proc or self.proc.poll() is not None
def seekDelta(self, delta):
self._write('seek {0}'.format('+{0}'.format(delta) if delta >= 0 else '{0}'.format(delta)))
def next(self):
self._write('pt_step +1')
def prev(self):
self._write('pt_step -1')
def togglePause(self):
self._write('pause')
def pause(self):
if not self.paused():
self.togglePause()
def getTime(self):
self._write('pausing_keep_force get_time_pos')
try:
return float(self._expect('ANS_TIME_POSITION', 1))
except:
return None
def getDuration(self):
self._write('pausing_keep_force get_time_length')
try:
return float(self._expect('ANS_LENGTH', 1))
except:
return None
def paused(self):
self._write('pausing_keep_force get_property pause')
try:
return self._expect('ANS_pause', 1) == 'yes'
except:
return False
def seek(self, value):
self._write('seek {0} 2'.format(value))
def seekChapter(self, direction):
self._write('seek_chapter {0}'.format(direction))
def getTrack(self):
pass
def cycleSubs(self):
self._write('sub_select')
def cycleLanguage(self):
self._write('switch_audio')
def dvdControl(self, control):
self._write('dvdnav {0}'.format(control))
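# Added note (mplayer slave-mode protocol, as used by Control above): commands
# are written to the FIFO one per line; queries such as
# "pausing_keep_force get_time_pos" are answered on mplayer's stdout as
# "ANS_<NAME>=<value>" lines (e.g. "ANS_TIME_POSITION=12.3"), which
# _expect() scans for and parses.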
class Playlist(object):
def __init__(self):
self.items = []
def play(self):
try:
os.remove('/tmp/mplayer.fifo')
except:
pass
try:
os.mkfifo('/tmp/mplayer.fifo', 0o660)
except:
pass
proc = subprocess.Popen(['mplayer', '-playlist', '-', '-quiet', '-slave', '-input', 'file=/tmp/mplayer.fifo'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, bufsize=1)
for i in self.items:
proc.stdin.write(i)
proc.stdin.write('\n')
proc.stdin.close()
return Control('/tmp/mplayer.fifo', proc)
def main():
try:
RootWindow()
gtk.main()
finally:
try:
os.remove('/tmp/mplayer.fifo')
except:
pass
if __name__ == '__main__':
main()
|
{
"content_hash": "688ad75e23a0a472a47527762aa4bb61",
"timestamp": "",
"source": "github",
"line_count": 576,
"max_line_length": 170,
"avg_line_length": 28.09548611111111,
"alnum_prop": 0.7007353395538528,
"repo_name": "endrift/mplayer-wrapper",
"id": "28131d6c67880fa1a43f835e5386a44ffd8e3716",
"size": "16206",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "player.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16206"
}
],
"symlink_target": ""
}
|
"""Tests for Interval Bound Propagation."""
import functools
from absl.testing import absltest
from absl.testing import parameterized
import haiku as hk
import jax
import jax.numpy as jnp
import jax_verify
import numpy as np
class IBPTest(parameterized.TestCase):
def assertArrayAlmostEqual(self, lhs, rhs):
diff = jnp.abs(lhs - rhs).max()
self.assertAlmostEqual(diff, 0.)
def test_linear_ibp(self):
def linear_model(inp):
return hk.Linear(1)(inp)
z = jnp.array([[1., 2., 3.]])
params = {'linear':
{'w': jnp.ones((3, 1), dtype=jnp.float32),
'b': jnp.array([2.])}}
fun = functools.partial(
hk.without_apply_rng(hk.transform(linear_model)).apply,
params)
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.interval_bound_propagation(fun, input_bounds)
self.assertAlmostEqual(5., output_bounds.lower)
self.assertAlmostEqual(11., output_bounds.upper)
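    # Worked check (added note): with w = [1, 1, 1] and b = 2, interval
    # arithmetic over inputs [1, 2, 3] +/- 1 gives lower (0 + 1 + 2) + 2 = 5
    # and upper (2 + 3 + 4) + 2 = 11, matching the assertions above.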
def test_conv1d_ibp(self):
def conv1d_model(inp):
return hk.Conv1D(output_channels=1, kernel_shape=2,
padding='VALID', stride=1, with_bias=True)(inp)
z = jnp.array([3., 4.])
z = jnp.reshape(z, [1, 2, 1])
params = {'conv1_d':
{'w': jnp.ones((2, 1, 1), dtype=jnp.float32),
'b': jnp.array([2.])}}
fun = functools.partial(
hk.without_apply_rng(hk.transform(conv1d_model)).apply,
params)
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.interval_bound_propagation(fun, input_bounds)
self.assertAlmostEqual(7., output_bounds.lower, delta=1e-5)
self.assertAlmostEqual(11., output_bounds.upper, delta=1e-5)
def test_conv2d_ibp(self):
def conv2d_model(inp):
return hk.Conv2D(output_channels=1, kernel_shape=(2, 2),
padding='VALID', stride=1, with_bias=True)(inp)
z = jnp.array([1., 2., 3., 4.])
z = jnp.reshape(z, [1, 2, 2, 1])
params = {'conv2_d':
{'w': jnp.ones((2, 2, 1, 1), dtype=jnp.float32),
'b': jnp.array([2.])}}
fun = functools.partial(
hk.without_apply_rng(hk.transform(conv2d_model)).apply,
params)
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.interval_bound_propagation(fun, input_bounds)
self.assertAlmostEqual(8., output_bounds.lower)
self.assertAlmostEqual(16., output_bounds.upper)
@parameterized.named_parameters(
('exp', jnp.exp, [[-2., 3.]]),
('log', jnp.log, [[3., 5.]]),
('relu', jax.nn.relu, [[-2., 3.]]),
('softplus', jax.nn.softplus, [[-2., 3.]]),
('sign', jnp.sign, [[-2., 3.]]),
('sigmoid', jax.nn.sigmoid, [[-2., 3.]]),
)
def test_passthrough_primitive(self, fn, inputs):
z = jnp.array(inputs)
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.interval_bound_propagation(fn, input_bounds)
self.assertArrayAlmostEqual(fn(input_bounds.lower), output_bounds.lower)
self.assertArrayAlmostEqual(fn(input_bounds.upper), output_bounds.upper)
@parameterized.named_parameters(
('positive', (1.0, 4.0), (1.0, 2.0)),
('negative', (-4.0, -1.0), (float('nan'), float('nan'))),
('zero_edge', (0.0, 1.0), (0.0, 1.0)),
('zero_cross', (-1.0, 1.0), (float('nan'), 1.0)))
def test_sqrt(self, input_bounds, expected):
input_bounds = jax_verify.IntervalBound(
np.array([input_bounds[0], 0.0]), np.array([input_bounds[1], 0.0]))
output_bounds = jax_verify.interval_bound_propagation(
jnp.sqrt, input_bounds)
np.testing.assert_array_equal(
np.array([expected[0], 0.0]), output_bounds.lower)
np.testing.assert_array_equal(
np.array([expected[1], 0.0]), output_bounds.upper)
@parameterized.named_parameters(
('square_positive', 2, (1.0, 2.0), (1.0, 4.0)),
('square_negative', 2, (-2.0, -1.0), (1.0, 4.0)),
('square_zero', 2, (-1.0, 2.0), (0.0, 4.0)),
('cube_positive', 3, (1.0, 2.0), (1.0, 8.0)),
('cube_negative', 3, (-2.0, -1.0), (-8.0, -1.0)),
('cube_zero', 3, (-1.0, 2.0), (-1.0, 8.0)))
def test_integer_pow(self, exponent, input_bounds, expected):
@jax.jit
def _compute_bounds(lower, upper):
input_bounds = jax_verify.IntervalBound(lower, upper)
output_bounds = jax_verify.interval_bound_propagation(
lambda x: x**exponent, input_bounds)
return output_bounds.lower, output_bounds.upper
output_bounds = _compute_bounds(
np.array([input_bounds[0], 0.0]), np.array([input_bounds[1], 0.0]))
np.testing.assert_array_equal(
np.array([expected[0], 0.0]), output_bounds[0])
np.testing.assert_array_equal(
np.array([expected[1], 0.0]), output_bounds[1])
if __name__ == '__main__':
absltest.main()
|
{
"content_hash": "a7b2115b8facfce57459b35ca467e51b",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 76,
"avg_line_length": 35.54014598540146,
"alnum_prop": 0.5995070856438693,
"repo_name": "deepmind/jax_verify",
"id": "9d89f114f74c6a0f6d8d1fc75f2e0a3e2f72df93",
"size": "5479",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jax_verify/tests/ibp_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "915516"
}
],
"symlink_target": ""
}
|
"""
This module provides classes that operate on points or vectors in 3D space.
"""
from __future__ import division, unicode_literals
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
import numpy as np
import re
from math import sin, cos, pi, sqrt
from pymatgen.serializers.json_coders import PMGSONable
class SymmOp(PMGSONable):
"""
A symmetry operation in cartesian space. Consists of a rotation plus a
translation. Implementation is as an affine transformation matrix of rank 4
for efficiency. Read: http://en.wikipedia.org/wiki/Affine_transformation.
.. attribute:: affine_matrix
A 4x4 numpy.array representing the symmetry operation.
"""
def __init__(self, affine_transformation_matrix, tol=0.01):
"""
Initializes the SymmOp from a 4x4 affine transformation matrix.
In general, this constructor should not be used unless you are
transferring rotations. Use the static constructors instead to
generate a SymmOp from proper rotations and translation.
Args:
affine_transformation_matrix (4x4 array): Representing an
affine transformation.
tol (float): Tolerance for determining if matrices are equal.
"""
affine_transformation_matrix = np.array(affine_transformation_matrix)
if affine_transformation_matrix.shape != (4, 4):
raise ValueError("Affine Matrix must be a 4x4 numpy array!")
self.affine_matrix = affine_transformation_matrix
self.tol = tol
@staticmethod
def from_rotation_and_translation(rotation_matrix=((1, 0, 0),
(0, 1, 0),
(0, 0, 1)),
translation_vec=(0, 0, 0),
tol=0.1):
"""
Creates a symmetry operation from a rotation matrix and a translation
vector.
Args:
rotation_matrix (3x3 array): Rotation matrix.
translation_vec (3x1 array): Translation vector.
tol (float): Tolerance to determine if rotation matrix is valid.
Returns:
SymmOp object
"""
rotation_matrix = np.array(rotation_matrix)
translation_vec = np.array(translation_vec)
if rotation_matrix.shape != (3, 3):
raise ValueError("Rotation Matrix must be a 3x3 numpy array.")
if translation_vec.shape != (3,):
raise ValueError("Translation vector must be a rank 1 numpy array "
"with 3 elements.")
affine_matrix = np.eye(4)
affine_matrix[0:3][:, 0:3] = rotation_matrix
affine_matrix[0:3][:, 3] = translation_vec
return SymmOp(affine_matrix, tol)
def __eq__(self, other):
return np.allclose(self.affine_matrix, other.affine_matrix,
atol=self.tol)
    def __hash__(self):
        # Constant hash: equality is tolerance-based, so a content-derived
        # hash could violate the hash/eq contract.
        return 7
def __repr__(self):
return self.__str__()
def __str__(self):
output = ["Rot:", str(self.affine_matrix[0:3][:, 0:3]), "tau",
str(self.affine_matrix[0:3][:, 3])]
return "\n".join(output)
def operate(self, point):
"""
Apply the operation on a point.
Args:
point: Cartesian coordinate.
Returns:
Coordinates of point after operation.
"""
affine_point = np.array([point[0], point[1], point[2], 1])
return np.dot(self.affine_matrix, affine_point)[0:3]
def operate_multi(self, points):
"""
Apply the operation on a list of points.
Args:
points: List of Cartesian coordinates
Returns:
Numpy array of coordinates after operation
"""
points = np.array(points)
affine_points = np.concatenate([points, np.ones(points.shape[:-1] + (1,))], axis=-1)
return np.inner(affine_points, self.affine_matrix)[..., :-1]
def apply_rotation_only(self, vector):
"""
Vectors should only be operated by the rotation matrix and not the
translation vector.
Args:
vector (3x1 array): A vector.
"""
return np.dot(self.rotation_matrix, vector)
def are_symmetrically_related(self, point_a, point_b, tol=0.001):
"""
Checks if two points are symmetrically related.
Args:
point_a (3x1 array): First point.
point_b (3x1 array): Second point.
tol (float): Absolute tolerance for checking distance.
Returns:
True if self.operate(point_a) == point_b or vice versa.
"""
if np.allclose(self.operate(point_a), point_b, atol=tol):
return True
if np.allclose(self.operate(point_b), point_a, atol=tol):
return True
return False
@property
def rotation_matrix(self):
"""
A 3x3 numpy.array representing the rotation matrix.
"""
return self.affine_matrix[0:3][:, 0:3]
@property
def translation_vector(self):
"""
A rank 1 numpy.array of dim 3 representing the translation vector.
"""
return self.affine_matrix[0:3][:, 3]
def __mul__(self, other):
"""
Returns a new SymmOp which is equivalent to apply the "other" SymmOp
followed by this one.
"""
new_matrix = np.dot(self.affine_matrix, other.affine_matrix)
return SymmOp(new_matrix)
@property
def inverse(self):
"""
Returns inverse of transformation.
"""
invr = np.linalg.inv(self.affine_matrix)
return SymmOp(invr)
@staticmethod
def from_axis_angle_and_translation(axis, angle, angle_in_radians=False,
translation_vec=(0, 0, 0)):
"""
Generates a SymmOp for a rotation about a given axis plus translation.
Args:
            axis: The axis of rotation in cartesian space. For example,
                [1, 0, 0] indicates rotation about the x-axis.
            angle (float): Angle of rotation.
            angle_in_radians (bool): Set to True if angles are given in
                radians. Otherwise, degrees are assumed.
translation_vec: A translation vector. Defaults to zero.
Returns:
SymmOp for a rotation about given axis and translation.
"""
if isinstance(axis, (tuple, list)):
axis = np.array(axis)
if isinstance(translation_vec, (tuple, list)):
vec = np.array(translation_vec)
else:
vec = translation_vec
a = angle if angle_in_radians else angle * pi / 180
cosa = cos(a)
sina = sin(a)
u = axis / np.linalg.norm(axis)
r = np.zeros((3, 3))
r[0, 0] = cosa + u[0] ** 2 * (1 - cosa)
r[0, 1] = u[0] * u[1] * (1 - cosa) - u[2] * sina
r[0, 2] = u[0] * u[2] * (1 - cosa) + u[1] * sina
r[1, 0] = u[0] * u[1] * (1 - cosa) + u[2] * sina
r[1, 1] = cosa + u[1] ** 2 * (1 - cosa)
r[1, 2] = u[1] * u[2] * (1 - cosa) - u[0] * sina
r[2, 0] = u[0] * u[2] * (1 - cosa) - u[1] * sina
r[2, 1] = u[1] * u[2] * (1 - cosa) + u[0] * sina
r[2, 2] = cosa + u[2] ** 2 * (1 - cosa)
return SymmOp.from_rotation_and_translation(r, vec)
@staticmethod
def from_origin_axis_angle(origin, axis, angle, angle_in_radians=False):
"""
Generates a SymmOp for a rotation about a given axis through an
origin.
Args:
origin (3x1 array): The origin which the axis passes through.
            axis (3x1 array): The axis of rotation in cartesian space. For
                example, [1, 0, 0] indicates rotation about the x-axis.
            angle (float): Angle of rotation.
            angle_in_radians (bool): Set to True if angles are given in
                radians. Otherwise, degrees are assumed.
Returns:
SymmOp.
"""
theta = angle * pi / 180 if not angle_in_radians else angle
a = origin[0]
b = origin[1]
c = origin[2]
u = axis[0]
v = axis[1]
w = axis[2]
# Set some intermediate values.
u2 = u * u
v2 = v * v
w2 = w * w
cos_t = cos(theta)
sin_t = sin(theta)
l2 = u2 + v2 + w2
l = sqrt(l2)
# Build the matrix entries element by element.
m11 = (u2 + (v2 + w2) * cos_t) / l2
m12 = (u * v * (1 - cos_t) - w * l * sin_t) / l2
m13 = (u * w * (1 - cos_t) + v * l * sin_t) / l2
m14 = (a * (v2 + w2) - u * (b * v + c * w)
+ (u * (b * v + c * w) - a * (v2 + w2)) * cos_t
+ (b * w - c * v) * l * sin_t) / l2
m21 = (u * v * (1 - cos_t) + w * l * sin_t) / l2
m22 = (v2 + (u2 + w2) * cos_t) / l2
m23 = (v * w * (1 - cos_t) - u * l * sin_t) / l2
m24 = (b * (u2 + w2) - v * (a * u + c * w)
+ (v * (a * u + c * w) - b * (u2 + w2)) * cos_t
+ (c * u - a * w) * l * sin_t) / l2
m31 = (u * w * (1 - cos_t) - v * l * sin_t) / l2
m32 = (v * w * (1 - cos_t) + u * l * sin_t) / l2
m33 = (w2 + (u2 + v2) * cos_t) / l2
m34 = (c * (u2 + v2) - w * (a * u + b * v)
+ (w * (a * u + b * v) - c * (u2 + v2)) * cos_t
+ (a * v - b * u) * l * sin_t) / l2
return SymmOp([[m11, m12, m13, m14], [m21, m22, m23, m24],
[m31, m32, m33, m34], [0, 0, 0, 1]])
@staticmethod
def reflection(normal, origin=(0, 0, 0)):
"""
Returns reflection symmetry operation.
Args:
normal (3x1 array): Vector of the normal to the plane of
reflection.
            origin (3x1 array): A point through which the mirror plane
                passes.
Returns:
SymmOp for the reflection about the plane
"""
        # Normalize the normal vector first.
n = np.array(normal, dtype=float) / np.linalg.norm(normal)
u, v, w = n
translation = np.eye(4)
translation[0:3, 3] = -np.array(origin)
xx = 1 - 2 * u ** 2
yy = 1 - 2 * v ** 2
zz = 1 - 2 * w ** 2
xy = -2 * u * v
xz = -2 * u * w
yz = -2 * v * w
mirror_mat = [[xx, xy, xz, 0], [xy, yy, yz, 0], [xz, yz, zz, 0],
[0, 0, 0, 1]]
if np.linalg.norm(origin) > 1e-6:
mirror_mat = np.dot(np.linalg.inv(translation),
np.dot(mirror_mat, translation))
return SymmOp(mirror_mat)
@staticmethod
def inversion(origin=(0, 0, 0)):
"""
        Inversion symmetry operation about a point.
Args:
origin (3x1 array): Origin of the inversion operation. Defaults
to [0, 0, 0].
Returns:
SymmOp representing an inversion operation about the origin.
"""
mat = -np.eye(4)
mat[3, 3] = 1
mat[0:3, 3] = 2 * np.array(origin)
return SymmOp(mat)
@staticmethod
def rotoreflection(axis, angle, origin=(0, 0, 0)):
"""
Returns a roto-reflection symmetry operation
Args:
axis (3x1 array): Axis of rotation / mirror normal
angle (float): Angle in degrees
origin (3x1 array): Point left invariant by roto-reflection.
Defaults to (0, 0, 0).
Return:
Roto-reflection operation
"""
rot = SymmOp.from_origin_axis_angle(origin, axis, angle)
refl = SymmOp.reflection(axis, origin)
m = np.dot(rot.affine_matrix, refl.affine_matrix)
return SymmOp(m)
def as_dict(self):
d = {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"matrix": self.affine_matrix.tolist(), "tolerance": self.tol}
return d
def as_xyz_string(self):
"""
Returns a string of the form 'x, y, z', '-x, -y, z',
'-y+1/2, x+1/2, z+1/2', etc. Only works for integer rotation matrices
"""
xyz = ['x', 'y', 'z']
strings = []
# test for invalid rotation matrix
if not np.all(np.isclose(self.rotation_matrix,
np.round(self.rotation_matrix))):
raise ValueError('Rotation matrix must be integer')
        import fractions
        for r, t in zip(self.rotation_matrix, self.translation_vector):
symbols = []
for val, axis in zip(r, xyz):
val = int(round(val))
if val == 1:
if symbols:
symbols.append('+')
symbols.append(axis)
elif val == -1:
symbols.append('-' + axis)
elif val > 1:
if symbols:
symbols.append('+')
symbols.append(str(val) + axis)
elif val < -1:
symbols.append(str(val) + axis)
            f = fractions.Fraction(float(t)).limit_denominator()
if abs(f) > 1e-6:
if f > 0:
symbols.append('+')
symbols.append(str(f))
strings.append("".join(symbols))
return ', '.join(strings)
@staticmethod
def from_xyz_string(xyz_string):
"""
Args:
xyz_string: string of the form 'x, y, z', '-x, -y, z',
'-2y+1/2, 3x+1/2, z-y+1/2', etc.
Returns:
SymmOp
"""
rot_matrix = np.zeros((3, 3))
trans = np.zeros(3)
toks = xyz_string.strip().split(",")
for i, tok in enumerate(toks):
# build the rotation matrix
for m in re.finditer("([\+\-]?)\s*(\d*)\s*([x-z]+)", tok):
factor = -1 if m.group(1) == "-" else 1
if m.group(2):
factor *= float(m.group(2))
j = ord(m.group(3)) - 120
rot_matrix[i, j] = factor
# build the translation vector
for m in re.finditer("([\+\-])\s*(\d+)\s*/*\s*(\d*)\s*$", tok):
factor = -1 if m.group(1) == "-" else 1
num = float(m.group(2))
if m.group(3) != "":
num /= float(m.group(3))
trans[i] = num * factor
return SymmOp.from_rotation_and_translation(rot_matrix, trans)
@classmethod
def from_dict(cls, d):
return cls(d["matrix"], d["tolerance"])
|
{
"content_hash": "795560f70fdd9380e225f227df7c4e4e",
"timestamp": "",
"source": "github",
"line_count": 429,
"max_line_length": 92,
"avg_line_length": 34.67365967365967,
"alnum_prop": 0.5057478991596639,
"repo_name": "yanikou19/pymatgen",
"id": "58088096e04f9dfd402ecbe1eb7817662aac772e",
"size": "14892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymatgen/core/operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7429"
},
{
"name": "JavaScript",
"bytes": "3638"
},
{
"name": "Propeller Spin",
"bytes": "4026362"
},
{
"name": "Python",
"bytes": "3368797"
},
{
"name": "Shell",
"bytes": "5100"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('feti', '0007_course_campuses'),
]
operations = [
migrations.RemoveField(
model_name='course',
name='campuses',
),
migrations.AddField(
model_name='campus',
name='courses',
field=models.ManyToManyField(to='feti.Course'),
preserve_default=True,
),
]
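# Added note: these operations move the many-to-many relation from
# Course.campuses onto Campus.courses; Django creates a fresh through table
# for the new field, so existing relation rows are not carried over by this
# migration.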
|
{
"content_hash": "c0837e90b891105aca7bfe93038045ab",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 59,
"avg_line_length": 22.608695652173914,
"alnum_prop": 0.5596153846153846,
"repo_name": "cchristelis/feti",
"id": "e9c369b847e897a089c57ccfc3b76543b9df309c",
"size": "544",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "django_project/feti/migrations/0008_auto_20150518_1153.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "66178"
},
{
"name": "HTML",
"bytes": "3411827"
},
{
"name": "JavaScript",
"bytes": "525391"
},
{
"name": "Makefile",
"bytes": "16513"
},
{
"name": "PLpgSQL",
"bytes": "9805987"
},
{
"name": "Python",
"bytes": "372712"
},
{
"name": "Shell",
"bytes": "2539"
}
],
"symlink_target": ""
}
|
import joblib
import logging
import os
import pickle
import numpy as np
import xgboost as xgb
from google.cloud.aiplatform.constants import prediction
from google.cloud.aiplatform.utils import prediction_utils
from google.cloud.aiplatform.prediction.predictor import Predictor
class XgboostPredictor(Predictor):
"""Default Predictor implementation for Xgboost models."""
def __init__(self):
return
def load(self, artifacts_uri: str) -> None:
"""Loads the model artifact.
Args:
artifacts_uri (str):
Required. The value of the environment variable AIP_STORAGE_URI.
Raises:
ValueError: If there's no required model files provided in the artifacts
uri.
"""
prediction_utils.download_model_artifacts(artifacts_uri)
if os.path.exists(prediction.MODEL_FILENAME_BST):
booster = xgb.Booster(model_file=prediction.MODEL_FILENAME_BST)
elif os.path.exists(prediction.MODEL_FILENAME_JOBLIB):
try:
booster = joblib.load(prediction.MODEL_FILENAME_JOBLIB)
except KeyError:
logging.info(
"Loading model using joblib failed. "
"Loading model using xgboost.Booster instead."
)
booster = xgb.Booster()
booster.load_model(prediction.MODEL_FILENAME_JOBLIB)
elif os.path.exists(prediction.MODEL_FILENAME_PKL):
booster = pickle.load(open(prediction.MODEL_FILENAME_PKL, "rb"))
else:
valid_filenames = [
prediction.MODEL_FILENAME_BST,
prediction.MODEL_FILENAME_JOBLIB,
prediction.MODEL_FILENAME_PKL,
]
raise ValueError(
f"One of the following model files must be provided: {valid_filenames}."
)
self._booster = booster
def preprocess(self, prediction_input: dict) -> xgb.DMatrix:
"""Converts the request body to a Data Matrix before prediction.
Args:
prediction_input (dict):
Required. The prediction input that needs to be preprocessed.
Returns:
The preprocessed prediction input.
"""
instances = prediction_input["instances"]
return xgb.DMatrix(instances)
def predict(self, instances: xgb.DMatrix) -> np.ndarray:
"""Performs prediction.
Args:
instances (xgb.DMatrix):
Required. The instance(s) used for performing prediction.
Returns:
Prediction results.
"""
return self._booster.predict(instances)
def postprocess(self, prediction_results: np.ndarray) -> dict:
"""Converts numpy array to a dict.
Args:
prediction_results (np.ndarray):
Required. The prediction results.
Returns:
The postprocessed prediction results.
"""
return {"predictions": prediction_results.tolist()}
|
{
"content_hash": "a5bb173bc57a9a56ba1b4355dd50b708",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 88,
"avg_line_length": 34.75,
"alnum_prop": 0.6066056245912361,
"repo_name": "googleapis/python-aiplatform",
"id": "005efcb1294c3a811762f788c7a276fccebc21f0",
"size": "3660",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/cloud/aiplatform/prediction/xgboost/predictor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "23977004"
},
{
"name": "Shell",
"bytes": "30668"
}
],
"symlink_target": ""
}
|
"""
recent.py - Used for trello-rss to grab recent Trello updates on your account
Written by Nate Collings
TODO:
- Currently just retrieves ALL it can given a specific token. Add support for just getting
updates for certain boards, public or otherwise. The current process just filters it out later.
"""
import config
from trello import TrelloClient
from datetime import datetime
class InvalidItem(Exception):
"""Raised when a user calls for an item that is not supported"""
pass
class Recent:
"""
Class used to retrieve recent Trello updates. Uses sarumont's py-trello API wrapper lightly.
Currently I just grab the full lump of data from the board API call.
For my use of Trello this works just fine, but if you're using it really
heavily I could see this using up too much memory, or the resultant xml being too big or something.
Could improve that by specifying exactly what we want with the ?filter param, or digging into using
the lists/cards apis more directly. However, I'm currently serving over 100 rss feeds on
trellorss.appspot.com with this method, so it works well enough, even if it kind of bothers me.
"""
def __init__(self, api_key, api_private_key, token=None, board_id=None, public_board=False, all_private=False):
self.api_key = api_key
self.api_private_key = api_private_key
self.token = token
self.public_only = False
if self.token is None:
self.public_only = True
self.trello = TrelloClient(self.api_key, self.api_private_key, self.token)
self.boards = None # Lazy, so doesn't fetch until we ask for them
self.board_id = board_id
self.public_board = public_board
self.all_private = all_private
# A list of items currently supported. The user should pass in one of the keys below,
# and we use the values when passing it to the Trello API.
self.items = config.all_item_types
def create_date(self, date):
return datetime.strptime(date[:-5], '%Y-%m-%dT%H:%M:%S')
def fetch_items(self, item_names):
""" Fetch the specified recent activity for item_names """
for item in item_names:
if item not in self.items:
raise InvalidItem("%s is not a supported item." % item)
items = ','.join([self.items[item] for item in item_names])
if self.all_private:
return self._get_activity(items, None)
else:
return self._get_activity(items, self._get_boards())
def _get_boards(self):
""" Calls the list_boards() method if we haven't already """
if self.board_id:
self.boards = self.trello.get_board(self.board_id)
elif self.boards is None:
self.boards = self.trello.list_boards()
return self.boards
def _get_activity(self, action_filter, boards):
"""Given a action filter, returns those actions for boards from the Trello API"""
actions = []
if self.all_private:
self.trello.info_for_all_boards(action_filter)
actions.append(self.trello.all_info)
else:
            if not isinstance(boards, list):
boards = [boards]
for board in boards:
                if not board.closed:
board.fetch_actions(action_filter)
if len(board.actions) > 0:
actions.append(board.actions)
return actions
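# Usage sketch (added; the key, secret and token values are placeholders, and
# the item names must be keys of config.all_item_types -- 'comment' here is an
# assumed example):
#
#   r = Recent(API_KEY, API_SECRET, token=TOKEN)
#   actions = r.fetch_items(['comment'])  # unknown names raise InvalidItem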
|
{
"content_hash": "c8f04ba857859f39b1ca07e7053e4b8c",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 115,
"avg_line_length": 39.37078651685393,
"alnum_prop": 0.6421232876712328,
"repo_name": "naiyt/trello-rss",
"id": "01dcafb2a3b8525ae171fe8b6c6ad02fad74df9f",
"size": "3504",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "recent.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15900"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
setup(name='injections',
version='0.2.2',
description='Simple dependency injection library',
author='Paul Colomiets',
author_email='paul@colomiets.name',
url='http://github.com/tailhook/injections',
packages=[
'injections',
],
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2',
],
license='MIT',
)
|
{
"content_hash": "9e83dd2fba3901866f1334bf5d98a568",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 56,
"avg_line_length": 28.22222222222222,
"alnum_prop": 0.5787401574803149,
"repo_name": "tailhook/injections",
"id": "d8139439efc1021e3c035dcb908df33b56327bdf",
"size": "531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11622"
}
],
"symlink_target": ""
}
|
import json
from GetEWSFolder import main, convert_mail_to_json
import demistomock as demisto
def create_mail(subject, body):
    return {'subject': subject, 'textBody': body, 'body': '<body>{}</body>'.format(body)}
mails_folder_1 = [create_mail(subject='subject 1', body='body 1'), create_mail(subject='subject 2', body='body 2')]
mails_folder_2 = [create_mail(subject='subject 3', body='body 3'), create_mail(subject='subject 4', body='body 4')]
def identical_mail(mail1, mail2):
if len(mail1) != len(mail2):
return False
return all(mail1[field] == mail2[field] for field in ['subject', 'body', 'textBody'])
def test_main(mocker):
def executeCommand(name, args):
if args['folder-path'] == 'folder1':
return [{'Contents': mails_folder_1, 'Type': 'Content'}]
if args['folder-path'] == 'folder2':
return [{'Contents': mails_folder_2, 'Type': 'Content'}]
else:
            raise ValueError('Nonexistent folder: {}'.format(args['folder-path']))
mocker.patch.object(demisto, 'args', return_value={
"foldersPaths": 'folder1,folder2'
})
mocker.patch.object(demisto, 'executeCommand', side_effect=executeCommand)
mocker.patch.object(demisto, 'results')
entry = main()
file_name = '1_{}'.format(entry['FileID'])
with open(file_name) as json_file:
mails_from_file = json.load(json_file)
assert len(mails_from_file) == len(mails_folder_1) + len(mails_folder_2)
for mails_folder, folder in zip([mails_folder_1, mails_folder_2], ['folder1', 'folder2']):
for mail in mails_folder:
formatted_mail = convert_mail_to_json(mail, folder)
assert sum(identical_mail(m, formatted_mail) for m in mails_from_file) == 1
|
{
"content_hash": "d71abea7ba4733b4c71782adf5d05a84",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 115,
"avg_line_length": 37.58695652173913,
"alnum_prop": 0.6396761133603239,
"repo_name": "VirusTotal/content",
"id": "1951b61e66144e7d92ab0a1f298aabf8882d6466",
"size": "1729",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Packs/EWS/Scripts/GetEWSFolder/GetWESFolder_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2146"
},
{
"name": "HTML",
"bytes": "205901"
},
{
"name": "JavaScript",
"bytes": "1584075"
},
{
"name": "PowerShell",
"bytes": "442288"
},
{
"name": "Python",
"bytes": "47594464"
},
{
"name": "Rich Text Format",
"bytes": "480911"
},
{
"name": "Shell",
"bytes": "108066"
},
{
"name": "YARA",
"bytes": "1185"
}
],
"symlink_target": ""
}
|
import logging
from . import Analysis
from .. import SIM_LIBRARIES
from ..errors import AngrValueError
l = logging.getLogger(name=__name__)
class StaticHooker(Analysis):
"""
This analysis works on statically linked binaries - it finds the library functions statically
linked into the binary and hooks them with the appropriate simprocedures.
Right now it only works on unstripped binaries, but hey! There's room to grow!
"""
def __init__(self, library, binary=None):
self.results = {}
try:
lib = SIM_LIBRARIES[library]
except KeyError:
raise AngrValueError("No such library %s" % library)
if binary is None:
binary = self.project.loader.main_object
for func in binary.symbols:
if not func.is_function:
continue
if self.project.is_hooked(func.rebased_addr):
l.debug("Skipping %s at %#x, already hooked", func.name, func.rebased_addr)
continue
if lib.has_implementation(func.name):
proc = lib.get(func.name, self.project.arch)
self.results[func.rebased_addr] = proc
                # already-hooked symbols were skipped above, so hook unconditionally
                self.project.hook(func.rebased_addr, proc)
                l.info("Hooked %s at %#x", func.name, func.rebased_addr)
else:
l.debug("Failed to hook %s at %#x", func.name, func.rebased_addr)
from angr.analyses import AnalysesHub
AnalysesHub.register_default('StaticHooker', StaticHooker)
|
{
"content_hash": "217d36412531a47c77e7497222a0eda8",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 97,
"avg_line_length": 35.583333333333336,
"alnum_prop": 0.607728337236534,
"repo_name": "iamahuman/angr",
"id": "34d71fed93dd67d03514ab7241420bb007dea29b",
"size": "1709",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "angr/analyses/static_hooker.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "6375"
},
{
"name": "C++",
"bytes": "39420"
},
{
"name": "Dockerfile",
"bytes": "493"
},
{
"name": "Makefile",
"bytes": "620"
},
{
"name": "Python",
"bytes": "4842037"
}
],
"symlink_target": ""
}
|
import sys
import unittest
import libsbml
def isnan(x):
return (x != x)
pass
class TestL3Parameter(unittest.TestCase):
global P
P = None
def setUp(self):
self.P = libsbml.Parameter(3,1)
if (self.P == None):
pass
pass
def tearDown(self):
_dummyList = [ self.P ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Parameter_NS(self):
self.assert_( self.P.getNamespaces() != None )
self.assert_( self.P.getNamespaces().getLength() == 1 )
self.assert_(( "http://www.sbml.org/sbml/level3/version1/core" == self.P.getNamespaces().getURI(0) ))
pass
def test_L3_Parameter_constant(self):
self.assert_( self.P.isSetConstant() == False )
self.P.setConstant(True)
self.assert_( self.P.getConstant() == True )
self.assert_( self.P.isSetConstant() == True )
self.P.setConstant(False)
self.assert_( self.P.getConstant() == False )
self.assert_( self.P.isSetConstant() == True )
pass
def test_L3_Parameter_create(self):
self.assert_( self.P.getTypeCode() == libsbml.SBML_PARAMETER )
self.assert_( self.P.getMetaId() == "" )
self.assert_( self.P.getNotes() == None )
self.assert_( self.P.getAnnotation() == None )
self.assert_( self.P.getId() == "" )
self.assert_( self.P.getName() == "" )
self.assert_( self.P.getUnits() == "" )
self.assertEqual( True, isnan(self.P.getValue()) )
self.assert_( self.P.getConstant() == True )
self.assertEqual( False, self.P.isSetId() )
self.assertEqual( False, self.P.isSetName() )
self.assertEqual( False, self.P.isSetValue() )
self.assertEqual( False, self.P.isSetUnits() )
self.assertEqual( False, self.P.isSetConstant() )
pass
def test_L3_Parameter_createWithNS(self):
xmlns = libsbml.XMLNamespaces()
xmlns.add( "http://www.sbml.org", "testsbml")
sbmlns = libsbml.SBMLNamespaces(3,1)
sbmlns.addNamespaces(xmlns)
p = libsbml.Parameter(sbmlns)
self.assert_( p.getTypeCode() == libsbml.SBML_PARAMETER )
self.assert_( p.getMetaId() == "" )
self.assert_( p.getNotes() == None )
self.assert_( p.getAnnotation() == None )
self.assert_( p.getLevel() == 3 )
self.assert_( p.getVersion() == 1 )
self.assert_( p.getNamespaces() != None )
self.assert_( p.getNamespaces().getLength() == 2 )
self.assert_( p.getId() == "" )
self.assert_( p.getName() == "" )
self.assert_( p.getUnits() == "" )
self.assertEqual( True, isnan(p.getValue()) )
self.assert_( p.getConstant() == True )
self.assertEqual( False, p.isSetId() )
self.assertEqual( False, p.isSetName() )
self.assertEqual( False, p.isSetValue() )
self.assertEqual( False, p.isSetUnits() )
self.assertEqual( False, p.isSetConstant() )
_dummyList = [ p ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Parameter_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Parameter_hasRequiredAttributes(self):
p = libsbml.Parameter(3,1)
self.assertEqual( False, p.hasRequiredAttributes() )
p.setId( "id")
self.assertEqual( False, p.hasRequiredAttributes() )
p.setConstant(False)
self.assertEqual( True, p.hasRequiredAttributes() )
_dummyList = [ p ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Parameter_id(self):
id = "mitochondria";
self.assertEqual( False, self.P.isSetId() )
self.P.setId(id)
self.assert_(( id == self.P.getId() ))
self.assertEqual( True, self.P.isSetId() )
if (self.P.getId() == id):
pass
pass
def test_L3_Parameter_name(self):
name = "My_Favorite_Factory";
self.assertEqual( False, self.P.isSetName() )
self.P.setName(name)
self.assert_(( name == self.P.getName() ))
self.assertEqual( True, self.P.isSetName() )
if (self.P.getName() == name):
pass
self.P.unsetName()
self.assertEqual( False, self.P.isSetName() )
if (self.P.getName() != None):
pass
pass
def test_L3_Parameter_units(self):
units = "volume";
self.assertEqual( False, self.P.isSetUnits() )
self.P.setUnits(units)
self.assert_(( units == self.P.getUnits() ))
self.assertEqual( True, self.P.isSetUnits() )
if (self.P.getUnits() == units):
pass
self.P.unsetUnits()
self.assertEqual( False, self.P.isSetUnits() )
if (self.P.getUnits() != None):
pass
pass
def test_L3_Parameter_value(self):
self.assertEqual( False, self.P.isSetValue() )
self.assertEqual( True, isnan(self.P.getValue()) )
self.P.setValue(1.5)
self.assertEqual( True, self.P.isSetValue() )
self.assert_( self.P.getValue() == 1.5 )
self.P.unsetValue()
self.assertEqual( False, self.P.isSetValue() )
self.assertEqual( True, isnan(self.P.getValue()) )
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestL3Parameter))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
|
{
"content_hash": "1bb365a542113a9dc9f96971cf7c274b",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 109,
"avg_line_length": 32.38607594936709,
"alnum_prop": 0.6208716044557357,
"repo_name": "TheCoSMoCompany/biopredyn",
"id": "ab3e37d6f10dda40166ac92f4b72b9e16e192f43",
"size": "6493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Prototype/src/libsbml-5.10.0/src/bindings/python/test/sbml/TestL3Parameter.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3535918"
},
{
"name": "C++",
"bytes": "26120778"
},
{
"name": "CMake",
"bytes": "455400"
},
{
"name": "CSS",
"bytes": "49020"
},
{
"name": "Gnuplot",
"bytes": "206"
},
{
"name": "HTML",
"bytes": "193068"
},
{
"name": "Java",
"bytes": "66517"
},
{
"name": "JavaScript",
"bytes": "3847"
},
{
"name": "Makefile",
"bytes": "30905"
},
{
"name": "Perl",
"bytes": "3018"
},
{
"name": "Python",
"bytes": "7891301"
},
{
"name": "Shell",
"bytes": "247654"
},
{
"name": "TeX",
"bytes": "22566"
},
{
"name": "XSLT",
"bytes": "55564"
}
],
"symlink_target": ""
}
|
import tests.periodicities.period_test as per
per.buildModel((30, 'H', 200))
|
{
"content_hash": "c1a802b432f947a699879a0eefa4866a",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 45,
"avg_line_length": 20.5,
"alnum_prop": 0.7073170731707317,
"repo_name": "antoinecarme/pyaf",
"id": "5ccf08c5b93700231400c6f172c5a826cf86cf22",
"size": "82",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/periodicities/Hour/Cycle_Hour_200_H_30.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
__all__ = ['brand', 'category', 'garment']
|
{
"content_hash": "59027e2ea40f5f534b5aba41d7c52794",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 42,
"avg_line_length": 43,
"alnum_prop": 0.5348837209302325,
"repo_name": "timesync/clothingapi",
"id": "10df411d1e972e01cca3b6185e0bcd1663638621",
"size": "43",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/collections/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "1740"
},
{
"name": "Makefile",
"bytes": "563"
},
{
"name": "Python",
"bytes": "22659"
}
],
"symlink_target": ""
}
|
'''
New Integration Test for Batch Deleting Snapshot.
@author: Legion
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import time
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
bat_del_sp = test_stub.BATCHDELSP()
def test():
bat_del_sp.create_vm()
bat_del_sp.create_data_volume()
for i in range(20):
bat_del_sp.create_sp()
if i % 3 == 2:
bat_del_sp.revert_sp(root_vol=False)
bat_del_sp.sp_check()
bat_del_sp.batch_del_sp()
for i in range(10):
bat_del_sp.create_sp()
if i % 2 == 1:
bat_del_sp.revert_sp(root_vol=False)
bat_del_sp.sp_check()
bat_del_sp.batch_del_sp()
test_util.test_pass('Batch Delete Volume Snapshot Test Successful')
# Will be called only if an exception happens in test().
def error_cleanup():
global test_obj_dict
test_lib.lib_error_cleanup(bat_del_sp.test_obj_dict)
|
{
"content_hash": "53d64e641efd889b5ad184bff8be62b4",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 71,
"avg_line_length": 24.452380952380953,
"alnum_prop": 0.6592015579357352,
"repo_name": "zstackorg/zstack-woodpecker",
"id": "a7e834bb24537d39a3998f8059d7a5d71ddd4218",
"size": "1027",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "integrationtest/vm/virtualrouter/batch_del_sp/test_batch_del_vol_sp.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "46522"
},
{
"name": "Makefile",
"bytes": "692"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "2891030"
},
{
"name": "Shell",
"bytes": "54266"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class TickmodeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self,
plotly_name="tickmode",
parent_name="scattermapbox.marker.colorbar",
**kwargs,
):
super(TickmodeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
implied_edits=kwargs.pop("implied_edits", {}),
values=kwargs.pop("values", ["auto", "linear", "array"]),
**kwargs,
)
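# Illustrative note (not part of the original file): this validator backs the
# scattermapbox colorbar's "tickmode" attribute, which user code typically
# sets via plotly's magic-underscore syntax, e.g.:
#
#     import plotly.graph_objects as go
#     fig = go.Figure(go.Scattermapbox())
#     fig.update_traces(marker_colorbar_tickmode="array")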
|
{
"content_hash": "2586ec744a84234d66c67da667eb6d83",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 74,
"avg_line_length": 33,
"alnum_prop": 0.5774410774410774,
"repo_name": "plotly/plotly.py",
"id": "bf215ec2a3a0bd8901abfd321b068cc45f446f1f",
"size": "594",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/scattermapbox/marker/colorbar/_tickmode.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
ext_modules = [Extension('fsdt_donnell_bc2_nonlinear',
['fsdt_donnell_bc2_nonlinear.pyx'],
extra_compile_args=['/openmp',
'/O2', '/favor:INTEL64', '/fp:fast'],
extra_link_args=[],
)]
setup(
name = 'fsdt_donnell_bc2_nonlinear',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules
)
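# Illustrative build command (not part of the original file). The compiler
# flags above ('/openmp', '/favor:INTEL64', '/fp:fast') are MSVC-specific, so
# this setup script assumes a Windows toolchain with Cython installed:
#
#     python setup_fsdt_donnell_bc2_nonlinear.py build_ext --inplace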
|
{
"content_hash": "3fa477c86d398bb7924fdb3dc861647e",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 60,
"avg_line_length": 36.92857142857143,
"alnum_prop": 0.574468085106383,
"repo_name": "albertoferna/compmech",
"id": "15c13d127e9d5295570f44583658c95e180799c4",
"size": "517",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compmech/conecyl/fsdt/setup_fsdt_donnell_bc2_nonlinear.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "14795"
},
{
"name": "FORTRAN",
"bytes": "58481"
},
{
"name": "Mathematica",
"bytes": "6247877"
},
{
"name": "Python",
"bytes": "8766887"
}
],
"symlink_target": ""
}
|
import argparse
import csv
import sys
import re
import os
import pprint
import contextlib
outfileFormat = '{}.csv'
def main():
args = parseArguments()
# Start fresh
with contextlib.suppress(FileNotFoundError):
os.remove(outfileName(args.date))
for path in args.paths:
print(path)
extractor = RTFExtractor(path)
extractor.extract()
# print(extractor.race)
# pp = pprint.PrettyPrinter(indent=4)
# pp.pprint(extractor.lines)
extractor.writeToFile(outfileName(args.date))
def outfileName(date):
name = outfileFormat.format(date)
return name
def parseArguments():
parser = argparse.ArgumentParser(description='Verify votes are correct using a simple checksum')
parser.add_argument('date', type=str, help='Date of the election. Used in the generated filename.')
parser.add_argument('paths', metavar='path', type=str, nargs='+',
help='path to a CSV file')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true')
parser.add_argument('--includeOverUnder', dest='includeOverUnder', action='store_true')
parser.set_defaults(verbose=False)
return parser.parse_args()
class RTFExtractor(object):
def __init__(self, path):
self.resultsLineRE = re.compile(r'^[ ,\d%.]+$')
self.legendRE = re.compile(r'(-\d-) ([A-Z .()]+)( ,)?')
self.legendMarkerRE = re.compile(r'-\d-')
self.path = path
self.lines = []
self.race = ''
self.firstResultEncountered = False
def extract(self):
with open(self.path, 'r') as f:
for line in f.readlines():
csvLine = self.convert(line)
lastLine = '' if not len(self.lines) else self.lines[-1]
if self.goesWithPreviousLine(csvLine, lastLine):
if len(self.lines):
self.lines[-1] += csvLine
elif not self.shouldDiscardLine(csvLine):
self.lines.append(csvLine)
self.reformatCandidates()
self.reformatPrecinctLines()
def convert(self, line):
rtfCommandRE = re.compile(r'\\[a-z0-9-]+')
outLine = line.replace("\\tab", ",")
outLine = rtfCommandRE.sub('', outLine)
# outLine = outLine.replace("\r\n", "")
outLine = outLine.translate(str.maketrans('\r\n{}', ' '))
outLine = outLine.strip(" ")
return outLine
def writeToFile(self, path):
with open(path, 'a') as f:
for line in self.lines:
f.write(line+'\n')
def reformatCandidates(self):
headerLine = self.lines[1]
headerComponents = [self.race, "Precinct", "Voters", "Trnout", "Pct"]
for m in self.legendRE.finditer(self.lines[0]):
headerComponents.append(m.group(2).strip())
# nameComponents = m.group(2).strip().split(" ")
# party = nameComponents.pop()
# name = "{} ({})".format(" ".join(nameComponents), party)
# headerComponents.append(name)
headerComponents.extend(["Under Votes", "Over Votes", "Write-ins"])
# print(headerComponents)
headerLine = ",".join(headerComponents)
del(self.lines[0:2])
self.lines.insert(0, headerLine)
def reformatPrecinctLines(self):
# self.lines[1:] = [",".join(l.split()[1:]) for l in self.lines[1:]]
for index, line in enumerate(self.lines):
if index > 0:
cols = line.split()
cols[0] = "" # Remove "PCT", "Race"
self.lines[index] = ",".join(cols)
def reformatRace(self, race):
raceComponents = race.split()
party = raceComponents[0].rstrip(".")
return "{} ({})".format(" ".join(raceComponents[1:]), party)
def goesWithPreviousLine(self, line, lastLine):
if self.legendRE.search(line):
if "Legend:" in lastLine:
return True
elif self.resultsLineRE.search(line):
return True
elif self.legendMarkerRE.search(line):
if "Reg" in lastLine:
return True
elif " WI" in line and "Reg" in lastLine:
return True
return False
def shouldDiscardLine(self, line):
if "PCT" in line:
self.firstResultEncountered = True
return False
elif "Race:" in line and not self.firstResultEncountered:
self.race = self.reformatRace(line.split(":")[1].strip())
elif "Race Totals" in line:
return False
elif " WI" in line and not self.firstResultEncountered:
return False
elif self.legendMarkerRE.search(line) and not self.firstResultEncountered:
return False
return True
# Default function is main()
if __name__ == '__main__':
main()
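# Example invocation (illustrative; the date and .rtf file names below are
# hypothetical):
#
#     python multnomah_rtf_extractor.py 2016-05-17 pct_results_1.rtf pct_results_2.rtf
#
# Each input is converted and appended to 2016-05-17.csv; that file is removed
# at startup so repeated runs start fresh.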
|
{
"content_hash": "6780c574123f1747f900821af7d122b9",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 100,
"avg_line_length": 27.207792207792206,
"alnum_prop": 0.6775656324582339,
"repo_name": "morrellk/openelections-data-or",
"id": "844f516e14278fab8e7b73fb6a92a22553141068",
"size": "5360",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/parsers/multnomah_rtf_extractor.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "173538"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import random
import os
import unicodedata
from faker import Factory
from django.core.files import File
from saleor.product.models import (Shirt, ShirtVariant,
Bag, BagVariant, ProductImage)
from saleor.product.models import Category, Color
from saleor.userprofile.models import User, Address
fake = Factory.create()
PRODUCT_COLLECTIONS = fake.words(10)
def create_color(**kwargs):
r = lambda: random.randint(0, 255)
defaults = {
'name': fake.word(),
'color': '%02X%02X%02X' % (r(), r(), r())
}
defaults.update(kwargs)
return Color.objects.create(**defaults)
def get_or_create_category(name, **kwargs):
defaults = {
'description': fake.text()
}
defaults.update(kwargs)
defaults['slug'] = fake.slug(name)
return Category.objects.get_or_create(name=name, defaults=defaults)[0]
def create_product(product_type, **kwargs):
if random.choice([True, False]):
collection = random.choice(PRODUCT_COLLECTIONS)
else:
collection = ''
defaults = {
'name': fake.company(),
'price': fake.pyfloat(2, 2, positive=True),
'category': Category.objects.order_by('?')[0],
'collection': collection,
'color': Color.objects.order_by('?')[0],
'weight': fake.random_digit(),
'description': '\n\n'.join(fake.paragraphs(5))
}
defaults.update(kwargs)
return product_type.objects.create(**defaults)
def create_variant(product, **kwargs):
defaults = {
'stock': fake.random_int(),
'sku': fake.random_int(1, 100000),
'product': product
}
if isinstance(product, Shirt):
        if 'size' not in kwargs:
defaults['size'] = random.choice(ShirtVariant.SIZE_CHOICES)[0]
variant_class = ShirtVariant
elif isinstance(product, Bag):
variant_class = BagVariant
else:
        raise NotImplementedError
defaults.update(kwargs)
return variant_class.objects.create(**defaults)
def create_product_image(product, placeholder_dir):
img_path = "%s/%s" % (placeholder_dir,
random.choice(os.listdir(placeholder_dir)))
    image = ProductImage(
        product=product,
        image=File(open(img_path, 'rb'))
    )
    # Model.save() returns None, so save first and return the instance itself.
    image.save()
    return image
def create_product_images(product, how_many, placeholder_dir):
for i in range(how_many):
create_product_image(product, placeholder_dir)
def create_shirt(**kwargs):
return create_product(Shirt, **kwargs)
def create_bag(**kwargs):
return create_product(Bag, **kwargs)
def create_items(placeholder_dir, how_many=10):
# Create few colors
[create_color() for i in range(5)]
shirt_category = get_or_create_category('Shirts')
bag_category = get_or_create_category('Grocery bags')
for i in range(how_many):
# Shirt
shirt = create_shirt(category=shirt_category)
create_product_images(shirt, random.randrange(1, 5),
placeholder_dir + "shirts")
# Bag
bag = create_bag(category=bag_category, collection='')
create_product_images(bag, random.randrange(1, 5),
placeholder_dir + "bags")
# chance to generate couple of sizes
for size in ShirtVariant.SIZE_CHOICES:
# Create min. one size
if shirt.variants.count() == 0:
create_variant(shirt, size=size[0])
continue
if random.choice([True, False]):
create_variant(shirt, size=size[0])
create_variant(bag)
yield "Shirt - %s %s Variants" % (shirt, shirt.variants.count())
yield "Bag - %s %s Variants" % (bag, bag.variants.count())
def create_fake_user():
first_name = fake.first_name()
last_name = fake.last_name()
_first = unicodedata.normalize('NFD', first_name).encode('ascii', 'ignore')
_last = unicodedata.normalize('NFD', last_name).encode('ascii', 'ignore')
email = u'%s.%s@example.com' % (_first.lower(), _last.lower())
user = User.objects.create_user(email=email, password='password')
address = Address.objects.create(
first_name=first_name,
last_name=last_name,
street_address_1=fake.street_address(),
city=fake.city(),
postal_code=fake.postcode(),
country=fake.country_code())
user.addresses.add(address)
user.default_billing_address = address
user.default_shipping_address = address
user.is_active = True
user.save()
return user
def create_users(how_many=10):
for i in range(how_many):
user = create_fake_user()
yield "User - %s" % user.email
|
{
"content_hash": "018b97e353e0fe798a035bb30884df04",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 79,
"avg_line_length": 28.763636363636362,
"alnum_prop": 0.6163084702907712,
"repo_name": "hongquan/saleor",
"id": "6bd3c8b51c35ba766e357a1eae66965f9628f952",
"size": "4746",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/create_random_data.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "6805"
},
{
"name": "HTML",
"bytes": "119616"
},
{
"name": "JavaScript",
"bytes": "2766"
},
{
"name": "Python",
"bytes": "200042"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class ZValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(self, plotly_name="z", parent_name="histogram2dcontour", **kwargs):
super(ZValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "data"),
**kwargs
)
|
{
"content_hash": "cb8a7537753d58e14bb401d3b3ad875f",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 84,
"avg_line_length": 36.416666666666664,
"alnum_prop": 0.6041189931350115,
"repo_name": "plotly/python-api",
"id": "48de2103d195881195fa52ec5d024645686cf46c",
"size": "437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/histogram2dcontour/_z.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
import os
from datetime import datetime
from django.test import TestCase, override_settings
from django.utils._os import upath
import pytz
import responses
from ..nosnl import NOSNLParser
TEST_DIR = os.path.join(os.path.dirname(upath(__file__)), 'data')
class NOSNLParserTests(TestCase):
maxDiff = None
def setUp(self):
self.timezone = pytz.timezone("Europe/Amsterdam")
super(NOSNLParserTests, self).setUp()
@responses.activate
@override_settings(NEWS_SOURCES=['news.parsers.nosnl.NOSNLParser', ])
def test_nos_nl_urls(self):
page_name = 'www_nos_nl'
page_path = os.path.join(TEST_DIR, page_name)
page_file = open(page_path, 'rb')
responses.add(responses.GET, 'http://www.nos.nl',
body=page_file.read(), status=200,
content_type='text/html')
urls = NOSNLParser.request_urls()
expected = set([
'http://www.nos.nl/artikel/2093795-milaan-sanremo-gehinderd-door-aardverschuiving.html',
'http://www.nos.nl/artikel/2093716-62-doden-bij-vliegtuigongeluk-in-rusland.html',
'http://www.nos.nl/artikel/2093810-luchtaanvallen-op-is-steden-raqqa-en-palmyra.html',
'http://www.nos.nl/artikel/2093799-turkije-waakzaam-in-aanloop-naar-koerdische-feestdag.html',
'http://www.nos.nl/artikel/2093686-vrachtauto-verliest-ruim-150-kratten-bier-op-rotonde-helmond.html',
'http://www.nos.nl/artikel/2093752-stil-leven-sterke-ontmoeting-tussen-armando-en-vanfleteren.html',
'http://www.nos.nl/artikel/2093808-fanara-wint-laatste-reuzenslalom-van-het-seizoen.html',
'http://www.nos.nl/artikel/2093598-eu-roept-om-sancties-tegen-moskou-krim-annexatie-twee-jaar-geleden.html',
'http://www.nos.nl/artikel/2092834-sponsor-wust-gaat-door-tot-spelen-2018.html',
'http://www.nos.nl/artikel/2093789-griekenland-zit-nog-met-duizend-vragen.html',
'http://www.nos.nl/artikel/2093785-lid-clientenraad-ziekenhuis-heerlen-weg-na-intimidatie.html',
'http://www.nos.nl/artikel/2093400-michelle-obama-brengt-single-uit-voor-het-goede-doel.html',
'http://www.nos.nl/artikel/2093626-wie-is-terreurverdachte-salah-abdeslam.html',
'http://www.nos.nl/artikel/2093726-geld-maakt-niet-gelukkig-tenzij-je-arm-bent.html',
'http://www.nos.nl/artikel/2093793-in-molenbeek-wist-iedereen-waar-abdeslam-was.html',
'http://www.nos.nl/artikel/2093186-hond-redt-bejaarde-duitser-van-de-bevriezingsdood.html',
'http://www.nos.nl/artikel/2093585-koning-opent-wildlands-adventure-zoo-in-emmen.html',
'http://www.nos.nl/artikel/2093748-willem-alexander-gaf-zichzelf-een-onmogelijke-opdracht.html',
'http://www.nos.nl/artikel/2093618-feyenoord-wil-een-nieuw-stadion-langs-de-maas.html',
'http://www.nos.nl/artikel/2093732-williams-en-azarenka-naar-finale-indian-wells.html',
'http://www.nos.nl/artikel/2093783-shiffrin-onderstreept-klasse-in-laatste-wb-slalom.html',
'http://www.nos.nl/artikel/2093803-frikadellenbakker-van-duinen-wil-via-roda-slagen-in-dusseldorf.html',
'http://www.nos.nl/artikel/2093770-scoren-in-de-kuip-heeft-puur-te-maken-met-kwaliteit.html',
'http://www.nos.nl/artikel/2093433-google-wil-af-van-maker-van-terminators.html',
'http://www.nos.nl/artikel/2093755-terreurverdachte-abdeslam-verzet-zich-tegen-uitlevering.html',
'http://www.nos.nl/artikel/2093792-terreurdreigingsniveau-drie-blijft-van-kracht-in-belgie.html',
'http://www.nos.nl/artikel/2093764-doden-en-gewonden-bij-aanslag-in-centrum-istanbul.html',
'http://www.nos.nl/artikel/2093794-politie-treft-man-aan-die-nog-170-000-euro-aan-boetes-moet-betalen.html',
'http://www.nos.nl/artikel/2093782-na-taart-in-milaan-wil-cancellara-champagne-in-sanremo.html',
'http://www.nos.nl/artikel/2093707-fabeltjeskrant-keert-terug-in-digitale-versie.html',
'http://www.nos.nl/artikel/2093767-rossi-twee-jaar-langer-bij-yamaha.html',
'http://www.nos.nl/artikel/2093768-the-passion-in-new-orleans-a-live-2-hour-epic-musical-event.html',
'http://www.nos.nl/artikel/2093453-verdronken-hond-na-5-weken-teruggevonden.html',
'http://www.nos.nl/artikel/2093813-hennis-er-moet-zeker-2-miljard-bij-voor-defensie.html',
'http://www.nos.nl/artikel/2093496-ook-b-staal-schaatser-koelizjnikov-positief.html',
'http://www.nos.nl/artikel/2093737-volkskrant-kreeg-ook-pleitnota-wilders-aangeboden.html',
'http://www.nos.nl/artikel/2093778-voorman-noorse-broeders-vast-voor-verduistering-acht-miljoen-euro.html',
'http://www.nos.nl/artikel/2093345-speciale-app-waarschuwt-olympiers-in-rio-voor-gevaar.html',
'http://www.nos.nl/artikel/2093776-onderzeeer-in-poolgebied-breekt-door-ijs-heen.html',
'http://www.nos.nl/artikel/2093744-veel-geheime-schikkingen-over-seksueel-misbruik-rk-kerk.html',
'http://www.nos.nl/artikel/2093282-privacyorganisaties-ontevreden-over-datadeal-europa-vs.html',
'http://www.nos.nl/artikel/2093502-azarenka-verder-na-double-bagel-in-indian-wells.html',
'http://www.nos.nl/artikel/2093757-tweede-inzittende-na-auto-ongeluk-sneek-overleden.html',
'http://www.nos.nl/artikel/2093769-50plus-wil-gedoe-achter-zich-laten.html',
'http://www.nos.nl/artikel/2093704-djokovic-kraakt-sterke-tsonga-nadal-langs-nishikori.html',
'http://www.nos.nl/artikel/2093774-samba-sam-is-terug-larsson-laat-friesland-swingen.html',
'http://www.nos.nl/artikel/2093807-golfer-chowrasia-nieuwe-leider-in-india.html',
'http://www.nos.nl/artikel/2093432-mabel-en-beatrix-bij-uitreiking-friso-prijs.html',
'http://www.nos.nl/artikel/2093631-voor-het-eerst-meer-dan-8-miljoen-auto-s-op-de-weg.html',
'http://www.nos.nl/artikel/2093750-oud-v-d-ers-nog-even-achter-de-toonbank-bij-leegverkoop.html',
'http://www.nos.nl/artikel/2092942-koelizjnikov-reageert-ik-denk-niet-aan-zelfmoord.html',
'http://www.nos.nl/artikel/2093741-ik-laat-me-niet-nog-keer-door-de-daders-tot-zwijgen-dwingen.html',
'http://www.nos.nl/artikel/2093788-f1-teams-willen-snel-aanpassing-van-waardeloze-kwalificatieopzet.html',
'http://www.nos.nl/artikel/2093797-noc-nsf-baas-hendriks-blij-met-terugkeer-bonnes-bij-judobond.html',
])
self.assertEqual(urls, expected)
def test_nos_nl(self):
"""
http://nos.nl/artikel/2069130-dodental-parijs-op-129-maar-zal-nog-stijgen.html
"""
article_name = "www_nos_nl_artikel_2069130-dodental-parijs-op-129" \
"-maar-zal-nog-stijgen.html"
article_path = os.path.join(TEST_DIR, article_name)
article_file = open(article_path, 'rb')
parsed_article = NOSNLParser.parse_new_version('', article_file.read())
expected = \
"Bij de aanslagen in Parijs zijn 129 mensen omgekomen. Dat " \
"aantal zal nog stijgen,\xa0heeft de openbaar aanklager " \
"gezegd\xa0op een persconferentie.\xa0352 mensen raakten gewond. " \
"99 van hen verkeren in kritieke toestand.\xa0\n" \
"Alleen al in de concertzaal Bataclan zijn 89 mensen gedood. De " \
"terroristen\xa0noemden Syri\xeb en Irak tijdens hun aanslag.\xa0\n" \
"Openbaar aanklager Molins zei verder dat zeven aanvallers " \
"zijn gedood. De terroristen hadden drie teams gevormd, die " \
"hun zes\xa0aanslagen\xa0met elkaar hadden afgestemd. Ze " \
"droegen alle zeven\xa0kalasjnikovs en bomgordels met " \
"precies\xa0hetzelfde type explosieven.\n" \
"Een van de aanvallers van Bataclan was een man van 29. " \
"Hij werd\xa0sinds 2010 in de gaten gehouden\xa0door\xa0de " \
"veiligheidsdiensten omdat hij radicaliseerde. Hij is " \
"meerdere keren voor kleine vergrijpen in aanraking gekomen " \
"met de politie, maar heeft nooit gevangen gezeten. Volgens " \
"Franse media heette de man Isma\xebl M. en woonde\xa0hij " \
"in Chartres.\xa0\n" \
"Ook circuleert de naam\xa0Abbdulakbak B. als een van de " \
"terroristen in Bataclan.\xa0Hij zou via het Griekse eiland " \
"Leros Europa zijn binnengekomen.\n" \
"Belgi\xeb\n" \
"Er is ook een verband met buurland Belgi\xeb. " \
"Twee verdachte\xa0auto's in Parijs hadden Belgische " \
"kentekens. Vanochtend zijn bij een controle bij de " \
"Frans-Belgische grens drie mannen aangehouden. Een van " \
"hen was de huurder van de auto die bij Bataclan is " \
"aangetroffen.\xa0Vanmiddag zijn in\xa0Brussel ook " \
"drie\xa0verdachten opgepakt.\xa0\n" \
"Onder de dodelijke slachtoffers zijn zeker vijf " \
"buitenlanders: twee Belgen, een Portugees, een Spanjaard " \
"en een Brit.\n"
self.assertEqual(parsed_article.get('content'), expected)
expected_date = self.timezone.localize(datetime(2015, 11, 14, 19, 28, 38))
self.assertEqual(parsed_article.get('date'), expected_date)
|
{
"content_hash": "b10de97c97b1716a02240d363eb8af23",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 116,
"avg_line_length": 63.813793103448276,
"alnum_prop": 0.6816167729385064,
"repo_name": "flupzor/newsdiffs",
"id": "4213b866aa9590df547f291213bac1fde1d839b2",
"size": "9253",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "news/parsers/tests/test_nosnl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "340"
},
{
"name": "HTML",
"bytes": "644925"
},
{
"name": "JavaScript",
"bytes": "1186"
},
{
"name": "Python",
"bytes": "153374"
},
{
"name": "Shell",
"bytes": "62"
}
],
"symlink_target": ""
}
|
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.monasca import utils as monascautils
from rally.task import validation
class MonascaMetrics(monascautils.MonascaScenario):
"""Benchmark scenarios for monasca Metrics API."""
@validation.required_clients("monasca")
@validation.required_services(consts.Service.MONASCA)
@validation.required_openstack(users=True)
@scenario.configure()
def list_metrics(self, **kwargs):
"""Fetch user's metrics.
:param kwargs: optional arguments for list query:
name, dimensions, start_time, etc
"""
self._list_metrics(**kwargs)
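# Illustrative task snippet (an assumption about how this plugin is invoked,
# not part of the original file): a Rally task file could reference the
# scenario by its auto-generated name, e.g.:
#
#     {"MonascaMetrics.list_metrics": [
#         {"args": {},
#          "runner": {"type": "constant", "times": 10, "concurrency": 2}}
#     ]}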
|
{
"content_hash": "a779e0983af2aa61d72454b1d04456ae",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 75,
"avg_line_length": 34.65,
"alnum_prop": 0.7186147186147186,
"repo_name": "varuntiwari27/rally",
"id": "e7affce2aad5b760b3d0d06759f3cef757b0cd61",
"size": "1291",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rally/plugins/openstack/scenarios/monasca/metrics.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "452687"
},
{
"name": "C++",
"bytes": "26115"
},
{
"name": "CSS",
"bytes": "6231"
},
{
"name": "HTML",
"bytes": "51546"
},
{
"name": "JavaScript",
"bytes": "14187"
},
{
"name": "Makefile",
"bytes": "68380"
},
{
"name": "Mako",
"bytes": "17949"
},
{
"name": "Python",
"bytes": "8478187"
},
{
"name": "Shell",
"bytes": "61579"
}
],
"symlink_target": ""
}
|
"""Import router for absl.flags. See https://github.com/abseil/abseil-py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging as _logging
# go/tf-wildcard-import
from absl.flags import * # pylint: disable=wildcard-import
import six as _six
from tensorflow.python.util import tf_decorator
# Since we wrap absl.flags DEFINE functions, we need to declare this module
# does not affect key flags.
disclaim_key_flags() # pylint: disable=undefined-variable
_RENAMED_ARGUMENTS = {
'flag_name': 'name',
'default_value': 'default',
'docstring': 'help',
}
def _wrap_define_function(original_function):
"""Wraps absl.flags's define functions so tf.flags accepts old names."""
def wrapper(*args, **kwargs):
"""Wrapper function that turns old keyword names to new ones."""
has_old_names = False
for old_name, new_name in _six.iteritems(_RENAMED_ARGUMENTS):
if old_name in kwargs:
has_old_names = True
value = kwargs.pop(old_name)
kwargs[new_name] = value
if has_old_names:
_logging.warning(
'Use of the keyword argument names (flag_name, default_value, '
'docstring) is deprecated, please use (name, default, help) instead.')
return original_function(*args, **kwargs)
return tf_decorator.make_decorator(original_function, wrapper)
# pylint: disable=invalid-name,used-before-assignment
# absl.flags APIs use `default` as the name of the default value argument.
# Allow the following functions continue to accept `default_value`.
DEFINE_string = _wrap_define_function(DEFINE_string)
DEFINE_boolean = _wrap_define_function(DEFINE_boolean)
DEFINE_bool = DEFINE_boolean
DEFINE_float = _wrap_define_function(DEFINE_float)
DEFINE_integer = _wrap_define_function(DEFINE_integer)
# pylint: enable=invalid-name,used-before-assignment
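# Illustrative example (not part of the original file): the wrapped DEFINE
# functions still accept the deprecated keyword names, logging a deprecation
# warning and forwarding them under the new names:
#
#     DEFINE_string(flag_name='data_dir', default_value='/tmp',
#                   docstring='Directory with the input data.')
#     # ...is rewritten to:
#     # DEFINE_string(name='data_dir', default='/tmp',
#     #               help='Directory with the input data.')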
|
{
"content_hash": "1868f0ceba4bfcf2eaa4c5fe332e21de",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 80,
"avg_line_length": 34.45454545454545,
"alnum_prop": 0.7187335092348285,
"repo_name": "Kongsea/tensorflow",
"id": "abd6f3d85501449b4f32592aa3787d1cbdd67e40",
"size": "2585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/platform/flags.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8458"
},
{
"name": "C",
"bytes": "198923"
},
{
"name": "C++",
"bytes": "29494349"
},
{
"name": "CMake",
"bytes": "644855"
},
{
"name": "Go",
"bytes": "976410"
},
{
"name": "Java",
"bytes": "409984"
},
{
"name": "Jupyter Notebook",
"bytes": "1833675"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "38189"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63210"
},
{
"name": "Perl",
"bytes": "6715"
},
{
"name": "Protocol Buffer",
"bytes": "270658"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "26227666"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "373711"
}
],
"symlink_target": ""
}
|
"""Convenience wrapper for invoking APIs/factories w/ a project."""
import os
from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2
from google.cloud._helpers import _LocalStack
from google.cloud._helpers import (
_determine_default_project as _base_default_project)
from google.cloud.client import ClientWithProject
from google.cloud.environment_vars import DISABLE_GRPC
from google.cloud.environment_vars import GCD_DATASET
from google.cloud.environment_vars import GCD_HOST
from google.cloud.datastore._http import HTTPDatastoreAPI
from google.cloud.datastore import helpers
from google.cloud.datastore.batch import Batch
from google.cloud.datastore.entity import Entity
from google.cloud.datastore.key import Key
from google.cloud.datastore.query import Query
from google.cloud.datastore.transaction import Transaction
try:
from google.cloud.datastore._gax import make_datastore_api
_HAVE_GRPC = True
except ImportError: # pragma: NO COVER
make_datastore_api = None
_HAVE_GRPC = False
_MAX_LOOPS = 128
"""Maximum number of iterations to wait for deferred keys."""
_DATASTORE_BASE_URL = 'https://datastore.googleapis.com'
"""Datastore API request URL base."""
_USE_GRPC = _HAVE_GRPC and not os.getenv(DISABLE_GRPC, False)
def _get_gcd_project():
"""Gets the GCD application ID if it can be inferred."""
return os.getenv(GCD_DATASET)
def _determine_default_project(project=None):
"""Determine default project explicitly or implicitly as fall-back.
In implicit case, supports four environments. In order of precedence, the
implicit environments are:
* DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing)
* GOOGLE_CLOUD_PROJECT environment variable
* Google App Engine application ID
* Google Compute Engine project ID (from metadata server)
:type project: str
:param project: Optional. The project to use as default.
:rtype: str or ``NoneType``
:returns: Default project if it can be determined.
"""
if project is None:
project = _get_gcd_project()
if project is None:
project = _base_default_project(project=project)
return project
def _extended_lookup(datastore_api, project, key_pbs,
missing=None, deferred=None,
eventual=False, transaction_id=None):
"""Repeat lookup until all keys found (unless stop requested).
Helper function for :meth:`Client.get_multi`.
:type datastore_api:
:class:`google.cloud.datastore._http.HTTPDatastoreAPI`
or :class:`google.cloud.datastore._gax.GAPICDatastoreAPI`
:param datastore_api: The datastore API object used to connect
to datastore.
:type project: str
:param project: The project to make the request for.
:type key_pbs: list of :class:`.entity_pb2.Key`
:param key_pbs: The keys to retrieve from the datastore.
:type missing: list
:param missing: (Optional) If a list is passed, the key-only entity
protobufs returned by the backend as "missing" will be
copied into it.
:type deferred: list
:param deferred: (Optional) If a list is passed, the key protobufs returned
by the backend as "deferred" will be copied into it.
:type eventual: bool
:param eventual: If False (the default), request ``STRONG`` read
consistency. If True, request ``EVENTUAL`` read
consistency.
:type transaction_id: str
:param transaction_id: If passed, make the request in the scope of
the given transaction. Incompatible with
``eventual==True``.
:rtype: list of :class:`.entity_pb2.Entity`
:returns: The requested entities.
:raises: :class:`ValueError` if missing / deferred are not null or
empty list.
"""
if missing is not None and missing != []:
raise ValueError('missing must be None or an empty list')
if deferred is not None and deferred != []:
raise ValueError('deferred must be None or an empty list')
results = []
loop_num = 0
read_options = _get_read_options(eventual, transaction_id)
while loop_num < _MAX_LOOPS: # loop against possible deferred.
loop_num += 1
lookup_response = datastore_api.lookup(
project, read_options, key_pbs)
# Accumulate the new results.
results.extend(result.entity for result in lookup_response.found)
if missing is not None:
missing.extend(result.entity for result in lookup_response.missing)
if deferred is not None:
deferred.extend(lookup_response.deferred)
break
if len(lookup_response.deferred) == 0:
break
# We have deferred keys, and the user didn't ask to know about
# them, so retry (but only with the deferred ones).
key_pbs = lookup_response.deferred
return results
class Client(ClientWithProject):
"""Convenience wrapper for invoking APIs/factories w/ a project.
.. doctest::
>>> from google.cloud import datastore
>>> client = datastore.Client()
:type project: str
:param project: (optional) The project to pass to proxied API methods.
:type namespace: str
:param namespace: (optional) namespace to pass to proxied API methods.
:type credentials: :class:`~google.auth.credentials.Credentials`
:param credentials: (Optional) The OAuth2 Credentials to use for this
client. If not passed (and if no ``_http`` object is
passed), falls back to the default inferred from the
environment.
:type _http: :class:`~httplib2.Http`
:param _http: (Optional) HTTP object to make requests. Can be any object
that defines ``request()`` with the same interface as
:meth:`~httplib2.Http.request`. If not passed, an
``_http`` object is created that is bound to the
``credentials`` for the current object.
This parameter should be considered private, and could
change in the future.
:type _use_grpc: bool
:param _use_grpc: (Optional) Explicitly specifies whether
to use the gRPC transport (via GAX) or HTTP. If unset,
falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC``
environment variable.
This parameter should be considered private, and could
change in the future.
"""
SCOPE = ('https://www.googleapis.com/auth/datastore',)
"""The scopes required for authenticating as a Cloud Datastore consumer."""
def __init__(self, project=None, namespace=None,
credentials=None, _http=None, _use_grpc=None):
super(Client, self).__init__(
project=project, credentials=credentials, _http=_http)
self.namespace = namespace
self._batch_stack = _LocalStack()
self._datastore_api_internal = None
if _use_grpc is None:
self._use_grpc = _USE_GRPC
else:
self._use_grpc = _use_grpc
try:
host = os.environ[GCD_HOST]
self._base_url = 'http://' + host
except KeyError:
self._base_url = _DATASTORE_BASE_URL
@staticmethod
def _determine_default(project):
"""Helper: override default project detection."""
return _determine_default_project(project)
@property
def _datastore_api(self):
"""Getter for a wrapped API object."""
if self._datastore_api_internal is None:
if self._use_grpc:
self._datastore_api_internal = make_datastore_api(self)
else:
self._datastore_api_internal = HTTPDatastoreAPI(self)
return self._datastore_api_internal
def _push_batch(self, batch):
"""Push a batch/transaction onto our stack.
"Protected", intended for use by batch / transaction context mgrs.
:type batch: :class:`google.cloud.datastore.batch.Batch`, or an object
implementing its API.
:param batch: newly-active batch/transaction.
"""
self._batch_stack.push(batch)
def _pop_batch(self):
"""Pop a batch/transaction from our stack.
"Protected", intended for use by batch / transaction context mgrs.
:raises: IndexError if the stack is empty.
:rtype: :class:`google.cloud.datastore.batch.Batch`, or an object
implementing its API.
:returns: the top-most batch/transaction, after removing it.
"""
return self._batch_stack.pop()
@property
def current_batch(self):
"""Currently-active batch.
:rtype: :class:`google.cloud.datastore.batch.Batch`, or an object
implementing its API, or ``NoneType`` (if no batch is active).
:returns: The batch/transaction at the top of the batch stack.
"""
return self._batch_stack.top
@property
def current_transaction(self):
"""Currently-active transaction.
:rtype: :class:`google.cloud.datastore.transaction.Transaction`, or an
object implementing its API, or ``NoneType`` (if no transaction
is active).
:returns: The transaction at the top of the batch stack.
"""
transaction = self.current_batch
if isinstance(transaction, Transaction):
return transaction
def get(self, key, missing=None, deferred=None, transaction=None):
"""Retrieve an entity from a single key (if it exists).
.. note::
This is just a thin wrapper over :meth:`get_multi`.
The backend API does not make a distinction between a single key or
multiple keys in a lookup request.
:type key: :class:`google.cloud.datastore.key.Key`
:param key: The key to be retrieved from the datastore.
:type missing: list
:param missing: (Optional) If a list is passed, the key-only entities
returned by the backend as "missing" will be copied
into it.
:type deferred: list
:param deferred: (Optional) If a list is passed, the keys returned
by the backend as "deferred" will be copied into it.
:type transaction:
:class:`~google.cloud.datastore.transaction.Transaction`
:param transaction: (Optional) Transaction to use for read consistency.
If not passed, uses current transaction, if set.
:rtype: :class:`google.cloud.datastore.entity.Entity` or ``NoneType``
:returns: The requested entity if it exists.
"""
entities = self.get_multi(keys=[key], missing=missing,
deferred=deferred, transaction=transaction)
if entities:
return entities[0]
def get_multi(self, keys, missing=None, deferred=None, transaction=None):
"""Retrieve entities, along with their attributes.
:type keys: list of :class:`google.cloud.datastore.key.Key`
:param keys: The keys to be retrieved from the datastore.
:type missing: list
:param missing: (Optional) If a list is passed, the key-only entities
returned by the backend as "missing" will be copied
into it. If the list is not empty, an error will occur.
:type deferred: list
:param deferred: (Optional) If a list is passed, the keys returned
by the backend as "deferred" will be copied into it.
If the list is not empty, an error will occur.
:type transaction:
:class:`~google.cloud.datastore.transaction.Transaction`
:param transaction: (Optional) Transaction to use for read consistency.
If not passed, uses current transaction, if set.
:rtype: list of :class:`google.cloud.datastore.entity.Entity`
:returns: The requested entities.
:raises: :class:`ValueError` if one or more of ``keys`` has a project
which does not match our project.
"""
if not keys:
return []
ids = set(key.project for key in keys)
for current_id in ids:
if current_id != self.project:
raise ValueError('Keys do not match project')
if transaction is None:
transaction = self.current_transaction
entity_pbs = _extended_lookup(
datastore_api=self._datastore_api,
project=self.project,
key_pbs=[k.to_protobuf() for k in keys],
missing=missing,
deferred=deferred,
transaction_id=transaction and transaction.id,
)
if missing is not None:
missing[:] = [
helpers.entity_from_protobuf(missed_pb)
for missed_pb in missing]
if deferred is not None:
deferred[:] = [
helpers.key_from_protobuf(deferred_pb)
for deferred_pb in deferred]
return [helpers.entity_from_protobuf(entity_pb)
for entity_pb in entity_pbs]
def put(self, entity):
"""Save an entity in the Cloud Datastore.
.. note::
This is just a thin wrapper over :meth:`put_multi`.
The backend API does not make a distinction between a single
entity or multiple entities in a commit request.
:type entity: :class:`google.cloud.datastore.entity.Entity`
:param entity: The entity to be saved to the datastore.
"""
self.put_multi(entities=[entity])
def put_multi(self, entities):
"""Save entities in the Cloud Datastore.
:type entities: list of :class:`google.cloud.datastore.entity.Entity`
:param entities: The entities to be saved to the datastore.
:raises: :class:`ValueError` if ``entities`` is a single entity.
"""
if isinstance(entities, Entity):
raise ValueError("Pass a sequence of entities")
if not entities:
return
current = self.current_batch
in_batch = current is not None
if not in_batch:
current = self.batch()
current.begin()
for entity in entities:
current.put(entity)
if not in_batch:
current.commit()
def delete(self, key):
"""Delete the key in the Cloud Datastore.
.. note::
This is just a thin wrapper over :meth:`delete_multi`.
The backend API does not make a distinction between a single key or
multiple keys in a commit request.
:type key: :class:`google.cloud.datastore.key.Key`
:param key: The key to be deleted from the datastore.
"""
self.delete_multi(keys=[key])
def delete_multi(self, keys):
"""Delete keys from the Cloud Datastore.
:type keys: list of :class:`google.cloud.datastore.key.Key`
:param keys: The keys to be deleted from the Datastore.
"""
if not keys:
return
# We allow partial keys to attempt a delete, the backend will fail.
current = self.current_batch
in_batch = current is not None
if not in_batch:
current = self.batch()
current.begin()
for key in keys:
current.delete(key)
if not in_batch:
current.commit()
def allocate_ids(self, incomplete_key, num_ids):
"""Allocate a list of IDs from a partial key.
:type incomplete_key: :class:`google.cloud.datastore.key.Key`
:param incomplete_key: Partial key to use as base for allocated IDs.
:type num_ids: int
:param num_ids: The number of IDs to allocate.
:rtype: list of :class:`google.cloud.datastore.key.Key`
:returns: The (complete) keys allocated with ``incomplete_key`` as
root.
:raises: :class:`ValueError` if ``incomplete_key`` is not a
partial key.
"""
if not incomplete_key.is_partial:
raise ValueError(('Key is not partial.', incomplete_key))
incomplete_key_pb = incomplete_key.to_protobuf()
incomplete_key_pbs = [incomplete_key_pb] * num_ids
response_pb = self._datastore_api.allocate_ids(
incomplete_key.project, incomplete_key_pbs)
allocated_ids = [allocated_key_pb.path[-1].id
for allocated_key_pb in response_pb.keys]
return [incomplete_key.completed_key(allocated_id)
for allocated_id in allocated_ids]
def key(self, *path_args, **kwargs):
"""Proxy to :class:`google.cloud.datastore.key.Key`.
Passes our ``project``.
"""
if 'project' in kwargs:
raise TypeError('Cannot pass project')
kwargs['project'] = self.project
if 'namespace' not in kwargs:
kwargs['namespace'] = self.namespace
return Key(*path_args, **kwargs)
def batch(self):
"""Proxy to :class:`google.cloud.datastore.batch.Batch`."""
return Batch(self)
def transaction(self):
"""Proxy to :class:`google.cloud.datastore.transaction.Transaction`."""
return Transaction(self)
def query(self, **kwargs):
"""Proxy to :class:`google.cloud.datastore.query.Query`.
Passes our ``project``.
Using query to search a datastore:
.. testsetup:: query
from google.cloud import datastore
client = datastore.Client()
query = client.query(kind='_Doctest')
def do_something(entity):
pass
.. doctest:: query
>>> query = client.query(kind='MyKind')
>>> query.add_filter('property', '=', 'val')
Using the query iterator
.. doctest:: query
>>> query_iter = query.fetch()
>>> for entity in query_iter:
... do_something(entity)
or manually page through results
.. testsetup:: query-page
from google.cloud import datastore
from tests.system.test_system import Config # system tests
client = datastore.Client()
key = client.key('_Doctest')
entity1 = datastore.Entity(key=key)
entity1['foo'] = 1337
entity2 = datastore.Entity(key=key)
entity2['foo'] = 42
Config.TO_DELETE.extend([entity1, entity2])
client.put_multi([entity1, entity2])
query = client.query(kind='_Doctest')
cursor = None
.. doctest:: query-page
>>> query_iter = query.fetch(start_cursor=cursor)
>>> pages = query_iter.pages
>>>
>>> first_page = next(pages)
>>> first_page_entities = list(first_page)
>>> query_iter.next_page_token
b'...'
:type kwargs: dict
:param kwargs: Parameters for initializing and instance of
:class:`~google.cloud.datastore.query.Query`.
:rtype: :class:`~google.cloud.datastore.query.Query`
:returns: A query object.
"""
if 'client' in kwargs:
raise TypeError('Cannot pass client')
if 'project' in kwargs:
raise TypeError('Cannot pass project')
kwargs['project'] = self.project
if 'namespace' not in kwargs:
kwargs['namespace'] = self.namespace
return Query(self, **kwargs)
def _get_read_options(eventual, transaction_id):
"""Validate rules for read options, and assign to the request.
Helper method for ``lookup()`` and ``run_query``.
:type eventual: bool
:param eventual: Flag indicating if ``EVENTUAL`` or ``STRONG``
consistency should be used.
:type transaction_id: bytes
:param transaction_id: A transaction identifier (may be null).
:rtype: :class:`.datastore_pb2.ReadOptions`
:returns: The read options corresponding to the inputs.
:raises: :class:`ValueError` if ``eventual`` is ``True`` and the
``transaction_id`` is not ``None``.
"""
if transaction_id is None:
if eventual:
return _datastore_pb2.ReadOptions(
read_consistency=_datastore_pb2.ReadOptions.EVENTUAL)
else:
return _datastore_pb2.ReadOptions()
else:
if eventual:
raise ValueError('eventual must be False when in a transaction')
else:
return _datastore_pb2.ReadOptions(
transaction=transaction_id)
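# Illustrative usage sketch (not part of the original module); the project id
# and the 'Task' kind are hypothetical:
#
#     client = Client(project='my-project')
#     key = client.key('Task')           # partial key; id assigned on commit
#     entity = Entity(key=key)
#     entity['done'] = False
#     client.put(entity)                 # thin wrapper over put_multi
#     fetched = client.get(entity.key)   # thin wrapper over get_multi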
|
{
"content_hash": "8012e2289815cc648caa36effe2fbe6e",
"timestamp": "",
"source": "github",
"line_count": 590,
"max_line_length": 79,
"avg_line_length": 35.50508474576271,
"alnum_prop": 0.6072656100821081,
"repo_name": "ammarkhann/FinalSeniorCode",
"id": "af7d6d4f9113980705f8e3b3faf219b1b17bfe6d",
"size": "21523",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/python2.7/site-packages/google/cloud/datastore/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "229289"
},
{
"name": "C++",
"bytes": "171536"
},
{
"name": "CSS",
"bytes": "928345"
},
{
"name": "Fortran",
"bytes": "14107"
},
{
"name": "HTML",
"bytes": "853239"
},
{
"name": "JavaScript",
"bytes": "4838516"
},
{
"name": "Jupyter Notebook",
"bytes": "518186"
},
{
"name": "Makefile",
"bytes": "214"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "81804894"
},
{
"name": "Roff",
"bytes": "6673"
},
{
"name": "Shell",
"bytes": "3409"
},
{
"name": "Smarty",
"bytes": "28408"
},
{
"name": "TeX",
"bytes": "1527"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import uuid
from .common import InfoExtractor
from .ooyala import OoyalaIE
from ..compat import (
compat_str,
compat_urllib_parse_urlencode,
compat_urlparse,
)
from ..utils import (
int_or_none,
extract_attributes,
determine_ext,
smuggle_url,
parse_duration,
)
class MiTeleBaseIE(InfoExtractor):
def _get_player_info(self, url, webpage):
player_data = extract_attributes(self._search_regex(
r'(?s)(<ms-video-player.+?</ms-video-player>)',
webpage, 'ms video player'))
video_id = player_data['data-media-id']
if player_data.get('data-cms-id') == 'ooyala':
return self.url_result(
'ooyala:%s' % video_id, ie=OoyalaIE.ie_key(), video_id=video_id)
config_url = compat_urlparse.urljoin(url, player_data['data-config'])
config = self._download_json(
config_url, video_id, 'Downloading config JSON')
mmc_url = config['services']['mmc']
duration = None
formats = []
for m_url in (mmc_url, mmc_url.replace('/flash.json', '/html5.json')):
mmc = self._download_json(
m_url, video_id, 'Downloading mmc JSON')
if not duration:
duration = int_or_none(mmc.get('duration'))
for location in mmc['locations']:
gat = self._proto_relative_url(location.get('gat'), 'http:')
bas = location.get('bas')
loc = location.get('loc')
ogn = location.get('ogn')
if None in (gat, bas, loc, ogn):
continue
token_data = {
'bas': bas,
'icd': loc,
'ogn': ogn,
'sta': '0',
}
media = self._download_json(
'%s/?%s' % (gat, compat_urllib_parse_urlencode(token_data)),
video_id, 'Downloading %s JSON' % location['loc'])
file_ = media.get('file')
if not file_:
continue
ext = determine_ext(file_)
if ext == 'f4m':
formats.extend(self._extract_f4m_formats(
file_ + '&hdcore=3.2.0&plugin=aasp-3.2.0.77.18',
video_id, f4m_id='hds', fatal=False))
elif ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
file_, video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False))
self._sort_formats(formats)
return {
'id': video_id,
'formats': formats,
'thumbnail': player_data.get('data-poster') or config.get('poster', {}).get('imageUrl'),
'duration': duration,
}
class MiTeleIE(InfoExtractor):
IE_DESC = 'mitele.es'
_VALID_URL = r'https?://(?:www\.)?mitele\.es/(?:[^/]+/)+(?P<id>[^/]+)/player'
_TESTS = [{
'url': 'http://www.mitele.es/programas-tv/diario-de/57b0dfb9c715da65618b4afa/player',
'info_dict': {
'id': '57b0dfb9c715da65618b4afa',
'ext': 'mp4',
'title': 'Tor, la web invisible',
'description': 'md5:3b6fce7eaa41b2d97358726378d9369f',
'series': 'Diario de',
'season': 'La redacción',
'season_number': 14,
'season_id': 'diario_de_t14_11981',
'episode': 'Programa 144',
'episode_number': 3,
'thumbnail': r're:(?i)^https?://.*\.jpg$',
'duration': 2913,
},
'add_ie': ['Ooyala'],
}, {
# no explicit title
'url': 'http://www.mitele.es/programas-tv/cuarto-milenio/57b0de3dc915da14058b4876/player',
'info_dict': {
'id': '57b0de3dc915da14058b4876',
'ext': 'mp4',
'title': 'Cuarto Milenio Temporada 6 Programa 226',
'description': 'md5:5ff132013f0cd968ffbf1f5f3538a65f',
'series': 'Cuarto Milenio',
'season': 'Temporada 6',
'season_number': 6,
'season_id': 'cuarto_milenio_t06_12715',
'episode': 'Programa 226',
'episode_number': 24,
'thumbnail': r're:(?i)^https?://.*\.jpg$',
'duration': 7313,
},
'params': {
'skip_download': True,
},
'add_ie': ['Ooyala'],
}, {
'url': 'http://www.mitele.es/series-online/la-que-se-avecina/57aac5c1c915da951a8b45ed/player',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
gigya_url = self._search_regex(
r'<gigya-api>[^>]*</gigya-api>[^>]*<script\s+src="([^"]*)">[^>]*</script>',
webpage, 'gigya', default=None)
gigya_sc = self._download_webpage(
compat_urlparse.urljoin('http://www.mitele.es/', gigya_url),
video_id, 'Downloading gigya script')
        # Get an appKey/uuid for getting the session key
appKey_var = self._search_regex(
r'value\s*\(\s*["\']appGridApplicationKey["\']\s*,\s*([0-9a-f]+)',
gigya_sc, 'appKey variable')
appKey = self._search_regex(
r'var\s+%s\s*=\s*["\']([0-9a-f]+)' % appKey_var, gigya_sc, 'appKey')
session_json = self._download_json(
'https://appgrid-api.cloud.accedo.tv/session',
video_id, 'Downloading session keys', query={
'appKey': appKey,
'uuid': compat_str(uuid.uuid4()),
})
paths = self._download_json(
'https://appgrid-api.cloud.accedo.tv/metadata/general_configuration,%20web_configuration',
video_id, 'Downloading paths JSON',
query={'sessionKey': compat_str(session_json['sessionKey'])})
ooyala_s = paths['general_configuration']['api_configuration']['ooyala_search']
source = self._download_json(
'http://%s%s%s/docs/%s' % (
ooyala_s['base_url'], ooyala_s['full_path'],
ooyala_s['provider_id'], video_id),
video_id, 'Downloading data JSON', query={
'include_titles': 'Series,Season',
'product_name': 'test',
'format': 'full',
})['hits']['hits'][0]['_source']
embedCode = source['offers'][0]['embed_codes'][0]
titles = source['localizable_titles'][0]
title = titles.get('title_medium') or titles['title_long']
description = titles.get('summary_long') or titles.get('summary_medium')
def get(key1, key2):
value1 = source.get(key1)
if not value1 or not isinstance(value1, list):
return
if not isinstance(value1[0], dict):
return
return value1[0].get(key2)
series = get('localizable_titles_series', 'title_medium')
season = get('localizable_titles_season', 'title_medium')
season_number = int_or_none(source.get('season_number'))
season_id = source.get('season_id')
episode = titles.get('title_sort_name')
episode_number = int_or_none(source.get('episode_number'))
duration = parse_duration(get('videos', 'duration'))
return {
'_type': 'url_transparent',
# for some reason only HLS is supported
'url': smuggle_url('ooyala:' + embedCode, {'supportedformats': 'm3u8,dash'}),
'id': video_id,
'title': title,
'description': description,
'series': series,
'season': season,
'season_number': season_number,
'season_id': season_id,
'episode': episode,
'episode_number': episode_number,
'duration': duration,
'thumbnail': get('images', 'url'),
}
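# Illustrative use (not part of the original file): extractors are selected
# automatically by URL when invoking youtube-dl, e.g. with one of the test
# URLs above:
#
#     youtube-dl "http://www.mitele.es/programas-tv/diario-de/57b0dfb9c715da65618b4afa/player"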
|
{
"content_hash": "076f216c10f2712febd1034843cb29db",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 102,
"avg_line_length": 38.38942307692308,
"alnum_prop": 0.5157169693174702,
"repo_name": "bosstb/HaberPush",
"id": "28b743cca1f2355a24cb6b913c7a7410f40d595f",
"size": "8002",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "youtube_dl/extractor/mitele.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "53"
},
{
"name": "HTML",
"bytes": "1074"
},
{
"name": "Python",
"bytes": "4295385"
}
],
"symlink_target": ""
}
|
import logging
from osc_lib import utils
from osc_lib.command import command
from vbclient.common.i18n import _
from vbclient.v1 import resource
LOG = logging.getLogger(__name__)
class ShowJob(command.ShowOne):
_description = _("Show Job")
def get_parser(self, prog_name):
parser = super(ShowJob, self).get_parser(prog_name)
parser.add_argument(
"job_id",
metavar="<job-id>",
help=_("job to display (ID)")
)
return parser
def take_action(self, args):
mgr = self.app.client_manager.volume_backup.job_mgr
job = mgr.get(args.job_id)
columns = job.get_show_column_names()
formatter = job.formatter
return columns, job.get_display_data(columns, formatter=formatter)
|
{
"content_hash": "ed6df6fe6d175ad85e8a5cec5905a74a",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 74,
"avg_line_length": 27.17241379310345,
"alnum_prop": 0.631979695431472,
"repo_name": "Huawei/OpenStackClient_VBS",
"id": "cccb3b433ba185786453e44d98914e907d9ff022",
"size": "1406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vbclient/osc/v1/job.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "82795"
}
],
"symlink_target": ""
}
|
from bambou import NURESTFetcher
class NUDomainFIPAclTemplateEntriesFetcher(NURESTFetcher):
""" Represents a NUDomainFIPAclTemplateEntries fetcher
Notes:
This fetcher enables to fetch NUDomainFIPAclTemplateEntry objects.
See:
bambou.NURESTFetcher
"""
@classmethod
def managed_class(cls):
""" Return NUDomainFIPAclTemplateEntry class that is managed.
Returns:
.NUDomainFIPAclTemplateEntry: the managed class
"""
from .. import NUDomainFIPAclTemplateEntry
return NUDomainFIPAclTemplateEntry
|
{
"content_hash": "bd5621f2c9e23e5cbc1290d343549f98",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 78,
"avg_line_length": 24.68,
"alnum_prop": 0.6726094003241491,
"repo_name": "nuagenetworks/vspk-python",
"id": "47dcf2f5ef89bb46323b49819d580609b45d3a5b",
"size": "2228",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vspk/v6/fetchers/nudomainfipacltemplateentries_fetcher.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "12909327"
}
],
"symlink_target": ""
}
|
print('hello hello hello')
|
{
"content_hash": "10f79b1047116bc961f98af80ed3f90a",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 26,
"avg_line_length": 27,
"alnum_prop": 0.7407407407407407,
"repo_name": "bobisme/hello",
"id": "3d7905d799df012f8e1a1d3229bd1c7b2a3bb882",
"size": "50",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/hello3.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "202"
},
{
"name": "C++",
"bytes": "205"
},
{
"name": "Crystal",
"bytes": "225"
},
{
"name": "Elixir",
"bytes": "47"
},
{
"name": "Elm",
"bytes": "84"
},
{
"name": "Erlang",
"bytes": "80"
},
{
"name": "Haskell",
"bytes": "46"
},
{
"name": "Java",
"bytes": "117"
},
{
"name": "Kotlin",
"bytes": "68"
},
{
"name": "Lua",
"bytes": "44"
},
{
"name": "Makefile",
"bytes": "969"
},
{
"name": "Nim",
"bytes": "26"
},
{
"name": "OCaml",
"bytes": "106"
},
{
"name": "Python",
"bytes": "368"
},
{
"name": "Ruby",
"bytes": "37"
},
{
"name": "Rust",
"bytes": "61"
},
{
"name": "Scala",
"bytes": "263"
}
],
"symlink_target": ""
}
|
from pylab import *
from scipy.io import wavfile
from python_speech_features import mfcc
from python_speech_features import delta
from python_speech_features import logfbank
# Read sound
srate, sig = wavfile.read('da_ta.wav')
plt.plot(np.arange(len(sig))/srate, sig)
plt.title('da_ta.wav')
plt.xlabel('Time (sec)')
plt.ylabel('Amplitude')
plt.show()
# Extract MFCC
winlen = 0.025
winstep = 0.01
numcep = 13
mfcc_raw = mfcc(sig, srate, winlen, winstep, numcep, appendEnergy = True) # 13-d MFCC
mfcc_deriv1 = delta(mfcc_raw, N = 2) # 1st deriv
mfccs = np.concatenate((mfcc_raw, mfcc_deriv1), axis=1).astype(np.float32)
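# mfccs has shape (num_frames, 26): 13 cepstral coefficients per frame plus
# their 13 first-order deltas -- hence the "26 dimension" plot title below.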
plt.imshow(np.rot90(mfccs, axes=(0,1)), aspect='auto')
plt.title('MFCC values (26 dimension)')
plt.xlabel('Time (msec)')
plt.ylabel('Coefficients')
plt.show()
|
{
"content_hash": "6db5fa623845dbbd4c9f1e2d33b7dcee",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 85,
"avg_line_length": 28.814814814814813,
"alnum_prop": 0.7249357326478149,
"repo_name": "jaekookang/useful_bits",
"id": "be90e78f319006f840d028a912306ac8f55a9329",
"size": "954",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Speech/Extract_MFCC/extract_MFCC.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "259178"
},
{
"name": "Jupyter Notebook",
"bytes": "2142862"
},
{
"name": "MATLAB",
"bytes": "8989"
},
{
"name": "Python",
"bytes": "72390"
}
],
"symlink_target": ""
}
|
import kol.Error as Error
from GenericRequest import GenericRequest
from kol.manager import PatternManager
class Crimbo2011ToyFactoryRequest(GenericRequest):
def __init__(self, session, itemId, quantity, targetPlayer='', note=''):
super(Crimbo2011ToyFactoryRequest, self).__init__(session)
self.url = session.serverURL + "crimbo11.php"
self.requestData['pwd'] = session.pwd
self.requestData['action'] = 'reallybuygifts'
self.requestData['whichitem'] = itemId
self.requestData['howmany'] = quantity
self.requestData['towho'] = targetPlayer
self.requestData['note'] = note
def parseResponse(self):
invalidGiftPattern = PatternManager.getOrCompilePattern('crimboInvalidGift')
if invalidGiftPattern.search(self.responseText):
raise Error.Error("Invalid gift selected.", Error.WRONG_KIND_OF_ITEM)
invalidPlayerPattern = PatternManager.getOrCompilePattern('crimboInvalidPlayer')
if invalidPlayerPattern.search(self.responseText):
raise Error.Error("Invalid player.", Error.USER_NOT_FOUND)
giftAlreadyReceivedPattern = PatternManager.getOrCompilePattern('crimboUserAlreadyReceivedGift')
if giftAlreadyReceivedPattern.search(self.responseText):
raise Error.Error("That player has already received that gift.", Error.ALREADY_COMPLETED)
|
{
"content_hash": "412ee066558dc7d4fb26248c7253cb7f",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 104,
"avg_line_length": 52.111111111111114,
"alnum_prop": 0.7114427860696517,
"repo_name": "KevZho/buffbot",
"id": "79b1204fc3c388341a4e8ea48c8bcddff63e0e02",
"size": "1407",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "kol/request/Crimbo2011ToyFactoryRequest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2078509"
}
],
"symlink_target": ""
}
|
"""Click command-line interface for REANA Job Controller."""
import io
import json
import click
from flask.cli import with_appcontext
from .spec import build_openapi_spec
@click.group()
def openapi():
"""Openapi management commands."""
@openapi.command()
@click.argument("output", type=click.File("w"))
@with_appcontext
def create(output):
"""Generate OpenAPI file."""
spec = build_openapi_spec()
output.write(json.dumps(spec, indent=2, sort_keys=True))
if not isinstance(output, io.TextIOWrapper):
click.echo(
click.style(
"OpenAPI specification written to {}".format(output.name), fg="green"
)
)
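
# Hypothetical invocation (a sketch; assumes the `openapi` group is hooked
# into the Flask CLI, which the @with_appcontext decorator above implies):
#
#   $ flask openapi create openapi.json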
|
{
"content_hash": "8ebb7ea96e2371a785fa465570268599",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 85,
"avg_line_length": 23.482758620689655,
"alnum_prop": 0.6563876651982379,
"repo_name": "reanahub/reana-job-controller",
"id": "e3b230b4797fdb39067bc10292b02c5974c472a0",
"size": "954",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "reana_job_controller/cli.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "3563"
},
{
"name": "Python",
"bytes": "130603"
},
{
"name": "Shell",
"bytes": "4720"
}
],
"symlink_target": ""
}
|
import numpy as np
import os
import sys
import nrrd, png
from PIL import Image
def thumbnail(im, size=(25,25)):
im = im.resize(size, Image.ANTIALIAS)
return im
if (len(sys.argv) < 4):
print 'Error: missing arguments!'
print 'e.g. python Index2MaxProjPNG_tn.py template DomainPrefix indexfile1.nrrd indexfileN.nrrd ...'
else:
print 'Loading template...'
data1, header1 = nrrd.read(str(sys.argv[1]))
template=np.array((np.max(data1,axis=2)*0.5),dtype=np.uint8)
del data1, header1
print 'Adding to domains', str(sys.argv[2]), '....'
for x in range(3,(len(sys.argv))):
print 'adding data from file', sys.argv[x]
readdata, options = nrrd.read(str(sys.argv[x]))
for i in np.unique(readdata[readdata>0]):
if np.uint8(i) in readdata:
print 'appending index', str(i)
domfile = str(sys.argv[2]) + str(i).zfill(4) + '_tn.png'
domain = np.zeros(readdata.shape,dtype=np.uint8)
domain[readdata==i]=np.uint8(255)
png1=np.max((np.transpose((np.max(domain,axis=2))), template.T),axis=0)
if np.shape(png1)[0] < np.shape(png1)[1]:
thumbnail(Image.fromarray(png1), size=(120,60)).save(domfile,"PNG")
elif np.shape(png1)[0] > np.shape(png1)[1]:
thumbnail(Image.fromarray(png1), size=(60,120)).save(domfile,"PNG")
else:
thumbnail(Image.fromarray(png1), size=(60,60)).save(domfile,"PNG")
del domain, png1
print 'Done.'
|
{
"content_hash": "560e943cab785abf56f0df7c8d2f4077",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 104,
"avg_line_length": 40.35897435897436,
"alnum_prop": 0.5857687420584498,
"repo_name": "Robbie1977/3DstackDisplay",
"id": "0078f37a74fa497afc59e3d83965bb2129f27b4a",
"size": "1574",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Index2MaxProjPNG_tn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "63338"
}
],
"symlink_target": ""
}
|
from uitester.ui import main_window
def start():
main_window.start()
if __name__ == '__main__':
start()
|
{
"content_hash": "7c52245c69425eea8cfb3c412c07f67e",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 35,
"avg_line_length": 12.88888888888889,
"alnum_prop": 0.5862068965517241,
"repo_name": "IfengAutomation/uitester",
"id": "514346f261b616003c19fae1f0e7338274a4349f",
"size": "116",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uitester/ui_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "285333"
},
{
"name": "Shell",
"bytes": "550"
}
],
"symlink_target": ""
}
|
import os
import unittest
import launch
from launch_ros.substitutions import ExecutableInPackage
import launch_testing
import launch_testing.actions
from launch_testing.asserts import assertSequentialStdout
import pytest
# @brief Test goal: "Integrationtest for the complexdata example of iceoryx"
# @pre setup ROS2 launch executables for RouDi (debug mode) and the example processes
# @post check if all applications return exitcode 0 (success) after test run
@pytest.mark.launch_test
def generate_test_description():
proc_env = os.environ.copy()
colcon_prefix_path = os.environ.get('COLCON_PREFIX_PATH', '')
executable_list = ['iox-cpp-publisher-vector', 'iox-cpp-subscriber-vector',
'iox-cpp-publisher-complexdata', 'iox-cpp-subscriber-complexdata']
process_list = []
    for executable in executable_list:
        tmp_exec = os.path.join(
            colcon_prefix_path,
            'example_complexdata/bin/',
            executable)
tmp_process = launch.actions.ExecuteProcess(
cmd=[tmp_exec],
env=proc_env, output='screen')
process_list.append(tmp_process)
print("Process list:", process_list)
roudi_executable = os.path.join(
colcon_prefix_path,
'iceoryx_posh/bin/',
'iox-roudi'
)
roudi_process = launch.actions.ExecuteProcess(
cmd=[roudi_executable, '-l', 'debug'],
env=proc_env, output='screen',
sigterm_timeout='20')
return launch.LaunchDescription([
process_list[0],
process_list[1],
process_list[2],
process_list[3],
roudi_process,
launch_testing.actions.ReadyToTest()
]), {'iox-cpp-publisher-vector': process_list[0], 'iox-cpp-subscriber-vector': process_list[1],
'iox-cpp-publisher-complexdata': process_list[2], 'iox-cpp-subscriber-complexdata': process_list[3],
'roudi_process': roudi_process}
# These tests will run concurrently with the dut process. After this test is done,
# the launch system will shut down RouDi
class TestComplexDataExample(unittest.TestCase):
def test_roudi_ready(self, proc_output):
proc_output.assertWaitFor(
'RouDi is ready for clients', timeout=45, stream='stdout')
def test_publisher_subscriber_data_exchange(self, proc_output):
proc_output.assertWaitFor(
'iox-cpp-subscriber-vector got values: 15, 16, 17, 18, 19', timeout=45, stream='stdout')
def test_publisher_subscriber_untyped_data_exchange(self, proc_output):
proc_output.assertWaitFor(
'iox-cpp-subscriber-complexdata got values:\nstringForwardList: hello, world\nintegerList: 15, 22, 11\noptionalList: optional is empty, 42\nfloatStack: 44, 33, 22, 11, 0\nsomeString: hello iceoryx\ndoubleVector: 11, 12, 13, 14, 15\nvariantVector: seven, 8, nine',
timeout=45, stream='stdout')
# These tests run after shutdown and examine the stdout log
@launch_testing.post_shutdown_test()
class TestComplexdataExampleExitCodes(unittest.TestCase):
def test_exit_code(self, proc_info):
launch_testing.asserts.assertExitCodes(proc_info)
|
{
"content_hash": "d12b3adc1eeca23bba341fdfd8d6f209",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 275,
"avg_line_length": 37.87951807228916,
"alnum_prop": 0.683206106870229,
"repo_name": "eclipse-iceoryx/iceoryx",
"id": "ffec24b399f263728f291bf9819add3b6452515f",
"size": "3789",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "iceoryx_integrationtest/iceoryx_integrationtest/test_complexdata_example.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "195601"
},
{
"name": "C++",
"bytes": "5989063"
},
{
"name": "CMake",
"bytes": "154728"
},
{
"name": "Dockerfile",
"bytes": "1953"
},
{
"name": "PowerShell",
"bytes": "2458"
},
{
"name": "Python",
"bytes": "85178"
},
{
"name": "Shell",
"bytes": "96915"
},
{
"name": "Starlark",
"bytes": "62418"
}
],
"symlink_target": ""
}
|
from .auto_serial import *
# END
|
{
"content_hash": "134c0b08c00813f67266c08d4d363f75",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 26,
"avg_line_length": 11.333333333333334,
"alnum_prop": 0.6764705882352942,
"repo_name": "whaleygeek/mb_sdcard",
"id": "4b5c437bd50ef7dc431464d1514caa64e9b741cb",
"size": "49",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/microbit/auto_serial/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "282475"
}
],
"symlink_target": ""
}
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from manager.libs.snippets.http_tools import http_destination_exists
from django.conf import settings
import urlparse
class Migration(SchemaMigration):
def forwards(self, orm):
pass
# Postponed in ticket #58
#from manager.apps.brand.models import BrandOwner
## Updating brand url field to append default http protocol
#db.execute("UPDATE BRAND_OWNER SET \"OWNER_LOGO\"=CONCAT(CONCAT('owner/logo/', lpad(\"OWNER_CD\"::text, 6, '0')), '.jpg');")
## Adding field 'Brand.brand_logo'
#media = urlparse.urlparse(settings.MEDIA_URL)
#for owner in BrandOwner.objects.all():
# if hasattr(owner.owner_logo, 'url'):
# if not http_destination_exists(media.netloc,
# urlparse.urljoin(media.path, owner.owner_logo.name)):
# print "Owner logo could not be found ( %s )" % owner.owner_logo.name
# owner.owner_logo = None
# owner.save()
def backwards(self, orm):
# Deleting field 'Brand.brand_logo'
pass
models = {
u'brand.brand': {
'Meta': {'ordering': "[u'brand_nm']", 'unique_together': "((u'brand_nm', u'owner_cd'),)", 'object_name': 'Brand', 'db_table': "u'brand'"},
'brand_link': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'db_column': "u'BRAND_LINK'", 'blank': 'True'}),
'brand_logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'db_column': "u'BRAND_LOGO'", 'blank': 'True'}),
'brand_nm': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'BRAND_NM'"}),
'brand_type_cd': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brand.BrandType']", 'db_column': "u'BRAND_TYPE_CD'"}),
'bsin': ('django.db.models.fields.CharField', [], {'max_length': '6', 'primary_key': 'True', 'db_column': "u'BSIN'"}),
'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "u'COMMENTS'", 'blank': 'True'}),
'flag_delete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "u'FLAG_DELETE'"}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "u'LAST_MODIFIED'", 'blank': 'True'}),
'owner_cd': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brand.BrandOwner']", 'null': 'True', 'db_column': "u'OWNER_CD'", 'blank': 'True'})
},
u'brand.brandowner': {
'Meta': {'ordering': "[u'owner_nm']", 'object_name': 'BrandOwner', 'db_table': "u'brand_owner'"},
'owner_cd': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "u'OWNER_CD'"}),
'owner_link': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'db_column': "u'OWNER_LINK'", 'blank': 'True'}),
'owner_logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'db_column': "u'OWNER_LOGO'", 'blank': 'True'}),
'owner_nm': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'OWNER_NM'"}),
'owner_wiki_en': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'db_column': "u'OWNER_WIKI_EN'", 'blank': 'True'})
},
u'brand.brandtype': {
'Meta': {'object_name': 'BrandType', 'db_table': "u'brand_type'"},
'brand_type_cd': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "u'BRAND_TYPE_CD'"}),
'brand_type_nm': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'BRAND_TYPE_NM'"})
}
}
complete_apps = ['brand']
|
{
"content_hash": "a450339cf0969514e2a93bb24f896b1f",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 171,
"avg_line_length": 61.61538461538461,
"alnum_prop": 0.5717852684144819,
"repo_name": "okfn/opd-brand-manager",
"id": "f3640d36bd3c9aff55505978f30c84fa6e9f0130",
"size": "4029",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "manager/apps/brand/migrations/0015_check_broken_images.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "20126"
},
{
"name": "Python",
"bytes": "198883"
},
{
"name": "Shell",
"bytes": "96"
}
],
"symlink_target": ""
}
|
from functools import reduce
from math import factorial
import click
__doc__ = """See https://jtara1.github.io/bernoulli.html
for an updated Bernoulli Trials Calc"""
@click.command()
@click.argument('trials', type=click.INT)
@click.argument('prob_of_success', type=click.FLOAT)
def bernoulli_trials(trials, prob_of_success):
def binomial_distribution(n, k):
return factorial(n) / (factorial(k) * factorial(n - k))
successes = 0
prob = 1
probabilities = []
output = ['trials = {}, success = {}'.format(trials, prob_of_success),
''.join(['-'] * 50)]
prob_of_failure = 1 - prob_of_success
while (prob >= 0.0001 or successes <= 2) and trials >= successes:
prob = binomial_distribution(trials, successes) \
* prob_of_success ** successes \
* prob_of_failure ** (trials - successes)
output.append("successes = {}, probability = {:.3}"
.format(successes, prob))
successes += 1
probabilities.append(prob)
output.append(''.join(['-'] * 50))
for i in range(1, len(probabilities) - 1):
output.append("successes >= {}, probability = {:.3}"
.format(i, reduce(lambda x, y: x + y, probabilities[i:])))
print('\n'.join(output))
if __name__ == '__main__':
    # click parses sys.argv and the command prints its own results, so there
    # is nothing useful to capture from the return value.
    bernoulli_trials()
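
# Worked example for `bernoulli_trials 10 0.05`:
#   P(0 successes) = C(10,0) * 0.05**0 * 0.95**10 ~= 0.599
#   P(1 success)   = C(10,1) * 0.05**1 * 0.95**9  ~= 0.315
# The loop keeps going until prob < 0.0001 (once successes > 2) or until
# successes exceeds trials.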
|
{
"content_hash": "f779b36ef0df1f4bf22565e4031d61b1",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 80,
"avg_line_length": 33.476190476190474,
"alnum_prop": 0.5889046941678521,
"repo_name": "jtara1/MiscScripts",
"id": "20e979ac32a14f0a4304295f499e8e04f81c4e5d",
"size": "1406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "misc_scripts/bernoulli_trials.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "221"
},
{
"name": "Batchfile",
"bytes": "57"
},
{
"name": "C",
"bytes": "611"
},
{
"name": "C#",
"bytes": "3698"
},
{
"name": "C++",
"bytes": "1604"
},
{
"name": "HTML",
"bytes": "184"
},
{
"name": "Python",
"bytes": "42084"
},
{
"name": "Shell",
"bytes": "52"
}
],
"symlink_target": ""
}
|
import corepy.spre.spe as spe
# ------------------------------
# Registers
# ------------------------------
class mods:
abs = 'abs'
bias = 'bias'
bx2 = 'bx2'
invert = 'invert'
sign = 'sign'
x2 = 'x2'
class divcomp:
x = 'x'
y = 'y'
z = 'z'
w = 'w'
class Address(object):
def __init__(self, base, offset):
        if not isinstance(base, QualifiedCALRegister) and not isinstance(base, CALRegister):
            raise TypeError("base must be a register.")
        if type(offset) != int and type(offset) != long and type(offset) != float:
            raise TypeError("offset must be a numeric type")
self.base = base
self.offset = offset
def render(self):
return self.base.render() + ' + ' + str(self.offset)
def __str__(self):
return self.render()
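# Example (from the relative-addressing tests at the bottom of this file):
# `g[1 + r0.x]` goes through CALRegister.__radd__, producing an Address with
# base r0.x and offset 1 that renders as 'r0.x + 1'.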
class CALRegister(spe.Register):
def __init__(self, reg, name = None):
spe.Register.__init__(self, name)
self.reg = reg
return
def __add__(self, other):
if type(other) == int or type(other) == long or type(other) == float:
return Address(self, other)
else:
raise "Can't do that with a CALRegister."
def __radd__(self, other):
if type(other) == int or type(other) == long or type(other) == float:
return Address(self, other)
else:
raise "Can't do that with a CALRegister."
def __eq__(self, other):
# TODO - AWF - allow string names like "i0" to be equal?
return type(self) == type(other) and self.name == other.name
def __call__(self, swizzle_str = '', abs = False, bias = False, bx2 = False, invert = False, sign = False, x2 = False, neg = '', divcomp = None):
"""
swizzle_str should be a string representing the swizzle or mask, such as 'xxxx' or 'x___'.
neg should be a string of components to be negated such as ('xw')
divcomp should be one of registers.divcomp such as divcomp.x
"""
return QualifiedCALRegister(self, tuple(swizzle_str), abs=abs, bias=bias, bx2=bx2, invert=invert, sign=sign, x2=x2, neg=tuple(neg), divcomp=divcomp)
def __getattribute__(self, name):
try:
return object.__getattribute__(self, name)
except:
valid = True
if len(name) == 1 or len(name) == 2 or len(name) == 4:
for swizzle_comp in name:
if not swizzle_comp in ['x', 'y', 'z', 'w', '_', '0', '1', 'r', 'g', 'b', 'a']:
valid = False
if valid:
return QualifiedCALRegister(self, tuple(name))
raise AttributeError
def render(self):
return self.name
# This is the register type that will be seen by the ISA WHENEVER any source modifiers or swizzles
# are present. Its job is basically to generate a string based on all of the modifiers.
# Note that these registers are TEMPORARY - for all practical purposes, for now, we can view them as
# tied to a particular instruction. The user can cache them by naming them, though, and this should be ok.
class QualifiedCALRegister(CALRegister):
def __init__(self, cal_reg, swizzle = (), abs=False, bias=False, bx2=False, invert=False, sign=False, x2=False, neg = (), divcomp = None):
if type(cal_reg) == QualifiedCALRegister:
cal_reg = cal_reg.GetBaseRegister()
CALRegister.__init__(self, cal_reg.reg)
self.cal_reg = cal_reg
self.reg = cal_reg.reg
self.swizzle = swizzle
self.abs = abs
self.bias = bias
self.bx2 = bx2
self.invert = invert
self.sign = sign
self.x2 = x2
self.neg = neg
self.divcomp = divcomp
self.name = self.render()
def check(self):
valid = True
if len(self.swizzle) == 1 or len(self.swizzle) == 2 or len(self.swizzle) == 4:
for swizzle_comp in self.swizzle:
if not swizzle_comp in ['x', 'y', 'z', 'w', '_', '0', '1', 'r', 'g', 'b', 'a']:
valid = False
else:
valid = False
        if self.divcomp != None and self.divcomp not in ['y', 'z', 'w', 'unknown']:
valid = False
        if self.neg != ():
for neg_comp in self.neg:
if neg_comp not in ['x', 'y', 'z', 'w', 'r', 'g', 'b', 'a']:
valid = False
return valid
def render(self):
render_str = ''
render_str += self.GetBaseRegister().reg
if self.abs == True:
render_str += '_abs'
if self.bias == True:
render_str += '_bias'
if self.bx2 == True:
render_str += '_bx2'
if self.invert == True:
render_str += '_invert'
if self.sign == True:
render_str += '_sign'
if self.x2 == True:
render_str += '_x2'
if self.neg != () and self.neg != None:
render_str += '_neg('
for neg_comp in self.neg:
render_str += neg_comp
render_str += ')'
if self.divcomp != None:
            render_str += '_divcomp(' + self.divcomp + ')'
if self.swizzle != ():
render_str += '.'
for swizzle_comp in self.swizzle:
render_str += swizzle_comp
return render_str
def __call__(self, swizzle_str = None, abs = None, bias = None, bx2 = None, invert = None, sign = None, x2 = None, neg = None, divcomp = None):
"""
swizzle_str should be a string representing the swizzle or mask, such as 'xxxx' or 'x___'.
neg should be a string of components to be negated such as ('xw')
divcomp should be one of registers.divcomp such as divcomp.x
"""
retval = self.copy()
if swizzle_str != None:
valid = True
            if len(swizzle_str) == 1 or len(swizzle_str) == 2 or len(swizzle_str) == 4:
                for swizzle_comp in swizzle_str:
                    if not swizzle_comp in ['x', 'y', 'z', 'w', '_', '0', '1', 'r', 'g', 'b', 'a']:
                        valid = False
if valid:
retval.swizzle = tuple(swizzle_str)
else:
raise "Bad swizzle value (must be string of length 1, 2 or 4 composed of ['x', 'y', 'z', 'w', '-', '0', '1', 'r', 'g', 'b', 'a']"
if abs != None:
retval.abs = abs
if bias != None:
retval.bias = bias
if bx2 != None:
retval.bx2 = bx2
if invert != None:
retval.invert = invert
if sign != None:
retval.sign = sign
if x2 != None:
retval.x2 = x2
if neg != None:
retval.neg = neg
if divcomp != None:
retval.divcomp = divcomp
return retval
def __getattribute__(self, name):
try:
return object.__getattribute__(self, name)
except:
return object.__getattribute__(self, '__call__')(swizzle_str=name)
def __str__(self):
return self.render()
def copy(self):
return QualifiedCALRegister(self, swizzle=self.swizzle, abs=self.abs, bias=self.bias, bx2=self.bx2, invert=self.invert, sign=self.sign, x2=self.x2, neg=self.neg, divcomp=self.divcomp)
def GetBaseRegister(self):
return self.cal_reg
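# Render sketch, using the temporary register r0 defined below:
#   r0.x.render()                 -> 'r0.x'
#   r0('xyzw', neg='x').render()  -> 'r0_neg(x).xyzw'
# Source modifiers are appended as suffixes before the swizzle, in the fixed
# order abs, bias, bx2, invert, sign, x2, neg, divcomp.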
class CALBuffer:
def __init__(self, buffer, name, rel_addressable=False):
# right now, buffer and name should match
self.buffer = buffer
self.name = name
self.rel_addressable = rel_addressable
def __getitem__(self, i):
if type(i) != int and type(i) != long and type(i) != float and not isinstance(i, QualifiedCALRegister) and not isinstance(i, Address):
if self.rel_addressable == True:
if type(i) != str:
raise "Index must be numeric or a register"
else:
raise "Index must be numeric"
if isinstance(i, QualifiedCALRegister) or isinstance(i, Address):
if self.rel_addressable == False:
raise "This register/buffer type is not relatively addressable"
else:
i_str = i.render()
else:
i_str = str(i)
name = self.name + '[' + i_str + ']'
if name not in globals():
globals()[name] = CALRegister(name, name=name)
return globals()[name]
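# __getitem__ memoizes each element register in module globals, so repeated
# lookups such as g[0] hand back the identical CALRegister object every time.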
class TempRegister(CALRegister): pass
class LiteralRegister(CALRegister): pass
r = []
l = []
for i in range(256, -1, -1): # reverse order so acquire_register starts at 0
stri = str(i)
name = "r" + stri
globals()[name] = TempRegister(name, name = name)
r.append(globals()[name])
name = "l" + stri
globals()[name] = LiteralRegister(name, name = name)
l.append(globals()[name])
globals()['vWinCoord0'] = CALRegister('vWinCoord0', name='vWinCoord0')
globals()['v0'] = CALRegister('v0', name='v0')
globals()['a0'] = CALRegister('a0', name='a0')
globals()['g'] = CALBuffer('g', name='g', rel_addressable=True)
for i in range(0, 16):
stri = str(i)
name = "i" + stri
globals()[name] = CALBuffer(name, name=name, rel_addressable=False)
name = "cb" + stri
globals()[name] = CALBuffer(name, name=name, rel_addressable=True)
name = "o" + stri
globals()[name] = CALRegister(name, name=name)
name = "x" + stri
globals()[name] = CALBuffer(name, name = name, rel_addressable=True)
def TestRelativeAddressing():
import corepy.arch.cal.platform as env
import corepy.arch.cal.isa as cal
proc = env.Processor(0)
input_mem = proc.alloc_remote('I', 4, 16, 1)
output_mem = proc.alloc_remote('I', 4, 1, 1)
for i in range(16*1*4):
for j in range(4):
input_mem[i*4 + j] = i
prgm = env.Program()
code = prgm.get_stream()
cal.set_active_code(code)
cal.dcl_output(o0, USAGE=cal.usage.generic)
cal.dcl_literal(l0, 1, 1, 1, 1)
cal.dcl_literal(l1, 16, 16, 16, 16)
cal.mov(r0, r0('0000'))
cal.mov(r1, r1('0000'))
cal.whileloop()
cal.iadd(r1, r1, g[r0.x])
cal.iadd(r0, r0, l0)
cal.breakc(cal.relop.ge, r0, l1)
cal.endloop()
cal.mov(o0, r1)
prgm.set_binding('g[]', input_mem)
prgm.set_binding('o0', output_mem)
prgm.add(code)
domain = (0, 0, 128, 128)
prgm.print_code()
proc.execute(prgm, domain)
# code.cache_code()
# print code.render_string
if output_mem[0] == 120:
print "Passed relative addressing test"
else:
print "Failed relative addressing test"
proc.free(input_mem)
proc.free(output_mem)
def TestRelativeAddressing2():
import corepy.arch.cal.platform as env
import corepy.arch.cal.isa as cal
proc = env.Processor(0)
input_mem = proc.alloc_remote('I', 4, 16, 1)
output_mem = proc.alloc_remote('I', 4, 1, 1)
for i in range(17*1*4):
for j in range(4):
input_mem[i*4 + j] = i
prgm = env.Program()
code = prgm.get_stream()
cal.set_active_code(code)
cal.dcl_output(o0, USAGE=cal.usage.generic)
cal.dcl_literal(l0, 1, 1, 1, 1)
cal.dcl_literal(l1, 16, 16, 16, 16)
cal.mov(r0, r0('0000'))
cal.mov(r1, r1('0000'))
cal.whileloop()
cal.iadd(r1, r1, g[1 + r0.x])
cal.iadd(r0, r0, l0)
cal.breakc(cal.relop.ge, r0, l1)
cal.endloop()
cal.mov(o0, r1)
#code.cache_code()
#print code.render_string
prgm.set_binding('g[]', input_mem)
prgm.set_binding('o0', output_mem)
domain = (0, 0, 128, 128)
prgm.add(code)
proc.execute(prgm, domain)
if output_mem[0] == 136:
print "Passed relative addressing with offset test"
else:
print "Failed relative addressing with offset test"
# print output_mem
proc.free(input_mem)
proc.free(output_mem)
if __name__ == '__main__':
TestRelativeAddressing()
TestRelativeAddressing2()
|
{
"content_hash": "a2197de5e8f6d1a7d36c716405722f70",
"timestamp": "",
"source": "github",
"line_count": 389,
"max_line_length": 187,
"avg_line_length": 28.938303341902312,
"alnum_prop": 0.597583725681798,
"repo_name": "matthiaskramm/corepy",
"id": "da32501f9a4d057e2ebdd105b04f70efa6f96eb2",
"size": "13446",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corepy/arch/cal/types/registers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "291898"
},
{
"name": "C++",
"bytes": "2256"
},
{
"name": "Python",
"bytes": "2114744"
},
{
"name": "Shell",
"bytes": "145"
}
],
"symlink_target": ""
}
|
'''Autogenerated by get_gl_extensions script, do not edit!'''
from OpenGL import platform as _p
from OpenGL.GL import glget
EXTENSION_NAME = 'GL_NV_texture_shader2'
_p.unpack_constants( """GL_DOT_PRODUCT_TEXTURE_3D_NV 0x86EF""", globals())
def glInitTextureShader2NV():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( EXTENSION_NAME )
|
{
"content_hash": "bbaec17160a087de4705eaffb257e208",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 74,
"avg_line_length": 39.36363636363637,
"alnum_prop": 0.7528868360277137,
"repo_name": "frederica07/Dragon_Programming_Process",
"id": "24af6b4a5aea3a7abb83ea64f3c22e0904849898",
"size": "433",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PyOpenGL-3.0.2/OpenGL/raw/GL/NV/texture_shader2.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "1548"
},
{
"name": "Python",
"bytes": "2558317"
}
],
"symlink_target": ""
}
|
import logging
import re
import signal
import sys
import time
from typing import List
from praw.exceptions import APIException # type: ignore
from praw.models import Comment, Submission, Subreddit # type: ignore
from prawcore.exceptions import RequestException, ServerError, Forbidden, NotFound # type: ignore
import tor.core
from tor.core import __version__
from tor.core.config import config, Config
from tor.strings import translation
log = logging.getLogger(__name__)
subreddit_regex = re.compile(
r'reddit.com\/r\/([a-z0-9\-\_\+]+)',
flags=re.IGNORECASE
)
i18n = translation()
class flair(object):
unclaimed = 'Unclaimed'
summoned_unclaimed = 'Summoned - Unclaimed'
completed = 'Completed!'
in_progress = 'In Progress'
meta = 'Meta'
disregard = 'Disregard'
class reports(object):
original_post_deleted_or_locked = 'Original post has been deleted or locked'
post_should_be_marked_nsfw = 'Post should be marked as NSFW'
no_bot_accounts = 'No bot accounts but our own'
post_violates_rules = 'Post Violates Rules on Partner Subreddit'
# error message for an API timeout
# error message for an API timeout; the unit alternatives are spelled out
# because a greedy \w+ would swallow the plural 's' and never match the
# singular keys used in time_map below
_pattern = re.compile(r'again in (?P<number>[0-9]+) (?P<unit>second|minute|hour)s?\.$',
                      re.IGNORECASE)
def _(message: str) -> str:
"""
Message formatter. Returns the message and the disclaimer for the
footer.
:param message: string. The message to be displayed.
:return: string. The original message plus the footer.
"""
return i18n['responses']['bot_footer'].format(message, version=__version__)
def clean_list(items: List[str]) -> List[str]:
"""
Takes a list and removes entries that are only newlines.
:param items: List.
:return: List, sans newlines
"""
    return [item.strip() for item in items if item.strip()]
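
# e.g. clean_list(['  foo ', '', '\n', 'bar']) == ['foo', 'bar']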
def send_to_modchat(message: str, cfg: Config, channel='general') -> None:
"""
Sends a message to the ToR mod chat.
:param message: String; the message that is to be encoded
:param cfg: the global config dict.
:param channel: String; the name of the channel to send to. '#' optional.
:return: None.
"""
if cfg.modchat:
try:
cfg.modchat.api_call(
'chat.postMessage',
channel=channel,
text=message
)
except Exception as e:
log.error(f'Failed to send message to modchat #{channel}: '
f'\'{message}\'')
log.error(e)
def is_our_subreddit(subreddit_name: str, cfg: Config) -> bool:
"""
Compares given subreddit to the one we're operating out of
:param subreddit_name: String; the questioned subreddit
:param cfg: the global config object
:return: Boolean for if they are the same subreddit
"""
# We're referring to `cfg.tor.name` in case of testing environment, and
# using `.casefold()` to provide cross-characterset, case-insensitive
# string comparisons.
# @see https://docs.python.org/3/library/stdtypes.html#str.casefold
return str(subreddit_name).casefold() == str(cfg.tor.name).casefold()
def clean_id(post_id: str) -> str:
"""
Fixes the Reddit ID so that it can be used to get a new object.
By default, the Reddit ID is prefixed with something like `t1_` or
`t3_`, but this doesn't always work for getting a new object. This
method removes those prefixes and returns the rest.
:param post_id: String. Post fullname (ID)
:return: String. Post fullname minus the first three characters.
"""
return post_id[post_id.index('_') + 1:]
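
# e.g. clean_id('t1_c0deface') == 'c0deface'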
def get_parent_post_id(post: Comment, subreddit: Subreddit) -> Submission:
"""
Takes any given comment object and returns the object of the
original post, no matter how far up the chain it is. This is
a very time-intensive function because of how Reddit handles
rate limiting and the fact that you can't just request the
top parent -- you have to just loop your way to the top.
:param post: comment object
    :param subreddit: the praw object used to look up parent comments and submissions
:return: submission object of the top post.
"""
if not post.is_root:
parent = subreddit.comment(id=clean_id(post.parent_id))
return get_parent_post_id(parent, subreddit)
else:
return subreddit.submission(id=clean_id(post.parent_id))
def get_wiki_page(pagename: str, cfg: Config) -> str:
"""
Return the contents of a given wiki page.
:param pagename: String. The name of the page to be requested.
:param cfg: Dict. Global config object.
    :return: String. The content of the requested page if present,
        else the empty string.
    """
log.debug(f'Retrieving wiki page {pagename}')
try:
return cfg.tor.wiki[pagename].content_md
except NotFound:
return ''
def handle_rate_limit(exc: APIException) -> None:
time_map = {
'second': 1,
'minute': 60,
'hour': 60 * 60,
}
matches = re.search(_pattern, exc.message)
if not matches:
log.error(f'Unable to parse rate limit message {exc.message!r}')
return
    delay = int(matches.group('number')) * time_map[matches.group('unit').lower()]
time.sleep(delay + 1)
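
# e.g. a rate-limit message ending in 'again in 9 minutes.' makes this sleep
# for 9 * 60 + 1 seconds before control returns to the retry loop.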
def run_until_dead(func):
"""
The official method that replaces all that ugly boilerplate required to
start up a bot under the TranscribersOfReddit umbrella. This method handles
communication issues with Reddit, timeouts, and handles CTRL+C and
unexpected crashes.
:param func: The function that you want to run; this will automatically be
passed the config object. Historically, this is the only thing needed
to start a bot.
:return: None.
"""
def double_ctrl_c_handler(*args, **kwargs) -> None:
if not tor.core.is_running:
log.critical('User pressed CTRL+C twice!!! Killing!')
sys.exit(1)
log.info(
'\rUser triggered command line shutdown. Will terminate after current loop.'
)
tor.core.is_running = False
# handler for CTRL+C
signal.signal(signal.SIGINT, double_ctrl_c_handler)
try:
while tor.core.is_running:
try:
func(config)
except APIException as e:
if e.error_type == 'RATELIMIT':
log.warning(
'Ratelimit - artificially limited by Reddit. Sleeping'
' for requested time!'
)
handle_rate_limit(e)
except (RequestException, ServerError, Forbidden) as e:
log.warning(f'{e} - Issue communicating with Reddit. Sleeping for 60s!')
time.sleep(60)
log.info('User triggered shutdown. Shutting down.')
sys.exit(0)
except Exception as e:
log.error(e)
sys.exit(1)
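
# Minimal usage sketch. `check_inbox` is a hypothetical bot loop body; as the
# docstring above notes, whatever you pass in is called with the global config
# object on every iteration:
#
#   def check_inbox(cfg):
#       ...  # talk to Reddit via cfg
#
#   if __name__ == '__main__':
#       run_until_dead(check_inbox)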
|
{
"content_hash": "d3f7ef01dac2a179b796dd3d4246305c",
"timestamp": "",
"source": "github",
"line_count": 223,
"max_line_length": 98,
"avg_line_length": 33,
"alnum_prop": 0.6431580377768719,
"repo_name": "TranscribersOfReddit/TranscribersOfReddit",
"id": "3f5e2bb41b7ac0c25804e8dee7e258b9e0e65c55",
"size": "7359",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tor/core/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "257"
},
{
"name": "Python",
"bytes": "43722"
}
],
"symlink_target": ""
}
|
"""This is just a hook for morph.io. Downloads a hard-coded list of
data from other morph.io scrapers, and merges them into data.sqlite.
To run this, you'll need a free morph.io account. Set MORPH_API_KEY
to the value of your key.
"""
from logging import getLogger
from os import environ
from urllib.parse import urlencode
from urllib.request import urlopen
from msd.cmd import run
from msd.cmd import set_up_logging
SCRAPER_DATA = {
'sr.company': 'https://morph.io/spendright/scrape-companies/data.sqlite',
'sr.campaign': 'https://morph.io/spendright/scrape-campaigns/data.sqlite',
'sr.url': 'https://morph.io/spendright/scrape-urls/data.sqlite',
}
CHUNK_SIZE = 1024 # for download()
OUTPUT_PATH = 'data.sqlite'
log = getLogger('scraper')
def main():
if 'MORPH_API_KEY' not in environ:
        raise ValueError(
            'Must set MORPH_API_KEY to download scraper data')
set_up_logging(quiet=environ.get('MORPH_QUIET'),
verbose=environ.get('MORPH_VERBOSE'))
input_paths = []
for scraper_id, url in sorted(SCRAPER_DATA.items()):
full_url = '{}?{}'.format(
url, urlencode(dict(key=environ['MORPH_API_KEY'])))
path = scraper_id + '.sqlite'
# don't show API key in output
log.info('downloading {} -> {}'.format(url, path))
download(full_url, path)
input_paths.append(path)
run(force_rebuild_scratch=True,
input_db_paths=input_paths,
output_db_path=OUTPUT_PATH)
def download(url, path):
with open(path, 'wb') as f:
with urlopen(url) as src:
while True:
chunk = src.read(CHUNK_SIZE)
if not chunk:
break
f.write(chunk)
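
# download() streams the response in CHUNK_SIZE (1 KiB) pieces so that large
# scraper databases never have to fit in memory at once.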
if __name__ == '__main__':
main()
|
{
"content_hash": "fb52d4bcee50c4f3d2d3a969f07271ba",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 78,
"avg_line_length": 27.029850746268657,
"alnum_prop": 0.6234124792932082,
"repo_name": "spendright/msd",
"id": "323bed859fb29b1614fa19cb1d0100c1b26a557e",
"size": "2441",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scraper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "134680"
}
],
"symlink_target": ""
}
|
import datetime
import hashlib
import os
import socket
import tempfile
import uuid
import mock
from oslo.config import cfg
import paramiko
import six
import cinder
from cinder.brick.initiator import connector
from cinder.brick.initiator import linuxfc
from cinder import exception
from cinder.openstack.common import processutils as putils
from cinder.openstack.common import timeutils
from cinder import test
from cinder import utils
CONF = cfg.CONF
def _get_local_mock_open(fake_data='abcd efgh'):
mock_context_manager = mock.Mock()
mock_context_manager.__enter__ = mock.Mock(
return_value=six.StringIO(fake_data))
mock_context_manager.__exit__ = mock.Mock(return_value=False)
return mock_context_manager
class ExecuteTestCase(test.TestCase):
def test_retry_on_failure(self):
fd, tmpfilename = tempfile.mkstemp()
_, tmpfilename2 = tempfile.mkstemp()
try:
fp = os.fdopen(fd, 'w+')
fp.write('''#!/bin/sh
# If stdin fails to get passed during one of the runs, make a note.
if ! grep -q foo
then
echo 'failure' > "$1"
fi
# If stdin has failed to get passed during this or a previous run, exit early.
if grep failure "$1"
then
exit 1
fi
runs="$(cat $1)"
if [ -z "$runs" ]
then
runs=0
fi
runs=$(($runs + 1))
echo $runs > "$1"
exit 1
''')
fp.close()
os.chmod(tmpfilename, 0o755)
self.assertRaises(putils.ProcessExecutionError,
utils.execute,
tmpfilename, tmpfilename2, attempts=10,
process_input='foo',
delay_on_retry=False)
fp = open(tmpfilename2, 'r+')
runs = fp.read()
fp.close()
self.assertNotEqual(runs.strip(), 'failure', 'stdin did not '
'always get passed '
'correctly')
runs = int(runs.strip())
self.assertEqual(runs, 10, 'Ran %d times instead of 10.' % (runs,))
finally:
os.unlink(tmpfilename)
os.unlink(tmpfilename2)
def test_unknown_kwargs_raises_error(self):
self.assertRaises(putils.UnknownArgumentError,
utils.execute,
'/usr/bin/env', 'true',
this_is_not_a_valid_kwarg=True)
def test_check_exit_code_boolean(self):
utils.execute('/usr/bin/env', 'false', check_exit_code=False)
self.assertRaises(putils.ProcessExecutionError,
utils.execute,
'/usr/bin/env', 'false', check_exit_code=True)
def test_no_retry_on_success(self):
fd, tmpfilename = tempfile.mkstemp()
_, tmpfilename2 = tempfile.mkstemp()
try:
fp = os.fdopen(fd, 'w+')
fp.write('''#!/bin/sh
# If we've already run, bail out.
grep -q foo "$1" && exit 1
# Mark that we've run before.
echo foo > "$1"
# Check that stdin gets passed correctly.
grep foo
''')
fp.close()
os.chmod(tmpfilename, 0o755)
utils.execute(tmpfilename,
tmpfilename2,
process_input='foo',
attempts=2)
finally:
os.unlink(tmpfilename)
os.unlink(tmpfilename2)
class GetFromPathTestCase(test.TestCase):
def test_tolerates_nones(self):
f = utils.get_from_path
input = []
self.assertEqual([], f(input, "a"))
self.assertEqual([], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [None]
self.assertEqual([], f(input, "a"))
self.assertEqual([], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': None}]
self.assertEqual([], f(input, "a"))
self.assertEqual([], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': {'b': None}}]
self.assertEqual([{'b': None}], f(input, "a"))
self.assertEqual([], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': {'b': {'c': None}}}]
self.assertEqual([{'b': {'c': None}}], f(input, "a"))
self.assertEqual([{'c': None}], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': {'b': {'c': None}}}, {'a': None}]
self.assertEqual([{'b': {'c': None}}], f(input, "a"))
self.assertEqual([{'c': None}], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': {'b': {'c': None}}}, {'a': {'b': None}}]
self.assertEqual([{'b': {'c': None}}, {'b': None}], f(input, "a"))
self.assertEqual([{'c': None}], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
def test_does_select(self):
f = utils.get_from_path
input = [{'a': 'a_1'}]
self.assertEqual(['a_1'], f(input, "a"))
self.assertEqual([], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': {'b': 'b_1'}}]
self.assertEqual([{'b': 'b_1'}], f(input, "a"))
self.assertEqual(['b_1'], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': {'b': {'c': 'c_1'}}}]
self.assertEqual([{'b': {'c': 'c_1'}}], f(input, "a"))
self.assertEqual([{'c': 'c_1'}], f(input, "a/b"))
self.assertEqual(['c_1'], f(input, "a/b/c"))
input = [{'a': {'b': {'c': 'c_1'}}}, {'a': None}]
self.assertEqual([{'b': {'c': 'c_1'}}], f(input, "a"))
self.assertEqual([{'c': 'c_1'}], f(input, "a/b"))
self.assertEqual(['c_1'], f(input, "a/b/c"))
input = [{'a': {'b': {'c': 'c_1'}}},
{'a': {'b': None}}]
self.assertEqual([{'b': {'c': 'c_1'}}, {'b': None}], f(input, "a"))
self.assertEqual([{'c': 'c_1'}], f(input, "a/b"))
self.assertEqual(['c_1'], f(input, "a/b/c"))
input = [{'a': {'b': {'c': 'c_1'}}},
{'a': {'b': {'c': 'c_2'}}}]
self.assertEqual([{'b': {'c': 'c_1'}}, {'b': {'c': 'c_2'}}],
f(input, "a"))
self.assertEqual([{'c': 'c_1'}, {'c': 'c_2'}], f(input, "a/b"))
self.assertEqual(['c_1', 'c_2'], f(input, "a/b/c"))
self.assertEqual([], f(input, "a/b/c/d"))
self.assertEqual([], f(input, "c/a/b/d"))
self.assertEqual([], f(input, "i/r/t"))
def test_flattens_lists(self):
f = utils.get_from_path
input = [{'a': [1, 2, 3]}]
self.assertEqual([1, 2, 3], f(input, "a"))
self.assertEqual([], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': {'b': [1, 2, 3]}}]
self.assertEqual([{'b': [1, 2, 3]}], f(input, "a"))
self.assertEqual([1, 2, 3], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': {'b': [1, 2, 3]}}, {'a': {'b': [4, 5, 6]}}]
self.assertEqual([1, 2, 3, 4, 5, 6], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': [{'b': [1, 2, 3]}, {'b': [4, 5, 6]}]}]
self.assertEqual([1, 2, 3, 4, 5, 6], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = [{'a': [1, 2, {'b': 'b_1'}]}]
self.assertEqual([1, 2, {'b': 'b_1'}], f(input, "a"))
self.assertEqual(['b_1'], f(input, "a/b"))
def test_bad_xpath(self):
f = utils.get_from_path
self.assertRaises(exception.Error, f, [], None)
self.assertRaises(exception.Error, f, [], "")
self.assertRaises(exception.Error, f, [], "/")
self.assertRaises(exception.Error, f, [], "/a")
self.assertRaises(exception.Error, f, [], "/a/")
self.assertRaises(exception.Error, f, [], "//")
self.assertRaises(exception.Error, f, [], "//a")
self.assertRaises(exception.Error, f, [], "a//a")
self.assertRaises(exception.Error, f, [], "a//a/")
self.assertRaises(exception.Error, f, [], "a/a/")
def test_real_failure1(self):
# Real world failure case...
# We weren't coping when the input was a Dictionary instead of a List
# This led to test_accepts_dictionaries
f = utils.get_from_path
inst = {'fixed_ip': {'floating_ips': [{'address': '1.2.3.4'}],
'address': '192.168.0.3'},
'hostname': ''}
private_ips = f(inst, 'fixed_ip/address')
public_ips = f(inst, 'fixed_ip/floating_ips/address')
self.assertEqual(['192.168.0.3'], private_ips)
self.assertEqual(['1.2.3.4'], public_ips)
def test_accepts_dictionaries(self):
f = utils.get_from_path
input = {'a': [1, 2, 3]}
self.assertEqual([1, 2, 3], f(input, "a"))
self.assertEqual([], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = {'a': {'b': [1, 2, 3]}}
self.assertEqual([{'b': [1, 2, 3]}], f(input, "a"))
self.assertEqual([1, 2, 3], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = {'a': [{'b': [1, 2, 3]}, {'b': [4, 5, 6]}]}
self.assertEqual([1, 2, 3, 4, 5, 6], f(input, "a/b"))
self.assertEqual([], f(input, "a/b/c"))
input = {'a': [1, 2, {'b': 'b_1'}]}
self.assertEqual([1, 2, {'b': 'b_1'}], f(input, "a"))
self.assertEqual(['b_1'], f(input, "a/b"))
class GenericUtilsTestCase(test.TestCase):
@mock.patch('os.path.exists', return_value=True)
def test_find_config(self, mock_exists):
path = '/etc/cinder/cinder.conf'
cfgpath = utils.find_config(path)
self.assertEqual(path, cfgpath)
mock_exists.return_value = False
self.assertRaises(exception.ConfigNotFound,
utils.find_config,
path)
def test_as_int(self):
test_obj_int = '2'
test_obj_float = '2.2'
for obj in [test_obj_int, test_obj_float]:
self.assertEqual(2, utils.as_int(obj))
obj = 'not_a_number'
self.assertEqual(obj, utils.as_int(obj))
self.assertRaises(TypeError,
utils.as_int,
obj,
quiet=False)
def test_check_exclusive_options(self):
utils.check_exclusive_options()
utils.check_exclusive_options(something=None,
pretty_keys=True,
unit_test=True)
self.assertRaises(exception.InvalidInput,
utils.check_exclusive_options,
test=True,
unit=False,
pretty_keys=True)
self.assertRaises(exception.InvalidInput,
utils.check_exclusive_options,
test=True,
unit=False,
pretty_keys=False)
    def test_require_driver_initialized(self):
driver = mock.Mock()
driver.initialized = True
utils.require_driver_initialized(driver)
driver.initialized = False
self.assertRaises(exception.DriverNotInitialized,
utils.require_driver_initialized,
driver)
def test_hostname_unicode_sanitization(self):
hostname = u"\u7684.test.example.com"
self.assertEqual("test.example.com",
utils.sanitize_hostname(hostname))
def test_hostname_sanitize_periods(self):
hostname = "....test.example.com..."
self.assertEqual("test.example.com",
utils.sanitize_hostname(hostname))
def test_hostname_sanitize_dashes(self):
hostname = "----test.example.com---"
self.assertEqual("test.example.com",
utils.sanitize_hostname(hostname))
def test_hostname_sanitize_characters(self):
hostname = "(#@&$!(@*--#&91)(__=+--test-host.example!!.com-0+"
self.assertEqual("91----test-host.example.com-0",
utils.sanitize_hostname(hostname))
def test_hostname_translate(self):
hostname = "<}\x1fh\x10e\x08l\x02l\x05o\x12!{>"
self.assertEqual("hello", utils.sanitize_hostname(hostname))
def test_generate_glance_url(self):
generated_url = utils.generate_glance_url()
actual_url = "http://%s:%d" % (CONF.glance_host,
CONF.glance_port)
self.assertEqual(generated_url, actual_url)
@mock.patch('__builtin__.open')
@mock.patch('os.path.getmtime', return_value=1)
def test_read_cached_file(self, mock_mtime, mock_open):
fake_file = "/this/is/a/fake"
cache_data = {"data": 1123, "mtime": 2}
mock_open.return_value = _get_local_mock_open()
data = utils.read_cached_file(fake_file, cache_data)
self.assertEqual(cache_data["data"], data)
mock_open.assert_called_once_with(fake_file)
@mock.patch('__builtin__.open')
@mock.patch('os.path.getmtime', return_value=1)
def test_read_modified_cached_file(self, mock_mtime, mock_open):
fake_data = 'lorem ipsum'
fake_file = "/this/is/a/fake"
mock_open.return_value = _get_local_mock_open(fake_data)
cache_data = {"data": 'original data', "mtime": 2}
mock_reload = mock.Mock()
data = utils.read_cached_file(fake_file,
cache_data,
reload_func=mock_reload)
self.assertEqual(data, fake_data)
mock_reload.assert_called_once_with(fake_data)
mock_open.assert_called_once_with(fake_file)
def test_generate_password(self):
password = utils.generate_password()
self.assertTrue([c for c in password if c in '0123456789'])
self.assertTrue([c for c in password
if c in 'abcdefghijklmnopqrstuvwxyz'])
self.assertTrue([c for c in password
if c in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'])
def test_read_file_as_root(self):
def fake_execute(*args, **kwargs):
if args[1] == 'bad':
raise putils.ProcessExecutionError
return 'fakecontents', None
self.stubs.Set(utils, 'execute', fake_execute)
contents = utils.read_file_as_root('good')
self.assertEqual(contents, 'fakecontents')
self.assertRaises(exception.FileNotFound,
utils.read_file_as_root, 'bad')
def test_temporary_chown(self):
def fake_execute(*args, **kwargs):
if args[0] == 'chown':
fake_execute.uid = args[1]
self.stubs.Set(utils, 'execute', fake_execute)
with tempfile.NamedTemporaryFile() as f:
with utils.temporary_chown(f.name, owner_uid=2):
self.assertEqual(fake_execute.uid, 2)
self.assertEqual(fake_execute.uid, os.getuid())
@mock.patch('cinder.openstack.common.timeutils.utcnow')
def test_service_is_up(self, mock_utcnow):
fts_func = datetime.datetime.fromtimestamp
fake_now = 1000
down_time = 5
self.flags(service_down_time=down_time)
mock_utcnow.return_value = fts_func(fake_now)
# Up (equal)
service = {'updated_at': fts_func(fake_now - down_time),
'created_at': fts_func(fake_now - down_time)}
result = utils.service_is_up(service)
self.assertTrue(result)
# Up
service = {'updated_at': fts_func(fake_now - down_time + 1),
'created_at': fts_func(fake_now - down_time + 1)}
result = utils.service_is_up(service)
self.assertTrue(result)
# Down
service = {'updated_at': fts_func(fake_now - down_time - 1),
'created_at': fts_func(fake_now - down_time - 1)}
result = utils.service_is_up(service)
self.assertFalse(result)
def test_safe_parse_xml(self):
normal_body = ('<?xml version="1.0" ?>'
'<foo><bar><v1>hey</v1><v2>there</v2></bar></foo>')
def killer_body():
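            # "billion laughs"-style entity expansion: each entity expands
            # the previous one tenfold, so naive parsing would blow up memory.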
return (("""<!DOCTYPE x [
<!ENTITY a "%(a)s">
<!ENTITY b "%(b)s">
<!ENTITY c "%(c)s">]>
<foo>
<bar>
<v1>%(d)s</v1>
</bar>
</foo>""") % {
'a': 'A' * 10,
'b': '&a;' * 10,
'c': '&b;' * 10,
'd': '&c;' * 9999,
}).strip()
dom = utils.safe_minidom_parse_string(normal_body)
# Some versions of minidom inject extra newlines so we ignore them
result = str(dom.toxml()).replace('\n', '')
self.assertEqual(normal_body, result)
self.assertRaises(ValueError,
utils.safe_minidom_parse_string,
killer_body())
def test_xhtml_escape(self):
self.assertEqual('"foo"', utils.xhtml_escape('"foo"'))
self.assertEqual(''foo'', utils.xhtml_escape("'foo'"))
def test_hash_file(self):
data = 'Mary had a little lamb, its fleece as white as snow'
flo = six.StringIO(data)
h1 = utils.hash_file(flo)
h2 = hashlib.sha1(data).hexdigest()
self.assertEqual(h1, h2)
def test_check_ssh_injection(self):
cmd_list = ['ssh', '-D', 'my_name@name_of_remote_computer']
self.assertIsNone(utils.check_ssh_injection(cmd_list))
cmd_list = ['echo', '"quoted arg with space"']
self.assertIsNone(utils.check_ssh_injection(cmd_list))
cmd_list = ['echo', "'quoted arg with space'"]
self.assertIsNone(utils.check_ssh_injection(cmd_list))
def test_check_ssh_injection_on_error(self):
with_unquoted_space = ['ssh', 'my_name@ name_of_remote_computer']
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
with_unquoted_space)
with_danger_char = ['||', 'my_name@name_of_remote_computer']
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
with_danger_char)
with_special = ['cmd', 'virus;ls']
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
with_special)
quoted_with_unescaped = ['cmd', '"arg\"withunescaped"']
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
quoted_with_unescaped)
bad_before_quotes = ['cmd', 'virus;"quoted argument"']
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
bad_before_quotes)
bad_after_quotes = ['echo', '"quoted argument";rm -rf']
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
bad_after_quotes)
bad_within_quotes = ['echo', "'quoted argument `rm -rf`'"]
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
bad_within_quotes)
with_multiple_quotes = ['echo', '"quoted";virus;"quoted"']
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
with_multiple_quotes)
with_multiple_quotes = ['echo', '"quoted";virus;\'quoted\'']
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
with_multiple_quotes)
@mock.patch('paramiko.SSHClient')
def test_create_channel(self, mock_client):
test_width = 600
test_height = 800
mock_channel = mock.Mock()
mock_client.invoke_shell.return_value = mock_channel
utils.create_channel(mock_client, test_width, test_height)
mock_client.invoke_shell.assert_called_once()
mock_channel.resize_pty.assert_called_once_with(test_width,
test_height)
@mock.patch('os.stat')
def test_get_file_mode(self, mock_stat):
class stat_result:
st_mode = 0o777
st_gid = 33333
test_file = '/var/tmp/made_up_file'
mock_stat.return_value = stat_result
mode = utils.get_file_mode(test_file)
self.assertEqual(mode, 0o777)
mock_stat.assert_called_once_with(test_file)
@mock.patch('os.stat')
def test_get_file_gid(self, mock_stat):
class stat_result:
st_mode = 0o777
st_gid = 33333
test_file = '/var/tmp/made_up_file'
mock_stat.return_value = stat_result
gid = utils.get_file_gid(test_file)
self.assertEqual(gid, 33333)
mock_stat.assert_called_once_with(test_file)
class MonkeyPatchTestCase(test.TestCase):
"""Unit test for utils.monkey_patch()."""
def setUp(self):
super(MonkeyPatchTestCase, self).setUp()
self.example_package = 'cinder.tests.monkey_patch_example.'
self.flags(
monkey_patch=True,
monkey_patch_modules=[self.example_package + 'example_a' + ':'
+ self.example_package
+ 'example_decorator'])
def test_monkey_patch(self):
utils.monkey_patch()
cinder.tests.monkey_patch_example.CALLED_FUNCTION = []
from cinder.tests.monkey_patch_example import example_a
from cinder.tests.monkey_patch_example import example_b
self.assertEqual('Example function', example_a.example_function_a())
exampleA = example_a.ExampleClassA()
exampleA.example_method()
ret_a = exampleA.example_method_add(3, 5)
self.assertEqual(ret_a, 8)
self.assertEqual('Example function', example_b.example_function_b())
exampleB = example_b.ExampleClassB()
exampleB.example_method()
ret_b = exampleB.example_method_add(3, 5)
self.assertEqual(ret_b, 8)
package_a = self.example_package + 'example_a.'
self.assertTrue(package_a + 'example_function_a'
in cinder.tests.monkey_patch_example.CALLED_FUNCTION)
self.assertTrue(package_a + 'ExampleClassA.example_method'
in cinder.tests.monkey_patch_example.CALLED_FUNCTION)
self.assertTrue(package_a + 'ExampleClassA.example_method_add'
in cinder.tests.monkey_patch_example.CALLED_FUNCTION)
package_b = self.example_package + 'example_b.'
self.assertFalse(package_b + 'example_function_b'
in cinder.tests.monkey_patch_example.CALLED_FUNCTION)
self.assertFalse(package_b + 'ExampleClassB.example_method'
in cinder.tests.monkey_patch_example.CALLED_FUNCTION)
self.assertFalse(package_b + 'ExampleClassB.example_method_add'
in cinder.tests.monkey_patch_example.CALLED_FUNCTION)
class AuditPeriodTest(test.TestCase):
def setUp(self):
super(AuditPeriodTest, self).setUp()
        # a fairly random time to test with
test_time = datetime.datetime(second=23,
minute=12,
hour=8,
day=5,
month=3,
year=2012)
patcher = mock.patch.object(timeutils, 'utcnow')
self.addCleanup(patcher.stop)
self.mock_utcnow = patcher.start()
self.mock_utcnow.return_value = test_time
def test_hour(self):
begin, end = utils.last_completed_audit_period(unit='hour')
self.assertEqual(begin,
datetime.datetime(hour=7,
day=5,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(hour=8,
day=5,
month=3,
year=2012))
def test_hour_with_offset_before_current(self):
begin, end = utils.last_completed_audit_period(unit='hour@10')
self.assertEqual(begin, datetime.datetime(minute=10,
hour=7,
day=5,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(minute=10,
hour=8,
day=5,
month=3,
year=2012))
def test_hour_with_offset_after_current(self):
begin, end = utils.last_completed_audit_period(unit='hour@30')
self.assertEqual(begin, datetime.datetime(minute=30,
hour=6,
day=5,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(minute=30,
hour=7,
day=5,
month=3,
year=2012))
def test_day(self):
begin, end = utils.last_completed_audit_period(unit='day')
self.assertEqual(begin, datetime.datetime(day=4,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(day=5,
month=3,
year=2012))
def test_day_with_offset_before_current(self):
begin, end = utils.last_completed_audit_period(unit='day@6')
self.assertEqual(begin, datetime.datetime(hour=6,
day=4,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(hour=6,
day=5,
month=3,
year=2012))
def test_day_with_offset_after_current(self):
begin, end = utils.last_completed_audit_period(unit='day@10')
self.assertEqual(begin, datetime.datetime(hour=10,
day=3,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(hour=10,
day=4,
month=3,
year=2012))
def test_month(self):
begin, end = utils.last_completed_audit_period(unit='month')
self.assertEqual(begin, datetime.datetime(day=1,
month=2,
year=2012))
self.assertEqual(end, datetime.datetime(day=1,
month=3,
year=2012))
def test_month_with_offset_before_current(self):
begin, end = utils.last_completed_audit_period(unit='month@2')
self.assertEqual(begin, datetime.datetime(day=2,
month=2,
year=2012))
self.assertEqual(end, datetime.datetime(day=2,
month=3,
year=2012))
def test_month_with_offset_after_current(self):
begin, end = utils.last_completed_audit_period(unit='month@15')
self.assertEqual(begin, datetime.datetime(day=15,
month=1,
year=2012))
self.assertEqual(end, datetime.datetime(day=15,
month=2,
year=2012))
def test_year(self):
begin, end = utils.last_completed_audit_period(unit='year')
self.assertEqual(begin, datetime.datetime(day=1,
month=1,
year=2011))
self.assertEqual(end, datetime.datetime(day=1,
month=1,
year=2012))
def test_year_with_offset_before_current(self):
begin, end = utils.last_completed_audit_period(unit='year@2')
self.assertEqual(begin, datetime.datetime(day=1,
month=2,
year=2011))
self.assertEqual(end, datetime.datetime(day=1,
month=2,
year=2012))
def test_year_with_offset_after_current(self):
begin, end = utils.last_completed_audit_period(unit='year@6')
self.assertEqual(begin, datetime.datetime(day=1,
month=6,
year=2010))
self.assertEqual(end, datetime.datetime(day=1,
month=6,
year=2011))
class FakeSSHClient(object):
def __init__(self):
self.id = uuid.uuid4()
self.transport = FakeTransport()
def set_missing_host_key_policy(self, policy):
pass
def connect(self, ip, port=22, username=None, password=None,
pkey=None, timeout=10):
pass
def get_transport(self):
return self.transport
def close(self):
pass
def __call__(self, *args, **kwargs):
pass
class FakeSock(object):
def settimeout(self, timeout):
pass
class FakeTransport(object):
def __init__(self):
self.active = True
self.sock = FakeSock()
def set_keepalive(self, timeout):
pass
def is_active(self):
return self.active
class SSHPoolTestCase(test.TestCase):
"""Unit test for SSH Connection Pool."""
@mock.patch('paramiko.RSAKey.from_private_key_file')
@mock.patch('paramiko.SSHClient')
def test_single_ssh_connect(self, mock_sshclient, mock_pkey):
mock_sshclient.return_value = FakeSSHClient()
# create with password
sshpool = utils.SSHPool("127.0.0.1", 22, 10,
"test",
password="test",
min_size=1,
max_size=1)
with sshpool.item() as ssh:
first_id = ssh.id
with sshpool.item() as ssh:
second_id = ssh.id
self.assertEqual(first_id, second_id)
mock_sshclient.connect.assert_called_once()
# create with private key
sshpool = utils.SSHPool("127.0.0.1", 22, 10,
"test",
privatekey="test",
min_size=1,
max_size=1)
mock_sshclient.connect.assert_called_once()
# attempt to create with no password or private key
self.assertRaises(paramiko.SSHException,
utils.SSHPool,
"127.0.0.1", 22, 10,
"test",
min_size=1,
max_size=1)
@mock.patch('paramiko.SSHClient')
    def test_closed_reopened_ssh_connections(self, mock_sshclient):
        mock_sshclient.return_value = FakeSSHClient()
sshpool = utils.SSHPool("127.0.0.1", 22, 10,
"test",
password="test",
min_size=1,
max_size=4)
with sshpool.item() as ssh:
mock_sshclient.reset_mock()
first_id = ssh.id
with sshpool.item() as ssh:
second_id = ssh.id
ssh.get_transport().active = False
sshpool.remove(ssh)
self.assertEqual(first_id, second_id)
# create a new client
mock_sshclient.return_value = FakeSSHClient()
with sshpool.item() as ssh:
third_id = ssh.id
self.assertNotEqual(first_id, third_id)
class BrickUtils(test.TestCase):
"""Unit test to test the brick utility
wrapper functions.
"""
def test_brick_get_connector_properties(self):
self.mox.StubOutWithMock(socket, 'gethostname')
socket.gethostname().AndReturn('fakehost')
self.mox.StubOutWithMock(connector.ISCSIConnector, 'get_initiator')
connector.ISCSIConnector.get_initiator().AndReturn('fakeinitiator')
self.mox.StubOutWithMock(linuxfc.LinuxFibreChannel, 'get_fc_wwpns')
linuxfc.LinuxFibreChannel.get_fc_wwpns().AndReturn(None)
self.mox.StubOutWithMock(linuxfc.LinuxFibreChannel, 'get_fc_wwnns')
linuxfc.LinuxFibreChannel.get_fc_wwnns().AndReturn(None)
props = {'initiator': 'fakeinitiator',
'host': 'fakehost',
'ip': CONF.my_ip,
}
self.mox.ReplayAll()
props_actual = utils.brick_get_connector_properties()
self.assertEqual(props, props_actual)
self.mox.VerifyAll()
def test_brick_get_connector(self):
root_helper = utils.get_root_helper()
self.mox.StubOutClassWithMocks(connector, 'ISCSIConnector')
connector.ISCSIConnector(execute=putils.execute,
driver=None,
root_helper=root_helper,
use_multipath=False,
device_scan_attempts=3)
self.mox.StubOutClassWithMocks(connector, 'FibreChannelConnector')
connector.FibreChannelConnector(execute=putils.execute,
driver=None,
root_helper=root_helper,
use_multipath=False,
device_scan_attempts=3)
self.mox.StubOutClassWithMocks(connector, 'AoEConnector')
connector.AoEConnector(execute=putils.execute,
driver=None,
root_helper=root_helper,
device_scan_attempts=3)
self.mox.StubOutClassWithMocks(connector, 'LocalConnector')
connector.LocalConnector(execute=putils.execute,
driver=None,
root_helper=root_helper,
device_scan_attempts=3)
self.mox.ReplayAll()
utils.brick_get_connector('iscsi')
utils.brick_get_connector('fibre_channel')
utils.brick_get_connector('aoe')
utils.brick_get_connector('local')
self.mox.VerifyAll()
class StringLengthTestCase(test.TestCase):
def test_check_string_length(self):
self.assertIsNone(utils.check_string_length(
'test', 'name', max_length=255))
self.assertRaises(exception.InvalidInput,
utils.check_string_length,
11, 'name', max_length=255)
self.assertRaises(exception.InvalidInput,
utils.check_string_length,
'', 'name', min_length=1)
self.assertRaises(exception.InvalidInput,
utils.check_string_length,
'a' * 256, 'name', max_length=255)
|
{
"content_hash": "81c1d2f1268b3e6d75baa45714ebdade",
"timestamp": "",
"source": "github",
"line_count": 934,
"max_line_length": 79,
"avg_line_length": 39.44539614561028,
"alnum_prop": 0.4997557135877531,
"repo_name": "spring-week-topos/cinder-week",
"id": "7c5f27eff262a85ae64cfd01e55a68689ccbd7c6",
"size": "37459",
"binary": false,
"copies": "2",
"ref": "refs/heads/spring-week",
"path": "cinder/tests/test_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6134883"
},
{
"name": "Shell",
"bytes": "8998"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from .. import Tags, Warning, register
def add_session_cookie_message(message):
return message + (
" Using a secure-only session cookie makes it more difficult for "
"network traffic sniffers to hijack user sessions."
)
W010 = Warning(
add_session_cookie_message(
"You have 'django.contrib.sessions' in your INSTALLED_APPS, "
"but you have not set SESSION_COOKIE_SECURE to True."
),
id='security.W010',
)
W011 = Warning(
add_session_cookie_message(
"You have 'django.contrib.sessions.middleware.SessionMiddleware' "
"in your MIDDLEWARE_CLASSES, but you have not set "
"SESSION_COOKIE_SECURE to True."
),
id='security.W011',
)
W012 = Warning(
add_session_cookie_message("SESSION_COOKIE_SECURE is not set to True."),
id='security.W012',
)
def add_httponly_message(message):
return message + (
" Using an HttpOnly session cookie makes it more difficult for "
"cross-site scripting attacks to hijack user sessions."
)
W013 = Warning(
add_httponly_message(
"You have 'django.contrib.sessions' in your INSTALLED_APPS, "
"but you have not set SESSION_COOKIE_HTTPONLY to True.",
),
id='security.W013',
)
W014 = Warning(
add_httponly_message(
"You have 'django.contrib.sessions.middleware.SessionMiddleware' "
"in your MIDDLEWARE_CLASSES, but you have not set "
"SESSION_COOKIE_HTTPONLY to True."
),
id='security.W014',
)
W015 = Warning(
add_httponly_message("SESSION_COOKIE_HTTPONLY is not set to True."),
id='security.W015',
)
@register(Tags.security, deploy=True)
def check_session_cookie_secure(app_configs, **kwargs):
errors = []
if not settings.SESSION_COOKIE_SECURE:
if _session_app():
errors.append(W010)
if _session_middleware():
errors.append(W011)
if len(errors) > 1:
errors = [W012]
return errors
@register(Tags.security, deploy=True)
def check_session_cookie_httponly(app_configs, **kwargs):
errors = []
if not settings.SESSION_COOKIE_HTTPONLY:
if _session_app():
errors.append(W013)
if _session_middleware():
errors.append(W014)
if len(errors) > 1:
errors = [W015]
return errors
def _session_middleware():
return ("django.contrib.sessions.middleware.SessionMiddleware" in
settings.MIDDLEWARE_CLASSES)
def _session_app():
return "django.contrib.sessions" in settings.INSTALLED_APPS
|
{
"content_hash": "32acd71111161c3f77a3c01e13b9a7fb",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 76,
"avg_line_length": 27.75257731958763,
"alnum_prop": 0.6229569093610698,
"repo_name": "yephper/django",
"id": "9d7b5385e1bebc82ae1b042a80714274667200f3",
"size": "2692",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "django/core/checks/security/sessions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "1538"
},
{
"name": "CSS",
"bytes": "1697381"
},
{
"name": "HTML",
"bytes": "390772"
},
{
"name": "Java",
"bytes": "588"
},
{
"name": "JavaScript",
"bytes": "3172126"
},
{
"name": "Makefile",
"bytes": "134"
},
{
"name": "PHP",
"bytes": "19336"
},
{
"name": "Python",
"bytes": "13365273"
},
{
"name": "Shell",
"bytes": "837"
},
{
"name": "Smarty",
"bytes": "133"
}
],
"symlink_target": ""
}
|
"""Management script."""
import os
from glob import glob
from subprocess import call
from flask_migrate import MigrateCommand
from flask_script import Command, Manager, Option, Server, Shell
from flask_script.commands import Clean, ShowUrls
from chamberlain.app import create_app
from chamberlain.database import db
from chamberlain.settings import DevConfig, ProdConfig
from chamberlain.user.models import User
CONFIG = ProdConfig if os.environ.get('CHAMBERLAIN_ENV') == 'prod' else DevConfig
HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')
app = create_app(CONFIG)
manager = Manager(app)
def _make_context():
"""Return context dict for a shell session so you can access app, db, and the User model by default."""
return {'app': app, 'db': db, 'User': User}
@manager.command
def test():
"""Run the tests."""
import pytest
exit_code = pytest.main([TEST_PATH, '--verbose'])
return exit_code
class Lint(Command):
"""Lint and check code style with flake8 and isort."""
def get_options(self):
"""Command line options."""
return (
Option('-f', '--fix-imports', action='store_true', dest='fix_imports', default=False,
help='Fix imports using isort, before linting'),
)
def run(self, fix_imports):
"""Run command."""
skip = ['requirements']
root_files = glob('*.py')
root_directories = [name for name in next(os.walk('.'))[1] if not name.startswith('.')]
files_and_directories = [arg for arg in root_files + root_directories if arg not in skip]
def execute_tool(description, *args):
"""Execute a checking tool with its arguments."""
command_line = list(args) + files_and_directories
print('{}: {}'.format(description, ' '.join(command_line)))
rv = call(command_line)
            if rv != 0:
exit(rv)
if fix_imports:
execute_tool('Fixing import order', 'isort', '-rc')
execute_tool('Checking code style', 'flake8')
manager.add_command('server', Server())
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)
manager.add_command('urls', ShowUrls())
manager.add_command('clean', Clean())
manager.add_command('lint', Lint())
if __name__ == '__main__':
manager.run()
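# Typical invocations (illustrative, using the commands registered above):
#   python manage.py test                 # run the pytest suite under tests/
#   python manage.py lint --fix-imports   # sort imports with isort, then flake8
#   python manage.py db migrate           # Flask-Migrate database migrations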
|
{
"content_hash": "09174d24641637bd73077ec7381d8550",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 107,
"avg_line_length": 32.513513513513516,
"alnum_prop": 0.6438071487946799,
"repo_name": "sean-abbott/chamberlain",
"id": "e22220f61a0606ef37cf8094ddf16066736704b8",
"size": "2452",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1170"
},
{
"name": "HTML",
"bytes": "8689"
},
{
"name": "JavaScript",
"bytes": "240856"
},
{
"name": "Python",
"bytes": "27905"
}
],
"symlink_target": ""
}
|
import sys
import json
import ctypes
import platform
from datetime import datetime
from vtEngine import MainEngine
from uiMainWindow import *
#from simple_monitor import *
from setup_logger import setup_logger
setup_logger(filename='logs/vnpy_{0}.log'.format(datetime.now().strftime('%m%d_%H%M')), debug=False)
# ----------------------------------------------------------------------
def main():
"""主程序入口"""
    # Reload the sys module and set the default string encoding to utf8
reload(sys)
sys.setdefaultencoding('utf8')
    # Set the Windows taskbar icon
    if 'Windows' in platform.uname():
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID('vn.trader')
    # Initialize the Qt application object
app = QtGui.QApplication(sys.argv)
app.setWindowIcon(QtGui.QIcon('vnpy.ico'))
app.setFont(BASIC_FONT)
    # Apply the Qt stylesheet (skin) if one is configured
    try:
        f = open("VT_setting.json")
setting = json.load(f)
if setting['darkStyle']:
import qdarkstyle
app.setStyleSheet(qdarkstyle.load_stylesheet(pyside=False))
except:
pass
    # Initialize the main engine and the main window
mainEngine = MainEngine()
mainWindow = MainWindow(mainEngine, mainEngine.eventEngine)
mainWindow.showMaximized()
    # Start the Qt event loop in the main thread
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
{
"content_hash": "a3fb1926be700bd95ef3fca096affab3",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 100,
"avg_line_length": 25.346938775510203,
"alnum_prop": 0.6111111111111112,
"repo_name": "kanchenxi04/vnpy-app",
"id": "b1b6634c1f921747b86b8d5c283c409992ca91ba",
"size": "1393",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vn.trader/vtMain.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2335116"
},
{
"name": "C++",
"bytes": "4036644"
},
{
"name": "CMake",
"bytes": "7062"
},
{
"name": "Jupyter Notebook",
"bytes": "10948"
},
{
"name": "Objective-C",
"bytes": "7316"
},
{
"name": "Python",
"bytes": "5752642"
},
{
"name": "R",
"bytes": "1354"
},
{
"name": "Shell",
"bytes": "5174"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function
from collections import OrderedDict
import pytest
import numpy as np
from matplotlib.axes import Axes
from mock import MagicMock, patch
from numpy.testing import assert_array_equal
from glue.core.tests.test_state import clone
from glue.core.tests.util import simple_session
from glue.core.subset import SubsetState
from glue.core import Data
from glue import custom_viewer
from glue.app.qt import GlueApplication
from glue.app.qt.tests.test_application import check_clone_app
from ..custom_viewer import (FormElement, NumberElement,
ChoiceElement, CustomViewer,
CustomSubsetState, AttributeInfo,
FloatElement, TextBoxElement, SettingsOracle,
MissingSettingError, FrozenSettings)
def _make_widget(viewer):
s = simple_session()
return viewer._widget_cls(s)
viewer = custom_viewer('Testing Custom Viewer',
a=(0, 100),
b='att',
c='att(x)',
d=True,
e=False,
f=['a', 'b', 'c'],
g=OrderedDict(a=1, b=2, c=3),
h=64
)
setup = MagicMock()
settings_changed = MagicMock()
plot_subset = MagicMock()
plot_data = MagicMock()
make_selector = MagicMock()
make_selector.return_value = MagicMock(spec=SubsetState)
make_selector().copy.return_value = MagicMock(spec=SubsetState)
make_selector().copy().to_mask.return_value = np.array([False, True, True])
@viewer.setup
def _setup(axes):
setup(axes)
@viewer.plot_data
def _plot_data(axes, a, b, g, h):
plot_data(axes=axes, a=a, b=b, g=g, h=h)
return []
@viewer.plot_subset
def _plot_subset(b, c, d, e, f, style):
plot_subset(b=b, c=c, d=d, e=e, f=f, style=style)
return []
@viewer.settings_changed
def _settings_changed(state):
settings_changed(state=state)
@viewer.make_selector
def _make_selector(roi, c):
make_selector(roi=roi, c=c)
return SubsetState()
def test_custom_classes_dont_share_methods():
"""Regression test for #479"""
a = custom_viewer('a')
b = custom_viewer('b')
assert a._custom_functions is not b._custom_functions
class ViewerSubclass(CustomViewer):
a = (0, 100)
b = 'att'
c = 'att(x)'
d = True
e = False
f = ['a', 'b', 'c']
g = OrderedDict(a=1, b=2, c=3)
h = 64
def setup(self, axes):
return setup(axes)
def plot_data(self, axes, a, b, g, h):
return plot_data(axes=axes, a=a, b=b, g=g, h=h)
def plot_subset(self, b, c, d, e, f, style):
return plot_subset(b=b, c=c, d=d, e=e, f=f, style=style)
def settings_changed(self, state):
return settings_changed(state=state)
def make_selector(self, roi, c):
return make_selector(roi=roi, c=c)
class TestCustomViewer(object):
def setup_class(self):
self.viewer = viewer
def setup_method(self, method):
setup.reset_mock()
settings_changed.reset_mock()
plot_subset.reset_mock()
plot_data.reset_mock()
make_selector.reset_mock()
self.data = Data(x=[1, 2, 3], y=[2, 3, 4])
self.session = simple_session()
self.dc = self.session.data_collection
self.dc.append(self.data)
def teardown_method(self, method):
if hasattr(self, 'w'):
self.w.unregister(self.session.hub)
def build(self):
w = self.viewer._widget_cls(self.session)
w.register_to_hub(self.session.hub)
self.w = w
return w
def test_setup_called_on_init(self):
ct = setup.call_count
self.build()
assert setup.call_count == ct + 1
def test_separate_widgets_have_separate_state(self):
w1 = self.build()
w2 = self.build()
assert w1._coordinator is not w2._coordinator
assert w1._coordinator.state is not w2._coordinator.state
def test_plot_data(self):
w = self.build()
w.add_data(self.data)
a, k = plot_data.call_args
assert isinstance(k['axes'], Axes)
assert set(k.keys()) == set(('axes', 'a', 'b', 'g', 'h'))
assert k['a'] == 50
assert k['g'] == 1
assert k['h'] == 64
def test_plot_subset(self):
w = self.build()
w.add_data(self.data)
self.dc.new_subset_group(subset_state=self.data.id['x'] > 2)
a, k = plot_subset.call_args
assert set(k.keys()) == set(('b', 'c', 'd', 'e', 'f', 'style'))
assert_array_equal(k['b'].values, [3])
assert_array_equal(k['c'].values, [3])
assert k['d']
assert not k['e']
assert k['f'] == 'a'
def test_make_selector(self):
w = self.build()
roi = MagicMock()
w.apply_roi(roi)
a, k = make_selector.call_args
assert set(k.keys()) == set(('roi', 'c'))
assert k['roi'] is roi
def test_settings_change(self):
w = self.build()
ct = settings_changed.call_count
w._coordinator._settings['d'].ui.setChecked(False)
assert settings_changed.call_count == ct + 1
a, k = settings_changed.call_args
assert 'state' in k
def test_register(self):
with patch('glue.viewers.custom.qt.FormElement.register_to_hub') as r:
self.build()
assert r.call_count > 0
def test_component(self):
w = self.build()
w.add_data(self.data)
assert_array_equal(w._coordinator.value('b', layer=self.data).values,
[1, 2, 3])
def test_component_autoupdate(self):
w = self.build()
w.add_data(self.data)
assert w._coordinator._settings['b'].ui.count() == 2
self.data.add_component([10, 20, 30], label='c')
assert w._coordinator._settings['b'].ui.count() == 3
def test_settings_changed_called_on_init(self):
self.build()
assert settings_changed.call_count == 1
def test_selections_enabled(self):
w = self.build()
assert w._coordinator.selections_enabled
assert 'select:rectangle' in w.toolbar.tools
assert 'select:polygon' in w.toolbar.tools
def test_state_save():
app = GlueApplication()
w = app.new_data_viewer(viewer._widget_cls) # noqa
check_clone_app(app)
def test_state_save_with_data_layers():
app = GlueApplication()
dc = app.data_collection
d = Data(x=[1, 2, 3], label='test')
dc.append(d)
w = app.new_data_viewer(viewer._widget_cls)
w.add_data(d)
check_clone_app(app)
class TestCustomSelectMethod(object):
def setup_class(self):
self.viewer = custom_viewer('CustomSelectViewer',
x='att(x)', flip=False)
@self.viewer.select
def select(roi, x, flip):
if flip:
return x <= 1
return x > 1
def setup_method(self, method):
self.data = Data(x=[1, 2, 3], y=[2, 3, 4])
self.session = simple_session()
self.dc = self.session.data_collection
self.dc.append(self.data)
def build(self):
return self.viewer._widget_cls(self.session)
def test_state(self):
w = self.build()
v = w._coordinator
roi = MagicMock()
s = CustomSubsetState(type(v), roi, v.settings())
assert_array_equal(s.to_mask(self.data), [False, True, True])
def test_state_view(self):
w = self.build()
v = w._coordinator
roi = MagicMock()
s = CustomSubsetState(type(v), roi, v.settings())
assert_array_equal(s.to_mask(self.data, view=slice(None, None, 2)),
[False, True])
def test_settings_frozen_at_creation(self):
w = self.build()
v = w._coordinator
roi = MagicMock()
s = CustomSubsetState(type(v), roi, v.settings())
w.flip = True
assert_array_equal(s.to_mask(self.data), [False, True, True])
def test_save_load(self):
w = self.build()
v = w._coordinator
roi = None
s = CustomSubsetState(type(v), roi, v.settings())
s2 = clone(s)
assert_array_equal(s2.to_mask(self.data), [False, True, True])
class TestCustomViewerSubclassForm(TestCustomViewer):
def setup_class(self):
self.viewer = ViewerSubclass
class TestFormElements(object):
def test_number_default_value(self):
e = FormElement.auto((0, 100, 30))
assert e.value() == 30
def test_number_float(self):
e = FormElement.auto((0.0, 1.0, 0.3))
assert e.value() == 0.3
def test_number_list(self):
e = FormElement.auto([0, 10])
assert isinstance(e, NumberElement)
def test_choice_list(self):
e = FormElement.auto(['a', 'b'])
assert isinstance(e, ChoiceElement)
def test_choice_tuple(self):
e = FormElement.auto(('a', 'b'))
assert isinstance(e, ChoiceElement)
def test_float(self):
e = FormElement.auto(1.2)
assert isinstance(e, FloatElement)
e = FormElement.auto(2)
assert isinstance(e, FloatElement)
assert e.value() == 2
def test_textbox(self):
e = FormElement.auto('_str')
assert isinstance(e, TextBoxElement)
assert e.value() == 'str'
def test_recognizes_subsubclasses(self):
class SubClassFormElement(TextBoxElement):
@classmethod
def recognizes(cls, params):
return params == 'specific_class'
e = FormElement.auto('specific_class')
assert isinstance(e, SubClassFormElement)
def test_unrecognized(self):
with pytest.raises(ValueError):
FormElement.auto(None)
class TestAttributeInfo(object):
def setup_method(self, method):
d = Data(x=[1, 2, 3, 4, 5], c=['a', 'b', 'a', 'a', 'b'], label='test')
s = d.new_subset()
s.subset_state = d.id['x'] > 2
self.d = d
self.s = s
def test_numerical(self):
v = AttributeInfo.from_layer(self.d, self.d.id['x'])
assert_array_equal(v, [1, 2, 3, 4, 5])
assert v.id == self.d.id['x']
assert v.categories is None
def test_categorical(self):
v = AttributeInfo.from_layer(self.d, self.d.id['c'])
assert_array_equal(v, [0, 1, 0, 0, 1])
assert v.id == self.d.id['c']
assert_array_equal(v.categories, ['a', 'b'])
def test_subset(self):
v = AttributeInfo.from_layer(self.s, self.d.id['x'])
assert_array_equal(v, [3, 4, 5])
assert v.id == self.d.id['x']
assert v.categories is None
def test_has_component(self):
v = AttributeInfo.from_layer(self.s, self.d.id['x'])
comp = self.s.data.get_component(self.d.id['x'])
assert v._component == comp
class TestSettingsOracle(object):
def test_oracle_raises_original_error(self):
class BadFormElement(TextBoxElement):
def value(self, layer=None, view=None):
raise AttributeError('Inner Error')
oracle = SettingsOracle({'bad_form': BadFormElement('str("text")')})
try:
oracle('bad_form')
assert False
except AttributeError as err:
assert 'Inner Error' in err.args
def test_oracle_raises_missing(self):
oracle = SettingsOracle({'Form': TextBoxElement('_text')})
with pytest.raises(MissingSettingError):
oracle('missing')
def test_frozen_oracle_raises_missing(self):
oracle = FrozenSettings()
with pytest.raises(MissingSettingError):
oracle.value('missing')
def test_load_reserved_words(self):
_self = MagicMock()
layer = MagicMock()
style = layer.style
extra = MagicMock()
oracle = SettingsOracle({}, _self=_self,
layer=layer,
extra=extra)
assert oracle('self') == _self
assert oracle('layer') == layer
assert oracle('style') == style
assert oracle('extra') == extra
def test_setting_names(self):
oracle = SettingsOracle({'Form': TextBoxElement('_text')})
assert sorted(oracle.setting_names()) == sorted(['style', 'layer', 'Form'])
def test_raises_if_overlapping_reserved_words(self):
with pytest.raises(AssertionError):
SettingsOracle({'self': TextBoxElement('_text')})
def test_two_custom_viewer_classes():
class MyWidget1(CustomViewer):
text_box1_Widget1 = '_Hello'
def setup(self, text_box1_Widget1):
pass
class MyWidget2(CustomViewer):
text_box1_Widget2 = '_Hello'
text_box2_Widget2 = '_world'
def setup(self, text_box1_Widget2, text_box2_Widget2):
pass
app = GlueApplication()
dc = app.data_collection
d = Data(x=[1, 2, 3], label='test')
dc.append(d)
app.new_data_viewer(MyWidget1._widget_cls)
app.new_data_viewer(MyWidget2._widget_cls)
|
{
"content_hash": "d9ff12f43133eacf6c7611fcc985c0e3",
"timestamp": "",
"source": "github",
"line_count": 467,
"max_line_length": 83,
"avg_line_length": 28.19271948608137,
"alnum_prop": 0.5777001367157831,
"repo_name": "stscieisenhamer/glue",
"id": "58be4d69bf66dab15969c0f5afad8c60c256db45",
"size": "13166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "glue/viewers/custom/qt/tests/test_custom_viewer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1591083"
},
{
"name": "Shell",
"bytes": "1603"
}
],
"symlink_target": ""
}
|
import os, itertools, copy
import re, glob, string
import json
import param
try:
import numpy as np
np_ftypes = np.sctypes['float']
except:
np, np_ftypes = None, []
try: from pandas import DataFrame
except: DataFrame = None # pyflakes:ignore (try/except import)
try: from holoviews import Table
except: Table = None # pyflakes:ignore (try/except import)
from collections import defaultdict, OrderedDict
float_types = [float] + np_ftypes
def identityfn(x): return x
def fp_repr(x): return str(x) if (type(x) in float_types) else repr(x)
def set_fp_precision(value):
"""
    Function to set the floating point precision across lancet.
"""
Arguments.set_default('fp_precision', value)
def to_table(args, vdims=[]):
"Helper function to convet an Args object to a HoloViews Table"
if not Table:
return "HoloViews Table not available"
kdims = [dim for dim in args.constant_keys + args.varying_keys
if dim not in vdims]
items = [tuple([spec[k] for k in kdims+vdims])
for spec in args.specs]
return Table(items, kdims=kdims, vdims=vdims)
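# Illustrative usage (assumes HoloViews is importable; List is defined below):
#   to_table(Args(a=1) * List('b', [10, 20]))
# yields a two-row Table whose kdims are the constant key 'a' and the
# varying key 'b'.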
#=====================#
# Argument Specifiers #
#=====================#
class PrettyPrinted(object):
"""
A mixin class for generating pretty-printed representations.
"""
def pprint_args(self, pos_args, keyword_args, infix_operator=None, extra_params={}):
"""
Method to define the positional arguments and keyword order
for pretty printing.
"""
if infix_operator and not (len(pos_args)==2 and keyword_args==[]):
raise Exception('Infix format requires exactly two'
' positional arguments and no keywords')
(kwargs,_,_,_) = self._pprint_args
self._pprint_args = (keyword_args + kwargs, pos_args, infix_operator, extra_params)
def _pprint(self, cycle=False, flat=False, annotate=False, onlychanged=True, level=1, tab = ' '):
"""
Pretty printer that prints only the modified keywords and
generates flat representations (for repr) and optionally
annotates the top of the repr with a comment.
"""
(kwargs, pos_args, infix_operator, extra_params) = self._pprint_args
(br, indent) = ('' if flat else '\n', '' if flat else tab * level)
prettify = lambda x: isinstance(x, PrettyPrinted) and not flat
pretty = lambda x: x._pprint(flat=flat, level=level+1) if prettify(x) else repr(x)
params = dict(self.get_param_values())
show_lexsort = getattr(self, '_lexorder', None) is not None
modified = [k for (k,v) in self.get_param_values(onlychanged=onlychanged)]
pkwargs = [(k, params[k]) for k in kwargs if (k in modified)] + list(extra_params.items())
arg_list = [(k,params[k]) for k in pos_args] + pkwargs
lines = []
if annotate: # Optional annotating comment
len_ckeys, len_vkeys = len(self.constant_keys), len(self.varying_keys)
info_triple = (len(self),
', %d constant key(s)' % len_ckeys if len_ckeys else '',
', %d varying key(s)' % len_vkeys if len_vkeys else '')
annotation = '# == %d items%s%s ==\n' % info_triple
lines = [annotation]
if show_lexsort: lines.append('(')
if cycle:
lines.append('%s(...)' % self.__class__.__name__)
elif infix_operator:
level = level - 1
triple = (pretty(params[pos_args[0]]), infix_operator, pretty(params[pos_args[1]]))
lines.append('%s %s %s' % triple)
else:
lines.append('%s(' % self.__class__.__name__)
for (k,v) in arg_list:
lines.append('%s%s=%s' % (br+indent, k, pretty(v)))
lines.append(',')
lines = lines[:-1] +[br+(tab*(level-1))+')'] # Remove trailing comma
if show_lexsort:
lines.append(').lexsort(%s)' % ', '.join(repr(el) for el in self._lexorder))
return ''.join(lines)
def __repr__(self):
return self._pprint(flat=True, onlychanged=False)
def __str__(self):
return self._pprint()
class Arguments(PrettyPrinted, param.Parameterized):
"""
The abstract, base class that defines the core interface and
methods for all members of the Arguments family of classes,
including either the simple, static members of Args below, or the
sophisticated parameter exploration algorithms subclassing from
DynamicArgs defined in dynamic.py.
The Args subclass may be used directly and forms the root of one
family of classes that have statically defined or precomputed
argument sets (defined below). The second subfamily are the
DynamicArgs, designed to allow more sophisticated, online
parameter space exploration techniques such as hill climbing,
bisection search, genetic algorithms and so on.
"""
fp_precision = param.Integer(default=4, constant=True, doc='''
The floating point precision to use for floating point values.
Unlike other basic Python types, floats need care with their
representation as you only want to display up to the precision
actually specified. A floating point precision of 0 casts
number to integers before representing them.''')
def __init__(self, **params):
self._pprint_args = ([],[],None,{})
self.pprint_args([],['fp_precision', 'dynamic'])
super(Arguments,self).__init__(**params)
# Some types cannot be sorted easily (e.g. numpy arrays)
self.unsortable_keys = []
def __iter__(self): return self
def __contains__(self, value):
return value in (self.constant_keys + self.varying_keys)
@classmethod
def spec_formatter(cls, spec):
" Formats the elements of an argument set appropriately"
return type(spec)((k, str(v)) for (k,v) in spec.items())
@property
def constant_keys(self):
"""
Returns the list of parameter names whose values are constant
as the argument specifier is iterated. Note that the union of
constant and varying_keys should partition the entire set of
keys in the case where there are no unsortable keys.
"""
raise NotImplementedError
@property
def constant_items(self):
"""
Returns the set of constant items as a list of tuples. This
allows easy conversion to dictionary format. Note, the items
should be supplied in the same key ordering as for
constant_keys for consistency.
"""
raise NotImplementedError
@property
def varying_keys(self):
"""
Returns the list of parameters whose values vary as the
argument specifier is iterated. Whenever it is possible, keys
should be sorted from those slowest to faster varying and
sorted alphanumerically within groups that vary at the same
rate.
"""
raise NotImplementedError
def round_floats(self, specs, fp_precision):
_round_float = lambda v, fp: np.round(v, fp) if (type(v) in np_ftypes) else round(v, fp)
_round = (lambda v, fp: int(v)) if fp_precision==0 else _round_float
return (dict((k, _round(v, fp_precision) if (type(v) in float_types) else v)
for (k,v) in spec.items()) for spec in specs)
def __next__(self):
"""
Called to get a list of specifications: dictionaries with
parameter name keys and string values.
"""
raise StopIteration
next = __next__
def copy(self):
"""
Convenience method to avoid using the specifier without
exhausting it.
"""
return copy.copy(self)
def _collect_by_key(self,specs):
"""
Returns a dictionary like object with the lists of values
collapsed by their respective key. Useful to find varying vs
constant keys and to find how fast keys vary.
"""
# Collect (key, value) tuples as list of lists, flatten with chain
allkeys = itertools.chain.from_iterable(
[[(k, run[k]) for k in run] for run in specs])
collection = defaultdict(list)
for (k,v) in allkeys: collection[k].append(v)
return collection
def _operator(self, operator, other):
identities = [isinstance(el, Identity) for el in [self, other]]
if not any(identities): return operator(self,other)
if all(identities): return Identity()
elif identities[1]: return self
else: return other
def __add__(self, other):
"""
Concatenates two argument specifiers.
"""
return self._operator(Concatenate, other)
def __mul__(self, other):
"""
Takes the Cartesian product of two argument specifiers.
"""
return self._operator(CartesianProduct, other)
def _cartesian_product(self, first_specs, second_specs):
"""
Takes the Cartesian product of the specifications. Result will
contain N specifications where N = len(first_specs) *
len(second_specs) and keys are merged.
Example: [{'a':1},{'b':2}] * [{'c':3},{'d':4}] =
[{'a':1,'c':3},{'a':1,'d':4},{'b':2,'c':3},{'b':2,'d':4}]
"""
return [ dict(zip(
list(s1.keys()) + list(s2.keys()),
list(s1.values()) + list(s2.values())
))
for s1 in first_specs for s2 in second_specs ]
def summary(self):
"""
A succinct summary of the argument specifier. Unlike the repr,
a summary does not have to be complete but must supply the
most relevant information about the object to the user.
"""
print("Items: %s" % len(self))
varying_keys = ', '.join('%r' % k for k in self.varying_keys)
print("Varying Keys: %s" % varying_keys)
items = ', '.join(['%s=%r' % (k,v)
for (k,v) in self.constant_items])
if self.constant_items:
print("Constant Items: %s" % items)
class Identity(Arguments):
"""
The identity element for any Arguments object 'args' under the *
operator (CartesianProduct) and + operator (Concatenate). The
following identities hold:
args is (Identity() * args)
args is (args * Identity())
args is (Identity() + args)
args is (args + Identity())
Note that the empty Args() object can also fulfill the role of
Identity under the addition operator.
"""
fp_precision = param.Integer(default=None, allow_None=True,
precedence=(-1), constant=True, doc='''
fp_precision is disabled as Identity() never contains any
arguments.''')
def __eq__(self, other): return isinstance(other, Identity)
def __repr__(self): return "Identity()"
def __str__(self): return repr(self)
def __nonzero__(self): raise ValueError("The boolean value of Identity is undefined")
def __bool__(self): raise ValueError("The boolean value of Identity is undefined")
class Args(Arguments):
"""
An Arguments class that supports statically specified or
precomputed argument sets. It may be used directly to specify
argument values but also forms the base class for a family of more
specific static Argument classes. Each subclass is less flexible
and general but allows arguments to be easily and succinctly
specified. For instance, the Range subclass allows parameter
ranges to be easily declared.
The constructor of Args accepts argument definitions in two
different formats. The keyword format allows constant arguments to
be specified directly and easily. For instance:
>>> v1 = Args(a=2, b=3)
>>> v1
Args(fp_precision=4,a=2,b=3)
The alternative input format takes an explicit list of the
argument specifications:
>>> v2 = Args([{'a':2, 'b':3}]) # Equivalent behaviour to above
>>> v1.specs == v2.specs
True
This latter format is completely flexible and general, allowing
any arbitrary list of arguments to be specified as desired. This
is not generally recommended however as the structure of a
parameter space is often expressed more clearly by composing
together simpler, more succinct Args objects with the
CartesianProduct (*) or Concatenation (+) operators.
"""
specs = param.List(default=[], constant=True, doc='''
The static list of specifications (ie. dictionaries) to be
returned by the specifier. Float values are rounded according
to fp_precision.''')
def __init__(self, specs=None, fp_precision=None, **params):
if fp_precision is None: fp_precision = Arguments.fp_precision
raw_specs, params, explicit = self._build_specs(specs, params, fp_precision)
super(Args, self).__init__(fp_precision=fp_precision, specs=raw_specs, **params)
self._lexorder = None
if explicit:
self.pprint_args(['specs'],[])
else: # Present in kwarg format
self.pprint_args([], self.constant_keys, None,
OrderedDict(sorted(self.constant_items)))
def _build_specs(self, specs, kwargs, fp_precision):
"""
Returns the specs, the remaining kwargs and whether or not the
constructor was called with kwarg or explicit specs.
"""
if specs is None:
overrides = param.ParamOverrides(self, kwargs,
allow_extra_keywords=True)
extra_kwargs = overrides.extra_keywords()
kwargs = dict([(k,v) for (k,v) in kwargs.items()
if k not in extra_kwargs])
rounded_specs = list(self.round_floats([extra_kwargs],
fp_precision))
if extra_kwargs=={}: return [], kwargs, True
else: return rounded_specs, kwargs, False
return list(self.round_floats(specs, fp_precision)), kwargs, True
def __iter__(self):
self._exhausted = False
return self
def __next__(self):
if self._exhausted:
raise StopIteration
else:
self._exhausted=True
return self.specs
next = __next__
def _unique(self, sequence, idfun=repr):
"""
Note: repr() must be implemented properly on all objects. This
is implicitly assumed by Lancet when Python objects need to be
formatted to string representation.
"""
seen = {}
return [seen.setdefault(idfun(e),e) for e in sequence
if idfun(e) not in seen]
def show(self, exclude=[]):
"""
Convenience method to inspect the available argument values in
human-readable format. The ordering of keys is determined by
how quickly they vary.
The exclude list allows specific keys to be excluded for
readability (e.g. to hide long, absolute filenames).
"""
ordering = self.constant_keys + self.varying_keys
spec_lines = [', '.join(['%s=%s' % (k, s[k]) for k in ordering
if (k in s) and (k not in exclude)])
for s in self.specs]
print('\n'.join(['%d: %s' % (i,l) for (i,l) in enumerate(spec_lines)]))
def lexsort(self, *order):
"""
The lexical sort order is specified by a list of string
arguments. Each string is a key name prefixed by '+' or '-'
        for ascending and descending sort respectively. An exception is
        raised if any specified key is not in the set of varying keys.
"""
        if not order:
            raise Exception("Please specify the keys for sorting, use "
                            "'+' prefix for ascending, "
                            "'-' for descending.")
if not set(el[1:] for el in order).issubset(set(self.varying_keys)):
raise Exception("Key(s) specified not in the set of varying keys.")
sorted_args = copy.deepcopy(self)
specs_param = sorted_args.params('specs')
specs_param.constant = False
sorted_args.specs = self._lexsorted_specs(order)
specs_param.constant = True
sorted_args._lexorder = order
return sorted_args
def _lexsorted_specs(self, order):
"""
        A lexsort is specified using a normal key string prefixed by '+'
        (for ascending) or '-' (for descending).
Note that in Python 2, if a key is missing, None is returned
(smallest Python value). In Python 3, an Exception will be
        raised regarding comparison of heterogeneous types.
"""
specs = self.specs[:]
if not all(el[0] in ['+', '-'] for el in order):
raise Exception("Please specify the keys for sorting, use"
"'+' prefix for ascending,"
"'-' for descending.)")
sort_cycles = [(el[1:], True if el[0]=='+' else False)
for el in reversed(order)
if el[1:] in self.varying_keys]
for (key, ascending) in sort_cycles:
specs = sorted(specs, key=lambda s: s.get(key, None),
reverse=(not ascending))
return specs
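    # Illustrative example: for an Args object varying in keys 'a' and 'b',
    #   args.lexsort('+a', '-b')
    # returns a copy whose specs are sorted ascending by 'a' and, within equal
    # values of 'a', descending by 'b'.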
@property
def constant_keys(self):
collection = self._collect_by_key(self.specs)
return [k for k in sorted(collection) if
(len(self._unique(collection[k])) == 1)]
@property
def constant_items(self):
collection = self._collect_by_key(self.specs)
return [(k,collection[k][0]) for k in self.constant_keys]
@property
def varying_keys(self):
collection = self._collect_by_key(self.specs)
constant_set = set(self.constant_keys)
unordered_varying = set(collection.keys()).difference(constant_set)
# Finding out how fast keys are varying
grouplens = [(len([len(list(y)) for (_,y)
in itertools.groupby(collection[k])]),k)
for k in collection
if (k not in self.unsortable_keys)]
varying_counts = [(n,k) for (n,k) in sorted(grouplens) if (k in unordered_varying)]
# Grouping keys with common frequency alphanumerically (desired behaviour).
ddict = defaultdict(list)
for (n,k) in varying_counts: ddict[n].append(k)
alphagroups = [sorted(ddict[k]) for k in sorted(ddict)]
return [el for group in alphagroups for el in group] + sorted(self.unsortable_keys)
@property
def dframe(self):
return DataFrame(self.specs) if DataFrame else "Pandas not available"
@property
def table(self):
return to_table(self)
def __len__(self): return len(self.specs)
class Concatenate(Args):
"""
Concatenate is the sequential composition of two specifiers. The
    specifier created by the composition (first + second) generates
the arguments in first followed by the arguments in second.
"""
first = param.ClassSelector(default=None, class_=Args, allow_None=True, constant=True, doc='''
The first specifier in the concatenation.''')
second = param.ClassSelector(default=None, class_=Args, allow_None=True, constant=True, doc='''
The second specifier in the concatenation.''')
def __init__(self, first, second):
max_precision = max(first.fp_precision, second.fp_precision)
specs = first.specs + second.specs
super(Concatenate, self).__init__(specs, fp_precision=max_precision,
first=first, second=second)
self.pprint_args(['first', 'second'],[], infix_operator='+')
class CartesianProduct(Args):
"""
CartesianProduct is the Cartesian product of two specifiers. The
    specifier created by the composition (first * second) generates
    the Cartesian product of the arguments in first with the
    arguments in second. Note that len(first * second) =
len(first)*len(second)
"""
first = param.ClassSelector(default=None, class_=Args, allow_None=True,
constant=True, doc='''The first specifier in the Cartesian product.''')
second = param.ClassSelector(default=None, class_=Args, allow_None=True,
constant=True, doc='''The second specifier in the Cartesian product.''')
def __init__(self, first, second):
max_precision = max(first.fp_precision, second.fp_precision)
specs = self._cartesian_product(first.specs, second.specs)
overlap = (set(first.varying_keys + first.constant_keys)
& set(second.varying_keys + second.constant_keys))
        assert overlap == set(), ('Sets of keys cannot overlap '
                                  'between argument specifiers '
                                  'in cartesian product.')
super(CartesianProduct, self).__init__(specs, fp_precision=max_precision,
first=first, second=second)
self.pprint_args(['first', 'second'],[], infix_operator='*')
class Range(Args):
"""
Range generates an argument from a numerically interpolated range
which is linear by default. An optional function can be specified
to sample a numeric range with regular intervals.
"""
key = param.String(default='', constant=True, doc='''
The key assigned to the values computed over the numeric range.''')
start_value = param.Number(default=None, allow_None=True, constant=True, doc='''
The starting numeric value of the range.''')
end_value = param.Number(default=None, allow_None=True, constant=True, doc='''
The ending numeric value of the range (inclusive).''')
steps = param.Integer(default=2, constant=True, bounds=(1,None), doc='''
The number of steps to interpolate over. Default is 2 which
returns the start and end values without interpolation.''')
# Can't this be a lambda?
mapfn = param.Callable(default=identityfn, constant=True, doc='''
The function to be mapped across the linear range. The
        identity function is used by default.''')
def __init__(self, key, start_value, end_value, steps=2, mapfn=identityfn, **params):
values = self.linspace(start_value, end_value, steps)
specs = [{key:mapfn(val)} for val in values ]
super(Range, self).__init__(specs, key=key, start_value=start_value,
end_value=end_value, steps=steps,
mapfn=mapfn, **params)
self.pprint_args(['key', 'start_value'], ['end_value', 'steps'])
def linspace(self, start, stop, n):
""" Simple replacement for numpy linspace"""
if n == 1: return [start]
L = [0.0] * n
nm1 = n - 1
nm1inv = 1.0 / nm1
for i in range(n):
L[i] = nm1inv * (start*(nm1 - i) + stop*i)
return L
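    # Illustrative example: Range('alpha', 0, 1, steps=3) generates the specs
    # [{'alpha': 0.0}, {'alpha': 0.5}, {'alpha': 1.0}]; supplying, say,
    # mapfn=lambda x: 10 ** x would sample the same range on a logarithmic scale.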
class List(Args):
"""
An argument specifier that takes its values from a given list.
"""
values = param.List(default=[], constant=True, doc='''
The list values that are to be returned by the specifier''')
key = param.String(default='default', constant=True, doc='''
The key assigned to the elements of the supplied list.''')
def __init__(self, key, values, **params):
specs = [{key:val} for val in values]
super(List, self).__init__(specs, key=key, values=values, **params)
self.pprint_args(['key', 'values'], [])
class Log(Args):
"""
Specifier that loads arguments from a log file in task id (tid)
order. This wrapper class allows a concise representation of file
logs with the option of adding the task id to the loaded
specifications.
For full control over the arguments, you can use this class to
create a fully specified Args object as follows:
Args(Log.extract_log(<log_file>).values()),
"""
log_path = param.String(default=None, allow_None=True, constant=True, doc='''
The relative or absolute path to the log file. If a relative
path is given, the absolute path is computed relative to
os.getcwd().''')
tid_key = param.String(default='tid', constant=True, allow_None=True, doc='''
If not None, the key given to the tid values included in the
loaded specifications. If None, the tid number is ignored.''')
@staticmethod
def extract_log(log_path, dict_type=dict):
"""
Parses the log file generated by a launcher and returns
dictionary with tid keys and specification values.
Ordering can be maintained by setting dict_type to the
appropriate constructor (i.e. OrderedDict). Keys are converted
from unicode to strings for kwarg use.
"""
log_path = (log_path if os.path.isfile(log_path)
else os.path.join(os.getcwd(), log_path))
with open(log_path,'r') as log:
splits = (line.split() for line in log)
uzipped = ((int(split[0]), json.loads(" ".join(split[1:]))) for split in splits)
szipped = [(i, dict((str(k),v) for (k,v) in d.items())) for (i,d) in uzipped]
return dict_type(szipped)
@staticmethod
def write_log(log_path, data, allow_append=True):
"""
Writes the supplied specifications to the log path. The data
may be supplied as either as a an Args or as a list of
dictionaries.
By default, specifications will be appropriately appended to
an existing log file. This can be disabled by setting
allow_append to False.
"""
append = os.path.isfile(log_path)
islist = isinstance(data, list)
if append and not allow_append:
raise Exception('Appending has been disabled'
' and file %s exists' % log_path)
if not (islist or isinstance(data, Args)):
raise Exception('Can only write Args objects or dictionary'
' lists to log file.')
specs = data if islist else data.specs
if not all(isinstance(el,dict) for el in specs):
raise Exception('List elements must be dictionaries.')
log_file = open(log_path, 'r+') if append else open(log_path, 'w')
start = int(log_file.readlines()[-1].split()[0])+1 if append else 0
ascending_indices = range(start, start+len(data))
log_str = '\n'.join(['%d %s' % (tid, json.dumps(el))
for (tid, el) in zip(ascending_indices,specs)])
log_file.write("\n"+log_str if append else log_str)
log_file.close()
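    # Illustrative round trip (hypothetical path 'run.log'):
    #   Log.write_log('run.log', Args(a=1) + Args(a=2))
    # writes one '<tid> <json-spec>' line per specification, after which
    #   Log('run.log')
    # yields the specs [{'a': 1, 'tid': 0}, {'a': 2, 'tid': 1}].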
def __init__(self, log_path, tid_key='tid', **params):
log_items = sorted(Log.extract_log(log_path).items())
if tid_key is None:
log_specs = [spec for (_, spec) in log_items]
else:
log_specs = [dict(list(spec.items())+[(tid_key,idx)])
for (idx, spec) in log_items]
super(Log, self).__init__(log_specs,
log_path=log_path,
tid_key=tid_key,
**params)
self.pprint_args(['log_path'], ['tid_key'])
class FilePattern(Args):
"""
A FilePattern specifier allows files to be matched and information
encoded in filenames to be extracted via an extended form of
globbing. This object may be used to specify filename arguments to
CommandTemplates when launching jobs but it also very useful for
collating files for analysis.
For instance, you can find the absolute filenames of all npz files
in a 'data' subdirectory (relative to the root) that start with
'timeseries' using the pattern 'data/timeseries*.npz'.
In addition to globbing supported by the glob module, patterns can
extract metadata encoded in filenames using a subset of the Python
format specification syntax. To illustrate, you can use
'data/timeseries-{date}.npz' to record the date strings associated
with matched files. Note that a particular named fields can only
be used in a particular pattern once.
By default metadata is extracted as strings but format types are
supported in the usual manner
eg. 'data/timeseries-{day:d}-{month:d}.npz' will extract the day
and month from the filename as integer values. Only field names
and types are recognised with other format specification syntax
ignored. Type codes supported: 'd', 'b', 'o', 'x', 'e','E','f',
    'F','g', 'G', 'n' (if omitted, result is a string by default).
Note that ordering is determined via ascending alphanumeric sort
and that actual filenames should not include any globbing
characters, namely: '?','*','[' and ']' (general good practice for
filenames anyway).
"""
key = param.String(default=None, allow_None=True, constant=True, doc='''
The key name given to the matched file path strings.''')
pattern = param.String(default=None, allow_None=True, constant=True, doc='''
The pattern files are to be searched against.''')
root = param.String(default=None, allow_None=True, constant=True, doc='''
The root directory from which patterns are to be loaded. The
root is set relative to os.getcwd().''')
@classmethod
def directory(cls, directory, root=None, extension=None, **kwargs):
"""
Load all the files in a given directory selecting only files
with the given extension if specified. The given kwargs are
passed through to the normal constructor.
"""
root = os.getcwd() if root is None else root
suffix = '' if extension is None else '.' + extension.rsplit('.')[-1]
pattern = directory + os.sep + '*' + suffix
key = os.path.join(root, directory,'*').rsplit(os.sep)[-2]
format_parse = list(string.Formatter().parse(key))
        if not all([el is None for el in list(zip(*format_parse))[1]]):
raise Exception('Directory cannot contain format field specifications')
return cls(key, pattern, root, **kwargs)
def __init__(self, key, pattern, root=None, **params):
root = os.getcwd() if root is None else root
specs = self._load_expansion(key, root, pattern)
self.files = [s[key] for s in specs]
super(FilePattern, self).__init__(specs, key=key, pattern=pattern,
root=root, **params)
self.pprint_args(['key', 'pattern'], ['root'])
def fields(self):
"""
Return the fields specified in the pattern using Python's
formatting mini-language.
"""
parse = list(string.Formatter().parse(self.pattern))
        return [f for f in list(zip(*parse))[1] if f is not None]
def _load_expansion(self, key, root, pattern):
"""
Loads the files that match the given pattern.
"""
path_pattern = os.path.join(root, pattern)
expanded_paths = self._expand_pattern(path_pattern)
specs=[]
for (path, tags) in expanded_paths:
filelist = [os.path.join(path,f) for f in os.listdir(path)] if os.path.isdir(path) else [path]
for filepath in filelist:
specs.append(dict(tags,**{key:os.path.abspath(filepath)}))
return sorted(specs, key=lambda s: s[key])
def _expand_pattern(self, pattern):
"""
From the pattern decomposition, finds the absolute paths
matching the pattern.
"""
(globpattern, regexp, fields, types) = self._decompose_pattern(pattern)
filelist = glob.glob(globpattern)
expansion = []
for fname in filelist:
if fields == []:
expansion.append((fname, {}))
continue
match = re.match(regexp, fname)
if match is None: continue
match_items = match.groupdict().items()
tags = dict((k,types.get(k, str)(v)) for (k,v) in match_items)
expansion.append((fname, tags))
return expansion
def _decompose_pattern(self, pattern):
"""
Given a path pattern with format declaration, generates a
four-tuple (glob_pattern, regexp pattern, fields, type map)
"""
sep = '~lancet~sep~'
float_codes = ['e','E','f', 'F','g', 'G', 'n']
typecodes = dict([(k,float) for k in float_codes]
+ [('b',bin), ('d',int), ('o',oct), ('x',hex)])
parse = list(string.Formatter().parse(pattern))
text, fields, codes, _ = zip(*parse)
# Finding the field types from format string
types = []
for (field, code) in zip(fields, codes):
if code in ['', None]: continue
constructor = typecodes.get(code[-1], None)
if constructor: types += [(field, constructor)]
stars = ['' if not f else '*' for f in fields]
globpat = ''.join(text+star for (text,star) in zip(text,stars))
refields = ['' if not f else sep+('(?P<%s>.*?)'% f)+sep for f in fields]
parts = ''.join(text+group for (text,group) in zip(text, refields)).split(sep)
for i in range(0, len(parts), 2): parts[i] = re.escape(parts[i])
regexp_pattern = ''.join(parts).replace('\\*','.*')
fields = list(f for f in fields if f)
        return globpat, regexp_pattern, fields, dict(types)
@property
def table(self):
return to_table(self, [self.key])
# Importing from filetypes requires PrettyPrinted to be defined first
from lancet.filetypes import FileType
class FileInfo(Args):
"""
Loads metadata from a set of filenames. For instance, you can load
metadata associated with a series of image files given by a
FilePattern. Unlike other explicit instances of Args, this object
extends the values of an existing Args object. Once you have
loaded the metadata, FileInfo allows you to load the file data
into a pandas DataFrame or a HoloViews Table.
"""
source = param.ClassSelector(class_ = Args, doc='''
The argument specifier that supplies the file paths.''')
filetype = param.ClassSelector(constant=True, class_= FileType, doc='''
A FileType object to be applied to each file path.''')
key = param.String(constant=True, doc='''
The key used to find the file paths for inspection.''')
ignore = param.List(default=[], constant=True, doc='''
Metadata keys that are to be explicitly ignored. ''')
def __init__(self, source, key, filetype, ignore = [], **params):
specs = self._info(source, key, filetype, ignore)
super(FileInfo, self).__init__(specs,
source = source,
filetype = filetype,
key = key,
ignore=ignore,
**params)
self.pprint_args(['source', 'key', 'filetype'], ['ignore'])
@classmethod
def from_pattern(cls, pattern, filetype=None, key='filename', root=None, ignore=[]):
"""
Convenience method to directly chain a pattern processed by
FilePattern into a FileInfo instance.
Note that if a default filetype has been set on FileInfo, the
filetype argument may be omitted.
"""
filepattern = FilePattern(key, pattern, root=root)
if FileInfo.filetype and filetype is None:
filetype = FileInfo.filetype
elif filetype is None:
raise Exception("The filetype argument must be supplied unless "
"an appropriate default has been specified as "
"FileInfo.filetype")
return FileInfo(filepattern, key, filetype, ignore=ignore)
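    # Illustrative usage (hypothetical files and FileType subclass):
    #   FileInfo.from_pattern('images/cell-{sample}.png', filetype=SomeImageType())
    # matches the files, extracts the 'sample' field from each filename and
    # merges it with the metadata that the filetype reports for each file.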
@property
def table(self):
return to_table(self, [self.key])
def load(self, val, **kwargs):
"""
Load the file contents into the supplied pandas dataframe or
HoloViews Table. This allows a selection to be made over the
metadata before loading the file contents (may be slow).
"""
if Table and isinstance(val, Table):
return self.load_table(val, **kwargs)
elif DataFrame and isinstance(val, DataFrame):
return self.load_dframe(val, **kwargs)
else:
raise Exception("Type %s not a DataFrame or Table." % type(val))
def load_table(self, table):
"""
Load the file contents into the supplied Table using the
specified key and filetype. The input table should have the
        filenames as values, which will be replaced by the loaded
        data.
"""
items, data_keys = [], None
for key, filename in table.items():
data_dict = self.filetype.data(filename[0])
current_keys = tuple(sorted(data_dict.keys()))
values = [data_dict[k] for k in current_keys]
if data_keys is None:
data_keys = current_keys
elif data_keys != current_keys:
raise Exception("Data keys are inconsistent")
items.append((key, values))
return Table(items, kdims=table.kdims, vdims=data_keys)
def load_dframe(self, dframe):
"""
Load the file contents into the supplied dataframe using the
specified key and filetype.
"""
filename_series = dframe[self.key]
loaded_data = filename_series.map(self.filetype.data)
keys = [list(el.keys()) for el in loaded_data.values]
for key in set().union(*keys):
key_exists = key in dframe.columns
if key_exists:
self.warning("Appending '_data' suffix to data key %r to avoid"
"overwriting existing metadata with the same name." % key)
suffix = '_data' if key_exists else ''
dframe[key+suffix] = loaded_data.map(lambda x: x.get(key, np.nan))
return dframe
def _info(self, source, key, filetype, ignore):
"""
Generates the union of the source.specs and the metadata
dictionary loaded by the filetype object.
"""
specs, mdata = [], {}
mdata_clashes = set()
for spec in source.specs:
if key not in spec:
raise Exception("Key %r not available in 'source'." % key)
mdata = dict((k,v) for (k,v) in filetype.metadata(spec[key]).items()
if k not in ignore)
mdata_spec = {}
mdata_spec.update(spec)
mdata_spec.update(mdata)
specs.append(mdata_spec)
mdata_clashes = mdata_clashes | (set(spec.keys()) & set(mdata.keys()))
# Metadata clashes can be avoided by using the ignore list.
if mdata_clashes:
self.warning("Loaded metadata keys overriding source keys.")
return specs
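# Usage sketch (illustrative; the paths, pattern and filetype are
# placeholders, not part of the original module):
#   pattern = FilePattern('filename', 'run_{index}.npz', root='/tmp/results')
#   info = FileInfo(pattern, 'filename', NumpyFile())
#   data = info.load(info.table)   # select on metadata, then load contents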
|
{
"content_hash": "0dba57b3f331f9ecf10e4e8afd7b5174",
"timestamp": "",
"source": "github",
"line_count": 979,
"max_line_length": 106,
"avg_line_length": 40.27987742594484,
"alnum_prop": 0.601105644874981,
"repo_name": "ioam/lancet",
"id": "a4a8ad4ce866a71202135b685ca459ec485f569b",
"size": "39453",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lancet/core.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "131874"
}
],
"symlink_target": ""
}
|
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_server_group
short_description: Manage OpenStack server groups
extends_documentation_fragment: openstack
version_added: "2.2"
author: "Lingxian Kong (@kong)"
description:
- Add or remove server groups from OpenStack.
options:
state:
description:
- Indicate desired state of the resource. When I(state) is 'present',
then I(policies) is required.
choices: ['present', 'absent']
required: false
default: present
name:
description:
- Server group name.
required: true
policies:
description:
- A list of one or more policy names to associate with the server
group. The list must contain at least one policy name. The current
valid policy names are anti-affinity, affinity, soft-anti-affinity
and soft-affinity.
required: false
availability_zone:
description:
      - Ignored. Present for backwards compatibility.
required: false
requirements:
- "python >= 2.6"
- "shade"
'''
EXAMPLES = '''
# Create a server group with 'affinity' policy.
- os_server_group:
state: present
auth:
auth_url: https://api.cloud.catalyst.net.nz:5000/v2.0
username: admin
password: admin
project_name: admin
name: my_server_group
policies:
- affinity
# Delete 'my_server_group' server group.
- os_server_group:
state: absent
auth:
auth_url: https://api.cloud.catalyst.net.nz:5000/v2.0
username: admin
password: admin
project_name: admin
name: my_server_group
'''
RETURN = '''
id:
description: Unique UUID.
returned: success
type: string
name:
description: The name of the server group.
returned: success
type: string
policies:
description: A list of one or more policy names of the server group.
returned: success
type: list
members:
description: A list of members in the server group.
returned: success
type: list
metadata:
description: Metadata key and value pairs.
returned: success
type: dict
project_id:
description: The project ID who owns the server group.
returned: success
type: string
user_id:
description: The user ID who owns the server group.
returned: success
type: string
'''
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
def _system_state_change(state, server_group):
if state == 'present' and not server_group:
return True
if state == 'absent' and server_group:
return True
return False
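# Illustrative truth table for _system_state_change (not part of the module):
#   state='present', server_group=None   -> True  (group would be created)
#   state='absent',  server_group={...}  -> True  (group would be deleted)
#   any other combination                -> False (no change required)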
def main():
argument_spec = openstack_full_argument_spec(
name=dict(required=True),
policies=dict(required=False, type='list'),
state=dict(default='present', choices=['absent', 'present']),
)
module_kwargs = openstack_module_kwargs()
module = AnsibleModule(
argument_spec,
supports_check_mode=True,
**module_kwargs
)
if not HAS_SHADE:
module.fail_json(msg='shade is required for this module')
name = module.params['name']
policies = module.params['policies']
state = module.params['state']
try:
cloud = shade.openstack_cloud(**module.params)
server_group = cloud.get_server_group(name)
if module.check_mode:
module.exit_json(
changed=_system_state_change(state, server_group)
)
changed = False
if state == 'present':
if not server_group:
if not policies:
module.fail_json(
msg="Parameter 'policies' is required in Server Group "
"Create"
)
server_group = cloud.create_server_group(name, policies)
changed = True
module.exit_json(
changed=changed,
id=server_group['id'],
server_group=server_group
)
if state == 'absent':
if server_group:
cloud.delete_server_group(server_group['id'])
changed = True
module.exit_json(changed=changed)
except shade.OpenStackCloudException as e:
module.fail_json(msg=str(e), extra_data=e.extra_data)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
|
{
"content_hash": "ec39227d0bc88bac65ec49f6e7bcff4f",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 79,
"avg_line_length": 26.69942196531792,
"alnum_prop": 0.6103052608789782,
"repo_name": "e-gob/plataforma-kioscos-autoatencion",
"id": "0d740955923d601cf38abaa4af3668a62b966b9e",
"size": "5317",
"binary": false,
"copies": "19",
"ref": "refs/heads/master",
"path": "scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/cloud/openstack/os_server_group.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "41110"
},
{
"name": "C++",
"bytes": "3804"
},
{
"name": "CSS",
"bytes": "34823"
},
{
"name": "CoffeeScript",
"bytes": "8521"
},
{
"name": "HTML",
"bytes": "61168"
},
{
"name": "JavaScript",
"bytes": "7206"
},
{
"name": "Makefile",
"bytes": "1347"
},
{
"name": "PowerShell",
"bytes": "584344"
},
{
"name": "Python",
"bytes": "25506593"
},
{
"name": "Ruby",
"bytes": "245726"
},
{
"name": "Shell",
"bytes": "5075"
}
],
"symlink_target": ""
}
|
"""Accesses the google.cloud.videointelligence.v1p2beta1 VideoIntelligenceService API."""
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.operation
import google.api_core.operations_v1
import grpc
from google.cloud.videointelligence_v1p2beta1.gapic import enums
from google.cloud.videointelligence_v1p2beta1.gapic import video_intelligence_service_client_config
from google.cloud.videointelligence_v1p2beta1.gapic.transports import video_intelligence_service_grpc_transport
from google.cloud.videointelligence_v1p2beta1.proto import video_intelligence_pb2
from google.cloud.videointelligence_v1p2beta1.proto import video_intelligence_pb2_grpc
from google.longrunning import operations_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
'google-cloud-videointelligence', ).version
class VideoIntelligenceServiceClient(object):
"""Service that implements Google Cloud Video Intelligence API."""
SERVICE_ADDRESS = 'videointelligence.googleapis.com:443'
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = 'google.cloud.videointelligence.v1p2beta1.VideoIntelligenceService'
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
VideoIntelligenceServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs)
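    # Usage sketch (illustrative; 'service-account.json' is a placeholder
    # path to a real service account key file):
    #   client = VideoIntelligenceServiceClient.from_service_account_file(
    #       'service-account.json')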
from_service_account_json = from_service_account_file
def __init__(self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None):
"""Constructor.
Args:
transport (Union[~.VideoIntelligenceServiceGrpcTransport,
Callable[[~.Credentials, type], ~.VideoIntelligenceServiceGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
'The `client_config` argument is deprecated.',
PendingDeprecationWarning,
stacklevel=2)
else:
client_config = video_intelligence_service_client_config.config
if channel:
warnings.warn(
'The `channel` argument is deprecated; use '
'`transport` instead.',
PendingDeprecationWarning,
stacklevel=2)
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=video_intelligence_service_grpc_transport.
VideoIntelligenceServiceGrpcTransport,
)
else:
if credentials:
raise ValueError(
'Received both a transport instance and '
'credentials; these are mutually exclusive.')
self.transport = transport
else:
self.transport = video_intelligence_service_grpc_transport.VideoIntelligenceServiceGrpcTransport(
address=self.SERVICE_ADDRESS,
channel=channel,
credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION, )
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config['interfaces'][self._INTERFACE_NAME], )
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
def annotate_video(self,
input_uri=None,
input_content=None,
features=None,
video_context=None,
output_uri=None,
location_id=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Performs asynchronous video annotation. Progress and results can be
retrieved through the ``google.longrunning.Operations`` interface.
``Operation.metadata`` contains ``AnnotateVideoProgress`` (progress).
``Operation.response`` contains ``AnnotateVideoResponse`` (results).
Example:
>>> from google.cloud import videointelligence_v1p2beta1
>>> from google.cloud.videointelligence_v1p2beta1 import enums
>>>
>>> client = videointelligence_v1p2beta1.VideoIntelligenceServiceClient()
>>>
>>> input_uri = 'gs://demomaker/cat.mp4'
>>> features_element = enums.Feature.LABEL_DETECTION
>>> features = [features_element]
>>>
>>> response = client.annotate_video(input_uri=input_uri, features=features)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
input_uri (str): Input video location. Currently, only `Google Cloud
Storage <https://cloud.google.com/storage/>`__ URIs are supported, which
must be specified in the following format: ``gs://bucket-id/object-id``
(other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For
more information, see `Request
URIs <https://cloud.google.com/storage/docs/reference-uris>`__. A video
URI may include wildcards in ``object-id``, and thus identify multiple
videos. Supported wildcards: '\*' to match 0 or more characters; '?' to
match 1 character. If unset, the input video should be embedded in the
request as ``input_content``. If set, ``input_content`` should be unset.
input_content (bytes): The video data bytes. If unset, the input video(s) should be specified
via ``input_uri``. If set, ``input_uri`` should be unset.
features (list[~google.cloud.videointelligence_v1p2beta1.types.Feature]): Requested video annotation features.
video_context (Union[dict, ~google.cloud.videointelligence_v1p2beta1.types.VideoContext]): Additional video context and/or feature-specific parameters.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.videointelligence_v1p2beta1.types.VideoContext`
output_uri (str): Optional location where the output (in JSON format) should be stored.
Currently, only `Google Cloud
Storage <https://cloud.google.com/storage/>`__ URIs are supported, which
must be specified in the following format: ``gs://bucket-id/object-id``
(other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For
more information, see `Request
URIs <https://cloud.google.com/storage/docs/reference-uris>`__.
location_id (str): Optional cloud region where annotation should take place. Supported
cloud regions: ``us-east1``, ``us-west1``, ``europe-west1``,
``asia-east1``. If no region is specified, a region will be determined
based on video file location.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.videointelligence_v1p2beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'annotate_video' not in self._inner_api_calls:
self._inner_api_calls[
'annotate_video'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.annotate_video,
default_retry=self._method_configs['AnnotateVideo'].retry,
default_timeout=self._method_configs['AnnotateVideo'].
timeout,
client_info=self._client_info,
)
request = video_intelligence_pb2.AnnotateVideoRequest(
input_uri=input_uri,
input_content=input_content,
features=features,
video_context=video_context,
output_uri=output_uri,
location_id=location_id,
)
operation = self._inner_api_calls['annotate_video'](
request, retry=retry, timeout=timeout, metadata=metadata)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
video_intelligence_pb2.AnnotateVideoResponse,
metadata_type=video_intelligence_pb2.AnnotateVideoProgress,
)
|
{
"content_hash": "3a9adfbdd318105ac748ab87f215d0d7",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 163,
"avg_line_length": 49.32954545454545,
"alnum_prop": 0.617369269753513,
"repo_name": "jonparrott/gcloud-python",
"id": "f26b4c1dc507ec998205b96d38c166241e2a0980",
"size": "13624",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "videointelligence/google/cloud/videointelligence_v1p2beta1/gapic/video_intelligence_service_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "62009"
},
{
"name": "Python",
"bytes": "3459300"
},
{
"name": "Shell",
"bytes": "7548"
}
],
"symlink_target": ""
}
|
from flask_wtf import Form, RecaptchaField
from wtforms import (
StringField,
TextField,
TextAreaField,
PasswordField,
BooleanField,
ValidationError
)
from wtforms.validators import DataRequired, Length, EqualTo, URL
from models import User
class RegisterForm(Form):
"""Register Form."""
username = StringField('Username', [DataRequired(), Length(max=255)])
password = PasswordField('Password', [DataRequired(), Length(min=8)])
confirm = PasswordField('Confirm Password', [DataRequired(), EqualTo('password')])
#recaptcha = RecaptchaField()
def validate(self):
check_validate = super(RegisterForm, self).validate()
if not check_validate:
return False
user = User.query.filter_by(username=self.username.data).first()
if user:
self.username.errors.append('User with that name already exists.')
return False
return True
class LoginForm(Form):
"""Login Form"""
username = StringField('Username', [DataRequired(), Length(max=255)])
password = PasswordField('Password', [DataRequired()])
remember = BooleanField("Remember Me")
def validate(self):
"""Validator for check the account information."""
check_validata = super(LoginForm, self).validate()
if not check_validata:
return False
user = User.query.filter_by(username=self.username.data).first()
if not user:
self.username.errors.append('Invalid username or password.')
return False
if not user.check_password(self.password.data):
self.username.errors.append('Invalid username or password.')
return False
return True
class CommentForm(Form):
"""Form validator for comment."""
# Set some field(InputBox) for enter the data.
# patam validatos: setup list of validators
name = StringField(
'Name',
validators=[DataRequired(), Length(max=255)])
text = TextField(u'Comment', validators=[DataRequired()])
class PostForm(Form):
"""Post Form."""
title = StringField('Title', [DataRequired(), Length(max=255)])
text = TextAreaField('Blog Content', [DataRequired()])
class OpenIDForm(Form):
"""OpenID Form."""
openid_url = StringField('OpenID URL', [DataRequired(), URL()])
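# Usage sketch (illustrative; assumes a Flask request context, with the
# login mechanics handled elsewhere):
#   form = LoginForm()
#   if form.validate():
#       user = User.query.filter_by(username=form.username.data).first()
#       # proceed to log the user in, e.g. flask_login.login_user(user)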
|
{
"content_hash": "25b50ddf048af31dee038a5448b25c5c",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 86,
"avg_line_length": 29.641975308641975,
"alnum_prop": 0.6393169512703041,
"repo_name": "indicolite/flask-blog",
"id": "147d155e1599736b2ff14be070e50af23e587469",
"size": "2439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flaskblog/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "9600"
},
{
"name": "JavaScript",
"bytes": "434054"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "27185"
}
],
"symlink_target": ""
}
|
from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._restore_points_operations import build_create_request, build_delete_request, build_get_request
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class RestorePointsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.compute.v2022_08_01.aio.ComputeManagementClient`'s
:attr:`restore_points` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
async def _create_initial(
self,
resource_group_name: str,
restore_point_collection_name: str,
restore_point_name: str,
parameters: Union[_models.RestorePoint, IO],
**kwargs: Any
) -> _models.RestorePoint:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.RestorePoint]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "RestorePoint")
request = build_create_request(
resource_group_name=resource_group_name,
restore_point_collection_name=restore_point_collection_name,
restore_point_name=restore_point_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("RestorePoint", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/restorePointCollections/{restorePointCollectionName}/restorePoints/{restorePointName}"} # type: ignore
@overload
async def begin_create(
self,
resource_group_name: str,
restore_point_collection_name: str,
restore_point_name: str,
parameters: _models.RestorePoint,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.RestorePoint]:
"""The operation to create the restore point. Updating properties of an existing restore point is
not allowed.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param restore_point_collection_name: The name of the restore point collection. Required.
:type restore_point_collection_name: str
:param restore_point_name: The name of the restore point. Required.
:type restore_point_name: str
:param parameters: Parameters supplied to the Create restore point operation. Required.
:type parameters: ~azure.mgmt.compute.v2022_08_01.models.RestorePoint
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RestorePoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2022_08_01.models.RestorePoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_create(
self,
resource_group_name: str,
restore_point_collection_name: str,
restore_point_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.RestorePoint]:
"""The operation to create the restore point. Updating properties of an existing restore point is
not allowed.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param restore_point_collection_name: The name of the restore point collection. Required.
:type restore_point_collection_name: str
:param restore_point_name: The name of the restore point. Required.
:type restore_point_name: str
:param parameters: Parameters supplied to the Create restore point operation. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RestorePoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2022_08_01.models.RestorePoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_create(
self,
resource_group_name: str,
restore_point_collection_name: str,
restore_point_name: str,
parameters: Union[_models.RestorePoint, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.RestorePoint]:
"""The operation to create the restore point. Updating properties of an existing restore point is
not allowed.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param restore_point_collection_name: The name of the restore point collection. Required.
:type restore_point_collection_name: str
:param restore_point_name: The name of the restore point. Required.
:type restore_point_name: str
        :param parameters: Parameters supplied to the Create restore point operation. Is either a model
         type or an IO type. Required.
:type parameters: ~azure.mgmt.compute.v2022_08_01.models.RestorePoint or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RestorePoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2022_08_01.models.RestorePoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.RestorePoint]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_initial( # type: ignore
resource_group_name=resource_group_name,
restore_point_collection_name=restore_point_collection_name,
restore_point_name=restore_point_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("RestorePoint", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/restorePointCollections/{restorePointCollectionName}/restorePoints/{restorePointName}"} # type: ignore
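    # Usage sketch (illustrative; the resource names and the `parameters`
    # model are placeholders):
    #   poller = await client.restore_points.begin_create(
    #       "my-rg", "my-rp-collection", "my-rp", parameters)
    #   restore_point = await poller.result()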
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, restore_point_collection_name: str, restore_point_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_delete_request(
resource_group_name=resource_group_name,
restore_point_collection_name=restore_point_collection_name,
restore_point_name=restore_point_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/restorePointCollections/{restorePointCollectionName}/restorePoints/{restorePointName}"} # type: ignore
@distributed_trace_async
async def begin_delete(
self, resource_group_name: str, restore_point_collection_name: str, restore_point_name: str, **kwargs: Any
) -> AsyncLROPoller[None]:
"""The operation to delete the restore point.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
        :param restore_point_collection_name: The name of the restore point collection. Required.
:type restore_point_collection_name: str
:param restore_point_name: The name of the restore point. Required.
:type restore_point_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
restore_point_collection_name=restore_point_collection_name,
restore_point_name=restore_point_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/restorePointCollections/{restorePointCollectionName}/restorePoints/{restorePointName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
restore_point_collection_name: str,
restore_point_name: str,
expand: Optional[Union[str, _models.RestorePointExpandOptions]] = None,
**kwargs: Any
) -> _models.RestorePoint:
"""The operation to get the restore point.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param restore_point_collection_name: The name of the restore point collection. Required.
:type restore_point_collection_name: str
:param restore_point_name: The name of the restore point. Required.
:type restore_point_name: str
:param expand: The expand expression to apply on the operation. 'InstanceView' retrieves
information about the run-time state of a restore point. "instanceView" Default value is None.
:type expand: str or ~azure.mgmt.compute.v2022_08_01.models.RestorePointExpandOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RestorePoint or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2022_08_01.models.RestorePoint
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.RestorePoint]
request = build_get_request(
resource_group_name=resource_group_name,
restore_point_collection_name=restore_point_collection_name,
restore_point_name=restore_point_name,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("RestorePoint", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/restorePointCollections/{restorePointCollectionName}/restorePoints/{restorePointName}"} # type: ignore
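    # Usage sketch (illustrative; resource names are placeholders): fetch a
    # restore point with its instance view expanded:
    #   rp = await client.restore_points.get(
    #       "my-rg", "my-rp-collection", "my-rp", expand="instanceView")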
|
{
"content_hash": "f985b5d9939b8b87a8d28f1adf47d845",
"timestamp": "",
"source": "github",
"line_count": 456,
"max_line_length": 238,
"avg_line_length": 49.2828947368421,
"alnum_prop": 0.6605259644907222,
"repo_name": "Azure/azure-sdk-for-python",
"id": "1ffec2c1d4e69bda8477da8a1f791d82a73a5aa7",
"size": "22973",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2022_08_01/aio/operations/_restore_points_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
import pkg_resources
from google.cloud.compute_v1.types import compute
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-compute",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class GlobalOrganizationOperationsTransport(abc.ABC):
"""Abstract transport class for GlobalOrganizationOperations."""
AUTH_SCOPES = (
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
)
DEFAULT_HOST: str = "compute.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
        # Don't apply the audience if the credentials file was passed by the user.
if hasattr(credentials, "with_gdch_audience"):
credentials = credentials.with_gdch_audience(
api_audience if api_audience else host
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.delete: gapic_v1.method.wrap_method(
self.delete,
default_timeout=None,
client_info=client_info,
),
self.get: gapic_v1.method.wrap_method(
self.get,
default_timeout=None,
client_info=client_info,
),
self.list: gapic_v1.method.wrap_method(
self.list,
default_timeout=None,
client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def delete(
self,
) -> Callable[
[compute.DeleteGlobalOrganizationOperationRequest],
Union[
compute.DeleteGlobalOrganizationOperationResponse,
Awaitable[compute.DeleteGlobalOrganizationOperationResponse],
],
]:
raise NotImplementedError()
@property
def get(
self,
) -> Callable[
[compute.GetGlobalOrganizationOperationRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def list(
self,
) -> Callable[
[compute.ListGlobalOrganizationOperationsRequest],
Union[compute.OperationList, Awaitable[compute.OperationList]],
]:
raise NotImplementedError()
@property
def kind(self) -> str:
raise NotImplementedError()
__all__ = ("GlobalOrganizationOperationsTransport",)
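# Illustrative sketch (not part of the generated file): a concrete transport
# subclasses this base and returns real callables from the abstract
# properties, for example:
#   class MyTransport(GlobalOrganizationOperationsTransport):
#       @property
#       def get(self):
#           return self._do_get  # assumed helper issuing the actual request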
|
{
"content_hash": "3ec5a342d116950a8198bdeb5be15ccf",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 101,
"avg_line_length": 36.41340782122905,
"alnum_prop": 0.6153728137465481,
"repo_name": "googleapis/python-compute",
"id": "99eef4a3a9399392a49f16142dcb5d8223d07c86",
"size": "7118",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/cloud/compute_v1/services/global_organization_operations/transports/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "32681847"
},
{
"name": "Shell",
"bytes": "30663"
}
],
"symlink_target": ""
}
|
import sys
import logging
from django.conf import settings
class NullHandler(logging.Handler):
def emit(self, record):
pass
h = NullHandler()
_logger = logging.getLogger("south")
_logger.addHandler(h)
_logger.setLevel(logging.DEBUG)
# TODO: Add a log formatter?
def get_logger():
debug_on = getattr(settings, "SOUTH_LOGGING_ON", False)
logging_file = getattr(settings, "SOUTH_LOGGING_FILE", False)
if debug_on:
if logging_file:
            _logger.addHandler(logging.FileHandler(logging_file))
            _logger.setLevel(logging.DEBUG)
        else:
            raise IOError("SOUTH_LOGGING_ON is True. You also need a SOUTH_LOGGING_FILE setting.")
return _logger
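# Usage sketch (illustrative; '/tmp/south.log' is a placeholder path):
#   # settings.py: SOUTH_LOGGING_ON = True, SOUTH_LOGGING_FILE = '/tmp/south.log'
#   from south.logger import get_logger
#   get_logger().debug("executed migration step")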
|
{
"content_hash": "09b7d91a0cffc128bbe24972c82b4550",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 98,
"avg_line_length": 27.423076923076923,
"alnum_prop": 0.6718092566619915,
"repo_name": "wahaha02/myblog",
"id": "052236aa1924fd91f72859b61db5257a0d235c50",
"size": "713",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "south/logger.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('properties.views',
url(r'^$', 'index'),
#url(r'^properties/(?P<properties_id>\d+)/$', 'properties.views.detail')
)
|
{
"content_hash": "f3221e0b27f3dfdef1959883bfa3b77a",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 77,
"avg_line_length": 29,
"alnum_prop": 0.6600985221674877,
"repo_name": "khawley/Town_Country",
"id": "41b02448029b421345f3518ba61c30c9e3bd381e",
"size": "203",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "properties/urls (Karin-chan-Mac's conflicted copy 2012-09-03).py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "75416"
},
{
"name": "Python",
"bytes": "25561"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from securedpi_events.models import Event
admin.site.register(Event)
|
{
"content_hash": "83c2ed5f19fe9bdf805f4c08dff5c3e9",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 41,
"avg_line_length": 25.75,
"alnum_prop": 0.8349514563106796,
"repo_name": "Secured-Pi/Secured-Pi",
"id": "6c292beb5c5fb91bb6654038551aec03e49a2c73",
"size": "103",
"binary": false,
"copies": "1",
"ref": "refs/heads/staging",
"path": "securedpi_events/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "95"
},
{
"name": "HTML",
"bytes": "25485"
},
{
"name": "Python",
"bytes": "79801"
}
],
"symlink_target": ""
}
|
"""
$description Japanese live streaming and video hosting platform for live video game broadcasts and individual live streams.
$url mildom.com
$type live, vod
"""
import logging
import re
from time import time
from uuid import uuid4
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
from streamlink.utils.url import url_concat
log = logging.getLogger(__name__)
class MildomHLSStream(HLSStream):
__shortname__ = "hls-mildom"
expiry_time = 60 * 120
def __init__(self, session_, api, server, token, quality, **args):
self.session = session_
self.api = api
self.server = server
self.token = token
self.quality = quality
self._url = self.build_hls_url()
super().__init__(self.session, self._url, **args)
self.expiry = time()
def build_hls_url(self):
if not self.server or not self.token:
raise ValueError("server and token must be set")
return url_concat(self.server, f"{self.api.channel_id}{self.quality}.m3u8?{self.token}")
@property
def url(self):
if time() - self.expiry > MildomHLSStream.expiry_time:
self.expiry = time()
self.token = self.api.get_token()
self._url = self.build_hls_url()
log.debug("Updated HLS playlist URL query string")
return self._url
class MildomAPI:
def __init__(self, session, channel_id=None, video_id=None):
self.session = session
self.channel_id = channel_id
self.video_id = video_id
def _is_api_error(self, data):
log.trace(f"{data!r}")
if data["code"] != 0:
log.debug(data.get("message", "Mildom API returned an error"))
return True
return False
def get_vod_streams_data(self):
if not self.video_id:
return
data = self.session.http.get(
"https://cloudac.mildom.com/nonolive/videocontent/playback/getPlaybackDetail",
params={
"__platform": "web",
"v_id": self.video_id,
},
schema=validate.Schema(validate.parse_json(), {
"code": int,
validate.optional("message"): str,
validate.optional("body"): {
"playback": {
"video_link": [{"name": str, "url": validate.url()}],
},
},
})
)
if self._is_api_error(data):
return
if data.get("body"):
return data["body"]["playback"]["video_link"]
def get_token(self):
if not self.channel_id:
return
data = self.session.http.post(
"https://cloudac.mildom.com/nonolive/gappserv/live/token",
params={
"__platform": "web",
"__guest_id": "pc-gp-{}".format(uuid4()),
},
headers={"Accept-Language": "en"},
json={"host_id": self.channel_id, "type": "hls"},
schema=validate.Schema(
validate.parse_json(),
{
"code": int,
validate.optional("message"): str,
validate.optional("body"): {
"data": [
{"token": str, }
],
}
}
)
)
if self._is_api_error(data):
return
if data.get("body"):
return data["body"]["data"][0]["token"]
def get_server(self):
if not self.channel_id:
return
data = self.session.http.get(
"https://cloudac.mildom.com/nonolive/gappserv/live/liveserver",
params={
"__platform": "web",
"user_id": self.channel_id,
"live_server_type": "hls",
},
headers={"Accept-Language": "en"},
schema=validate.Schema(
validate.parse_json(),
{
"code": int,
validate.optional("message"): str,
validate.optional("body"): {
"stream_server": validate.url(),
}
}
)
)
if self._is_api_error(data):
return
if data.get("body"):
return data["body"]["stream_server"]
def get_live_streams_data(self):
if not self.channel_id:
return
data = self.session.http.get(
"https://cloudac.mildom.com/nonolive/gappserv/live/enterstudio",
params={
"__platform": "web",
"user_id": self.channel_id,
},
headers={"Accept-Language": "en"},
schema=validate.Schema(
validate.parse_json(),
{
"code": int,
validate.optional("message"): str,
validate.optional("body"): {
validate.optional("status"): int,
"anchor_live": int,
validate.optional("live_type"): int,
"ext": {
"cmode_params": [{
"cmode": str,
"name": str,
}],
validate.optional("live_mode"): int,
},
},
},
)
)
if self._is_api_error(data):
return
if data.get("body"):
return data["body"]
@pluginmatcher(re.compile(r"""
https?://(?:www\.)?mildom\.com/
(?:
playback/(\d+)(/(?P<video_id>(\d+)-(\w+)))
|
(?P<channel_id>\d+)
)
""", re.VERBOSE))
class Mildom(Plugin):
def _get_streams(self):
api = MildomAPI(self.session, channel_id=self.match.group("channel_id"), video_id=self.match.group("video_id"))
if api.video_id:
data = api.get_vod_streams_data()
if data:
for stream in data:
yield stream["name"], HLSStream(self.session, stream["url"])
else:
data = api.get_live_streams_data()
if not data:
return
if data["anchor_live"] != 11:
log.debug("User doesn't appear to be live")
return
qualities = []
for quality_info in data["ext"]["cmode_params"]:
qualities.append((quality_info["name"], "_" + quality_info["cmode"] if quality_info["cmode"] != "raw" else ""))
server = api.get_server()
token = api.get_token()
self.session.http.headers.update({"Referer": "https://www.mildom.com/"})
for quality in qualities:
yield quality[0], MildomHLSStream(self.session, api, server, token, quality[1])
__plugin__ = Mildom
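# Usage sketch (illustrative; the channel id is a placeholder): streamlink
# selects this plugin from the URL, e.g.
#   streamlink https://www.mildom.com/10000000 best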
|
{
"content_hash": "5e7247cba3a386e04cb01b9a8c695aa6",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 127,
"avg_line_length": 33.0046511627907,
"alnum_prop": 0.4785794813979707,
"repo_name": "gravyboat/streamlink",
"id": "84ac311e00da1f99a25542221014c594655e5514",
"size": "7096",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/streamlink/plugins/mildom.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1392475"
},
{
"name": "Shell",
"bytes": "6280"
}
],
"symlink_target": ""
}
|
import mock_pydbus
# General Bluez D-Bus Object Paths
#: BlueZ DBus Service Name
BLUEZ_SERVICE_NAME = 'org.bluez'
#: BlueZ DBus adapter interface
ADAPTER_INTERFACE = 'org.bluez.Adapter1'
#: BlueZ DBus device Interface
DEVICE_INTERFACE = 'org.bluez.Device1'
# Bluez GATT D-Bus Object Paths
#: BlueZ DBus GATT manager Interface
GATT_MANAGER_IFACE = 'org.bluez.GattManager1'
#: BlueZ DBus GATT Profile Interface
GATT_PROFILE_IFACE = 'org.bluez.GattProfile1'
#: BlueZ DBus GATT Service Interface
GATT_SERVICE_IFACE = 'org.bluez.GattService1'
#: BlueZ DBus GATT Characteristic Interface
GATT_CHRC_IFACE = 'org.bluez.GattCharacteristic1'
#: BlueZ DBus GATT Descriptor Interface
GATT_DESC_IFACE = 'org.bluez.GattDescriptor1'
class InvalidSearch(Exception):
pass
def _get_dbus_path(mngrd_objs, parent_path, iface, prop, value):
    rtrn_val = None
    for d_path in mngrd_objs:
        if parent_path in d_path:
            if iface in mngrd_objs[d_path]:
                # Substring match on the property value, case-insensitive.
                if value.lower() in mngrd_objs[d_path][iface][prop].lower():
                    rtrn_val = d_path
    return rtrn_val
def _get_dbus_path2(objects, parent_path, iface_in, prop, value):
if parent_path is None:
raise InvalidSearch
for path, iface in objects.items():
props = iface.get(iface_in)
if props is None:
continue
if props[prop].lower() == value.lower() and path.startswith(parent_path):
return path
def get_dbus_path(adapter=None,
device=None,
service=None,
characteristic=None,
descriptor=None):
mngd_objs = mock_pydbus.GetManagedObjects()
_dbus_obj_path = None
if adapter is not None:
_dbus_obj_path = _get_dbus_path2(mngd_objs,
'/org/bluez',
ADAPTER_INTERFACE,
'Address',
adapter)
if device is not None:
_dbus_obj_path = _get_dbus_path2(mngd_objs,
_dbus_obj_path,
DEVICE_INTERFACE,
'Address',
device)
if service is not None:
_dbus_obj_path = _get_dbus_path2(mngd_objs,
_dbus_obj_path,
GATT_SERVICE_IFACE,
'UUID',
service)
if characteristic is not None:
_dbus_obj_path = _get_dbus_path2(mngd_objs,
_dbus_obj_path,
GATT_CHRC_IFACE,
'UUID',
characteristic)
if descriptor is not None:
_dbus_obj_path = _get_dbus_path2(mngd_objs,
_dbus_obj_path,
GATT_DESC_IFACE,
'UUID',
descriptor)
return _dbus_obj_path
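# Usage sketch (illustrative; the addresses and UUID are placeholders):
#   path = get_dbus_path(adapter='00:01:02:03:04:05',
#                        device='AA:BB:CC:DD:EE:FF',
#                        service='0000180f-0000-1000-8000-00805f9b34fb')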
|
{
"content_hash": "297142ce38e571c1d4834a4fa5f78e59",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 81,
"avg_line_length": 34.787234042553195,
"alnum_prop": 0.4920489296636086,
"repo_name": "campug/bzero_kata",
"id": "43a6b480b9dc63efb3db02b87362079707cd6a2b",
"size": "3270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "solutions/bluezero.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "78098"
}
],
"symlink_target": ""
}
|
from Component import *
import ComponentTypeConstants
import ComponentCategories
import ComponentHeaders
class WaitComponent(Component):
name = 'Wait'
type = ComponentTypeConstants.METHOD
category = ComponentCategories.SYSTEM
headers = [ComponentHeaders.SYSTEM]
    args = []  # array of tuples: (argument name, component name)
returnType = ComponentTypeConstants.NONE
def parse(self, args):
return 'system("PAUSE");'
|
{
"content_hash": "7c048e5277dc37c86f086fa9fe5ba024",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 59,
"avg_line_length": 27.9375,
"alnum_prop": 0.7404921700223713,
"repo_name": "Vishwas-Adiga/Bloks",
"id": "95bb7d1d7a1f4c1fb034a5464ef507074be347f3",
"size": "447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/WaitComponent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "99"
},
{
"name": "C",
"bytes": "2064928"
},
{
"name": "C++",
"bytes": "4083654"
},
{
"name": "Python",
"bytes": "50080"
}
],
"symlink_target": ""
}
|
abc = 'abcdefghijklmnopqrstuvwxyz'
def cifrar(cadena, clave):
    # Caesar-encrypt: shift each letter `clave` positions, wrapping around.
    text_cifrado = ''
    for letra in cadena:
        suma = abc.find(letra) + clave
        modulo = suma % len(abc)
        text_cifrado = text_cifrado + abc[modulo]
    return text_cifrado
def decifrar(cadena, clave):
    # Caesar-decrypt: shift each letter back `clave` positions.
    text_cifrado = ''
    for letra in cadena:
        suma = abc.find(letra) - clave
        modulo = suma % len(abc)
        text_cifrado = text_cifrado + abc[modulo]
    return text_cifrado
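# Example: cifrar('hola', 3) -> 'krod'; decifrar('krod', 3) -> 'hola'.
# Characters outside `abc` are not handled: abc.find() returns -1 for
# them, so they silently map to a shifted letter instead of erroring.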
def main():
    c = str(raw_input('text to encrypt: ')).lower()
    n = int(raw_input('numeric key: '))
    print cifrar(c, n)
    cc = str(raw_input('text to decrypt: ')).lower()
    cn = int(raw_input('numeric key: '))
    print decifrar(cc, cn)
if __name__ == '__main__':
main()
|
{
"content_hash": "c398f1d31a5b34e2dd4ea5ef089793dd",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 54,
"avg_line_length": 23.405405405405407,
"alnum_prop": 0.5866050808314087,
"repo_name": "pesinasiller/classic-crypto",
"id": "ef61deae322aaeb3c324de002a4f88ad63d586ab",
"size": "1063",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ceasar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5614"
}
],
"symlink_target": ""
}
|
from AppKit import NSAlternateKeyMask, NSApplication, NSBundle, NSLog, \
NSMenuItem, NSUserDefaults
import objc
import re
def Category(classname):
return objc.Category(objc.lookUpClass(classname))
def Class(classname):
return objc.lookUpClass(classname)
def flow(text, width = 77):
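    """Re-wrap one line into RFC 3676 format=flowed lines.

    Soft-wrapped lines are emitted with a trailing space so capable
    clients can reflow them; quote prefixes ('>') carry onto
    continuation lines, and lines starting with a space or 'From '
    are space-stuffed. Signature separators ('-- ') pass through.
    """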
quote, indent = re.match(r'(>+ ?|)(\s*)', text, re.UNICODE).groups()
prefix = len(quote)
if text[prefix:] == u'-- ':
return [text]
text = text.rstrip(u' ')
if not quote:
if indent.startswith(u' ') or text.startswith(u'From '):
text = u' ' + text
if indent or len(text) <= width:
return [text]
matches = re.finditer(r'\S+\s*(?=\S|$)', text[prefix:], re.UNICODE)
breaks, lines = [match.end() + prefix for match in matches], []
while True:
for index, cursor in enumerate(breaks[1:]):
if len(text[:cursor].expandtabs()) >= width:
cursor = breaks[index]
break
else:
lines.append(text)
return lines
lines.append(text[:cursor] + u' ')
if not quote and text[cursor:].startswith(u'From '):
text, cursor = u' ' + text[cursor:], cursor - 1
else:
text, cursor = quote + text[cursor:], cursor - prefix
breaks = [offset - cursor for offset in breaks[index + 1:]]
def swizzle(classname, selector):
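    """Swizzle `selector` on the Objective-C class `classname`: the
    decorated function replaces the method and receives the original
    implementation as its second argument.
    """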
def decorator(function):
cls = objc.lookUpClass(classname)
try:
old = cls.instanceMethodForSelector_(selector)
if old.isClassMethod:
old = cls.methodForSelector_(selector)
except:
return None
def wrapper(self, *args, **kwargs):
return function(self, old, *args, **kwargs)
new = objc.selector(wrapper, selector = old.selector,
signature = old.signature,
isClassMethod = old.isClassMethod)
objc.classAddMethod(cls, selector, new)
return wrapper
return decorator
class ComposeViewController(Category('ComposeViewController')):
@swizzle('ComposeViewController', b'_finishLoadingEditor')
def _finishLoadingEditor(self, old):
result = old(self)
if self.messageType() not in [1, 2, 3, 8]:
return result
view = self.composeWebView()
document = view.mainFrame().DOMDocument()
view.contentElement().removeStrayLinefeeds()
blockquotes = document.getElementsByTagName_('BLOCKQUOTE')
for index in range(blockquotes.length()):
if blockquotes.item_(index):
blockquotes.item_(index).removeStrayLinefeeds()
if self.messageType() in [1, 2, 8]:
view.moveToBeginningOfDocument_(None)
view.moveToEndOfParagraphAndModifySelection_(None)
view.moveForwardAndModifySelection_(None)
item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(
'Decrease', 'changeQuoteLevel:', '')
item.setTag_(-1)
view.changeQuoteLevel_(item)
if self._fixAttribution:
attribution = view.selectedDOMRange().stringValue()
attribution = attribution.split(u',', 2)[-1].lstrip()
if view.isAutomaticTextReplacementEnabled():
view.setAutomaticTextReplacementEnabled_(False)
view.insertText_(attribution)
view.setAutomaticTextReplacementEnabled_(True)
else:
view.insertText_(attribution)
signature = document.getElementById_('AppleMailSignature')
if signature:
domrange = document.createRange()
domrange.selectNode_(signature)
view.setSelectedDOMRange_affinity_(domrange, 0)
view.moveUp_(None)
else:
view.moveToEndOfDocument_(None)
view.insertParagraphSeparator_(None)
if self.messageType() == 3:
for index in range(blockquotes.length()):
blockquote = blockquotes.item_(index)
if blockquote.quoteLevel() == 1:
blockquote.parentNode().insertBefore__(
document.createElement_('BR'), blockquote)
view.insertParagraphSeparator_(None)
view.undoManager().removeAllActions()
self.setHasUserMadeChanges_(False)
self.backEnd().setHasChanges_(False)
return result
@swizzle('ComposeViewController', b'show')
def show(self, old):
result = old(self)
if self.messageType() in [1, 2, 8]:
view = self.composeWebView()
document = view.mainFrame().DOMDocument()
signature = document.getElementById_('AppleMailSignature')
if signature:
domrange = document.createRange()
domrange.selectNode_(signature)
view.setSelectedDOMRange_affinity_(domrange, 0)
view.moveUp_(None)
else:
view.moveToEndOfDocument_(None)
return result
class EditingMessageWebView(Category('EditingMessageWebView')):
@swizzle('EditingMessageWebView', b'decreaseIndentation:')
def decreaseIndentation_(self, original, sender, indent = 2):
if self.contentElement().className() != 'ApplePlainTextBody':
return original(self, sender)
self.undoManager().beginUndoGrouping()
affinity = self.selectionAffinity()
selection = self.selectedDOMRange()
self.moveToBeginningOfParagraph_(None)
if selection.collapsed():
for _ in range(indent):
self.moveForwardAndModifySelection_(None)
text = self.selectedDOMRange().stringValue() or ''
if re.match(u'[ \xa0]{%d}' % indent, text, re.UNICODE):
self.deleteBackward_(None)
else:
while selection.compareBoundaryPoints__(1, # START_TO_END
self.selectedDOMRange()) > 0:
for _ in range(indent):
self.moveForwardAndModifySelection_(None)
text = self.selectedDOMRange().stringValue() or ''
if re.match(u'[ \xa0]{%d}' % indent, text, re.UNICODE):
self.deleteBackward_(None)
else:
self.moveBackward_(None)
self.moveToEndOfParagraph_(None)
self.moveForward_(None)
self.setSelectedDOMRange_affinity_(selection, affinity)
self.undoManager().endUndoGrouping()
@swizzle('EditingMessageWebView', b'increaseIndentation:')
def increaseIndentation_(self, original, sender, indent = 2):
if self.contentElement().className() != 'ApplePlainTextBody':
return original(self, sender)
self.undoManager().beginUndoGrouping()
affinity = self.selectionAffinity()
selection = self.selectedDOMRange()
if selection.collapsed():
position = self.selectedRange().location
self.moveToBeginningOfParagraph_(None)
position -= self.selectedRange().location
            self.insertText_(indent * u'\xa0')
for _ in range(position):
self.moveForward_(None)
else:
self.moveToBeginningOfParagraph_(None)
while selection.compareBoundaryPoints__(1, # START_TO_END
self.selectedDOMRange()) > 0:
self.moveToEndOfParagraphAndModifySelection_(None)
if not self.selectedDOMRange().collapsed():
self.moveToBeginningOfParagraph_(None)
                    self.insertText_(indent * u'\xa0')
self.moveToEndOfParagraph_(None)
self.moveForward_(None)
self.setSelectedDOMRange_affinity_(selection, affinity)
self.undoManager().endUndoGrouping()
class MCMessage(Category('MCMessage')):
@swizzle('MCMessage', b'forwardedMessagePrefixWithSpacer:')
def forwardedMessagePrefixWithSpacer_(self, old, *args):
return u''
class MCMessageGenerator(Category('MCMessageGenerator')):
@swizzle('MCMessageGenerator', b'_encodeDataForMimePart:withPartData:')
def _encodeDataForMimePart_withPartData_(self, old, part, data):
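        """Use 7bit/8bit transfer encoding for text/plain parts whose
        lines fit within the RFC 5322 998-byte limit, instead of the
        encoding the original implementation would choose."""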
if part.type() != 'text' or part.subtype() != 'plain':
return old(self, part, data)
text = bytes(data.objectForKey_(part))
if any(len(line) > 998 for line in text.splitlines()):
return old(self, part, data)
try:
text.decode('ascii')
part.setContentTransferEncoding_('7bit')
except UnicodeDecodeError:
part.setContentTransferEncoding_('8bit')
return True
@swizzle('MCMessageGenerator',
b'_newPlainTextPartWithAttributedString:partData:')
def _newPlainTextPartWithAttributedString_partData_(self, old, *args):
event = NSApplication.sharedApplication().currentEvent()
result = old(self, *args)
width = self._flowWidth
if not result or width <= 0:
return result
if event and event.modifierFlags() & NSAlternateKeyMask:
return result
charset = result.bodyParameterForKey_('charset') or 'utf-8'
data = args[1].objectForKey_(result)
lines = bytes(data).decode(charset).split('\n')
lines = [line for text in lines for line in flow(text, width)]
encoded = u'\n'.join(lines).encode(charset)
try:
data.setData_(buffer(encoded))
except:
data.setData_(encoded)
result.setBodyParameter_forKey_('yes', 'delsp')
result.setBodyParameter_forKey_('flowed', 'format')
return result
class MCMimePart(Category('MCMimePart')):
@swizzle('MCMimePart', b'_decodeText')
def _decodeText(self, old):
result = old(self)
if result.startswith(u' '):
            result = u'\xa0' + result[1:]
        return result.replace(u'<BR> ', u'<BR>\xa0')
class MessageViewController(Category('MessageViewController')):
@swizzle('MessageViewController', b'forward:')
def forward_(self, old, *args):
event = NSApplication.sharedApplication().currentEvent()
if event and event.modifierFlags() & NSAlternateKeyMask:
return old(self, *args)
return self._messageViewer().forwardAsAttachment_(*args)
class MessageViewer(Category('MessageViewer')):
@swizzle('MessageViewer', b'forwardMessage:')
def forwardMessage_(self, old, *args):
event = NSApplication.sharedApplication().currentEvent()
if event and event.modifierFlags() & NSAlternateKeyMask:
return old(self, *args)
return self.forwardAsAttachment_(*args)
class SingleMessageViewer(Category('SingleMessageViewer')):
@swizzle('SingleMessageViewer', b'forwardMessage:')
def forwardMessage_(self, old, *args):
event = NSApplication.sharedApplication().currentEvent()
if event and event.modifierFlags() & NSAlternateKeyMask:
return old(self, *args)
return self.forwardAsAttachment_(*args)
class MailFlow(Class('MVMailBundle')):
@classmethod
def initialize(self):
bundle = NSBundle.bundleWithIdentifier_('uk.me.cdw.MailFlow')
self.registerBundle()
defaults = NSUserDefaults.standardUserDefaults()
defaults = defaults.dictionaryForKey_('MailFlow') or {}
ComposeViewController._fixAttribution = defaults.get('FixAttribution', True)
MCMessageGenerator._flowWidth = int(defaults.get('FlowWidth', 76))
version = bundle.objectForInfoDictionaryKey_('CFBundleVersion')
NSLog('Loaded MailFlow %s' % version)
|
{
"content_hash": "a8964ac651b358239c85bd16c54267bf",
"timestamp": "",
"source": "github",
"line_count": 299,
"max_line_length": 84,
"avg_line_length": 39.39799331103679,
"alnum_prop": 0.6097623089983022,
"repo_name": "arachsys/mailflow",
"id": "ae1f91e27075856505513b80292f727420e9d0f1",
"size": "11780",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MailFlow.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15298"
}
],
"symlink_target": ""
}
|
import os
DEBUG = True
TEMPLATES_AUTO_RELOAD = True
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
|
{
"content_hash": "21d6739f5456a3970452f36d8b305bd3",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 52,
"avg_line_length": 21.2,
"alnum_prop": 0.7641509433962265,
"repo_name": "afh/yabab",
"id": "fcbc5a0d7d18a40371208107f584c12e1220a1a8",
"size": "106",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app_conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10774"
}
],
"symlink_target": ""
}
|
from click.testing import CliRunner
from ichnaea.scripts.sentry_test import sentry_test_group
def test_basic():
"""Test that the command imports and runs at all."""
runner = CliRunner()
result = runner.invoke(sentry_test_group)
assert result.exit_code == 0
|
{
"content_hash": "ba5ea844e632db40301a42911b08cd66",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 57,
"avg_line_length": 27.6,
"alnum_prop": 0.717391304347826,
"repo_name": "mozilla/ichnaea",
"id": "667d3ef9f2a03b647e672ab7de9b252972cf6bbc",
"size": "276",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "ichnaea/scripts/tests/test_sentry_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "34767"
},
{
"name": "Cython",
"bytes": "16678"
},
{
"name": "Dockerfile",
"bytes": "2819"
},
{
"name": "HTML",
"bytes": "32679"
},
{
"name": "JavaScript",
"bytes": "139102"
},
{
"name": "Makefile",
"bytes": "11673"
},
{
"name": "Mako",
"bytes": "432"
},
{
"name": "Python",
"bytes": "1007139"
},
{
"name": "Shell",
"bytes": "8899"
}
],
"symlink_target": ""
}
|
import sys, os, getopt, shutil, py_compile, subprocess
OPTIONLIST = [
("dir", 1, "Name of directory containing game"),
("name", 1, "Human-readable name of the game"),
("version", 1, "Version number to add to game name"),
("rmdir", 2, "Delete all directories with given name"),
("rmext", 2, "Delete all files with given extension"),
("fast", 0, "Use fast compression instead of good compression"),
("bam", 0, "Generate BAM files, change default-model-extension to BAM"),
("pyc", 0, "Generate PYC files"),
]
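# Example invocation (hypothetical game directory and options):
#   packpanda --dir /home/me/MyGame --name "My Game" --version 1.0 --bam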
def ParseFailure():
print("")
print("packpanda usage:")
print("")
for (opt, hasval, explanation) in OPTIONLIST:
if (hasval):
print(" --%-10s %s"%(opt+" x", explanation))
else:
print(" --%-10s %s"%(opt+" ", explanation))
sys.exit(1)
def ParseOptions(args):
try:
options = {}
longopts = []
for (opt, hasval, explanation) in OPTIONLIST:
if (hasval==2):
longopts.append(opt+"=")
options[opt] = []
elif (hasval==1):
longopts.append(opt+"=")
options[opt] = ""
else:
longopts.append(opt)
options[opt] = 0
opts, extras = getopt.getopt(args, "", longopts)
for option, value in opts:
for (opt, hasval, explanation) in OPTIONLIST:
if (option == "--"+opt):
if (hasval==2): options[opt].append(value)
elif (hasval==1): options[opt] = value
else: options[opt] = 1
return options
    except getopt.GetoptError:
        ParseFailure()
OPTIONS = ParseOptions(sys.argv[1:])
##############################################################################
#
# Locate the relevant trees.
#
##############################################################################
PANDA=None
for dir in sys.path:
if (dir != "") and os.path.exists(os.path.join(dir,"direct")) and os.path.exists(os.path.join(dir,"pandac")):
PANDA=os.path.abspath(dir)
if (PANDA is None):
sys.exit("Cannot locate the panda root directory in the python path (cannot locate directory containing direct and pandac).")
print("PANDA located at "+PANDA)
if (os.path.exists(os.path.join(PANDA,"..","makepanda","makepanda.py"))) and (sys.platform != "win32" or os.path.exists(os.path.join(PANDA,"..","thirdparty","win-nsis","makensis.exe"))):
PSOURCE=os.path.abspath(os.path.join(PANDA,".."))
if (sys.platform == "win32"):
NSIS=os.path.abspath(os.path.join(PANDA,"..","thirdparty","win-nsis"))
else:
PSOURCE=PANDA
if (sys.platform == "win32"):
NSIS=os.path.join(PANDA,"nsis")
##############################################################################
#
# Identify the main parts of the game: DIR, NAME, MAIN, ICON, BITMAP, etc
#
##############################################################################
VER=OPTIONS["version"]
DIR=OPTIONS["dir"]
if (DIR==""):
print("You must specify the --dir option.")
ParseFailure()
DIR=os.path.abspath(DIR)
MYDIR=os.path.abspath(os.getcwd())
BASENAME=os.path.basename(DIR)
if (OPTIONS["name"] != ""):
NAME=OPTIONS["name"]
else:
NAME=BASENAME
SMDIRECTORY=NAME
if (VER!=""): SMDIRECTORY=SMDIRECTORY+" "+VER
PYTHONV="python"+sys.version[:3]
LICENSE=os.path.join(DIR, "license.txt")
OUTFILE=os.path.basename(DIR)
if (VER!=""): OUTFILE=OUTFILE+"-"+VER
if (sys.platform == "win32"):
ICON=os.path.join(DIR, "icon.ico")
BITMAP=os.path.join(DIR, "installer.bmp")
OUTFILE=os.path.abspath(OUTFILE+".exe")
INSTALLDIR='C:\\'+os.path.basename(DIR)
if (VER!=""): INSTALLDIR=INSTALLDIR+"-"+VER
COMPRESS="lzma"
if (OPTIONS["fast"]): COMPRESS="zlib"
if (OPTIONS["pyc"]): MAIN="main.pyc"
else: MAIN="main.py"
def PrintFileStatus(label, file):
if (os.path.exists(file)):
print("%-15s: %s"%(label, file))
else:
print("%-15s: %s (MISSING)"%(label, file))
PrintFileStatus("Dir", DIR)
print("%-15s: %s"%("Name", NAME))
print("%-15s: %s"%("Start Menu", SMDIRECTORY))
PrintFileStatus("Main", os.path.join(DIR, MAIN))
if (sys.platform == "win32"):
PrintFileStatus("Icon", ICON)
PrintFileStatus("Bitmap", BITMAP)
PrintFileStatus("License", LICENSE)
print("%-15s: %s"%("Output", OUTFILE))
if (sys.platform == "win32"):
print("%-15s: %s"%("Install Dir", INSTALLDIR))
if (os.path.isdir(DIR)==0):
sys.exit("Difficulty reading "+DIR+". Cannot continue.")
if (os.path.isfile(os.path.join(DIR, "main.py"))==0):
sys.exit("Difficulty reading main.py. Cannot continue.")
if (os.path.isfile(LICENSE)==0):
LICENSE=os.path.join(PANDA,"LICENSE")
if (sys.platform == "win32") and (os.path.isfile(BITMAP)==0):
BITMAP=os.path.join(NSIS,"Contrib","Graphics","Wizard","nsis.bmp")
if (sys.platform == "win32"):
if (os.path.isfile(ICON)==0):
PPICON="bin\\ppython.exe"
else:
PPICON="game\\icon.ico"
##############################################################################
#
# Copy the game to a temporary directory, so we can modify it safely.
#
##############################################################################
def limitedCopyTree(src, dst, rmdir):
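    """Recursively copy src to dst, skipping any directory whose
    basename appears in the `rmdir` mapping."""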
if (os.path.isdir(src)):
if (os.path.basename(src) in rmdir):
return
if (not os.path.isdir(dst)): os.mkdir(dst)
for x in os.listdir(src):
limitedCopyTree(os.path.join(src,x), os.path.join(dst,x), rmdir)
else:
shutil.copyfile(src, dst)
TMPDIR=os.path.abspath("packpanda-TMP")
if (sys.platform == "win32"):
TMPGAME=os.path.join(TMPDIR,"game")
TMPETC=os.path.join(TMPDIR,"etc")
else:
TMPGAME=os.path.join(TMPDIR,"usr","share","games",BASENAME,"game")
TMPETC=os.path.join(TMPDIR,"usr","share","games",BASENAME,"etc")
print("")
print("Copying the game to "+TMPDIR+"...")
if (os.path.exists(TMPDIR)):
try: shutil.rmtree(TMPDIR)
except: sys.exit("Cannot delete "+TMPDIR)
try:
os.mkdir(TMPDIR)
rmdir = {}
for x in OPTIONS["rmdir"]:
rmdir[x] = 1
if not os.path.isdir( TMPGAME ):
os.makedirs(TMPGAME)
limitedCopyTree(DIR, TMPGAME, rmdir)
if not os.path.isdir( TMPETC ):
os.makedirs(TMPETC)
if sys.platform == "win32":
limitedCopyTree(os.path.join(PANDA, "etc"), TMPETC, {})
else:
shutil.copyfile("/etc/Config.prc", os.path.join(TMPETC, "Config.prc"))
shutil.copyfile("/etc/Confauto.prc", os.path.join(TMPETC, "Confauto.prc"))
except: sys.exit("Cannot copy game to "+TMPDIR)
##############################################################################
#
# If --bam requested, change default-model-extension .egg to bam.
#
##############################################################################
def ReadFile(wfile):
try:
srchandle = open(wfile, "rb")
data = srchandle.read()
srchandle.close()
return data
except: exit("Cannot read "+wfile)
def WriteFile(wfile,data):
try:
dsthandle = open(wfile, "wb")
dsthandle.write(data)
dsthandle.close()
except: exit("Cannot write "+wfile)
if OPTIONS["bam"]:
CONF=ReadFile(os.path.join(TMPETC,"Confauto.prc"))
CONF=CONF.replace("default-model-extension .egg","default-model-extension .bam")
WriteFile(os.path.join(TMPETC,"Confauto.prc"), CONF)
##############################################################################
#
# Compile all py files, convert all egg files.
#
# We do this as a sanity check, even if the user
# hasn't requested that his files be compiled.
#
##############################################################################
if (sys.platform == "win32"):
EGG2BAM=os.path.join(PANDA,"bin","egg2bam.exe")
else:
EGG2BAM=os.path.join(PANDA,"bin","egg2bam")
def egg2bam(file,bam):
present = os.path.exists(bam)
    if (present): bam = "packpanda-TMP.bam"
cmd = 'egg2bam -noabs -ps rel -pd . "'+file+'" -o "'+bam+'"'
print("Executing: "+cmd)
if (sys.platform == "win32"):
res = os.spawnl(os.P_WAIT, EGG2BAM, cmd)
else:
res = os.system(cmd)
if (res != 0): sys.exit("Problem in egg file: "+file)
if (present) or (OPTIONS["bam"]==0):
os.unlink(bam)
def py2pyc(file):
print("Compiling python "+file)
pyc = file[:-3]+'.pyc'
pyo = file[:-3]+'.pyo'
if (os.path.exists(pyc)): os.unlink(pyc)
if (os.path.exists(pyo)): os.unlink(pyo)
try: py_compile.compile(file)
except: sys.exit("Cannot compile "+file)
if (OPTIONS["pyc"]==0):
if (os.path.exists(pyc)):
os.unlink(pyc)
if (os.path.exists(pyo)):
os.unlink(pyo)
def CompileFiles(file):
if (os.path.isfile(file)):
if (file.endswith(".egg")):
egg2bam(file, file[:-4]+'.bam')
elif (file.endswith(".egg.pz") or file.endswith(".egg.gz")):
egg2bam(file, file[:-7]+'.bam')
elif (file.endswith(".py")):
py2pyc(file)
else: pass
elif (os.path.isdir(file)):
for x in os.listdir(file):
CompileFiles(os.path.join(file, x))
def DeleteFiles(file):
base = os.path.basename(file).lower()
if (os.path.isdir(file)):
for pattern in OPTIONS["rmdir"]:
if pattern.lower() == base:
print("Deleting "+file)
shutil.rmtree(file)
return
for x in os.listdir(file):
DeleteFiles(os.path.join(file, x))
else:
for ext in OPTIONS["rmext"]:
if base[-(len(ext) + 1):] == ("." + ext).lower():
print("Deleting "+file)
os.unlink(file)
return
print("")
print("Compiling BAM and PYC files...")
os.chdir(TMPGAME)
CompileFiles(".")
DeleteFiles(".")
##############################################################################
#
# Now make the installer. Yay!
#
##############################################################################
INSTALLER_DEB_FILE="""
Package: BASENAME
Version: VERSION
Section: games
Priority: optional
Architecture: ARCH
Essential: no
Depends: PYTHONV
Provides: BASENAME
Description: NAME
Maintainer: Unknown
"""
INSTALLER_SPEC_FILE="""
Summary: NAME
Name: BASENAME
Version: VERSION
Release: 1
Group: Amusement/Games
License: See license file
BuildRoot: TMPDIR
BuildRequires: PYTHONV
%description
NAME
%files
%defattr(-,root,root)
/usr/bin/BASENAME
/usr/lib/games/BASENAME
/usr/share/games/BASENAME
"""
RUN_SCRIPT="""
#!/bin/sh
cd /usr/share/games/BASENAME/game
PYTHONPATH=/usr/lib/games/BASENAME:/usr/share/games/BASENAME
LD_LIBRARY_PATH=/usr/lib/games/BASENAME
PYTHONV MAIN
"""
if (sys.platform == "win32"):
CMD="\""+NSIS+"\\makensis.exe\" /V2 "
CMD=CMD+'/DCOMPRESSOR="'+COMPRESS+'" '
CMD=CMD+'/DNAME="'+NAME+'" '
CMD=CMD+'/DSMDIRECTORY="'+SMDIRECTORY+'" '
CMD=CMD+'/DINSTALLDIR="'+INSTALLDIR+'" '
CMD=CMD+'/DOUTFILE="'+OUTFILE+'" '
CMD=CMD+'/DLICENSE="'+LICENSE+'" '
CMD=CMD+'/DLANGUAGE="English" '
CMD=CMD+'/DRUNTEXT="Play '+NAME+'" '
CMD=CMD+'/DIBITMAP="'+BITMAP+'" '
CMD=CMD+'/DUBITMAP="'+BITMAP+'" '
CMD=CMD+'/DPANDA="'+PANDA+'" '
CMD=CMD+'/DPANDACONF="'+TMPETC+'" '
CMD=CMD+'/DPSOURCE="'+PSOURCE+'" '
CMD=CMD+'/DPPGAME="'+TMPGAME+'" '
CMD=CMD+'/DPPMAIN="'+MAIN+'" '
CMD=CMD+'/DPPICON="'+PPICON+'" '
CMD=CMD+'"'+PSOURCE+'\\direct\\directscripts\\packpanda.nsi"'
print("")
print(CMD)
print("packing...")
subprocess.call(CMD)
else:
os.chdir(MYDIR)
os.system("mkdir -p %s/usr/bin" % TMPDIR)
os.system("mkdir -p %s/usr/share/games/%s" % (TMPDIR, BASENAME))
os.system("mkdir -p %s/usr/lib/games/%s" % (TMPDIR, BASENAME))
os.system("cp --recursive %s/direct %s/usr/share/games/%s/direct" % (PANDA, TMPDIR, BASENAME))
os.system("cp --recursive %s/pandac %s/usr/share/games/%s/pandac" % (PANDA, TMPDIR, BASENAME))
os.system("cp --recursive %s/models %s/usr/share/games/%s/models" % (PANDA, TMPDIR, BASENAME))
os.system("cp --recursive %s/Pmw %s/usr/share/games/%s/Pmw" % (PANDA, TMPDIR, BASENAME))
os.system("cp %s %s/usr/share/games/%s/LICENSE" % (LICENSE, TMPDIR, BASENAME))
os.system("cp --recursive /usr/lib/panda3d/* %s/usr/lib/games/%s/" % (TMPDIR, BASENAME))
# Make the script to run the game
txt = RUN_SCRIPT[1:].replace("BASENAME",BASENAME).replace("PYTHONV",PYTHONV).replace("MAIN",MAIN)
WriteFile(TMPDIR+"/usr/bin/"+BASENAME, txt)
os.system("chmod +x "+TMPDIR+"/usr/bin/"+BASENAME)
if (os.path.exists("/usr/bin/rpmbuild")):
os.system("rm -rf %s/DEBIAN" % TMPDIR)
os.system("rpm -E '%_target_cpu' > packpanda-TMP.txt")
ARCH=ReadFile("packpanda-TMP.txt").strip()
os.remove("packpanda-TMP.txt")
txt = INSTALLER_SPEC_FILE[1:].replace("VERSION",VER).replace("TMPDIR",TMPDIR)
txt = txt.replace("BASENAME",BASENAME).replace("NAME",NAME).replace("PYTHONV",PYTHONV)
WriteFile("packpanda-TMP.spec", txt)
os.system("rpmbuild --define '_rpmdir "+TMPDIR+"' -bb packpanda-TMP.spec")
os.system("mv "+ARCH+"/"+BASENAME+"-"+VER+"-1."+ARCH+".rpm .")
os.rmdir(ARCH)
os.remove("packpanda-TMP.spec")
if (os.path.exists("/usr/bin/dpkg-deb")):
os.system("dpkg --print-architecture > packpanda-TMP.txt")
ARCH=ReadFile("packpanda-TMP.txt").strip()
os.remove("packpanda-TMP.txt")
txt = INSTALLER_DEB_FILE[1:].replace("VERSION",str(VER)).replace("PYTHONV",PYTHONV)
txt = txt.replace("BASENAME",BASENAME).replace("NAME",NAME).replace("ARCH",ARCH)
os.system("mkdir -p %s/DEBIAN" % TMPDIR)
os.system("cd %s ; (find usr -type f -exec md5sum {} \;) > DEBIAN/md5sums" % TMPDIR)
WriteFile(TMPDIR+"/DEBIAN/control",txt)
os.system("dpkg-deb -b "+TMPDIR+" "+BASENAME+"_"+VER+"_"+ARCH+".deb")
if not(os.path.exists("/usr/bin/rpmbuild") or os.path.exists("/usr/bin/dpkg-deb")):
exit("To build an installer, either rpmbuild or dpkg-deb must be present on your system!")
|
{
"content_hash": "b23d8e59f8eac3ca201c93202c3cc086",
"timestamp": "",
"source": "github",
"line_count": 406,
"max_line_length": 186,
"avg_line_length": 34.1576354679803,
"alnum_prop": 0.5745601384482262,
"repo_name": "tobspr/panda3d",
"id": "6d89a9a0d052d9b571c962603d537efb1ca313af",
"size": "14593",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "direct/src/directscripts/packpanda.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4004"
},
{
"name": "C",
"bytes": "6724918"
},
{
"name": "C++",
"bytes": "25480688"
},
{
"name": "Emacs Lisp",
"bytes": "229264"
},
{
"name": "Groff",
"bytes": "3106"
},
{
"name": "HTML",
"bytes": "8081"
},
{
"name": "Java",
"bytes": "3113"
},
{
"name": "JavaScript",
"bytes": "7003"
},
{
"name": "Logos",
"bytes": "5504"
},
{
"name": "MAXScript",
"bytes": "1745"
},
{
"name": "NSIS",
"bytes": "92320"
},
{
"name": "Nemerle",
"bytes": "4403"
},
{
"name": "Objective-C",
"bytes": "28865"
},
{
"name": "Objective-C++",
"bytes": "257446"
},
{
"name": "Perl",
"bytes": "206982"
},
{
"name": "Perl6",
"bytes": "30484"
},
{
"name": "Puppet",
"bytes": "2627"
},
{
"name": "Python",
"bytes": "5537773"
},
{
"name": "R",
"bytes": "421"
},
{
"name": "Shell",
"bytes": "55940"
},
{
"name": "Visual Basic",
"bytes": "136"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import webnotes, json
class Profile:
"""
	A profile object is created at the beginning of every request with details of the user.
The global profile object is `webnotes.user`
"""
def __init__(self, name=''):
self.defaults = None
self.name = name or webnotes.session.get('user')
self.roles = []
self.all_read = []
self.can_create = []
self.can_read = []
self.can_write = []
self.can_cancel = []
self.can_search = []
self.can_get_report = []
self.allow_modules = []
self.in_create = []
def get_roles(self):
"""get list of roles"""
if not self.roles:
self.roles = webnotes.get_roles(self.name)
return self.roles
def build_doctype_map(self):
"""build map of special doctype properties"""
self.doctype_map = {}
for r in webnotes.conn.sql("""select name, in_create, issingle, istable,
read_only, module from tabDocType""", as_dict=1):
self.doctype_map[r['name']] = r
def build_perm_map(self):
"""build map of permissions at level 0"""
self.perm_map = {}
for r in webnotes.conn.sql("""select parent, `read`, `write`, `create`, `submit`, `cancel`, `report`
from tabDocPerm where docstatus=0
and ifnull(permlevel,0)=0
and parent not like "old_parent:%%"
and role in ('%s')""" % "','".join(self.get_roles()), as_dict=1):
dt = r['parent']
if not dt in self.perm_map:
self.perm_map[dt] = {}
for k in ('read', 'write', 'create', 'submit', 'cancel', 'report'):
if not self.perm_map[dt].get(k):
self.perm_map[dt][k] = r.get(k)
def build_permissions(self):
"""build lists of what the user can read / write / create
quirks:
read_only => Not in Search
in_create => Not in create
"""
self.build_doctype_map()
self.build_perm_map()
for dt in self.doctype_map:
dtp = self.doctype_map[dt]
p = self.perm_map.get(dt, {})
if not dtp.get('istable'):
if p.get('create') and not dtp.get('issingle'):
if dtp.get('in_create'):
self.in_create.append(dt)
else:
self.can_create.append(dt)
elif p.get('write'):
self.can_write.append(dt)
elif p.get('read'):
if dtp.get('read_only'):
self.all_read.append(dt)
else:
self.can_read.append(dt)
if p.get('cancel'):
self.can_cancel.append(dt)
if (p.get('read') or p.get('write') or p.get('create')):
if p.get('report'):
self.can_get_report.append(dt)
if not dtp.get('istable'):
if not dtp.get('issingle') and not dtp.get('read_only'):
self.can_search.append(dt)
if not dtp.get('module') in self.allow_modules:
self.allow_modules.append(dtp.get('module'))
self.can_write += self.can_create
self.can_write += self.in_create
self.can_read += self.can_write
self.all_read += self.can_read
def get_defaults(self):
import webnotes.defaults
self.defaults = webnotes.defaults.get_defaults(self.name)
return self.defaults
# update recent documents
def update_recent(self, dt, dn):
rdl = webnotes.cache().get_value("recent:" + self.name) or []
new_rd = [dt, dn]
# clear if exists
for i in range(len(rdl)):
rd = rdl[i]
if rd==new_rd:
del rdl[i]
break
if len(rdl) > 19:
rdl = rdl[:19]
rdl = [new_rd] + rdl
		webnotes.cache().set_value("recent:" + self.name, rdl)
def get_can_read(self):
"""return list of doctypes that the user can read"""
if not self.can_read:
self.build_permissions()
return self.can_read
def load_profile(self):
d = webnotes.conn.sql("""select email, first_name, last_name,
email_signature, background_image, user_type
from tabProfile where name = %s""", self.name, as_dict=1)[0]
if not self.can_read:
self.build_permissions()
d.name = self.name
d.recent = json.dumps(webnotes.cache().get_value("recent:" + self.name) or [])
d['roles'] = self.get_roles()
d['defaults'] = self.get_defaults()
d['can_create'] = self.can_create
d['can_write'] = self.can_write
d['can_read'] = list(set(self.can_read))
d['can_cancel'] = list(set(self.can_cancel))
d['can_get_report'] = list(set(self.can_get_report))
d['allow_modules'] = self.allow_modules
d['all_read'] = self.all_read
d['can_search'] = list(set(self.can_search))
d['in_create'] = self.in_create
return d
def get_user_fullname(user):
fullname = webnotes.conn.sql("SELECT CONCAT_WS(' ', first_name, last_name) FROM `tabProfile` WHERE name=%s", user)
return fullname and fullname[0][0] or ''
def get_system_managers():
"""returns all system manager's profile details"""
system_managers = webnotes.conn.sql("""select distinct name
from tabProfile p
where docstatus < 2 and enabled = 1
and name not in ("Administrator", "Guest")
and exists (select * from tabUserRole ur
where ur.parent = p.name and ur.role="System Manager")""")
return [p[0] for p in system_managers]
def add_role(profile, role):
profile_wrapper = webnotes.bean("Profile", profile)
profile_wrapper.doclist.append({
"doctype": "UserRole",
"parentfield": "user_roles",
"role": role
})
profile_wrapper.save()
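# Usage sketch (hypothetical user and role names):
#   add_role("test@example.com", "System Manager")
#   profile = Profile("test@example.com")
#   readable_doctypes = profile.get_can_read()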
|
{
"content_hash": "bb2e26a31d8c8b0662c8ab72126e6d67",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 115,
"avg_line_length": 28.741573033707866,
"alnum_prop": 0.6362392494136043,
"repo_name": "rohitw1991/latestadbwnf",
"id": "81848ab988886ffe0322d7012b99bd3a23628481",
"size": "5204",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "webnotes/profile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "213083"
},
{
"name": "JavaScript",
"bytes": "1686472"
},
{
"name": "Python",
"bytes": "523801"
}
],
"symlink_target": ""
}
|
"""E2E Tests for tfx.examples.bert.cola.bert_cola_pipeline."""
import os
import unittest
import tensorflow as tf
from tfx.dsl.io import fileio
from tfx.examples.bert.cola import bert_cola_pipeline
from tfx.orchestration import metadata
from tfx.orchestration.local.local_dag_runner import LocalDagRunner
@unittest.skipIf(tf.__version__ < '2',
'Bert model requires tf.text >=2 and TF >= 2.')
class ColaPipelineNativeKerasEndToEndTest(tf.test.TestCase):
def setUp(self):
super().setUp()
self._test_dir = os.path.join(
os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
self._testMethodName)
self._pipeline_name = 'keras_test'
self._data_root = os.path.join(os.path.dirname(__file__), 'data')
self._module_file = os.path.join(
os.path.dirname(__file__), 'bert_cola_utils.py')
self._serving_model_dir = os.path.join(self._test_dir, 'serving_model')
self._pipeline_root = os.path.join(self._test_dir, 'tfx', 'pipelines',
self._pipeline_name)
self._metadata_path = os.path.join(self._test_dir, 'tfx', 'metadata',
self._pipeline_name, 'metadata.db')
def assertExecutedOnce(self, component: str) -> None:
"""Check the component is executed exactly once."""
component_path = os.path.join(self._pipeline_root, component)
self.assertTrue(fileio.exists(component_path))
execution_path = os.path.join(
component_path, '.system', 'executor_execution')
execution = fileio.listdir(execution_path)
self.assertLen(execution, 1)
def assertPipelineExecution(self) -> None:
self.assertExecutedOnce('CsvExampleGen')
self.assertExecutedOnce('Evaluator')
self.assertExecutedOnce('ExampleValidator')
self.assertExecutedOnce('Pusher')
self.assertExecutedOnce('SchemaGen')
self.assertExecutedOnce('StatisticsGen')
self.assertExecutedOnce('Trainer')
self.assertExecutedOnce('Transform')
def testColaPipelineNativeKeras(self):
pipeline = bert_cola_pipeline._create_pipeline(
pipeline_name=self._pipeline_name,
data_root=self._data_root,
module_file=self._module_file,
serving_model_dir=self._serving_model_dir,
pipeline_root=self._pipeline_root,
metadata_path=self._metadata_path,
beam_pipeline_args=[])
LocalDagRunner().run(pipeline)
self.assertTrue(fileio.exists(self._serving_model_dir))
self.assertTrue(fileio.exists(self._metadata_path))
expected_execution_count = 9 # 8 components + 1 resolver
metadata_config = metadata.sqlite_metadata_connection_config(
self._metadata_path)
with metadata.Metadata(metadata_config) as m:
artifact_count = len(m.store.get_artifacts())
execution_count = len(m.store.get_executions())
self.assertGreaterEqual(artifact_count, execution_count)
self.assertEqual(expected_execution_count, execution_count)
self.assertPipelineExecution()
if __name__ == '__main__':
tf.compat.v1.enable_v2_behavior()
tf.test.main()
|
{
"content_hash": "b8ff1ba75a267819036186cc626f1692",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 75,
"avg_line_length": 38.24691358024691,
"alnum_prop": 0.6817301484828922,
"repo_name": "tensorflow/tfx",
"id": "bf88a217b5fee888549753c4232621f2798f46d7",
"size": "3694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tfx/examples/bert/cola/bert_cola_pipeline_e2e_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "7405"
},
{
"name": "Jupyter Notebook",
"bytes": "38579"
},
{
"name": "Python",
"bytes": "6009050"
},
{
"name": "Shell",
"bytes": "34056"
},
{
"name": "Starlark",
"bytes": "20324"
}
],
"symlink_target": ""
}
|
import sys
import yaml
import re
import collections
from pathlib import Path
import textwrap
def yml_dump_color(d):
s = yaml.dump(d, default_flow_style=False, allow_unicode=True)
return re.sub(r'^([a-zA-Z0-9_]+?:)', _key_colorer, s, flags=re.MULTILINE)
def fail(err):
print('\n'.join(textwrap.wrap(err, width=90, replace_whitespace=False)))
sys.exit(1)
def _key_colorer(m):
from termcolor import colored
contents = m.group(1)
assert contents[-1] == ':'
return colored(contents, 'blue')
def pretty_path(p):
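    """Return a human-friendly form of path `p`: 'Current directory'
    for the CWD itself, a './'-relative path for children of the CWD,
    otherwise the original '~'-form (if given) or an absolute path.
    """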
p_with_home = None
if "~" in str(p):
p_with_home = p
try:
p = p.expanduser()
except AttributeError:
import os.path
p = Path(os.path.expanduser( str(p) ))
p = p.absolute()
cwd = Path.cwd()
if p == cwd:
return 'Current directory'
try:
return './' + p.relative_to(cwd).as_posix()
except ValueError:
if p_with_home is not None:
return p_with_home.as_posix()
return p.as_posix()
def _set_yaml_prettyprint():
    # TODO: don't mess with yaml's global configuration
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
def str_presenter(dumper, data):
if len(data.splitlines()) > 1: # check for multiline string
return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
return dumper.represent_scalar('tag:yaml.org,2002:str', data)
yaml.add_representer(str, str_presenter)
def dict_representer(dumper, data):
return dumper.represent_dict(data.items())
def dict_constructor(loader, node):
return collections.OrderedDict(loader.construct_pairs(node))
yaml.add_representer(collections.OrderedDict, dict_representer)
yaml.add_constructor(_mapping_tag, dict_constructor)
if sys.version_info.major == 2:
yaml.add_representer(unicode,
lambda dumper, value:
dumper.represent_scalar(u'tag:yaml.org,2002:str', value))
#yaml.add_representer(unicode, str_presenter)
_set_yaml_prettyprint()
|
{
"content_hash": "fbd985fbc02ae202063c696ce52fc0b0",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 86,
"avg_line_length": 29.21917808219178,
"alnum_prop": 0.6272855133614628,
"repo_name": "molecular-workflow-repository/molflow",
"id": "15c5e621406f8148f9e70c1ff7ad6e5fd1c1935e",
"size": "2711",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "molflow/formatting.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "178849"
}
],
"symlink_target": ""
}
|
from collections import namedtuple
import sure # noqa
from moto.swf.exceptions import SWFUnknownResourceFault
from moto.swf.models import Domain
# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises # noqa
# Fake WorkflowExecution for tests purposes
WorkflowExecution = namedtuple(
"WorkflowExecution",
["workflow_id", "run_id", "execution_status", "open"]
)
def test_domain_short_dict_representation():
domain = Domain("foo", "52")
domain.to_short_dict().should.equal(
{"name": "foo", "status": "REGISTERED"})
domain.description = "foo bar"
domain.to_short_dict()["description"].should.equal("foo bar")
def test_domain_full_dict_representation():
domain = Domain("foo", "52")
domain.to_full_dict()["domainInfo"].should.equal(domain.to_short_dict())
_config = domain.to_full_dict()["configuration"]
_config["workflowExecutionRetentionPeriodInDays"].should.equal("52")
def test_domain_string_representation():
domain = Domain("my-domain", "60")
str(domain).should.equal("Domain(name: my-domain, status: REGISTERED)")
def test_domain_add_to_activity_task_list():
domain = Domain("my-domain", "60")
domain.add_to_activity_task_list("foo", "bar")
domain.activity_task_lists.should.equal({
"foo": ["bar"]
})
def test_domain_activity_tasks():
domain = Domain("my-domain", "60")
domain.add_to_activity_task_list("foo", "bar")
domain.add_to_activity_task_list("other", "baz")
sorted(domain.activity_tasks).should.equal(["bar", "baz"])
def test_domain_add_to_decision_task_list():
domain = Domain("my-domain", "60")
domain.add_to_decision_task_list("foo", "bar")
domain.decision_task_lists.should.equal({
"foo": ["bar"]
})
def test_domain_decision_tasks():
domain = Domain("my-domain", "60")
domain.add_to_decision_task_list("foo", "bar")
domain.add_to_decision_task_list("other", "baz")
sorted(domain.decision_tasks).should.equal(["bar", "baz"])
def test_domain_get_workflow_execution():
domain = Domain("my-domain", "60")
wfe1 = WorkflowExecution(
workflow_id="wf-id-1", run_id="run-id-1", execution_status="OPEN", open=True)
wfe2 = WorkflowExecution(
workflow_id="wf-id-1", run_id="run-id-2", execution_status="CLOSED", open=False)
wfe3 = WorkflowExecution(
workflow_id="wf-id-2", run_id="run-id-3", execution_status="OPEN", open=True)
wfe4 = WorkflowExecution(
workflow_id="wf-id-3", run_id="run-id-4", execution_status="CLOSED", open=False)
domain.workflow_executions = [wfe1, wfe2, wfe3, wfe4]
# get workflow execution through workflow_id and run_id
domain.get_workflow_execution(
"wf-id-1", run_id="run-id-1").should.equal(wfe1)
domain.get_workflow_execution(
"wf-id-1", run_id="run-id-2").should.equal(wfe2)
domain.get_workflow_execution(
"wf-id-3", run_id="run-id-4").should.equal(wfe4)
domain.get_workflow_execution.when.called_with(
"wf-id-1", run_id="non-existent"
).should.throw(
SWFUnknownResourceFault,
)
# get OPEN workflow execution by default if no run_id
domain.get_workflow_execution("wf-id-1").should.equal(wfe1)
domain.get_workflow_execution.when.called_with(
"wf-id-3"
).should.throw(
SWFUnknownResourceFault
)
domain.get_workflow_execution.when.called_with(
"wf-id-non-existent"
).should.throw(
SWFUnknownResourceFault
)
# raise_if_closed attribute
domain.get_workflow_execution(
"wf-id-1", run_id="run-id-1", raise_if_closed=True).should.equal(wfe1)
domain.get_workflow_execution.when.called_with(
"wf-id-3", run_id="run-id-4", raise_if_closed=True
).should.throw(
SWFUnknownResourceFault
)
# raise_if_none attribute
domain.get_workflow_execution("foo", raise_if_none=False).should.be.none
|
{
"content_hash": "b105594d8caf44e6eeb92dec28ec7226",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 88,
"avg_line_length": 33.252100840336134,
"alnum_prop": 0.6623704826889057,
"repo_name": "okomestudio/moto",
"id": "1a8a1268d761a99676c6aad91bcc5d941254c40e",
"size": "3957",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/test_swf/models/test_domain.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "443"
},
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "1143"
},
{
"name": "Python",
"bytes": "4658734"
},
{
"name": "Ruby",
"bytes": "188"
},
{
"name": "Shell",
"bytes": "797"
}
],
"symlink_target": ""
}
|
from pyasn1.type import univ, constraint, namedtype, namedval, tag
from pyasn1_modules import rfc5280
MAX = float('inf')
class KeyEncryptionAlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
pass
class PrivateKeyAlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
pass
class EncryptedData(univ.OctetString):
pass
class EncryptedPrivateKeyInfo(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('encryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
namedtype.NamedType('encryptedData', EncryptedData())
)
class Version(univ.Integer):
namedValues = namedval.NamedValues(('v1', 0), ('v2', 1))
class PrivateKey(univ.OctetString):
pass
class Attributes(univ.SetOf):
componentType = rfc5280.Attribute()
class PublicKey(univ.BitString):
pass
# OneAsymmetricKey is essentially version 2 of PrivateKeyInfo.
# If publicKey is present, then the version must be v2;
# otherwise, the version should be v1.
class OneAsymmetricKey(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('version', Version()),
namedtype.NamedType('privateKeyAlgorithm', PrivateKeyAlgorithmIdentifier()),
namedtype.NamedType('privateKey', PrivateKey()),
namedtype.OptionalNamedType('attributes', Attributes().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.OptionalNamedType('publicKey', PublicKey().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
)
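# Construction sketch (hypothetical algorithm OID and key bytes, not
# part of this module): with no publicKey component, version must be v1.
#
#   alg = PrivateKeyAlgorithmIdentifier()
#   alg['algorithm'] = univ.ObjectIdentifier('1.2.840.113549.1.1.1')
#   key = OneAsymmetricKey()
#   key['version'] = Version('v1')
#   key['privateKeyAlgorithm'] = alg
#   key['privateKey'] = PrivateKey(b'...')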
class PrivateKeyInfo(OneAsymmetricKey):
pass
# The CMS AsymmetricKeyPackage Content Type
id_ct_KP_aKeyPackage = univ.ObjectIdentifier('2.16.840.1.101.2.1.2.78.5')
class AsymmetricKeyPackage(univ.SequenceOf):
pass
AsymmetricKeyPackage.componentType = OneAsymmetricKey()
AsymmetricKeyPackage.sizeSpec = constraint.ValueSizeConstraint(1, MAX)
# Map of Content Type OIDs to Content Types
# To be added to the ones that are in rfc5652.py
cmsContentTypesMapUpdate = {
id_ct_KP_aKeyPackage: AsymmetricKeyPackage(),
}
|
{
"content_hash": "25aa896ad64c49b8c89ffb0d4ada001a",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 87,
"avg_line_length": 26.4625,
"alnum_prop": 0.7487009919697686,
"repo_name": "cloudera/hue",
"id": "d8c7c90e90918820641b198ec5f6b39b225cec92",
"size": "2555",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/pyasn1-modules-0.2.6/pyasn1_modules/rfc5958.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
}
|
"""
Integration (not unit) tests for pylast.py
"""
import os
import unittest
import pylast
from .test_pylast import TestPyLastWithLastFm
class TestPyLastUser(TestPyLastWithLastFm):
def test_repr(self):
# Arrange
user = self.network.get_user("RJ")
# Act
representation = repr(user)
# Assert
self.assert_startswith(representation, "pylast.User('RJ',")
def test_str(self):
# Arrange
user = self.network.get_user("RJ")
# Act
string = str(user)
# Assert
self.assertEqual(string, "RJ")
def test_equality(self):
# Arrange
user_1a = self.network.get_user("RJ")
user_1b = self.network.get_user("RJ")
user_2 = self.network.get_user("Test User")
not_a_user = self.network
# Act / Assert
self.assertEqual(user_1a, user_1b)
self.assertNotEqual(user_1a, user_2)
self.assertNotEqual(user_1a, not_a_user)
def test_get_name(self):
# Arrange
user = self.network.get_user("RJ")
# Act
name = user.get_name(properly_capitalized=True)
# Assert
self.assertEqual(name, "RJ")
def test_get_user_registration(self):
# Arrange
user = self.network.get_user("RJ")
# Act
registered = user.get_registered()
# Assert
if int(registered):
# Last.fm API broken? Used to be yyyy-mm-dd not Unix timestamp
self.assertEqual(registered, "1037793040")
else:
# Old way
# Just check date because of timezones
self.assertIn("2002-11-20 ", registered)
def test_get_user_unixtime_registration(self):
# Arrange
user = self.network.get_user("RJ")
# Act
unixtime_registered = user.get_unixtime_registered()
# Assert
# Just check date because of timezones
self.assertEqual(unixtime_registered, 1037793040)
def test_get_countryless_user(self):
# Arrange
# Currently test_user has no country set:
lastfm_user = self.network.get_user("test_user")
# Act
country = lastfm_user.get_country()
# Assert
self.assertIsNone(country)
def test_user_get_country(self):
# Arrange
lastfm_user = self.network.get_user("RJ")
# Act
country = lastfm_user.get_country()
# Assert
self.assertEqual(str(country), "United Kingdom")
def test_user_equals_none(self):
# Arrange
lastfm_user = self.network.get_user(self.username)
# Act
value = lastfm_user is None
# Assert
self.assertFalse(value)
def test_user_not_equal_to_none(self):
# Arrange
lastfm_user = self.network.get_user(self.username)
# Act
value = lastfm_user is not None
# Assert
self.assertTrue(value)
def test_now_playing_user_with_no_scrobbles(self):
# Arrange
# Currently test-account has no scrobbles:
user = self.network.get_user("test-account")
# Act
current_track = user.get_now_playing()
# Assert
self.assertIsNone(current_track)
def test_love_limits(self):
# Arrange
# Currently test-account has at least 23 loved tracks:
user = self.network.get_user("test-user")
# Act/Assert
self.assertEqual(len(user.get_loved_tracks(limit=20)), 20)
self.assertLessEqual(len(user.get_loved_tracks(limit=100)), 100)
self.assertGreaterEqual(len(user.get_loved_tracks(limit=None)), 23)
self.assertGreaterEqual(len(user.get_loved_tracks(limit=0)), 23)
def test_user_is_hashable(self):
# Arrange
user = self.network.get_user(self.username)
# Act/Assert
self.helper_is_thing_hashable(user)
# Commented out because (a) it'll take a long time and (b) it strangely
    # fails because Last.fm complains of hitting the rate limit, even when
# limited to one call per second. The ToS allows 5 calls per second.
# def test_get_all_scrobbles(self):
# # Arrange
# lastfm_user = self.network.get_user("RJ")
# self.network.enable_rate_limit() # this is going to be slow...
#
# # Act
# tracks = lastfm_user.get_recent_tracks(limit=None)
#
# # Assert
# self.assertGreaterEqual(len(tracks), 0)
def test_pickle(self):
# Arrange
import pickle
lastfm_user = self.network.get_user(self.username)
filename = str(self.unix_timestamp()) + ".pkl"
# Act
with open(filename, "wb") as f:
pickle.dump(lastfm_user, f)
with open(filename, "rb") as f:
loaded_user = pickle.load(f)
os.remove(filename)
# Assert
self.assertEqual(lastfm_user, loaded_user)
def test_cacheable_user_artist_tracks(self):
# Arrange
lastfm_user = self.network.get_authenticated_user()
# Act
result1 = lastfm_user.get_artist_tracks("Test Artist", cacheable=False)
result2 = lastfm_user.get_artist_tracks("Test Artist", cacheable=True)
result3 = lastfm_user.get_artist_tracks("Test Artist")
# Assert
self.helper_validate_results(result1, result2, result3)
def test_cacheable_user(self):
# Arrange
lastfm_user = self.network.get_authenticated_user()
# Act/Assert
self.helper_validate_cacheable(lastfm_user, "get_friends")
self.helper_validate_cacheable(lastfm_user, "get_loved_tracks")
self.helper_validate_cacheable(lastfm_user, "get_recent_tracks")
def test_user_get_top_tags_with_limit(self):
# Arrange
user = self.network.get_user("RJ")
# Act
tags = user.get_top_tags(limit=1)
# Assert
self.helper_only_one_thing_in_top_list(tags, pylast.Tag)
def test_user_top_tracks(self):
# Arrange
lastfm_user = self.network.get_user("RJ")
# Act
things = lastfm_user.get_top_tracks(limit=2)
# Assert
self.helper_two_different_things_in_top_list(things, pylast.Track)
def helper_assert_chart(self, chart, expected_type):
# Assert
self.assertIsNotNone(chart)
self.assertGreater(len(chart), 0)
self.assertIsInstance(chart[0], pylast.TopItem)
self.assertIsInstance(chart[0].item, expected_type)
def helper_get_assert_charts(self, thing, date):
# Arrange
album_chart, track_chart = None, None
(from_date, to_date) = date
# Act
artist_chart = thing.get_weekly_artist_charts(from_date, to_date)
if type(thing) is not pylast.Tag:
album_chart = thing.get_weekly_album_charts(from_date, to_date)
track_chart = thing.get_weekly_track_charts(from_date, to_date)
# Assert
self.helper_assert_chart(artist_chart, pylast.Artist)
if type(thing) is not pylast.Tag:
self.helper_assert_chart(album_chart, pylast.Album)
self.helper_assert_chart(track_chart, pylast.Track)
def helper_dates_valid(self, dates):
# Assert
self.assertGreaterEqual(len(dates), 1)
self.assertIsInstance(dates[0], tuple)
(start, end) = dates[0]
self.assertLess(start, end)
def test_user_charts(self):
# Arrange
lastfm_user = self.network.get_user("RJ")
dates = lastfm_user.get_weekly_chart_dates()
self.helper_dates_valid(dates)
# Act/Assert
self.helper_get_assert_charts(lastfm_user, dates[0])
def test_user_top_artists(self):
# Arrange
lastfm_user = self.network.get_user(self.username)
# Act
artists = lastfm_user.get_top_artists(limit=1)
# Assert
self.helper_only_one_thing_in_top_list(artists, pylast.Artist)
def test_user_top_albums(self):
# Arrange
user = self.network.get_user("RJ")
# Act
albums = user.get_top_albums(limit=1)
# Assert
self.helper_only_one_thing_in_top_list(albums, pylast.Album)
def test_user_tagged_artists(self):
# Arrange
lastfm_user = self.network.get_user(self.username)
tags = ["artisttagola"]
artist = self.network.get_artist("Test Artist")
artist.add_tags(tags)
# Act
artists = lastfm_user.get_tagged_artists("artisttagola", limit=1)
# Assert
self.helper_only_one_thing_in_list(artists, pylast.Artist)
def test_user_tagged_albums(self):
# Arrange
lastfm_user = self.network.get_user(self.username)
tags = ["albumtagola"]
album = self.network.get_album("Test Artist", "Test Album")
album.add_tags(tags)
# Act
albums = lastfm_user.get_tagged_albums("albumtagola", limit=1)
# Assert
self.helper_only_one_thing_in_list(albums, pylast.Album)
def test_user_tagged_tracks(self):
# Arrange
lastfm_user = self.network.get_user(self.username)
tags = ["tracktagola"]
track = self.network.get_track("Test Artist", "test title")
track.add_tags(tags)
# Act
tracks = lastfm_user.get_tagged_tracks("tracktagola", limit=1)
# Assert
self.helper_only_one_thing_in_list(tracks, pylast.Track)
def test_user_subscriber(self):
# Arrange
subscriber = self.network.get_user("RJ")
non_subscriber = self.network.get_user("Test User")
# Act
subscriber_is_subscriber = subscriber.is_subscriber()
non_subscriber_is_subscriber = non_subscriber.is_subscriber()
# Assert
self.assertTrue(subscriber_is_subscriber)
self.assertFalse(non_subscriber_is_subscriber)
def test_user_get_image(self):
# Arrange
user = self.network.get_user("RJ")
# Act
url = user.get_image()
# Assert
self.assert_startswith(url, "https://")
def test_user_get_library(self):
# Arrange
user = self.network.get_user(self.username)
# Act
library = user.get_library()
# Assert
self.assertIsInstance(library, pylast.Library)
def test_get_recent_tracks_from_to(self):
# Arrange
lastfm_user = self.network.get_user("RJ")
from datetime import datetime
start = datetime(2011, 7, 21, 15, 10)
end = datetime(2011, 7, 21, 15, 15)
import calendar
utc_start = calendar.timegm(start.utctimetuple())
utc_end = calendar.timegm(end.utctimetuple())
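        # calendar.timegm() treats these naive datetimes as UTC, which matches
        # the UTC-based Unix timestamps the Last.fm API expects for time ranges.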
# Act
tracks = lastfm_user.get_recent_tracks(time_from=utc_start, time_to=utc_end)
# Assert
self.assertEqual(len(tracks), 1)
self.assertEqual(str(tracks[0].track.artist), "Johnny Cash")
self.assertEqual(str(tracks[0].track.title), "Ring of Fire")
def test_get_playcount(self):
# Arrange
user = self.network.get_user("RJ")
# Act
playcount = user.get_playcount()
# Assert
self.assertGreaterEqual(playcount, 128387)
def test_get_image(self):
# Arrange
user = self.network.get_user("RJ")
# Act
image = user.get_image()
# Assert
self.assert_startswith(image, "https://")
self.assert_endswith(image, ".png")
def test_get_url(self):
# Arrange
user = self.network.get_user("RJ")
# Act
url = user.get_url()
# Assert
self.assertEqual(url, "https://www.last.fm/user/rj")
def test_get_weekly_artist_charts(self):
# Arrange
user = self.network.get_user("bbc6music")
# Act
charts = user.get_weekly_artist_charts()
artist, weight = charts[0]
# Assert
self.assertIsNotNone(artist)
self.assertIsInstance(artist.network, pylast.LastFMNetwork)
def test_get_weekly_track_charts(self):
# Arrange
user = self.network.get_user("bbc6music")
# Act
charts = user.get_weekly_track_charts()
track, weight = charts[0]
# Assert
self.assertIsNotNone(track)
self.assertIsInstance(track.network, pylast.LastFMNetwork)
if __name__ == "__main__":
unittest.main(failfast=True)
|
{
"content_hash": "bae2b1786b35006cf6c8af31087b4258",
"timestamp": "",
"source": "github",
"line_count": 434,
"max_line_length": 84,
"avg_line_length": 28.599078341013826,
"alnum_prop": 0.5986142442797293,
"repo_name": "hugovk/pylast",
"id": "1e7c76d35825661a39ace3dbfad08248214d5995",
"size": "12434",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/test_user.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "141632"
}
],
"symlink_target": ""
}
|
from django import template
from django.conf import settings
register = template.Library()
@register.filter(name='in_list')
def in_list(value, the_list):
    # Split the comma-separated string directly; encoding to bytes first
    # would break str.split(',') under Python 3.
    the_list = [x.strip() for x in the_list.split(',')]
return value in the_list
@register.simple_tag
def settings_value(name):
return getattr(settings, name, "")
@register.simple_tag
def url_replace(request, field, value):
dict_ = request.GET.copy()
dict_[field] = value
return dict_.urlencode()
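# Hypothetical template usage, assuming this module is loaded with
# {% load util_tags %}:
#   {% if request.user.role|in_list:"admin,editor" %} ... {% endif %}
#   {% settings_value "DEBUG" %}
#   <a href="?{% url_replace request 'page' 2 %}">Next page</a>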
|
{
"content_hash": "22459b2a5511702c972c8868ce5ca94c",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 70,
"avg_line_length": 22.40909090909091,
"alnum_prop": 0.691683569979716,
"repo_name": "fcopantoja/sips",
"id": "3eb89b50348c79360f112b6ed059cd1d007672c1",
"size": "493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sips/utils/templatetags/util_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "321522"
},
{
"name": "HTML",
"bytes": "41865"
},
{
"name": "JavaScript",
"bytes": "19371"
},
{
"name": "Python",
"bytes": "53936"
}
],
"symlink_target": ""
}
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management import *
import status_params
import os
import fnmatch
# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
#RPM versioning support
rpm_version = default("/configurations/cluster-env/rpm_version", None)
#hadoop params
if rpm_version:
hadoop_bin_dir = "/usr/bigtop/current/hadoop-client/bin"
hadoop_lib_home = "/usr/bigtop/current/hadoop-client/lib"
hive_lib_dir = "/usr/bigtop/current/hive-client/lib"
oozie_lib_dir = "/usr/bigtop/current/oozie-client/"
oozie_setup_sh = "/usr/bigtop/current/oozie-client/bin/oozie-setup.sh"
oozie_webapps_dir = "/usr/bigtop/current/oozie-client/tomcat-deployment/webapps"
oozie_webapps_conf_dir = "/usr/bigtop/current/oozie-client/tomcat-deployment/conf"
oozie_libext_dir = "/usr/bigtop/current/oozie-client/libext"
oozie_server_dir = "/usr/bigtop/current/oozie-client/tomcat-deployment"
oozie_shared_lib = "/usr/bigtop/current/oozie-client/oozie-sharelib.tar.gz"
oozie_home = "/usr/bigtop/current/oozie-client"
oozie_bin_dir = "/usr/bigtop/current/oozie-client/bin"
falcon_home = '/usr/bigtop/current/falcon-client'
tomcat_conf = "/etc/oozie/tomcat-conf.http/conf"
tomcat_conf_secure = "/etc/oozie/tomcat-conf.https/conf"
else:
hadoop_bin_dir = "/usr/bin"
hadoop_lib_home = "/usr/lib/hadoop/lib"
hive_lib_dir = "/usr/lib/hive/lib"
oozie_lib_dir = "/var/lib/oozie/"
oozie_setup_sh = "/usr/lib/oozie/bin/oozie-setup.sh"
oozie_webapps_dir = "/var/lib/oozie/tomcat-deployment/webapps/"
oozie_webapps_conf_dir = "/var/lib/oozie/tomcat-deployment/conf"
oozie_libext_dir = "/usr/lib/oozie/libext"
oozie_server_dir = "/var/lib/oozie/tomcat-deployment"
oozie_shared_lib = "/usr/lib/oozie/oozie-sharelib.tar.gz"
oozie_home = "/usr/lib/oozie"
oozie_bin_dir = "/usr/bin"
falcon_home = '/usr/lib/falcon'
tomcat_conf = "/etc/oozie/tomcat-conf.http/conf"
tomcat_conf_secure = "/etc/oozie/tomcat-conf.https/conf"
execute_path = oozie_bin_dir + os.pathsep + hadoop_bin_dir
hadoop_conf_dir = "/etc/hadoop/conf"
conf_dir = "/etc/oozie/conf"
oozie_user = config['configurations']['oozie-env']['oozie_user']
smokeuser = config['configurations']['cluster-env']['smokeuser']
user_group = config['configurations']['cluster-env']['user_group']
jdk_location = config['hostLevelParams']['jdk_location']
check_db_connection_jar_name = "DBConnectionVerification.jar"
check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
oozie_tmp_dir = "/var/tmp/oozie"
oozie_hdfs_user_dir = format("/user/{oozie_user}")
oozie_pid_dir = status_params.oozie_pid_dir
pid_file = status_params.pid_file
hadoop_jar_location = "/usr/lib/hadoop/"
security_enabled = config['configurations']['cluster-env']['security_enabled']
hive_jar_files = ""
if not os.path.exists(hive_lib_dir):
raise Fail("Could not find Hive library directory: %s" % (hive_lib_dir))
for entry in os.listdir(hive_lib_dir):
absolute_path = os.path.join(hive_lib_dir, entry)
if os.path.isfile(absolute_path) and not os.path.islink(absolute_path):
if fnmatch.fnmatchcase(entry, "hive-*.jar"):
      if hive_jar_files:
        hive_jar_files = hive_jar_files + "," + absolute_path
      else:
        hive_jar_files = absolute_path
catalina_properties_common_loader = "/usr/lib/hive-hcatalog/share/hcatalog/*.jar,/usr/lib/hive-hcatalog/share/webhcat/java-client/*.jar"
if hive_jar_files:
catalina_properties_common_loader = hive_jar_files + "," + catalina_properties_common_loader
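# The combined jar list above is presumably destined for the common.loader
# entry in Tomcat's catalina.properties, so the Oozie web app can load the
# Hive/HCatalog classes.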
kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
oozie_service_keytab = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.keytab.file']
oozie_principal = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.kerberos.principal']
smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
oozie_keytab = config['configurations']['oozie-env']['oozie_keytab']
oozie_env_sh_template = config['configurations']['oozie-env']['content']
oracle_driver_jar_name = "ojdbc6.jar"
java_home = config['hostLevelParams']['java_home']
oozie_metastore_user_name = config['configurations']['oozie-site']['oozie.service.JPAService.jdbc.username']
oozie_metastore_user_passwd = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.password","")
oozie_jdbc_connection_url = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.url", "")
oozie_log_dir = config['configurations']['oozie-env']['oozie_log_dir']
oozie_data_dir = config['configurations']['oozie-env']['oozie_data_dir']
oozie_server_port = get_port_from_url(config['configurations']['oozie-site']['oozie.base.url'])
oozie_server_admin_port = config['configurations']['oozie-env']['oozie_admin_port']
fs_root = config['configurations']['core-site']['fs.defaultFS']
put_shared_lib_to_hdfs_cmd = format("{oozie_setup_sh} sharelib create -fs {fs_root} -locallib {oozie_shared_lib}")
jdbc_driver_name = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.driver", "")
if jdbc_driver_name == "com.mysql.jdbc.Driver":
jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
elif jdbc_driver_name == "org.postgresql.Driver":
jdbc_driver_jar = format("{oozie_home}/libserver/postgresql-9.0-801.jdbc4.jar")
elif jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
jdbc_driver_jar = "/usr/share/java/ojdbc6.jar"
else:
jdbc_driver_jar = ""
hostname = config["hostname"]
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
falcon_host = default("/clusterHostInfo/falcon_server_hosts", [])
has_falcon_host = len(falcon_host) > 0
#oozie-log4j.properties
if (('oozie-log4j' in config['configurations']) and ('content' in config['configurations']['oozie-log4j'])):
log4j_props = config['configurations']['oozie-log4j']['content']
else:
log4j_props = None
oozie_hdfs_user_mode = 0o775
#for create_hdfs_directory
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
import functools
#create partial functions with common arguments for every HdfsDirectory call
#to create hdfs directory we need to call params.HdfsDirectory in code
HdfsDirectory = functools.partial(
HdfsDirectory,
conf_dir=hadoop_conf_dir,
hdfs_user=hdfs_user,
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
bin_dir = hadoop_bin_dir
)
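# Hypothetical call site: with the partial above, callers only supply the
# per-directory arguments, e.g.
#   params.HdfsDirectory(params.oozie_hdfs_user_dir,
#                        action="create",
#                        owner=params.oozie_user,
#                        mode=params.oozie_hdfs_user_mode)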
|
{
"content_hash": "ae922f2c1ca910fe4e0a40fb89a5a9c4",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 136,
"avg_line_length": 45.888888888888886,
"alnum_prop": 0.7340597255851493,
"repo_name": "arenadata/ambari",
"id": "12ea97e35ff7b9130dfbe16ac39771a2c5d65305",
"size": "7456",
"binary": false,
"copies": "4",
"ref": "refs/heads/branch-adh-1.6",
"path": "ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "46700"
},
{
"name": "C",
"bytes": "331204"
},
{
"name": "C#",
"bytes": "215907"
},
{
"name": "C++",
"bytes": "257"
},
{
"name": "CSS",
"bytes": "343739"
},
{
"name": "CoffeeScript",
"bytes": "8465"
},
{
"name": "Dockerfile",
"bytes": "6387"
},
{
"name": "EJS",
"bytes": "777"
},
{
"name": "FreeMarker",
"bytes": "2654"
},
{
"name": "Gherkin",
"bytes": "990"
},
{
"name": "Groovy",
"bytes": "15882"
},
{
"name": "HTML",
"bytes": "717983"
},
{
"name": "Handlebars",
"bytes": "1819641"
},
{
"name": "Java",
"bytes": "29172298"
},
{
"name": "JavaScript",
"bytes": "18571926"
},
{
"name": "Jinja",
"bytes": "1490416"
},
{
"name": "Less",
"bytes": "412933"
},
{
"name": "Makefile",
"bytes": "11111"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLpgSQL",
"bytes": "287501"
},
{
"name": "PowerShell",
"bytes": "2090340"
},
{
"name": "Python",
"bytes": "18507704"
},
{
"name": "R",
"bytes": "3943"
},
{
"name": "Ruby",
"bytes": "38590"
},
{
"name": "SCSS",
"bytes": "40072"
},
{
"name": "Shell",
"bytes": "924115"
},
{
"name": "Stylus",
"bytes": "820"
},
{
"name": "TSQL",
"bytes": "42351"
},
{
"name": "Vim script",
"bytes": "5813"
},
{
"name": "sed",
"bytes": "2303"
}
],
"symlink_target": ""
}
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='gerrit_ext_review_list',
version='0.1',
description='',
author='',
author_email='',
install_requires=[
"pygerrit",
"pecan",
],
test_suite='gerrit_ext_review_list',
zip_safe=False,
include_package_data=True,
packages=find_packages(exclude=['ez_setup'])
)
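# A typical (hypothetical) developer workflow for this package:
#   pip install -e .        # editable install
#   python setup.py test    # run the declared test suite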
|
{
"content_hash": "730dd8d7eb2cce8c82a81f2b1e2ad99d",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 48,
"avg_line_length": 23.181818181818183,
"alnum_prop": 0.6392156862745098,
"repo_name": "nttmcl/gerrit_ext_webui",
"id": "5b6d56139f113a699a50cdbbe824fb138300621f",
"size": "534",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "596"
},
{
"name": "Python",
"bytes": "6542"
}
],
"symlink_target": ""
}
|
import shutil, os, re
# Create a regex that matches files with the American date format.
datePattern = re.compile(r"""^(.*?) # all text before the date
((0|1)?\d)- # one or two digits for the month
((0|1|2|3)?\d)- # one or two digits for the day
((19|20)\d\d) # four digits for the year
(.*?)$ # all text after the date
""", re.VERBOSE)
# Loop over the files in the working directory.
for amerFilename in os.listdir('.'):
mo = datePattern.search(amerFilename)
# Skip files without a date.
    if mo is None:
continue
# Get the different parts of the filename.
beforePart = mo.group(1)
monthPart = mo.group(2)
dayPart = mo.group(4)
yearPart = mo.group(6)
afterPart = mo.group(8)
    # Form the European-style filename.
euroFilename = beforePart + dayPart + '-' + monthPart + '-' + yearPart + afterPart
    # Get the full, absolute file paths.
absWorkingDir = os.path.abspath('.')
amerFilename = os.path.join(absWorkingDir, amerFilename)
euroFilename = os.path.join(absWorkingDir, euroFilename)
    # Rename the files.
print('Renaming "%s" to "%s"...' % (amerFilename, euroFilename))
# shutil.move(amerFilename, euroFilename) # uncomment after testing
|
{
"content_hash": "b1872e0e06a185aaecebc3487e11267e",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 82,
"avg_line_length": 25.775862068965516,
"alnum_prop": 0.6374581939799331,
"repo_name": "rfreiberger/Automate-the-Boring-Stuff",
"id": "124ee88272d9d780ac2edf054a2e39327a337b83",
"size": "1608",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ch9/renameDates.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "325"
},
{
"name": "Python",
"bytes": "27630"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/powerup/weapon/shared_thrown_explosive.iff"
result.attribute_template_id = -1
result.stfName("powerup_n","weapon_thrown_explosive")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "299dd7e3379d48adc2d512614ea8ed1a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 79,
"avg_line_length": 24.846153846153847,
"alnum_prop": 0.7089783281733746,
"repo_name": "anhstudios/swganh",
"id": "62dead66bdb8abd85f06fff54863d9959bb45400",
"size": "468",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/tangible/powerup/weapon/shared_thrown_explosive.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|