content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
import json
import os
from contextlib import contextmanager
from datetime import date, datetime, timedelta
from unittest.mock import Mock
import pytest
from bs4 import BeautifulSoup
from flask import Flask, url_for
from notifications_python_client.errors import HTTPError
from notifications_utils.url_safe_token import generate_token
from app import create_app
from app.notify_client.models import InvitedOrgUser, InvitedUser, User
from . import (
TestClient,
api_key_json,
generate_uuid,
invite_json,
invited_user,
job_json,
notification_json,
org_invite_json,
organisation_json,
sample_uuid,
service_json,
single_notification_json,
template_json,
template_version_json,
user_json,
)
@pytest.fixture
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
SERVICE_ONE_ID = "596364a0-858e-42c8-9062-a8fe822260eb"
SERVICE_TWO_ID = "147ad62a-2951-4fa1-9ca0-093cd1a52c52"
ORGANISATION_ID = "c011fa40-4cbe-4524-b415-dde2f421bd9c"
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture
@pytest.fixture
@pytest.fixture
@pytest.fixture
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture
@pytest.fixture
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture
def os_environ():
"""
clear os.environ, and restore it after the test runs
"""
# for use whenever you expect code to edit environment variables
old_env = os.environ.copy()
os.environ = {}
yield
os.environ = old_env
@pytest.fixture
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@contextmanager
@contextmanager
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture(scope='function')
@pytest.fixture
@pytest.fixture
def mock_create_event(mocker):
"""
This should be used whenever your code is calling `flask_login.login_user`
"""
return mocker.patch('app.events_api_client.create_event', side_effect=_add_event)
| [
11748,
33918,
198,
11748,
28686,
198,
6738,
4732,
8019,
1330,
4732,
37153,
198,
6738,
4818,
8079,
1330,
3128,
11,
4818,
8079,
11,
28805,
12514,
198,
6738,
555,
715,
395,
13,
76,
735,
1330,
44123,
198,
198,
11748,
12972,
9288,
198,
6738,... | 2.730745 | 3,142 |
# For plotting the images
from matplotlib import pyplot as plt
import numpy as np
from som import SOM
#source https://codesachin.wordpress.com/2015/11/28/self-organizing-maps-with-googles-tensorflow/
# Training inputs for RGBcolors
colors = np.array(
[[0., 0., 0.],
[0., 0., 1.],
[0., 0., 0.5],
[0.125, 0.529, 1.0],
[0.33, 0.4, 0.67],
[0.6, 0.5, 1.0],
[0., 1., 0.],
[1., 0., 0.],
[0., 1., 1.],
[1., 0., 1.],
[1., 1., 0.],
[1., 1., 1.],
[.33, .33, .33],
[.5, .5, .5],
[.66, .66, .66]])
color_names = \
['black', 'blue', 'darkblue', 'skyblue',
'greyblue', 'lilac', 'green', 'red',
'cyan', 'violet', 'yellow', 'white',
'darkgrey', 'mediumgrey', 'lightgrey']
# Train a 20x30 SOM with 400 iterations
som = SOM(20, 30, 3, 400)
som.train(colors)
# Get output grid
image_grid = som.get_centroids()
# Map colours to their closest neurons
mapped = som.map_vects(colors)
# Plot
plt.imshow(image_grid)
plt.title('Color SOM')
for i, m in enumerate(mapped):
plt.text(m[1], m[0], color_names[i], ha='center', va='center',
bbox=dict(facecolor='white', alpha=0.5, lw=0))
plt.show() | [
2,
1114,
29353,
262,
4263,
198,
6738,
2603,
29487,
8019,
1330,
12972,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
3870,
1330,
42121,
198,
198,
2,
10459,
3740,
1378,
40148,
620,
259,
13,
40346,
13,
785,
14,
4626... | 2.095915 | 563 |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetSiteResult',
'AwaitableGetSiteResult',
'get_site',
'get_site_output',
]
@pulumi.output_type
# pylint: disable=using-constant-test
def get_site(project: Optional[str] = None,
site_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSiteResult:
"""
Gets the specified Hosting Site.
"""
__args__ = dict()
__args__['project'] = project
__args__['siteId'] = site_id
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('google-native:firebasehosting/v1beta1:getSite', __args__, opts=opts, typ=GetSiteResult).value
return AwaitableGetSiteResult(
app_id=__ret__.app_id,
default_url=__ret__.default_url,
labels=__ret__.labels,
name=__ret__.name,
type=__ret__.type)
@_utilities.lift_output_func(get_site)
def get_site_output(project: Optional[pulumi.Input[Optional[str]]] = None,
site_id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSiteResult]:
"""
Gets the specified Hosting Site.
"""
...
| [
2,
19617,
28,
40477,
12,
23,
198,
2,
17202,
39410,
25,
428,
2393,
373,
7560,
416,
262,
21624,
12994,
26144,
35986,
13,
17202,
198,
2,
17202,
2141,
407,
4370,
416,
1021,
4556,
345,
821,
1728,
345,
760,
644,
345,
389,
1804,
0,
17202,
... | 2.443598 | 656 |
# encoding: utf-8
__author__ = "Dimitrios Karkalousos"
# Taken and adapted from: https://github.com/NVIDIA/NeMo/blob/main/nemo/core/classes/module.py
from abc import ABC
from contextlib import contextmanager
from torch.nn import Module
__all__ = ["NeuralModule"]
from mridc.core.classes.common import FileIO, Serialization, Typing
class NeuralModule(Module, Typing, Serialization, FileIO, ABC):
"""Abstract class offering interface shared between all PyTorch Neural Modules."""
@property
def num_weights(self):
"""Utility property that returns the total number of parameters of NeuralModule."""
return sum(p.numel() for p in self.parameters() if p.requires_grad)
@staticmethod
def input_example(max_batch=None, max_dim=None):
"""
Override this method if random inputs won't work
Args:
max_batch: Maximum batch size to generate
max_dim: Maximum dimension to generate
Returns:
A tuple sample of valid input data.
"""
return None
def freeze(self) -> None:
r"""Freeze all params for inference."""
for param in self.parameters():
param.requires_grad = False
self.eval()
def unfreeze(self) -> None:
"""Unfreeze all parameters for training."""
for param in self.parameters():
param.requires_grad = True
self.train()
@contextmanager
def as_frozen(self):
"""Context manager which temporarily freezes a module, yields control and finally unfreezes the module."""
self.freeze()
try:
yield
finally:
self.unfreeze()
| [
2,
21004,
25,
3384,
69,
12,
23,
198,
834,
9800,
834,
796,
366,
29271,
49510,
418,
509,
668,
282,
516,
418,
1,
198,
198,
2,
30222,
290,
16573,
422,
25,
3740,
1378,
12567,
13,
785,
14,
38021,
14,
8199,
16632,
14,
2436,
672,
14,
12... | 2.613354 | 644 |
# -*- encoding: utf-8 -*-
"""
@File : urls
@Date : 2019/11/15
@Author : Regulus
@Contact : yusheng831143@gmail.com
@Desc : 所有首页相关url映射。
"""
from django.urls import path
from apps.index import views
app_name = 'app_index'
urlpatterns = [
path('', views.index, name='index'),
path('home', views.home, name='home'),
path('alerts', views.alerts, name='alerts'),
path('warframes', views.warframes, name='warframes'),
path('weapons', views.weapons, name='weapons'),
]
| [
2,
532,
9,
12,
21004,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
31,
8979,
220,
220,
220,
1058,
2956,
7278,
198,
31,
10430,
220,
220,
220,
1058,
13130,
14,
1157,
14,
1314,
198,
31,
13838,
220,
1058,
3310,
23515,
198,
31,... | 2.349057 | 212 |
import numpy as np
from .._tier0 import Image
def touching_labels_to_networkx(label_image:Image):
"""
Takes a label image, determines which labels are touching each other and returns an networkx graph
representing labels in range.
Parameters
----------
label_image : Image
Returns
-------
networkx Graph
"""
from .._tier9 import centroids_of_labels
from .._tier1 import generate_touch_matrix
from ._to_networkx import to_networkx
centroids = centroids_of_labels(label_image)
touch_matrix = generate_touch_matrix(label_image)
return to_networkx(touch_matrix, centroids) | [
11748,
299,
32152,
355,
45941,
198,
6738,
11485,
62,
24948,
15,
1330,
7412,
198,
198,
4299,
15241,
62,
23912,
1424,
62,
1462,
62,
27349,
87,
7,
18242,
62,
9060,
25,
5159,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
33687,
2... | 2.90411 | 219 |
# Copyright 2019-2020 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""
Unit tests for the Strawberry Fields command line interface.
"""
# pylint: disable=no-self-use,unused-argument
import os
import functools
import argparse
import networkx as nx
import numpy as np
import pytest
from strawberryfields.api import Result
from strawberryfields.apps import clique
from strawberryfields.configuration import store_account, ConfigurationError
from strawberryfields import cli as cli
import sys
import builtins
pytestmark = pytest.mark.frontend
class TestCreateParser:
"""Tests for creating a parser object."""
def test_general_details(self):
"""Test the general details of the parser created."""
parser = cli.create_parser()
assert parser._optionals.title == "General Options"
assert parser.description == "See below for available options and commands for working with the Xanadu cloud platform."
assert parser.add_help
@pytest.mark.parametrize("option", ['--ping', '-p'])
def test_ping(self, option):
"""Test that specifying --ping to the CLI sets the correct attribute."""
parser = cli.create_parser()
args = parser.parse_args([option])
assert args.ping
@pytest.mark.parametrize("token_option", ['--token', '-t'])
def test_configure_token(self, token_option):
"""Test that specifying configure, --token and passing an argument to
the CLI sets the correct attribute."""
parser = cli.create_parser()
args = parser.parse_args(['configure', token_option, 'SomeToken'])
assert args.func is cli.configure
assert args.token == 'SomeToken'
assert not args.local
def test_configuration_wizard(self):
"""Test that specifying configure, --local to the CLI sets the correct
attribute."""
parser = cli.create_parser()
args = parser.parse_args(['configure'])
assert args.func is cli.configure
assert not args.local
@pytest.mark.parametrize("token_option", ['--token', '-t'])
@pytest.mark.parametrize("local_option", ['--local', '-l'])
def test_configure_token_locally(self, token_option, local_option):
"""Test that specifying configure, --token, --local and passing an argument to
the CLI sets the correct attribute."""
parser = cli.create_parser()
args = parser.parse_args(['configure', token_option, 'SomeToken', local_option])
assert args.func is cli.configure
assert args.token == 'SomeToken'
@pytest.mark.parametrize("option", ['--local', '-l'])
def test_configuration_wizard_locally(self, option):
"""Test that specifying configure, --local to the CLI sets the correct
attribute."""
parser = cli.create_parser()
args = parser.parse_args(['configure', option])
assert args.func is cli.configure
assert args.local
def test_run(self):
"""Test that specifying input and passing an argument to the CLI sets
the correct attribute."""
parser = cli.create_parser()
args = parser.parse_args(['run', 'SomePath'])
assert args.func is cli.run_blackbird_script
assert args.input == 'SomePath'
def test_output(self):
"""Test that specifying input, --output and passing the arguments to
the CLI sets the correct attributes."""
parser = cli.create_parser()
args = parser.parse_args(['run', 'SomeInputPath', '--output', 'SomeOutputPath'])
assert args.func is cli.run_blackbird_script
assert args.input == 'SomeInputPath'
assert args.output == 'SomeOutputPath'
class MockArgs:
"""A mock class used for mocking the args that are parsed from the command
line."""
class MockStoreAccount:
"""A mock class used for capturing the arguments with which the store_account
function is being called."""
EXPECTED_KWARGS = {
"authentication_token": "",
"hostname": "platform.strawberryfields.ai",
"use_ssl": True,
"port": 443,
}
class TestConfigure:
"""Unit tests for the configure function checking that the lines of
execution is correct."""
def test_token(self, monkeypatch):
"""Tests that if a token was given as a command line argument then
configuration takes place accordingly."""
with monkeypatch.context() as m:
mock_store_account = MockStoreAccount()
m.setattr(cli, "store_account", mock_store_account.store_account)
args = MockArgs()
args.token = "SomeToken"
cli.configure(args)
assert mock_store_account.kwargs == {"authentication_token": "SomeToken"}
def test_configuration_wizard(self, monkeypatch):
"""Tests that if no token was given as a command line argument then
configuration takes place using the configuration_wizard function."""
with monkeypatch.context() as m:
mock_store_account = MockStoreAccount()
m.setattr(cli, "configuration_wizard", lambda: cli.create_config()["api"])
m.setattr(cli, "store_account", mock_store_account.store_account)
args = MockArgs()
args.token = False
cli.configure(args)
assert mock_store_account.kwargs == EXPECTED_KWARGS
def test_token_local(self, monkeypatch):
"""Tests that if a token was given as a command line argument and
local configuration was specified then configuration takes place
accordingly."""
with monkeypatch.context() as m:
mock_store_account = MockStoreAccount()
m.setattr(cli, "store_account", mock_store_account.store_account)
args = MockArgs()
args.token = "SomeToken"
args.local = True
cli.configure(args)
assert mock_store_account.kwargs == {"authentication_token": "SomeToken", "location": "local"}
def test_configuration_wizard_local(self, monkeypatch):
"""Tests that if no token was given as a command line argument and
local configuration was specified then configuration takes place using
the configuration_wizard function."""
with monkeypatch.context() as m:
mock_store_account = MockStoreAccount()
m.setattr(cli, "configuration_wizard", lambda: cli.create_config()["api"])
m.setattr(cli, "store_account", mock_store_account.store_account)
args = MockArgs()
args.token = False
args.local = True
cli.configure(args)
EXPECTED_KWARGS["location"] = "local"
assert mock_store_account.kwargs == EXPECTED_KWARGS
class MockSuccessfulConnection:
"""A Connection class mocking a successful establishment of connection."""
class MockFailedConnection:
"""A Connection class mocking a failed establishment of connection."""
class TestPing:
"""Tests for the pinging mechanism of the CLI."""
def test_success(self, monkeypatch, capsys):
"""Test that pinging was successful."""
with monkeypatch.context() as m:
m.setattr(cli, "Connection", MockSuccessfulConnection)
cli.ping()
out, _ = capsys.readouterr()
assert out == "You have successfully authenticated to the platform!\n"
def test_fail(self, monkeypatch, capsys):
"""Test that pinging failed."""
with monkeypatch.context() as m:
m.setattr(cli, "Connection", MockFailedConnection)
out, _ = capsys.readouterr()
cli.ping()
out, _ = capsys.readouterr()
assert out == "There was a problem when authenticating to the platform!\n"
# Keys are adjusted to the prompt message displayed to the user
MOCK_PROMPTS = {
"token": "MyAuth",
"hostname": "MyHost",
"port": 123,
"SSL": "n",
}
EXPECTED_KWARGS_FOR_PROMPTS = {
"authentication_token": "MyAuth",
"hostname": "MyHost",
"port": 123,
"use_ssl": False,
}
def mock_input(arg):
"""A mock function that substitutes the built-in input function."""
option = {k: v for k, v in MOCK_PROMPTS.items() if k in arg}
if option and len(option) == 1:
return list(option.values())[0]
class TestConfigureEverything:
"""Unit tests for the configuration_wizard function."""
def test_no_auth_exit_with_message(self, monkeypatch, capsys):
"""Test that by default the configuration_wizard function exits with a
relevant message."""
with monkeypatch.context() as m:
m.setattr(builtins, "input", lambda *args: False)
with pytest.raises(SystemExit):
cli.configuration_wizard()
out, _ = capsys.readouterr()
assert out == "No authentication token was provided, please configure again."
def test_auth_correct(self, monkeypatch):
"""Test that by default the configuration_wizard function works
correctly, once the authentication token is passed."""
with monkeypatch.context() as m:
auth_prompt = "Please enter the authentication token"
default_config = cli.create_config()["api"]
default_auth = "SomeAuth"
default_config['authentication_token'] = default_auth
m.setattr(builtins, "input", lambda arg: default_auth if (auth_prompt in arg) else "")
assert cli.configuration_wizard() == default_config
def test_correct_inputs(self, monkeypatch):
"""Test that the configuration_wizard function returns a dictionary
based on the inputs, when each configuration detail was inputted."""
with monkeypatch.context() as m:
auth_prompt = "Please enter the authentication token"
default_config = cli.create_config()["api"]
default_auth = "SomeAuth"
default_config['authentication_token'] = default_auth
m.setattr(builtins, "input", mock_input)
assert cli.configuration_wizard() == EXPECTED_KWARGS_FOR_PROMPTS
class MockProgram:
"""A mock class used for capturing the arguments with which the
the Program class is instantiated."""
class MockRemoteEngine:
"""A mock class used for capturing the arguments with which the
the RemoteEngine class is instantiated and its run method is called."""
class MockWriteScriptResults:
"""A mock class used for capturing the arguments with which the
write_script_results function is being called."""
TEST_SCRIPT = """\
name template_1x2_X8_01 # Name of the program
version 1.0 # Blackbird version number
target X8_01 (shots = 50) # This program will run on X8_01 for 50 shots
# Define the interferometer phase values
float phi0 = 0.574
float phi1 = 1.33
MZgate(phi0, phi1) | [0, 1]
MZgate(phi0, phi1) | [4, 5]
# Perform a photon number counting measurement
MeasureFock() | [0, 1, 2, 3, 4, 5, 6, 7]
"""
class TestRunBlackbirdScript:
"""Unit tests for the run_blackbird_script function."""
def test_exit_if_file_not_found(self, monkeypatch, capsys):
"""Tests that if the input script file was not found then a system exit
occurs along with a message being outputted."""
mocked_program = MockProgram()
mocked_args = MockArgs()
with monkeypatch.context() as m:
m.setattr(cli, "load", mock_load)
with pytest.raises(SystemExit):
cli.run_blackbird_script(mocked_args)
out, _ = capsys.readouterr()
assert "blackbird script was not found" in out
def test_result_is_none(self, monkeypatch, capsys):
"""Tests that the write_script_results function is not called if the
results from the run method of the engine returned a None."""
mocked_program = MockProgram()
mocked_args = MockArgs()
mocked_write_script_results = MockWriteScriptResults()
with monkeypatch.context() as m:
m.setattr(cli, "load", lambda arg: mocked_program)
m.setattr(cli, "RemoteEngine", MockRemoteEngine)
m.setattr(cli, "write_script_results", mocked_write_script_results.write_script_results)
with pytest.raises(SystemExit):
cli.run_blackbird_script(mocked_args)
out, _ = capsys.readouterr()
assert "Executing program on remote hardware..." in out
# Check that the write_script_results function was not called
assert not mocked_write_script_results.called
test_samples = [1, 2, 3, 4]
class MockRemoteEngineIntegration:
"""A mock class used for capturing the arguments with which the
the RemoteEngine class is instantiated and its run method is called when
multiple components are tested."""
class TestRunBlackbirdScriptIntegration:
"""Tests for the run_blackbird_script function that integrate multiple
components."""
def test_integration_std_out(self, tmpdir, monkeypatch, capsys):
"""Tests that a blackbird script was loaded and samples were written to
the standard output using the run_blackbird_script function."""
filepath = tmpdir.join("test_script.xbb")
with open(filepath, "w") as f:
f.write(TEST_SCRIPT)
mocked_args = MockArgs()
mocked_args.input = filepath
with monkeypatch.context() as m:
m.setattr(cli, "RemoteEngine", MockRemoteEngineIntegration)
cli.run_blackbird_script(mocked_args)
out, err = capsys.readouterr()
execution_message = "Executing program on remote hardware...\n"
outputs = execution_message + str(Result(test_samples).samples)
assert outputs == out
def test_integration_file(self, tmpdir, monkeypatch, capsys):
"""Tests that a blackbird script was loaded and samples were written to
the specified output file using the run_blackbird_script function."""
filepath = tmpdir.join("test_script.xbb")
with open(filepath, "w") as f:
f.write(TEST_SCRIPT)
mocked_args = MockArgs()
mocked_args.input = filepath
out_filepath = tmpdir.join("test_script.xbb")
mocked_args.output = out_filepath
with monkeypatch.context() as m:
m.setattr(cli, "RemoteEngine", MockRemoteEngineIntegration)
cli.run_blackbird_script(mocked_args)
with open(filepath, "r") as f:
results_from_file = f.read()
out, _ = capsys.readouterr()
assert out == "Executing program on remote hardware...\n"
assert results_from_file == str(Result(test_samples).samples)
class TestWriteScriptResults:
"""Tests for the write_script_results function."""
def test_write_to_file(self, tmpdir):
"""Tests that the write_script_results function writes to file
correctly."""
some_samples = [1, 2, 3, 4, 5]
filepath = tmpdir.join("test_script.xbb")
cli.write_script_results(some_samples, output_file=filepath)
with open(filepath, "r") as f:
results_from_file = f.read()
assert results_from_file == str(some_samples)
def test_write_to_std_out(self, monkeypatch, capsys):
"""Tests that the write_script_results function writes to the standard
output correctly."""
some_samples = [1, 2, 3, 4, 5]
with monkeypatch.context() as m:
cli.write_script_results(some_samples)
out, _ = capsys.readouterr()
assert out == str(some_samples)
| [
2,
15069,
13130,
12,
42334,
47482,
324,
84,
29082,
21852,
3457,
13,
198,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,... | 2.657751 | 6,057 |
"""
更加简洁的WeightDefiner版本
用于实现bug report转vec
2021.4.14 crx
"""
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from file_reader import FileReader
from file_writer import JSONWriter
from constants import data_pool_path
import os
if __name__ == '__main__':
wd = VectorBuilder('report', 'bug2vec.json')
wd.process()
| [
37811,
198,
162,
249,
112,
27950,
254,
163,
106,
222,
162,
112,
223,
21410,
25844,
7469,
7274,
48304,
17312,
105,
198,
18796,
101,
12859,
236,
22522,
252,
163,
236,
108,
25456,
989,
164,
121,
105,
35138,
198,
1238,
2481,
13,
19,
13,
... | 2.528571 | 140 |
# @file cli.py
# @brief cli of whole tool
# README: Command line interface
# MODULE_ARCH:
# CLASS_ARCH:
# GLOBAL USAGE:
#standard
import cmd
#extend
#library
import lib.globalclasses as gc
from lib.const import *
##### Code section #####
#Spec: local variable maintain, about, user commands, test commands
#How/NeedToKnow:
class Cli(cmd.Cmd):
"""Simple command processor example."""
############ cli maintain ####################
def do_set(self,line):
"""set scli variable, can be new or update.
set var_name var_value
ex: set mac_addr 001122334455"""
pars=line.split()
if len(pars)==2:
var = pars[0]
value = pars[1]
else:
return
if var in ('dev_console_display','log_level_file','log_level_console'):
value = int(value)
gc.GAP.user_vars[var] = value
# dynamic apply
# these variable need to work out, log_level_file, log_level_console
def do_show(self,line):
"""show simcli variables, if miss variable name, show all
show variable_name
system variables list:
;log level definition, DEBUG=10,INFO=20,WARNING=30,ERROR=40,CRITICAL=50
log_level_console=20 #the console message log level
log_level_file=40 #file message log level
;device console real time display
dev_console_display=1 #(0) don't display (1) display
ex: show mac_addr """
for var in gc.GAP.user_vars.keys():
print("%s=%s" % ( var , gc.GAP.user_vars[var]))
############ top command ####################
#def do_test1(self, line):
# """current debug command"""
# self.cli_ebm.do_init("")
def do_about(self, line):
"""About this software"""
print("%s version: v%s" %(LSIM_TITLE,LSIM_VERSION))
def do_quit(self, line):
"""quit"""
return True
############ top command ####################
def do_simrun(self, line):
"""Start simulation
simrun [ until ]
; until: how many time unit simulation should run, default 15 hours
"""
pars=line.split()
until = 15
if len(pars)==1:
until = int(pars[0])
else:
return
gc.GAP.simrun(until)
def do_save_esri_xml(self,line):
"""Save map to RSRI ASCII xml format
save_esri_xml [ name ]
; name: export file name prefix
ex: save_esri_xml sim
ESRI export naming rule:
name.xml
"""
pars=line.split()
name = ""
if len(pars)==1:
name = pars[0]
else:
return
gc.UI.save_esri_xml(gc.MODEL,name)
def do_save_esri(self,line):
"""Save map to RSRI ASCII format
save_esri [ name ]
; name: export file name prefix
ex: save_esri map
ESRI export naming rule:
name_timestamp.asc (ex: abc_20160708210000.asc)
"""
pars=line.split()
name = ""
if len(pars)==1:
name = pars[0]
else:
return
gc.UI.save_esri(gc.MODEL.map,name)
    def do_loadlass(self,line):
        """load current lass data"""
        # Load the LASS site list, tag sites falling inside the model's
        # bounding box under the 'default' tag, then load their history.
        lassdata = gc.LASSDATA
        lassdata.load_site_list()
        lassdata.tag_site_by_area('default',gc.MODEL.corners)#[24.0, 120.0 ,25.0,121.0])
        #lassdata.load_site_history_of_2day('FT1_001')
        lassdata.load_his_by_tag('default')
        #lassdata.desc(0)
        #lassdata.save_csv('default','output/lass_his.csv')
    def do_test(self,line):
        """current debug command"""
        # Scratch/debug command: load every environment data source and
        # print a summary. The commented lines load individual sources.
        #gc.ENVDATA.load_car_density()
        #gc.ENVDATA.load_population_count()
        #gc.ENVDATA.load_fixed_pollution_srcs("include/%s" % (gc.SETTING["IN_FIX_POLLUTION_SRC_DIR"]))
        #gc.ENVDATA.load_cwb_weather_curr()
        #gc.ENVDATA.load_cwb_weather_gfs("include/%s" %(gc.SETTING["IN_CWB_WEATHER_GFS"]))
        gc.ENVDATA.load_all()
        gc.ENVDATA.desc(0)
2,
2488,
7753,
537,
72,
13,
9078,
198,
2,
2488,
65,
3796,
537,
72,
286,
2187,
2891,
198,
2,
20832,
11682,
25,
9455,
1627,
7071,
198,
2,
33893,
62,
31315,
25,
220,
220,
198,
2,
42715,
62,
31315,
25,
198,
2,
10188,
9864,
1847,
129... | 2.018905 | 2,063 |
#!/usr/bin/env python
# List the endpoint address of every RDS DB instance visible to the
# boto3 default credentials/region configuration.
import boto3
rds = boto3.client('rds')
try:
    # get all of the db instances
    dbs = rds.describe_db_instances()
    for db in dbs['DBInstances']:
        # Endpoint.Address is the DNS name clients connect to.
        print ("{}".format(db['Endpoint']['Address']))
except Exception as error:
    # Best-effort script: report any API/credential error and exit cleanly.
    print (error)
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
11748,
275,
2069,
18,
198,
4372,
82,
796,
275,
2069,
18,
13,
16366,
10786,
4372,
82,
11537,
198,
28311,
25,
198,
220,
220,
220,
1303,
651,
477,
286,
262,
20613,
10245,
198,
220,
220,... | 2.419643 | 112 |
#!/usr/bin/env python
#
# ovf.py - Class for OVF/OVA handling
#
# August 2013, Glenn F. Matthews
# Copyright (c) 2013-2017, 2019 the COT project developers.
# See the COPYRIGHT.txt file at the top-level directory of this distribution
# and at https://github.com/glennmatthews/cot/blob/master/COPYRIGHT.txt.
#
# This file is part of the Common OVF Tool (COT) project.
# It is subject to the license terms in the LICENSE.txt file found in the
# top-level directory of this distribution and at
# https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part
# of COT, including this file, may be copied, modified, propagated, or
# distributed except according to the terms contained in the LICENSE.txt file.
"""Module for handling OVF and OVA virtual machine description files.
**Classes**
.. autosummary::
:nosignatures:
OVF
"""
import logging
import os
import os.path
import re
import tarfile
import xml.etree.ElementTree as ET # noqa: N814
from xml.etree.ElementTree import ParseError
import textwrap
from COT.xml_file import XML
from COT.data_validation import (
match_or_die, check_for_conflict, file_checksum,
ValueTooHighError, ValueUnsupportedError, canonicalize_nic_subtype,
)
from COT.file_reference import FileReference, FileOnDisk
from COT.platforms import Platform
from COT.disks import DiskRepresentation
from COT.utilities import pretty_bytes, tar_entry_size
from ..vm_description import VMDescription, VMInitError
from .name_helper import name_helper, CIM_URI
from .hardware import OVFHardware, OVFHardwareDataError
from .item import list_union
from .utilities import (
int_bytes_to_programmatic_units, parse_manifest, programmatic_bytes_to_int,
)
logger = logging.getLogger(__name__)
class OVF(VMDescription, XML):
"""Representation of the contents of an OVF or OVA.
**Properties**
.. autosummary::
:nosignatures:
input_file
output_file
ovf_version
product_class
platform
config_profiles
default_config_profile
environment_properties
environment_transports
networks
network_descriptions
system_types
version_short
version_long
"""
# API methods to be called by clients
@staticmethod
def detect_type_from_name(filename):
"""Check the given filename to see if it looks like a type we support.
For our purposes, the file needs to match ".ov[af]" to appear to be
an OVF/OVA file. We also support names like "foo.ovf.20150101" as those
have been seen in the wild.
Does not check file contents, as the given filename may not yet exist.
Args:
filename (str): File name/path
Returns:
str: '.ovf', '.box' or '.ova'
Raises:
ValueUnsupportedError: if filename doesn't match ovf/ova
"""
# We don't care about any directory path
filename = os.path.basename(filename)
extension = os.path.splitext(filename)[1]
if extension == ".ovf" or extension == ".ova" or extension == ".box":
return extension
# Some sources of files are not good about preserving the extension
# and hence tend to append additional extensions - while this may open
# us to incorrect behavior (assuming that 'foo.ovf.zip' is a valid OVF
# when it's probably a zip of an OVF) we'll err on the side of
# accepting too much rather than incorrectly rejecting something like
# "foo.ova.2014.05.06A" that's just lazily named.
match = re.search(r"(\.ov[fa])[^a-zA-Z0-9]", filename)
if match:
extension = match.group(1)
logger.warning("Filename '%s' does not end in '.ovf' or '.ova', "
"but found '%s' in mid-filename; treating as such.",
filename, extension)
return extension
raise ValueUnsupportedError("filename", filename, ('.ovf', '.ova'))
def _ovf_descriptor_from_name(self, input_file):
"""Get the OVF descriptor for the given file.
1. The file may be an OVF descriptor itself.
2. The file may be an OVA, in which case we need to untar it and
return the path to the extracted OVF descriptor.
Args:
input_file (str): Path to an OVF descriptor or OVA file.
Returns:
str: OVF descriptor path
"""
extension = self.detect_type_from_name(input_file)
if extension == '.ova' or extension == '.box':
# Untar the ova to our working directory
return self.untar(input_file)
elif extension == '.ovf':
return input_file
else:
return None
    def __init__(self, input_file, output_file):
        """Open the specified OVF and read its XML into memory.

        Args:
          input_file (str): Data file to read in.
          output_file (str): File name to write to. If this VM is read-only,
              (there will never be an output file) this value should be
              ``None``; if the output filename is not yet known, use ``""``
              and subsequently set :attr:`output_file` when it is determined.

        Raises:
          VMInitError:
              * if the OVF descriptor cannot be located
              * if an XML parsing error occurs
              * if the XML is not actually an OVF descriptor
              * if the OVF hardware validation fails
          Exception: will call :meth:`destroy` to clean up before reraising
              any exception encountered.
        """
        try:
            self.output_extension = None
            VMDescription.__init__(self, input_file, output_file)
            # Make sure we know how to read the input
            self.ovf_descriptor = self._ovf_descriptor_from_name(input_file)
            if self.ovf_descriptor is None:
                # We should never get here, but be safe...
                raise VMInitError(
                    2,
                    "File does not appear to be an OVA or OVF",
                    input_file)
            # Open the provided OVF
            try:
                XML.__init__(self, self.ovf_descriptor)
            except ParseError as exc:
                raise VMInitError(2,
                                  "XML error in parsing file: " + str(exc),
                                  self.ovf_descriptor)
            # Quick sanity check before we go any further:
            # the root element must be an (any-namespace) Envelope.
            if ((not re.search(r"Envelope", self.root.tag)) or
                    (XML.strip_ns(self.root.tag) != 'Envelope')):
                raise VMInitError(
                    2,
                    "File does not appear to be an OVF descriptor - "
                    "expected top-level element {0} but found {1} instead"
                    .format('Envelope', self.root.tag),
                    self.ovf_descriptor)
            # Accessing the ovf_version property below derives the version
            # from the root namespace and caches it in _ovf_version.
            self._ovf_version = None
            self.name_helper = name_helper(self.ovf_version)
            for (prefix, uri) in self.NSM.items():
                ET.register_namespace(prefix, uri)
            # Register additional non-standard namespaces we're aware of:
            ET.register_namespace('vmw', "http://www.vmware.com/schema/ovf")
            ET.register_namespace('vbox',
                                  "http://www.virtualbox.org/ovf/machine")
            ET.register_namespace(
                'pasd',
                CIM_URI + "/cim-schema/2/CIM_ProcessorAllocationSettingData")
            # Go ahead and set pointers to some of the most useful XML
            # sections. Sections marked required=True raise if absent.
            self.envelope = self.root
            self.references = self.find_child(
                self.envelope,
                self.REFERENCES,
                required=True)
            self.disk_section = self.find_child(
                self.envelope,
                self.DISK_SECTION,
                attrib=self.DISK_SECTION_ATTRIB)
            self.network_section = self.find_child(
                self.envelope,
                self.NETWORK_SECTION,
                attrib=self.NETWORK_SECTION_ATTRIB)
            self.deploy_opt_section = self.find_child(
                self.envelope,
                self.DEPLOY_OPT_SECTION,
                required=False)
            self.virtual_system = self.find_child(
                self.envelope,
                self.VIRTUAL_SYSTEM,
                attrib=self.VIRTUAL_SYSTEM_ATTRIB,
                required=True)
            self.product_section = self.find_child(
                self.virtual_system,
                self.PRODUCT_SECTION,
                attrib=self.PRODUCT_SECTION_ATTRIB)
            self.annotation_section = self.find_child(
                self.virtual_system,
                self.ANNOTATION_SECTION,
                attrib=self.ANNOTATION_SECTION_ATTRIB)
            self.virtual_hw_section = self.find_child(
                self.virtual_system,
                self.VIRTUAL_HW_SECTION,
                attrib=self.VIRTUAL_HW_SECTION_ATTRIB,
                required=True)
            # Initialize various caches
            self._configuration_profiles = None
            self._file_references = {}
            self._platform = None
            try:
                self.hardware = OVFHardware(self)
            except OVFHardwareDataError as exc:
                raise VMInitError(1,
                                  "OVF descriptor is invalid: {0}".format(exc),
                                  self.ovf_descriptor)
            # Forces lazy platform detection to run now.
            assert self.platform
            self.file_references = self._init_check_file_entries()
            """Dictionary of FileReferences for this package.

            Does not include the manifest file."""
        except Exception:
            # Clean up any temporary/working state before re-raising.
            self.destroy()
            raise
def _compare_file_lists(self, descriptor_file_list, manifest_file_list):
"""Compare two lists of files.
Helper for _init_check_file_entries method.
Args:
descriptor_file_list (list): List of file names derived from the
OVF descriptor.
manifest_file_list (list): List of file names derived from the
manifest file (minus the descriptor itself).
"""
if not manifest_file_list:
return
descriptor_in_manifest = False
# DSP0243 2.1.0: "The manifest file shall contain SHA digests for all
# distinct files referenced in the References element
# of the OVF descriptor and for no other files."
for filename in manifest_file_list:
if filename == os.path.basename(self.ovf_descriptor):
# Manifest should reference the descriptor, but of course the
# descriptor does not reference itself
descriptor_in_manifest = True
elif filename not in descriptor_file_list:
logger.error("The manifest lists file '%s' but the OVF"
" descriptor does not include it in its"
" References section", filename)
for filename in descriptor_file_list:
if filename not in manifest_file_list:
logger.error("The OVF descriptor references file '%s' but"
" this file is not included in the manifest",
filename)
if not descriptor_in_manifest:
logger.error("The manifest does not list the OVF descriptor")
    def _init_check_file_entries(self):
        """Check files described in the OVF and store file references.

        Also compare the referenced files against the manifest, if any.

        Returns:
          dict: File HREF (file name) --> :class:`~COT.FileReference` object.
              Note that this does *not* include the OVF manifest file.
        """
        # Map of file HREF -> declared size, from the References section.
        descriptor_files = dict(
            [(elem.get(self.FILE_HREF), elem.get(self.FILE_SIZE)) for
             elem in self.references.findall(self.FILE)])
        if self.input_file == self.ovf_descriptor:
            # Check files in the directory referenced by the OVF descriptor
            input_path = os.path.dirname(os.path.abspath(self.ovf_descriptor))
        else:
            # OVA - check contents of TAR file.
            input_path = os.path.abspath(self.input_file)
        file_references = {}
        # Manifest has the same stem as the descriptor, with ".mf" suffix.
        mf_filename = os.path.splitext(
            os.path.basename(self.ovf_descriptor))[0] + ".mf"
        manifest_entries = {}
        try:
            # We don't store the manifest file itself in file_references,
            # as it's basically a read-once file and storing it in the file
            # references causes much confusion when writing back out to
            # generate the OVF descriptor and manifest file.
            manifest_file = FileReference.create(input_path, mf_filename)
            with manifest_file.open('rb') as file_obj:
                manifest_text = file_obj.read().decode()
            manifest_entries = parse_manifest(manifest_text)
        except IOError:
            # A manifest is optional; proceed without checksum validation.
            logger.debug("Manifest file is missing or unreadable.")
        self._compare_file_lists(descriptor_files.keys(),
                                 manifest_entries.keys())
        # Check the checksum of the descriptor itself
        # We don't store this in file_references as that would be
        # prone to self-recursion.
        m_algo, m_cksum = manifest_entries.get(
            os.path.basename(self.ovf_descriptor), (None, None))
        if m_algo and m_algo != self.checksum_algorithm:
            # TODO: log a warning? Discard the checksum?
            pass
        FileReference.create(
            input_path, os.path.basename(self.ovf_descriptor),
            checksum_algorithm=self.checksum_algorithm,
            expected_checksum=m_cksum)
        # Now check the checksum of the other files
        for file_href, file_size in descriptor_files.items():
            m_algo, m_cksum = manifest_entries.get(file_href, (None, None))
            if m_algo and m_algo != self.checksum_algorithm:
                # TODO: log a warning? Discard the checksum?
                pass
            try:
                file_references[file_href] = FileReference.create(
                    input_path, file_href,
                    checksum_algorithm=self.checksum_algorithm,
                    expected_checksum=m_cksum,
                    expected_size=file_size)
            except IOError:
                # A missing referenced file is an error but not fatal;
                # it is simply omitted from the returned dict.
                logger.error("File '%s' referenced in the OVF descriptor "
                             "does not exist.", file_href)
                continue
        return file_references
    @property
    def output_file(self):
        """OVF or OVA file that will be created or updated by :meth:`write`.

        Raises:
          ValueUnsupportedError: if :func:`detect_type_from_name` fails
        """
        # Delegate to the parent class's getter; the setter is defined
        # separately via @output_file.setter.
        return super(OVF, self).output_file
@output_file.setter
@property
def ovf_version(self):
"""Float representing the OVF specification version in use.
Supported values at present are 0.9, 1.0, and 2.0.
"""
if self._ovf_version is None:
root_namespace = XML.get_ns(self.root.tag)
logger.debug("Root namespace is " + root_namespace)
if root_namespace == 'http://www.vmware.com/schema/ovf/1/envelope':
logger.info("OVF version is 0.9")
self._ovf_version = 0.9
elif root_namespace == 'http://schemas.dmtf.org/ovf/envelope/1':
logger.info("OVF version is 1.x")
self._ovf_version = 1.0
elif root_namespace == 'http://schemas.dmtf.org/ovf/envelope/2':
logger.info("OVF version is 2.x")
self._ovf_version = 2.0
else:
raise VMInitError(
2,
"File has an Envelope but it is in unknown namespace '{0}'"
.format(root_namespace),
self.ovf_descriptor)
return self._ovf_version
@property
def checksum_algorithm(self):
"""Get the preferred file checksum algorithm for this OVF."""
if self.ovf_version >= 2.0:
# OVF 2.x uses SHA256 for manifest
return 'sha256'
else:
# OVF 0.x and 1.x use SHA1
return 'sha1'
    @property
    def product_class(self):
        """Get/set the product class identifier, such as com.cisco.csr1000v."""
        # Lazily read the class from the ProductSection the first time,
        # then defer to the parent class's cached value.
        if self._product_class is None and self.product_section is not None:
            self._product_class = self.product_section.get(self.PRODUCT_CLASS)
        return super(OVF, self).product_class
@product_class.setter
    @property
    def platform(self):
        """Get the platform type, as determined from the OVF descriptor.

        This will be the class :class:`~COT.platforms.Platform` or
        a more-specific subclass if recognized as such.
        """
        # Lazily resolve the platform from the product class and cache it.
        if self._platform is None:
            self._platform = Platform.for_product_string(self.product_class)
            logger.info("OVF product class %s --> platform %s",
                        self.product_class, self.platform)
        return self._platform
    def validate_hardware(self):
        """Check sanity of hardware properties for this VM/product/platform.

        Validates CPU count, memory amount, NIC count, and NIC types against
        the detected platform's constraints, for each configuration profile.
        Violations are logged as warnings rather than raised.

        Returns:
          bool: ``True`` if hardware is sane, ``False`` if not.
        """
        result = True
        # TODO refactor to share logic with profile_info_list()
        profile_ids = self.config_profiles
        if not profile_ids:
            # No profiles defined - validate the profile-independent config.
            profile_ids = [None]
        plat = self.platform
        def _validate_helper(label, validator, *args):
            """Call validation function, catch errors and warn user instead.

            Args:
              label (str): Label to prepend to any warning messages
              validator (function): Validation function to call.
              *args (list): Arguments to validation function.

            Returns:
              bool: True if valid, False if invalid
            """
            try:
                validator(*args)
                return True
            except ValueUnsupportedError as exc:
                logger.warning(label + str(exc))
                return False
        for profile_id in profile_ids:
            profile_str = ""
            if profile_id:
                profile_str = "In profile '{0}':".format(profile_id)
            cpu_item = self.hardware.find_item('cpu', profile=profile_id)
            if cpu_item:
                cpus = cpu_item.get_value(self.VIRTUAL_QUANTITY,
                                          [profile_id])
                result &= _validate_helper(profile_str,
                                           plat.validate_cpu_count, int(cpus))
            ram_item = self.hardware.find_item('memory', profile=profile_id)
            if ram_item:
                # Convert declared quantity+units to MiB for the platform.
                megabytes = (programmatic_bytes_to_int(
                    ram_item.get_value(self.VIRTUAL_QUANTITY, [profile_id]),
                    ram_item.get_value(self.ALLOCATION_UNITS, [profile_id])
                ) / (1024 * 1024))
                result &= _validate_helper(profile_str,
                                           plat.validate_memory_amount,
                                           int(megabytes))
            nics = self.hardware.get_item_count('ethernet', profile_id)
            result &= _validate_helper(profile_str,
                                       plat.validate_nic_count, nics)
            # Collect the distinct NIC subtypes across all ethernet items.
            eth_subtypes = list_union(
                *[eth.get_all_values(self.RESOURCE_SUB_TYPE) for
                  eth in self.hardware.find_all_items('ethernet')])
            result &= _validate_helper(profile_str,
                                       plat.validate_nic_types, eth_subtypes)
        # TODO: validate_ide_subtypes
        # TODO: validate_scsi_subtypes
        return result
    @property
    def config_profiles(self):
        """Get the list of supported configuration profiles.

        If this OVF has no defined profiles, returns an empty list.
        If there is a default profile, it will be first in the list.
        """
        # Lazily built from the DeploymentOptionSection and cached.
        if self._configuration_profiles is None:
            profile_ids = []
            if self.deploy_opt_section is not None:
                profiles = self.deploy_opt_section.findall(self.CONFIG)
                for profile in profiles:
                    # Force the "default" profile to the head of the list
                    if (profile.get(self.CONFIG_DEFAULT) == 'true' or
                            profile.get(self.CONFIG_DEFAULT) == '1'):
                        profile_ids.insert(0, profile.get(self.CONFIG_ID))
                    else:
                        profile_ids.append(profile.get(self.CONFIG_ID))
            logger.verbose("Current configuration profiles are: %s",
                           profile_ids)
            self._configuration_profiles = profile_ids
        return self._configuration_profiles
    @property
    def environment_properties(self):
        """Get the array of environment properties.

        Array of dicts (one per property) with the keys ``"key"``, ``"value"``,
        ``"qualifiers"``, ``"type"``, ``"user_configurable"``, ``"label"``,
        and ``"description"``.
        """
        result = []
        # Properties only exist in OVF 1.0+ and require a ProductSection.
        if self.ovf_version < 1.0 or self.product_section is None:
            return result
        elems = self.product_section.findall(self.PROPERTY)
        for elem in elems:
            # Label and description are child elements; everything else is
            # stored in attributes of the Property element.
            label = elem.findtext(self.PROPERTY_LABEL, "")
            descr = elem.findtext(self.PROPERTY_DESC, "")
            result.append({
                'key': elem.get(self.PROP_KEY),
                'value': elem.get(self.PROP_VALUE),
                'qualifiers': elem.get(self.PROP_QUAL, ""),
                'type': elem.get(self.PROP_TYPE, ""),
                'user_configurable': elem.get(self.PROP_USER_CONFIGABLE, ""),
                'label': label,
                'description': descr,
            })
        return result
@property
def environment_transports(self):
"""Get/set the list of environment transport method strings."""
if self.ovf_version < 1.0:
return None
if self.virtual_hw_section is not None:
value = self.virtual_hw_section.get(self.ENVIRONMENT_TRANSPORT)
if value:
return value.split(" ")
return None
@environment_transports.setter
@property
def networks(self):
"""Get the list of network names currently defined in this VM."""
if self.network_section is None:
return []
return [network.get(self.NETWORK_NAME) for
network in self.network_section.findall(self.NETWORK)]
@property
def network_descriptions(self):
"""Get the list of network descriptions currently defined in this VM.
Returns:
list: List of network description strings
"""
if self.network_section is None:
return []
return [network.findtext(self.NWK_DESC, "") for
network in self.network_section.findall(self.NETWORK)]
    @property
    def system_types(self):
        """Get/set the list of virtual system type(s) supported by this VM.

        For an OVF, this corresponds to the ``VirtualSystemType`` element.
        """
        if self.virtual_hw_section is not None:
            system = self.virtual_hw_section.find(self.SYSTEM)
            if system is not None:
                # Multiple types are stored space-separated in one element.
                value = system.findtext(self.VIRTUAL_SYSTEM_TYPE, None)
                if value:
                    return value.split(" ")
        return None
@system_types.setter
@property
def product(self):
"""Short descriptive product string (XML ``Product`` element)."""
if self.product_section is not None:
return self.product_section.findtext(self.PRODUCT, None)
return None
@product.setter
@property
def vendor(self):
"""Short descriptive vendor string (XML ``Vendor`` element)."""
if self.product_section is not None:
return self.product_section.findtext(self.VENDOR, None)
return None
@vendor.setter
@property
def version_short(self):
"""Short descriptive version string (XML ``Version`` element)."""
if self.product_section is not None:
return self.product_section.findtext(self.VERSION, None)
return None
@version_short.setter
@property
def version_long(self):
"""Long descriptive version string (XML ``FullVersion`` element)."""
if self.product_section is not None:
return self.product_section.findtext(self.FULL_VERSION, None)
return None
@version_long.setter
@property
def product_url(self):
"""Product URL string (XML ``ProductUrl`` element)."""
if self.product_section is not None:
return self.product_section.findtext(self.PRODUCT_URL, None)
return None
@product_url.setter
@property
def vendor_url(self):
"""Vendor URL string (XML ``VendorUrl`` element)."""
if self.product_section is not None:
return self.product_section.findtext(self.VENDOR_URL, None)
return None
@vendor_url.setter
@property
def application_url(self):
"""Application URL string (XML ``AppUrl`` element)."""
if self.product_section is not None:
return self.product_section.findtext(self.APPLICATION_URL, None)
return None
@application_url.setter
def __getattr__(self, name):
"""Transparently pass attribute lookups off to name_helper.
Args:
name (str): Attribute being looked up.
Returns:
Attribute value
Raises:
AttributeError: Magic methods (``__foo``) will not be passed
through but will raise an AttributeError as usual.
"""
# Don't pass 'special' attributes through to the helper
if re.match(r"^__", name):
raise AttributeError("'OVF' object has no attribute '{0}'"
.format(name))
return getattr(self.name_helper, name)
    def predicted_output_size(self):
        """Estimate how much disk space (in bytes) is needed to write out.

        Since OVA (TAR) is an uncompressed format, the disk space required
        is approximately the same for both OVF and OVA output. Thus we can
        provide this value even if :attr:`output_file` is ``None``.

        In the TAR format, each file in the archive has a 512-byte header
        and its total size is rounded up to a multiple of 512 bytes. The
        archive is terminated by 2 512-byte blocks filled with zero, and
        the overall archive file size is a multiple of 10 kiB.

        Returns:
          int: Estimated number of bytes consumed when writing out to
              :attr:`output_file` (plus any associated files).
        """
        # Size of the OVF descriptor
        needed = tar_entry_size(len(ET.tostring(self.root)))
        # Account for the size of all the referenced files
        manifest_size = 0
        for href, file_ref in self.file_references.items():
            # Approximate size of a manifest entry for this file
            if self.ovf_version >= 2.0:
                # SHA256(href)= <64 hex digits>
                # so 64 + href length + ~12 other characters
                manifest_size += 76 + len(href)
            else:
                # SHA1(href)= <40 hex digits>
                # so 40 + href length + ~10 other characters
                manifest_size += 50 + len(href)
            # Size of the file proper
            needed += tar_entry_size(file_ref.size)
        # Manifest file
        needed += tar_entry_size(manifest_size)
        # Archive end - two 512-byte records filled with zeros
        needed += 1024
        # Overall size must be a multiple of 10 kiB
        needed += (10240 - needed) % 10240
        logger.debug("Estimated output size is %s", pretty_bytes(needed))
        return needed
    def write(self):
        """Write OVF or OVA to :attr:`output_file`, if set.

        Refreshes hardware, file references, and networks before writing;
        writes a .ovf (plus copied files and manifest) or a .ova (tarball).

        Raises:
          NotImplementedError: if :attr:`output_extension` is neither
              '.ovf' nor '.ova'.
        """
        if not self.output_file:
            # Read-only VM or output not yet determined - nothing to do.
            return
        logger.info("Updating and validating internal data before writing"
                    " out to disk")
        prefix = os.path.splitext(self.output_file)[0]
        extension = self.output_extension
        # Update the XML ElementTree to reflect any hardware changes
        self.hardware.update_xml()
        # Validate the hardware to be written
        self.validate_hardware()
        # Make sure file references are correct:
        self._refresh_file_references()
        # Make sure all defined networks are actually used by NICs,
        # and delete any networks that are unused.
        self._refresh_networks()
        logger.info("Writing out to file %s", self.output_file)
        if extension == '.ova':
            # Write the descriptor + manifest to the working dir, then
            # bundle descriptor, manifest, and files into the tarball.
            ovf_file = os.path.join(self.working_dir, "{0}.ovf"
                                    .format(os.path.basename(prefix)))
            self.write_xml(ovf_file)
            self.generate_manifest(ovf_file)
            self.tar(ovf_file, self.output_file)
        elif extension == '.ovf':
            self.write_xml(self.output_file)
            # Copy all files from working directory to destination
            dest_dir = os.path.dirname(os.path.abspath(self.output_file))
            for file_ref in self.file_references.values():
                file_ref.copy_to(dest_dir)
            # Generate manifest
            self.generate_manifest(self.output_file)
        else:
            # We should never get here, but to be safe:
            raise NotImplementedError("Not sure how to write a '{0}' file"
                                      .format(extension))
    def _refresh_file_references(self):
        """Check all File entries to make sure they are valid and up to date.

        Drops references to files that no longer exist, updates declared
        file sizes, and corrects Disk capacities that have changed.

        Helper method for :func:`write`.
        """
        # Refresh the file references
        to_delete = []
        for filename, file_ref in self.file_references.items():
            if file_ref.exists:
                file_ref.refresh()
            else:
                # file used to exist but no longer does??
                logger.error("Referenced file '%s' does not exist!", filename)
                to_delete.append(filename)
        # Deferred deletion - can't mutate the dict while iterating it.
        for filename in to_delete:
            del self.file_references[filename]
        for file_elem in self.references.findall(self.FILE):
            href = file_elem.get(self.FILE_HREF)
            if href not in self.file_references:
                # TODO this should probably have a confirm() check...
                logger.notice("Removing reference to missing file %s", href)
                self.references.remove(file_elem)
                # TODO remove references to this file from Disk, Item?
        for filename, file_ref in self.file_references.items():
            file_elem = self.find_child(self.references, self.FILE,
                                        {self.FILE_HREF: filename})
            assert file_elem is not None
            # Keep the declared size in sync with the actual file.
            file_elem.set(self.FILE_SIZE, str(file_ref.size))
            real_capacity = None
            disk_item = self.find_disk_from_file_id(
                file_elem.get(self.FILE_ID))
            if disk_item is not None:
                # We can't check disk capacity inside a tar file.
                # It seems wasteful to extract the disk file (could be
                # quite large) from the TAR just to check, so we don't.
                if file_ref.file_path is not None:
                    diskrep = DiskRepresentation.from_file(file_ref.file_path)
                    real_capacity = diskrep.capacity
            if disk_item is not None and real_capacity is not None:
                reported_capacity = str(self.get_capacity_from_disk(disk_item))
                if reported_capacity != real_capacity:
                    logger.warning(
                        "Capacity of disk '%s' seems to have changed "
                        "from %s (reported in the original OVF) "
                        "to %s (actual capacity). "
                        "The updated OVF will reflect this change.",
                        filename, reported_capacity, real_capacity)
                    self.set_capacity_of_disk(disk_item, real_capacity)
def _refresh_networks(self):
"""Make sure all defined networks are actually used by NICs.
Delete any networks that are unused and warn the user.
Helper method for :func:`write`.
"""
if self.network_section is None:
return
networks = self.network_section.findall(self.NETWORK)
items = self.virtual_hw_section.findall(self.ETHERNET_PORT_ITEM)
connected_networks = set()
for item in items:
conn = item.find(self.EPASD + self.CONNECTION)
if conn is not None:
connected_networks.add(conn.text)
for net in networks:
name = net.get(self.NETWORK_NAME)
if name not in connected_networks:
logger.notice("Removing unused network %s", name)
self.network_section.remove(net)
# If all networks were removed, remove the NetworkSection too
if not self.network_section.findall(self.NETWORK):
logger.notice("No networks left - removing NetworkSection")
self.envelope.remove(self.network_section)
self.network_section = None
def _info_string_header(self, width):
"""Generate OVF/OVA file header for :meth:`info_string`.
Args:
width (int): Line length to wrap to where possible.
Returns:
str: File header
"""
str_list = []
str_list.append('-' * width)
str_list.append(self.input_file)
if self.platform and self.platform.__class__ is not Platform:
str_list.append("COT detected platform type: {0}"
.format(self.platform))
str_list.append('-' * width)
return '\n'.join(str_list)
    def _info_string_product(self, verbosity_option, wrapper):
        """Generate product information as part of :meth:`info_string`.

        Args:
          verbosity_option (str): 'brief', None (default), or 'verbose'
          wrapper (textwrap.TextWrapper): Helper object for wrapping text
              lines if needed.

        Returns:
          str: Product information, or None if nothing to report.
        """
        # Skip the section entirely when no relevant fields are populated
        # (in brief mode the URL/long-version fields don't count).
        if ((not any([self.product, self.vendor, self.version_short])) and
                (verbosity_option == 'brief' or not any([
                    self.product_url, self.vendor_url, self.version_long]))):
            return None
        str_list = []
        wrapper.initial_indent = ''
        wrapper.subsequent_indent = '          '
        # All elements in this section are optional
        for label, value, default, verbose_only in [
                ["Product:  ", self.product, "(No product string)", False],
                ["          ", self.product_url, "(No product URL)", True],
                ["Vendor:   ", self.vendor, "(No vendor string)", False],
                ["          ", self.vendor_url, "(No vendor URL)", True],
                ["Version:  ", self.version_short,
                 "(No version string)", False],
                ["          ", self.version_long,
                 "(No detailed version string)", True],
        ]:
            if verbosity_option == 'brief' and verbose_only:
                continue
            if value is None:
                value = default
            str_list.extend(wrapper.wrap("{0}{1}".format(label, value)))
        return "\n".join(str_list)
    def _info_string_annotation(self, wrapper):
        """Generate annotation information as part of :meth:`info_string`.

        Args:
          wrapper (textwrap.TextWrapper): Helper object for wrapping
              text lines if needed.

        Returns:
          str: Annotation information string, or None
        """
        if self.annotation_section is None:
            return None
        ann = self.annotation_section.find(self.ANNOTATION)
        if ann is None or not ann.text:
            return None
        str_list = []
        first = True
        wrapper.initial_indent = 'Annotation: '
        wrapper.subsequent_indent = '            '
        for line in ann.text.splitlines():
            if not line:
                str_list.append("")
            else:
                str_list.extend(wrapper.wrap(line))
            if first:
                # Only the first wrapped line carries the "Annotation:"
                # label; subsequent lines are indented to align with it.
                wrapper.initial_indent = wrapper.subsequent_indent
                first = False
        return "\n".join(str_list)
    def _info_string_eula(self, verbosity_option, wrapper):
        """Generate EULA information as part of :meth:`info_string`.

        Args:
          verbosity_option (str): 'brief', None (default), or 'verbose'
          wrapper (textwrap.TextWrapper): Helper object for wrapping
              text lines if needed.

        Returns:
          str: EULA information
        """
        # An OVF may have zero, one, or more
        eula_header = False
        str_list = []
        for eula in self.find_all_children(self.virtual_system,
                                           self.EULA_SECTION,
                                           self.EULA_SECTION_ATTRIB):
            info = eula.find(self.INFO)
            lic = eula.find(self.EULA_LICENSE)
            if lic is not None and lic.text:
                # Emit the section header once, before the first EULA.
                if not eula_header:
                    str_list.append("End User License Agreement(s):")
                    eula_header = True
                if info is not None and info.text:
                    wrapper.initial_indent = '  '
                    wrapper.subsequent_indent = '  '
                    str_list.extend(wrapper.wrap(info.text))
                if verbosity_option != 'verbose':
                    # Full license text is only shown in verbose mode.
                    str_list.append("    (not displayed, use 'cot info "
                                    "--verbose' if desired)")
                else:
                    wrapper.initial_indent = '    '
                    wrapper.subsequent_indent = '    '
                    for line in lic.text.splitlines():
                        if not line:
                            str_list.append("")
                        else:
                            str_list.extend(wrapper.wrap(line))
        return "\n".join(str_list)
INFO_STRING_DISK_TEMPLATE = (
"{{0:{0}}} " # file/disk name - width is dynamically set
"{{1:>9}} " # file size - width 9 for "999.9 MiB"
"{{2:>9}} " # disk capacity - width 9 for "999.9 MiB"
"{{3:.20}}" # disk info - width 20 for "harddisk @ SCSI 1:15"
)
INFO_STRING_DISK_COLUMNS_WIDTH = (1 + 9 + 1 + 9 + 1 + 20)
INFO_STRING_FILE_TEMPLATE = (
"{{0:{0}}} " # file/disk name - width is dynamically set
"{{1:>9}}" # file size - width 9 for "999.9 MiB"
)
    def _info_strings_for_file(self, file_obj):
        """Get attributes of a file which may describe a disk as well.

        Helper for :meth:`_info_string_files_disks`.

        Args:
          file_obj (xml.etree.ElementTree.Element): File to inspect

        Returns:
          tuple: (file_id, file_size, disk_id, disk_capacity, device_info)
        """
        # FILE_SIZE is optional
        reported_size = file_obj.get(self.FILE_SIZE)
        if reported_size is None:
            # TODO - check file size in working dir and/or tarfile
            file_size_str = ""
        else:
            file_size_str = pretty_bytes(reported_size)
        # If a Disk entry references this file, report its capacity and
        # find the hardware Item through the disk; otherwise look for an
        # Item referencing the file directly.
        disk_obj = self.find_disk_from_file_id(file_obj.get(self.FILE_ID))
        if disk_obj is None:
            disk_id = ""
            disk_cap_string = ""
            device_item = self.find_item_from_file(file_obj)
        else:
            disk_id = disk_obj.get(self.DISK_ID)
            disk_cap_string = pretty_bytes(
                self.get_capacity_from_disk(disk_obj))
            device_item = self.find_item_from_disk(disk_obj)
        device_str = self.device_info_str(device_item)
        return (file_obj.get(self.FILE_ID),
                file_size_str,
                disk_id,
                disk_cap_string,
                device_str)
def _info_string_files_disks(self, width, verbosity_option):
    """Describe files and disks as part of :meth:`info_string`.

    Args:
      width (int): Line length to wrap to where possible.
      verbosity_option (str): 'brief', None (default), or 'verbose'

    Returns:
      str: File/disk information string, or None if there are no
      files and no disks to describe.
    """
    file_list = self.references.findall(self.FILE)
    disk_list = (self.disk_section.findall(self.DISK)
                 if self.disk_section is not None else [])
    if not (file_list or disk_list):
        return None
    # Name-column width: fit the longest file href, but leave room for
    # the fixed-size columns, and never shrink below the placeholder
    # label width.
    href_w = 0
    if file_list:
        href_w = max([len(f.get(self.FILE_HREF)) for f in file_list])
    href_w = min(href_w, (width - self.INFO_STRING_DISK_COLUMNS_WIDTH - 2))
    href_w = max(href_w, 18)    # len("(placeholder disk)")
    href_w += 2    # leading whitespace for disks
    template = self.INFO_STRING_DISK_TEMPLATE.format(href_w)
    template2 = self.INFO_STRING_FILE_TEMPLATE.format(href_w)
    str_list = [template.format("Files and Disks:",
                                "File Size", "Capacity", "Device"),
                template.format("", "---------", "---------",
                                "--------------------")]
    for file_obj in file_list:
        (file_id, file_size,
         disk_id, disk_cap, device_str) = self._info_strings_for_file(
             file_obj)
        href_str = " " + file_obj.get(self.FILE_HREF)
        # Truncate to fit in available space
        if len(href_str) > href_w:
            href_str = href_str[:(href_w-3)] + "..."
        # Use the short template when the file has no disk attributes.
        if disk_cap or device_str:
            str_list.append(template.format(href_str, file_size,
                                            disk_cap, device_str))
        else:
            str_list.append(template2.format(href_str, file_size))
        if verbosity_option == 'verbose':
            str_list.append(" File ID: {0}".format(file_id))
            if disk_id:
                str_list.append(" Disk ID: {0}".format(disk_id))
    # Find placeholder disks as well - Disks whose fileRef points at
    # no existing File element, hence not reported in the loop above.
    for disk in disk_list:
        file_id = disk.get(self.DISK_FILE_REF)
        file_obj = self.find_child(self.references, self.FILE,
                                   attrib={self.FILE_ID: file_id})
        if file_obj is not None:
            continue    # already reported on above
        disk_cap_string = pretty_bytes(self.get_capacity_from_disk(disk))
        device_item = self.find_item_from_disk(disk)
        device_str = self.device_info_str(device_item)
        str_list.append(template.format(" (disk placeholder)",
                                        "--",
                                        disk_cap_string,
                                        device_str))
    return "\n".join(str_list)
def _info_string_hardware(self, wrapper):
    """Describe hardware subtypes as part of :meth:`info_string`.

    Args:
      wrapper (textwrap.TextWrapper): Helper object for wrapping
        text lines if needed.

    Returns:
      str: Hardware information string, or None
    """
    platform_types = self.system_types
    # Union of subtypes across all controllers/devices of each kind.
    subtype_map = {}
    for kind in ('scsi', 'ide', 'ethernet'):
        subtype_map[kind] = list_union(
            *[item.get_all_values(self.RESOURCE_SUB_TYPE)
              for item in self.hardware.find_all_items(kind)])
    if platform_types is None and not any(subtype_map.values()):
        return None
    lines = ["Hardware Variants:"]
    wrapper.subsequent_indent = ' ' * 28
    if platform_types is not None:
        wrapper.initial_indent = " System types: "
        lines.extend(wrapper.wrap(" ".join(platform_types)))
    for indent, kind in ((" SCSI device types: ", 'scsi'),
                         (" IDE device types: ", 'ide'),
                         (" Ethernet device types: ", 'ethernet')):
        if subtype_map[kind]:
            wrapper.initial_indent = indent
            lines.extend(wrapper.wrap(" ".join(subtype_map[kind])))
    return "\n".join(lines)
def _info_string_networks(self, verbosity_option, wrapper):
"""Describe virtual networks as part of :meth:`info_string`.
Args:
verbosity_option (str): 'brief', None (default), or 'verbose'
wrapper (textwrap.TextWrapper): Helper object for wrapping
text lines if needed.
Returns:
str: Network information string, or None
"""
if self.network_section is None:
return None
str_list = ["Networks:"]
width = wrapper.width
names = []
descs = []
for network in self.network_section.findall(self.NETWORK):
names.append(network.get(self.NETWORK_NAME))
descs.append(network.findtext(self.NWK_DESC, None))
max_n = max([len(name) for name in names])
max_d = max([len(str(desc)) for desc in descs])
truncate = (max_n + max_d + 6 >= width and
verbosity_option != 'verbose')
wrapper.initial_indent = " "
wrapper.subsequent_indent = ' ' * (5 + max_n)
if truncate:
max_d = width - 6 - max_n
for name, desc in zip(names, descs):
if not desc:
str_list.append(" " + name)
elif truncate and len(desc) > max_d:
str_list.append(' {name:{w}} "{tdesc}..."'.format(
name=name, w=max_n, tdesc=desc[:max_d-3]))
else:
str_list.extend(wrapper.wrap(
'{name:{w}} "{desc}"'.format(name=name, w=max_n,
desc=desc)))
return "\n".join(str_list)
def _info_string_nics(self, verbosity_option, wrapper):
"""Describe NICs as part of :meth:`info_string`.
Args:
verbosity_option (str): 'brief', None (default), or 'verbose'
wrapper (textwrap.TextWrapper): Helper object for wrapping
text lines if needed.
Returns:
str: NIC information string, or None
"""
if verbosity_option == 'brief':
return None
nics = self.hardware.find_all_items('ethernet')
if not nics:
return None
str_list = ["NICs and Associated Networks:"]
wrapper.initial_indent = ' '
wrapper.subsequent_indent = ' '
max_len = max([len(str(nic.get_value(self.ELEMENT_NAME)))
for nic in nics])
max_len = max(max_len, len("<instance 10>"))
template = " {name:{len}} : {nwk}"
for nic in nics:
network_name = nic.get_value(self.CONNECTION)
nic_name = nic.get_value(self.ELEMENT_NAME)
if nic_name is None:
nic_name = "<instance {0}>".format(
nic.get_value(self.INSTANCE_ID))
str_list.append(template.format(name=nic_name,
len=max_len,
nwk=network_name))
if verbosity_option == 'verbose':
desc = nic.get_value(self.ITEM_DESCRIPTION)
if desc is None:
desc = nic.get_value(self.CAPTION)
if desc is not None:
str_list.extend(wrapper.wrap(desc))
return "\n".join(str_list)
def _info_string_environment(self, wrapper):
"""Describe environment for :meth:`info_string`.
Args:
wrapper (textwrap.TextWrapper): Helper object for wrapping
text lines if needed.
Returns:
str: Environment information string, or None
"""
if not self.environment_transports:
return None
str_list = ["Environment:"]
wrapper.initial_indent = ' '
wrapper.subsequent_indent = ' '
str_list.extend(wrapper.wrap(
"Transport types: {0}"
.format(" ".join(self.environment_transports))))
return "\n".join(str_list)
def _info_string_properties(self, verbosity_option, wrapper):
    """Describe config properties for :meth:`info_string`.

    Args:
      verbosity_option (str): 'brief', None (default), or 'verbose'
      wrapper (textwrap.TextWrapper): Helper object for wrapping
        text lines if needed.

    Returns:
      str: Property information string, or None if there are no
      environment properties defined.
    """
    properties = self.environment_properties
    if not properties:
        return None
    str_list = ["Properties:"]
    # Column widths. Keys are rendered as "<key>", hence the extra 2.
    max_key = 2 + max([len(str(ph['key'])) for ph in properties])
    max_label = max([len(str(ph['label'])) for ph in properties])
    max_value = max([len(str(ph['value'])) for ph in properties])
    width = wrapper.width
    # If every property has a label, the label column alone suffices;
    # otherwise the first column must also fit keys.
    if all(ph['label'] for ph in properties):
        max_width = max_label
    else:
        max_width = max(max_key, max_label)
    wrapper.initial_indent = ' '
    wrapper.subsequent_indent = ' '
    for propdict in properties:
        # If we have a label, and the terminal is wide enough,
        # display "<key> label value", else if no label, display
        # "<key> value", else only display "label value"
        if max_label > 0 and (max_key + max_label + max_value <
                              width - 8):
            format_str = ' {key:{kw}} {label:{lw}} {val}'
            str_list.append(format_str.format(
                key="<{0}>".format(propdict['key']),
                kw=max_key,
                label=propdict['label'],
                lw=max_label,
                val=('"{0}"'.format(propdict['value'])
                     if propdict['value'] is not None
                     else '--')))
        else:
            str_list.append(' {label:{width}} {val}'.format(
                label=(propdict['label'] if propdict['label']
                       else "<{0}>".format(propdict['key'])),
                width=max_width,
                val=('"{0}"'.format(propdict['value'])
                     if propdict['value'] is not None
                     else '--')))
        if verbosity_option == 'verbose':
            # Preserve intentional blank lines in the description.
            for line in propdict['description'].splitlines():
                if not line:
                    str_list.append("")
                else:
                    str_list.extend(wrapper.wrap(line))
    return "\n".join(str_list)
def info_string(self, width=79, verbosity_option=None):
    """Get a descriptive string summarizing the contents of this OVF.

    Args:
      width (int): Line length to wrap to where possible.
      verbosity_option (str): 'brief', None (default), or 'verbose'

    Returns:
      str: Wrapped, appropriately verbose string.
    """
    # Collect section strings and join once at the end - cheaper than
    # repeatedly concatenating onto a growing string.
    # URI literals may exceed the width and have no sensible wrap
    # points, so never break long words or on hyphens.
    wrapper = textwrap.TextWrapper(width=width,
                                   break_long_words=False,
                                   break_on_hyphens=False)
    header = self._info_string_header(width)
    # Each helper returns its section string, or None/"" when it has
    # nothing to report; filter those out before joining.
    sections = filter(None, [
        self._info_string_product(verbosity_option, wrapper),
        self._info_string_annotation(wrapper),
        self._info_string_eula(verbosity_option, wrapper),
        self._info_string_files_disks(width, verbosity_option),
        self._info_string_hardware(wrapper),
        self.profile_info_string(width, verbosity_option),
        self._info_string_networks(verbosity_option, wrapper),
        self._info_string_nics(verbosity_option, wrapper),
        self._info_string_environment(wrapper),
        self._info_string_properties(verbosity_option, wrapper),
    ])
    return header + '\n' + "\n\n".join(sections)
def device_info_str(self, device_item):
    """Get a one-line summary of a hardware device.

    Args:
      device_item (OVFItem): Device to summarize

    Returns:
      str: Descriptive string such as "harddisk @ IDE 1:0"
    """
    if device_item is None:
        return ""
    parent = self.find_parent_from_item(device_item)
    if parent is None:
        # Orphaned device - we can't tell what bus it is attached to.
        bus_type, bus_addr = "(?)", "?"
    else:
        bus_type = parent.hardware_type.upper()
        bus_addr = parent.get_value(self.ADDRESS)
    return "{0} @ {1} {2}:{3}".format(
        device_item.hardware_type,
        bus_type,
        bus_addr,
        device_item.get_value(self.ADDRESS_ON_PARENT))
# Row template for the "Configuration Profiles" table; the width of
# column 0 (profile name) is substituted in at runtime.
PROFILE_INFO_TEMPLATE = (
    "{{0:{0}}} "   # profile name - width is dynamically set
    "{{1:>4}} "    # CPUs - width 4 for "CPUs"
    "{{2:>9}} "    # memory - width 9 for "999.9 MiB"
    "{{3:>4}} "    # NICs - width 4 for "NICs"
    "{{4:>7}} "    # serial - width 7 for "Serials"
    "{{5:>14}}"    # disks - width 14 for "Disks/Capacity","10 / 999.9 MiB"
)
def profile_info_list(self, width=79, verbose=False):
    """Get a list describing available configuration profiles.

    Args:
      width (int): Line length to wrap to if possible
      verbose (bool): if True, generate multiple lines per profile

    Returns:
      tuple: (header, list) - table header string and one entry per
      profile (plus label/description lines when ``verbose``).
    """
    str_list = []
    default_profile_id = self.default_config_profile
    profile_ids = self.config_profiles
    if not profile_ids:
        # No explicit profiles - describe the implicit default profile.
        profile_ids = [None]
    # First column must fit the longest profile id plus " (default)".
    prof_w = max(len("Configuration Profiles: "),
                 2 + max([(len(str(pid))) for pid in profile_ids]),
                 2 + len(str(default_profile_id) + " (default)"))
    # Profile information
    template = self.PROFILE_INFO_TEMPLATE.format(prof_w)
    header = template.format("Configuration Profiles:", "CPUs", "Memory",
                             "NICs", "Serials", "Disks/Capacity")
    header += "\n" + template.format("", "----", "---------", "----",
                                    "-------", "--------------")
    if verbose:
        wrapper = textwrap.TextWrapper(width=width,
                                       initial_indent=' ',
                                       subsequent_indent=' ' * 21)
    # Total disk capacity is profile-independent - compute it once
    # rather than rescanning the DiskSection for every profile.
    disks_size = 0
    if self.disk_section is not None:
        for disk in self.disk_section.findall(self.DISK):
            disks_size += self.get_capacity_from_disk(disk)
    for profile_id in profile_ids:
        cpus = 0
        cpu_item = self.hardware.find_item('cpu', profile=profile_id)
        if cpu_item:
            cpus = cpu_item.get_value(self.VIRTUAL_QUANTITY,
                                      [profile_id])
        mem_bytes = 0
        ram_item = self.hardware.find_item('memory', profile=profile_id)
        if ram_item:
            # RAM quantity is scaled by its AllocationUnits element.
            mem_bytes = programmatic_bytes_to_int(
                ram_item.get_value(self.VIRTUAL_QUANTITY, [profile_id]),
                ram_item.get_value(self.ALLOCATION_UNITS, [profile_id]))
        nics = self.hardware.get_item_count('ethernet', profile_id)
        serials = self.hardware.get_item_count('serial', profile_id)
        disk_count = self.hardware.get_item_count('harddisk',
                                                  profile_id)
        profile_str = " " + str(profile_id)
        if profile_id == default_profile_id:
            profile_str += " (default)"
        str_list.append(template.format(
            profile_str,
            cpus,
            pretty_bytes(mem_bytes),
            nics,
            serials,
            "{0:2} / {1:>9}".format(disk_count,
                                    pretty_bytes(disks_size))))
        if profile_id is not None and verbose:
            # Add the profile's label and description from its
            # Configuration element.
            profile = self.find_child(self.deploy_opt_section,
                                      self.CONFIG,
                                      attrib={self.CONFIG_ID: profile_id})
            str_list.extend(wrapper.wrap(
                '{0:15} "{1}"'.format("Label:",
                                      profile.findtext(self.CFG_LABEL))))
            str_list.extend(wrapper.wrap(
                '{0:15} "{1}"'.format("Description:",
                                      profile.findtext(self.CFG_DESC))))
    return (header, str_list)
def profile_info_string(self, width=79, verbosity_option=None):
    """Get a string summarizing available configuration profiles.

    Args:
      width (int): Line length to wrap to if possible
      verbosity_option (str): 'brief', None (default), or 'verbose'

    Returns:
      str: Appropriately formatted and verbose string.
    """
    # Anything other than 'brief' gets the verbose listing.
    verbose = (verbosity_option != 'brief')
    header, body = self.profile_info_list(width, verbose)
    return "\n".join([header] + body)
def create_configuration_profile(self, pid, label, description):
    """Create or update a configuration profile with the given ID.

    Args:
      pid (str): Profile identifier
      label (str): Brief descriptive label for the profile
      description (str): Verbose description of the profile
    """
    self.deploy_opt_section = self._ensure_section(
        self.DEPLOY_OPT_SECTION, "Configuration Profiles")
    profile_el = self.find_child(self.deploy_opt_section, self.CONFIG,
                                 attrib={self.CONFIG_ID: pid})
    if profile_el is None:
        logger.debug("Creating new Configuration element")
        profile_el = ET.SubElement(self.deploy_opt_section, self.CONFIG,
                                   {self.CONFIG_ID: pid})
    self.set_or_make_child(profile_el, self.CFG_LABEL, label)
    self.set_or_make_child(profile_el, self.CFG_DESC, description)
    # The cached profile list is now stale - drop it so it is rebuilt.
    logger.debug("New profile %s created - clear config_profiles cache",
                 pid)
    self._configuration_profiles = None
def delete_configuration_profile(self, profile):
    """Delete the profile with the given ID.

    Args:
      profile (str): Profile ID to delete.

    Raises:
      LookupError: if the profile does not exist.
    """
    profile_el = self.find_child(self.deploy_opt_section, self.CONFIG,
                                 attrib={self.CONFIG_ID: profile})
    if profile_el is None:
        raise LookupError("No such configuration profile '{0}'"
                          .format(profile))
    logger.notice("Deleting configuration profile %s", profile)
    # First scrub this profile from every hardware Item referencing it.
    affected = self.hardware.find_all_items(profile_list=[profile])
    logger.debug("Removing profile %s from %s hardware items",
                 profile, len(affected))
    for item in affected:
        item.remove_profile(profile, split_default=False)
    # Then remove the profile declaration itself; drop the whole
    # DeploymentOptionSection if this was the last profile.
    self.deploy_opt_section.remove(profile_el)
    if not self.deploy_opt_section.findall(self.CONFIG):
        self.envelope.remove(self.deploy_opt_section)
    # The cached profile list is now stale - drop it.
    logger.debug("Profile %s deleted - clear config_profiles cache",
                 profile)
    self._configuration_profiles = None
# TODO - how to insert a doc about the profile_list (see vm_description.py)
def set_cpu_count(self, cpus, profile_list):
    """Set the number of CPUs.

    Args:
      cpus (int): Number of CPUs
      profile_list (list): Change only the given profiles
    """
    logger.debug("Updating CPU count in OVF under profile %s to %s",
                 profile_list, cpus)
    # Reject counts the target platform cannot support.
    self.platform.validate_cpu_count(cpus)
    self.hardware.set_value_for_all_items(
        'cpu', self.VIRTUAL_QUANTITY, cpus, profile_list,
        create_new=True)
def set_memory(self, megabytes, profile_list):
    """Set the amount of RAM, in megabytes.

    Args:
      megabytes (int): Memory value, in megabytes
      profile_list (list): Change only the given profiles
    """
    logger.debug("Updating RAM in OVF under profile %s to %s",
                 profile_list, megabytes)
    # Reject amounts the target platform cannot support.
    self.platform.validate_memory_amount(megabytes)
    self.hardware.set_value_for_all_items(
        'memory', self.VIRTUAL_QUANTITY, megabytes, profile_list,
        create_new=True)
    # Record the units so the quantity is interpreted as megabytes.
    self.hardware.set_value_for_all_items(
        'memory', self.ALLOCATION_UNITS, 'byte * 2^20', profile_list)
def set_nic_types(self, type_list, profile_list):
    """Set the hardware type(s) for NICs.

    Args:
      type_list (list): NIC hardware type(s)
      profile_list (list): Change only the given profiles.
    """
    # Normalize the subtype spellings before validating/applying them.
    canonical = [canonicalize_nic_subtype(t) for t in type_list]
    self.platform.validate_nic_types(canonical)
    self.hardware.set_value_for_all_items('ethernet',
                                          self.RESOURCE_SUB_TYPE,
                                          canonical,
                                          profile_list)
def get_nic_count(self, profile_list):
    """Get the number of NICs under the given profile(s).

    Args:
      profile_list (list): Profile(s) of interest.

    Returns:
      dict: ``{ profile_name : nic_count }``
    """
    counts = self.hardware.get_item_count_per_profile('ethernet',
                                                      profile_list)
    return counts
def set_nic_count(self, count, profile_list):
    """Set the given profile(s) to have the given number of NICs.

    Args:
      count (int): number of NICs
      profile_list (list): Change only the given profiles
    """
    logger.debug("Updating NIC count in OVF under profile %s to %s",
                 profile_list, count)
    # Reject counts the target platform cannot support.
    self.platform.validate_nic_count(count)
    self.hardware.set_item_count_per_profile('ethernet', count,
                                             profile_list)
def create_network(self, label, description):
    """Define a new network with the given label and description.

    Also serves to update the description of an existing network label.

    Args:
      label (str): Brief label for the network
      description (str): Verbose description of the network
    """
    # Make sure a NetworkSection exists before adding to it.
    self.network_section = self._ensure_section(
        self.NETWORK_SECTION,
        "Logical networks",
        attrib=self.NETWORK_SECTION_ATTRIB)
    network = self.set_or_make_child(self.network_section, self.NETWORK,
                                     attrib={self.NETWORK_NAME: label})
    self.set_or_make_child(network, self.NWK_DESC, description)
def set_nic_networks(self, network_list, profile_list):
    """Set the NIC to network mapping for NICs under the given profile(s).

    .. note::
      If the length of :attr:`network_list` is less than the number of
      NICs, will use the last entry in the list for all remaining NICs.

    Args:
      network_list (list): List of networks to map NICs to
      profile_list (list): Change only the given profiles
    """
    # Surplus NICs all map to the final network in the list.
    fallback = network_list[-1]
    self.hardware.set_item_values_per_profile('ethernet',
                                              self.CONNECTION,
                                              network_list,
                                              profile_list,
                                              default=fallback)
def set_nic_mac_addresses(self, mac_list, profile_list):
    """Set the MAC addresses for NICs under the given profile(s).

    .. note::
      If the length of :attr:`mac_list` is less than the number of NICs,
      will use the last entry in the list for all remaining NICs.

    Args:
      mac_list (list): List of MAC addresses to assign to NICs
      profile_list (list): Change only the given profiles
    """
    # Surplus NICs all receive the final MAC address in the list.
    fallback = mac_list[-1]
    self.hardware.set_item_values_per_profile('ethernet',
                                              self.ADDRESS,
                                              mac_list,
                                              profile_list,
                                              default=fallback)
def set_nic_names(self, name_list, profile_list):
    """Set the device names for NICs under the given profile(s).

    Args:
      name_list (list): List of names to assign.
      profile_list (list): Change only the given profiles
    """
    self.hardware.set_item_values_per_profile('ethernet',
                                              self.ELEMENT_NAME,
                                              name_list,
                                              profile_list)
def get_serial_count(self, profile_list):
    """Get the number of serial ports under the given profile(s).

    Args:
      profile_list (list): Profile(s) of interest.

    Returns:
      dict: ``{ profile_name : serial_count }``
    """
    counts = self.hardware.get_item_count_per_profile('serial',
                                                      profile_list)
    return counts
def set_serial_count(self, count, profile_list):
    """Set the given profile(s) to have the given number of serial ports.

    Args:
      count (int): Number of serial ports
      profile_list (list): Change only the given profiles
    """
    logger.debug("Updating serial port count under profile %s to %s",
                 profile_list, count)
    self.hardware.set_item_count_per_profile('serial', count,
                                             profile_list)
def set_serial_connectivity(self, conn_list, profile_list):
    """Set the serial port connectivity under the given profile(s).

    Args:
      conn_list (list): List of connectivity strings
      profile_list (list): Change only the given profiles
    """
    # Ports beyond the given list get an empty connectivity string.
    self.hardware.set_item_values_per_profile('serial',
                                              self.ADDRESS, conn_list,
                                              profile_list, default="")
def get_serial_connectivity(self, profile):
    """Get the serial port connectivity strings under the given profile.

    Args:
      profile (str): Profile of interest.

    Returns:
      list: connectivity strings
    """
    items = self.hardware.find_all_items('serial',
                                         profile_list=[profile])
    return [item.get_value(self.ADDRESS) for item in items]
def set_scsi_subtypes(self, type_list, profile_list):
    """Set the device subtype(s) for the SCSI controller(s).

    Args:
      type_list (list): SCSI subtype string list
      profile_list (list): Change only the given profiles
    """
    # TODO validate supported types by platform
    self.hardware.set_value_for_all_items('scsi',
                                          self.RESOURCE_SUB_TYPE,
                                          type_list,
                                          profile_list)
def set_ide_subtypes(self, type_list, profile_list):
    """Set the device subtype(s) for the IDE controller(s).

    Args:
      type_list (list): IDE subtype string list
      profile_list (list): Change only the given profiles
    """
    # TODO validate supported types by platform
    self.hardware.set_value_for_all_items('ide',
                                          self.RESOURCE_SUB_TYPE,
                                          type_list,
                                          profile_list)
def get_property_value(self, key):
    """Get the value of the given property.

    Args:
      key (str): Property identifier

    Returns:
      str: Value of this property as a string, or ``None``
    """
    # Environment properties require OVF 1.0+ and a ProductSection.
    if self.ovf_version < 1.0 or self.product_section is None:
        return None
    prop = self.find_child(self.product_section, self.PROPERTY,
                           attrib={self.PROP_KEY: key})
    return None if prop is None else prop.get(self.PROP_VALUE)
def _validate_value_for_property(self, prop, value):
"""Check whether the proposed value is valid for the given property.
This applies agnostic criteria such as property type and qualifiers;
it knows nothing of the property's actual meaning.
Args:
prop (xml.etree.ElementTree.Element): Existing Property element.
value (str): Proposed value to set for this property.
Returns:
str: the value, potentially canonicalized.
Raises:
ValueUnsupportedError: if the value does not meet criteria.
"""
key = prop.get(self.PROP_KEY)
# Check type validity and canonicalize if needed
prop_type = prop.get(self.PROP_TYPE, "")
if prop_type == "boolean":
# XML prefers to represent booleans as 'true' or 'false'
value = str(value).lower()
if str(value).lower() in ['true', '1', 't', 'y', 'yes']:
value = 'true'
elif str(value).lower() in ['false', '0', 'f', 'n', 'no']:
value = 'false'
else:
raise ValueUnsupportedError(key, value, "a boolean value")
elif prop_type == "string":
value = str(value)
# Check property qualifiers
prop_qual = prop.get(self.PROP_QUAL, "")
if prop_qual:
match = re.search(r"MaxLen\((\d+)\)", prop_qual)
if match:
max_len = int(match.group(1))
if len(value) > max_len:
raise ValueUnsupportedError(
key, value, "string no longer than {0} characters"
.format(max_len))
match = re.search(r"MinLen\((\d+)\)", prop_qual)
if match:
min_len = int(match.group(1))
if len(value) < min_len:
raise ValueUnsupportedError(
key, value, "string no shorter than {0} characters"
.format(min_len))
return value
def set_property_value(self, key, value,
                       user_configurable=None, property_type=None,
                       label=None, description=None):
    """Set the value of the given property (converting value if needed).

    Args:
      key (str): Property identifier
      value (object): Value to set for this property; if ``None``,
        any existing value is kept (but revalidated).
      user_configurable (bool): Should this property be configurable at
        deployment time by the user?
      property_type (str): Value type - 'string' or 'boolean'
      label (str): Brief explanatory label for this property
      description (str): Detailed description of this property

    Returns:
      str: the (converted) value that was set.

    Raises:
      NotImplementedError: if :attr:`ovf_version` is less than 1.0;
        OVF version 0.9 is not currently supported.
    """
    if self.ovf_version < 1.0:
        raise NotImplementedError("No support for setting environment "
                                  "properties under OVF v0.9")
    # Make sure a ProductSection exists to hold the Property element.
    self.product_section = self._ensure_section(
        self.PRODUCT_SECTION,
        "Product Information",
        attrib=self.PRODUCT_SECTION_ATTRIB,
        parent=self.virtual_system)
    prop = self.find_child(self.product_section, self.PROPERTY,
                           attrib={self.PROP_KEY: key})
    if prop is None:
        prop = self.set_or_make_child(self.product_section, self.PROPERTY,
                                      attrib={self.PROP_KEY: key})
        # Properties *must* have a type to be valid
        if property_type is None:
            property_type = 'string'
    if user_configurable is not None:
        # NOTE(review): attribute name PROP_USER_CONFIGABLE looks
        # misspelled but matches the constant defined elsewhere in this
        # class - do not "fix" one without the other.
        prop.set(self.PROP_USER_CONFIGABLE, str(user_configurable).lower())
    if property_type is not None:
        prop.set(self.PROP_TYPE, property_type)
    # Revalidate any existing value if not setting a new value
    # (the type may have just changed, making the old value invalid).
    if value is None:
        value = prop.get(self.PROP_VALUE)
    if value is not None:
        # Make sure the requested value is valid
        value = self._validate_value_for_property(prop, value)
        prop.set(self.PROP_VALUE, value)
    if label is not None:
        self.set_or_make_child(prop, self.PROPERTY_LABEL, label)
    if description is not None:
        self.set_or_make_child(prop, self.PROPERTY_DESC, description)
    return value
def config_file_to_properties(self, file_path, user_configurable=None):
    """Import each line of a text file into a configuration property.

    Args:
      file_path (str): File name to import.
      user_configurable (bool): Should the resulting properties be
        configurable at deployment time by the user?

    Raises:
      NotImplementedError: if the :attr:`platform` for this OVF
        does not define
        :const:`~COT.platforms.Platform.LITERAL_CLI_STRING`
    """
    if not self.platform.LITERAL_CLI_STRING:
        raise NotImplementedError("no known support for literal CLI on " +
                                  str(self.platform))
    count = 0
    with open(file_path, 'r') as handle:
        for raw_line in handle:
            line = raw_line.strip()
            # Skip blank lines and comment lines
            if not line or line.startswith('!'):
                continue
            count += 1
            # Property keys are sequentially numbered, e.g. "config-0001".
            self.set_property_value(
                "{0}-{1:04d}".format(self.platform.LITERAL_CLI_STRING,
                                     count),
                line,
                user_configurable)
def convert_disk_if_needed(self, disk_image, kind):
    """Convert the disk to a more appropriate format if needed.

    * All hard disk files are converted to stream-optimized VMDK as it
      is the only format that VMware supports in OVA packages.
    * CD-ROM iso images are accepted without change.

    Args:
      disk_image (COT.disks.DiskRepresentation): Image to inspect and
        possibly convert
      kind (str): Image type (harddisk/cdrom)

    Returns:
      DiskRepresentation: :attr:`disk_image`, if no conversion was
      required, or a new :class:`~COT.disks.DiskRepresentation` instance
      representing a converted image that has been created in
      :attr:`output_dir`.
    """
    fmt = disk_image.disk_format
    subfmt = disk_image.disk_subformat
    # Only hard disks need conversion, and only when not already in
    # the streamOptimized VMDK format.
    if kind != 'harddisk' or (fmt == 'vmdk' and
                              subfmt == 'streamOptimized'):
        logger.debug("No disk conversion needed")
        return disk_image
    logger.debug("Converting %s (%s, %s) to streamOptimized VMDK",
                 disk_image.path, fmt, subfmt)
    return disk_image.convert_to(new_format='vmdk',
                                 new_subformat='streamOptimized',
                                 new_directory=self.working_dir)
def search_from_filename(self, filename):
    """From the given filename, try to find any existing objects.

    This implementation uses the given :attr:`filename` to find a matching
    ``File`` in the OVF, then using that to find a matching ``Disk`` and
    ``Item`` entries.

    Args:
      filename (str): Filename to search from

    Returns:
      tuple: ``(file, disk, ctrl_item, disk_item)``, any or all of which
      may be ``None``

    Raises:
      LookupError: If the ``disk_item`` is found but no ``ctrl_item`` is
        found to be its parent.
    """
    logger.debug("Looking for existing disk info based on filename %s",
                 filename)
    file_obj = self.find_child(self.references, self.FILE,
                               attrib={self.FILE_HREF: filename})
    if file_obj is None:
        return (None, None, None, None)
    disk = self.find_disk_from_file_id(file_obj.get(self.FILE_ID))
    # The File and the Disk may each point at an Item; they must agree.
    disk_item = check_for_conflict(
        "disk Item",
        [self.find_item_from_file(file_obj),
         self.find_item_from_disk(disk)])
    ctrl_item = self.find_parent_from_item(disk_item)
    if disk_item is not None and ctrl_item is None:
        raise LookupError("Found disk Item {0} but no controller Item "
                          "as its parent?"
                          .format(disk_item))
    return (file_obj, disk, ctrl_item, disk_item)
def search_from_file_id(self, file_id):
    """From the given file ID, try to find any existing objects.

    This implementation uses the given :attr:`file_id` to find a matching
    ``File`` in the OVF, then using that to find a matching ``Disk`` and
    ``Item`` entries.

    Args:
      file_id (str): File ID to search from

    Returns:
      tuple: ``(file, disk, ctrl_item, disk_item)``, any or all of which
      may be ``None``

    Raises:
      LookupError: If the ``disk`` entry is found but no corresponding
        ``file`` is found.
      LookupError: If the ``disk_item`` is found but no ``ctrl_item`` is
        found to be its parent.
    """
    if file_id is None:
        return (None, None, None, None)
    logger.debug(
        "Looking for existing disk information based on file_id %s",
        file_id)
    file_obj = self.find_child(self.references, self.FILE,
                               attrib={self.FILE_ID: file_id})
    disk = self.find_disk_from_file_id(file_id)
    if disk is not None and file_obj is None:
        # A Disk referencing a nonexistent File means the OVF is invalid.
        raise LookupError("Malformed OVF? Found Disk with fileRef {0} but "
                          "no corresponding File with id {0}"
                          .format(file_id))
    # The File and the Disk may each point at an Item; they must agree.
    disk_item = check_for_conflict(
        "disk Item",
        [self.find_item_from_file(file_obj),
         self.find_item_from_disk(disk)])
    ctrl_item = self.find_parent_from_item(disk_item)
    if disk_item is not None and ctrl_item is None:
        raise LookupError("Found disk Item {0} but no controller Item "
                          "as its parent?"
                          .format(disk_item))
    return (file_obj, disk, ctrl_item, disk_item)
def search_from_controller(self, controller, address):
    """From the controller type and device address, look for existing disk.

    This implementation uses the parameters to find matching
    controller and disk ``Item`` elements, then using the disk ``Item``
    to find matching ``File`` and/or ``Disk``.

    Args:
      controller (str): ``'ide'`` or ``'scsi'``
      address (str): Device address such as ``'1:0'``

    Returns:
      tuple: ``(file, disk, ctrl_item, disk_item)``, any or all of which
      may be ``None``
    """
    if controller is None or address is None:
        return (None, None, None, None)
    logger.debug("Looking for existing disk information based on "
                 "controller type (%s) and disk address (%s)",
                 controller, address)
    file_obj = None
    disk = None
    ctrl_item = None
    disk_item = None
    # Address is "<controller>:<disk>", e.g. "1:0".
    ctrl_addr = address.split(":")[0]
    disk_addr = address.split(":")[1]
    logger.debug("Searching for controller address %s", ctrl_addr)
    ctrl_item = self.hardware.find_item(controller,
                                        {self.ADDRESS: ctrl_addr})
    if ctrl_item is None:
        return (file_obj, disk, ctrl_item, disk_item)
    # From controller Item to its child disk Item
    ctrl_instance = ctrl_item.get_value(self.INSTANCE_ID)
    logger.debug("Searching for disk address %s with parent %s",
                 disk_addr, ctrl_instance)
    disk_item = self.hardware.find_item(
        properties={self.PARENT: ctrl_instance,
                    self.ADDRESS_ON_PARENT: disk_addr})
    if disk_item is None:
        return (file_obj, disk, ctrl_item, disk_item)
    host_resource = disk_item.get_value(self.HOST_RESOURCE)
    if host_resource is None:
        logger.debug("Disk item has no RASD:HostResource - "
                     "i.e., empty drive")
        return (file_obj, disk, ctrl_item, disk_item)
    # HostResource may reference either a Disk or a File, in either the
    # current or the legacy URI prefix style; dispatch accordingly.
    if (host_resource.startswith(self.HOST_RSRC_DISK_REF) or
            host_resource.startswith(self.OLD_HOST_RSRC_DISK_REF)):
        logger.debug("Looking for Disk and File matching disk Item")
        # From disk Item to Disk
        disk_id = os.path.basename(host_resource)
        if self.disk_section is not None:
            disk = self.find_child(self.disk_section, self.DISK,
                                   attrib={self.DISK_ID: disk_id})
        if disk is not None:
            # From Disk to File
            file_id = disk.get(self.DISK_FILE_REF)
            file_obj = self.find_child(self.references, self.FILE,
                                       attrib={self.FILE_ID: file_id})
    elif (host_resource.startswith(self.HOST_RSRC_FILE_REF) or
          host_resource.startswith(self.OLD_HOST_RSRC_FILE_REF)):
        logger.debug("Looking for File and Disk matching disk Item")
        # From disk Item to File
        file_id = os.path.basename(host_resource)
        file_obj = self.find_child(self.references, self.FILE,
                                   attrib={self.FILE_ID: file_id})
        if self.disk_section is not None:
            disk = self.find_child(self.disk_section, self.DISK,
                                   attrib={self.DISK_FILE_REF: file_id})
    else:
        logger.error(
            "Unrecognized HostResource format '%s'; unable to identify "
            "which File and Disk are associated with this disk Item",
            host_resource)
    return (file_obj, disk, ctrl_item, disk_item)
def find_open_controller(self, controller_type):
    """Locate the first controller of the given type with a free slot.

    Args:
      controller_type (str): ``'ide'`` or ``'scsi'``
    Returns:
      tuple: ``(ctrl_item, address_string)`` or ``(None, None)``
    """
    for candidate in self.hardware.find_all_items(controller_type):
        instance = candidate.get_value(self.INSTANCE_ID)
        bus_addr = candidate.get_value(self.ADDRESS)
        logger.debug("Found controller instance %s address %s",
                     instance, bus_addr)
        # Collect the unit numbers already claimed by children of this
        # controller, then take the lowest unused one.
        children = self.hardware.find_all_items(
            properties={self.PARENT: instance})
        taken = [child.get_value(self.ADDRESS_ON_PARENT)
                 for child in children]
        unit = 0
        while str(unit) in taken:
            unit += 1
        # SCSI buses hold units 0-7, IDE buses hold units 0-1.
        if ((controller_type == 'scsi' and unit > 7) or
                (controller_type == 'ide' and unit > 1)):
            logger.debug("Controller address %s is already full",
                         bus_addr)
            continue
        logger.verbose("Found open slot %s:%s for %s controller",
                       bus_addr, unit, controller_type)
        return (candidate, "{0}:{1}".format(bus_addr, unit))
    logger.notice("No open controller found")
    return (None, None)
def get_id_from_file(self, file_obj):
    """Read the identifier off the given opaque file object.

    Args:
      file_obj (xml.etree.ElementTree.Element): 'File' element
    Returns:
      str: 'id' attribute value of this element
    """
    file_id = file_obj.get(self.FILE_ID)
    return file_id
def get_path_from_file(self, file_obj):
    """Read the path (href) off the given opaque file object.

    Args:
      file_obj (xml.etree.ElementTree.Element): 'File' element
    Returns:
      str: 'href' attribute value of this element
    """
    href = file_obj.get(self.FILE_HREF)
    return href
def get_file_ref_from_disk(self, disk):
    """Read the file reference off the given opaque disk object.

    Args:
      disk (xml.etree.ElementTree.Element): 'Disk' element
    Returns:
      str: 'fileRef' attribute value of this element
    """
    file_ref = disk.get(self.DISK_FILE_REF)
    return file_ref
def get_common_subtype(self, device_type):
    """Get the sub-type shared by all devices of the given type.

    Args:
      device_type (str): Device type such as ``'ide'`` or ``'memory'``.
    Returns:
      str: Subtype string common to all devices of the type, or ``None``
      if multiple such devices exist with differing sub-types.
    """
    common = None
    for item in self.hardware.find_all_items(device_type):
        this_subtype = item.get_value(self.RESOURCE_SUB_TYPE)
        if common is None:
            # First (non-None) subtype seen becomes the candidate.
            common = this_subtype
            logger.verbose("Found %s subtype %s", device_type, common)
            continue
        if this_subtype != common:
            # Disagreement means there is no single common subtype.
            logger.warning("Found different subtypes ('%s', '%s') for "
                           "device type %s - no common subtype exists",
                           common, this_subtype, device_type)
            return None
    return common
def check_sanity_of_disk_device(self, disk, file_obj,
                                disk_item, ctrl_item):
    """Check if the given disk is linked properly to the other objects.

    Args:
      disk (xml.etree.ElementTree.Element): Disk object to validate
      file_obj (xml.etree.ElementTree.Element): File object which this
          disk should be linked to (optional)
      disk_item (OVFItem): Disk device object which should link to this
          disk (optional)
      ctrl_item (OVFItem): Controller device object which should link
          to the :attr:`disk_item`

    Raises:
      ValueMismatchError: if the given items are not linked properly.
      ValueUnsupportedError: if the :attr:`disk_item` has a
          ``HostResource`` value in an unrecognized or invalid format.
    """
    # With no disk device Item there is nothing to cross-check.
    if disk_item is None:
        return
    if ctrl_item is not None:
        # The disk Item's Parent must name the controller's InstanceID.
        match_or_die("disk Item Parent", disk_item.get_value(self.PARENT),
                     "controller Item InstanceID",
                     ctrl_item.get_value(self.INSTANCE_ID))
    host_resource = disk_item.get_value(self.HOST_RESOURCE)
    if host_resource is not None:
        if ((host_resource.startswith(self.HOST_RSRC_DISK_REF) or
             host_resource.startswith(self.OLD_HOST_RSRC_DISK_REF)) and
                disk is not None):
            # HostResource references a Disk - its trailing ID must match
            # the Disk element's diskId.
            match_or_die("disk Item HostResource",
                         os.path.basename(host_resource),
                         "Disk diskId", disk.get(self.DISK_ID))
        elif ((host_resource.startswith(self.HOST_RSRC_FILE_REF) or
               host_resource.startswith(self.OLD_HOST_RSRC_FILE_REF)) and
                file_obj is not None):
            # HostResource references a File - its trailing ID must match
            # the File element's id.
            match_or_die("disk Item HostResource",
                         os.path.basename(host_resource),
                         "File id", file_obj.get(self.FILE_ID))
        else:
            # NOTE(review): a *recognized* prefix also lands here when the
            # corresponding disk/file_obj argument is None - confirm that
            # is intended before relying on this error for bad prefixes.
            # TODO: this is not a user input error, it's an OVF error
            # so ValueUnsupportedError isn't really right?
            raise ValueUnsupportedError("HostResource prefix",
                                        host_resource,
                                        [self.HOST_RSRC_FILE_REF,
                                         self.HOST_RSRC_DISK_REF,
                                         self.OLD_HOST_RSRC_FILE_REF,
                                         self.OLD_HOST_RSRC_DISK_REF])
def add_file(self, file_path, file_id, file_obj=None, disk=None):
    """Add a new file object to the VM or overwrite the provided one.

    Args:
      file_path (str): Path to file to add
      file_id (str): Identifier string for the file in the VM
      file_obj (xml.etree.ElementTree.Element): Existing file object to
          overwrite
      disk (xml.etree.ElementTree.Element): Existing disk object
          referencing :attr:`file`.

    Returns:
      xml.etree.ElementTree.Element: New or updated file object
    """
    logger.debug("Adding File to OVF")

    if file_obj is not None:
        # Overwrite in place: drop the stale file-reference entry for the
        # old href before clearing the element's attributes.
        href = file_obj.get(self.FILE_HREF)
        if href in self.file_references.keys():
            del self.file_references[href]
        file_obj.clear()
    elif disk is None:
        # No ordering constraint - append to the References section.
        file_obj = ET.SubElement(self.references, self.FILE)
    else:
        # The OVF standard requires that Disks which reference a File
        # be listed in the same order as the Files.
        # Since there's already a Disk, make sure the new File is ordered
        # appropriately.
        # This is complicated by the fact that we may have
        # Files which are not Disks and Disks with no backing File.
        all_files = self.references.findall(self.FILE)
        all_disks = self.disk_section.findall(self.DISK)

        # Starting from the Disk entry corresponding to our new File,
        # search forward until we find the next Disk (if any) which has a
        # File, and insert our new File before that File.
        disk_index = all_disks.index(disk)
        # Default: no later Disk has a File, so append at the end.
        file_index = len(all_files)
        while disk_index < len(all_disks):
            tmp_file_id = all_disks[disk_index].get(self.DISK_FILE_REF)
            next_file = self.find_child(self.references, self.FILE,
                                        attrib={self.FILE_ID: tmp_file_id})
            if next_file is not None:
                file_index = all_files.index(next_file)
                break
            disk_index += 1
        file_obj = ET.Element(self.FILE)
        self.references.insert(file_index, file_obj)

    # Populate the (new or cleared) File element's attributes.
    file_size_string = str(os.path.getsize(file_path))
    file_name = os.path.basename(file_path)
    file_obj.set(self.FILE_ID, file_id)
    file_obj.set(self.FILE_HREF, file_name)
    file_obj.set(self.FILE_SIZE, file_size_string)

    # Make a note of the file's location - we'll copy it at write time.
    # The file_path is always a FileOnDisk
    self.file_references[file_name] = FileOnDisk(
        os.path.dirname(os.path.abspath(file_path)), file_name,
        checksum_algorithm=self.checksum_algorithm)
    return file_obj
def remove_file(self, file_obj, disk=None, disk_drive=None):
    """Remove the given file object from the VM.

    Args:
      file_obj (xml.etree.ElementTree.Element): File object to remove
      disk (xml.etree.ElementTree.Element): Disk object referencing
          :attr:`file`
      disk_drive (OVFItem): Disk drive mapping :attr:`file` to a device

    Raises:
      ValueUnsupportedError: If the ``disk_drive`` is a device type other
          than 'cdrom' or 'harddisk'
    """
    self.references.remove(file_obj)
    del self.file_references[file_obj.get(self.FILE_HREF)]
    if disk is not None:
        self.disk_section.remove(disk)
    if disk_drive is None:
        return
    drive_type = disk_drive.get_value(self.RESOURCE_TYPE)
    if drive_type == self.RES_MAP['cdrom']:
        # A CD-ROM drive can stay; just unmap the file from it.
        disk_drive.set_property(self.HOST_RESOURCE, '')
    elif drive_type == self.RES_MAP['harddisk']:
        # A hard disk device must be removed along with its backing.
        self.hardware.delete_item(disk_drive)
    else:
        raise ValueUnsupportedError(
            "drive type", drive_type,
            "CD-ROM ({0}) or hard disk ({1})".format(
                self.RES_MAP['cdrom'], self.RES_MAP['harddisk']))
def add_disk(self, disk_repr, file_id, drive_type, disk=None):
    """Add a new disk object to the VM or overwrite the provided one.

    Args:
      disk_repr (COT.disks.DiskRepresentation): Disk file representation
      file_id (str): Identifier string for the file/disk mapping
      drive_type (str): 'harddisk' or 'cdrom'
      disk (xml.etree.ElementTree.Element): Existing object to overwrite

    Returns:
      xml.etree.ElementTree.Element: New or updated disk object
    """
    if drive_type != 'harddisk':
        # CD-ROMs are backed directly by a File; a Disk element is not
        # needed and any existing one should be discarded.
        if disk is None:
            logger.debug("Not adding Disk element to OVF, as CD-ROMs "
                         "do not require a Disk")
        else:
            logger.notice("CD-ROMs do not require a Disk element. "
                          "Existing element will be deleted.")
            if self.disk_section is not None:
                self.disk_section.remove(disk)
                if not self.disk_section.findall(self.DISK):
                    # Empty DiskSection serves no purpose - drop it too.
                    logger.notice("No Disks left - removing DiskSection")
                    self.envelope.remove(self.disk_section)
                    self.disk_section = None
            disk = None
        return disk

    # Hard disk: make sure a DiskSection exists, then add or refresh
    # the Disk entry within it.
    self.disk_section = self._ensure_section(
        self.DISK_SECTION,
        "Virtual disk information",
        attrib=self.DISK_SECTION_ATTRIB)
    logger.debug("Adding Disk to OVF")
    if disk is None:
        disk_id = file_id
        disk = ET.SubElement(self.disk_section, self.DISK)
    else:
        # Keep the existing identifier but wipe all other attributes.
        disk_id = disk.get(self.DISK_ID)
        disk.clear()
    self.set_capacity_of_disk(disk, disk_repr.capacity)
    disk.set(self.DISK_ID, disk_id)
    disk.set(self.DISK_FILE_REF, file_id)
    disk.set(self.DISK_FORMAT,
             ("http://www.vmware.com/interfaces/"
              "specifications/vmdk.html#streamOptimized"))
    return disk
def add_controller_device(self, device_type, subtype, address,
                          ctrl_item=None):
    """Create a new IDE or SCSI controller, or update existing one.

    Args:
      device_type (str): ``'ide'`` or ``'scsi'``
      subtype (object): (Optional) subtype string such as ``'virtio'``
          or list of subtype strings
      address (int): Controller address such as 0 or 1 (optional)
      ctrl_item (OVFItem): Existing controller device to update (optional)

    Returns:
      OVFItem: New or updated controller device object

    Raises:
      ValueTooHighError: if no more controllers can be created
    """
    if ctrl_item is None:
        logger.notice("%s controller not found, creating new Item",
                      device_type.upper())
        (_, ctrl_item) = self.hardware.new_item(device_type)
        if address is None:
            # Find a controller address that isn't already used
            address_list = [
                ci.get_value(self.ADDRESS) for
                ci in self.hardware.find_all_items(device_type)]
            address = 0
            while str(address) in address_list:
                address += 1
            logger.verbose("Selected address %s for new controller",
                           address)
        # Enforce per-bus limits: at most 4 SCSI (0-3) and 2 IDE (0-1)
        # controllers.
        if device_type == "scsi" and int(address) > 3:
            raise ValueTooHighError("SCSI controller address", address, 3)
        elif device_type == "ide" and int(address) > 1:
            raise ValueTooHighError("IDE controller address", address, 1)
        ctrl_item.set_property(self.ADDRESS, address)
        ctrl_item.set_property(self.ELEMENT_NAME, "{0} Controller"
                               .format(device_type.upper()))
        ctrl_item.set_property(self.ITEM_DESCRIPTION,
                               "{0} Controller {1}"
                               .format(device_type.upper(), address))
    # Change subtype of existing controller or new controller
    if subtype is not None:
        ctrl_item.set_property(self.RESOURCE_SUB_TYPE, subtype)
    return ctrl_item
def _create_new_disk_device(self, drive_type, address, name, ctrl_item):
    """Create a new disk device Item entry.

    Helper for :meth:`add_disk_device`, in the case of no prior Item.

    Args:
      drive_type (str): ``'harddisk'`` or ``'cdrom'``
      address (str): Address on controller, such as "1:0" (optional)
      name (str): Device name string (optional)
      ctrl_item (OVFItem): Controller object to serve as parent

    Returns:
      tuple: (disk_item, disk_name)

    Raises:
      ValueTooHighError: if the requested address is out of range
          for the given controller, or if the controller is already full.
      ValueUnsupportedError: if ``name`` is not specified and
          ``disk_type`` is not 'harddisk' or 'cdrom'.
    """
    ctrl_instance = ctrl_item.get_value(self.INSTANCE_ID)
    if address is None:
        logger.debug("Working to identify address of new disk")
        siblings = self.hardware.find_all_items(
            properties={self.PARENT: ctrl_instance})
        taken = [sibling.get_value(self.ADDRESS_ON_PARENT)
                 for sibling in siblings]
        # Lowest unit number not already claimed on this controller.
        address = 0
        while str(address) in taken:
            address += 1
        logger.warning("New disk address on parent not specified, "
                       "guessing it should be %s", address)

    # Validate the address against the controller bus capacity.
    ctrl_type = ctrl_item.hardware_type
    if ctrl_type == "scsi" and int(address) > 15:
        raise ValueTooHighError("disk address on SCSI controller",
                                address, 15)
    if ctrl_type == "ide" and int(address) > 1:
        raise ValueTooHighError("disk address on IDE controller",
                                address, 1)

    if name is None:
        if drive_type == 'cdrom':
            name = "CD-ROM Drive"
        elif drive_type == 'harddisk':
            name = "Hard Disk Drive"
        else:
            # Should never get here!
            raise ValueUnsupportedError("disk drive type", drive_type,
                                        "'cdrom' or 'harddisk'")

    (_, disk_item) = self.hardware.new_item(drive_type)
    disk_item.set_property(self.ADDRESS_ON_PARENT, address)
    disk_item.set_property(self.PARENT, ctrl_instance)
    return disk_item, name
def add_disk_device(self, drive_type, address, name, description,
                    disk, file_obj, ctrl_item, disk_item=None):
    """Create a new disk hardware device or overwrite an existing one.

    Args:
      drive_type (str): ``'harddisk'`` or ``'cdrom'``
      address (str): Address on controller, such as "1:0" (optional)
      name (str): Device name string (optional)
      description (str): Description string (optional)
      disk (xml.etree.ElementTree.Element): Disk object to map to
          this device
      file_obj (xml.etree.ElementTree.Element): File object to map to
          this device
      ctrl_item (OVFItem): Controller object to serve as parent
      disk_item (OVFItem): Existing disk device to update instead of
          making a new device.

    Returns:
      xml.etree.ElementTree.Element: New or updated disk device object.
    """
    if disk_item is None:
        logger.notice("Disk Item not found, adding new Item")
        disk_item, name = self._create_new_disk_device(
            drive_type, address, name, ctrl_item)
    else:
        logger.debug("Updating existing disk Item")

    # The remaining properties apply to new and existing Items alike.
    disk_item.set_property(self.RESOURCE_TYPE, self.RES_MAP[drive_type])
    if drive_type == 'harddisk':
        # Hard disks reference the Disk element we created.
        host_resource = self.HOST_RSRC_DISK_REF + disk.get(self.DISK_ID)
    else:
        # CD-ROMs have no Disk; they reference the File directly.
        host_resource = self.HOST_RSRC_FILE_REF + file_obj.get(self.FILE_ID)
    disk_item.set_property(self.HOST_RESOURCE, host_resource)
    if name is not None:
        disk_item.set_property(self.ELEMENT_NAME, name)
    if description is not None:
        disk_item.set_property(self.ITEM_DESCRIPTION, description)
    return disk_item
# Helper methods - for internal use only
def untar(self, file_path):
    """Untar the OVF descriptor from an .ova to the working directory.

    Args:
      file_path (str): OVA file path

    Returns:
      str: Path to extracted OVF descriptor

    Raises:
      VMInitError: if the given file doesn't represent a valid OVA archive.
    """
    logger.verbose("Untarring %s to working directory %s",
                   file_path, self.working_dir)
    try:
        tarf = tarfile.open(file_path, 'r')
    except (EOFError, tarfile.TarError) as exc:
        raise VMInitError(1, "Could not untar file: {0}".format(exc.args),
                          file_path)
    try:
        # The OVF standard says, with regard to OVAs:
        # ...the files shall be in the following order inside the archive:
        # 1) OVF descriptor
        # 2) OVF manifest (optional)
        # 3) OVF certificate (optional)
        # 4) The remaining files shall be in the same order as listed
        #    in the References section...
        # 5) OVF manifest (optional)
        # 6) OVF certificate (optional)
        #
        # For now we just validate #1.
        if not tarf.getmembers():
            raise VMInitError(1, "No files to untar", file_path)

        # Make sure the provided file doesn't contain any malicious paths
        # http://stackoverflow.com/questions/8112742/
        # A bare prefix test ("/tmp/work".startswith("/tmp/wor")) would
        # wrongly accept sibling directories such as "<working_dir>evil",
        # so compare against the directory itself or a path that begins
        # with the directory plus a separator.
        safe_root = os.path.abspath(self.working_dir)
        safe_prefix = safe_root + os.sep
        for pathname in tarf.getnames():
            logger.debug("Examining path of %s prior to untar", pathname)
            target = os.path.abspath(
                os.path.join(self.working_dir, pathname))
            if target != safe_root and not target.startswith(safe_prefix):
                raise VMInitError(1, "Tar file contains malicious/unsafe "
                                  "file path '{0}'!".format(pathname),
                                  file_path)

        ovf_descriptor = tarf.getmembers()[0]
        if os.path.splitext(ovf_descriptor.name)[1] != '.ovf':
            # Do we have an OVF descriptor elsewhere in the file?
            candidates = [mem for mem in tarf.getmembers() if
                          os.path.splitext(mem.name)[1] == '.ovf']
            if not candidates:
                raise VMInitError(1,
                                  "TAR file does not seem to contain any"
                                  " .ovf file to serve as OVF descriptor"
                                  " - OVA is invalid!",
                                  file_path)
            ovf_descriptor = candidates[0]
            logger.error(
                "OVF file %s found, but is not the first file in the TAR "
                "as it should be - OVA is not standard-compliant!",
                ovf_descriptor.name)

        # TODO: In theory we could read the ovf descriptor XML directly
        # from the TAR and not need to even extract this file to disk...
        tarf.extract(ovf_descriptor, path=self.working_dir)
        logger.debug(
            "Extracted OVF descriptor from %s to working dir %s",
            file_path, self.working_dir)
    finally:
        tarf.close()

    # Find the OVF file
    return os.path.join(self.working_dir, ovf_descriptor.name)
def generate_manifest(self, ovf_file):
    """Construct the manifest file for this package, if possible.

    Args:
      ovf_file (str): OVF descriptor file path

    Returns:
      bool: True if the manifest was successfully generated,
      False if not successful (such as if checksum helper tools are
      unavailable).
    """
    (prefix, _) = os.path.splitext(ovf_file)
    logger.verbose("Generating manifest for %s", ovf_file)
    manifest = prefix + '.mf'
    algo = self.checksum_algorithm.upper()
    entry = "{algo}({file})= {sum}\n"
    with open(ovf_file, 'rb') as ovfobj:
        checksum = file_checksum(ovfobj, self.checksum_algorithm)
    with open(manifest, 'wb') as mfobj:
        # The descriptor's own checksum comes first...
        mfobj.write(entry.format(algo=algo,
                                 file=os.path.basename(ovf_file),
                                 sum=checksum).encode('utf-8'))
        # ...followed by one line per referenced file.
        for file_obj in self.references.findall(self.FILE):
            file_name = file_obj.get(self.FILE_HREF)
            file_ref = self.file_references[file_name]
            mfobj.write(entry.format(algo=algo,
                                     file=file_name,
                                     sum=file_ref.checksum).encode('utf-8'))
    logger.debug("Manifest generated successfully")
    return True
def tar(self, ovf_descriptor, tar_file):
    """Create a .ova tar file based on the given OVF descriptor.

    Args:
      ovf_descriptor (str): File path for an OVF descriptor
      tar_file (str): File path for the desired OVA archive.
    """
    logger.verbose("Creating tar file %s", tar_file)
    (prefix, _) = os.path.splitext(ovf_descriptor)
    # Issue #66 - need to detect any of the possible scenarios:
    # 1) output path and input path are the same real path
    #    (not just string-equal!)
    # 2) output file and input file are the same file (including links)
    # but not error out if (common case) output_file doesn't exist yet.
    if (os.path.realpath(self.input_file) == os.path.realpath(tar_file) or
            (os.path.exists(tar_file) and
             os.path.samefile(self.input_file, tar_file))):
        # We're about to overwrite the input OVA with a new OVA.
        # (Python tarfile module doesn't support in-place edits.)
        # Any files that we need to carry over need to be extracted NOW!
        logger.info(
            "Input OVA will be overwritten. Extracting files from %s to"
            " working directory before overwriting it.", self.input_file)
        for filename in self.file_references:
            file_ref = self.file_references[filename]
            if file_ref.file_path is None:
                # Reference still points into the archive - extract a
                # copy and re-point the reference at the extracted file.
                file_ref.copy_to(self.working_dir)
                self.file_references[filename] = FileReference.create(
                    self.working_dir, filename,
                    checksum_algorithm=self.checksum_algorithm,
                    expected_checksum=file_ref.checksum,
                    expected_size=file_ref.size)

    # Be sure to dereference any links to the actual file content!
    with tarfile.open(tar_file, 'w', dereference=True) as tarf:
        # OVF is always first
        logger.debug("Adding OVF descriptor %s to %s",
                     ovf_descriptor, tar_file)
        tarf.add(ovf_descriptor, os.path.basename(ovf_descriptor))
        # Add manifest if present
        manifest_path = prefix + '.mf'
        if os.path.exists(manifest_path):
            logger.debug("Adding manifest to %s", tar_file)
            tarf.add(manifest_path, os.path.basename(manifest_path))
        if os.path.exists("{0}.cert".format(prefix)):
            logger.warning("COT doesn't know how to re-sign a certificate"
                           " file, so the existing certificate will be"
                           " omitted from %s.", tar_file)
        # Add all other files mentioned in the OVF
        for file_obj in self.references.findall(self.FILE):
            file_name = file_obj.get(self.FILE_HREF)
            file_ref = self.file_references[file_name]
            logger.debug("Adding associated file %s to %s",
                         file_name, tar_file)
            file_ref.add_to_archive(tarf)
def _ensure_section(self, section_tag, info_string,
                    attrib=None, parent=None):
    """If the OVF doesn't already have the given Section, create it.

    Args:
      section_tag (str): XML tag of the desired section.
      info_string (str): Info string to set if a new Section is created.
      attrib (dict): Attributes to filter by when looking for any existing
          section (optional).
      parent (xml.etree.ElementTree.Element): Parent element (optional).
          If not specified, :attr:`envelope` will be the parent.

    Returns:
      xml.etree.ElementTree.Element: Section element that was found or
      created
    """
    if parent is None:
        parent = self.envelope
    existing = self.find_child(parent, section_tag, attrib=attrib)
    if existing is not None:
        return existing
    logger.notice("No existing %s. Creating it.",
                  XML.strip_ns(section_tag))
    if attrib:
        section = ET.Element(section_tag, attrib=attrib)
    else:
        section = ET.Element(section_tag)
    # Section elements may be in arbitrary order relative to one another,
    # but they MUST come after the References and before the VirtualSystem.
    # Insert immediately before the VirtualSystem (or at the end if no
    # VirtualSystem child exists).
    children = list(parent)
    insert_at = len(children)
    for position, child in enumerate(children):
        if child.tag == self.VIRTUAL_SYSTEM:
            insert_at = position
            break
    parent.insert(insert_at, section)
    # All Sections must have an Info child
    self.set_or_make_child(section, self.INFO, info_string)
    return section
def _set_product_section_child(self, child_tag, child_text):
    """Update or create the given child of the ProductSection.

    Creates the ProductSection itself if necessary.

    Args:
      child_tag (str): XML tag of the product section child element.
      child_text (str): Text to set for the child element.

    Returns:
      xml.etree.ElementTree.Element: The product section element that
      was updated or created
    """
    # Make sure the ProductSection exists before touching its children.
    self.product_section = self._ensure_section(
        self.PRODUCT_SECTION,
        "Product Information",
        attrib=self.PRODUCT_SECTION_ATTRIB,
        parent=self.virtual_system)
    child = self.set_or_make_child(self.product_section, child_tag,
                                   child_text)
    return child
def find_parent_from_item(self, item):
    """Find the parent Item of the given Item.

    Args:
      item (OVFItem): Item whose parent is desired

    Returns:
      OVFItem: instance representing the parent device, or None
    """
    if item is not None:
        parent_instance = item.get_value(self.PARENT)
        if parent_instance is not None:
            return self.hardware.find_item(
                properties={self.INSTANCE_ID: parent_instance})
        logger.warning("Item instance %s has no 'Parent' subelement."
                       " Unable to identify parent Item.",
                       item.get_value(self.INSTANCE_ID))
    return None
def find_item_from_disk(self, disk):
    """Find the disk Item that references the given Disk.

    Args:
      disk (xml.etree.ElementTree.Element): Disk element

    Returns:
      OVFItem: Corresponding instance, or None
    """
    if disk is None:
        return None
    disk_id = disk.get(self.DISK_ID)
    # Try the current HostResource prefix first, then the legacy one.
    match = None
    for prefix in (self.HOST_RSRC_DISK_REF, self.OLD_HOST_RSRC_DISK_REF):
        match = self.hardware.find_item(
            properties={self.HOST_RESOURCE: prefix + disk_id})
        if match:
            break
    return match
def find_item_from_file(self, file_obj):
    """Find the disk Item that references the given File.

    Args:
      file_obj (xml.etree.ElementTree.Element): File element

    Returns:
      OVFItem: Corresponding instance, or None.
    """
    if file_obj is None:
        return None
    file_id = file_obj.get(self.FILE_ID)
    # Try the current HostResource prefix first, then the legacy one.
    match = None
    for prefix in (self.HOST_RSRC_FILE_REF, self.OLD_HOST_RSRC_FILE_REF):
        match = self.hardware.find_item(
            properties={self.HOST_RESOURCE: prefix + file_id})
        if match:
            break
    return match
def find_disk_from_file_id(self, file_id):
    """Find the Disk that uses the given file_id for backing.

    Args:
      file_id (str): File identifier string

    Returns:
      xml.etree.ElementTree.Element: Disk matching the file, or None
    """
    # Nothing to search without a file ID or a DiskSection.
    if file_id is not None and self.disk_section is not None:
        return self.find_child(self.disk_section, self.DISK,
                               attrib={self.DISK_FILE_REF: file_id})
    return None
def find_empty_drive(self, drive_type):
    """Find a disk device that exists but contains no data.

    Args:
      drive_type (str): Either 'cdrom' or 'harddisk'

    Returns:
      OVFItem: Instance representing this disk device, or None.

    Raises:
      ValueUnsupportedError: if ``drive_type`` is unrecognized.
    """
    if drive_type == 'cdrom':
        # An empty CD-ROM drive is simply one with no HostResource.
        drives = self.hardware.find_all_items(
            resource_type=drive_type,
            properties={self.HOST_RESOURCE: None})
        return drives[0] if drives else None
    if drive_type == 'harddisk':
        # All harddisk items must have a HostResource, so we need a
        # different way to indicate an empty drive. By convention,
        # we do this with a small placeholder disk (one with a Disk entry
        # but no corresponding File included in the OVF package).
        if self.disk_section is None:
            logger.debug("No DiskSection, so no placeholder disk!")
            return None
        for disk in self.disk_section.findall(self.DISK):
            if disk.get(self.DISK_FILE_REF) is None:
                # Placeholder found - return the drive mapping it.
                return self.find_item_from_disk(disk)
        logger.debug("No placeholder disk found.")
        return None
    raise ValueUnsupportedError("drive type",
                                drive_type,
                                "'cdrom' or 'harddisk'")
def find_device_location(self, device):
    """Find the controller type and address of a given device object.

    Args:
      device (OVFItem): Hardware device object.

    Returns:
      tuple: ``(type, address)``, such as ``("ide", "1:0")``.

    Raises:
      LookupError: if the controller is not found.
    """
    controller = self.find_parent_from_item(device)
    if controller is None:
        raise LookupError("No parent controller for device?")
    bus_addr = controller.get_value(self.ADDRESS)
    unit_addr = device.get_value(self.ADDRESS_ON_PARENT)
    return (controller.hardware_type, bus_addr + ':' + unit_addr)
def get_id_from_disk(self, disk):
    """Read the identifier string off the given Disk object.

    Args:
      disk (xml.etree.ElementTree.Element): Disk object to inspect

    Returns:
      str: Disk identifier
    """
    disk_id = disk.get(self.DISK_ID)
    return disk_id
def get_capacity_from_disk(self, disk):
    """Get the capacity of the given Disk in bytes.

    Args:
      disk (xml.etree.ElementTree.Element): Disk element to inspect

    Returns:
      int: Disk capacity, in bytes
    """
    capacity = int(disk.get(self.DISK_CAPACITY))
    # Per the OVF spec, a missing capacityAllocationUnits means bytes.
    units = disk.get(self.DISK_CAP_UNITS, 'byte')
    return programmatic_bytes_to_int(capacity, units)
def set_capacity_of_disk(self, disk, capacity_bytes):
    """Set the storage capacity of the given Disk.

    Tries to use the most human-readable form possible (i.e., 8 GiB
    instead of 8589934592 bytes).

    Args:
      disk (xml.etree.ElementTree.Element): Disk to update
      capacity_bytes (int): Disk capacity, in bytes
    """
    if self.ovf_version < 1.0:
        # In OVF 0.9 only bytes is supported as a unit.
        # ElementTree attribute values must be strings - passing the raw
        # int would make serialization (ET.tostring) raise TypeError.
        disk.set(self.DISK_CAPACITY, str(capacity_bytes))
    else:
        (capacity, cap_units) = int_bytes_to_programmatic_units(
            capacity_bytes)
        disk.set(self.DISK_CAPACITY, capacity)
        disk.set(self.DISK_CAP_UNITS, cap_units)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
198,
2,
19643,
69,
13,
9078,
532,
5016,
329,
440,
53,
37,
14,
41576,
9041,
198,
2,
198,
2,
2932,
2211,
11,
17551,
376,
13,
22233,
198,
2,
15069,
357,
66,
8,
2211,
12,
5539,
... | 2.062425 | 59,159 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import time
import copy
from alibabacloud.vendored.six.moves.urllib.request import pathname2url
from alibabacloud.vendored.six import iteritems
from alibabacloud.compat import urlencode
from alibabacloud.exceptions import NoCredentialsException
from alibabacloud.signer.algorithm import ShaHmac1 as mac1
from alibabacloud.utils import format_type as FormatType, parameter_helper as helper
from alibabacloud.utils.parameter_helper import md5_sum
FORMAT_ISO_8601 = "%Y-%m-%dT%H:%M:%SZ"
HEADER_SEPARATOR = "\n"
# this function will append the necessary parameters for signers process.
# parameters: the orignal parameters
# signers: sha_hmac1 or sha_hmac256
# accessKeyId: this is aliyun_access_key_id
# format: XML or JSON
# input parameters is headers
SIGNER_MAP = {
'RPC': RPCSigner,
'ROA': ROASigner
}
| [
2,
49962,
284,
262,
24843,
10442,
5693,
357,
1921,
37,
8,
739,
530,
198,
2,
393,
517,
18920,
5964,
11704,
13,
220,
4091,
262,
28536,
2393,
198,
2,
9387,
351,
428,
670,
329,
3224,
1321,
198,
2,
5115,
6634,
9238,
13,
220,
383,
7054,... | 3.306584 | 486 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from .twitch import Messages, Channel
from .subtitles import SubtitleWriter
from .settings import argparser, settings
if __name__ == "__main__":
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
33918,
198,
198,
6738,
764,
31844,
1330,
43534,
11,
11102,
198,
6738,
764,
7266,
83,
30540,
1330,
3834,
783... | 2.96 | 75 |
import numpy as np
from cereal import car
from selfdrive.config import Conversions as CV
from selfdrive.car.interfaces import CarStateBase
from opendbc.can.parser import CANParser
from opendbc.can.can_define import CANDefine
from selfdrive.car.volkswagen.values import DBC, CANBUS, NWL, TRANS, GEAR, BUTTON_STATES, CarControllerParams
| [
11748,
299,
32152,
355,
45941,
198,
6738,
33158,
1330,
1097,
198,
6738,
2116,
19472,
13,
11250,
1330,
32200,
507,
355,
26196,
198,
6738,
2116,
19472,
13,
7718,
13,
3849,
32186,
1330,
1879,
9012,
14881,
198,
6738,
1034,
437,
15630,
13,
5... | 3.383838 | 99 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-03 06:27
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2980,
515,
416,
37770,
352,
13,
1157,
319,
2864,
12,
3312,
12,
3070,
9130,
25,
1983,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738... | 2.933333 | 75 |
import debug
import random
main(5)
print("-" * 50)
main(10)
print("-" * 50)
main(15)
print("-" * 50)
main(20)
print("-" * 50)
main(30)
print("-" * 50) | [
198,
11748,
14257,
198,
11748,
4738,
198,
198,
12417,
7,
20,
8,
198,
4798,
7203,
21215,
1635,
2026,
8,
198,
198,
12417,
7,
940,
8,
198,
4798,
7203,
21215,
1635,
2026,
8,
198,
198,
12417,
7,
1314,
8,
198,
4798,
7203,
21215,
1635,
2... | 2.197183 | 71 |
#
# Valuation of Zero-Coupon Bonds
# in Cox-Ingersoll-Ross (1985) Model
# 09_gmm/CIR_zcb_valuation_gen.py
#
# (c) Dr. Yves J. Hilpisch
# Derivatives Analytics with Python
#
import math
import numpy as np
#
# Example Parameters CIR85 Model
#
r0, kappa_r, theta_r, sigma_r, t, T = 0.04, 0.3, 0.04, 0.1, 0.5, 5.0
#
# Zero-Coupon Bond Valuation Formula
#
def gamma(kappa_r, sigma_r):
''' Help Function. '''
return np.sqrt(kappa_r ** 2 + 2 * sigma_r ** 2)
def b1(alpha):
''' Help Function. '''
r0, kappa_r, theta_r, sigma_r, t, T = alpha
g = gamma(kappa_r, sigma_r)
return (((2 * g * np.exp((kappa_r + g) * (T - t) / 2)) /
(2 * g + (kappa_r + g) * (np.exp(g * (T - t)) - 1)))
** (2 * kappa_r * theta_r / sigma_r ** 2))
def b2(alpha):
''' Help Function. '''
r0, kappa_r, theta_r, sigma_r, t, T = alpha
g = gamma(kappa_r, sigma_r)
return ((2 * (np.exp(g * (T - t)) - 1)) /
(2 * g + (kappa_r + g) * (np.exp(g * (T - t)) - 1)))
def B(alpha):
''' Function to value unit zero-coupon bonds in CIR85 Model.
Parameters
==========
r0: float
initial short rate
kappa_r: float
mean-reversion factor
theta_r: float
long-run mean of short rate
sigma_r: float
volatility of short rate
t: float
valuation date
T: float
time horizon/interval
Returns
=======
zcb_value: float
value of zero-coupon bond
'''
b_1 = b1(alpha)
b_2 = b2(alpha)
r0, kappa_r, theta_r, sigma_r, t, T = alpha
E_rt = theta_r + np.exp(-kappa_r * t) * (r0 - theta_r)
# expected value of r_t
zcb_value = b_1 * np.exp(-b_2 * E_rt)
return zcb_value
if __name__ == '__main__':
#
# Example Valuation
#
BtT = B([r0, kappa_r, theta_r, sigma_r, t, T])
# discount factor, ZCB value for t & T
print "ZCB Value %10.4f" % BtT | [
2,
198,
2,
3254,
2288,
286,
12169,
12,
34,
10486,
261,
39161,
198,
2,
287,
18014,
12,
27682,
364,
692,
12,
38328,
357,
29110,
8,
9104,
198,
2,
7769,
62,
70,
3020,
14,
34,
4663,
62,
89,
21101,
62,
2100,
2288,
62,
5235,
13,
9078,
... | 2.01153 | 954 |
THIS.CODE.IS.UNDER.CONSTRUCTION
import tcp_helper as helper
import gen_rulemanager
#import protocol
helper_config = {
"mode": "client",
"address": None, # or IPv4/IPv6 address
"port": None, # TCP port,
"compression": False,
"fragmentation": False
}
rule_manager = XXX
rule_manager.Add({ rule_set })
helper = helper.helper(helper_config)
#tcp_layer2 = helper.create_tcp_connector()
#tcp_layer3 = helper.create_tcp_layer3() # Layer3 special
schc_protocol = helper.create_schc_protocol(rule_manager, tcp_layer2, tcp_layer3) # scheduler object also created here
tcp_layer3.send_at(10.0, b"0" * 100)
helper = helper.helper(helper_config)
schc_protocol = helper.create_schc_protocol(rule_manager) # scheduler object also created here
tcp_layer3 = helper.get_layer3()
tcp_layer3 = send_at(10.0, b"00000")
scheduler = helper.get_scheduler()
scheduler.run()
#---------------------------------------------------------
XXX
#helper.set_tcp_layer3_factory(MyLayer3)
my_upper_layer3 = MyLayer3(XXX)
helper.set_upper_layer3(my_upper_layer3)
XXX
| [
43559,
13,
34,
16820,
13,
1797,
13,
4944,
14418,
13,
10943,
46126,
2849,
198,
198,
11748,
48265,
62,
2978,
525,
355,
31904,
198,
11748,
2429,
62,
25135,
37153,
198,
2,
11748,
8435,
198,
198,
2978,
525,
62,
11250,
796,
1391,
198,
220,
... | 2.617073 | 410 |
#
# ---------------------------------------------------------
# Copyright 2018-present (c) Automatos Studios. All Rights Reserved.
# ---------------------------------------------------------
#
# @file data_provider.py
# @author cosmoplankton@automatos.studio
#
"""
DataProvider Implementation:
Imports the appropriate dataservice based on 'tag' and 'version'.
"""
import importlib
from utils import global_logging as logging
LOGGER = logging.get_logger()
class DataProvider:
"""
Imports the appropriate dataservice based on 'tag' and 'version'.
"""
@staticmethod
def get_dataservice(tag= None, version= None):
"""
Return the Dataservice object based on the requested version.
"""
if tag is None:
tag = 'base'
tag = tag.lower()
try:
if version is None:
_ds_module = importlib.import_module(
".data_service_" + tag,
package= 'dataservices')
else:
_ds_module = importlib.import_module(
".data_service_" + tag + '_' + version,
package= 'dataservices')
except ImportError:
LOGGER.error('Dataservice import failed')
raise
return _ds_module.ServiceBase()
| [
2,
198,
2,
20368,
22369,
12,
198,
2,
15069,
2864,
12,
25579,
357,
66,
8,
17406,
35492,
13799,
13,
1439,
6923,
33876,
13,
198,
2,
20368,
22369,
12,
198,
2,
220,
198,
2,
2488,
7753,
220,
220,
220,
220,
220,
220,
1366,
62,
15234,
1... | 2.766055 | 436 |
import re
import os
import sys
from datetime import datetime
import numpy as np
import scipy.io as sio
import datajoint as dj
import tqdm
from . import (reference, utilities, acquisition, analysis)
schema = dj.schema(dj.config['custom'].get('database.prefix', '') + 'extracellular')
@schema
@schema
@schema
@schema
| [
11748,
302,
198,
11748,
28686,
198,
11748,
25064,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
629,
541,
88,
13,
952,
355,
264,
952,
198,
11748,
1366,
73,
1563,
355,
42625,
198,
11748,
... | 2.963964 | 111 |
#!/usr/bin/env python
from datetime import datetime
import json
from fabric.api import local, require, settings, task
from fabric.state import env
from termcolor import colored
import app_config
# Other fabfiles
import ap
import assets
import daemons
import data
import instagram
import issues
import liveblog
import render
import stack
import text
import theme
import utils
if app_config.DEPLOY_TO_SERVERS:
import servers
if app_config.DEPLOY_CRONTAB:
import cron_jobs
# Bootstrap can only be run once, then it's disabled
if app_config.PROJECT_SLUG == '$NEW_PROJECT_SLUG':
import bootstrap
"""
Base configuration
"""
env.user = app_config.SERVER_USER
env.forward_agent = True
env.hosts = []
env.settings = None
"""
Environments
Changing environment requires a full-stack test.
An environment points to both a server and an S3
bucket.
"""
@task
def production():
"""
Run as though on production.
"""
env.settings = 'production'
app_config.configure_targets(env.settings)
env.hosts = app_config.SERVERS
@task
def staging():
"""
Run as though on staging.
"""
env.settings = 'staging'
app_config.configure_targets(env.settings)
env.hosts = app_config.SERVERS
"""
Branches
Changing branches requires deploying that branch to a host.
"""
@task
def stable():
"""
Work on stable branch.
"""
env.branch = 'stable'
@task
def master():
"""
Work on development branch.
"""
env.branch = 'master'
@task
def branch(branch_name):
"""
Work on any specified branch.
"""
env.branch = branch_name
@task
def tests():
"""
Run Python unit tests.
"""
with settings(warn_only=True):
local('createdb elections14test')
local('nosetests')
"""
Deployment
Changes to deployment requires a full-stack test. Deployment
has two primary functions: Pushing flat files to S3 and deploying
code to a remote server if required.
"""
@task
def update():
"""
Update all application data not in repository (copy, assets, etc).
"""
text.update()
assets.sync()
#data.update()
@task
def deploy_server(remote='origin'):
"""
Deploy server code and configuration.
"""
if app_config.DEPLOY_TO_SERVERS:
require('branch', provided_by=[stable, master, branch])
if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
utils.confirm(
colored("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch, "red")
)
servers.checkout_latest(remote)
servers.fabcast('text.update')
servers.fabcast('assets.sync')
if app_config.DEPLOY_CRONTAB:
servers.install_crontab()
if app_config.DEPLOY_SERVICES:
servers.deploy_confs()
@task
def deploy_client(remote='origin'):
"""
Render and deploy app to S3.
"""
require('settings', provided_by=[production, staging])
update()
render.render_all()
utils._gzip('www', '.gzip')
utils._deploy_to_s3()
@task
def deploy_liveblog():
"""
Deploy latest liveblog slides to S3.
"""
local('rm -rf .liveblog_slides_html .liveblog_slides_gzip')
render.render_liveblog()
utils._gzip('.liveblog_slides_html', '.liveblog_slides_gzip')
utils._deploy_to_s3('.liveblog_slides_gzip', copy_assets=False)
@task
def deploy_results():
"""
Deploy latest results slides to S3.
"""
local('rm -rf .results_slides_html .results_slides_gzip')
render.render_results()
utils._gzip('.results_slides_html', '.results_slides_gzip')
utils._deploy_to_s3('.results_slides_gzip', copy_assets=False)
@task
def deploy_states():
"""
Deploy latest state slides to S3.
"""
local('rm -rf .state_slides_html .state_slides_gzip')
render.render_states()
utils._gzip('.state_slides_html', '.state_slides_gzip')
utils._deploy_to_s3('.state_slides_gzip', copy_assets=False)
@task
def deploy_big_boards():
"""
Deploy big boards to S3.
"""
local('rm -rf .big_boards_html .big_boards_gzip')
render.render_big_boards()
utils._gzip('.big_boards_html', '.big_boards_gzip')
utils._deploy_to_s3('.big_boards_gzip', copy_assets=False)
@task
def deploy_bop():
"""
Deploy latest BOP.
"""
local('rm -rf .bop_html .bop_gzip')
render.render_bop()
utils._gzip('.bop_html', '.bop_gzip')
utils._deploy_to_s3('.bop_gzip', copy_assets=False)
@task
def deploy():
"""
Deploy the latest app to S3 and, if configured, to our servers.
"""
require('settings', provided_by=[production, staging])
deploy_server()
deploy_client()
@task
def reset_browsers():
"""
Deploy a timestamp so the client will reset their page. For bugfixes
"""
require('settings', provided_by=[production, staging])
payload = {}
now = datetime.now().strftime('%s')
payload['timestamp'] = now
payload['homepage'] = False
with open('www/live-data/timestamp.json', 'w') as f:
json.dump(now, f)
utils.deploy_json('www/live-data/timestamp.json', 'live-data/timestamp.json')
@task
"""
Destruction
Changes to destruction require setup/deploy to a test host in order to test.
Destruction should remove all files related to the project from both a remote
host and S3.
"""
@task
def shiva_the_destroyer():
"""
Deletes the app from s3
"""
require('settings', provided_by=[production, staging])
utils.confirm(
colored("You are about to destroy everything deployed to %s for this project.\nDo you know what you're doing?')" % app_config.DEPLOYMENT_TARGET, "red")
)
with settings(warn_only=True):
sync = 'aws s3 rm %s --recursive --region "%s"'
for bucket in app_config.S3_BUCKETS:
local(sync % ('s3://%s/' % bucket['bucket_name'], bucket['region']))
if app_config.DEPLOY_TO_SERVERS:
servers.delete_project()
if app_config.DEPLOY_CRONTAB:
servers.uninstall_crontab()
if app_config.DEPLOY_SERVICES:
servers.nuke_confs()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
11748,
33918,
198,
198,
6738,
9664,
13,
15042,
1330,
1957,
11,
2421,
11,
6460,
11,
4876,
198,
6738,
9664,
13,
5219,
1330,
17365,
198,
6738,
338... | 2.558388 | 2,432 |
from __future__ import absolute_import, division, print_function, unicode_literals
from echomesh.color import GammaTable
from echomesh.util.thread import Lock
# See http://dev.moorescloud.com/2012/10/18/about-lpd8806-based-rgb-led-strips/
#
# Inspired by:
# https://github.com/adammhaile/RPi-LPD8806/blob/master/LPD8806.py#L90
| [
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
11,
7297,
11,
3601,
62,
8818,
11,
28000,
1098,
62,
17201,
874,
198,
198,
6738,
304,
354,
2586,
71,
13,
8043,
1330,
43595,
10962,
198,
6738,
304,
354,
2586,
71,
13,
22602,
13,
16663,
13... | 2.66129 | 124 |
# Import
import sys
sys.path.append("../")
# Import
from apm import *
# Select server
server = 'http://byu.apmonitor.com'
# Application name
app = 'nlc'
# Clear previous application
apm(server,app,'clear all')
# Load model file
apm_load(server,app,'tank.apm')
# Load time points for future predictions
csv_load(server,app,'tank.csv')
# Load replay replay data for local use
data = csv.reader(open('replay.csv', 'r'))
#data = csv.reader(open('replay1.csv', 'r'))
#data = csv.reader(open('replay2.csv', 'r'))
#data = csv.reader(open('replay3.csv', 'r'))
replay = []
for row in data:
replay.append(row)
len_replay = len(replay)
# APM Variable Classification
# class = FV, MV, SV, CV
# F or FV = Fixed value - parameter may change to a new value every cycle
# M or MV = Manipulated variable - independent variable over time horizon
# S or SV = State variable - model variable for viewing
# C or CV = Controlled variable - model variable for control
FVs = 'kc','taui','taud','op_bias'
MVs = 'percent_open[1]','sp'
SVs = 'percent_open[2]','pv[1]','op[1]','pv[2]','op[2]', \
'inlet_flow[1]','outlet_flow[1]', \
'inlet_flow[2]','outlet_flow[2]', \
'proportional','integral','derivative', \
'error[1]','error[2]'
CVs = 'volume[1]','volume[2]'
# Set up variable classifications for data flow
for x in FVs: apm_info(server,app,'FV',x)
for x in MVs: apm_info(server,app,'MV',x)
for x in SVs: apm_info(server,app,'SV',x)
for x in CVs: apm_info(server,app,'CV',x)
# Options
# imode (1=ss, 2=mpu, 3=rto, 4=sim, 5=mhe, 6=nlc)
apm_option(server,app,'nlc.imode',6)
# controller mode (1=simulate, 2=predict, 3=control)
#apm_option(server,app,'nlc.reqctrlmode',3)
# time units (1=sec,2=min,3=hrs,etc)
apm_option(server,app,'nlc.ctrl_units',1)
# set controlled variable error model type
apm_option(server,app,'nlc.cv_type',1)
apm_option(server,app,'nlc.ev_type',1)
apm_option(server,app,'nlc.reqctrlmode',3)
# read discretization from CSV file
apm_option(server,app,'nlc.csv_read',1)
# turn on historization to see past results
apm_option(server,app,'nlc.hist_hor',500)
# set web plot update frequency
apm_option(server,app,'nlc.web_plot_freq',10)
# Objective for Nonlinear Control
# Controlled variable (c)
apm_option(server,app,'volume[1].sp',500)
apm_option(server,app,'volume[1].sphi',520)
apm_option(server,app,'volume[1].splo',480)
apm_option(server,app,'volume[2].sp',500)
apm_option(server,app,'volume[2].sphi',520)
apm_option(server,app,'volume[2].splo',480)
apm_option(server,app,'volume[1].tau',40.0)
apm_option(server,app,'volume[2].tau',40.0)
apm_option(server,app,'volume[1].status',1)
apm_option(server,app,'volume[2].status',0)
apm_option(server,app,'volume[1].fstatus',0)
apm_option(server,app,'volume[2].fstatus',0)
# Manipulated variables (u)
apm_option(server,app,'percent_open[1].upper',100)
apm_option(server,app,'percent_open[1].dmax',50)
apm_option(server,app,'percent_open[1].lower',0)
apm_option(server,app,'percent_open[1].status',1)
apm_option(server,app,'percent_open[1].fstatus',0)
for isim in range(1, len_replay-1):
print('')
print('--- Cycle %i of %i ---' %(isim,len_replay-2))
# allow server to process other requests
time.sleep(0.1)
for x in FVs:
value = csv_element(x,isim,replay)
if (not math.isnan(value)):
response = apm_meas(server,app,x,value)
print(response)
for x in MVs:
value = csv_element(x,isim,replay)
if (not math.isnan(value)):
response = apm_meas(server,app,x,value)
print(response)
for x in CVs:
value = csv_element(x,isim,replay)
if (not math.isnan(value)):
response = apm_meas(server,app,x,value)
print(response)
# schedule a set point change at cycle 40
#if (isim==4): apm_option(server,app,'volume.sp',50)
# Run NLC on APM server
solver_output = apm(server,app,'solve')
print(solver_output)
print("Finished Solving")
# Retrieve results
array = apm_sol(server,app)
#if (isim==1):
# # Open Web Viewer and Display Link
# print("Opening web viewer")
# url = apm_web(server,app)
# Retrieve results (MEAS,MODEL,NEWVAL)
# MEAS = FV, MV,or CV measured values
# MODEL = SV & CV predicted values
# NEWVAL = FV & MV optimized values
print('--- Available Variables ---')
print(array.keys())
# Plotting
from matplotlib import pyplot
x = array['time']
print(x)
y = array['percent_open[1]']
pyplot.plot(x, y)
pyplot.xlabel('Time')
pyplot.ylabel('Percent Open')
pyplot.show()
| [
2,
17267,
198,
11748,
25064,
198,
17597,
13,
6978,
13,
33295,
7203,
40720,
4943,
198,
198,
2,
17267,
198,
6738,
2471,
76,
1330,
1635,
198,
197,
198,
2,
9683,
4382,
198,
15388,
796,
705,
4023,
1378,
1525,
84,
13,
499,
41143,
13,
785,... | 2.471847 | 1,776 |
# Copyright 2020 - Nokia Software.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from pathlib import Path
from rally.common import cfg
from rally.task import validation
from rally_openstack import consts
from rally_openstack import scenario
from rally_openstack.scenarios.mistral import utils
CONF = cfg.CONF
SCENARIO_TIMEOUT_SEC = 16000
home_dir = str(Path.home())
wf_dir = '%s/.rally/extra/scenarios/big_wf/' % home_dir
action_files = ['dummy_actions.yaml', 'dummy_actions_nuage.yaml']
common_workflow_files = ['sub_wfs.yaml']
@validation.add("required_platform", platform="openstack", users=True)
@validation.add("required_services", services=[consts.Service.MISTRAL])
@scenario.configure(name="MistralExecutions.TerminateScenario",
platform="openstack")
@validation.add("required_platform", platform="openstack", users=True)
@validation.add("required_services", services=[consts.Service.MISTRAL])
@scenario.configure(name="MistralExecutions.DeployScenario",
platform="openstack")
| [
2,
15069,
12131,
532,
26182,
10442,
13,
198,
2,
198,
2,
220,
220,
220,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
220,
220,
220,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
35... | 3.011472 | 523 |
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge,
# to any person obtaining a copy of this software and associated
# documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
from collections import Counter
import string
import re
import argparse
import json
import sys
def normalize_answer(str_input):
"""Lower text and remove punctuation, articles and extra whitespace."""
def remove_articles(text):
'''
Remove "a|an|the"
'''
return re.sub(r'\b(a|an|the)\b', ' ', text)
def white_space_fix(text):
'''
Remove unnessary whitespace
'''
return ' '.join(text.split())
def remove_punc(text):
'''
Remove punc
'''
exclude = set(string.punctuation)
return ''.join(ch for ch in text if ch not in exclude)
def lower(text):
'''
Change string to lower form.
'''
return text.lower()
return white_space_fix(remove_articles(remove_punc(lower(str_input))))
def f1_score(prediction, ground_truth):
'''
Calculate the f1 score.
'''
prediction_tokens = normalize_answer(prediction).split()
ground_truth_tokens = normalize_answer(ground_truth).split()
common = Counter(prediction_tokens) & Counter(ground_truth_tokens)
num_same = sum(common.values())
if num_same == 0:
return 0
precision = 1.0 * num_same / len(prediction_tokens)
recall = 1.0 * num_same / len(ground_truth_tokens)
f1 = (2 * precision * recall) / (precision + recall)
return f1
def exact_match_score(prediction, ground_truth):
'''
Calculate the match score with prediction and ground truth.
'''
return normalize_answer(prediction) == normalize_answer(ground_truth)
def metric_max_over_ground_truths(metric_fn, prediction, ground_truths):
'''
Metric max over the ground truths.
'''
scores_for_ground_truths = []
for ground_truth in ground_truths:
score = metric_fn(prediction, ground_truth)
scores_for_ground_truths.append(score)
return max(scores_for_ground_truths)
def _evaluate(dataset, predictions):
'''
Evaluate function.
'''
f1 = exact_match = total = 0
count = 0
for article in dataset:
for paragraph in article['paragraphs']:
for qa in paragraph['qas']:
total += 1
if qa['id'] not in predictions:
message = 'Unanswered question ' + qa['id'] + \
' will receive score 0.'
#print(message, file=sys.stderr)
count += 1
continue
ground_truths = list(map(lambda x: x['text'], qa['answers']))
prediction = predictions[qa['id']]
exact_match += metric_max_over_ground_truths(
exact_match_score, prediction, ground_truths)
f1 += metric_max_over_ground_truths(
f1_score, prediction, ground_truths)
print('total', total, 'exact_match', exact_match, 'unanswer_question ', count)
exact_match = 100.0 * exact_match / total
f1 = 100.0 * f1 / total
return {'exact_match': exact_match, 'f1': f1}
def evaluate(data_file, pred_file):
'''
Evaluate.
'''
expected_version = '1.1'
with open(data_file) as dataset_file:
dataset_json = json.load(dataset_file)
if dataset_json['version'] != expected_version:
print('Evaluation expects v-' + expected_version +
', but got dataset with v-' + dataset_json['version'],
file=sys.stderr)
dataset = dataset_json['data']
with open(pred_file) as prediction_file:
predictions = json.load(prediction_file)
# print(json.dumps(evaluate(dataset, predictions)))
result = _evaluate(dataset, predictions)
# print('em:', result['exact_match'], 'f1:', result['f1'])
return result['exact_match']
def evaluate_with_predictions(data_file, predictions):
'''
Evalutate with predictions/
'''
expected_version = '1.1'
with open(data_file) as dataset_file:
dataset_json = json.load(dataset_file)
if dataset_json['version'] != expected_version:
print('Evaluation expects v-' + expected_version +
', but got dataset with v-' + dataset_json['version'],
file=sys.stderr)
dataset = dataset_json['data']
result = _evaluate(dataset, predictions)
return result['exact_match']
if __name__ == '__main__':
EXPECT_VERSION = '1.1'
parser = argparse.ArgumentParser(
description='Evaluation for SQuAD ' + EXPECT_VERSION)
parser.add_argument('dataset_file', help='Dataset file')
parser.add_argument('prediction_file', help='Prediction File')
args = parser.parse_args()
print(evaluate(args.dataset_file, args.prediction_file))
| [
2,
15069,
357,
66,
8,
5413,
10501,
198,
2,
1439,
2489,
10395,
13,
198,
2,
198,
2,
17168,
13789,
198,
2,
198,
2,
2448,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11,
198,
2,
284,
597,
1048,
16727,
257,
4866,
286,
428,
3788,
290... | 2.51988 | 2,339 |
# coding=utf-8
import os
import caffe
import yaml
import numpy as np
import numpy.random as npr
from utils.cython_bbox import bbox_overlaps
import json
DEBUG = False
import time
import sys
print sys.path
class LossTargetLayer(caffe.Layer):
"""
Assign anchors to ground-truth targets. Produces anchor classification
labels and bounding-box regression targets.
"""
def backward(self, top, propagate_down, bottom):
"""This layer does not propagate gradients."""
pass
def _compute_targets(ex_rois, gt_rois):
"""Compute bounding-box regression targets for an image."""
assert ex_rois.shape[0] == gt_rois.shape[0]
assert ex_rois.shape[1] == 4
targets = _bbox_transform(ex_rois, gt_rois[:, :4]).astype(
np.float32, copy=False)
# true
targets -= np.array(([0,0,0,0]))
targets /= np.array(([0.1,0.1,0.2,0.2]))
return targets
def _bbox_transform(ex_rois, gt_rois):
"""
dx = (Gx-Ex)/Ew
dx = (Gy-Ey)/Eh
dw = log(Gw/Ew)
dh = log(Gh/Eh)
"""
ex_widths = ex_rois[:, 2] - ex_rois[:, 0] + 1.0
ex_heights = ex_rois[:, 3] - ex_rois[:, 1] + 1.0
ex_ctr_x = ex_rois[:, 0] + 0.5 * ex_widths
ex_ctr_y = ex_rois[:, 1] + 0.5 * ex_heights
gt_widths = gt_rois[:, 2] - gt_rois[:, 0] + 1.0
gt_heights = gt_rois[:, 3] - gt_rois[:, 1] + 1.0
gt_ctr_x = gt_rois[:, 0] + 0.5 * gt_widths
gt_ctr_y = gt_rois[:, 1] + 0.5 * gt_heights
targets_dx = (gt_ctr_x - ex_ctr_x) / ex_widths
targets_dy = (gt_ctr_y - ex_ctr_y) / ex_heights
targets_dw = np.log(gt_widths / ex_widths)
targets_dh = np.log(gt_heights / ex_heights)
targets = np.vstack(
(targets_dx, targets_dy, targets_dw, targets_dh)).transpose()
return targets | [
2,
19617,
28,
40477,
12,
23,
198,
198,
11748,
28686,
198,
11748,
21121,
198,
11748,
331,
43695,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
299,
32152,
13,
25120,
355,
299,
1050,
198,
6738,
3384,
4487,
13,
948,
400,
261,
62,
65,
... | 2.135036 | 822 |
import pytest
import numpy as np
from h5py import Group, Dataset, SoftLink, ExternalLink
from h5preserve import RegistryContainer, HardLink, new_registry_list
def is_matching_hdf5_object(new, old):
"""
Check if two objects have the same hdf5 representation
"""
if type(new) != type(old):
return False
elif isinstance(new, Dataset):
if new[:] != old[:]:
return False
elif new.attrs != old.attrs:
return False
return True
elif isinstance(new, Group):
if new.keys() != old.keys():
return False
elif new.attrs != old.attrs:
return False
for key in new.keys():
if not is_matching_hdf5_object(new[key], old[key]):
return False
return True
return new == old
| [
11748,
12972,
9288,
198,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
289,
20,
9078,
1330,
4912,
11,
16092,
292,
316,
11,
8297,
11280,
11,
34579,
11280,
198,
198,
6738,
289,
20,
18302,
3760,
1330,
33432,
29869,
11,
6912,
11280,
11,
... | 2.227642 | 369 |
from microbench import MicroBench, MBNvidiaSmi
import subprocess
import unittest
import pandas
try:
subprocess.call(['nvidia-smi'])
nvidia_smi_available = True
except FileNotFoundError:
nvidia_smi_available = False
@unittest.skipIf(not nvidia_smi_available, 'nvidia-smi command not found')
| [
6738,
4580,
26968,
1330,
4527,
44199,
11,
10771,
45,
21744,
7556,
72,
198,
11748,
850,
14681,
198,
11748,
555,
715,
395,
198,
11748,
19798,
292,
198,
198,
28311,
25,
198,
220,
220,
220,
850,
14681,
13,
13345,
7,
17816,
77,
21744,
12,
... | 2.798165 | 109 |
'''
Extract results from the log files of Stanford sieve and put into a CSV file
Usage:
extract_results_stanford_sieve.py --out=<fn> <log_paths>...
'''
from utils import match_with_line_no, grouper
import re
from docopt import docopt
import pandas as pd
import os
if __name__ == '__main__':
args = docopt(__doc__)
main(args['<log_paths>'], args['--out'])
| [
7061,
6,
198,
11627,
974,
2482,
422,
262,
2604,
3696,
286,
13863,
264,
12311,
290,
1234,
656,
257,
44189,
2393,
198,
198,
28350,
25,
198,
220,
7925,
62,
43420,
62,
14192,
3841,
62,
82,
12311,
13,
9078,
1377,
448,
28,
27,
22184,
29,
... | 2.622378 | 143 |
import gevent
import gevent.monkey
import socket
from time import sleep, time
import requests
import logging
from typing import List
import sys
import os
| [
11748,
4903,
1151,
198,
11748,
4903,
1151,
13,
49572,
198,
11748,
17802,
198,
198,
6738,
640,
1330,
3993,
11,
640,
198,
11748,
7007,
198,
11748,
18931,
198,
6738,
19720,
1330,
7343,
198,
11748,
25064,
198,
11748,
28686,
628,
198
] | 4.025641 | 39 |
import scrapy
from scrapy.loader import ItemLoader
from scrapy.exceptions import CloseSpider
from fbcrawl.spiders.fbcrawl import FacebookSpider
from fbcrawl.items import EventsItem, parse_date, parse_date2
from datetime import datetime
class EventsSpider(FacebookSpider):
"""
Parse FB events, given a page (needs credentials)
"""
name = "events"
custom_settings = {
'FEED_EXPORT_FIELDS': ['name','where','location','photo','start_date', \
'end_date','description'],
'DUPEFILTER_CLASS' : 'scrapy.dupefilters.BaseDupeFilter',
'CONCURRENT_REQUESTS' : 1
}
| [
11748,
15881,
88,
198,
198,
6738,
15881,
88,
13,
29356,
1330,
9097,
17401,
198,
6738,
15881,
88,
13,
1069,
11755,
1330,
13872,
41294,
198,
6738,
277,
15630,
13132,
13,
2777,
4157,
13,
69,
15630,
13132,
1330,
3203,
41294,
198,
6738,
277,... | 2.546185 | 249 |
import codecs
import os.path
from setuptools import find_packages, setup
with open("README.rst", "r") as fh:
long_description = fh.read()
setup(
name="massgenotyping",
version=get_version("massgenotyping/__init__.py"),
description=(
"Python package for microsatellite genotyping from amplicon sequencing data"
),
long_description=long_description,
url="https://github.com/kohyamat/massgenotyping",
author="Tetsuo I. Kohyama",
author_email="tetsuo.kohyama@gmail.com",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Education",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
keywords="genotyping microsatellite NGS amplicon-sequencing",
packages=find_packages(),
install_requires=[
"biopython>=1.74",
"dataclasses;python_version=='3.6'",
"dataclasses_json",
"fuzzysearch>=0.6.2",
"matplotlib>=3.0.3",
"natsort>=5.1.0",
"numpy>=1.16.2",
"python-Levenshtein>=0.12.0",
"seaborn>=0.5.0",
"tqdm>=4.30.0"
],
python_requires=">=3.6",
entry_points={
"console_scripts": [
"mgt = massgenotyping:main",
]
},
)
| [
11748,
40481,
82,
198,
11748,
28686,
13,
6978,
198,
198,
6738,
900,
37623,
10141,
1330,
1064,
62,
43789,
11,
9058,
628,
628,
198,
4480,
1280,
7203,
15675,
11682,
13,
81,
301,
1600,
366,
81,
4943,
355,
277,
71,
25,
198,
220,
220,
220... | 2.258904 | 730 |
from .redis_tools import RedisInteraction
__all__ = ["RedisInteraction"]
| [
6738,
764,
445,
271,
62,
31391,
1330,
2297,
271,
9492,
2673,
198,
198,
834,
439,
834,
796,
14631,
7738,
271,
9492,
2673,
8973,
198
] | 3.083333 | 24 |
from setuptools import setup, find_packages
setup(
name='typogrify',
version='2.0.0',
packages=find_packages(),
author='Christian Metts',
author_email='xian@mintchaos.com',
license='BSD',
description='Typography related template filters for Django & Jinja2 applications',
url='https://github.com/mintchaos/typogrify',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'Topic :: Utilities'
],
install_requires=['smartypants>=1.6']
) | [
6738,
900,
37623,
10141,
1330,
9058,
11,
1064,
62,
43789,
628,
198,
40406,
7,
198,
220,
220,
220,
1438,
11639,
28004,
519,
81,
1958,
3256,
198,
220,
220,
220,
2196,
11639,
17,
13,
15,
13,
15,
3256,
198,
220,
220,
220,
10392,
28,
1... | 2.701068 | 281 |
#!/bin/python3
# Author: Igor Andruskiewitsch
# Notes: Custom commands for the Ranger terminal file manager
# -- IMPORTS
import os
import subprocess
import ranger
# -- COMMON CONSTANTS
NULL_ERR="2> /dev/null"
MATCH="\( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \)"
FIND="find -L {} {} -prune -o {} -print"
FZF="fzf --height 100% +m"
FZF_TMUX="fzf-tmux --height 100% +m"
# -- COMMANDS
class fzf(ranger.api.commands.Command):
""":fzf [-d]
Find a file or directory using `fzf`.
"""
# Constants
path="."
parse="sed 1d | cut -b3-"
class rg(ranger.api.commands.Command):
""":rg <args> <pattern>
Live RipGrep over current directory.
Outputs the files that match the search and navigates to the file.
"""
ripgrep="rg"
class z(ranger.api.commands.Command):
"""
:z
Jump to directory using fasd
"""
@staticmethod
| [
2,
48443,
8800,
14,
29412,
18,
198,
2,
6434,
25,
46157,
843,
14932,
74,
769,
48279,
198,
2,
11822,
25,
8562,
9729,
329,
262,
21888,
12094,
2393,
4706,
198,
198,
2,
1377,
30023,
33002,
198,
198,
11748,
28686,
198,
11748,
850,
14681,
... | 2.421918 | 365 |
from fizz_buzz_tree.fizz_buzz_tree import fizz_buzz_tree
from tree.tree import Binary_Tree, TNode
import pytest
@pytest.fixture
@pytest.fixture
| [
6738,
277,
6457,
62,
65,
4715,
62,
21048,
13,
69,
6457,
62,
65,
4715,
62,
21048,
1330,
277,
6457,
62,
65,
4715,
62,
21048,
198,
6738,
5509,
13,
21048,
1330,
45755,
62,
27660,
11,
309,
19667,
198,
11748,
12972,
9288,
198,
198,
31,
... | 2.551724 | 58 |
F = ''
L = ''
S = input()
N = len(S)
M = [S[k+1] for k in range(N-2)]
F = S[0]
L = S[-1]
for k in range(3):
S = input()
for w in range(N-2):
M[w] += S[w+1]
F += S[0]
L += S[-1]
res = ''
for w in range(N-2):
res += chr((int(F)*int(M[w])+int(L))%257)
print(res)
| [
37,
796,
10148,
198,
43,
796,
10148,
198,
50,
796,
5128,
3419,
198,
45,
796,
18896,
7,
50,
8,
198,
44,
796,
685,
50,
58,
74,
10,
16,
60,
329,
479,
287,
2837,
7,
45,
12,
17,
15437,
198,
37,
796,
311,
58,
15,
60,
198,
43,
79... | 1.7 | 170 |
"""
Compute priorbox coordinates in center-offset(2D) form for each source feature map.
Attempt to modify SSD anchor to 3D(scale:w,h,d). Create two kind of cube(small,large) in every feature map cell.
Reference: https://github.com/amdegroot/ssd.pytorch/blob/master/layers/functions/prior_box.py
"""
from typing import List, Tuple
from dataclasses import dataclass
from simple_parsing import Serializable
from math import sqrt as sqrt
from itertools import product as product
import torch
# |TODO(Jiyong)|: change for Objectron setting
@dataclass
class Anchor(object):
"""Compute priorbox coordinates in center-offset form for each source feature map."""
if __name__ == '__main__':
main() | [
37811,
198,
7293,
1133,
3161,
3524,
22715,
287,
3641,
12,
28968,
7,
17,
35,
8,
1296,
329,
1123,
2723,
3895,
3975,
13,
198,
37177,
284,
13096,
21252,
18021,
284,
513,
35,
7,
9888,
25,
86,
11,
71,
11,
67,
737,
13610,
734,
1611,
286,... | 3.25463 | 216 |
import logging
import machine
import utime
from m5stack import lcd
# global variables
logger = None
# decoretor
# date time function
if __name__ == '__main__':
id = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
password = 'xxxxxxxxxxxx'
contract_amperage = "50"
collect_date = "22"
bp35a1 = BP35A1(id, password, contract_amperage, collect_date)
bp35a1.open()
(datetime, data) = bp35a1.instantaneous_power()
print('Instantaneous power {} {}W'.format(datetime, data))
(datetime, data) = bp35a1.total_power()
print('Total power {} {}kWh'.format(datetime, data))
bp35a1.close() | [
11748,
18931,
198,
11748,
4572,
198,
11748,
3384,
524,
198,
6738,
285,
20,
25558,
1330,
300,
10210,
198,
198,
2,
3298,
9633,
198,
198,
6404,
1362,
796,
6045,
198,
198,
2,
875,
9997,
273,
628,
628,
198,
198,
2,
3128,
640,
2163,
628,
... | 2.605809 | 241 |
import logging
import pandas as pd
import spatialHeterogeneity as sh
from spatialHeterogeneity.neighborhood.utils import get_node_interactions
from networkx import Graph
import numpy as np
import spatialHeterogeneity as sh
from spatialHeterogeneity.graph_builder.constants import GRAPH_BUILDER_DEFAULT_PARAMS
import matplotlib.pyplot as plt
from matplotlib.cm import get_cmap
from matplotlib.colors import ListedColormap
import copy
from skimage.measure import regionprops
# %%
so = sh.dataset.imc()
spl = list(so.G.keys())[0]
attr = 'cell_type'
G: Graph = so.G[spl]['contact']
node = 2
neigh = list(G[node])
g = G.subgraph(neigh)
data = so.obs[spl][attr]
nint = get_node_interactions(g, data)
# %%
so = sh.dataset.imc()
spl = list(so.G.keys())[1]
attr = 'cell_type'
sh.pp.extract_centroids(so, spl)
config = GRAPH_BUILDER_DEFAULT_PARAMS['radius']
config['builder_params']['radius'] = 36
sh.graph.build_graph(so, spl, builder_type='radius', config=config)
sh.neigh.infiltration(so, spl, attr, graph_key='radius', local=True)
# %%
cmap = ['white', 'darkgreen', 'gold', 'steelblue', 'darkred', 'coral']
cmap_labels = {0: 'background', 1: 'immune', 2: 'endothelial', 3: 'stromal', 4: 'tumor', 5: 'myoepithelial'}
cmap = ListedColormap(cmap)
so.uns['cmaps'].update({'cell_type_id': cmap})
so.uns['cmap_labels'].update({'cell_type_id': cmap_labels})
cmap = copy.copy(get_cmap('BuGn'))
cmap = copy.copy(get_cmap('plasma'))
cmap.set_bad('gray')
so.uns['cmaps']['infiltration'] = cmap
sh.pp.extract_centroids(so, spl)
radi = [20, 36, 50]
fig, axs = plt.subplots(2, 2, figsize=(8, 8), dpi=300)
sh.pl.spatial(so, spl, attr='cell_type_id', ax=axs[0, 0])
for r, ax in zip(radi, axs.flat[1:]):
config['builder_params']['radius'] = r
sh.graph.build_graph(so, spl, builder_type='radius', config=config)
sh.neigh.infiltration(so, spl, attr, graph_key='radius', local=True)
sh.pl.spatial(so, spl, attr='infiltration', ax=ax, background_color='black')
fig.show()
fig.savefig('/Users/art/Downloads/infiltration_plasma.pdf')
# %%
config = GRAPH_BUILDER_DEFAULT_PARAMS['radius']
config['builder_params']['radius'] = 50
sh.graph.build_graph(so, spl, builder_type='radius', config=config)
G = so.G[spl]['radius']
# %%
dat: pd.DataFrame = so.obs[spl][['infiltration', 'x', 'y']]
dat = dat[~dat.infiltration.isna()]
# mask size
mask = so.masks[spl]['cellmasks']
rg = regionprops(mask)
areas = []
for r in rg:
areas.append(r.area)
plt.hist(areas, bins=100)
plt.show()
step_size = 20
x, y = np.arange(0, mask.shape[1], step_size), np.arange(0, mask.shape[0], step_size)
xv, yv = np.meshgrid(mask.shape[1], mask.shape[0])
img = np.zeros((len(y), len(x)))
dat['x_img'] = np.round(dat.x / step_size).astype(int)
dat['y_img'] = np.round(dat.y / step_size).astype(int)
for i in range(dat.shape[0]):
img[dat.y_img.iloc[i], dat.x_img.iloc[i]] = dat.infiltration.iloc[i]
methods = [None, 'bicubic', 'spline16',
'spline36', 'hanning', 'hamming', 'hermite', 'kaiser', 'quadric',
'catrom', 'gaussian', 'bessel', 'mitchell', 'sinc', 'lanczos']
plt.imshow(img, interpolation='gaussian', cmap='plasma');
plt.show()
# %%
# %%
infiltration(so, spl, step_size=20, collision_strategy='max')
# %%
attr = 'infiltration'
step_size = 10
dat = so.obs[spl][[attr] + ['x', 'y']]
dat = dat[~dat.infiltration.isna()]
# we add step_size to prevent out of bounds indexing should the `{x,y}_img` values be rounded up.
x, y = np.arange(0, mask.shape[1], step_size), np.arange(0, mask.shape[0], step_size)
img = np.zeros((len(y), len(x)))
# %%
import spatialHeterogeneity as sh
from spatialHeterogeneity.graph_builder.constants import GRAPH_BUILDER_DEFAULT_PARAMS
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
so = sh.dataset.imc()
spl = list(so.G.keys())[1]
cmap = ['white', 'darkgreen', 'gold', 'steelblue', 'darkred', 'coral']
cmap_labels = {0: 'background', 1: 'immune', 2: 'endothelial', 3: 'stromal', 4: 'tumor', 5: 'myoepithelial'}
cmap = ListedColormap(cmap)
so.uns['cmaps'].update({'cell_type_id': cmap})
so.uns['cmap_labels'].update({'cell_type_id': cmap_labels})
sh.pp.extract_centroids(so, spl)
config = GRAPH_BUILDER_DEFAULT_PARAMS['radius']
config['builder_params']['radius'] = 36
sh.graph.build_graph(so, spl, builder_type='radius', config=config)
attr = 'cell_type'
sh.neigh.infiltration(so, spl, attr, graph_key='radius', local=True)
step_sizes = [5, 10, 20]
fig, axs = plt.subplots(2,2, dpi=300)
sh.pl.spatial(so, spl, 'cell_type_id', ax=axs[0,0])
for ax, step_size in zip(axs.flat[1:], step_sizes):
ax: plt.Axes
sh.pl.infiltration(so, spl, step_size=step_size, ax=ax)
ax.set_title(f'step_size = {step_size}')
ax.set_axis_off()
fig.show()
fig.savefig('/Users/art/Downloads/infiltration.pdf')
methods = [None, 'none', 'nearest', 'bilinear', 'bicubic', 'spline16',
'spline36', 'hanning', 'hamming', 'hermite', 'kaiser', 'quadric',
'catrom', 'gaussian', 'bessel', 'mitchell', 'sinc', 'lanczos']
fig, axs = plt.subplots(nrows=3, ncols=6, figsize=(9, 6), dpi=300,
subplot_kw={'xticks': [], 'yticks': []})
step_size = 20
for ax, interpolation in zip(axs.flat, methods):
sh.pl.infiltration(so, spl, step_size=step_size, interpolation=interpolation, ax=ax)
ax.set_title(interpolation)
fig.show()
fig.savefig('/Users/art/Downloads/infiltration_interpolation.pdf')
# %%
sh.metrics.quadratic_entropy(so, spl, 'meta_id', graph_key='contact') | [
11748,
18931,
198,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
11748,
21739,
39,
2357,
37477,
355,
427,
198,
6738,
21739,
39,
2357,
37477,
13,
710,
394,
2865,
2894,
13,
26791,
1330,
651,
62,
17440,
62,
3849,
4658,
198,
6738,
3127,... | 2.371429 | 2,310 |
import os
import sys
import argparse
import cv2
from tqdm import tqdm
from ssds.ssds import SSDDetector
COLORS = [(255, 0, 0), (0, 255, 0), (0, 0, 255)]
FONT = cv2.FONT_HERSHEY_SIMPLEX
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Demo a ssds.pytorch network')
parser.add_argument('-cfg', '--confg-file',
help='the address of optional config file', default=None, type=str, required=True)
parser.add_argument('-i', '--demo-file',
help='the address of the demo file', default=None, type=str, required=True)
parser.add_argument('-t', '--type',
default='image', choices=['image', 'video'])
parser.add_argument('-d', '--display',
help='whether display the detection result', action="store_true")
parser.add_argument('-s', '--shift', action="store_true")
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
model = SSDDetector(args.confg_file, args.shift)
getattr(sys.modules[__name__], "demo_"+args.type)(model, args.demo_file, args.display) | [
11748,
28686,
198,
11748,
25064,
198,
11748,
1822,
29572,
198,
198,
11748,
269,
85,
17,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
198,
6738,
37786,
9310,
13,
824,
9310,
1330,
21252,
11242,
9250,
198,
198,
25154,
20673,
796,
47527... | 2.46 | 450 |
__author__ = 'fthiele'
data = {
'level 1-1':
{
'level 1-1-1':
{
'a': 1,
'b': 2,
'c': 3,
},
'level 1-1-2':
{
'd': 4,
'e': 5,
'f': 6,
},
},
'level 1-2':
{
'level 1-2-1':
{
'g': 7,
'h': 8,
'i': 9,
},
'level 1-2-2':
{
'j': 10,
'k': 11,
'l': 12,
'm': [
13, 14, 15, 16
]
}
}
}
if __name__ == "__main__":
main()
| [
834,
9800,
834,
796,
705,
69,
400,
494,
293,
6,
198,
198,
7890,
796,
1391,
198,
220,
220,
220,
705,
5715,
352,
12,
16,
10354,
198,
220,
220,
220,
220,
220,
220,
220,
1391,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
... | 1.217518 | 685 |
from .connected_components import find_dense_subgraphs
| [
6738,
764,
15236,
62,
5589,
3906,
1330,
1064,
62,
67,
1072,
62,
7266,
34960,
82,
198
] | 3.4375 | 16 |
import torch
class PSNR:
"""Peak Signal to Noise Ratio
img1 and img2 have range [0, 255]"""
@staticmethod
if __name__ == '__main__':
i = torch.randn(1, 3, 20, 20)
j = i / 1.000001
p = PSNR()
print(p(i, j).item())
| [
11748,
28034,
628,
198,
198,
4871,
6599,
24723,
25,
198,
220,
220,
220,
37227,
6435,
461,
26484,
284,
30964,
33956,
198,
220,
220,
220,
33705,
16,
290,
33705,
17,
423,
2837,
685,
15,
11,
14280,
60,
37811,
628,
220,
220,
220,
2488,
1... | 2.225225 | 111 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import filer.fields.image
import django.db.models.deletion
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
4981,
11,
15720,
602,
198,
11748,
1226,
263,
13,
25747,
13,
906... | 2.912281 | 57 |
import pandas as pd
from wind_power_forecasting import TIME_LABEL, NWP_PREFIX
from wind_power_forecasting.utils.dataframe import extract_columns
| [
11748,
19798,
292,
355,
279,
67,
198,
198,
6738,
2344,
62,
6477,
62,
754,
19913,
1330,
20460,
62,
48780,
3698,
11,
21966,
47,
62,
47,
31688,
10426,
198,
6738,
2344,
62,
6477,
62,
754,
19913,
13,
26791,
13,
7890,
14535,
1330,
7925,
6... | 3.125 | 48 |
from settings.globalsettings import GlobalSettings
globalsettings = GlobalSettings()
AWS_ACCOUNT_ID = globalsettings.AWS_ACCOUNT_ID
AWS_REGION = globalsettings.AWS_REGION
def code_build_batch_policy_in_json(project_ids):
"""
Define an IAM policy statement for CodeBuild batch operation.
:param project_ids: a list of CodeBuild project id.
:return: an IAM policy statement in json.
"""
resources = []
for project_id in project_ids:
resources.append(
"arn:aws:codebuild:{}:{}:project/{}*".format(
AWS_REGION, AWS_ACCOUNT_ID, project_id
)
)
return {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"codebuild:StartBuild",
"codebuild:StopBuild",
"codebuild:RetryBuild",
],
"Resource": resources,
}
],
}
def code_build_fuzz_policy_in_json():
"""
Define an IAM policy that only grants access to publish CloudWatch metrics to the current region in the same
namespace used in the calls to PutMetricData in tests/ci/common_fuzz.sh.
"""
return {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": "cloudwatch:PutMetricData",
"Resource": "*",
"Condition": {
"StringEquals": {
"aws:RequestedRegion": [AWS_REGION],
"cloudwatch:namespace": ["AWS-LC-Fuzz"],
}
},
}
],
}
def s3_read_write_policy_in_json(s3_bucket_name):
"""
Define an IAM policy statement for reading and writing to S3 bucket.
:return: an IAM policy statement in json.
"""
return {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": ["s3:Put*", "s3:Get*"],
"Resource": ["arn:aws:s3:::{}/*".format(s3_bucket_name)],
}
],
}
def ecr_repo_arn(repo_name):
"""
Create a ECR repository arn.
See https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonelasticcontainerregistry.html
:param repo_name: repository name.
:return: arn:aws:ecr:${Region}:${Account}:repository/${RepositoryName}
"""
ecr_arn_prefix = "arn:aws:ecr:{}:{}:repository".format(AWS_REGION, AWS_ACCOUNT_ID)
return "{}/{}".format(ecr_arn_prefix, repo_name)
def ecr_power_user_policy_in_json(ecr_repo_names):
"""
Define an AWS-LC specific IAM policy statement for AWS ECR power user used to create new docker images.
:return: an IAM policy statement in json.
"""
ecr_arns = []
for ecr_repo_name in ecr_repo_names:
ecr_arns.append(ecr_repo_arn(ecr_repo_name))
return {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": ["ecr:GetAuthorizationToken"],
"Resource": "*",
},
{
"Effect": "Allow",
"Action": [
"ecr:BatchCheckLayerAvailability",
"ecr:GetDownloadUrlForLayer",
"ecr:GetRepositoryPolicy",
"ecr:DescribeRepositories",
"ecr:ListImages",
"ecr:DescribeImages",
"ecr:BatchGetImage",
"ecr:GetLifecyclePolicy",
"ecr:GetLifecyclePolicyPreview",
"ecr:ListTagsForResource",
"ecr:DescribeImageScanFindings",
"ecr:InitiateLayerUpload",
"ecr:UploadLayerPart",
"ecr:CompleteLayerUpload",
"ecr:PutImage",
],
"Resource": ecr_arns,
},
],
}
def aws_secrets_manager_get_secret_policy_in_json(secret_arn):
"""
Define an IAM policy statement for getting secret value.
:return: an IAM policy statement in json.
"""
return {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": ["secretsmanager:GetSecretValue"],
"Resource": [secret_arn],
}
],
}
def aws_secrets_manager_get_secret_policy_in_json(secret_arn):
"""
Define an IAM policy statement for getting secret value.
:return: an IAM policy statement in json.
"""
return {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": ["secretsmanager:GetSecretValue"],
"Resource": [secret_arn],
}
],
}
| [
6738,
6460,
13,
4743,
672,
874,
12374,
1330,
8060,
26232,
198,
198,
4743,
672,
874,
12374,
796,
8060,
26232,
3419,
198,
198,
12298,
50,
62,
26861,
28270,
62,
2389,
796,
15095,
874,
12374,
13,
12298,
50,
62,
26861,
28270,
62,
2389,
198... | 1.91472 | 2,568 |
import asyncio
import random
import typing
from abc import ABC, abstractmethod
from datetime import datetime, timedelta
from enum import Enum, auto
import discord
from redbot.core import Config
from redbot.core.bot import Red
from redbot.core.config import Group
from . import constants, utils
from .recipe_provider import DEFAULT_RECIPE_PROVIDER
| [
11748,
30351,
952,
198,
11748,
4738,
198,
11748,
19720,
198,
6738,
450,
66,
1330,
9738,
11,
12531,
24396,
198,
6738,
4818,
8079,
1330,
4818,
8079,
11,
28805,
12514,
198,
6738,
33829,
1330,
2039,
388,
11,
8295,
198,
198,
11748,
36446,
19... | 3.612245 | 98 |
# testExplorer -
import string
import sys
import os
import win32com.client.dynamic
import win32api
import glob
import pythoncom
import time
from util import CheckClean
bVisibleEventFired = 0
if __name__=='__main__':
TestAll()
CheckClean()
| [
2,
1332,
18438,
11934,
532,
201,
198,
201,
198,
11748,
4731,
201,
198,
11748,
25064,
201,
198,
11748,
28686,
201,
198,
11748,
1592,
2624,
785,
13,
16366,
13,
67,
28995,
201,
198,
11748,
1592,
2624,
15042,
201,
198,
11748,
15095,
201,
... | 2.637255 | 102 |
"""
Find diffusion coefficients
"""
import numpy as np
import apl104lib as apl104
from scipy.optimize import minimize
import copy
exec(open('refsample.py').read())
exec(open('expsample.py').read())
# Define problem
# Optimization routine
guess=np.array([7.04998003e-03, 8.69586536e-03, 4.61830549e-03, 5.64390294e-02,
1.29183677e-03, 1.00000009e+00, 3.18173366e-02, 1.30978963e-02,
1.00000000e-05, 9.11079843e-04])
bounds=np.repeat(np.array([[1e-6,10]]),10,axis=0)
result=minimize(problem,guess,expsample,method='L-BFGS-B',bounds=bounds,options={'disp':True,'gtol':1e-04})
print(result)
| [
37811,
201,
198,
9938,
44258,
44036,
201,
198,
37811,
201,
198,
201,
198,
11748,
299,
32152,
355,
45941,
201,
198,
11748,
257,
489,
13464,
8019,
355,
257,
489,
13464,
201,
198,
6738,
629,
541,
88,
13,
40085,
1096,
1330,
17775,
201,
19... | 2.165517 | 290 |
from entrypoint import entrypoint, is_main
from tests.track import track
MAIN = "__main__"
TEST = "__test__"
entrypoint_call = entrypoint(MAIN)
entrypoint_no_call = entrypoint(TEST)
| [
6738,
5726,
4122,
1330,
5726,
4122,
11,
318,
62,
12417,
198,
6738,
5254,
13,
11659,
1330,
2610,
198,
198,
5673,
1268,
796,
366,
834,
12417,
834,
1,
198,
51,
6465,
796,
366,
834,
9288,
834,
1,
628,
198,
198,
13000,
4122,
62,
13345,
... | 2.863636 | 66 |
import configparser
| [
11748,
4566,
48610,
198
] | 5 | 4 |
from discord.ext import ipc
from quart import (Quart, Response, abort, redirect, render_template, request,
url_for)
from quart_auth import AuthManager, AuthUser, Unauthorized
from quart_auth import login_required as auth_required
from quart_auth import login_user, logout_user
from quart_discord import DiscordOAuth2Session
from werkzeug.exceptions import HTTPException
from os import environ
from dotenv import load_dotenv
load_dotenv()
app = Quart(__name__)
ipc_client = ipc.Client(secret_key="edoCBotAdmin")
app.config["SECRET_KEY"] = "edoC"
app.config["DISCORD_CLIENT_ID"] = 845186772698923029
app.config["DISCORD_CLIENT_SECRET"] = environ.get('CLIENT_SECRET')
app.config["DISCORD_REDIRECT_URI"] = "youtube.com"
discord = DiscordOAuth2Session(app)
# Auth
AuthManager(app)
# Routes
@app.route("/")
@app.route("/about")
@app.route("/support")
@app.route("/stats")
# Shortcuts
@app.route("/invite")
#@app.route("/vote")
#async def vote():
# return redirect("https://top.gg/bot/812395879146717214/vote")
@app.route("/server")
@app.route("/source")
@app.route("/donate")
# API (discord etc)
@app.route("/api/login")
@app.route("/api/logout")
@app.route("/api/callback")
@app.route("/api/webhook/<source>", methods=["POST"])
@app.errorhandler(Exception)
if __name__ == "__main__":
try:
app.run(host="0.0.0.0", debug=True)
except OSError:
print("Port is already in use.")
| [
6738,
36446,
13,
2302,
1330,
20966,
66,
198,
6738,
28176,
1330,
357,
4507,
433,
11,
18261,
11,
15614,
11,
18941,
11,
8543,
62,
28243,
11,
2581,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
22... | 2.563055 | 563 |
import controllers.base
| [
11748,
20624,
13,
8692,
198
] | 4.8 | 5 |
import aiohttp
from .exception import InvalidAPIKeyError, APIError, NotRegistered, BadRequest
from .objects.leaderboards import Leaderboards
from .objects.ranking import Ranking
from .objects.player import Player, Formatted
from .objects.api import Stats, Key
from .utils import DEFAULT
###########################
# #
# History #
# #
###########################
###########################
# #
# Leaderboards #
# #
###########################
###########################
# #
# Misc #
# #
########################### | [
11748,
257,
952,
4023,
198,
198,
6738,
764,
1069,
4516,
1330,
17665,
17614,
9218,
12331,
11,
7824,
12331,
11,
1892,
47473,
11,
7772,
18453,
198,
6738,
764,
48205,
13,
27940,
12821,
1330,
10540,
12821,
198,
6738,
764,
48205,
13,
28405,
1... | 2.094851 | 369 |
from django.db import models
# ---------- ---------- ----------
# ---------- ---------- ----------
# ---------- ---------- ----------
# ---------- ---------- ----------
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
198,
2,
24200,
438,
24200,
438,
24200,
438,
198,
2,
24200,
438,
24200,
438,
24200,
438,
198,
198,
2,
24200,
438,
24200,
438,
24200,
438,
198,
2,
24200,
438,
24200,
438,
24200,
438,
628,
... | 3.765957 | 47 |
#The MIT License (MIT)
#
#Copyright (c) 2015 Geoffroy Givry
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
import nuke
| [
2,
464,
17168,
13789,
357,
36393,
8,
201,
198,
2,
201,
198,
2,
15269,
357,
66,
8,
1853,
24688,
3287,
402,
452,
563,
201,
198,
2,
201,
198,
2,
5990,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11,
284,
597,
1048,
16727,
257,
486... | 3.422156 | 334 |
import sys
import time
import telepot
from telepot.loop import MessageLoop
import pyrebase
subbed = []
TOKEN = ""
config = {
"apiKey": "",
"authDomain": "clusterscanner.firebaseio.com",
"databaseURL": "https://clusterscanner.firebaseio.com",
"storageBucket": "clusterscanner.appspot.com"
}
firebase = pyrebase.initialize_app(config)
auth = firebase.auth()
user = auth.sign_in_with_email_and_password("fake@fake.com", "totallyLegit")
db = firebase.database() # reference to the database service
time.sleep(1)
stream = db.child("172").child("22").stream(stream_handler)
bot = telepot.Bot(TOKEN)
MessageLoop(bot, handle).run_as_thread()
print ('Listening ...')
# Keep the program running.
while 1:
time.sleep(10)
| [
11748,
25064,
198,
11748,
640,
198,
11748,
5735,
13059,
198,
6738,
5735,
13059,
13,
26268,
1330,
16000,
39516,
198,
11748,
12972,
260,
8692,
198,
198,
7266,
3077,
796,
17635,
198,
198,
10468,
43959,
796,
13538,
198,
198,
11250,
796,
1391,... | 2.791667 | 264 |
#
# Copyright (c) 2019. JetBrains s.r.o.
# Use of this source code is governed by the MIT license that can be found in the LICENSE file.
#
from typing import Dict, Tuple
from ._frontend_ctx import FrontendContext
from ._mime_types import LETS_PLOT_JSON
from .._type_utils import standardize_dict
| [
2,
198,
2,
15069,
357,
66,
8,
13130,
13,
19013,
9414,
1299,
264,
13,
81,
13,
78,
13,
198,
2,
5765,
286,
428,
2723,
2438,
318,
21825,
416,
262,
17168,
5964,
326,
460,
307,
1043,
287,
262,
38559,
24290,
2393,
13,
198,
2,
198,
198,... | 3.191489 | 94 |
# -*- coding:utf-8 -*-
citys = {
'kunming': {
'country': 'China',
'population': 14000000,
'fact': 'elephint in kunming',
},
'beijing': {
'country': 'China',
'population': 2400000,
'fact': 'changcheng in beijing',
},
'shanghai': {
'country': 'China',
'population': 5000000,
'fact': 'shanghai is the famous city',
},
}
for city, info in citys.items():
print("City: " + city)
print(city + " is in " + info['country'] + '.')
print('population: ' + str(info['population']))
print('the fact of ' + city + ' is : ' + info['fact']) | [
2,
532,
9,
12,
19617,
25,
40477,
12,
23,
532,
9,
12,
198,
198,
19205,
82,
796,
1391,
198,
220,
220,
220,
705,
28374,
2229,
10354,
1391,
198,
220,
220,
220,
220,
220,
220,
220,
705,
19315,
10354,
705,
14581,
3256,
198,
220,
220,
... | 2.185567 | 291 |
import algorithms
import bounds
#Nodes should be a dictionary of key value pairing : node num to xy coordinates
#Edges are implied in the adjacency matrix
#Adjacency matrix will be n x n; where n is the number of nodes
| [
11748,
16113,
198,
11748,
22303,
198,
197,
2,
45,
4147,
815,
307,
257,
22155,
286,
1994,
1988,
27356,
1058,
10139,
997,
284,
2124,
88,
22715,
198,
197,
2,
7407,
3212,
389,
17142,
287,
262,
9224,
330,
1387,
17593,
220,
198,
197,
2,
2... | 3.733333 | 60 |
# -*- coding:utf8
import itertools
import time
import sys
alphabet = u' !"#$%&\'()*+,-./:;<=>?@[\]^_`{£}~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzϕϴΩβε'
alphabet_length = len(alphabet)
example_key = {alphabet[i]: alphabet[(i + 32) % alphabet_length] for i in xrange(alphabet_length)}
if __name__ == '__main__':
main() | [
2,
532,
9,
12,
19617,
25,
40477,
23,
198,
11748,
340,
861,
10141,
198,
11748,
640,
198,
11748,
25064,
198,
198,
17307,
8380,
796,
334,
6,
220,
2474,
29953,
4,
5,
43054,
3419,
9,
10,
12095,
19571,
25,
26,
27,
14804,
30,
31,
58,
5... | 2.142857 | 161 |
import os
| [
11748,
28686,
628,
198
] | 3 | 4 |
############################
# Module: ICT1002 #
# Language: Python2 #
# Lab Exercise 1-1 #
# Done By: bitxer #
############################
if __name__ == '__main__':
main()
| [
14468,
7804,
4242,
198,
2,
19937,
25,
314,
4177,
3064,
17,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
198,
2,
15417,
25,
11361,
17,
220,
220,
220,
220,
220,
220,
220,
1303,
198,
2,
3498,
32900,
352,
12,
16,
220,
220,
220... | 2.315217 | 92 |
#!/usr/bin/env python
# encoding: utf-8
"""
@version: v1.0
@author: Shijie Qin
@license: Apache Licence
@contact: qsj4work@gmail.com
@site: https://shijieqin.github.io
@software: PyCharm
@file: xmlrpc.py
@time: 2018/11/8 3:16 PM
"""
import xmlrpc.client
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
220,
220,
198,
2,
21004,
25,
3384,
69,
12,
23,
220,
198,
198,
37811,
220,
198,
31,
9641,
25,
410,
16,
13,
15,
220,
198,
31,
9800,
25,
911,
2926,
494,
31482,
220,
198,
31,
43085,
25,
... | 2.214876 | 121 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from datetime import timedelta
try:
from urllib.parse import urlparse
except ImportError: # python2
from urlparse import urlparse
import requests
from django.conf import settings
from django.db import models, transaction
from django.urls import reverse
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from .utils import get_hub_credentials, generate_random_string, get_domain
logger = logging.getLogger(__name__)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
11748,
18931,
198,
198,
6738,
4818,
8079,
1330,
28805,
12514,
198,
198,
28311,
25,
198,
220,
220,
2... | 3.252941 | 170 |
import logging
logger = logging.getLogger(__name__)
| [
11748,
18931,
628,
198,
6404,
1362,
796,
18931,
13,
1136,
11187,
1362,
7,
834,
3672,
834,
8,
628,
628
] | 3 | 19 |
import numpy as np
from scipy import stats
from copy import copy
import csv
import math
import pandas as pd
"""
Get the unique values for each column in a dataset.
- value_dic = {col_num:{row_value:index}}
"""
"""
Takes a dataset of strings and returns the
integer mapping of the string values.
- return_dic: True - return value_dic
- value_dic a dictionary of the string values
mapped with numeric value (index)
format: value_dic = {col_num:{row_value:index}}
"""
'''
Read and preprocess the data
- read
- obtain column name
- sample
- replace string with numeric value
- optional: write preprocessed data to file
'''
| [
11748,
299,
32152,
355,
45941,
198,
6738,
629,
541,
88,
1330,
9756,
198,
6738,
4866,
1330,
4866,
198,
11748,
269,
21370,
198,
11748,
10688,
198,
11748,
19798,
292,
355,
279,
67,
628,
198,
37811,
198,
3855,
262,
3748,
3815,
329,
1123,
... | 3.3 | 190 |
from pycsp3.problems.data.parsing import *
next_line()
data["height"] = next_int()
data["width"] = next_int()
next_line()
nBugTypes = next_int()
next_line()
bugTypesLength = [next_int() for i in range(nBugTypes)];
next_line()
nBugs = next_int()
next_line()
data["bugs"] = []
data["bugTypes"] = []
lst = [False] * nBugs
for index in range(nBugs):
bug = {"row": next_int(), "col": next_int(), "type": next_int()}
data["bugs"].append(bug)
if lst[bug["type"]] is False: lst[bug["type"]] = []
lst[bug["type"]].append(index)
for index, length in enumerate(bugTypesLength):
data["bugTypes"].append({"length": length, "cells": lst[index]})
| [
6738,
12972,
66,
2777,
18,
13,
1676,
22143,
13,
7890,
13,
79,
945,
278,
1330,
1635,
198,
198,
19545,
62,
1370,
3419,
198,
7890,
14692,
17015,
8973,
796,
1306,
62,
600,
3419,
198,
7890,
14692,
10394,
8973,
796,
1306,
62,
600,
3419,
1... | 2.492424 | 264 |
# 使用注册的方法来实现虚拟子类有两种方法:一个是使用修饰器,一个是使用register函数
import sys
sys.path.extend(r'/Users/liuzhongkai/Documents/Git/PythonScript/FluentPython/chapter11_Tombola/')
from random import randrange
from tombola import Tombola
# 使用第一种方法
@Tombola.register # 使用基类的register修饰器来创建虚拟子类
| [
2,
220,
45635,
18796,
101,
37345,
101,
37863,
234,
21410,
43095,
37345,
243,
30266,
98,
22522,
252,
163,
236,
108,
164,
247,
248,
162,
233,
253,
36310,
163,
109,
119,
17312,
231,
10310,
97,
163,
100,
235,
43095,
37345,
243,
171,
120,
... | 1.40625 | 192 |
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
# TEST SCENARIO COVERAGE
# ----------------------
# Methods Total : 78
# Methods Covered : 70
# Examples Total : 93
# Examples Tested : 93
# Coverage % : 100
# ----------------------
# Current Operation Coverage:
# PrivateEndpointConnections: 0/4
# Registries: 14/15
# Operations: 1/1
# Replications: 5/5
# AgentPools: 6/6
# Webhooks: 8/8
# ScopeMaps: 5/5
# Tokens: 5/5
import unittest
import azure.mgmt.containerregistry
from azure.core.exceptions import HttpResponseError
from devtools_testutils import AzureMgmtTestCase, RandomNameResourceGroupPreparer
AZURE_LOCATION = 'eastus'
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
198,
2,
10097,
45537,
198,
2,
15069,
357,
66,
8,
5413,
10501,
13,
1439,
2489,
10395,
13,
198,
2,
49962,
739,
262,
17168,
13789,
13,
4091,
13789,
13,
14116,
287,
262,
1628,
6808,
329,
198,
2,
... | 3.535849 | 265 |
from django.urls import path
from django.views.decorators.csrf import csrf_exempt
from graphene_django.views import GraphQLView
from api.schema import schema
urlpatterns = [
path(
"graphql",
csrf_exempt(
GraphQLView.as_view(graphiql=True, schema=schema))
),
]
| [
6738,
42625,
14208,
13,
6371,
82,
1330,
3108,
198,
6738,
42625,
14208,
13,
33571,
13,
12501,
273,
2024,
13,
6359,
41871,
1330,
269,
27891,
69,
62,
42679,
198,
6738,
42463,
62,
28241,
14208,
13,
33571,
1330,
29681,
9711,
7680,
198,
198,
... | 2.365079 | 126 |
#!/usr/bin/env python
"""
Parse novoalign log, write out qc and post to database
usage:
author: chase.mateusiak@gmail.com
output: {'librarySize': 35003, 'uniqueAlignment': 22737, 'multiMap': 1986, 'noMap': 10233, 'homopolymerFilter': 47, 'readLengthFilter': 0}
output: parsed novoalign log as csv
database_interaction: post to url
"""
# standard library imports
import sys
import os
import re
from json import dumps as json_dumps
import argparse
import request
from urllib.request import HTTPError
# third party imports
import pandas as pd
# extend python path to include utils dir
sys.path.extend([os.path.join(os.path.realpath(__file__), 'utils')])
# local imports
from utils.DatabaseInteraction import postData
def parseAlignmentLog(alignment_log_file_path):
"""
parse the information on the alignment out of a novoalign log
:param alignment_log_file_path: the filepath to a novoalign alignment log
:returns: a dictionary of the parsed data of the input file
"""
library_metadata_dict = {}
alignment_regex_dict = {'librarySize': r"(?<=Read Sequences:\s)\s*\d*",
'uniqueAlignment': r"(?<=Unique Alignment:\s)\s*\d*",
'multiMap': r"(?<=Multi Mapped:\s)\s*\d*",
'noMap': r"(?<=No Mapping Found:\s)\s*\d*",
'homopolymerFilter': r"(?<=Homopolymer Filter:\s)\s*\d*",
'readLengthFilter': r"(?<=Read Length:\s)\s*\d*"}
# open the log path
alignment_file = open(alignment_log_file_path, 'r')
alignment_file_text = alignment_file.read()
# loop over alignment_regex dict and enter values extracted from alignment_file into alignment_metadata_dict
for alignment_category, regex_pattern in alignment_regex_dict.items():
# extract the value corresponding to the alignment_category regex (see alignment_regex_dict)
try:
extracted_value = int(re.findall(regex_pattern, alignment_file_text)[0])
except ValueError:
msg = 'problem with file %s' % alignment_log_file_path
print(msg)
except IndexError:
print('No %s in %s. Value set to 0' % (alignment_category, alignment_log_file_path))
extracted_value = 0
# check that the value is both an int and not 0
if isinstance(extracted_value, int):
library_metadata_dict.setdefault(alignment_category, extracted_value)
else:
print('cannot find %s in %s' % (alignment_category, alignment_log_file_path))
# close the alignment_file and return
alignment_file.close()
return library_metadata_dict
if __name__ == "__main__":
main(sys.argv)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
37811,
198,
220,
220,
220,
2547,
325,
645,
13038,
31494,
2604,
11,
3551,
503,
10662,
66,
290,
1281,
284,
6831,
198,
220,
220,
220,
8748,
25,
198,
220,
220,
220,
1772,
25,
15505,... | 2.500454 | 1,101 |
#!/usr/bin/env python
"""
Parsing GO Accession from a table file produced by InterProScan and mapping to GOSlim.
(c) Chien-Yueh Lee 2018 / MIT Licence
kinomoto[AT]sakura[DOT]idv[DOT]tw
"""
from __future__ import print_function
from os import path
import sys
import pandas as pd
from goatools.obo_parser import GODag
from goatools.mapslim import mapslim
from joblib import Parallel, delayed
import optparse
p = optparse.OptionParser("%prog [options] <eggnog_diamond_file> <go_obo_file>")
p.add_option("-o", "--out", dest="output_filename", help="Directory to store " "the output file [default: GO_term_annotation.txt]", action="store", type="string", default="GO_term_annotation.txt")
p.add_option("-g", "--goslim", dest="goslim_obo_file", action="store",
help="The .obo file for the most current GO Slim terms "
"[default: Null]", type="string", default=None)
p.add_option("-O", "--goslim_out", dest="goslim_output_filename", action="store", help="Directory to store the output file [default: " "GOSlim_annotation.txt]", type="string", default="GOSlim_annotation.txt")
p.add_option("-t", "--goslim_type", dest="goslim_type", action="store", type="string", default="direct", help="One of `direct` or `all`. Defines "
"whether the output should contain all GOSlim terms (all "
"ancestors) or only direct GOSlim terms (only direct "
"ancestors) [default: direct]")
p.add_option("-s", "--sort", dest="is_sort", action="store_true", default=False, help="Sort the output table [default: False]")
opts, args = p.parse_args()
# check for correct number of arguments
if len(args) != 2:
p.print_help()
sys.exit(1)
interpro_file = args[0]
assert path.exists(interpro_file), "file %s not found!" % interpro_file
obo_file = args[1]
assert path.exists(obo_file), "file %s not found!" % obo_file
# check that --goslim is set
USE_SLIM = False
if (opts.goslim_obo_file is not None):
assert path.exists(opts.goslim_obo_file), "file %s not found!" % opts.goslim_obo_file
USE_SLIM = True
# check that slim_out is either "direct" or "all" and set according flag
if opts.goslim_type.lower() == "direct":
ONLY_DIRECT = True
elif opts.goslim_type.lower() == "all":
ONLY_DIRECT = False
else:
p.print_help()
sys.exit(1)
# load InterProScan_tsv_file
interpro_table = pd.read_csv(interpro_file, sep='\t',skiprows=3,skipfooter=3,engine='python')
#interpro_go = interpro_table[['#query_name', 'GO_terms']]
all_protein=list(interpro_table['#query_name'])
gos=list(interpro_table['GO_terms'])
# load obo files
go = GODag(obo_file, load_obsolete=True)
output_hd = ['Protein Accession', 'GO Category', 'GO Accession', 'GO Description', 'GO Level']
output_table = pd.DataFrame(columns=output_hd)
#len(all_protein)
# start to annotate
results=Parallel(n_jobs=15)(delayed(tmp_func)(pro) for pro in range(len(all_protein)))
output_hd = ['Protein Accession', 'GO Category', 'GO Accession', 'GO Description', 'GO Level']
output_table = pd.DataFrame(columns=output_hd)
output_table=output_table.append(pd.concat(results))
# write the output
if opts.is_sort:
output_table[output_hd].sort_values(by=['Protein Accession', 'GO Category', 'GO Level']).to_csv(opts.output_filename, sep="\t", index=False)
if USE_SLIM:
output_slim_table[output_slim_hd].sort_values(by=['Protein Accession', 'GO Category', 'GOSlim Level']).to_csv(opts.goslim_output_filename, sep="\t", index=False)
else:
output_table[output_hd].to_csv(opts.output_filename, sep="\t", index=False)
if USE_SLIM:
output_slim_table[output_slim_hd].to_csv(opts.goslim_output_filename, sep="\t", index=False) | [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
37811,
198,
47,
945,
278,
10351,
8798,
295,
422,
257,
3084,
2393,
4635,
416,
4225,
2964,
33351,
290,
16855,
284,
402,
2640,
2475,
13,
198,
7,
66,
8,
609,
2013,
12,
56,
518,
71,
574... | 2.645743 | 1,386 |
import socket
from core.redis import rds
from core.triage import Triage
from core.parser import ScanParser
from db.db_ports import database_ports
| [
11748,
17802,
198,
198,
6738,
4755,
13,
445,
271,
220,
220,
1330,
374,
9310,
198,
6738,
4755,
13,
83,
4087,
220,
1330,
309,
4087,
198,
6738,
4755,
13,
48610,
220,
1330,
20937,
46677,
198,
6738,
20613,
13,
9945,
62,
3742,
220,
1330,
... | 3.183673 | 49 |
# from functools import reduce
wordlen = [2,3,4,2]
sum = reduce(lambda x,y : x+y ,wordlen )
print(sum)
print(sum/len(wordlen))
| [
2,
422,
1257,
310,
10141,
1330,
4646,
198,
4775,
11925,
796,
685,
17,
11,
18,
11,
19,
11,
17,
60,
198,
16345,
796,
4646,
7,
50033,
2124,
11,
88,
1058,
2124,
10,
88,
837,
4775,
11925,
1267,
198,
4798,
7,
16345,
8,
197,
198,
4798,... | 2.37037 | 54 |
import sys
import requests
import urllib.request as urllib2
import json
from json import JSONDecodeError
HOST = 'localhost'
IP=HOST # temporary
PORT = 1234
VERSION = 'v1'
BASE_URL = 'http://' + HOST + ':' + str(PORT) + '/' + VERSION + '/'
HEADERS = {'Content-Type': 'application/json'}
VERBOSE=False
SUID_LIST = 'suid'
BASE_URL_NETWORK = BASE_URL + 'networks'
def api(namespace=None, command="", PARAMS={}, body=None, host=HOST, port=str(PORT), version=VERSION, method="POST", verbose=VERBOSE, url=None, parse_params=True):
"""
General function for interacting with Cytoscape API.
:param namespace: namespace where the request should be executed. eg. "string"
:param commnand: command to execute. eg. "protein query"
:param PARAMs: a dictionary with the parameters. Check your swagger normaly running on
http://localhost:1234/v1/swaggerUI/swagger-ui/index.html?url=http://localhost:1234/v1/commands/swagger.json
:param host: cytoscape host address, default=cytoscape_host
:param port: cytoscape port, default=1234
:param version: API version
:param method: type of http call, ie. "POST" or "GET" or "HELP".
:param verbose: print more information
:returns: For "POST" the data in the content's response. For "GET" None.
eg.
cytoscape("string","pubmed query",{"pubmed":"p53 p21","limit":"50"})
"""
if url:
baseurl=url
else:
if namespace:
baseurl="http://"+str(host)+":"+str(port)+"/"+str(version)+"/commands/"+str(namespace)+"/"+str(command)
else:
baseurl="http://"+str(host)+":"+str(port)+"/"+str(version)+"/commands"
if (method == "GET") or (method == "G"):
URL=baseurl
if verbose:
print("'"+URL+"'")
sys.stdout.flush()
r = requests.get(url = URL, params=PARAMS)
verbose_=checkresponse(r, verbose=verbose)
if (verbose) or (verbose_):
print("'"+URL+"'")
sys.stdout.flush()
if verbose_:
res=verbose_
else:
res=r
if (method == "DELETE"):
URL=baseurl
if verbose:
print("'"+URL+"'")
sys.stdout.flush()
r = requests.delete(url = URL)
verbose_=checkresponse(r, verbose=verbose)
if (verbose) or (verbose_):
print("'"+URL+"'")
sys.stdout.flush()
res=r.json()
if len(res["errors"]) > 0:
raise ValueError(res["errors"][0])
elif (method == "POST") or (method == "P"):
if verbose:
print("'"+baseurl+"'")
sys.stdout.flush()
r = requests.post(url = baseurl, json = PARAMS)
verbose_=checkresponse(r, verbose=verbose)
if (verbose) or (verbose_):
verbose=True
print(r.content)
sys.stdout.flush()
res=r.json()
if "errors" in res.keys():
if len(res["errors"]) > 0:
raise ValueError(res["errors"][0])
if not verbose:
if "data" in res.keys():
res=res["data"]
else:
res=verbose_
elif (method == "PUT"):
if verbose:
print("'"+baseurl+"'")
sys.stdout.flush()
r = requests.put(url = baseurl, json = body)
verbose_=checkresponse(r, verbose=verbose)
if (verbose) or (verbose_):
verbose=True
print(r.content)
sys.stdout.flush()
try:
res=r.json()
if "errors" in res.keys():
if len(res["errors"]) > 0:
raise ValueError(res["errors"][0])
except JSONDecodeError:
if not r.text:
res = {}
else:
raise
elif (method=="HTML") or (method == "H") or (method=="HELP"):
URL = baseurl
if verbose:
print("'"+URL+"'")
sys.stdout.flush()
response = requests.get(URL, params=PARAMS)
res = response.text.split("\n")
res="\n".join([ clean(x) for x in res ])
res=handle_status_codes(res,verbose=verbose)
return res
| [
11748,
25064,
198,
11748,
7007,
198,
11748,
2956,
297,
571,
13,
25927,
355,
2956,
297,
571,
17,
197,
197,
198,
11748,
33918,
628,
198,
6738,
33918,
1330,
19449,
10707,
1098,
12331,
198,
198,
39,
10892,
796,
705,
36750,
6,
198,
4061,
2... | 2.074314 | 2,005 |
# python3
# Copyright 2021 InstaDeep Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# type: ignore
import math
import numpy as np
from mava.utils.environments.RoboCup_env.robocup_utils.game_object import Flag
from mava.utils.environments.RoboCup_env.robocup_utils.util_functions import (
rad_rot_to_xy,
)
true_flag_coords = Flag.FLAG_COORDS
def rotate(origin, point, angle):
"""
Rotate a point counterclockwise by a given angle around a given origin.
The angle should be given in radians.
"""
ox, oy = origin
px, py = point
qx = ox + math.cos(angle) * (px - ox) - math.sin(angle) * (py - oy)
qy = oy + math.sin(angle) * (px - ox) + math.cos(angle) * (py - oy)
return qx, qy
class WorldModel:
"""
Holds and updates the model of the world as known from current and past
data.
"""
# constants for team sides
SIDE_L = "l"
SIDE_R = "r"
class PlayModes:
"""
Acts as a static class containing variables for all valid play modes.
The string values correspond to what the referee calls the game modes.
"""
BEFORE_KICK_OFF = "before_kick_off"
PLAY_ON = "play_on"
TIME_OVER = "time_over"
KICK_OFF_L = "kick_off_l"
KICK_OFF_R = "kick_off_r"
KICK_IN_L = "kick_in_l"
KICK_IN_R = "kick_in_r"
FREE_KICK_L = "free_kick_l"
FREE_KICK_R = "free_kick_r"
CORNER_KICK_L = "corner_kick_l"
CORNER_KICK_R = "corner_kick_r"
GOAL_KICK_L = "goal_kick_l"
GOAL_KICK_R = "goal_kick_r"
DROP_BALL = "drop_ball"
OFFSIDE_L = "offside_l"
OFFSIDE_R = "offside_r"
class RefereeMessages:
"""
Static class containing possible non-mode messages sent by a referee.
"""
# these are referee messages, not play modes
FOUL_L = "foul_l"
FOUL_R = "foul_r"
GOALIE_CATCH_BALL_L = "goalie_catch_ball_l"
GOALIE_CATCH_BALL_R = "goalie_catch_ball_r"
TIME_UP_WITHOUT_A_TEAM = "time_up_without_a_team"
TIME_UP = "time_up"
HALF_TIME = "half_time"
TIME_EXTENDED = "time_extended"
# these are special, as they are always followed by '_' and an int of
# the number of goals scored by that side so far. these won't match
# anything specifically, but goals WILL start with these.
GOAL_L = "goal_l_"
GOAL_R = "goal_r_"
def __init__(self, action_handler):
"""
Create the world model with default values and an ActionHandler class it
can use to complete requested actions.
"""
# we use the action handler to complete complex commands
self.ah = action_handler
# these variables store all objects for any particular game step
self.ball = None
self.flags = []
self.goals = []
self.players = []
self.lines = []
# the default position of this player, its home position
self.home_point = (None, None)
# scores for each side
self.score_l = 0
self.score_r = 0
# the name of the agent's team
self.teamname = None
# handle player information, like uniform number and side
self.side = None
self.uniform_number = None
# stores the most recent message heard
self.last_message = None
# the mode the game is currently in (default to not playing yet)
self.play_mode = WorldModel.PlayModes.BEFORE_KICK_OFF
# Obs updated
# self.obs_updated = False
# body state
self.view_width = None
self.view_quality = None
self.stamina = None
self.effort = None
self.speed_amount = None
self.speed_direction = None
self.neck_direction = None
self.new_data = False
# counts of actions taken so far
self.kick_count = None
self.dash_count = None
self.turn_count = None
self.say_count = None
self.turn_neck_count = None
self.catch_count = None
self.move_count = None
self.change_view_count = None
# apparent absolute player coordinates and neck/body directions
self.abs_coords = (None, None)
self.abs_body_dir = None
self.abs_neck_dir = None
# create a new server parameter object for holding all server params
self.server_parameters = ServerParameters()
def process_new_info(self, ball, flags, goals, players, lines):
"""
Update any internal variables based on the currently available
information. This also calculates information not available directly
from server-reported messages, such as player coordinates.
"""
# update basic information
self.ball = ball
self.flags = flags
self.goals = goals
self.players = players
self.lines = lines
self.__calculate_abs_info()
def is_playon(self):
"""
Tells us whether it's play time
"""
return (
self.play_mode == WorldModel.PlayModes.PLAY_ON
or self.play_mode == WorldModel.PlayModes.KICK_OFF_L
or self.play_mode == WorldModel.PlayModes.KICK_OFF_R
or self.play_mode == WorldModel.PlayModes.KICK_IN_L
or self.play_mode == WorldModel.PlayModes.KICK_IN_R
or self.play_mode == WorldModel.PlayModes.FREE_KICK_L
or self.play_mode == WorldModel.PlayModes.FREE_KICK_R
or self.play_mode == WorldModel.PlayModes.CORNER_KICK_L
or self.play_mode == WorldModel.PlayModes.CORNER_KICK_R
or self.play_mode == WorldModel.PlayModes.GOAL_KICK_L
or self.play_mode == WorldModel.PlayModes.GOAL_KICK_R
or self.play_mode == WorldModel.PlayModes.DROP_BALL
or self.play_mode == WorldModel.PlayModes.OFFSIDE_L
or self.play_mode == WorldModel.PlayModes.OFFSIDE_R
)
def is_before_kick_off(self):
"""
Tells us whether the game is in a pre-kickoff state.
"""
return self.play_mode == WorldModel.PlayModes.BEFORE_KICK_OFF
def is_kick_off_us(self):
"""
Tells us whether it's our turn to kick off.
"""
ko_left = WorldModel.PlayModes.KICK_OFF_L
ko_right = WorldModel.PlayModes.KICK_OFF_R
# return whether we're on the side that's kicking off
return (
self.side == WorldModel.SIDE_L
and self.play_mode == ko_left
or self.side == WorldModel.SIDE_R
and self.play_mode == ko_right
)
def is_dead_ball_them(self):
"""
Returns whether the ball is in the other team's posession and it's a
free kick, corner kick, or kick in.
"""
# shorthand for verbose constants
kil = WorldModel.PlayModes.KICK_IN_L
kir = WorldModel.PlayModes.KICK_IN_R
fkl = WorldModel.PlayModes.FREE_KICK_L
fkr = WorldModel.PlayModes.FREE_KICK_R
ckl = WorldModel.PlayModes.CORNER_KICK_L
ckr = WorldModel.PlayModes.CORNER_KICK_R
# shorthand for whether left team or right team is free to act
pm = self.play_mode
free_left = pm == kil or pm == fkl or pm == ckl
free_right = pm == kir or pm == fkr or pm == ckr
# return whether the opposing side is in a dead ball situation
if self.side == WorldModel.SIDE_L:
return free_right
else:
return free_left
def is_ball_kickable(self):
"""
Tells us whether the ball is in reach of the current player.
"""
# ball must be visible, not behind us, and within the kickable margin
return (
self.ball is not None
and self.ball.distance is not None
and self.ball.distance <= self.server_parameters.kickable_margin
)
def get_ball_speed_max(self):
"""
Returns the maximum speed the ball can be kicked at.
"""
return self.server_parameters.ball_speed_max
def get_stamina(self):
"""
Returns the agent's current stamina amount.
"""
return self.stamina
def get_stamina_max(self):
"""
Returns the maximum amount of stamina a player can have.
"""
return self.server_parameters.stamina_max
def turn_body_to_object(self, obj):
"""
Turns the player's body to face a particular object.
"""
self.ah.turn(obj.direction)
class ServerParameters:
"""A storage container for all the settings of the soccer server."""
def __init__(self):
"""Initialize default parameters for a server."""
self.audio_cut_dist = 50
self.auto_mode = 0
self.back_passes = 1
self.ball_accel_max = 2.7
self.ball_decay = 0.94
self.ball_rand = 0.05
self.ball_size = 0.085
self.ball_speed_max = 2.7
self.ball_stuck_area = 3
self.ball_weight = 0.2
self.catch_ban_cycle = 5
self.catch_probability = 1
self.catchable_area_l = 2
self.catchable_area_w = 1
self.ckick_margin = 1
self.clang_advice_win = 1
self.clang_define_win = 1
self.clang_del_win = 1
self.clang_info_win = 1
self.clang_mess_delay = 50
self.clang_mess_per_cycle = 1
self.clang_meta_win = 1
self.clang_rule_win = 1
self.clang_win_size = 300
self.coach = 0
self.coach_port = 6001
self.coach_w_referee = 0
self.connect_wait = 300
self.control_radius = 2
self.dash_power_rate = 0.006
self.drop_ball_time = 200
self.effort_dec = 0.005
self.effort_dec_thr = 0.3
self.effort_inc = 0.01
self.effort_inc_thr = 0.6
self.effort_init = 1
self.effort_min = 0.6
self.forbid_kick_off_offside = 1
self.free_kick_faults = 1
self.freeform_send_period = 20
self.freeform_wait_period = 600
self.fullstate_l = 0
self.fullstate_r = 0
self.game_log_compression = 0
self.game_log_dated = 1
self.game_log_dir = "./"
self.game_log_fixed = 0
self.game_log_fixed_name = "rcssserver"
self.game_log_version = 3
self.game_logging = 1
self.game_over_wait = 100
self.goal_width = 14.02
self.goalie_max_moves = 2
self.half_time = 300
self.hear_decay = 1
self.hear_inc = 1
self.hear_max = 1
self.inertia_moment = 5
self.keepaway = 0
self.keepaway_length = 20
self.keepaway_log_dated = 1
self.keepaway_log_dir = "./"
self.keepaway_log_fixed = 0
self.keepaway_log_fixed_name = "rcssserver"
self.keepaway_logging = 1
self.keepaway_start = -1
self.keepaway_width = 20
self.kick_off_wait = 100
self.kick_power_rate = 0.027
self.kick_rand = 0
self.kick_rand_factor_l = 1
self.kick_rand_factor_r = 1
self.kickable_margin = 0.7
self.landmark_file = "~/.rcssserver-landmark.xml"
self.log_date_format = "%Y%m%d%H%M-"
self.log_times = 0
self.max_goal_kicks = 3
self.maxmoment = 180
self.maxneckang = 90
self.maxneckmoment = 180
self.maxpower = 100
self.minmoment = -180
self.minneckang = -90
self.minneckmoment = -180
self.minpower = -100
self.nr_extra_halfs = 2
self.nr_normal_halfs = 2
self.offside_active_area_size = 2.5
self.offside_kick_margin = 9.15
self.olcoach_port = 6002
self.old_coach_hear = 0
self.pen_allow_mult_kicks = 1
self.pen_before_setup_wait = 30
self.pen_coach_moves_players = 1
self.pen_dist_x = 42.5
self.pen_max_extra_kicks = 10
self.pen_max_goalie_dist_x = 14
self.pen_nr_kicks = 5
self.pen_random_winner = 0
self.pen_ready_wait = 50
self.pen_setup_wait = 100
self.pen_taken_wait = 200
self.penalty_shoot_outs = 1
self.player_accel_max = 1
self.player_decay = 0.4
self.player_rand = 0.1
self.player_size = 0.3
self.player_speed_max = 1.2
self.player_weight = 60
self.point_to_ban = 5
self.point_to_duration = 20
self.port = 6000
self.prand_factor_l = 1
self.prand_factor_r = 1
self.profile = 0
self.proper_goal_kicks = 0
self.quantize_step = 0.1
self.quantize_step_l = 0.01
self.record_messages = 0
self.recover_dec = 0.002
self.recover_dec_thr = 0.3
self.recover_init = 1
self.recover_min = 0.5
self.recv_step = 10
self.say_coach_cnt_max = 128
self.say_coach_msg_size = 128
self.say_msg_size = 10
self.send_comms = 0
self.send_step = 150
self.send_vi_step = 100
self.sense_body_step = 100
self.simulator_step = 100
self.slow_down_factor = 1
self.slowness_on_top_for_left_team = 1
self.slowness_on_top_for_right_team = 1
self.stamina_inc_max = 45
self.stamina_max = 4000
self.start_goal_l = 0
self.start_goal_r = 0
self.stopped_ball_vel = 0.01
self.synch_micro_sleep = 1
self.synch_mode = 0
self.synch_offset = 60
self.tackle_back_dist = 0.5
self.tackle_cycles = 10
self.tackle_dist = 2
self.tackle_exponent = 6
self.tackle_power_rate = 0.027
self.tackle_width = 1
self.team_actuator_noise = 0
self.text_log_compression = 0
self.text_log_dated = 1
self.text_log_dir = "./"
self.text_log_fixed = 0
self.text_log_fixed_name = "rcssserver"
self.text_logging = 1
self.use_offside = 1
self.verbose = 0
self.visible_angle = 90
self.visible_distance = 3
self.wind_ang = 0
self.wind_dir = 0
self.wind_force = 0
self.wind_none = 0
self.wind_rand = 0
self.wind_random = 0
| [
2,
21015,
18,
198,
2,
15069,
33448,
2262,
64,
29744,
12052,
13,
1439,
2489,
10395,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
284... | 2.142239 | 6,932 |
from SteeringModule.Steering import Steering
import time
steer=Steering(14,0,180,15,90,180,36,160)
steer.setup()
time.sleep(2)
for i in range(0,900):
steer.Up()
for i in range(0,900):
steer.Down()
for i in range(0,900):
steer.Left()
for i in range(0,900):
steer.Right()
steer.specify(80,120)
steer.specify(20,100)
steer.specify(170,180)
steer.cleanup()
| [
6738,
2441,
1586,
26796,
13,
7447,
1586,
1330,
2441,
1586,
198,
11748,
640,
198,
4169,
263,
28,
7447,
1586,
7,
1415,
11,
15,
11,
15259,
11,
1314,
11,
3829,
11,
15259,
11,
2623,
11,
14198,
8,
198,
4169,
263,
13,
40406,
3419,
198,
2... | 2.220859 | 163 |
# coding=utf-8
import struct
import datetime
from abc import ABCMeta, abstractmethod
from weakref import WeakKeyDictionary
class DWORDValue(object):
"""A descriptor for DWORD values"""
class WORDValue(object):
"""A descriptor for WORD values"""
class BYTEValue(object):
"""A descriptor for BYTE values"""
class CABFileFormat(object):
"""
It provides a common set of methods for managing cabs
"""
__metaclass__ = ABCMeta
@abstractmethod
@abstractmethod
@abstractmethod
@abstractmethod
class CFHEADER(object):
"""
typedef struct _CFHEADER
{
u1 signature[4]; /*inet file signature */
u4 reserved1; /* Reserved field, set to zero. ??? */
u4 cbCabinet; /* size of this cabinet file in bytes */
u4 reserved2; /* Reserved field, set to zero. ??? */
u4 coffFiles; /* offset of the first CFFILE entry */
u4 reserved3; /* Reserved field, set to zero. ??? */
u1 versionMinor; /* cabinet file format version, minor */
u1 versionMajor; /* cabinet file format version, major */
u2 cFolders; /* number of CFFOLDER entries in this */
/* cabinet */
u2 cFiles; /* number of CFFILE entries in this cabinet */
u2 flags; /* cabinet file option indicators */
u2 setID; /* must be the same for all cabinets in a */
/* set */
u2 iCabinet; /* number of this cabinet file in a set */
u2 cbCFHeader; /* (optional) size of per-cabinet reserved */
/* area */
u1 cbCFFolder; /* (optional) size of per-folder reserved */
/* area */
u1 cbCFData; /* (optional) size of per-datablock reserved */
/* area */
u1 abReserve[]; /* (optional) per-cabinet reserved area */
u1 szCabinetPrev[]; /* (optional) name of previous cabinet file */
u1 szDiskPrev[]; /* (optional) name of previous disk */
u1 szCabinetNext[]; /* (optional) name of next cabinet file */
u1 szDiskNext[]; /* (optional) name of next disk */
} CFHEADER, *PCFHEADER;
"""
# flags Values
cfhdrPREV_CABINET = 0x0001
cfhdrNEXT_CABINET = 0x0002
cfhdrRESERVE_PRESENT = 0x0004
@property
@signature.setter
reserved1 = DWORDValue()
cbCabinet = DWORDValue()
reserved2 = DWORDValue()
coffFiles = DWORDValue()
reserved3 = DWORDValue()
versionMinor = BYTEValue()
versionMajor = BYTEValue()
cFolders = WORDValue()
cFiles = WORDValue()
flags = WORDValue()
setID = WORDValue()
iCabinet = WORDValue()
# From this point, the fields are optional
cbCFHeader = WORDValue()
cbCFFolder = BYTEValue()
cbCFData = BYTEValue()
@property
@abReserve.setter
@property
@szCabinetPrev.setter
@property
@szDiskPrev.setter
@property
@szCabinetNext.setter
@property
@szDiskNext.setter
def __init__(self, flags, reserve):
"""
reserve is a dictionary with the follow
{
'cbCFHeader' : value,
'cbCFFolder' : value,
'cbCFData' : value
}
"""
self.signature = "MSCF"
self.reserved1 = 0x00000000
self.cbCabinet = 0x00000000
self.reserved2 = 0x00000000
self.coffFiles = 0x00000000
self.reserved3 = 0x00000000
self.versionMinor = 0x03
self.versionMajor = 0x01
self.cFolders = 0x0000
self.cFiles = 0x0000
self.flags = flags
self.setID = 0x0000
self.iCabinet = 0x0000
self.cbCFHeader = reserve["cbCFHeader"] if reserve["cbCFHeader"] > 0 else 0x0000
self.cbCFFolder = reserve["cbCFFolder"] if reserve["cbCFFolder"] > 0 else 0x00
self.cbCFData = reserve["cbCFData"] if reserve["cbCFData"] > 0 else 0x00
if self.flags & CFHEADER.cfhdrRESERVE_PRESENT:
self.abReserve = "\x41" * self.cbCFHeader
else:
self.abReserve = ""
self.szCabinetPrev = ""
self.szDiskPrev = ""
self.szCabinetNext = ""
self.szDiskNext = ""
self.cffolder_list = []
@classmethod
@classmethod
class CFFOLDER(object):
"""
struct CFFOLDER
{
u4 coffCabStart; /* offset of the first CFDATA block in this
/* folder */
u2 cCFData; /* number of CFDATA blocks in this folder */
u2 typeCompress; /* compression type indicator */
u1 abReserve[]; /* (optional) per-folder reserved area */
};
"""
tcompMASK_TYPE = 0x000F # Mask for compression type
tcompTYPE_NONE = 0x0000 # No compression
tcompTYPE_MSZIP = 0x0001 # MSZIP
tcompTYPE_QUANTUM = 0x0002 # Quantum
tcompTYPE_LZX = 0x0003 # LZX
# Absolute file offset of first CFDATA block for THIS folder
coffCabStart = DWORDValue()
cCFData = WORDValue()
typeCompress = WORDValue()
@property
@abReserve.setter
# This is extra metadata for helping in the creation of cab files
# it isn´t used in the specification
@property
@name.setter
@classmethod
@classmethod
class CFFILE(object):
"""
struct CFFILE
{
u4 cbFile; /* uncompressed size of this file in bytes */
u4 uoffFolderStart; /* uncompressed offset of this file in the folder */
u2 iFolder; /* index into the CFFOLDER area */
u2 date; /* date stamp for this file */
u2 time; /* time stamp for this file */
u2 attribs; /* attribute flags for this file */
u1 szName[]; /* name of this file */
};
"""
#iFolder Values
ifoldTHIS_CABINET = 0x0000
ifoldCONTINUED_FROM_PREV = 0xFFFD
ifoldCONTINUED_TO_NEXT = 0xFFFE
ifoldCONTINUED_PREV_AND_NEXT = 0xFFFF
#attribs Values
_A_RDONLY = 0x01 # file is read-only
_A_HIDDEN = 0x02 # file is hidden
_A_SYSTEM = 0x04 # file is a system file
_A_ARCH = 0x20 # file modified since last backup
_A_EXEC = 0x40 # run after extraction
_A_NAME_IS_UTF = 0x80 # szName[] contains UTF
cbFile = DWORDValue()
uoffFolderStart = DWORDValue()
iFolder = WORDValue()
#format ((year - 1980) << 9)+(month << 5)+(day))
date = WORDValue()
#format (hour << 11)+(minute << 5)+(seconds/2)
time = WORDValue()
attribs = WORDValue()
@property
@szName.setter
@classmethod
@classmethod
@classmethod
class CFDATA(object):
"""
struct CFDATA
{
u4 csum; /* checksum of this CFDATA entry */
u2 cbData; /* number of compressed bytes in this block */
u2 cbUncomp; /* number of uncompressed bytes in this block */
u1 abReserve[]; /* (optional) per-datablock reserved area */
u1 ab[cbData]; /* compressed data bytes */
};
"""
csum = DWORDValue()
cbData = WORDValue()
cbUncomp = WORDValue()
@property
@abReserve.setter
@property
@ab.setter
@classmethod
| [
2,
19617,
28,
40477,
12,
23,
201,
198,
11748,
2878,
201,
198,
11748,
4818,
8079,
201,
198,
6738,
450,
66,
1330,
9738,
48526,
11,
12531,
24396,
201,
198,
6738,
4939,
5420,
1330,
28788,
9218,
35,
14188,
201,
198,
201,
198,
4871,
29652,
... | 2.080852 | 3,661 |
import json
from oic import rndstr
from oic.utils.keyio import build_keyjar
from otest.check import ERROR
from otest.test_setup import setup_conv
from oidctest.op.func import check_support
from oidctest.op.func import claims_locales
from oidctest.op.func import login_hint
from oidctest.op.func import store_sector_redirect_uris
from oidctest.op.func import sub_claims
from oidctest.op.func import ui_locales
from oidctest.op.func import set_response_where
from oidctest.op.func import set_state
from oidctest.op.func import static_jwk
from oidctest.op.func import set_discovery_issuer
from oidctest.op.func import set_essential_arg_claim
from oidctest.op.func import set_principal
from oidctest.op.func import multiple_return_uris
#from oidctest.op.func import redirect_uris_with_query_component
from oidctest.op.func import redirect_uri_with_query_component
from oidctest.op.func import redirect_uris_with_fragment
from oidctest.op.func import request_in_file
from oidctest.op.func import check_config
from oidctest.op.func import conditional_execution
from oidctest.op.func import essential_and_specific_acr_claim
from oidctest.op.func import id_token_hint
from oidctest.op.func import set_webfinger_resource
from oidctest.op.oper import AsyncAuthn
from oidctest.op.oper import Webfinger
from otest.events import EV_CONDITION
from otest.events import EV_PROTOCOL_RESPONSE
from otest.events import EV_RESPONSE
from oic.oic.message import AccessTokenResponse
KEYDEFS = [
{"type": "RSA", "key": '', "use": ["sig"]},
{"type": "EC", "crv": "P-256", "use": ["sig"]}
]
# def test_redirect_uris_with_query_component():
# _info = setup_conv()
# oper = AsyncAuthn(_info['conv'], _info['io'], None)
#
# oper.conv.entity.registration_info = {'redirect_uris': [
# 'https://example.org/authzcb']}
#
# redirect_uris_with_query_component(oper, {'foo': 'bar'})
#
# assert oper.req_args["redirect_uris"][0].endswith('?foo=bar')
| [
11748,
33918,
198,
198,
6738,
267,
291,
1330,
374,
358,
2536,
198,
6738,
267,
291,
13,
26791,
13,
2539,
952,
1330,
1382,
62,
2539,
9491,
198,
6738,
267,
9288,
13,
9122,
1330,
33854,
198,
6738,
267,
9288,
13,
9288,
62,
40406,
1330,
9... | 2.648469 | 751 |
# Import In-Built Modules
import logging
import asyncio
import aiopg.sa
from motor.motor_asyncio import AsyncIOMotorClient
from pymongo import MongoClient
from aiohttp import web
log = logging.getLogger(__name__)
async def init_pg(app: web.Application) -> None:
"""
A function that, when the server is started, connects to postgresql,
and after stopping it breaks the connection (after yield)
"""
try:
log.info(f'Initializing the PG database')
config = app['config']['postgres']
engine = await aiopg.sa.create_engine(**config)
app['db'] = engine
yield
app['db'].close()
await app['db'].wait_closed()
except Exception as e:
log.error(f'init_pg {e}')
raise e
async def init_mongodb(app: web.Application) -> None:
"""
Initializing the Mongo DB
:param app:
:return:
"""
try:
log.info(f'Initializing the MongoDB database Async & Sync Mongo DB')
await asyncio.sleep(1)
config = app['config']['mongodb']
_url = await _construct_db_url(config)
log.info(f'Mongo URL - {_url}')
# Creating the Pymongo DB instance
await _get_pymongo_instance(app, _url)
# Creating the AsyncIOMotorClient DB instance
await _get_asynciomotor_instance(app, _url)
except Exception as e:
log.error(f'init_mongodb {e}')
raise e
async def _construct_db_url(config):
"""
Construct the Mongo DB Url
mongodb://[username:password@]host1[:port1][,...hostN[:portN]][/[defaultauthdb][?options]]
:param config:
:return:
"""
return 'mongodb://{user}:{password}@{host}:{port}/{database}?authSource=admin'.format(
user=config['user'],
password=config['password'],
database=config['database'],
host=config['host'],
port=config['port'],
)
async def _get_asynciomotor_instance(app: web.Application, url) -> None:
    """
    Attach the asynchronous (motor) Mongo client to the app under
    'async_mongo' as {'client': ..., 'db': ...}.

    Note: the async driver is not used for creating new collections;
    the pymongo client covers that.

    :param app: aiohttp application
    :param url: MongoDB connection string
    :return: None
    """
    try:
        log.info(f'Getting Async-IO-motor instance')
        client = AsyncIOMotorClient(url)
        app['async_mongo'] = {'client': client, 'db': client['versiondb']}
        await asyncio.sleep(1)
    except Exception as e:
        log.error(f'_get_asynciomotor_instance {e}')
        raise e
async def _get_pymongo_instance(app: web.Application, url) -> None:
    """
    Attach the synchronous (pymongo) Mongo client to the app under
    'mongo' as {'client': ..., 'db': ...}.

    Used for collection management (creating/updating/deleting
    collections), which the async driver does not handle here.

    :param app: aiohttp application
    :param url: MongoDB connection string
    :return: None
    """
    try:
        log.info(f'Getting pymongo instance')
        client = MongoClient(url)
        app['mongo'] = {'client': client, 'db': client['versiondb']}
        await asyncio.sleep(1)
    except Exception as e:
        log.error(f'_get_pymongo_instance {e}')
        raise e
2,
17267,
554,
12,
39582,
3401,
5028,
198,
11748,
18931,
198,
11748,
30351,
952,
198,
11748,
257,
14922,
70,
13,
11400,
198,
6738,
5584,
13,
76,
20965,
62,
292,
13361,
952,
1330,
1081,
13361,
40,
2662,
20965,
11792,
198,
6738,
279,
49... | 2.394252 | 1,357 |
import maya.cmds as mc
import glTools.utils.attribute
import types
class Remap(object):
    '''
    Object wrapper for a remapValue node in Maya.

    Wraps the node "<remapName>_remap", reusing it if it already exists,
    and provides setters for the node's input, ranges and indexed value
    points.

    Fixes over the previous revision:
      - Python 3 compatibility: types.IntType / types.FloatType /
        types.StringTypes no longer exist; isinstance checks against
        (int, float) and str are used instead.
      - setAttribute referenced the undefined name "inputValue" in its
        already-connected branch, raising NameError.
    '''

    # CONSTANTS
    _REMAPSUFFIX = 'remap'

    def __init__( self,
                  remapName,
                  inputValue = None,
                  inputMin = None,
                  inputMax = None,
                  outputMin = None,
                  outputMax = None ):
        '''
        Remap object initialization
        @param remapName: RemapValue node name
        @type remapName: str
        @param inputValue: RemapValue input value or source plug. If None, leave at default
        @type inputValue: float or str or None
        @param inputMin: RemapValue node input minimum value. If None, leave at default
        @type inputMin: float or None
        @param inputMax: RemapValue node input maximum value. If None, leave at default
        @type inputMax: float or None
        @param outputMin: RemapValue node output minimum value. If None, leave at default
        @type outputMin: float or None
        @param outputMax: RemapValue node output maximum value. If None, leave at default
        @type outputMax: float or None
        '''
        # Reuse an existing node with the expected name, else create one.
        if mc.objExists('%s_%s' % (remapName, self._REMAPSUFFIX)):
            self._name = '%s_%s' % (remapName, self._REMAPSUFFIX)
        else:
            self.create(remapName)

        # Set Input (None means "leave at node default")
        if inputValue != None: self.setInput(inputValue)

        # Set Range
        self.setRange(inputMin, inputMax, outputMin, outputMax)

        # Initialize Index
        self.setIndex(0)

    def create( self, name ):
        '''
        Create a new remapValue node with the specified name.
        @param name: New node name.
        @type name: str
        '''
        self._name = mc.createNode('remapValue', name='%s_%s' % (name, self._REMAPSUFFIX))
        return self._name

    #==================
    # get
    #==================

    #==================
    # set
    #==================

    def setAttribute(self, attr, value):
        '''
        Set a remapValue node attribute value, or connect a source plug to it.
        @param attr: RemapValue attribute to set the value or source plug for.
        @type attr: str
        @param value: RemapValue attribute value or source plug. None is a no-op.
        @type value: int or float or str or None
        '''
        # None means "leave at default"
        if value == None: return

        # Numeric input: set the attribute value directly.
        # (Py3 fix: replaces types.IntType/types.FloatType.)
        if isinstance(value, (int, float)):
            try: mc.setAttr(attr, value)
            except: raise Exception('Error setting remapValue attribute "'+attr+'" value!')
            return

        # String input: interpret as a source plug and connect it.
        # (Py3 fix: replaces types.StringTypes.)
        elif isinstance(value, str):
            if glTools.utils.attribute.isAttr(value):
                if not mc.isConnected(value, attr):
                    try: mc.connectAttr(value, attr, f=True)
                    except: raise Exception('Error connecting remapValue attribute ("'+value+'" >> "'+attr+'")!')
                    return
                else:
                    # Bug fix: previously referenced the undefined name
                    # "inputValue" here (NameError); use "value".
                    print('RemapValue node attribute "'+attr+'" already connected to source plug "'+value+'"! Skipping...')
                    return
            else:
                raise Exception('Source plug value is not a valid attribute! ("'+value+'")')

        # Invalid Type
        raise Exception('Invalid value type specified for remapValue attribute "'+attr+'"! ('+str(type(value))+')!')

    def setInput(self, inputValue):
        '''
        Set remapValue node inputValue.
        @param inputValue: RemapValue node input value or source plug.
        @type inputValue: float or str
        '''
        attr = self._name + '.inputValue'
        self.setAttribute(attr, inputValue)

    def setInputMin(self, inputMin):
        '''
        Set remapValue node inputMin attribute value.
        @param inputMin: RemapValue node input minimum value or source plug.
        @type inputMin: float or str or None
        '''
        attr = self._name + '.inputMin'
        self.setAttribute(attr, inputMin)

    def setInputMax(self, inputMax):
        '''
        Set remapValue node inputMax attribute value.
        @param inputMax: Attribute value (or source plug) to set.
        @type inputMax: float or str or None
        '''
        attr = self._name + '.inputMax'
        self.setAttribute(attr, inputMax)

    def setOutputMin(self, outputMin):
        '''
        Set remapValue node outputMin attribute value.
        @param outputMin: Attribute value (or source plug) to set.
        @type outputMin: float or str or None
        '''
        attr = self._name + '.outputMin'
        self.setAttribute(attr, outputMin)

    def setOutputMax(self, outputMax):
        '''
        Set remapValue node outputMax attribute value.
        @param outputMax: Attribute value (or source plug) to set.
        @type outputMax: float or str or None
        '''
        attr = self._name + '.outputMax'
        self.setAttribute(attr, outputMax)

    def setInputRange( self,
                       inputMin = None,
                       inputMax = None ):
        '''
        Set remapValue node inputMin and inputMax attribute values.
        @param inputMin: Attribute value to set for inputMin. None skips it.
        @type inputMin: float or None
        @param inputMax: Attribute value to set for inputMax. None skips it.
        @type inputMax: float or None
        '''
        if inputMin != None: self.setInputMin(inputMin)
        if inputMax != None: self.setInputMax(inputMax)

    def setOutputRange( self,
                        outputMin = None,
                        outputMax = None ):
        '''
        Set remapValue node outputMin and outputMax attribute values.
        @param outputMin: Attribute value to set for outputMin. None skips it.
        @type outputMin: float or None
        @param outputMax: Attribute value to set for outputMax. None skips it.
        @type outputMax: float or None
        '''
        if outputMin != None: self.setOutputMin(outputMin)
        if outputMax != None: self.setOutputMax(outputMax)

    def setRange( self,
                  inputMin = None,
                  inputMax = None,
                  outputMin = None,
                  outputMax = None ):
        '''
        Set remapValue node inputMin, inputMax, outputMin and outputMax.
        Any argument left as None leaves that attribute untouched.
        @param inputMin: Attribute value to set for inputMin.
        @type inputMin: float or None
        @param inputMax: Attribute value to set for inputMax.
        @type inputMax: float or None
        @param outputMin: Attribute value to set for outputMin.
        @type outputMin: float or None
        @param outputMax: Attribute value to set for outputMax.
        @type outputMax: float or None
        '''
        self.setInputRange(inputMin, inputMax)
        self.setOutputRange(outputMin, outputMax)

    def setPoint( self,
                  index,
                  position = None,
                  value = None,
                  interpolation = None):
        '''
        Set a remap point on the remapValue node.
        @param index: Remap point index.
        @type index: int or str
        @param position: Remap point position.
        @type position: float or str
        @param value: Remap point value.
        @type value: float or str
        @param interpolation: Remap point interpolation.
        @type interpolation: int or str
        '''
        # Select the point index, then drive its individual channels.
        self.setIndex(index)
        self.setPosition(position)
        self.setValue(value)
        self.setInterpolation(interpolation)

    def setIndex(self, index):
        '''
        Set the current remapValue point index; the point setters below
        operate on this index.
        @param index: RemapValue point index.
        @type index: int
        '''
        self._index = index
        self._indexedName = '%s.value[%s]' % (self._name, index)

    def setPosition(self, position):
        '''
        Set the current remapValue point position.
        @param position: RemapValue point float position or source plug.
        @type position: float or str
        '''
        attr = self._indexedName + '.value_Position'
        self.setAttribute(attr, position)

    def setValue(self, value):
        '''
        Set the current remapValue point float value.
        @param value: RemapValue point float value or source plug.
        @type value: float or str
        '''
        attr = self._indexedName + '.value_FloatValue'
        self.setAttribute(attr, value)

    def setInterpolation(self, interpolation):
        '''
        Set the current remapValue point interpolation.
        @param interpolation: RemapValue point interpolation value or source plug.
        @type interpolation: int or str
        '''
        attr = self._indexedName + '.value_Interp'
        self.setAttribute(attr, interpolation)

    def connectInput(self, objectAttrName):
        '''
        Connect the given plug to this node's inputValue (no-op when the
        connection already exists).
        @param objectAttrName: Source plug to connect.
        @type objectAttrName: str
        '''
        if not mc.isConnected(objectAttrName, '%s.inputValue' % self._name):
            mc.connectAttr(objectAttrName, '%s.inputValue' % self._name, force=True)

    def connectOutput(self, dstAttr):
        '''
        Connect the remapValue node output to a destination plug.
        @param dstAttr: Destination plug for remapValue node output.
        @type dstAttr: str
        '''
        # Checks
        if not glTools.utils.attribute.isAttr(dstAttr):
            raise Exception('Destination attribute "'+dstAttr+'" is not a valid attribute! Unable to establish output connection...')

        # Connect Output
        outAttr = self._name + '.outValue'
        if not mc.isConnected(outAttr, dstAttr):
            try: mc.connectAttr(outAttr, dstAttr, f=True)
            except: raise Exception('Error connecting remapValue output ("'+outAttr+'" >> "'+dstAttr+'")!')
        else:
            print('RemapValue node output "'+outAttr+'" already connected to destination plug "'+dstAttr+'"! Skipping...')
| [
11748,
743,
64,
13,
28758,
82,
355,
36650,
198,
198,
11748,
1278,
33637,
13,
26791,
13,
42348,
198,
198,
11748,
3858,
198,
198,
4871,
3982,
499,
7,
15252,
2599,
198,
197,
7061,
6,
198,
197,
10267,
29908,
329,
816,
499,
11395,
10139,
... | 2.716359 | 3,032 |
# Caption-ranking script: extract noun-phrase candidates from a subtitle
# file with spaCy, then score a single video frame against them with CLIP.
import os  # bug fix: os.path.expanduser below was used without importing os

import spacy

nlp = spacy.load('en_core_web_sm')

path = './GMVbQ1UsMp8.txt'
with open(path, 'r') as file_to_read:
    subtitle = file_to_read.read()

doc = nlp(subtitle)

# Candidate captions are the noun chunks found in the subtitle text.
candidate_captions = []
for np in doc.noun_chunks:  # NOTE: "np" here is a spaCy noun phrase, not numpy
    print(np.text)
    candidate_captions.append(np.text)

import clip
import torch
from torchvision.datasets import CIFAR100
from PIL import Image

# Load the model
device = "cuda" if torch.cuda.is_available() else "cpu"
model, preprocess = clip.load('ViT-B/32', device)

# Download the dataset (only its download side effect is used here)
cifar100 = CIFAR100(root=os.path.expanduser("~/.cache"), download=True, train=False)

# Prepare the inputs
image = Image.open('./frames/out_image7541.jpg')
image_input = preprocess(image).unsqueeze(0).to(device)
text_inputs = torch.cat([clip.tokenize(f"a photo of a {c}") for c in candidate_captions]).to(device)

# Calculate features
with torch.no_grad():
    image_features = model.encode_image(image_input)
    text_features = model.encode_text(text_inputs)

# Pick the top 5 most similar labels for the image
image_features /= image_features.norm(dim=-1, keepdim=True)
text_features /= text_features.norm(dim=-1, keepdim=True)
similarity = (100.0 * image_features @ text_features.T).softmax(dim=-1)
values, indices = similarity[0].topk(5)

# Print the result
print("\nTop predictions:\n")
for value, index in zip(values, indices):
    print(f"{candidate_captions[index]:>16s}: {100 * value.item():.2f}%")
11748,
599,
1590,
198,
21283,
79,
796,
599,
1590,
13,
2220,
10786,
268,
62,
7295,
62,
12384,
62,
5796,
11537,
198,
6978,
796,
705,
19571,
15548,
53,
65,
48,
16,
5842,
28861,
23,
13,
14116,
6,
198,
4480,
1280,
7,
6978,
11,
705,
81,... | 2.738617 | 593 |
# Generated by Django 3.1.2 on 2020-10-27 20:31
from django.db import migrations
| [
2,
2980,
515,
416,
37770,
513,
13,
16,
13,
17,
319,
12131,
12,
940,
12,
1983,
1160,
25,
3132,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
628
] | 2.766667 | 30 |
from abc import ABC
from wai.common.decorator import ensure_error_type
from ..error import JSONSerialisationError
from ._JSONSerialisable import JSONSerialisable
from ._JSONDeserialisable import JSONDeserialisable, SelfType
class JSONBiserialisable(JSONSerialisable, JSONDeserialisable[SelfType], ABC):
    """
    Mix-in interface for classes that implement both JSONSerialisable
    and JSONDeserialisable[T], i.e. that can round-trip through JSON.
    """
    @ensure_error_type(JSONSerialisationError, "Error copying object using JSON serialisation: {0}")
    def json_copy(self, validate: bool = True) -> SelfType:
        """
        Create a copy of this object via a JSON round-trip (serialise,
        then deserialise).

        :param validate: Whether to validate the serialised JSON.
        :return: The copy of this object.
        """
        # Validate only on the serialisation side; deserialisation skips it.
        raw = self.to_raw_json(validate)
        return self.from_raw_json(raw, False)
| [
6738,
450,
66,
1330,
9738,
198,
198,
6738,
266,
1872,
13,
11321,
13,
12501,
273,
1352,
1330,
4155,
62,
18224,
62,
4906,
198,
198,
6738,
11485,
18224,
1330,
19449,
32634,
5612,
12331,
198,
6738,
47540,
40386,
32634,
43942,
1330,
19449,
3... | 2.968254 | 315 |
"""
Core USB packet sniffer packet backend for OpenVizsla.
"""
import crcmod
from enum import IntFlag
from .protocol import OVPacketHandler
class USBEventFlags(IntFlag):
    """ Flags an OV device can attach to a captured USB packet. """

    ERROR     = 0x01  # physical layer error
    OVERFLOW  = 0x02  # RX path overflow
    CLIPPED   = 0x04  # clipped by filter
    TRUNCATED = 0x08  # clipped due to packet length (> 800 bytes)
    FIRST     = 0x10  # first packet of capture session (capture hardware enabled)
    LAST      = 0x20  # last packet of capture session (capture hardware disabled)
class USBEventSink:
    """ Base class for USB event "sinks", which listen for USB events. """

    def handle_usb_packet(self, timestamp, buffer, flags):
        """ Receive one sniffed USB packet; subclasses override this.

        Args:
            timestamp -- The timestamp for the given packet.
            buffer    -- The packet payload as delivered by the sniffer.
            flags     -- Event flags attached to the packet.
        """
        # Default implementation intentionally drops the packet.
        pass
class USBSniffer(OVPacketHandler):
    """ USB Sniffer packet sink -- receives sniffed packets from the OpenVizsla. """

    HANDLED_PACKET_NUMBERS = [0xac, 0xad, 0xa0]
    USB_PACKET_REPORT = 0xa0

    # CRC-16/USB function (polynomial 0x18005).
    data_crc = staticmethod(crcmod.mkCrcFun(0x18005))

    def __init__(self, write_handler, high_speed=False):
        """ Set up our core USB Sniffer sink, which accepts wrapped USB events and passes them to our event sinks. """
        self.high_speed = high_speed
        self.got_start = False

        # Start off with an empty array of packet sinks -- sinks should be registered by calling ``register_sink``.
        self._sinks = []

        # Call the super-constructor.
        super().__init__(write_handler)

    def _packet_size(self, buf):
        """ Return the total size of the OV packet starting at ``buf``. """
        if buf[0] != self.USB_PACKET_REPORT:
            return 2
        else:
            # Payload length is little-endian in bytes 3..4, plus 8 header bytes.
            return (buf[4] << 8 | buf[3]) + 8

    def register_sink(self, sink):
        """ Registers a USBEventSink to receive any USB events. """
        self._sinks.append(sink)

    def handle_packet(self, buf):
        """ Separates the input flags from the core meta-data extracted from the OV USB packet.

        Bug fix: this was previously decorated ``@staticmethod`` while still
        taking ``self``, so calling it through an instance bound ``buf`` to
        ``self`` and raised TypeError. The decorator has been removed.
        """
        if buf[0] == self.USB_PACKET_REPORT:
            # Parse the flags and timestamp from our buffer.
            # TODO: replace with a call to struct.unpack
            flags = buf[1] | buf[2] << 8
            ts = buf[5] | buf[6] << 8 | buf[7] << 16

            # TODO: validate packet size (buf[3] and buf[4])?
            # TODO: get rid of me?
            if flags != 0 and flags != USBEventFlags.FIRST and flags != USBEventFlags.LAST:
                print("PERR: %04X (%s)" % (flags, self.decode_flags(flags)))

            if flags & USBEventFlags.FIRST:
                self.got_start = True

            if self.got_start:
                # NOTE(review): emit_usb_packet is not defined in this class;
                # presumably provided by a subclass or the base handler -- confirm.
                self.emit_usb_packet(ts, buf[8:], flags)

            if flags & USBEventFlags.LAST:
                self.got_start = False
        else:
            print("got interesting packet {}".format(buf[0]))
class USBSimplePrintSink(USBEventSink):
    """ Most basic sink for USB events: report them directly to the console. """
    # NOTE(review): a class-body import is unusual -- it binds ``crcmod`` as a
    # class attribute. It probably belongs at module level; confirm intent.
    import crcmod
    # CRC-16/USB function (polynomial 0x18005), shared by all instances.
    data_crc = staticmethod(crcmod.mkCrcFun(0x18005))
| [
37811,
198,
14055,
8450,
19638,
26300,
263,
19638,
30203,
329,
4946,
53,
528,
82,
5031,
13,
198,
37811,
198,
198,
11748,
1067,
66,
4666,
198,
198,
6738,
33829,
1330,
2558,
34227,
198,
6738,
764,
11235,
4668,
1330,
440,
8859,
8317,
25060... | 2.428675 | 1,374 |
from ..losses.cross_entropy import CrossEntropyLoss
from ..losses.euclidean import EuclideanLoss
from ..losses.angular import AngularLoss
from ..losses.mse import MSELoss
from ..losses.l1 import L1Loss
__all__ = ('CrossEntropyLoss', 'EuclideanLoss', 'AngularLoss', 'MSELoss', 'L1Loss') | [
6738,
11485,
22462,
274,
13,
19692,
62,
298,
28338,
1330,
6372,
14539,
28338,
43,
793,
198,
6738,
11485,
22462,
274,
13,
12496,
565,
485,
272,
1330,
48862,
485,
272,
43,
793,
198,
6738,
11485,
22462,
274,
13,
21413,
1330,
28147,
43,
7... | 2.72381 | 105 |
#
# Copyright 2014-2018 Neueda Ltd.
#
import Config
# Demo of the Config properties wrapper (note: Python 2 print syntax).
props = Config.properties ()
print props.getKeys ()

# A missing key raises; show the error rather than aborting the demo.
try:
    print props['invalid-key']
except Exception as e:
    print e

# Properties support item assignment and lookup.
props['test'] = '1234'
print props['test']
| [
2,
198,
2,
15069,
1946,
12,
7908,
3169,
1739,
64,
12052,
13,
198,
2,
198,
11748,
17056,
628,
198,
1676,
862,
796,
17056,
13,
48310,
7499,
198,
4798,
25744,
13,
1136,
40729,
7499,
198,
198,
28311,
25,
198,
220,
220,
220,
3601,
25744,... | 2.7875 | 80 |
import frappe
# @frappe.whitelist()
# # def get_wallet():
# # s = frappe.db.get_list('Wallet Transaction', filters={
# # 'docstatus': 1}, fields=['name', 'date', 'customer_name', 'product_price'])
# # return s
| [
11748,
5306,
27768,
201,
198,
201,
198,
201,
198,
2,
2488,
69,
430,
27768,
13,
1929,
270,
46331,
3419,
201,
198,
2,
1303,
825,
651,
62,
44623,
33529,
201,
198,
2,
1303,
220,
220,
220,
220,
264,
796,
5306,
27768,
13,
9945,
13,
1136... | 2.313725 | 102 |
##############################################################################
# Compute rank statistics
# WARNING: will download about 28 Go of data
# Authored by Ammar Mian, 10/01/2019
# e-mail: ammar.mian@centralesupelec.fr
# Modified by Antoine Collas, 10/2019
# e-mail: antoine.collas@centralesupelec.fr
##############################################################################
# Copyright 2018 @CentraleSupelec
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import os, sys, time
# The code is already multi threaded so we block OpenBLAS multi thread.
os.environ['OPENBLAS_NUM_THREADS'] = '1'
# import path of root repo
current_dir = os.path.dirname(os.path.abspath(__file__))
temp = os.path.dirname(os.path.dirname(current_dir))
sys.path.insert(1, temp)
import seaborn as sns
sns.set_style("darkgrid")
from LRST.generic_functions import *
import matplotlib.pyplot as plt
from LRST.monte_carlo_tools import *
from LRST.multivariate_images_tools import *
from LRST.change_detection_functions import *
from LRST.low_rank_statistics import *
from LRST.proportionality_statistics import *
from LRST.read_sar_data import *
from LRST.wavelet_functions import *
from compute_ROC_UAVSAR_dataset import load_UAVSAR
import random
if __name__ == '__main__':

    # Activate latex in figures (or not)
    LATEX_IN_FIGURES = False
    if LATEX_IN_FIGURES:
        enable_latex_infigures()

    # data
    # DEBUG mode for fast debugging (use a small patch of 200x200 pixels)
    DEBUG = False
    PATH = 'data/UAVSAR/'
    FULL_TIME_SERIES = False # if true: use the full time series, else: use only the first and last images of the time series
    image, _, X, Y = load_UAVSAR(PATH, DEBUG, FULL_TIME_SERIES)

    # Parameters
    # image is indexed (rows, cols, polarimetric channel p, time T) --
    # inferred from the slicing/transpose below; TODO confirm.
    n_r, n_c, p, T = image.shape
    windows_mask = np.ones((5,5))
    m_r, m_c = windows_mask.shape
    N = m_r*m_c
    METHODS = ['AIC', 'BIC', 'AICc', 'EDC']

    # Endless interactive loop: pick a random pixel, evaluate the rank
    # selection criteria on its local neighbourhood and plot them.
    # Each iteration blocks on plt.show() until the window is closed.
    while True:
        i_r = random.choice(list(range(int(m_r/2), n_r-int(m_r/2))))
        i_c = random.choice(list(range(int(m_c/2), n_c-int(m_c/2))))

        # Obtaining data corresponding to the neighborhood defined by the mask
        local_data = image[i_r-int(m_r/2):i_r+int(m_r/2)+1, i_c-int(m_c/2):i_c+int(m_c/2)+1, :, :]
        local_data = np.transpose(local_data, [2, 0, 1, 3])
        local_data = local_data.reshape((p,N*T))

        # Computing the function over the local data; mark each method's
        # minimising rank with a dot.
        for method in METHODS:
            criterion = SCM_rank_criterion(local_data, method=method)
            ranks = np.arange(criterion.shape[0])+1
            plt.plot(ranks, criterion, label=method)
            idx_min = criterion.argmin()
            plt.plot(ranks[idx_min],criterion[idx_min],'o')
        plt.legend()
        plt.show()
29113,
29113,
7804,
4242,
2235,
198,
2,
3082,
1133,
4279,
7869,
198,
2,
39410,
25,
481,
4321,
546,
2579,
1514,
286,
1366,
198,
2,
26828,
1850,
416,
1703,
3876,
337,
666,
11,
838,
14,
486,
14,
23344,
198,
2,
304,
12,
4529,
25,
716,... | 2.696327 | 1,225 |
# The following comments couldn't be translated into the new config version:
#cms_conditions_data/CMS_COND_20X_RPC"
import FWCore.ParameterSet.Config as cms
#
#RPC calibrations
#For now, no t0 corrections are applied in the reconstruction if fake calibration is used
#(the replace should be moved to main cfg in order to avoid warning message)
from CalibMuon.RPCCalibration.RPC_Calibration_cff import *
# Point the RPC calibration performance service at the 20X RPC
# conditions payload served via Frontier.
RPCCalibPerf.connect = 'frontier://cms_conditions_data/CMS_COND_20X_RPC'
| [
2,
383,
1708,
3651,
3521,
470,
307,
14251,
656,
262,
649,
4566,
2196,
25,
198,
198,
2,
46406,
62,
17561,
1756,
62,
7890,
14,
34,
5653,
62,
10943,
35,
62,
1238,
55,
62,
49,
5662,
1,
198,
11748,
48849,
14055,
13,
36301,
7248,
13,
... | 3.116883 | 154 |
from django.db import models
from django.contrib.auth.models import User
# Location class, used to determine from wich actual pos location the cashier
# is working (ex. Bar, restaurant, ...)
# Category class, used as a container for different type of items, related by
# area (ex. Kitchen, beverages, ...)
# Item class, basically the items that we'll sell (ex. Coke, Pizza, Costata,
# ...)
# Bill class, it stores a whole order made by one client
# class to store the single entries that when grouped together will form one,
# and just one bill
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
27530,
1330,
11787,
628,
198,
2,
13397,
1398,
11,
973,
284,
5004,
422,
266,
488,
4036,
1426,
4067,
262,
5003,
959,
198,
2,
318,
1762,
3... | 3.736486 | 148 |
#!/usr/bin/env python3
#coding=utf-8
import numpy as np
import time
from math import cos, sin, sqrt, pi, atan, asin, atan, atan2
from scipy.optimize import least_squares
from scipy.spatial.transform import Rotation
from angles import normalize_angle
import rospy
from origarm_ros.srv import ik
from origarm_ros.msg import *
import traceback
if __name__ == '__main__':
    try:
        # NOTE(review): ik_solver is not defined in this module; presumably
        # brought in via the wildcard import above -- confirm.
        IK = ik_solver()
        print('IK is finished')
    except rospy.ServiceException as exc:
        # Service-level failures from ROS are reported, not re-raised.
        print("IK call failed:"+str(exc))
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
66,
7656,
28,
40477,
12,
23,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
640,
198,
6738,
10688,
1330,
8615,
11,
7813,
11,
19862,
17034,
11,
31028,
11,
379,
272,
11,
... | 2.568627 | 204 |
import sys
import time
from pandalogger.PandaLogger import PandaLogger
from config import panda_config
import re
from re import error as ReError
# logger
_logger = PandaLogger().getLogger('RetrialModule')
NO_RETRY = 'no_retry'
INCREASE_MEM = 'increase_memory'
LIMIT_RETRY = 'limit_retry'
INCREASE_CPU = 'increase_cputime'
def timeit(method):
    """
    Decorator that logs the wall-clock execution time of the wrapped callable.

    Bug fix: the previous version returned the undefined name ``timed``,
    so applying ``@timeit`` raised NameError immediately.
    """
    def timed(*args, **kwargs):
        # Measure around the call and pass the result straight through.
        start = time.time()
        result = method(*args, **kwargs)
        elapsed = time.time() - start
        _logger.debug("%s took %.3f s" % (method.__name__, elapsed))
        return result
    return timed
def safe_match(pattern, message):
    """
    Wrapper around re.match with simple exception handling.

    (Docstring fix: this calls re.match -- anchored at the start of the
    string -- not re.search as previously stated.)

    :return: the match object on success, None when there is no match,
             or False when matching failed (e.g. invalid pattern).
    """
    matches = False
    try:
        matches = re.match(pattern, message)
    except ReError:
        _logger.error("Regexp matching excepted. \nPattern: %s \nString: %s" %(pattern, message))
    finally:
        # NOTE(review): returning from ``finally`` also swallows any
        # exception other than ReError (e.g. TypeError) and yields False --
        # kept as-is for backward compatibility.
        return matches
def conditions_apply(errordiag_job, architecture_job, release_job, wqid_job,
                     errordiag_rule, architecture_rule, release_rule, wqid_rule):
    """
    Check that the rule's error regexp, architecture, release and work queue
    all match the job's values. Rule attributes that are unset (falsy) act
    as wildcards and match anything.
    """
    _logger.debug("Entered conditions_apply %s"%(locals()))
    mismatch = ((errordiag_rule and not safe_match(errordiag_rule, errordiag_job))
                or (architecture_rule and architecture_rule != architecture_job)
                or (release_rule and release_rule != release_job)
                or (wqid_rule and wqid_rule != wqid_job))
    if mismatch:
        _logger.debug("Leaving conditions_apply: False")
        return False
    _logger.debug("Leaving conditions_apply: True")
    return True
def compare_strictness(rule1, rule2):
    """
    Return 1 if rule1 is stricter, 0 if equally strict, -1 if rule2 is stricter.

    Strictness is the number of constraining attributes (architecture,
    release, wqid) the rule defines.

    Bug fix: the original compared the rule dicts themselves
    (``rule1 > rule2``) -- a TypeError on Python 3 -- and never used the
    weights it had just computed. The weights are compared now.
    """
    _logger.debug("Entered compare_strictness")

    def _weight(rule):
        # One point per constraining attribute that is set on the rule.
        return sum(1 for field in ('architecture', 'release', 'wqid') if rule[field])

    rule1_weight = _weight(rule1)
    rule2_weight = _weight(rule2)

    if rule1_weight > rule2_weight:
        return 1
    elif rule1_weight < rule2_weight:
        return -1
    else:
        return 0
def preprocess_rules(rules, error_diag_job, release_job, architecture_job, wqid_job):
    """
    Do some preliminary validation of the applicable rules.
    - Duplicate rules, (action=limit_retry, maxAttempt=5) vs (action=limit_retry, maxAttempt=7, release=X):
      resolve to the most specific rule, in our example (action=limit_retry, maxAttempt=7, release=X)
    - Inconsistent rules, e.g. (action=limit_retry, maxAttempt=5) vs (action=limit_retry, maxAttempt=7):
      resolve into the strictest rule, in our example (limit_retry = 5)

    Fixes over the previous revision:
    - limit_retry_rule is initialised before the try-block, so an early
      KeyError can no longer cause a NameError in the final check.
    - the NO_RETRY loop now breaks after the first match, as its own
      comment always claimed.
    """
    _logger.debug("Entered preprocess_rules")
    filtered_rules = []
    limit_retry_rule = {}
    try:
        # NO_RETRY rules all have the same effect: keep only the first match.
        for rule in rules:
            if (rule['action'] == NO_RETRY and
                    conditions_apply(error_diag_job, architecture_job, release_job, wqid_job,
                                     rule['error_diag'], rule['architecture'], rule['release'], rule['wqid'])):
                filtered_rules.append(rule)
                break

        # INCREASE_MEM rules all have the same effect: keep the first match.
        for rule in rules:
            if (rule['action'] == INCREASE_MEM and
                    conditions_apply(error_diag_job, architecture_job, release_job, wqid_job,
                                     rule['error_diag'], rule['architecture'], rule['release'], rule['wqid'])):
                filtered_rules.append(rule)
                break

        # INCREASE_CPU rules all have the same effect: keep the first match.
        for rule in rules:
            if (rule['action'] == INCREASE_CPU and
                    conditions_apply(error_diag_job, architecture_job, release_job, wqid_job,
                                     rule['error_diag'], rule['architecture'], rule['release'], rule['wqid'])):
                filtered_rules.append(rule)
                break

        # LIMIT_RETRY: keep the narrowest applicable rule; on a draw take
        # the strictest (lowest) maxAttempt.
        for rule in rules:
            if (rule['action'] != LIMIT_RETRY or
                    not conditions_apply(error_diag_job, architecture_job, release_job, wqid_job,
                                         rule['error_diag'], rule['architecture'], rule['release'], rule['wqid'])):
                continue
            if not limit_retry_rule:
                limit_retry_rule = rule
            else:
                comparison = compare_strictness(rule, limit_retry_rule)
                if comparison == 1:
                    limit_retry_rule = rule
                elif comparison == 0:
                    limit_retry_rule['params']['maxAttempt'] = min(limit_retry_rule['params']['maxAttempt'],
                                                                   rule['params']['maxAttempt'])
                # comparison == -1: current candidate is stricter, keep it.
    except KeyError:
        _logger.error("Rules are not properly defined. Rules: %s" % rules)

    if limit_retry_rule:
        filtered_rules.append(limit_retry_rule)

    return filtered_rules
#TODO: Add a call to the retrial rules from the UserIF.killJob
@timeit
def apply_retrial_rules(task_buffer, jobID, error_source, error_code, error_diag, attemptNr):
    """
    Get rules from DB and applies them to a failed job. Actions can be:
    - flag the job so it is not retried again (error code is a final state and retrying will not help)
    - limit the number of retries
    - increase the memory of a job if it failed because of insufficient memory

    :param task_buffer: DB access object (getRetrialRules/peekJobs/...) --
        presumably the PanDA TaskBuffer; TODO confirm type
    :param jobID: PandaID of the failed job
    :param error_source: category key into the retrial-rule table
    :param error_code: error code within the source (cast to int below)
    :param error_diag: free-text error diagnostic matched against rule regexps
    :param attemptNr: current attempt number (used for logging only here)
    """
    _logger.debug("Entered apply_retrial_rules for PandaID=%s, error_source=%s, error_code=%s, error_diag=%s, attemptNr=%s"
                  %(jobID, error_source, error_code, error_diag, attemptNr))
    # Rules are keyed by integer error code; a non-numeric code means no rules apply.
    try:
        error_code = int(error_code)
    except ValueError:
        _logger.error("Error code ({0}) can not be casted to int".format(error_code))
        return

    retrial_rules = task_buffer.getRetrialRules()
    _logger.debug("Back from getRetrialRules: %s"%retrial_rules)
    if not retrial_rules:
        return
    try:
        # Load the failed job and narrow the rule set down to the ones that
        # apply to it (deduplicated/strictest via preprocess_rules).
        job = task_buffer.peekJobs([jobID], fromDefined=False, fromArchived=True, fromWaiting=False)[0]
        applicable_rules = preprocess_rules(retrial_rules[error_source][error_code], error_diag, job.AtlasRelease,
                                            job.cmtConfig, job.workQueue_ID)
        _logger.debug("Applicable rules for PandaID={0}: {1}".format(jobID, applicable_rules))
        for rule in applicable_rules:
            try:
                error_id = rule['error_id']
                error_diag_rule = rule['error_diag']
                action = rule['action']
                parameters = rule['params']
                architecture = rule['architecture'] # cmtconfig
                release = rule['release'] # transHome
                wqid = rule['wqid'] # work queue ID
                active = rule['active'] # If False, don't apply rule, only log

                _logger.debug("error_diag_rule {0}, action {1}, parameters {2}, architecture {3}, release {4}, wqid {5}, active {6}"
                              .format(error_diag_rule, action, parameters, architecture, release, wqid, active))
                _logger.debug("Processing rule {0} for jobID {1}, error_source {2}, error_code {3}, attemptNr {4}".
                              format(rule, jobID, error_source, error_code, attemptNr))

                # Defensive re-check of the rule conditions against the job.
                if not conditions_apply(error_diag, job.cmtConfig, job.AtlasRelease, job.workQueue_ID,
                                        error_diag_rule, architecture, release, wqid):
                    _logger.debug("Skipped rule {0}. cmtConfig ({1} : {2}) or Release ({3} : {4}) did NOT match"
                                  .format(rule, architecture, job.cmtConfig, release, job.AtlasRelease))
                    continue

                if action == NO_RETRY:
                    # Flag job/files so the retry machinery gives up on them.
                    if active:
                        task_buffer.setNoRetry(jobID, job.jediTaskID, job.Files)
                    # Log to pandamon and logfile
                    message = "action=setNoRetry for PandaID={0} jediTaskID={1} ( ErrorSource={2} ErrorCode={3} ErrorDiag: {4}. Error/action active={5} error_id={6} )"\
                        .format(jobID, job.jediTaskID, error_source, error_code, error_diag_rule, active, error_id)
                    _logger.info(message)
                elif action == LIMIT_RETRY:
                    # Cap the number of retries at the rule's maxAttempt.
                    try:
                        if active:
                            task_buffer.setMaxAttempt(jobID, job.jediTaskID, job.Files, int(parameters['maxAttempt']))
                        # Log to pandamon and logfile
                        message = "action=setMaxAttempt for PandaID={0} jediTaskID={1} maxAttempt={2} ( ErrorSource={3} ErrorCode={4} ErrorDiag: {5}. Error/action active={6} error_id={7} )"\
                            .format(jobID, job.jediTaskID, int(parameters['maxAttempt']), error_source, error_code, error_diag_rule, active, error_id)
                        _logger.info(message)
                    except (KeyError, ValueError):
                        _logger.error("Inconsistent definition of limit_retry rule - maxAttempt not defined. parameters: %s" %parameters)
                elif action == INCREASE_MEM:
                    # Bump the job's RAM limit for the next attempt.
                    try:
                        if active:
                            task_buffer.increaseRamLimitJobJEDI(job, job.minRamCount, job.jediTaskID)
                        # Log to pandamon and logfile
                        message = "action=increaseRAMLimit for PandaID={0} jediTaskID={1} ( ErrorSource={2} ErrorCode={3} ErrorDiag: {4}. Error/action active={5} error_id={6} )"\
                            .format(jobID, job.jediTaskID, error_source, error_code, error_diag_rule, active, error_id)
                        _logger.info(message)
                    except:
                        errtype,errvalue = sys.exc_info()[:2]
                        _logger.error("Failed to increase RAM limit : %s %s" % (errtype,errvalue))
                elif action == INCREASE_CPU:
                    try:
                        # request recalculation of task parameters and see if it applied
                        applied= False
                        if active:
                            rowcount = task_buffer.requestTaskParameterRecalculation(job.jediTaskID)
                        else:
                            rowcount = 0
                        if rowcount:
                            applied = True
                        # Log to pandamon and logfile
                        message = "action=increaseCpuTime requested recalculation of task parameters for PandaID={0} jediTaskID={1} (active={2} ), applied={3}. ( ErrorSource={4} ErrorCode={5} ErrorDiag: {6}. Error/action active={7} error_id={8} )"\
                            .format(jobID, job.jediTaskID, active, applied, error_source, error_code, error_diag_rule, active, error_id)
                        _logger.info(message)
                    except:
                        errtype,errvalue = sys.exc_info()[:2]
                        _logger.error("Failed to increase CPU-Time : %s %s" % (errtype,errvalue))

                _logger.debug("Finished rule {0} for PandaID={1} error_source={2} error_code={3} attemptNr={4}"
                              .format(rule, jobID, error_source, error_code, attemptNr))
            except KeyError:
                _logger.error("Rule was missing some field(s). Rule: %s" %rule)
    except KeyError as e:
        # No rule entry for this error_source/error_code: nothing to do.
        _logger.debug("No retrial rules to apply for jobID {0}, attemptNr {1}, failed with {2}={3}. (Exception {4})"
                      .format(jobID, attemptNr, error_source, error_code, e))
| [
11748,
25064,
198,
11748,
640,
198,
6738,
279,
7642,
519,
1362,
13,
47,
5282,
11187,
1362,
1330,
41112,
11187,
1362,
198,
6738,
4566,
1330,
279,
5282,
62,
11250,
198,
11748,
302,
198,
6738,
302,
1330,
4049,
355,
797,
12331,
198,
2,
49... | 2.076221 | 6,022 |
'''
Implementation of Compositional Pattern Producing Networks in Tensorflow
https://en.wikipedia.org/wiki/Compositional_pattern-producing_network
@hardmaru, 2016
Sampler Class
This file is meant to be run inside an IPython session, as it is meant
to be used interacively for experimentation.
It shouldn't be that hard to take bits of this code into a normal
command line environment though if you want to use outside of IPython.
usage:
%run -i sampler.py
sampler = Sampler(z_dim = 4, c_dim = 1, scale = 8.0, net_size = 32)
'''
import numpy as np
import tensorflow as tf
import math
import random
import PIL
from PIL import Image
import pylab
from model import CPPN
import matplotlib.pyplot as plt
import images2gif
from images2gif import writeGif
# Grab IPython's magic runner — this script is meant to be executed inside
# an IPython session via `%run -i sampler.py` (see module docstring).
mgc = get_ipython().magic
# Render matplotlib figures inline in the notebook/console.
mgc(u'matplotlib inline')
# Default to large 10x10-inch figures for viewing sampled images.
pylab.rcParams['figure.figsize'] = (10.0, 10.0)
| [
7061,
6,
198,
3546,
32851,
286,
29936,
1859,
23939,
21522,
2259,
27862,
287,
309,
22854,
11125,
198,
198,
5450,
1378,
268,
13,
31266,
13,
2398,
14,
15466,
14,
5377,
1930,
1859,
62,
33279,
12,
36866,
62,
27349,
198,
198,
31,
10424,
387... | 3.123636 | 275 |
from flask import Flask, request, jsonify
from gevent.pywsgi import WSGIServer
import traceback
import uuid
import os
from logger import logger
from boundingRect import detect
import cv2
app = Flask(__name__)
@app.route('/identifyDirection', methods=['POST'])
def get_direction():
    """Identify the orientation of an uploaded image.

    Expects a multipart upload in the ``file`` field. The image is saved
    under ``taskData/`` with a unique name, passed to the bounding-rect
    detector, and the temporary file is always removed afterwards.

    :return: JSON ``{'code': 0, 'msg': ..., 'data': {'direction', 'angle'}}``
             on success; ``{'code': 10001, ...}`` when no file was uploaded
             or when any step fails.
    """
    up_file_name = ''
    try:
        data_file = request.files['file']
        if not data_file:
            return jsonify({'code': 10001, 'msg': '未上传图片', 'data': ''})
        up_file_name = data_file.filename
        # Unique name avoids collisions between concurrent requests.
        # NOTE: rsplit raises IndexError for extension-less filenames,
        # which falls through to the failure response below (as before).
        file_name = f'{uuid.uuid4()}.{up_file_name.rsplit(".", 1)[1]}'
        # exist_ok avoids the check-then-create race of the original.
        os.makedirs('taskData', exist_ok=True)
        file_path = f'taskData/{file_name}'
        data_file.save(file_path)
        try:
            img = cv2.imread(file_path)
            direction, angle = detect(img, False)
        finally:
            # Always clean up the temp file, even when detection raises
            # (the original leaked the file on failure).
            os.remove(file_path)
        return jsonify({'code': 0, 'msg': f'{up_file_name} 识别完成', 'data': {'direction': direction, 'angle': angle}})
    except Exception as e:
        logger.error(e)
        return jsonify({'code': 10001, 'msg': f'{up_file_name} 上传失败', 'data': ''})
def run(ser_port=1666):
    """Start the WSGI API server and block until it stops.

    :param ser_port: TCP port to listen on (binds all interfaces).
    :return: None
    """
    api_server = None
    try:
        api_server = WSGIServer(('0.0.0.0', ser_port), app)
        api_server.log.write(f'服务启动成功 http://127.0.0.1:{ser_port}/')
        api_server.serve_forever()
    except Exception:
        # print_exc() reports the exception that aborted the server; the
        # original print_stack() only dumped the current call stack and
        # discarded the actual error.
        traceback.print_exc()
    finally:
        if api_server:
            api_server.close()
if __name__ == '__main__':
    # Emit non-ASCII (Chinese) JSON verbatim. The original assignment
    # `app.json_encoder.ensure_ascii = False` set a class attribute that is
    # shadowed at dump time, because Flask passes ensure_ascii explicitly
    # from the JSON_AS_ASCII config key (Flask < 2.3 — TODO confirm the
    # installed Flask version; 2.3+ uses `app.json.ensure_ascii`).
    app.config['JSON_AS_ASCII'] = False
    run(38082)
| [
6738,
42903,
1330,
46947,
11,
2581,
11,
33918,
1958,
198,
6738,
4903,
1151,
13,
9078,
18504,
12397,
1330,
25290,
38,
1797,
18497,
198,
11748,
12854,
1891,
198,
11748,
334,
27112,
198,
11748,
28686,
198,
6738,
49706,
1330,
49706,
198,
6738... | 1.992832 | 837 |
"""CHAPTER 29
WIND LOADS ON BUILDING APPURTENANCES AND OTHER STRUCTURES:
MAIN WIND FORCE RESISTING SYSTEM (DIRECTIONAL PROCEDURE)
• Basic wind speed, V (Section 26.5);
• Wind directionality factor, Kd (Section 26.6);
• Exposure category (Section 26.7);
• Topographic factor, Kzt (Section 26.8);
• Ground elevation factor, Ke (Section 26.9); and
• Enclosure classification (Section 26.12).
"""
from types import SimpleNamespace
from asce7.common import Log, Deg, attach_filter
from ceng.interp import interp_dict, interp1d
import numpy as np
##########################################################
# 29.4.3 Rooftop Solar Panels for Buildings of All Heights
# with Flat Roofs or Gable or Hip Roofs with Slopes
# Less Than 7°
###########################################################
# Fig. 29.4-7 Design Wind Loads (All Heights): Rooftop Solar Panels for Enclosed and Partially Enclosed Buildings,
# Roof θ≤7°
# Verbatim transcription of the Fig. 29.4-7 table, kept for reference only;
# the machine-readable copy used for interpolation is built just below.
_FIG29P4D7_GCrn_nom__STR = """
area (sq ft)
1 500 5000
tilt (°)
0° to 5° 15° to 35°
0 5 15 35
Zone GCrn_nom
1 1.5 0.35 0.10 1.5 0.35 0.10 2.0 0.56 0.30 2.0 0.56 0.30
2 2.0 0.45 0.15 2.0 0.45 0.15 2.9 0.65 0.40 2.9 0.65 0.40
3 2.3 0.50 0.15 2.3 0.50 0.15 3.5 0.80 0.50 3.5 0.80 0.50
"""[1:-1]
# Machine-readable copy of Fig. 29.4-7 (axes + one table per roof zone).
FIG29P4D7_γa_NS = SimpleNamespace()
FIG29P4D7_γa_NS.zone = list("123")  # roof zone (strings!)
FIG29P4D7_γa_NS.tilt = (0, 5, 15, 35)  # deg - interpolate, inclusive
FIG29P4D7_γa_NS.area = tuple(Log(v) for v in (1, 500, 5000))  # psf - interpolate, log10
# One 4x3 (tilt x area) table per zone; rows follow the 4 tilt grid points,
# columns the 3 (log-scale) area grid points declared above.
FIG29P4D7_γa_NS.GCrn_nom = ([[1.5, 0.35, 0.10],
                             [1.5, 0.35, 0.10],
                             [2.0, 0.56, 0.30],
                             [2.0, 0.56, 0.30], ],
                            [[2.0, 0.45, 0.15],
                             [2.0, 0.45, 0.15],
                             [2.9, 0.65, 0.40],
                             [2.9, 0.65, 0.40], ],
                            [[2.3, 0.50, 0.15],
                             [2.3, 0.50, 0.15],
                             [3.5, 0.80, 0.50],
                             [3.5, 0.80, 0.50], ],
                            )
# Per-zone 2-D interpolants over (tilt, log-area); keyed by zone string.
FIG29P4D7_GCrn_nom_DICT = interp_dict(
    x=FIG29P4D7_γa_NS.tilt,
    y=FIG29P4D7_γa_NS.area,
    z=dict(zip(FIG29P4D7_γa_NS.zone, FIG29P4D7_γa_NS.GCrn_nom)),
    axis=0
)
def filter29p4p3(θ, roof_type, Lp, ω, h1, h2):
    """Applicability mask for Section 29.4.3: rooftop solar panels on
    buildings of all heights with flat roofs, or gable/hip roofs with
    slopes less than 7°.

    slope (deg)
    roof_type
    Lp (ft)
    ω (deg)
    h1 (ft)
    h2 (ft)
    """
    checks = (
        θ <= Deg(7),
        np.in1d(roof_type, ["flat", "gable", "hip"]),
        Lp <= 6.7,
        ω <= Deg(35),
        h1 <= 2,
        h2 <= 4,
    )
    mask = checks[0]
    for check in checks[1:]:
        mask = mask & check
    return mask
@attach_filter(filter29p4p3)
def eq29p4d5_p(qh, GCrn):
    """Equation 29.4-5: design wind pressure for rooftop solar panels on
    buildings of all heights with flat or low-slope (< 7°) gable/hip roofs.

    p = qh*GCrn (lbf/sq ft)
    """
    pressure = qh * GCrn
    return pressure
@attach_filter(filter29p4p3)
def eq29p4d6_GCrn_nom(zone, tilt, area):
    """Nominal net pressure coefficient (GCrn)nom for Equation 29.4-6.

    Looked up from Figure 29.4-7 by roof zone, then interpolated over
    panel tilt and (log-scale) effective wind area.
    """
    interpolant = FIG29P4D7_GCrn_nom_DICT[zone]
    return interpolant(tilt, Log(area))
@attach_filter(filter29p4p3)
def eq29p4d6_GCrn(γp, γc, γE, GCrn_nom):
    """Equation 29.4-6 net pressure coefficient:

    GCrn = γp*γc*γE*GCrn_nom
    """
    coefficient = γp * γc
    coefficient = coefficient * γE
    return coefficient * GCrn_nom
@attach_filter(filter29p4p3)
def eq29p4d6_γp(hpt, h):
    """Parapet height factor γp for Equation 29.4-6:

    γp = min(1.2, 0.9 + hpt/h)
    """
    uncapped = 0.9 + hpt / h
    return np.minimum(1.2, uncapped)
@attach_filter(filter29p4p3)
def eq29p4d6_γc(Lp):
    """Chord length factor γc for Equation 29.4-6:

    γc = max(0.6 + 0.06*Lp, 0.8)
    """
    length_term = 0.6 + 0.06 * Lp
    return np.maximum(length_term, 0.8)
# Edge-factor lookup for Equation 29.4-6: exposed panels take 1.5, all others 1.0.
EQ29P4D6_γE = {"exposed": 1.5, "unexposed": 1.0}
@attach_filter(filter29p4p3)
def eq29p4d6_γE(exposure_condition):
    """Edge factor γE for Equation 29.4-6:

    γE = 1.5 if exposed, 1.0 for any other condition
    """
    factor = EQ29P4D6_γE[exposure_condition]
    return factor
#############################################################
# 29.4.4 Rooftop Solar Panels Parallel to the Roof Surface on
# Buildings of All Heights and Roof Slopes
#############################################################
def filter29p4p4(θ, roof_type, Lp, ω, h2):
    """Applicability mask for Section 29.4.4: rooftop solar panels parallel
    to the roof surface on buildings of all heights and roof slopes.

    slope (deg)
    roof_type
    Lp (ft)
    ω (deg)
    h2 (ft)
    """
    checks = (
        θ <= Deg(7),
        np.in1d(roof_type, ["flat", "gable", "hip"]),
        Lp <= 6.7,
        ω <= Deg(2),
        h2 <= 10 / 12,
    )
    mask = checks[0]
    for check in checks[1:]:
        mask = mask & check
    return mask
@attach_filter(filter29p4p4)
def eq29p4d7_p(qh, GCp, γE, γa):
    """Equation 29.4-7 design pressure for roof-parallel solar panels:

    p = qh*GCp*γE*γa (lb/ft2)
    """
    pressure = qh * GCp
    pressure = pressure * γE
    return pressure * γa
# Edge-factor lookup for Equation 29.4-7: exposed panels take 1.5, all others 1.0.
EQ29P4D7_γE = {"exposed": 1.5, "unexposed": 1.0}
@attach_filter(filter29p4p4)
def eq29p4d7_γE(exposure_condition):
    """Edge factor γE for Equation 29.4-7:

    γE = 1.5 if exposed, 1.0 for any other condition
    """
    factor = EQ29P4D7_γE[exposure_condition]
    return factor
# Fig. 29.4-8 Solar Panel Pressure Equalization Factor, γa, for Enclosed and Partially Enclosed Buildings
# of All Heights
# Verbatim transcription of the Fig. 29.4-8 table, kept for reference only;
# the machine-readable copy used for interpolation is built just below.
_FIG29P4D8_γa__STR = """
Effective Wind Area, A (ft2) γa
1 0.8
10 0.8
100 0.4
1000 0.4
"""[1:-1]
# Machine-readable copy of Fig. 29.4-8 (piecewise-linear over log10 area).
FIG29P4D8_γa_NS = SimpleNamespace()
# X values
FIG29P4D8_γa_NS.A = tuple(Log(v) for v in [1, 10, 100, 1000])  # effective wind area (ft2)
# Y values
FIG29P4D8_γa_NS.γa = (0.8, 0.8, 0.4, 0.4)  # solar panel pressure equalization factor
# 1-D interpolant γa(log10(A)) consumed by eq29p4d7_γa.
FIG29P4D8_γa_INTERPOLANT = interp1d(FIG29P4D8_γa_NS.A, FIG29P4D8_γa_NS.γa)
@attach_filter(filter29p4p4)
def eq29p4d7_γa(A):
    """Solar panel pressure equalization factor γa for Equation 29.4-7.

    Interpolated from Figure 29.4-8 on the log10 of the effective wind
    area A (ft2).
    """
    log_area = Log(A)
    return FIG29P4D8_γa_INTERPOLANT(log_area)
| [
37811,
41481,
2808,
198,
28929,
17579,
47149,
6177,
20571,
26761,
2751,
43504,
4261,
51,
1677,
20940,
1546,
5357,
25401,
19269,
18415,
29514,
25,
198,
5673,
1268,
370,
12115,
7473,
5222,
15731,
8808,
2751,
36230,
357,
17931,
23988,
2849,
18... | 1.896047 | 3,415 |
#!/usr/bin/env python
# Copyright (c) 2019-2021 Gabriel Sanhueza.
#
# Distributed under the MIT License.
# See LICENSE for more info.
import numpy as np
from colour import Color
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
2,
220,
15069,
357,
66,
8,
13130,
12,
1238,
2481,
17371,
2986,
71,
518,
4496,
13,
198,
2,
198,
2,
220,
4307,
6169,
739,
262,
17168,
13789,
13,
198,
2,
220,
4091,
38559,
24290,... | 2.882353 | 68 |
#
# This file is part of pyasn1-modules software.
#
# Created by Russ Housley
# Copyright (c) 2019, Vigil Security, LLC
# License: http://snmplabs.com/pyasn1/license.html
#
import sys
import unittest
from pyasn1.codec.der.decoder import decode as der_decoder
from pyasn1.codec.der.encoder import encode as der_encoder
from pyasn1.type import univ
from pyasn1_modules import pem
from pyasn1_modules import rfc5652
from pyasn1_modules import rfc5751
# Collect every TestCase defined in this module into a single suite.
suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
if __name__ == '__main__':
    # Run verbosely; exit non-zero when any test fails so CI can detect it.
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    sys.exit(not result.wasSuccessful())
| [
2,
198,
2,
770,
2393,
318,
636,
286,
12972,
292,
77,
16,
12,
18170,
3788,
13,
198,
2,
198,
2,
15622,
416,
1887,
367,
516,
1636,
198,
2,
15069,
357,
66,
8,
13130,
11,
39840,
4765,
11,
11419,
198,
2,
13789,
25,
2638,
1378,
16184,
... | 2.733333 | 240 |