| content (stringlengths 0–1.05M) | origin (stringclasses, 2 values) | type (stringclasses, 2 values) |
|---|---|---|
#!/usr/bin/env python
# encoding: utf-8
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def addTwoNumbers(self, l1, l2):
"""
:type l1: ListNode
:type l2: ListNode
:rtype: ListNode
"""
r = 0
hd = ListNode(0)
p = None
while(l1 != None or l2 !=None or r != 0):
if(p == None):
p = hd
else:
p.next = ListNode(0)
p = p.next
a = l1.val if l1 else 0
b = l2.val if l2 else 0
m = (a + b + r) % 10
r = (a + b + r) // 10
p.val = m
l1 = l1.next if l1 else None
l2 = l2.next if l2 else None
return hd
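# Illustrative check (kept as comments so importing this file has no side effects).
# The helper build() below exists only for this example and is not part of the
# original solution. Digits are stored in reverse order, so 342 + 465 = 807 becomes
# (2 -> 4 -> 3) + (5 -> 6 -> 4) and should yield 7 -> 0 -> 8.
#
#   def build(digits):
#       head = ListNode(digits[0])
#       node = head
#       for d in digits[1:]:
#           node.next = ListNode(d)
#           node = node.next
#       return head
#
#   result = Solution().addTwoNumbers(build([2, 4, 3]), build([5, 6, 4]))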
| nilq/baby-python | python |
import atexit
from pathlib import Path
from typing import Dict, Union
from .template import BaseTemplate
from .exceptions import NotFoundError
class MemoryEngine(object):
_data: Dict
_path: Path
_template: BaseTemplate
def __init__(self, path: Union[Path, str], template: BaseTemplate, auto_load=True):
"""
:param path: path to save file
:param template: memory template
"""
self._data = {}
self._template = template
# update path
self.path = path
# exposing dictionary methods
self.clear = self._data.clear
self.items = self._data.items
# read the initial data
if auto_load:
self.load()
@property
def path(self):
return self._path
@path.setter
def path(self, value: Union[Path, str]):
        if isinstance(value, Path):
            self._path = value
        else:
            self._path = Path(value)
@property
def template(self):
return self._template
def save(self):
"""
write current data to disk
"""
self._template.save(self._data, self._path)
def load(self):
"""
read data from disk
"""
try:
self._data = self._template.load(self._path)
except NotFoundError:
self.save()
def get(self, key, default=None):
"""
        :param key: key used as identifier
        :param default: value to return if the key is not found
        :return: data corresponding to the identifier (key)
        :returns: default if key not found
"""
try:
value = self._data[key]
except KeyError:
value = default
return value
def delete(self, *args):
"""
removes the keys from memory
:param args: keys to be removed
"""
for key in args:
try:
del self._data[key]
except KeyError:
pass
def put(self, key, value):
"""
adds key-value pair to memory
:param key: key used as identifier
:param value: data to store
:return: self, may be chained
"""
self._data[key] = value
return self
def putall(self, d: dict):
"""
adds all the key-value pairs in the map
:param d: dictionary map to be stored
"""
for key, value in d.items():
self._data[key] = value
def save_atexit(self, should_save=True):
"""
register save function to atexit module
:param should_save: whether to register or unregister
"""
if should_save:
atexit.register(self.save)
else:
atexit.unregister(self.save)
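# Minimal usage sketch (illustrative, not part of this module). It assumes some
# concrete BaseTemplate subclass is available; 'JsonTemplate' below is purely a
# placeholder name.
#
#   engine = MemoryEngine('store.json', JsonTemplate())
#   engine.put('greeting', 'hello').put('answer', 42)
#   engine.save()                      # write current data to disk
#   print(engine.get('answer'))        # -> 42
#   engine.save_atexit()               # also persist automatically at exit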
| nilq/baby-python | python |
#!/usr/bin/python
#Libraries
import RPi.GPIO as GPIO
import time
#GPIO Mode (BOARD / BCM)
GPIO.setmode(GPIO.BCM)
#set GPIO Pins
GPIO_TRIGGER = 18
GPIO_ECHO = 24
GPIO_IR = 6
#set GPIO direction (IN / OUT)
GPIO.setup(GPIO_TRIGGER, GPIO.OUT)
GPIO.setup(GPIO_ECHO, GPIO.IN)
GPIO.setup(GPIO_IR, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
last_dist = 0
last_dist_time = 0
def main():
print("PIR Module Test (CTRL+C to exit)")
try:
GPIO.add_event_detect(GPIO_IR, GPIO.RISING, callback=MOTION)
while 1:
time.sleep(10)
except KeyboardInterrupt:
print("Quit")
GPIO.cleanup()
def MOTION(PIR_PIN):
global last_dist
global last_dist_time
dist = distance()
d_time = int(time.time())
print("%s\tOh hay I saw something" % d_time)
if dist < 2000:
print("%s\tI think I spot you at %.1f cm" % (d_time, dist))
last_dist = dist
last_dist_time = d_time
else:
if last_dist > 0:
del_time = d_time - last_dist_time
print("%s\tYa know I feel like you are hanging around and only %s seconds ago you were at %.1f cm away" % (d_time, del_time, last_dist))
else:
print("%s\tPretty Sneaky... " % d_time)
def distance():
# set Trigger to HIGH
GPIO.output(GPIO_TRIGGER, True)
# set Trigger after 0.01ms to LOW
time.sleep(0.00001)
GPIO.output(GPIO_TRIGGER, False)
StartTime = time.time()
StopTime = time.time()
# save StartTime
while GPIO.input(GPIO_ECHO) == 0:
StartTime = time.time()
# save time of arrival
while GPIO.input(GPIO_ECHO) == 1:
StopTime = time.time()
# time difference between start and arrival
TimeElapsed = StopTime - StartTime
# multiply with the sonic speed (34300 cm/s)
# and divide by 2, because there and back
distance = (TimeElapsed * 34300) / 2
return distance
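# Worked example of the conversion above: an echo round trip of 0.006 s gives
# (0.006 * 34300) / 2 = 102.9 cm between the sensor and the obstacle.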
if __name__ == '__main__':
main()
| nilq/baby-python | python |
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from projects.tokens import token_generator
from rest_framework import permissions
class HasAPIAccess(permissions.BasePermission):
""" """
message = _('Invalid or missing API Key.')
def has_permission(self, request, view):
api_token = request.META.get('HTTP_API_KEY', None)
return bool(api_token and token_generator.check_token(api_token))
| nilq/baby-python | python |
# Copyright 2019-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import pytest
TFS_DOCKER_BASE_NAME = 'sagemaker-tensorflow-serving'
def pytest_addoption(parser):
parser.addoption('--docker-base-name', default=TFS_DOCKER_BASE_NAME)
parser.addoption('--framework-version', required=True)
parser.addoption('--processor', default='cpu', choices=['cpu', 'gpu'])
parser.addoption('--aws-id', default=None)
parser.addoption('--tag')
parser.addoption('--generate-coverage-doc', default=False, action='store_true',
help='use this option to generate test coverage doc')
def pytest_collection_modifyitems(session, config, items):
if config.getoption("--generate-coverage-doc"):
from test.test_utils.test_reporting import TestReportGenerator
report_generator = TestReportGenerator(items, is_sagemaker=True)
report_generator.generate_coverage_doc(framework="tensorflow", job_type="inference")
@pytest.fixture(scope='module')
def docker_base_name(request):
return request.config.getoption('--docker-base-name')
@pytest.fixture(scope='module')
def framework_version(request):
return request.config.getoption('--framework-version')
@pytest.fixture(scope='module')
def processor(request):
return request.config.getoption('--processor')
@pytest.fixture(scope='module')
def runtime_config(request, processor):
if processor == 'gpu':
return '--runtime=nvidia '
else:
return ''
@pytest.fixture(scope='module')
def tag(request, framework_version, processor):
image_tag = request.config.getoption('--tag')
if not image_tag:
image_tag = '{}-{}'.format(framework_version, processor)
return image_tag
@pytest.fixture(autouse=True)
def skip_by_device_type(request, processor):
is_gpu = processor == 'gpu'
if (request.node.get_closest_marker('skip_gpu') and is_gpu) or \
(request.node.get_closest_marker('skip_cpu') and not is_gpu):
pytest.skip('Skipping because running on \'{}\' instance'.format(processor))
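# Illustrative use of the markers handled by skip_by_device_type above (in a test
# module; the test names below are placeholders):
#
#   @pytest.mark.skip_gpu
#   def test_cpu_only_behaviour(docker_base_name, tag):
#       ...
#
#   @pytest.mark.skip_cpu
#   def test_gpu_only_behaviour(runtime_config):
#       ...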
| nilq/baby-python | python |
#!/usr/bin/env python
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# A script to test the mask filter.
# replaces a circle with a color
# Image pipeline
reader = vtk.vtkPNMReader()
reader.ReleaseDataFlagOff()
reader.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/earth.ppm")
reader.Update()
sphere = vtk.vtkImageEllipsoidSource()
sphere.SetWholeExtent(0,511,0,255,0,0)
sphere.SetCenter(128,128,0)
sphere.SetRadius(80,80,1)
sphere.Update()
mask = vtk.vtkImageMask()
mask.SetImageInputData(reader.GetOutput())
mask.SetMaskInputData(sphere.GetOutput())
mask.SetMaskedOutputValue(100,128,200)
mask.NotMaskOn()
mask.ReleaseDataFlagOff()
mask.Update()
sphere2 = vtk.vtkImageEllipsoidSource()
sphere2.SetWholeExtent(0,511,0,255,0,0)
sphere2.SetCenter(328,128,0)
sphere2.SetRadius(80,50,1)
sphere2.Update()
# Test the wrapping of the output masked value
mask2 = vtk.vtkImageMask()
mask2.SetImageInputData(mask.GetOutput())
mask2.SetMaskInputData(sphere2.GetOutput())
mask2.SetMaskedOutputValue(100)
mask2.NotMaskOn()
mask2.ReleaseDataFlagOff()
mask2.Update()
sphere3 = vtk.vtkImageEllipsoidSource()
sphere3.SetWholeExtent(0,511,0,255,0,0)
sphere3.SetCenter(228,155,0)
sphere3.SetRadius(80,80,1)
sphere3.Update()
# Test the wrapping of the output masked value
mask3 = vtk.vtkImageMask()
mask3.SetImageInputData(mask2.GetOutput())
mask3.SetMaskInputData(sphere3.GetOutput())
mask3.SetMaskedOutputValue(255)
mask3.NotMaskOn()
mask3.SetMaskAlpha(0.5)
mask3.ReleaseDataFlagOff()
viewer = vtk.vtkImageViewer()
viewer.SetInputConnection(mask3.GetOutputPort())
viewer.SetColorWindow(255)
viewer.SetColorLevel(128)
#viewer DebugOn
viewer.Render()
# --- end of script --
| nilq/baby-python | python |
"""
Testing area package
"""
from shapes.square.area import area_square
from shapes.square.perimeter import perimeter_square
import pytest
def test_square_area():
"""
testing function area_square
"""
length = 2
A = area_square(length)
assert pytest.approx(A) == 4.0
def test_square_perimeter():
length = 2
P = perimeter_square(length)
assert pytest.approx(P) == 8.0
######################
# Write a test for the triangle function
######################
def test_triangle_area():
print("insert test for triangle area here")
| nilq/baby-python | python |
from dataset import FontData, make_tfrecodes
gspath='gs://your-bucket-name/'
def make1():
d = FontData()
make_tfrecodes(d, gspath, 512, 64, 8, train=True)
make_tfrecodes(d, gspath, 512, 64, 8, train=False)
if __name__ == "__main__":
make1()
| nilq/baby-python | python |
import zipfile
import pytest
from git_taxbreak.modules.writer import Writer
@pytest.fixture
def patch_zip_file(monkeypatch):
class ZipFileMock(zipfile.ZipFile):
def __init__(self, *args, **kwargs):
self.output = args[0]
self.content = []
def __enter__(self):
return self
def close(self):
self.output.content = self.content
def writestr(self, *args, **kwargs):
file = args[0]
content = args[1]
self.content.append({"file_name": file, "content": content})
monkeypatch.setattr(zipfile, "ZipFile", ZipFileMock)
class DummyOutput:
content = None
def test_archive_save(patch_zip_file):
ARTIFACTS = [
{
"diff": "diff_content1",
"message": "summary1\n\nMore text1",
"commit_hash": "hash1",
"files": [
{"file_name": "some_path/file_name1.txt", "content": "file_content1"},
{"file_name": "some_path2/file_name2.txt", "content": "file_content2"},
{"file_name": "file_name3.txt", "content": "file_content3"},
],
},
{
"diff": "diff_content2",
"message": "summary2\n\nMore text2",
"commit_hash": "hash2",
"files": [
{"file_name": "some_path/file_name4.txt", "content": "file_content4"},
{"file_name": "file_name5.txt", "content": "file_content5"},
],
},
]
EXPECTED_CONTENT = [
{"file_name": "hash1/diff.txt", "content": "diff_content1"},
{"file_name": "hash1/some_path/file_name1.txt", "content": "file_content1"},
{"file_name": "hash1/some_path2/file_name2.txt", "content": "file_content2"},
{"file_name": "hash1/file_name3.txt", "content": "file_content3"},
{"file_name": "hash2/diff.txt", "content": "diff_content2"},
{"file_name": "hash2/some_path/file_name4.txt", "content": "file_content4"},
{"file_name": "hash2/file_name5.txt", "content": "file_content5"},
{"file_name": "work-commits.txt", "content": "hash1 summary1\nhash2 summary2"},
]
dummy_output = DummyOutput()
with Writer(dummy_output) as writer:
writer.archive(ARTIFACTS)
assert dummy_output.content == EXPECTED_CONTENT
def test_archive_not_throw_when_file_content_not_exist(patch_zip_file):
ARTIFACTS = [
{
"diff": "diff_content1",
"message": "summary1\n\nMore text1",
"commit_hash": "hash1",
"files": [{"file_name": "some_path/file_name1.txt", "content": None}],
}
]
EXPECTED_CONTENT = [
{"file_name": "hash1/diff.txt", "content": "diff_content1"},
{"file_name": "work-commits.txt", "content": "hash1 summary1"},
]
dummy_output = DummyOutput()
with Writer(dummy_output) as writer:
writer.archive(ARTIFACTS)
assert dummy_output.content == EXPECTED_CONTENT
| nilq/baby-python | python |
import sys
import shutil
from pathlib import Path
import logging
from pcv import DEFAULTS_PATH, CALLER_PATH, SOURCE, STATIC, DIST
"""
Initializes a pcv project in the current folder.
Run from command line:
python -m pcv.start
This will create the following directory tree in the current folder:
.
├── settings.py
├── makecv.py
├── dist (empty directory)
└── source
├── static (empty directory)
└── template.json
"""
logger = logging.getLogger(__name__)
def initialize(destination=None):
""" copy defaults tree to specified destination and create empty folders """
if destination is None:
destination = CALLER_PATH
else:
destination = Path(destination)
path = shutil.copytree(DEFAULTS_PATH, destination, dirs_exist_ok=True)
destination.joinpath(SOURCE).joinpath(STATIC).mkdir(exist_ok=True)
destination.joinpath(DIST).mkdir(exist_ok=True)
logger.info(f'pcv project initialized in {path}')
if __name__ == '__main__':
logger.info(f'script path: {sys.argv.pop(0)}')
initialize(*sys.argv)
| nilq/baby-python | python |
import numpy as np
import PatternToNumber
def computing_frequencies(text, k):
m = 4**k
frequency_array = np.zeros(shape = (1, m), dtype = int)
for i in range(len(text) - k + 1):
pattern = text[i:i+k]
j = PatternToNumber.pattern_to_number(pattern)
frequency_array[0 , j] = frequency_array[0, j] + 1
return frequency_array
'''def results(text, t):
result = computing_frequencies(text, t).tolist()
for line in result:
print(' '.join(map(str, line)))
results('GAGCGGGACACGTATTATAACGAAGAAAACGGGAGACTAAACTGTAGAGGACTCTTGCCAGCATACGTAACAAGCTCGACGCAGCGCGTAGTCTGATCCGAGTGAATCCGATCCCAGCAAAGGTCGTGGTTGTGTTCGGGGGGGCCTCGCGGGGGCTGCGGACTACCCTCTTGCGCCTAAGCATAAGCTCAGGAACCTGTTTCTTATGCAATTCTGTTAAGACCTTCCAAACGCATAAGGGCTCTGATTGCTAGTATGATAGAACAATCCGCACCCACCAGCTAATAGGAAGCCAACATAGCGGAGGGACACGGCTGCCTAGCCAGGACAACATTCGAGTATGATCAGAGTGCACCGGAATAGGATACTCGTGGTCGCAACGTCGCACGGCTTTCCTAAGCTGTGGAAAAGATTCAACGGACTCAGCGCCAGCGCCCGGGGACCACCGTATCTATGGGGAGTCAGTGAGAGCAGAGCAGTTGGGGGAAAAACAGCGATCCTGGCGGACACCGAGAGTTCGCATTGTATATGAAGTAGCAGCGAGTAGAGTAGATTACTGATCGAATGGTCGCGTGATCCCTGGCAAATCCTACAAAATAGGAGGAGCTGAGGCGCTCGGCCCCGATCCTGGTTTTTCTGACCCGCA', 5)
'''
| nilq/baby-python | python |
# Copyright(C) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Dict, List
import numpy as np
import xarray as xr
from .base_jags_impl import BaseJagsImplementation
class RobustRegression(BaseJagsImplementation):
def __init__(self, **attrs: Dict) -> None:
self.attrs = attrs
def get_vars(self) -> List[str]:
return ["alpha", "beta", "nu", "sigma"]
def format_data_to_jags(self, data: xr.Dataset) -> Dict:
# transpose the dataset to ensure that it is the way we expect
data = data.transpose("item", "feature")
# we already have all the values to be bound except for X and Y in self.attrs
attrs: dict = self.attrs.copy()
attrs["X"] = data.X.values
attrs["Y"] = data.Y.values
return attrs
def extract_data_from_jags(self, samples: Dict) -> xr.Dataset:
        # the chains dimension has size 1, so we squeeze it out
return xr.Dataset(
{
# alpha, nu, sigma dimensions are [1, samples], we want [samples]
"alpha": (["draw"], samples["alpha"].squeeze(0)),
"nu": (["draw"], samples["nu"].squeeze(0)),
"sigma": (["draw"], samples["sigma"].squeeze(0)),
# beta dimensions are [k, samples], we want [samples, k]
"beta": (["draw", "feature"], samples["beta"].T),
},
coords={
"draw": np.arange(samples["beta"].shape[1]),
"feature": np.arange(samples["beta"].shape[0]),
},
)
def get_code(self) -> str:
return """
model {
# priors
# note: JAGS normal distribution uses precision rather than standard deviation
alpha ~ dnorm(0.0, 1/(alpha_scale**2));
for (j in 1:k) {
beta[j] ~ dnorm(beta_loc, 1/(beta_scale**2));
}
# gamma and exponential are parameterized by shape, rate (=1/scale)
nu ~ dgamma(2, 0.1)
sigma ~ dexp(1/sigma_mean)
# likelihood
for (i in 1:n) {
mu[i] <- alpha + inprod(beta, X[i,])
Y[i] ~ dt(mu[i], 1/(sigma**2), nu)
}
}
"""
| nilq/baby-python | python |
import asyncio
import discord
import os
from keep_alive import keep_alive
client = discord.Client()
@client.event
async def on_ready():
print('We have logged in as {0.user}'.format(client))
@client.event
async def on_message(message):
if message.author == client.user:
return
if str(message.author) in ["robbbot#6138"]:
await message.add_reaction('🟪')
if str(message.author) in ["barftron666#9511"]:
await message.add_reaction('👐')
if str(message.author) in ["SinSemilla#3965"]:
await message.add_reaction('<:NH:789552992876494888>')
if str(message.author) in ["Desert Ham#2846"]:
await message.add_reaction('<:DS:784463585531265104>')
if message.content.startswith('$hello'):
await message.channel.send('Hello!')
if message.content.startswith('@nickhalford'):
await message.channel.send('SinSemilla#3965')
if message.content.startswith('$darcysux') or message.content.startswith(
'$ds'):
await message.channel.send('<:DS:784463585531265104>')
if message.content.startswith('$thumb'):
channel = message.channel
await channel.send('Send me that 👍')
def check(reaction, user):
return user == message.author and str(reaction.emoji) == '👍'
try:
reaction, user = await client.wait_for(
'reaction_add', timeout=60.0, check=check)
except asyncio.TimeoutError:
await channel.send('👎')
else:
await channel.send('👍')
if message.content.startswith('$commands'):
channel = message.channel
await channel.send('$hello, $ds, $darcysux, $commands')
keep_alive()
client.run(os.getenv('TOKEN'))
| nilq/baby-python | python |
""" Command line utility for repository """
import fire
from repo_utils import get_repository_path
import os
import importlib
# This makes available all definitions found in the same package as this file.
# It allows turning any package that follows the repo_utils template into a command line by putting start_command_line inside it.
package_name = __file__.replace(get_repository_path(), '').split(os.sep)[1]
mod = importlib.import_module(package_name)
def start_command_line():
"""
Command-line interface for the repository.
Specify the definition to execute and then any arguments.
e.g. "define <name>".
The Fire library converts the specified function or object into a command-line utility.
"""
global mod
fire.Fire(mod)
| nilq/baby-python | python |
from random import choice
from typing import Union
from datetime import datetime
from discord import User, Member, Embed
from discord.ext import commands
from bot.main import NewCommand
class Hug(commands.Cog):
def __init__(self, client):
self.client = client
def randomise(self, users:list):
messages = [
f'{users[0]} hugs {users[1]}!',
f'{users[0]} gives {users[1]} a big hug!',
f'{users[0]} gives a huge hug to {users[1]}!',
]
gifs = [
'https://media.giphy.com/media/PHZ7v9tfQu0o0/giphy.gif',
'https://media.giphy.com/media/od5H3PmEG5EVq/giphy.gif',
'https://media.giphy.com/media/GMFUrC8E8aWoo/giphy.gif',
'https://media.giphy.com/media/svXXBgduBsJ1u/giphy.gif',
'https://media.giphy.com/media/QFPoctlgZ5s0E/giphy.gif',
'https://media.giphy.com/media/3bqtLDeiDtwhq/giphy.gif',
'https://media.giphy.com/media/sUIZWMnfd4Mb6/giphy.gif',
'https://media.giphy.com/media/lrr9rHuoJOE0w/giphy.gif',
'https://media.giphy.com/media/du8yT5dStTeMg/giphy.gif',
'https://media.giphy.com/media/l2QDM9Jnim1YVILXa/giphy.gif',
'https://media.giphy.com/media/DjczAlIcyK1Co/giphy.gif',
'https://media.giphy.com/media/2A75Y6NodD38I/giphy.gif',
'https://media.giphy.com/media/10BcGXjbHOctZC/giphy.gif',
]
return (choice(gifs), choice(messages))
@commands.command(
name='hug',
cls=NewCommand,
brief='A Hug for you!',
description='Give someone a Tight Hug!',
usage='<user:name/id/@mention>',
explained_usage=["**User:** The User whom you wanna give a Hug!"],
examples=[
'hug @Akshu',
'hug 764462046032560128',
'hug Akshu#7472'
]
)
@commands.guild_only()
@commands.bot_has_permissions(send_messages=True, embed_links=True)
@commands.cooldown(1, 5, commands.BucketType.member)
async def _hug(self, ctx, user:Union[User, Member]):
gif, msg = self.randomise([ctx.author.name, user.name])
embed=Embed(color=0x00eeff, timestamp=datetime.utcnow())
embed.set_author(name=msg, icon_url=ctx.author.avatar_url)
embed.set_footer(text=f'Thanks for using {ctx.guild.me.name}', icon_url=ctx.guild.me.avatar_url)
embed.set_image(url=gif)
await ctx.reply(embed=embed)
def setup(client):
client.add_cog(Hug(client))
| nilq/baby-python | python |
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.utils import timezone
from transductor.models import EnergyTransductor, TransductorModel
class EnergyTransductorViewsTestCase(TestCase):
def setUp(self):
t_model = TransductorModel()
t_model.name = "TR 4020"
t_model.transport_protocol = "UDP"
t_model.serial_protocol = "Mosbus RTU"
t_model.measurements_type = "EnergyMeasurements"
t_model.register_addresses = [[68, 0], [70, 1]]
t_model.save()
self.t_model = t_model
transductor = self.create_energy_transductor(1, "Test Transductor", "1.1.1.1", t_model)
self.transductor = transductor
def test_index_access_and_template(self):
url = reverse('transductor:index')
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertTemplateUsed(response, 'transductor/index.html')
def test_index_with_transductor(self):
transductor = self.transductor
url = reverse('transductor:index')
response = self.client.get(url)
self.assertIn(transductor.description, response.content)
def test_transductor_creation_page(self):
url = reverse('transductor:new')
response = self.client.get(url)
self.assertEqual(200, response.status_code)
def test_not_create_energy_transductor_without_params(self):
url = reverse('transductor:new')
params = {
'serie_number': u'',
'ip_address': u'',
'description': u'',
'model': u''
}
response = self.client.post(url, params)
self.assertFormError(response, 'form', 'serie_number', 'This field is required.')
self.assertFormError(response, 'form', 'ip_address', 'This field is required.')
self.assertFormError(response, 'form', 'description', 'This field is required.')
self.assertFormError(response, 'form', 'model', 'This field is required.')
def test_create_valid_energy_transductor(self):
t_model = self.t_model
transductor_count = EnergyTransductor.objects.count()
url = reverse('transductor:new')
params = {
'serie_number': 1,
'ip_address': '111.111.111.111',
'description': 'Test',
'model': t_model.id
}
response = self.client.post(url, params)
self.assertEqual(transductor_count + 1, EnergyTransductor.objects.count())
t_id = EnergyTransductor.objects.get(ip_address='111.111.111.111').id
detail_url = reverse('transductor:detail', kwargs={'transductor_id': t_id})
self.assertRedirects(response, detail_url)
def test_not_create_transductor_with_same_ip_address(self):
t_model = self.t_model
transductor = self.transductor
url = reverse('transductor:new')
params = {
'serie_number': 1,
'ip_address': transductor.ip_address,
'description': 'Test',
'model': t_model.id
}
response = self.client.post(url, params)
self.assertFormError(response, 'form', 'ip_address', 'Transductor with this Ip address already exists.')
def test_not_create_transductor_with_wrong_ip_address(self):
t_model = self.t_model
url = reverse('transductor:new')
params = {
'serie_number': 1,
'ip_address': '1',
'description': 'Test',
'model': t_model.id
}
response = self.client.post(url, params)
self.assertFormError(response, 'form', 'ip_address', 'Incorrect IP address format')
def test_energy_transductor_detail(self):
t_model = self.t_model
transductor = self.create_energy_transductor(1, "Test", "111.111.111.111", t_model)
url = reverse('transductor:detail', kwargs={'transductor_id': transductor.id})
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertIn("No measurement avaiable", response.content)
def test_transductor_editing_page(self):
transductor = self.transductor
url = reverse('transductor:edit', kwargs={'transductor_id': transductor.id})
response = self.client.get(url)
self.assertEqual(200, response.status_code)
def test_change_transductor_model(self):
t_model_1 = self.t_model
transductor = self.create_energy_transductor(1, "Test", "111.111.111.111", t_model_1)
t_model_2 = TransductorModel()
t_model_2.name = "Transductor Model 2"
t_model_2.transport_protocol = "TCP/IP"
t_model_2.serial_protocol = "Mosbus"
t_model_2.register_addresses = [[100, 0], [105, 1]]
t_model_2.save()
url = reverse('transductor:edit', kwargs={'transductor_id': transductor.id})
params = {
'serie_number': 2,
'ip_address': '222.222.222.222',
'description': 'Another Test',
'model': t_model_2.id
}
self.client.post(url, params)
transductor = EnergyTransductor.objects.get(ip_address='222.222.222.222')
self.assertEqual(2, transductor.serie_number)
self.assertEqual("Another Test", transductor.description)
self.assertEqual(t_model_2, transductor.model)
def test_not_edit_transductor_with_wrong_params(self):
t_model = self.t_model
url = reverse('transductor:edit', kwargs={'transductor_id': self.transductor.id})
params = {
'serie_number': 2,
'ip_address': 'Wrong Ip Addres',
'description': 'Another Test',
'model': t_model.id
}
response = self.client.post(url, params)
self.assertFormError(response, 'form', 'ip_address', 'Incorrect IP address format')
def test_delete_energy_transductor(self):
t_model = self.t_model
transductor = self.create_energy_transductor(1, "Test", "111.111.111.111", t_model)
transductor_count = EnergyTransductor.objects.count()
url = reverse('transductor:delete', kwargs={'transductor_id': transductor.id})
params = {
'delete': u''
}
self.client.post(url, params)
self.assertEqual(transductor_count - 1, EnergyTransductor.objects.count())
def test_not_delete_energy_transductor_with_get_method(self):
t_model = self.t_model
transductor = self.create_energy_transductor(1, "Test", "111.111.111.111", t_model)
transductor_count = EnergyTransductor.objects.count()
url = reverse('transductor:delete', kwargs={'transductor_id': transductor.id})
self.client.get(url)
self.assertEqual(transductor_count, EnergyTransductor.objects.count())
def test_transductor_model_index(self):
t_model = self.t_model
url = reverse('transductor:model_index')
response = self.client.get(url)
self.assertIn(t_model.name, response.content)
def create_energy_transductor(self, serie_number, description, ip_address, t_model):
transductor = EnergyTransductor()
transductor.serie_number = serie_number
transductor.description = description
transductor.creation_date = timezone.now()
transductor.ip_address = ip_address
transductor.model = t_model
transductor.save()
return transductor
| nilq/baby-python | python |
'''
Created on March 30, 2018
@author: Alejandro Molina
'''
import numpy as np
from spn.algorithms.StructureLearning import get_next_operation, learn_structure
from spn.algorithms.Validity import is_valid
from spn.algorithms.splitting.Clustering import get_split_rows_KMeans, get_split_rows_TSNE
from spn.algorithms.splitting.RDC import get_split_cols_RDC_py, get_split_rows_RDC_py
from spn.structure.Base import Sum, assign_ids
from spn.structure.leaves.histogram.Histograms import create_histogram_leaf
from spn.structure.leaves.parametric.Parametric import create_parametric_leaf
from spn.structure.leaves.piecewise.PiecewiseLinear import create_piecewise_leaf
def learn_classifier(data, ds_context, spn_learn_wrapper, label_idx, cpus=-1, rand_gen=None):
spn = Sum()
for label, count in zip(*np.unique(data[:, label_idx], return_counts=True)):
branch = spn_learn_wrapper(data[data[:, label_idx] == label, :], ds_context, cpus=cpus, rand_gen=rand_gen)
spn.children.append(branch)
spn.weights.append(count / data.shape[0])
spn.scope.extend(branch.scope)
assign_ids(spn)
valid, err = is_valid(spn)
assert valid, "invalid spn: " + err
return spn
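# Illustrative call (names are placeholders): 'data' is a numpy array whose last
# column holds the class label, and 'ds_context' is the matching Context object.
# learn_parametric (defined below) is passed as the per-class structure learner.
#
#   spn = learn_classifier(data, ds_context,
#                          spn_learn_wrapper=learn_parametric,
#                          label_idx=data.shape[1] - 1)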
def learn_mspn_with_missing(data, ds_context, cols="rdc", rows="kmeans", min_instances_slice=200, threshold=0.3,
linear=False, ohe=False, leaves=None, memory=None, rand_gen=None, cpus=-1):
if leaves is None:
# leaves = create_histogram_leaf
leaves = create_piecewise_leaf
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def l_mspn_missing(data, ds_context, cols, rows, min_instances_slice, threshold, linear, ohe):
if cols == "rdc":
split_cols = get_split_cols_RDC_py(threshold, rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
if rows == "rdc":
split_rows = get_split_rows_RDC_py(rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
elif rows == "kmeans":
split_rows = get_split_rows_KMeans()
        # 'leaves' is already defaulted in the enclosing function; re-binding it
        # here would shadow the closure variable and raise UnboundLocalError
nextop = get_next_operation(min_instances_slice)
return learn_structure(data, ds_context, split_rows, split_cols, leaves, nextop)
if memory:
l_mspn_missing = memory.cache(l_mspn_missing)
return l_mspn_missing(data, ds_context, cols, rows, min_instances_slice, threshold, linear, ohe)
def learn_mspn(data, ds_context, cols="rdc", rows="kmeans", min_instances_slice=200, threshold=0.3, ohe=False,
leaves=None, memory=None, rand_gen=None, cpus=-1):
if leaves is None:
leaves = create_histogram_leaf
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def l_mspn(data, ds_context, cols, rows, min_instances_slice, threshold, ohe):
if cols == "rdc":
split_cols = get_split_cols_RDC_py(threshold, rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
if rows == "rdc":
split_rows = get_split_rows_RDC_py(rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
elif rows == "kmeans":
split_rows = get_split_rows_KMeans()
nextop = get_next_operation(min_instances_slice)
return learn_structure(data, ds_context, split_rows, split_cols, leaves, nextop)
if memory:
l_mspn = memory.cache(l_mspn)
return l_mspn(data, ds_context, cols, rows, min_instances_slice, threshold, ohe)
def learn_parametric(data, ds_context, cols="rdc", rows="kmeans", min_instances_slice=200, threshold=0.3, ohe=False,
leaves=None, memory=None, rand_gen=None, cpus=-1):
if leaves is None:
leaves = create_parametric_leaf
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def learn_param(data, ds_context, cols, rows, min_instances_slice, threshold, ohe):
if cols == "rdc":
split_cols = get_split_cols_RDC_py(threshold, rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
if rows == "rdc":
split_rows = get_split_rows_RDC_py(rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
elif rows == "kmeans":
split_rows = get_split_rows_KMeans()
nextop = get_next_operation(min_instances_slice)
return learn_structure(data, ds_context, split_rows, split_cols, leaves, nextop)
if memory:
learn_param = memory.cache(learn_param)
return learn_param(data, ds_context, cols, rows, min_instances_slice, threshold, ohe)
| nilq/baby-python | python |
__version__ = "0.0.1"
from ._widget import LiveIDS
from .video_ui import initui
| nilq/baby-python | python |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('eventlog', '0037_auto_20180911_1252'),
]
operations = [
migrations.RemoveField(
model_name='celerytaskprogress',
name='content_type',
),
migrations.RemoveField(
model_name='celerytaskprogress',
name='object_id',
),
migrations.AlterField(
model_name='celerytaskprogress',
name='task_type',
field=models.IntegerField(default=0, choices=[(0, 'Bulk Site Update'), (1, 'User Assign to Project'), (2, 'User Assign to Site'), (3, 'Site Response Xls Report'), (4, 'Site Import'), (6, 'Zip Site Images'), (7, 'Remove Roles'), (8, 'Site Data Export'), (9, 'Response Pdf Report'), (10, 'Site Progress Xls Report')]),
),
]
| nilq/baby-python | python |
import pytest
from kaneda.backends import BaseBackend
class DummyBackend(BaseBackend):
reported_data = {}
def report(self, name, metric, value, tags, id_=None):
payload = self._get_payload(name, value, tags)
payload['metric'] = metric
self.reported_data[name] = payload
@pytest.fixture
def dummy_backend():
return DummyBackend()
@pytest.fixture
def empty_settings():
class Settings:
pass
return Settings
@pytest.fixture
def unexisting_backend_settings():
class Settings:
BACKEND = 'kaneda.backends.UnexsitingBackend'
return Settings
| nilq/baby-python | python |
#!/usr/bin/env python
"""
Copy one netCDF file to another with compression and sensible
chunking
Adapted from nc3tonc4
https://github.com/Unidata/netcdf4-python/blob/master/utils/nc3tonc4
"""
from netCDF4 import Dataset
import numpy as np
import numpy.ma as ma
import os
import sys
import math
import operator
from warnings import warn
import argparse
import copy
import numbers
from six.moves import reduce
dtypes = {
'f' : 4, # f4, 32-bit floating point
'd' : 8, # f8, 64-bit floating point
    'e' : 2, # f2, 16-bit floating point
'i' : 4, # i4, 32-bit signed integer
'h' : 2, # i2, 16-bit signed integer
    'l' : 8, # i8, 64-bit signed integer
'b' : 1, # i1, 8-bit signed integer
'B' : 1, # u1, 8-bit unsigned integer
'H' : 2, # u2, 16-bit unsigned integer
'I' : 4, # u4, 32-bit unsigned integer
'L' : 8, # u8, 64-bit unsigned integer
'S' : 1 } # S1, single-character string
class FormatError(Exception):
'''Unsupported netCDF format'''
def numVals(shape):
"""Return number of values in chunk of specified shape, given by a list of dimension lengths.
shape -- list of variable dimension sizes"""
if(len(shape) == 0):
return 1
return reduce(operator.mul, shape)
def cascadeRounding(array):
"""Implement cascase rounding
http://stackoverflow.com/questions/792460/how-to-round-floats-to-integers-while-preserving-their-sum
"""
sort_index = np.argsort(array)
integer_array = []
total_float = 0
total_int = 0
# We place a hard limit on the total of the array, which keeps
# the rounded values from exceeding the total of the array
limit = np.floor(sum(array))
for idx in sort_index:
total_float += array[idx]
integer_array.append(min(round(total_float),limit)-total_int)
total_int += integer_array[-1]
rounded_array = np.zeros(len(array))
# Should make this a comprehension, but I couldn't comprehend it
for i in range(len(sort_index)):
rounded_array[sort_index[i]] = integer_array[i]
return rounded_array
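# Example of the behaviour (illustrative): cascade rounding keeps the rounded
# values summing to the floor of the original total, unlike naive rounding.
#
#   cascadeRounding([1.4, 1.4, 1.2])   # -> array([2., 1., 1.]), sum 4
#   # naive element-wise rounding would give [1, 1, 1], sum 3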
def calcChunkShape(chunkVol, varShape):
"""
Calculate a chunk shape for a given volume/area for the dimensions in varShape.
chunkVol -- volume/area of the chunk
    varShape -- array of dimensions for the whole dataset
"""
return np.array(cascadeRounding(np.asarray(varShape) * (chunkVol / float(numVals(varShape))) ** (1./len(varShape))),dtype="int")
def chunk_shape_nD(varShape, valSize=4, chunkSize=4096, minDim=1):
"""
Return a 'good shape' for an nD variable, assuming balanced 1D, 2D access
varShape -- list of variable dimension sizes
chunkSize -- minimum chunksize desired, in bytes (default 4096)
valSize -- size of each data value, in bytes (default 4)
    minDim -- minimum chunk dimension (if var dimension larger
              than this value, otherwise it is just var dimension)
Returns integer chunk lengths of a chunk shape that provides
balanced access of 1D subsets and 2D subsets of a netCDF or HDF5
variable var. 'Good shape' for chunks means that the number of
chunks accessed to read any kind of 1D or 2D subset is approximately
equal, and the size of each chunk (uncompressed) is at least
chunkSize, which is often a disk block size.
"""
varShapema = ma.array(varShape)
chunkVals = min(chunkSize / float(valSize),numVals(varShapema)) # ideal number of values in a chunk
# Make an ideal chunk shape array
chunkShape = ma.array(calcChunkShape(chunkVals,varShapema),dtype=int)
    # Short circuit for 1D arrays. Logic below unnecessary & can have divide by zero
if len(varShapema) == 1: return chunkShape.filled(fill_value=1)
# And a copy where we'll store our final values
chunkShapeFinal = ma.masked_all(chunkShape.shape,dtype=int)
if chunkVals < numVals(np.minimum(varShapema,minDim)):
while chunkVals < numVals(np.minimum(varShapema,minDim)):
minDim -= 1
sys.stderr.write('Mindim too large for variable, reduced to : %d\n' % minDim)
lastChunkCount = -1
while True:
# Loop over the axes in chunkShape, making sure they are at
# least minDim in length.
for i in range(len(chunkShape)):
if ma.is_masked(chunkShape[i]):
continue
if (chunkShape[i] < minDim):
# Set the final chunk shape for this dimension
chunkShapeFinal[i] = min(minDim,varShapema[i])
# mask it out of the array of possible chunkShapes
chunkShape[i] = ma.masked
# Have we fixed any dimensions and filled them in chunkShapeFinal?
if chunkShapeFinal.count() > 0:
chunkCount = numVals(chunkShapeFinal[~chunkShapeFinal.mask])
else:
if (lastChunkCount == -1):
# Haven't modified initial guess, break out of
# this loop and accept chunkShape
break
if chunkCount != lastChunkCount and len(varShapema[~chunkShape.mask]) > 0:
# Recalculate chunkShape array, with reduced dimensions
chunkShape[~chunkShape.mask] = calcChunkShape(chunkVals/chunkCount,varShapema[~chunkShape.mask])
lastChunkCount = chunkCount
else:
break
# This doesn't work when chunkShape has no masked values. Weird.
# chunkShapeFinal[chunkShapeFinal.mask] = chunkShape[~chunkShape.mask]
for i in range(len(chunkShapeFinal)):
if ma.is_masked(chunkShapeFinal[i]):
chunkShapeFinal[i] = chunkShape[i]
return chunkShapeFinal.filled(fill_value=1)
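# Illustrative call (the exact result depends on the cascade rounding above):
# for a variable shaped (time, lat, lon) = (1000, 720, 1440) holding 4-byte
# floats and the default 4096-byte chunk, the returned shape is spread across
# all three axes with a product close to 4096 / 4 = 1024 values.
#
#   chunk_shape_nD((1000, 720, 1440), valSize=4, chunkSize=4096)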
def nc2nc(filename_o, filename_d, zlib=True, complevel=5, shuffle=True, fletcher32=False,
clobber=False, verbose=False, classic=True, lsd_dict=None, vars=None, chunksize=4, buffersize=50, mindim=1,ignoreformat=False):
"""convert a netcdf file (filename_o) to another netcdf file (filename_d)
The default format is 'NETCDF4_classic', but can be set to NETCDF4 if classic=False.
If the lsd_dict is not None, variable names corresponding to the keys of the dict
will be truncated to the decimal place specified by the values of the dict.
    This improves compression by making it 'lossy'.
If vars is not None, only variable names in the list will be copied (plus all the
dimension variables). The zlib, complevel and shuffle keywords control
    how the compression is done. buffersize is the size (in MiB) of the buffer used to
copy the data from one file to another. mindim sets a minimum size for a dimension
of a chunk. In some cases very large variable dimensions will mean chunk sizes for
the smaller dimensions will be small, with a minimum of at least 1. This can lead to
slow access times.
"""
if os.path.isfile(filename_d) and not clobber:
sys.stderr.write('Output file already exists: %s. Use -o option to overwrite\n' % filename_d)
return False
ncfile_o = Dataset(filename_o,'r')
    if ncfile_o.file_format == "NETCDF4":
if ignoreformat:
warn('netCDF4 formatted file .. ignoring')
else:
raise FormatError('nc2nc is not tested to work with netCDF4 files, only netCDF4 Classic, and netCDF3. See --ignoreformat option to ignore warning')
if classic:
ncfile_d = Dataset(filename_d,'w',clobber=clobber,format='NETCDF4_CLASSIC')
else:
ncfile_d = Dataset(filename_d,'w',clobber=clobber,format='NETCDF4')
mval = 1.e30 # missing value if unpackshort=True
# Copy buffer specified in MiB, so convert to bytes
buffersize = buffersize*(1024**2)
# Chunk size specified in KiB, so convert to bytes
chunksize = chunksize*1024
# create dimensions. Check for unlimited dim.
unlimdimname = False
unlimdim = None
# create global attributes.
if verbose: sys.stdout.write('copying global attributes ..\n')
#for attname in ncfile_o.ncattrs():
# setattr(ncfile_d,attname,getattr(ncfile_o,attname))
ncfile_d.setncatts(ncfile_o.__dict__)
# Copy dimensions
if verbose: sys.stdout.write('copying dimensions ..\n')
for dimname,dim in ncfile_o.dimensions.items():
if dim.isunlimited():
unlimdimname = dimname
unlimdim = dim
ncfile_d.createDimension(dimname,None)
else:
ncfile_d.createDimension(dimname,len(dim))
# create variables.
if vars is None:
varnames = ncfile_o.variables.keys()
else:
# variables to copy specified
varnames = vars
# add dimension variables
for dimname in ncfile_o.dimensions.keys():
if dimname in ncfile_o.variables.keys() and dimname not in varnames:
varnames.append(dimname)
for varname in varnames:
ncvar = ncfile_o.variables[varname]
if verbose: sys.stdout.write('copying variable %s\n' % varname)
# quantize data?
if lsd_dict is not None and varname in lsd_dict:
lsd = int(lsd_dict[varname])
if verbose: sys.stdout.write('truncating to least_significant_digit = %d\n'%lsd)
else:
lsd = None # no quantization.
datatype = ncvar.dtype
# is there an unlimited dimension?
if unlimdimname and unlimdimname in ncvar.dimensions:
hasunlimdim = True
else:
hasunlimdim = False
if hasattr(ncvar, '_FillValue'):
FillValue = ncvar._FillValue
else:
FillValue = None
chunksizes = None
# check we have a mapping from the type to a number of bytes
if ncvar.dtype.char in dtypes:
if verbose: sys.stdout.write('Variable shape: %s\n' % str(ncvar.shape))
if (ncvar.shape != ()): chunksizes=chunk_shape_nD(ncvar.shape,valSize=dtypes[ncvar.dtype.char],minDim=mindim,chunkSize=chunksize)
if verbose: sys.stdout.write('Chunk sizes: %s\n' % str(chunksizes))
else:
sys.stderr.write("This datatype not supported: dtype : %s\n" % ncvar.dtype.char)
sys.exit(1)
# Create the variable we will copy to
var = ncfile_d.createVariable(varname, datatype, ncvar.dimensions, fill_value=FillValue, least_significant_digit=lsd, zlib=zlib, complevel=complevel, shuffle=shuffle, fletcher32=fletcher32, chunksizes=chunksizes)
# fill variable attributes.
attdict = ncvar.__dict__
if '_FillValue' in attdict: del attdict['_FillValue']
var.setncatts(attdict)
# fill variable with data.
dimlim = np.asarray(ncvar.shape)
# bufferChunk is a multiple of the chunksize which is less than the size of copy buffer
if (ncvar.shape != ()): bufferChunk = chunk_shape_nD(ncvar.shape,valSize=dtypes[ncvar.dtype.char],chunkSize=buffersize)
# Don't bother copying in steps if all our data fits inside the bufferChunk
if ncvar.shape == () or np.all(bufferChunk >= dimlim):
var[:] = ncvar[:]
else:
# Make sure our chunk size is no larger than the dimension in that direction
for ind, chunk in enumerate(bufferChunk):
if chunk > dimlim[ind]: bufferChunk[ind] = dimlim[ind]
if verbose: sys.stdout.write('Buffer chunk : %s\n' % str(bufferChunk))
# bufferSteps is the number of copies of bufferChunk that fit along each axis
bufferSteps = (dimlim-1)//bufferChunk + 1
# Make an iterator out of all possible combinations of the bufferOffsets, which
# are just steps along each dimension
for index in np.ndindex(*bufferSteps):
index *= bufferChunk
slices = []
# Make up slices of size bufferChunk
for start, step, end in zip(index, bufferChunk, dimlim):
# min checks we don't go beyond the limits of the variable
slices.append(slice(start,min(start+step,end),None))
# Copy the data
                    var[tuple(slices)] = ncvar[tuple(slices)]
ncfile_d.sync() # flush data to disk
# close files.
ncfile_o.close()
ncfile_d.close()
def parse_args(arglist):
"""
Parse arguments given as list (arglist)
"""
class DictAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
try:
k, v = values.split("=", 1)
except ValueError:
raise argparse.ArgumentError(self, "Format must be key=value")
# Implementation is from argparse._AppendAction
items = copy.copy(argparse._ensure_value(namespace, self.dest, {})) # Default mutables, use copy!
try:
items[k] = int(v)
except ValueError:
raise argparse.ArgumentError(self, "value must be an integer")
if items[k] < 0: raise argparse.ArgumentError(self, "value cannot be negative")
setattr(namespace, self.dest, items)
def positive_int(value):
ivalue = int(value)
if ivalue < 1:
raise argparse.ArgumentTypeError("%s is an invalid positive int value" % value)
return ivalue
parser = argparse.ArgumentParser(description="Make a copy of a netCDF file with automatic chunk sizing")
parser.add_argument("-d","--dlevel", help="Set deflate level. Valid values 0-9 (default=5)", type=int, default=5, choices=range(0,10), metavar='{1-9}')
parser.add_argument("-m","--mindim", help="Minimum dimension of chunk. Valid values 1-dimsize", type=positive_int, default=1)
parser.add_argument("-s","--chunksize", help="Set chunksize - total size of one chunk in KiB (default=64)", type=int, default=64)
parser.add_argument("-b","--buffersize", help="Set size of copy buffer in MiB (default=500)", type=int, default=500)
parser.add_argument("-n","--noshuffle", help="Don't shuffle on deflation (default is to shuffle)", action='store_true')
parser.add_argument("-v","--verbose", help="Verbose output", action='store_true')
parser.add_argument("-c","--classic", help="use NETCDF4_CLASSIC output instead of NETCDF4 (default true)", action='store_false')
parser.add_argument("-f","--fletcher32", help="Activate Fletcher32 checksum", action='store_true')
parser.add_argument("-va","--vars", help="Specify variables to copy (default is to copy all)", action='append')
parser.add_argument("-q","--quantize", help="Truncate data in variable to a given decimal precision, e.g. -q speed=2 -q temp=0 causes variable speed to be truncated to a precision of 0.01 and temp to a precision of 1", action=DictAction)
parser.add_argument("-o","--overwrite", help="Write output file even if already it exists (default is to not overwrite)", action='store_true')
parser.add_argument("-i","--ignoreformat", help="Ignore warnings about netCDF4 formatted file: BE CAREFUL! (default false)", action='store_true')
parser.add_argument("origin", help="netCDF file to be compressed")
parser.add_argument("destination", help="netCDF output file")
return parser.parse_args(arglist)
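# Example invocation (illustrative; assumes the setup.py entry point is installed
# under a command name such as nc2nc):
#
#   nc2nc -d 5 -s 64 -m 1 -q temp=2 --overwrite input.nc output.nc
#
# This compresses input.nc at deflate level 5 with 64 KiB chunks and truncates
# the 'temp' variable to two decimal places before writing output.nc.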
def main(args):
zlib=False
if args.dlevel > 0: zlib=True
verbose = args.verbose
# copy the data from origin to destination
nc2nc(args.origin, args.destination, zlib=zlib, complevel=args.dlevel, shuffle=not args.noshuffle,
fletcher32=args.fletcher32, clobber=args.overwrite, lsd_dict=args.quantize,
verbose=verbose, vars=args.vars, classic=args.classic, chunksize=args.chunksize, buffersize=args.buffersize, ignoreformat=args.ignoreformat)
def main_parse_args(arglist):
"""
Call main with list of arguments. Callable from tests
"""
# Must return so that check command return value is passed back to calling routine
# otherwise py.test will fail
return main(parse_args(arglist))
def main_argv():
"""
Call main and pass command line arguments. This is required for setup.py entry_points
"""
main_parse_args(sys.argv[1:])
if __name__ == "__main__":
main_argv()
| nilq/baby-python | python |
#!/usr/bin/python3
__author__ = 'yangdd'
'''
example 034
'''
def hello_world():
print('Hello World')
def three_hello():
for i in range(3):
hello_world()
if __name__ == '__main__':
three_hello()
| nilq/baby-python | python |
from xml.etree import ElementTree
import csocha
from . import board, moves
class GameState:
def __init__(self, c: str, t: int, b: board.Board, undep: list):
self.color = c
self.opponent = "BLUE" if c == "RED" else "RED"
self.turn = t
self.board = b
self.undeployed = undep
def is_connected(self, fields: set) -> bool:
visited = [fields.pop()]
while len(visited) > 0:
neighbours = fields.intersection(csocha.neighbours(visited.pop(0)))
fields.difference_update(neighbours)
visited.extend(neighbours)
return len(fields) == 0
    def can_be_disconnected(self, piece: tuple) -> bool:
nonempty = self.board.nonempty()
if len(nonempty) == 1:
return True
neighbours = csocha.neighbours(piece)
length = len(nonempty.intersection(neighbours))
if length < 2 or length > 5:
return True
return self.is_connected(set(nonempty).difference({piece}))
def get_possible_moves(self) -> set:
# Get possible set moves
possible_moves = self.get_possible_set_moves()
# Add possible drag moves
possible_moves.update(self.get_possible_drag_moves())
# If no move is possible, add skip move
if len(possible_moves) == 0:
possible_moves.add(moves.SkipMove())
# Return possible moves
return possible_moves
def get_possible_set_moves(self) -> set:
# First turn
if self.turn == 0:
# All empty fields are possible
dests = self.board.empty()
# Second turn
elif self.turn == 1:
# Get first set piece
            field = next(iter(self.board.color(self.opponent)))
# Get empty fields next to first piece
dests = self.board.empty().intersection(csocha.neighbours(field))
# All other turns
else:
# Get own pieces
dests = self.board.color(self.color)
# Get neighbours of own pieces
dests = {y for x in dests for y in csocha.neighbours(x)}
# Only empty fields
dests.intersection_update(self.board.empty())
# Get opponent pieces
opponent = self.board.color(self.opponent)
# Get neighbours of opponent pieces
opponent = {y for x in opponent for y in csocha.neighbours(x)}
# Only fields not next to opponent pieces
dests = dests.difference(opponent)
        # If the bee isn't set by the fifth turn the player has to set the bee
if (self.turn > 5 and (self.color, "BEE") in self.undeployed):
types = {"BEE"}
else:
types = {x[1] for x in self.undeployed if x[0] == self.color}
# Return all combinations of pieces and destinations
return {
moves.SetMove((self.color, y), x)
for x in dests
for y in types
}
def get_possible_drag_moves(self) -> set:
# Drag moves are only possible when bee is set
if (self.color, "BEE") in self.undeployed:
return set()
possible_moves = set()
# Loop through all set pieces
for position in self.board.color(self.color):
# When there is no piece under piece
if len(self.board.fields[position]) == 1:
if not self.can_be_disconnected(position):
continue
else:
# Piece is stacked therefore has to be a beetle
dests = self.get_beetle_move_dests(position)
# Call function to get piece type specific destinations
if self.board.fields[position][-1][1] == "BEETLE":
dests = self.get_beetle_move_dests(position)
elif self.board.fields[position][-1][1] == "BEE":
dests = self.get_bee_move_dests(position, position)
elif self.board.fields[position][-1][1] == "SPIDER":
dests = self.get_spider_move_dests(position)
elif self.board.fields[position][-1][1] == "ANT":
dests = self.get_ant_move_dests(position)
elif self.board.fields[position][-1][1] == "GRASSHOPPER":
dests = self.get_grasshopper_move_dests(position)
else:
continue
# Add all destinations to possible_moves
possible_moves.update(moves.DragMove(position, x) for x in dests)
# Return possible moves
return possible_moves
def get_beetle_move_dests(self, pos: tuple) -> set:
# Get neighbours of pos
all_neighbours = csocha.neighbours(pos)
# Only take fields with pieces
neighbours = set(self.board.nonempty().intersection(all_neighbours))
        # If we are on top of another piece add it as well
if len(self.board.fields[pos]) > 1:
neighbours.add(pos)
# Get fields next to fields
dests = {y for x in neighbours for y in csocha.neighbours(x)}
# Only take fields in reach
dests.intersection_update(all_neighbours)
# Only take valid fields
dests.intersection_update(self.board.fields.keys())
# Return fields
return dests
def get_bee_move_dests(self, pos: tuple, start_pos: tuple) -> set:
# Get neighbours of pos
all_neighbours = csocha.neighbours(pos)
# Only take fields with pieces
neighbours = set(self.board.nonempty().intersection(all_neighbours))
# Remove own field
neighbours.discard(start_pos)
# Get fields next to fields
dests = set()
for neighbour in neighbours:
dests = dests.symmetric_difference(csocha.neighbours(neighbour))
# Get obstructed fields
obstructed = self.board.obstructed.copy()
# Only take obstructed fields in reach
obstructed.intersection_update(all_neighbours)
# Get fields next to obscructed fields
obstructed = (y for x in obstructed for y in csocha.neighbours(x))
# Remove fields next to obstructed
dests = dests.difference(obstructed)
# Only take fields in reach
dests.intersection_update(all_neighbours)
# Only take empty fields
dests.intersection_update(self.board.empty())
# Return fields
return dests
def get_spider_move_dests(self, pos: tuple) -> set:
dests = {pos}
all_dests = dests.copy()
for _ in range(3):
dests = {
y
for x in dests
for y in self.get_bee_move_dests(x, pos)
}.difference(all_dests)
all_dests.update(dests)
return dests
def get_ant_move_dests(self, pos: tuple) -> set:
found = set()
todo = {pos}
while len(todo) > 0:
dest = todo.pop()
found.add(dest)
dests = self.get_bee_move_dests(dest, pos).difference(found)
todo.update(dests)
found.discard(pos)
return found
def get_grasshopper_move_dests(self, pos: tuple) -> set:
dests = set()
for direction in [(1, 0), (1, -1), (0, -1), (-1, 0), (-1, 1), (0, 1)]:
dest = (pos[0] + direction[0], pos[1] + direction[1])
if dest in self.board.empty():
continue
while dest in self.board.nonempty():
dest = (dest[0] + direction[0], dest[1] + direction[1])
dests.add(dest)
dests.intersection_update(self.board.empty())
return dests
def get_bee(self, color: str) -> tuple:
# Loop through all fields
for position, pieces in self.board.fields.items():
# If bee is on this field return it
if len(pieces) > 0 and pieces[0] == (color, "BEE"):
return position
# Bee is not set jet, return none
return None
def game_ended(self):
        # Game end is only checked when it is red's turn
if self.color != "RED":
return False
# Get empty fields for use later
empty = self.board.empty()
# Get own bee
ownbee = self.get_bee(self.color)
# If own bee is set
if ownbee is not None:
# If own bee has been surrounded, game has ended
if len(set(csocha.neighbours(ownbee)).difference(empty)) == 6:
return True
# Get opponent bee
oppbee = self.get_bee(self.opponent)
# If opponent bee is set
if oppbee is not None:
# If opponent bee has been surrounded, game has ended
if len(set(csocha.neighbours(oppbee)).difference(empty)) == 6:
return True
# If turn limit is reach, game has ended
return self.turn >= 60
def hash(self, depth: int) -> bytes:
if self.turn > 7 and self.turn < 60 - depth:
return csocha.hash(self.board.fields) + str(self.color).encode()
return csocha.hash(self.board.fields) + str(self.turn).encode()
def parse(xml: ElementTree.Element) -> GameState:
color = xml.get("currentPlayerColor")
turn = int(xml.get("turn"))
_board = board.parse(xml.find("board"))
undeployed = []
for piece in xml.findall("*/piece"):
undeployed.append((piece.get("owner"), piece.get("type")))
return GameState(color, turn, _board, undeployed)
| nilq/baby-python | python |
from fastapi import FastAPI
TAREFAS = [
{"id": 1, "titulo": "Cristiano"},
{"id": 2, "titulo": "Araujo"}
]
app = FastAPI()
@app.get('/tarefas')
def listar():
return TAREFAS
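# Quick illustrative check with FastAPI's test client (assumes this module is
# importable and the test client dependency, requests/httpx, is installed):
#
#   from fastapi.testclient import TestClient
#   client = TestClient(app)
#   assert client.get('/tarefas').json() == TAREFAS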
| nilq/baby-python | python |
import string
from model_mommy import mommy
from datetime import datetime
from django_rq import job
from django.contrib.auth.models import User
from django.utils import timezone
from dateutil.parser import parse as extract_date
from django.conf import settings
from survey.models import *
from survey.utils.decorators import static_var
from survey.tests.base_test import BaseTest
from survey.forms.answer import SurveyAllocationForm, AddMoreLoopForm
from .survey_base_test import SurveyBaseTest
class InterviewsTest(SurveyBaseTest):
def test_get_answer_with_question_not_yet_answered(self):
self._create_ussd_non_group_questions()
num_question = Question.objects.filter(answer_type=NumericalAnswer.choice_name()).last()
self.assertEquals(self.interview.get_answer(num_question), '')
def test_save_answers_with_interview_id(self):
self._create_ussd_non_group_questions(self.qset)
answers = []
n_quest = Question.objects.get(answer_type=NumericalAnswer.choice_name())
t_quest = Question.objects.get(answer_type=TextAnswer.choice_name())
m_quest = Question.objects.get(answer_type=MultiChoiceAnswer.choice_name())
answers = [{n_quest.id: 1, t_quest.id: 'Hey Man', m_quest.id: 'Y'},
{n_quest.id: 5, t_quest.id: 'Hey Boy', m_quest.id: 'Y'},
{n_quest.id: 15, t_quest.id: 'Hey Girl!', m_quest.id: 'N'},
{n_quest.id: 15, t_quest.id: 'Hey Part!'}
]
question_map = {n_quest.id: n_quest, t_quest.id: t_quest, m_quest.id: m_quest}
Interview.save_answers(self.qset, self.survey, self.ea,
self.access_channel, question_map, answers, reference_interview=self.interview.id)
self.assertEquals(NumericalAnswer.objects.count(), 4)
self.assertEquals(TextAnswer.objects.count(), 4)
self.assertEquals(MultiChoiceAnswer.objects.count(), 3)
self.assertEquals(TextAnswer.objects.first().to_text().lower(), 'Hey Man'.lower())
self.assertEquals(MultiChoiceAnswer.objects.first().as_text.lower(), 'Y'.lower())
self.assertEquals(MultiChoiceAnswer.objects.first().as_value, str(QuestionOption.objects.get(text='Y').order))
self.assertEquals(Interview.objects.last().interview_reference, self.interview)
def test_interview_with_survey_parameters(self):
# self._create_ussd_group_questions()
pass
| nilq/baby-python | python |
from scraper.web_scraper import WebScraper
from loguru import logger
uri = 'https://www.investing.com/technical/technical-summary'
class SummaryTableScraper(WebScraper):
def __init__(self, uri, class_name):
super(SummaryTableScraper, self).__init__()
self.goto(uri)
self.n_table_pairs = 12
self.table_class_name = class_name
self.technical_summary = self.__get_technical_summary()
def __get_technical_summary(self):
return self.find('.'+self.table_class_name, first=True).text.split('\n')[6:]
def get_pairs_info(self):
"""
returns pairs data with keys as cur pairs and
values as dicts with keys - ratio ... summary
"""
summary_list = self.__get_technical_summary()
pairs_data = {}
tot_pairs = len(summary_list)//self.n_table_pairs
for i in range(0, len(summary_list), tot_pairs):
pairs_data[summary_list[i]] = {
'Pair' : summary_list[i],
'Ratio' : summary_list[i+1],
'MovingAvg' : summary_list[i+3:i+7],
'Indicators' : summary_list[i+8:i+12],
'Summary' : summary_list[i+13:i+17],
}
return pairs_data
def proc_pair_info(pair_info):
"""
return true if all are either `Strong Buy` OR `Strong Sell`
"""
if (len(set(pair_info['Summary'])) == 1) and (pair_info['Summary'][0][:6] == 'Strong'):
logger.debug(f"[TRUE ] scored .. {pair_info['Pair']} : {pair_info['Summary']}")
return True
logger.debug(f"[FALSE] scored .. {pair_info['Pair']} : {pair_info['Summary']}")
return False
class PairScores:
"""
Simple scores based on frequency [0,100]
"""
def __init__(self):
self.scores = {}
for pair in ['EUR/USD', 'GBP/USD', 'USD/JPY', 'USD/CHF', 'AUD/USD', 'EUR/GBP', 'USD/CAD', 'NZD/USD', 'EUR/JPY', 'EUR/CHF', 'GBP/JPY', 'GBP/CHF']:
self.scores[pair] = 0
def increment(self, cur_strong_pair):
self.scores[cur_strong_pair] = min(100, self.scores[cur_strong_pair]+1)
def decrement(self, cur_weak_pair):
self.scores[cur_weak_pair] = max(0, self.scores[cur_weak_pair]-1)
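# Minimal usage sketch: scores = PairScores(); scores.increment('EUR/USD') nudges that pair
# toward 100 and scores.decrement('EUR/USD') back toward 0, giving a rolling frequency score.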
import os, time
from pprint import pprint
if __name__ == '__main__':
scraper = SummaryTableScraper(uri=uri, class_name='technicalSummaryTbl')
data = scraper.get_pairs_info()
pair_scores = PairScores()
while True:
in_ = input('\nenter pair : ')
scraper.goto(uri)
data = scraper.get_pairs_info()
for _, pair_info in data.items():
if pair_info['Pair'] == in_:
pprint(pair_info['Summary'])
'''
scraper = SummaryTableScraper(uri=uri, class_name='technicalSummaryTbl')
data = scraper.get_pairs_info()
pair_scores = PairScores()
while True:
scraper.goto(uri)
data = scraper.get_pairs_info()
for _, pair_info in data.items():
strong = proc_pair_info(pair_info)
if strong:
pair_scores.increment(pair_info['Pair'])
elif not strong:
pair_scores.decrement(pair_info['Pair'])
print(pair_scores.scores)
print("="*200)
time.sleep(3)
'''
|
nilq/baby-python
|
python
|
import Bdecode as BD
import pprint
class Torrent:
def __init__(self, filename):
decoder = BD.Bdecode(filename)
self.torrentData = decoder.decode()
# self.meta_info = bencoding.Decoder(meta_info).decode()
def __str__(self):
        # pprint.pprint() prints to stdout and returns None, which makes __str__ raise a TypeError;
        # pprint.pformat() returns the formatted string instead.
        # announce = self.torrentData[b'meta_info'][b'announce'].decode('utf-8')
        return pprint.pformat(self.torrentData)
if __name__ == "__main__":
torrent = Torrent("test.torrent")
print(torrent)
|
nilq/baby-python
|
python
|
import collections.abc
from collections import OrderedDict
class ValidationError(Exception):
def __init__(self, errors):
self.errors = ValidationError.normalise(errors)
@staticmethod
def normalise(errors):
if isinstance(errors, dict):
new_errors = OrderedDict()
for k, v in errors.items():
if isinstance(v, (dict, list)):
v = ValidationError.normalise(v)
else:
v = [v]
new_errors[k] = v
elif isinstance(errors, list):
new_errors = []
for x in errors:
if isinstance(x, (dict, list)):
x = ValidationError.normalise(x)
new_errors.append(x)
else:
new_errors = [errors]
return new_errors
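    # e.g. normalise({'name': 'required'}) -> OrderedDict([('name', ['required'])]): scalar
    # messages are wrapped in lists so every leaf ends up as a list of error strings.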
@staticmethod
def _first(errors):
r = None
if isinstance(errors, list):
for x in errors:
r = ValidationError._first(x)
if r is not None:
break
elif isinstance(errors, dict):
for k, v in errors.items():
r = ValidationError._first(v)
if r is not None:
if r[0] is None:
path = (k,)
else:
path = (k,) + r[0]
r = (path, r[1])
break
else:
r = (None, errors)
return r
def first(self):
return ValidationError._first(self.errors)
@staticmethod
def _flatten(errors, path=None):
flattened_errors = []
if path is None:
path = tuple()
for field_name, field_errors in errors.items():
field_path = path + (field_name,)
            if isinstance(field_errors, collections.abc.Mapping):
flattened_field_errors = ValidationError._flatten(field_errors, path=field_path)
flattened_errors.extend(flattened_field_errors)
else:
for field_error in field_errors:
flattened_errors.append((field_path, field_error))
return flattened_errors
def flatten(self):
return ValidationError._flatten(self.errors)
def __str__(self):
return str(self.errors)
class SkipField(Exception):
pass
|
nilq/baby-python
|
python
|
from school.models.class_model import Class
from account.models.instructor_model import InstructorProfile
from country.models.country_model import Country
from country.models.city_model import City
from school.models.school_model import School
from django.urls.base import reverse
from rest_framework.test import APITestCase
from django.contrib.auth import get_user_model
User = get_user_model()
class StudentListTests(APITestCase):
url_list = reverse("school:student_list")
url_add = reverse("school:add_student_list_item")
url_list_by_instructor = reverse("school:student_list_by_class_instructor")
url_login = reverse("token_obtain_pair")
def setUp(self) -> None:
self.country = Country.objects.create(name = "Türkiye", code = "Tur")
self.city = City.objects.create(country = self.country, name = "Konya", code = "42")
self.school = School.objects.create(city = self.city, name = "Example School", address = "Example Address", website = "Example website")
self.password = "testpass123"
self.normal_user = User.objects.create_user(username = "normaldoe", password = self.password, user_type = 1)
self.instructor_user1 = User.objects.create_user(
username="johndoe",
password=self.password,
user_type=4,
email = "johndoe@example.com",
identity_number = "12345678910",
)
instructor_profile = InstructorProfile.objects.get(user = self.instructor_user1)
instructor_profile.school = self.school
instructor_profile.save()
self.instructor_user2 = User.objects.create_user(
username="johndoe2",
password=self.password,
user_type=4,
email = "johndoe2@example.com",
identity_number = "12345678911",
)
instructor_profile2 = InstructorProfile.objects.get(user = self.instructor_user2)
instructor_profile2.school = self.school
instructor_profile2.save()
self.school_class = Class.objects.create(school = self.school, instructor = self.instructor_user1.user_instructor, name = "Class A", grade = 1)
self.child = User.objects.create_user(
username = "johndoechild",
password = self.password,
email = "childdoe@example.com",
identity_number = "12345678912",
user_type = 2
)
self.url_destroy = reverse("school:student_list_item_destroy", kwargs={"class_id": self.school_class.id, "child_id": self.child.id})
def login_with_token(self, login_data):
"""
        A helper method that performs the login flow and attaches the JWT access token to the test client.
"""
response = self.client.post(self.url_login, login_data)
self.assertEqual(200, response.status_code)
token = response.data["access"]
self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
def test_student_list(self):
"""
        Tests that the student list page returns a status code of 200.
"""
response = self.client.get(self.url_list)
self.assertEqual(200, response.status_code)
def test_add_item_is_authenticated(self):
"""
Tests whether the user is authenticated.
"""
response = self.client.get(self.url_add)
self.assertEqual(401, response.status_code)
def test_add_item_is_instructor(self):
"""
Tests whether the user is instructor.
"""
data = {
"username": self.normal_user.username,
"password": self.password
}
self.login_with_token(data)
response = self.client.get(self.url_add)
self.assertEqual(403, response.status_code)
def test_add_list_item_is_own_class(self):
"""
        Tests that an instructor cannot add a student to a class owned by another instructor (403).
"""
login_data = {
"username": self.instructor_user2.username,
"password": self.password
}
self.login_with_token(login_data)
data = {"school_class": self.school_class.id, "child": self.child.id}
response = self.client.post(self.url_add, data)
self.assertEqual(403, response.status_code)
def test_add_list_item(self):
"""
        Tests that an instructor can add a student to their own class (201 Created).
"""
login_data = {
"username": self.instructor_user1.username,
"password": self.password
}
self.login_with_token(login_data)
data = {"school_class": self.school_class.id, "child": self.child.id}
response = self.client.post(self.url_add, data)
self.assertEqual(201, response.status_code)
def test_destroy_student_list_item_is_authenticated(self):
"""
Tests whether the user is authenticated, and if not, the user cannot access the "student list item destroy" page.
"""
response = self.client.get(self.url_destroy)
assert 401 == response.status_code
def test_destroy_student_list_item_is_instructor(self):
"""
Tests whether the user is instructor.
"""
login_data = {
"username": self.normal_user.username,
"password": self.password
}
self.login_with_token(login_data)
response = self.client.get(self.url_add)
assert 403 == response.status_code
def test_student_list_item_destroy_is_own_class(self):
"""
This test has to return a 404 result. This is because we used get_object_or_404 in Destroy View. Test user hasn't got any student so this test will return 404.
"""
login_data = {
"username": self.instructor_user2.username,
"password": self.password
}
self.login_with_token(login_data)
response = self.client.delete(self.url_destroy)
self.assertEqual(404, response.status_code)
def test_student_list_item_destroy(self):
"""
Tests whether the user can delete class-child relation properly.
"""
login_data = {
"username": self.instructor_user1.username,
"password": self.password
}
self.login_with_token(login_data)
self.test_add_list_item()
response = self.client.delete(self.url_destroy)
self.assertEqual(204, response.status_code)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Persistent identifier minters."""
from __future__ import absolute_import, print_function
from flask import current_app
from .providers.recordid import RecordIdProvider
from .providers.recordid_v2 import RecordIdProviderV2
def recid_minter_v2(record_uuid, data):
"""Mint record identifiers with RecordIDProviderV2.
This minter is recommended to be used when creating records to get
PersistentIdentifier with ``object_type='rec'`` and the new random
alphanumeric `pid_value`.
Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
``data``. The minted ``pid_value`` will be stored in that field.
:param record_uuid: The object UUID of the record.
:param data: The record metadata.
:returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
"""
pid_field = current_app.config['PIDSTORE_RECID_FIELD']
assert pid_field not in data
provider = RecordIdProviderV2.create(
object_type='rec', object_uuid=record_uuid)
data[pid_field] = provider.pid.pid_value
return provider.pid
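# Hypothetical usage inside a record-creation flow (names below are assumptions, not part of this module):
#   pid = recid_minter_v2(record_uuid, data)   # `data` now carries the freshly minted pid_value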
def recid_minter(record_uuid, data):
"""Mint record identifiers.
This is a minter specific for records.
With the help of
:class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
the PID instance with `rec` as predefined `object_type`.
Procedure followed: (we will use `control_number` as value of
`PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#. If a `control_number` field is already there, a `AssertionError`
exception is raised.
#. The provider is initialized with the help of
:class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
It's called with default value 'rec' for `object_type` and `record_uuid`
variable for `object_uuid`.
#. The new `id_value` is stored inside `data` as `control_number` field.
:param record_uuid: The record UUID.
:param data: The record metadata.
:returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
"""
pid_field = current_app.config['PIDSTORE_RECID_FIELD']
assert pid_field not in data
provider = RecordIdProvider.create(
object_type='rec', object_uuid=record_uuid)
data[pid_field] = provider.pid.pid_value
return provider.pid
|
nilq/baby-python
|
python
|
from threading import local
from django.test import TestCase
from django.test import override_settings
from cid.locals import generate_new_cid
from cid.locals import get_cid
from cid.locals import set_cid
_thread_locals = local()
class TestCidStorage(TestCase):
def setUp(self):
self.clear_cid()
self.cid = 'test-cid'
def tearDown(self):
self.clear_cid()
def clear_cid(self):
try:
delattr(_thread_locals, 'CID')
except AttributeError:
pass
def test_get_empty_cid(self):
self.assertIsNone(get_cid())
def test_set_cid(self):
self.assertIsNone(get_cid())
set_cid(self.cid)
self.assertEqual(self.cid, get_cid())
@override_settings(CID_GENERATE=True, CID_GENERATOR=lambda: 'constant_correlation')
def test_custom_generator(self):
assert generate_new_cid() == 'constant_correlation'
|
nilq/baby-python
|
python
|
'''Tools for interaction with IDF build system'''
def build_name(name):
name_parts = name.split('/')
return '__'.join(name_parts)
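# e.g. build_name('esp-idf/main/component') -> 'esp-idf__main__component'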
|
nilq/baby-python
|
python
|
from torch.utils.data import Dataset, DataLoader
from albumentations import (ShiftScaleRotate, Compose, CoarseDropout, RandomCrop, HorizontalFlip, OneOf, ElasticTransform,
OpticalDistortion, RandomGamma, Resize, GaussNoise, VerticalFlip, RandomBrightnessContrast)
import cv2
import os
import torch
import pickle
import matplotlib.pyplot as plt
import numpy as np
from skimage import io
import random
from utils import remove_small_areas, keep_large_area, fit_Ellipse, crop_mask_expand, roi_extend
class Dataset_train(Dataset):
def __init__(self, data_root='data', size=(512, 512), fold=0):
self.root = data_root
file = open(os.path.join(data_root, 'train_val_split_200803.pkl'), 'rb')
pkl_data = pickle.load(file)
if fold == -1:
self.train_name_list = pkl_data[0][0]
self.train_name_list.append(pkl_data[0][1])
else:
self.train_name_list = pkl_data[fold][0]
self.len = len(self.train_name_list)
self.transforms = Compose([Resize(size[0], size[0]),
ShiftScaleRotate(shift_limit=0.1, scale_limit=0.2, rotate_limit=30, p=0.7,
border_mode=cv2.BORDER_CONSTANT, value=0),
VerticalFlip(p=0.5),
OneOf([ElasticTransform(p=1, alpha=50, sigma=30, alpha_affine=30,
border_mode=cv2.BORDER_CONSTANT, value=0),
OpticalDistortion(p=1, distort_limit=0.5, shift_limit=0.1,
border_mode=cv2.BORDER_CONSTANT, value=0)], p=0.5),
RandomGamma(gamma_limit=(80, 120), p=0.5),
GaussNoise(var_limit=(10, 100), mean=0, p=0.5),
RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2, p=0.5),
CoarseDropout(max_holes=2, max_height=256, max_width=256, min_holes=1, min_height=5,
min_width=5, fill_value=0, p=0.5)
])
self.pseudo_mask_transformation = Compose([ShiftScaleRotate(shift_limit=0.1, scale_limit=0.1, rotate_limit=180, p=0.7,
border_mode=cv2.BORDER_CONSTANT, value=0, interpolation=cv2.INTER_NEAREST),
OneOf([ElasticTransform(p=1, alpha=50, sigma=30, alpha_affine=30, border_mode=cv2.BORDER_CONSTANT,
value=0, interpolation=cv2.INTER_NEAREST),
OpticalDistortion(p=1, distort_limit=0.5, shift_limit=0.1, border_mode=cv2.BORDER_CONSTANT,
value=0, interpolation=cv2.INTER_NEAREST)], p=0.5)])
def __getitem__(self, idx):
name = self.train_name_list[idx]
if random.randint(0, 1) == 1:
img = io.imread(os.path.join(self.root, 'img_match_challenge_val', name))
else:
img = io.imread(os.path.join(self.root, 'img', name))
Disc_Cup_mask = cv2.imread(os.path.join(self.root, 'mask', name.rstrip('.jpg') + '.png'), 0) # [0, 100, 200]
cate = int(name.strip('.jpg').split('_')[-1])
x1_new, x2_new, y1_new, y2_new = crop_mask_expand(Disc_Cup_mask, expand_Percentage=0.2)
Disc_Cup_mask_ROI = Disc_Cup_mask[x1_new: x2_new, y1_new: y2_new]
Disc_Cup_mask_ROI = self.pseudo_mask_transformation(image=Disc_Cup_mask_ROI)['image']
Disc_Cup_mask[x1_new: x2_new, y1_new: y2_new] = Disc_Cup_mask_ROI
augmented = self.transforms(image=img, mask=Disc_Cup_mask)
img, Disc_Cup_mask = augmented['image'], augmented['mask']
img = torch.from_numpy(img).float().permute(2, 0, 1) / 255
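        # mask label convention used here: 0 = background, 100 = disc, 200 = cup,
        # so the disc mask is (label > 0) and the cup mask is (label == 200)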
Disc_mask = (Disc_Cup_mask > 0).astype(np.uint8)
Cup_mask = (Disc_Cup_mask == 200).astype(np.uint8)
Disc_mask = torch.from_numpy(Disc_mask).unsqueeze(0).float()
Cup_mask = torch.from_numpy(Cup_mask).unsqueeze(0).float()
img = torch.cat((img, Disc_mask, Cup_mask), dim=0)
cate = torch.tensor(cate)
return img, cate
def __len__(self):
return self.len
class Dataset_val(Dataset):
def __init__(self, data_root='data', size=(512, 512), fold=0):
self.root = data_root
file = open(os.path.join(data_root, 'train_val_split_200803.pkl'), 'rb')
pkl_data = pickle.load(file)
self.val_name_list = pkl_data[fold][1]
self.len = len(self.val_name_list)
self.transforms = Compose([Resize(size[0], size[1])])
self.pseudo_mask_transformation = Compose(
[ShiftScaleRotate(shift_limit=0.1, scale_limit=0.1, rotate_limit=180, p=0.7,
border_mode=cv2.BORDER_CONSTANT, value=0, interpolation=cv2.INTER_NEAREST),
OneOf([ElasticTransform(p=1, alpha=50, sigma=30, alpha_affine=30, border_mode=cv2.BORDER_CONSTANT,
value=0, interpolation=cv2.INTER_NEAREST),
OpticalDistortion(p=1, distort_limit=0.5, shift_limit=0.1, border_mode=cv2.BORDER_CONSTANT,
value=0, interpolation=cv2.INTER_NEAREST)], p=0.5)])
def __getitem__(self, idx):
name = self.val_name_list[idx]
if random.randint(0, 1) == 1:
img = io.imread(os.path.join(self.root, 'img_match_challenge_val', name))
else:
img = io.imread(os.path.join(self.root, 'img', name))
Disc_Cup_mask = cv2.imread(os.path.join(self.root, 'mask', name.rstrip('.jpg') + '.png'), 0) # [0, 100, 200]
cate = int(name.strip('.jpg').split('_')[-1])
x1_new, x2_new, y1_new, y2_new = crop_mask_expand(Disc_Cup_mask, expand_Percentage=0.2)
Disc_Cup_mask_ROI = Disc_Cup_mask[x1_new: x2_new, y1_new: y2_new]
Disc_Cup_mask_ROI = self.pseudo_mask_transformation(image=Disc_Cup_mask_ROI)['image']
Disc_Cup_mask[x1_new: x2_new, y1_new: y2_new] = Disc_Cup_mask_ROI
augmented = self.transforms(image=img, mask=Disc_Cup_mask)
img, Disc_Cup_mask = augmented['image'], augmented['mask']
img = torch.from_numpy(img).float().permute(2, 0, 1) / 255
Disc_mask = (Disc_Cup_mask > 0).astype(np.uint8)
Cup_mask = (Disc_Cup_mask == 200).astype(np.uint8)
Disc_mask = torch.from_numpy(Disc_mask).unsqueeze(0).float()
Cup_mask = torch.from_numpy(Cup_mask).unsqueeze(0).float()
img = torch.cat((img, Disc_mask, Cup_mask), dim=0)
cate = torch.tensor(cate)
return img, cate
def __len__(self):
return self.len
if __name__ == '__main__':
train_data = Dataset_val(data_root='data', size=(512, 512), fold=0)
train_dataloader = DataLoader(dataset=train_data, batch_size=1, shuffle=True)
for i, (inputs, cate) in enumerate(train_dataloader):
print(cate, cate.size())
img = inputs[:, :3, :, :].squeeze(0).permute(1, 2, 0).numpy()
Disc = inputs[:, 3, :, :].squeeze(0).numpy()
Cup = inputs[:, 4, :, :].squeeze(0).numpy()
plt.subplot(131)
plt.imshow(img)
plt.subplot(132)
plt.imshow(Disc)
plt.subplot(133)
plt.imshow(Cup)
plt.show()
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
from subprocess import check_call
import sys
import os
import traceback
def safe_remove(f):
try:
if os.path.exists(f):
os.remove(f)
except:
traceback.print_exc()
pass
def tsprint(msg):
sys.stderr.write(msg)
sys.stderr.write("\n")
if __name__ == "__main__":
tsprint("WARNING: The s3cp.py script is deprecated. Use 's3mi cp' or 's3mi cat' instead.")
if sys.argv[2] == "-":
check_call(["s3mi", "cat", sys.argv[1]])
else:
safe_remove(sys.argv[2])
with open(sys.argv[2], "ab") as dest:
check_call(["s3mi", "cat", sys.argv[1]], stdout=dest)
|
nilq/baby-python
|
python
|
from .__init__ import *
def gen_func(maxRadius=100, format='string'):
r = random.randint(1, maxRadius)
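    # volume of a hemisphere of radius r: V = (2/3) * pi * r^3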
ans = round((2 * math.pi / 3) * r**3, 3)
if format == 'string':
problem = f"Volume of hemisphere with radius {r} m = "
solution = f"{ans} m^3"
return problem, solution
elif format == 'latex':
return "Latex unavailable"
else:
return r, ans
volume_sphere = Generator("Volume of Hemisphere", 117, gen_func,
["maxRadius=100"])
|
nilq/baby-python
|
python
|
# -*- coding:utf8 -*-
'''
export data
'''
import logging
import xlwt
from LMDI import Lmdi
from SinglePeriodAAM import Spaam
from MultiPeriodAAM import Mpaam
class WriteLmdiData(object):
'''
    write data using a ``with`` statement (context manager)
'''
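    # Typical usage (hypothetical file name and objects):
    #   with WriteLmdiData('lmdi.xls', lmdi1, lmdi2) as writer: writer.write()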
def __init__(self, xls_file_name, *lmdis):
'''
construction
Args:
xls_file_name: to save excel file name
lmdis: the total lmdis to write
'''
self._xls_file_name = xls_file_name
self._lmdis = lmdis
def __enter__(self):
self._workbook = xlwt.Workbook(encoding='utf8')
return self
def write(self):
'''
write the excel
'''
for lmdi in self._lmdis:
if lmdi.name == '':
                raise Exception(Lmdi.__name__ + ' should be initialized by name.')
sheet = self._workbook.add_sheet(lmdi.name)
self._write_columns_names(sheet)
self._write_column(sheet, 0, lmdi.province_names)
self._write_column(sheet, 1, lmdi.pro_t)
self._write_column(sheet, 2, lmdi.pro_t1)
self._write_column(sheet, 3, lmdi.energy_t)
self._write_column(sheet, 4, lmdi.energy_t1)
self._write_column(sheet, 5, lmdi.co2_t)
self._write_column(sheet, 6, lmdi.co2_t1)
self._write_column(sheet, 7, lmdi.lambda_t_t)
self._write_column(sheet, 8, lmdi.lambda_t_t1)
self._write_column(sheet, 9, lmdi.lambda_t1_t)
self._write_column(sheet, 10, lmdi.lambda_t1_t1)
self._write_column(sheet, 11, lmdi.theta_t_t)
self._write_column(sheet, 12, lmdi.theta_t_t1)
self._write_column(sheet, 13, lmdi.theta_t1_t)
self._write_column(sheet, 14, lmdi.theta_t1_t1)
self._write_column(sheet, 15, lmdi.emx())
self._write_column(sheet, 16, lmdi.pei())
self._write_column(sheet, 17, lmdi.pis())
self._write_column(sheet, 18, lmdi.isg())
self._write_column(sheet, 19, lmdi.eue())
self._write_column(sheet, 20, lmdi.est())
self._write_column(sheet, 21, lmdi.yoe())
self._write_column(sheet, 22, lmdi.yct())
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._workbook.save(self._xls_file_name)
elif exc_type is Exception:
logging.error(exc_val)
else:
pass
def _write_columns_names(self, sheet):
sheet.write(0, 0, label=u'省份')
sheet.write(0, 1, label=u'T 期产出')
sheet.write(0, 2, label=u'T+1 期产出')
sheet.write(0, 3, label=u'T 期能源消耗')
sheet.write(0, 4, label=u'T+1 期能源消耗')
sheet.write(0, 5, label=u'T 期Co2排放')
sheet.write(0, 6, label=u'T+1 期Co2排放')
sheet.write(0, 7, label=u'lambda_t_t')
sheet.write(0, 8, label=u'lambda_t_t1')
sheet.write(0, 9, label=u'lambda_t1_t')
sheet.write(0, 10, label=u'lambda_t1_t1')
sheet.write(0, 11, label=u'theta_t_t')
sheet.write(0, 12, label=u'theta_t_t1')
sheet.write(0, 13, label=u'theta_t1_t')
sheet.write(0, 14, label=u'theta_t1_t1')
sheet.write(0, 15, label=u'emx')
sheet.write(0, 16, label=u'pei')
sheet.write(0, 17, label=u'pis')
sheet.write(0, 18, label=u'isg')
sheet.write(0, 19, label=u'eue')
sheet.write(0, 20, label=u'est')
sheet.write(0, 21, label=u'yoe')
sheet.write(0, 22, label=u'yct')
def _write_column(self, sheet, column, values):
'''
Args:
sheet: the sheet
column: the column to WriteData
values: the values to write
'''
try:
row = 1
for value in values:
sheet.write(row, column, label=value)
row += 1
except TypeError:
logging.error('the type error in '+str(column)+ ' column')
raise
class WriteLmdi(object):
    '''
    Export the LMDI results, formatted to follow the Word document layout
    '''
def __init__(self, xls_file_name, *lmdis):
self._xls_file_name = xls_file_name
self._lmdis = lmdis
def __enter__(self):
self._workbook = xlwt.Workbook(encoding='utf8')
return self
def write(self):
pass
def _write_base_previous(self, sheet):
columns = ['periods', '']
def __exit__(self, exc_type, exc_val, ext_tb):
if exc_type is None:
self._workbook.save(self._xls_file_name)
elif exc_type is Exception:
logging.error(exc_val)
raise Exception
else:
pass
class WriteSpaamData(object):
'''
write the spaam data
'''
def __init__(self, xls_file_name, *spaams):
'''
construction
Args:
xls_file_name: to save excel file name
spaams: the total spaam to write
'''
self._xls_file_name = xls_file_name
self._spaams = spaams
def __enter__(self):
self._workbook = xlwt.Workbook(encoding='utf8')
return self
def write(self):
'''
write value
'''
for spaam in self._spaams:
if spaam.name == '':
raise Exception(Spaam.__name__ + ' should be initialized by name')
sheet = self._workbook.add_sheet(spaam.name)
self._write_columns_names(sheet)
self._write_column(sheet, 0, spaam.province_names)
self._write_column(sheet, 1, spaam.emx_attributions)
self._write_column(sheet, 2, spaam.pei_attributions)
self._write_column(sheet, 3, spaam.pis_attributions)
self._write_column(sheet, 4, spaam.isg_attributions)
self._write_column(sheet, 5, spaam.eue_attributions)
self._write_column(sheet, 6, spaam.est_attributions)
self._write_column(sheet, 7, spaam.yoe_attributions)
self._write_column(sheet, 8, spaam.yct_attributions)
def _write_columns_names(self, sheet):
sheet.write(0, 0, label=u'省份')
sheet.write(0, 1, label=u'emx')
sheet.write(0, 2, label=u'pei')
sheet.write(0, 3, label=u'pis')
sheet.write(0, 4, label=u'isg')
sheet.write(0, 5, label=u'eue')
sheet.write(0, 6, label=u'est')
sheet.write(0, 7, label=u'yoe')
sheet.write(0, 8, label=u'yct')
def _write_column(self, sheet, column, values):
'''
        write values to a particular column
'''
row = 1
for value in values:
if column != 0:
value *= 100.0
sheet.write(row, column, label=value)
row += 1
def __exit__(self, exc_type, exc_val, ect_tb):
if exc_type is None:
self._workbook.save(self._xls_file_name)
elif exc_type is Exception:
logging.error(exc_val)
else:
pass
class WriteMpaamData(object):
'''
write the mpaam data
'''
def __init__(self, xls_file_name, *mpaams):
'''
construction
Args:
xls_file_name: to save excel file name
mpaams: the total mpaam to write
'''
self._xls_file_name = xls_file_name
self._mpaams = mpaams
def __enter__(self):
self._workbook = xlwt.Workbook(encoding='utf8')
return self
def write(self):
'''
write value
'''
for mpaam in self._mpaams:
if mpaam.name == '':
raise Exception(Mpaam.__name__ + ' should be initialized by name')
sheet = self._workbook.add_sheet(mpaam.name)
self._write_columns_names(sheet)
self._write_column(sheet, 0, mpaam.province_names)
self._write_column(sheet, 1, mpaam.emx())
self._write_column(sheet, 2, mpaam.pei())
self._write_column(sheet, 3, mpaam.pis())
self._write_column(sheet, 4, mpaam.isg())
self._write_column(sheet, 5, mpaam.eue())
self._write_column(sheet, 6, mpaam.est())
self._write_column(sheet, 7, mpaam.yoe())
self._write_column(sheet, 8, mpaam.yct())
def _write_columns_names(self, sheet):
sheet.write(0, 0, label=u'省份')
sheet.write(0, 1, label=u'emx')
sheet.write(0, 2, label=u'pei')
sheet.write(0, 3, label=u'pis')
sheet.write(0, 4, label=u'isg')
sheet.write(0, 5, label=u'eue')
sheet.write(0, 6, label=u'est')
sheet.write(0, 7, label=u'yoe')
sheet.write(0, 8, label=u'yct')
def _write_column(self, sheet, column, values):
'''
        write the values to a particular column
'''
row = 1
for value in values:
if column != 0:
value *= 100
sheet.write(row, column, label=value)
row += 1
def __exit__(self, exc_type, exc_val, ect_tb):
if exc_type is None:
self._workbook.save(self._xls_file_name)
elif exc_type is Exception:
logging.error(exc_val)
else:
pass
|
nilq/baby-python
|
python
|
from pathlib import PurePath
def part1(l: list[int]) -> int:
l = l.copy()
i = 0
while i < len(l):
match l[i]:
case 1:
l[l[i + 3]] = l[l[i + 1]] + l[l[i + 2]]
i += 3
case 2:
l[l[i + 3]] = l[l[i + 1]] * l[l[i + 2]]
i += 3
case 99:
break
i += 1
return l[0]
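# Worked example: for [1, 0, 0, 0, 99] the single add instruction stores l[0] + l[0] = 2 at
# position 0, so part1 returns 2; the larger program checked in test() below reduces to 3500.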
def part2(l: list[int]) -> int:
ans = 19690720
for noun in range(0, 100):
for verb in range(0, 100):
curr = l.copy()
curr[1] = noun
curr[2] = verb
try:
result = part1(curr)
except IndexError:
continue
else:
# ++noun -> result + 345600
# ++verb -> result + 1
if ans - result > 99:
break
if result == ans:
return 100 * noun + verb
return -1
def test() -> None:
assert part1([1,9,10,3,2,3,11,0,99,30,40,50]) == 3500
if __name__ == '__main__':
with open(f'./data/input-{PurePath(__file__).stem}.txt', 'r') as f:
l = list(map(int, f.read().split(',')))
print("Part 1:", part1(l))
print("Part 2:", part2(l))
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-06 21:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('marketgrab', '0008_remove_data_v'),
]
operations = [
migrations.AddField(
model_name='data',
name='volume',
field=models.DecimalField(decimal_places=0, default=1, max_digits=15),
preserve_default=False,
),
]
|
nilq/baby-python
|
python
|
from django.contrib import admin
from . import models
def push_to_influxdb(modeladmin, request, queryset):
for row in queryset:
row.push_to_influxdb()
@admin.register(models.Instance)
class InstanceAdmin(admin.ModelAdmin):
list_display = [
'name',
'url',
'users',
'statuses',
'up',
'open_registrations',
'https_score',
'last_fetched',
]
search_fields = ['name']
actions = [push_to_influxdb]
|
nilq/baby-python
|
python
|
"""https://open.kattis.com/problems/provincesandgold"""
from collections import OrderedDict
# ordering matters: the loops below pick the first (most expensive) affordable card
vic = OrderedDict([("Province", 8), ("Duchy", 5), ("Estate", 2)])
tres = OrderedDict([("Gold", 6), ("Silver", 3), ("Copper", 0)])
inp = list(map(int, input().split()))
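# buying power: each Gold in hand is worth 3 coins, each Silver 2, each Copper 1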
money = inp[0] * 3 + inp[1] * 2 + inp[2]
options = []
for coin, cost in tres.items():
if money >= cost:
options.append(coin)
break
for prov, cost in vic.items():
if money >= cost:
options.insert(0, prov)
break
if len(options) == 2:
print(options[0], "or", options[1])
else:
print(options[0])
|
nilq/baby-python
|
python
|
#This script is hijacked from targetscan_parsecontextscores.py. It asks which miRNAs
#are enriched for having sites in a particular sequence set. Actually, more precisely,
#it just gives the density of sites for each miRNA. Number of sites for a miRNA / total sequence
#search space.
import os
import gffutils
import argparse
from numpy import mean as mean
from numpy import median as median
def parsecontextscores(csfile, gff, featurename):
#Make dictionary of this form:
# {UTRname : [[UTRlength], [names of all miRNAs that have sites in that UTR]]}
#csfile = output of targetscan_60_context_scores.pl
#gff = gff file of regions of interest
#featurename = feature category in gff file (3rd field)
lengthdict = {}
CSdict = {}
#First need to get lengths
gff_fn = gff
db_fn = os.path.basename(gff_fn) + '.db'
if os.path.isfile(db_fn) == False: #if database doesn't exist, create it
gffutils.create_db(gff_fn, db_fn)
db = gffutils.FeatureDB(db_fn)
features = db.features_of_type(featurename)
for feature in features:
featureid = feature.id
featurelength = feature.stop - feature.start
lengthdict[featureid] = featurelength
os.remove(db_fn)
#Now get miRNA names
csfilehandle = open(csfile, 'r')
for line in csfilehandle:
line = line.strip().split('\t')
if line[0] != 'Gene ID': #skip header line
featureid = line[0].split(';')[0] #Remove Parent=...
species = line[1]
miRNAname = line[2]
if species == '10090': #this is mouse; for other species, change this number
if featureid not in CSdict:
CSdict[featureid] = [[lengthdict[featureid]], [miRNAname]]
elif featureid in CSdict:
CSdict[featureid][1].append(miRNAname)
csfilehandle.close()
return CSdict
def parseCSdict(CSdict):
#CSdict = {UTRname : [[UTRlength], [names of all miRNAs that have sites in that UTR]]}
miRNAsites = {} #{miRNA : number of sites}
miRNAdensities = {} #{miRNA : density of sites}
totalsequencelength = 0
for UTR in CSdict:
totalsequencelength += int(CSdict[UTR][0][0])
print 'The total sequence search space was {0} nt'.format(totalsequencelength)
#Count miRNA occurences
for UTR in CSdict:
miRNAs = CSdict[UTR][1]
for miRNA in miRNAs:
if miRNA not in miRNAsites:
miRNAsites[miRNA] = 1
elif miRNA in miRNAsites:
miRNAsites[miRNA] +=1
for miRNA in miRNAsites:
miRNAdensities[miRNA] = miRNAsites[miRNA] / float(totalsequencelength)
return miRNAdensities
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--csfile', type = str, help = 'Targetscan_60_context_scores.pl output.')
parser.add_argument('--gff', type = str, help = 'Gff of regions that targetscan looked through.')
parser.add_argument('--featurename', type = str, help = 'Feature category in gff file (3rd field of gff)')
parser.add_argument('--outfile', type = str, help = 'Output file.')
args = parser.parse_args()
CSdict = parsecontextscores(args.csfile, args.gff, args.featurename)
miRNAdensities = parseCSdict(CSdict)
outfh = open(args.outfile, 'w')
outfh.write('miRNA' + '\t' + 'density' + '\n')
for entry in miRNAdensities:
outfh.write(entry + '\t' + str(miRNAdensities[entry]) + '\n')
outfh.close()
|
nilq/baby-python
|
python
|
from cansfr import *
class can11xx (object):
'''
can11xx hierarchy
----------------- canm0
{sfr /
updrpl - ams ~ ~
{i2c / |
sfr1108 cani2c - - - tsti2c ~ isp
/ / /
sfr11xx - sfr111x - sfr1110 } can11xx nvm - atm
\ \ \
sfr1112 - - - - - - tstcsp ~ csp
/ {canm0
cspnvm
'''
def __init__ (me):
me.sfr = sfr11xx() # initial
if me.is_master_rdy():
revid = me.get_revid ()
# print 'master is ready', revid
if revid > 0: # found
if sfr1108().check (revid): me.sfr = sfr1108(revid)
elif sfr1110().check (revid): me.sfr = sfr1110(revid)
elif sfr1112().check (revid): me.sfr = sfr1112(revid)
elif sfr1124().check (revid): me.sfr = sfr1124(revid)
else:
print 'un-recognized REVID: %02X' % revid
me.sfr = sfr11xx()
# else:
# print 'master is not ready'
def is_master_rdy (me): raise NotImplementedError()
def sfrwx (me, adr, wdat): raise NotImplementedError() # non-INC write
def sfrwi (me, adr, wdat): raise NotImplementedError() # INC write
def sfrrx (me, adr, cnt): raise NotImplementedError() # non-INC read
def sfrri (me, adr, cnt): raise NotImplementedError() # INC read
def get_revid (me):
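        # Probe the target: read DEC to check that the slave responds, point DEC at REVID, read the
        # revision id (bit 7 masked off), then restore the saved DEC value; returns 0 if no answer.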
sav = me.sfrrx (me.sfr.DEC, 1) # try slave
if len(sav): # data returned
me.sfrwx (me.sfr.DEC, [me.sfr.REVID])
revid = \
me.sfrrx (me.sfr.REVID, 1)[0] & 0x7f
me.sfrwx (me.sfr.DEC, [sav[0]])
return revid
return 0
class cani2c (can11xx):
def __init__ (me, busmst, deva, rpt=0):
me.deva = deva
me.busmst = busmst # SFR master (I2C)
can11xx.__init__ (me) # SFR target
if me.sfr.revid:
if rpt:
print 'I2C master finds %s, 0x%02x' % (me.sfr.name, me.deva)
if me.sfr.inc == 1: # CAN1108/11
me.sfrwx (me.sfr.I2CCTL, [me.sfrrx (me.sfr.I2CCTL,1)[0] | 0x01]) # we'll work in NINC mode
def is_master_rdy (me):
''' Is this master ready for issuing things?
'''
return TRUE if me.busmst else FALSE
def sfrwx (me, adr, wdat):
return me.busmst.write (me.deva, adr, wdat)
def sfrrx (me, adr, cnt):
return me.busmst.read (me.deva, adr, cnt, FALSE)
def sfrri (me, adr, cnt):
sav = me.sfrrx (me.sfr.I2CCTL, 1)[0]
setinc = sav & 0xfe if me.sfr.inc else sav | 0x01
me.sfrwx (me.sfr.I2CCTL, [setinc]) # INC mode
rdat = me.busmst.read (me.deva, adr, cnt)
me.sfrwx (me.sfr.I2CCTL, [sav])
return rdat
def sfrwi (me, adr, wdat):
sav = me.sfrrx (me.sfr.I2CCTL, 1)[0]
setinc = sav & 0xfe if me.sfr.inc else sav | 0x01
me.sfrwx (me.sfr.I2CCTL, [setinc]) # INC mode
ret = me.busmst.write (me.deva, adr, wdat)
me.sfrwx (me.sfr.I2CCTL, [sav])
return ret
|
nilq/baby-python
|
python
|
from __future__ import print_function
import argparse
import pickle
import os
import time
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import numpy as np
from torchvision import datasets, transforms
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
self.conv2_drop = nn.Dropout2d()
self.fc1 = nn.Linear(320, 50)
self.fc2 = nn.Linear(50, 10)
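        # for 1x28x28 MNIST inputs: conv1+pool -> 10x12x12, conv2+pool -> 20x4x4, flattened to 320 features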
def forward(self, x):
x = F.relu(F.max_pool2d(self.conv1(x), 2))
x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
x = x.view(-1, 320)
x = F.relu(self.fc1(x))
x = F.dropout(x, training=self.training)
x = self.fc2(x)
return F.log_softmax(x, dim=1)
def get_stat(data):
# TODO: Add num backpropped
stat = {}
stat["average"] = np.average(data)
stat["p25"] = np.percentile(data, 25)
stat["p50"] = np.percentile(data, 50)
stat["p75"] = np.percentile(data, 75)
stat["p90"] = np.percentile(data, 90)
stat["max"] = max(data)
stat["min"] = min(data)
return stat
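# Quick sanity check (illustrative): get_stat([1.0, 2.0, 3.0]) gives average 2.0, p50 2.0,
# min 1.0 and max 3.0, with the remaining percentiles linearly interpolated by numpy.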
def update_batch_stats(batch_stats, num_backpropped, pool_losses=None, chosen_losses=None, gradients=None):
'''
batch_stats = [{'chosen_losses': {stat},
'pool_losses': {stat}}]
'''
snapshot = {"chosen_losses": get_stat(chosen_losses),
"pool_losses": get_stat(pool_losses)}
batch_stats.append(snapshot)
def train(args,
model,
device,
trainloader,
optimizer,
epoch,
total_num_images_backpropped,
images_hist,
batch_stats=None):
print('\nEpoch: %d' % epoch)
model.train()
train_loss = 0
correct = 0
total = 0
losses_pool = []
data_pool = []
targets_pool = []
ids_pool = []
num_backprop = 0
loss_reduction = None
for batch_idx, (data, targets, image_id) in enumerate(trainloader):
data, targets = data.to(device), targets.to(device)
if args.selective_backprop:
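            # Selective backprop: forward every image, buffer its loss until the pool is full,
            # then backprop only the top-k highest-loss images from that pool.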
output = model(data)
loss = F.nll_loss(output, targets)
losses_pool.append(loss.item())
data_pool.append(data)
targets_pool.append(targets)
ids_pool.append(image_id.item())
if len(losses_pool) == args.pool_size:
# Choose frames from pool to backprop
indices = np.array(losses_pool).argsort()[-args.top_k:]
chosen_data = [data_pool[i] for i in indices]
chosen_targets = [targets_pool[i] for i in indices]
chosen_ids = [ids_pool[i] for i in indices]
chosen_losses = [losses_pool[i] for i in indices]
data_batch = torch.stack(chosen_data, dim=1)[0]
targets_batch = torch.cat(chosen_targets)
output_batch = model(data_batch) # redundant
for chosen_id in chosen_ids:
images_hist[chosen_id] += 1
# Get stats for batches
if batch_stats is not None:
update_batch_stats(batch_stats,
total_num_images_backpropped,
pool_losses = losses_pool,
chosen_losses = chosen_losses)
# Note: This will only work for batch size of 1
loss_reduction = F.nll_loss(output_batch, targets_batch)
optimizer.zero_grad()
loss_reduction.backward()
optimizer.step()
train_loss += loss_reduction.item()
num_backprop += args.top_k
losses_pool = []
data_pool = []
targets_pool = []
ids_pool = []
output = output_batch
targets = targets_batch
else:
            output = model(data)
loss_reduction = F.nll_loss(output, targets)
optimizer.zero_grad()
loss_reduction.backward()
optimizer.step()
train_loss += loss_reduction.item()
num_backprop += args.batch_size
_, predicted = output.max(1)
total += targets.size(0)
correct += predicted.eq(targets).sum().item()
if batch_idx % args.log_interval == 0 and loss_reduction is not None:
print('train_debug,{},{},{:.6f},{:.6f},{},{:.6f}'.format(
epoch,
total_num_images_backpropped + num_backprop,
loss_reduction.item(),
train_loss / float(num_backprop),
time.time(),
100.*correct/total))
return num_backprop
def test(args, model, device, test_loader, epoch, total_num_images_backpropped):
model.eval()
test_loss = 0
correct = 0
total = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.max(1, keepdim=True)[1] # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
total += target.size(0)
test_loss /= len(test_loader.dataset)
print('test_debug,{},{},{:.6f},{:.6f},{}'.format(
epoch,
total_num_images_backpropped,
test_loss,
100.*correct/total,
time.time()))
def main():
# Training settings
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=1, metavar='N',
help='input batch size for training (default: 1)')
parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
help='input batch size for testing (default: 1000)')
parser.add_argument('--epochs', type=int, default=500, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--lr', type=float, default=0.01, metavar='LR',
help='learning rate (default: 0.01)')
parser.add_argument('--decay', default=0, type=float, help='decay')
parser.add_argument('--momentum', type=float, default=0.5, metavar='M',
help='SGD momentum (default: 0.5)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
help='how many batches to wait before logging training status')
parser.add_argument('--selective-backprop', type=bool, default=False, metavar='N',
help='whether or not to use selective-backprop')
parser.add_argument('--top-k', type=int, default=8, metavar='N',
help='how many images to backprop per batch')
parser.add_argument('--pool-size', type=int, default=16, metavar='N',
help='how many images to backprop per batch')
parser.add_argument('--pickle-dir', default="/tmp/",
help='directory for pickles')
parser.add_argument('--pickle-prefix', default="stats",
help='file prefix for pickles')
args = parser.parse_args()
use_cuda = not args.no_cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
device = torch.device("cuda" if use_cuda else "cpu")
kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
trainset = datasets.MNIST('../data', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
]))
trainset = [t + (i,) for i, t in enumerate(trainset)] # Add image index to train set
chunk_size = args.pool_size * 10
partitions = [trainset[i:i + chunk_size] for i in xrange(0, len(trainset), chunk_size)]
test_loader = torch.utils.data.DataLoader(
datasets.MNIST('../data', train=False, transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.test_batch_size, shuffle=True, **kwargs)
model = Net().to(device)
optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.decay)
# Store frequency of each image getting backpropped
keys = range(len(trainset))
images_hist = dict(zip(keys, [0] * len(keys)))
batch_stats = []
# Make images hist pickle path
image_id_pickle_dir = os.path.join(args.pickle_dir, "image_id_hist")
if not os.path.exists(image_id_pickle_dir):
os.mkdir(image_id_pickle_dir)
image_id_pickle_file = os.path.join(image_id_pickle_dir,
"{}_images_hist.pickle".format(args.pickle_prefix))
# Make batch stats pickle path
batch_stats_pickle_dir = os.path.join(args.pickle_dir, "batch_stats")
if not os.path.exists(batch_stats_pickle_dir):
os.mkdir(batch_stats_pickle_dir)
batch_stats_pickle_file = os.path.join(batch_stats_pickle_dir,
"{}_batch_stats.pickle".format(args.pickle_prefix))
total_num_images_backpropped = 0
for epoch in range(1, args.epochs + 1):
for partition in partitions:
trainloader = torch.utils.data.DataLoader(partition, batch_size=args.batch_size, shuffle=True, num_workers=2)
test(args, model, device, test_loader, epoch, total_num_images_backpropped)
num_images_backpropped = train(args,
model,
device,
trainloader,
optimizer,
epoch,
total_num_images_backpropped,
images_hist,
batch_stats=batch_stats)
total_num_images_backpropped += num_images_backpropped
with open(image_id_pickle_file, "wb") as handle:
pickle.dump(images_hist, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open(batch_stats_pickle_file, "wb") as handle:
print(batch_stats_pickle_file)
pickle.dump(batch_stats, handle, protocol=pickle.HIGHEST_PROTOCOL)
if __name__ == '__main__':
main()
|
nilq/baby-python
|
python
|
# coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from intersight.api_client import ApiClient
from intersight.exceptions import (ApiTypeError, ApiValueError)
class FirmwareApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_firmware_distributable(self, firmware_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_distributable(firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_distributable_with_http_info(
firmware_distributable, **kwargs) # noqa: E501
def create_firmware_distributable_with_http_info(self,
firmware_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_distributable_with_http_info(firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to create. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['firmware_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method create_firmware_distributable" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_distributable' is set
if self.api_client.client_side_validation and (
'firmware_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_distributable` when calling `create_firmware_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_distributable' in local_var_params:
body_params = local_var_params['firmware_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_firmware_driver_distributable(self,
firmware_driver_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_driver_distributable(firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_driver_distributable_with_http_info(
firmware_driver_distributable, **kwargs) # noqa: E501
def create_firmware_driver_distributable_with_http_info(
self, firmware_driver_distributable, **kwargs): # noqa: E501
"""Create a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_driver_distributable_with_http_info(firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to create. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['firmware_driver_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_firmware_driver_distributable" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_driver_distributable' is set
if self.api_client.client_side_validation and (
'firmware_driver_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_driver_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_driver_distributable` when calling `create_firmware_driver_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_driver_distributable' in local_var_params:
body_params = local_var_params['firmware_driver_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_firmware_eula(self, firmware_eula, **kwargs): # noqa: E501
"""Create a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_eula(firmware_eula, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareEula firmware_eula: The 'firmware.Eula' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareEula
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_eula_with_http_info(
firmware_eula, **kwargs) # noqa: E501
def create_firmware_eula_with_http_info(self, firmware_eula,
**kwargs): # noqa: E501
"""Create a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_eula_with_http_info(firmware_eula, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareEula firmware_eula: The 'firmware.Eula' resource to create. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareEula, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['firmware_eula'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method create_firmware_eula" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_eula' is set
if self.api_client.client_side_validation and (
'firmware_eula' not in local_var_params or # noqa: E501
local_var_params['firmware_eula'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_eula` when calling `create_firmware_eula`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_eula' in local_var_params:
body_params = local_var_params['firmware_eula']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Eulas',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareEula', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_firmware_server_configuration_utility_distributable(
self, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_server_configuration_utility_distributable(firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_server_configuration_utility_distributable_with_http_info(
firmware_server_configuration_utility_distributable,
**kwargs) # noqa: E501
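# Usage sketch (assumes `api` and a populated
# FirmwareServerConfigurationUtilityDistributable payload named `scu`): a
# per-request timeout can be given either as a single total value or as a
# (connection, read) tuple, as documented above.
#
#   created = api.create_firmware_server_configuration_utility_distributable(
#       scu, _request_timeout=(5, 30))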
def create_firmware_server_configuration_utility_distributable_with_http_info(
self, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_server_configuration_utility_distributable_with_http_info(firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to create. (required)
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['firmware_server_configuration_utility_distributable'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_firmware_server_configuration_utility_distributable"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_server_configuration_utility_distributable' is set
if self.api_client.client_side_validation and (
'firmware_server_configuration_utility_distributable' not in
local_var_params or # noqa: E501
local_var_params[
'firmware_server_configuration_utility_distributable'] is
None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_server_configuration_utility_distributable` when calling `create_firmware_server_configuration_utility_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_server_configuration_utility_distributable' in local_var_params:
body_params = local_var_params[
'firmware_server_configuration_utility_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=
'FirmwareServerConfigurationUtilityDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_firmware_upgrade(self, firmware_upgrade,
**kwargs): # noqa: E501
"""Create a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_upgrade(firmware_upgrade, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareUpgrade firmware_upgrade: The 'firmware.Upgrade' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgrade
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_upgrade_with_http_info(
firmware_upgrade, **kwargs) # noqa: E501
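# Usage sketch for the *_with_http_info variant, which returns a
# (data, status_code, headers) tuple instead of only the deserialized model.
# The contents of the `firmware_upgrade` payload depend on the upgrade type
# and target and are not defined in this file.
#
#   upgrade, status, headers = api.create_firmware_upgrade_with_http_info(
#       firmware_upgrade)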
def create_firmware_upgrade_with_http_info(self, firmware_upgrade,
**kwargs): # noqa: E501
"""Create a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_upgrade_with_http_info(firmware_upgrade, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareUpgrade firmware_upgrade: The 'firmware.Upgrade' resource to create. (required)
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgrade, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['firmware_upgrade'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method create_firmware_upgrade" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_upgrade' is set
if self.api_client.client_side_validation and (
'firmware_upgrade' not in local_var_params or # noqa: E501
local_var_params['firmware_upgrade'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_upgrade` when calling `create_firmware_upgrade`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_upgrade' in local_var_params:
body_params = local_var_params['firmware_upgrade']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Upgrades',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgrade', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_firmware_distributable(self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_distributable(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_firmware_distributable_with_http_info(
moid, **kwargs) # noqa: E501
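# Usage sketch: deleting a distributable by its Moid returns None on success;
# failures surface as exceptions raised by the underlying api_client.
#
#   api.delete_firmware_distributable(moid)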
def delete_firmware_distributable_with_http_info(self, moid,
**kwargs): # noqa: E501
"""Delete a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_distributable_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method delete_firmware_distributable" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `delete_firmware_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables/{Moid}',
'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_firmware_driver_distributable(self, moid,
**kwargs): # noqa: E501
"""Delete a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_driver_distributable(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_firmware_driver_distributable_with_http_info(
moid, **kwargs) # noqa: E501
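# Usage sketch with local argument validation: ApiValueError (imported by this
# module) is raised before any HTTP call when the required `moid` is missing;
# server-side errors propagate from the api_client.
#
#   try:
#       api.delete_firmware_driver_distributable(moid)
#   except ApiValueError as err:
#       print("invalid arguments: %s" % err)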
def delete_firmware_driver_distributable_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_driver_distributable_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_firmware_driver_distributable" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `delete_firmware_driver_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables/{Moid}',
'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_firmware_server_configuration_utility_distributable(
self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_server_configuration_utility_distributable(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_firmware_server_configuration_utility_distributable_with_http_info(
moid, **kwargs) # noqa: E501
def delete_firmware_server_configuration_utility_distributable_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_server_configuration_utility_distributable_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_firmware_server_configuration_utility_distributable"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `delete_firmware_server_configuration_utility_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables/{Moid}',
'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_firmware_upgrade(self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_upgrade(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_firmware_upgrade_with_http_info(
moid, **kwargs) # noqa: E501
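# Usage sketch for the asynchronous form documented above: with async_req=True
# the call returns immediately with a thread-like object whose get() blocks
# until the request completes (and returns None for this DELETE).
#
#   thread = api.delete_firmware_upgrade(moid, async_req=True)
#   thread.get()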
def delete_firmware_upgrade_with_http_info(self, moid,
**kwargs): # noqa: E501
"""Delete a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_upgrade_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method delete_firmware_upgrade" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `delete_firmware_upgrade`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Upgrades/{Moid}',
'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_distributable_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_distributable_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_distributable_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
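# Usage sketch: reading a single distributable by Moid returns a
# FirmwareDistributable instance, or the raw urllib3.HTTPResponse when
# _preload_content=False is passed.
#
#   distributable = api.get_firmware_distributable_by_moid(moid)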
def get_firmware_distributable_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_distributable_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_distributable_by_moid" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_distributable_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_distributable_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_distributable_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributableList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_distributable_list_with_http_info(
**kwargs) # noqa: E501
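# Usage sketch for the OData-style query options documented above. The filter
# expression and the `results`/`name`/`version` attributes are assumptions
# based on the firmware.Distributable model, not definitions from this file.
#
#   listing = api.get_firmware_distributable_list(
#       filter="contains(Name, 'ucs')",
#       orderby="CreateTime desc",
#       top=10)
#   for item in listing.results:
#       print(item.name, item.version)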
def get_firmware_distributable_list_with_http_info(self,
**kwargs): # noqa: E501
"""Read a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_distributable_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributableList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_distributable_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributableList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_driver_distributable_by_moid(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_driver_distributable_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_driver_distributable_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
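# Usage sketch: driver distributables follow the same by-Moid pattern as the
# other getters; only the response type (FirmwareDriverDistributable) differs.
#
#   driver_pkg = api.get_firmware_driver_distributable_by_moid(moid)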
def get_firmware_driver_distributable_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_driver_distributable_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_driver_distributable_by_moid" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_driver_distributable_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_driver_distributable_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_driver_distributable_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributableList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_driver_distributable_list_with_http_info(
**kwargs) # noqa: E501
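# Usage sketch combining the paging and counting options documented above:
# $count returns only the number of matching resources, while $top/$skip page
# through them.
#
#   total = api.get_firmware_driver_distributable_list(count=True)
#   page = api.get_firmware_driver_distributable_list(top=25, skip=25)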
def get_firmware_driver_distributable_list_with_http_info(
self, **kwargs): # noqa: E501
"""Read a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_driver_distributable_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _return_http_data_only: return only the response data, without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributableList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_driver_distributable_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributableList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_eula_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_eula_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareEula
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_eula_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
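
    # Usage sketch (hedged): fetch one EULA by Moid. `api` and the Moid value
    # are hypothetical placeholders, not values from this module.
    #
    #   eula = api.get_firmware_eula_by_moid("5e8f1a2b3c4d5e6f7a8b9c0d")
    #
    # Non-blocking variant: pass async_req=True and join on the returned thread,
    # mirroring the doctest in the docstring above.
    #
    #   thread = api.get_firmware_eula_by_moid("5e8f1a2b3c4d5e6f7a8b9c0d",
    #                                          async_req=True)
    #   eula = thread.get()
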
def get_firmware_eula_by_moid_with_http_info(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_eula_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareEula, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method get_firmware_eula_by_moid" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_eula_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Eulas/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareEula', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_eula_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_eula_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareEulaList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_eula_list_with_http_info(**
kwargs) # noqa: E501
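
    # Usage sketch (hedged): the *_with_http_info variant returns the full
    # (data, status_code, headers) tuple, which is useful when response headers
    # matter. `api` is a hypothetical configured instance of this API class.
    #
    #   eulas, status, headers = api.get_firmware_eula_list_with_http_info(top=5)
    #   assert status == 200
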
def get_firmware_eula_list_with_http_info(self, **kwargs): # noqa: E501
"""Read a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_eula_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareEulaList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method get_firmware_eula_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Eulas',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareEulaList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_running_firmware_by_moid(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_running_firmware_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareRunningFirmware
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_running_firmware_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
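
    # Usage sketch (hedged): `_request_timeout` may be a single number (total
    # timeout) or a (connection, read) tuple, as documented above. `api` and
    # the Moid are placeholders.
    #
    #   fw = api.get_firmware_running_firmware_by_moid(
    #       "5e8f1a2b3c4d5e6f7a8b9c0d",
    #       _request_timeout=(3.0, 30.0))
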
def get_firmware_running_firmware_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_running_firmware_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareRunningFirmware, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_running_firmware_by_moid" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_running_firmware_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/RunningFirmwares/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareRunningFirmware', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_running_firmware_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_running_firmware_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareRunningFirmwareList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_running_firmware_list_with_http_info(
**kwargs) # noqa: E501
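
    # Usage sketch (hedged): filter and sort the running-firmware inventory on
    # the server side. The property names in the expressions are illustrative
    # only; consult the schema for the real field names.
    #
    #   fw_list = api.get_firmware_running_firmware_list(
    #       filter="Component eq 'bios'",
    #       orderby="Version desc",
    #       top=25)
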
def get_firmware_running_firmware_list_with_http_info(
self, **kwargs): # noqa: E501
"""Read a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_running_firmware_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareRunningFirmwareList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_running_firmware_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/RunningFirmwares',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareRunningFirmwareList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_server_configuration_utility_distributable_by_moid(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_server_configuration_utility_distributable_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_server_configuration_utility_distributable_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
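
    # Usage sketch (hedged): the required `moid` argument is checked client
    # side; when client-side validation is enabled (assumed default here),
    # passing None raises ApiValueError before any request is sent. `api` is a
    # hypothetical configured instance of this API class.
    #
    #   try:
    #       api.get_firmware_server_configuration_utility_distributable_by_moid(None)
    #   except ApiValueError:
    #       pass  # handle the missing identifier
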
def get_firmware_server_configuration_utility_distributable_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_server_configuration_utility_distributable_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_server_configuration_utility_distributable_by_moid"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_server_configuration_utility_distributable_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=
'FirmwareServerConfigurationUtilityDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_server_configuration_utility_distributable_list(
self, **kwargs): # noqa: E501
"""Read a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_server_configuration_utility_distributable_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributableList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_server_configuration_utility_distributable_list_with_http_info(
**kwargs) # noqa: E501
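
    # Usage sketch (hedged): page through SCU distributables with $top/$skip.
    # `api` and the page size are illustrative.
    #
    #   page_size = 100
    #   first = api.get_firmware_server_configuration_utility_distributable_list(
    #       top=page_size)
    #   second = api.get_firmware_server_configuration_utility_distributable_list(
    #       top=page_size, skip=page_size)
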
def get_firmware_server_configuration_utility_distributable_list_with_http_info(
self, **kwargs): # noqa: E501
"""Read a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_server_configuration_utility_distributable_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributableList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_server_configuration_utility_distributable_list"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=
'FirmwareServerConfigurationUtilityDistributableList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_upgrade_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgrade
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_upgrade_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
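
    # Usage sketch (hedged): with _preload_content=False the call returns the
    # raw urllib3 response object instead of a deserialized FirmwareUpgrade,
    # per the docstring above. `api` and the Moid are placeholders.
    #
    #   raw = api.get_firmware_upgrade_by_moid("5e8f1a2b3c4d5e6f7a8b9c0d",
    #                                          _preload_content=False)
    #   body = raw.data  # undecoded response bytes
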
def get_firmware_upgrade_by_moid_with_http_info(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgrade, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method get_firmware_upgrade_by_moid" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_upgrade_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Upgrades/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgrade', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_upgrade_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgradeList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_upgrade_list_with_http_info(
**kwargs) # noqa: E501
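
    # Usage sketch (hedged): `count=True` asks the service for only the number
    # of matching upgrade records, and async_req=True defers the request to a
    # worker thread. `api` is a hypothetical configured instance of this class.
    #
    #   total = api.get_firmware_upgrade_list(count=True)
    #   thread = api.get_firmware_upgrade_list(async_req=True)
    #   upgrades = thread.get()
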
def get_firmware_upgrade_list_with_http_info(self, **kwargs): # noqa: E501
"""Read a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgradeList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method get_firmware_upgrade_list" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Upgrades',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgradeList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
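    # Illustrative usage sketch (not part of the generated client): passing the
    # OData-style query options documented above to the list call. It assumes an
    # already-configured ApiClient, that this class is instantiated as `api`, and
    # that the property names used in the expressions exist on firmware.Upgrade;
    # all of those are assumptions, not guarantees.
    #
    #   upgrades = api.get_firmware_upgrade_list(
    #       filter="Status eq 'IN_PROGRESS'",   # sent as $filter
    #       orderby='CreateTime desc',          # sent as $orderby
    #       top=10,                             # sent as $top
    #       select='Moid,Status')               # sent as $select
    #   for upgrade in upgrades.results:        # list models expose a Results array
    #       print(upgrade.moid)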
def get_firmware_upgrade_status_by_moid(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.UpgradeStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_status_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgradeStatus
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_upgrade_status_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
def get_firmware_upgrade_status_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.UpgradeStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_status_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgradeStatus, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_upgrade_status_by_moid" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_upgrade_status_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/UpgradeStatuses/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgradeStatus', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
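    # Illustrative usage sketch: reading a single 'firmware.UpgradeStatus' by Moid,
    # synchronously and with the async_req=True pattern shown in the docstrings.
    # Assumes a configured ApiClient held by `api`; the Moid value is a placeholder
    # and the printed property name is hypothetical.
    #
    #   status = api.get_firmware_upgrade_status_by_moid('000000000000000000000000')
    #   print(status.overall_percentage)        # hypothetical property name
    #
    #   thread = api.get_firmware_upgrade_status_by_moid(
    #       '000000000000000000000000', async_req=True)
    #   status = thread.get()                   # blocks until the request completes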
def get_firmware_upgrade_status_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.UpgradeStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_status_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgradeStatusList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_upgrade_status_list_with_http_info(
**kwargs) # noqa: E501
def get_firmware_upgrade_status_list_with_http_info(
self, **kwargs): # noqa: E501
"""Read a 'firmware.UpgradeStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_status_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgradeStatusList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_upgrade_status_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/UpgradeStatuses',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgradeStatusList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
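    # Illustrative usage sketch: paging through the UpgradeStatus collection with
    # $top/$skip and requesting an aggregation with $apply, per the parameter
    # descriptions above. Assumes a configured ApiClient held by `api`; the page
    # size and the grouped property name are placeholders.
    #
    #   page_size, skip = 100, 0
    #   while True:
    #       page = api.get_firmware_upgrade_status_list(top=page_size, skip=skip)
    #       if not page.results:
    #           break
    #       for status in page.results:
    #           print(status.moid)
    #       skip += page_size
    #
    #   grouped = api.get_firmware_upgrade_status_list(
    #       apply="groupby((Overallstatus),aggregate($count as Count))")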
def patch_firmware_distributable(self, moid, firmware_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_distributable(moid, firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_firmware_distributable_with_http_info(
moid, firmware_distributable, **kwargs) # noqa: E501
def patch_firmware_distributable_with_http_info(self, moid,
firmware_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_distributable_with_http_info(moid, firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to update. (required)
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method patch_firmware_distributable" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `patch_firmware_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_distributable' is set
if self.api_client.client_side_validation and (
'firmware_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_distributable` when calling `patch_firmware_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_distributable' in local_var_params:
body_params = local_var_params['firmware_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables/{Moid}',
'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
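    # Illustrative usage sketch: a partial update of a 'firmware.Distributable'
    # via the PATCH method above. The model import path and the property being
    # changed are assumptions, and typically only the fields actually set on the
    # body are serialized into the request.
    #
    #   from intersight.models.firmware_distributable import FirmwareDistributable
    #   body = FirmwareDistributable(description='Patched via the SDK')  # assumed field
    #   patched = api.patch_firmware_distributable(moid, body)
    #   print(patched.moid)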
def patch_firmware_driver_distributable(self, moid,
firmware_driver_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_driver_distributable(moid, firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_firmware_driver_distributable_with_http_info(
moid, firmware_driver_distributable, **kwargs) # noqa: E501
def patch_firmware_driver_distributable_with_http_info(
self, moid, firmware_driver_distributable, **kwargs): # noqa: E501
"""Update a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_driver_distributable_with_http_info(moid, firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to update. (required)
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_driver_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_firmware_driver_distributable" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `patch_firmware_driver_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_driver_distributable' is set
if self.api_client.client_side_validation and (
'firmware_driver_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_driver_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_driver_distributable` when calling `patch_firmware_driver_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_driver_distributable' in local_var_params:
body_params = local_var_params['firmware_driver_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables/{Moid}',
'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
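    # Illustrative usage sketch: the _request_timeout and _preload_content options
    # documented on every method above. Assumes a configured ApiClient held by `api`.
    #
    #   api.get_firmware_upgrade_list(_request_timeout=30)          # total seconds
    #   api.get_firmware_upgrade_list(_request_timeout=(3.05, 27))  # (connect, read)
    #
    #   raw = api.get_firmware_upgrade_list(_preload_content=False)
    #   # raw is the urllib3.HTTPResponse; the body has not been read or
    #   # deserialized, so read raw.data (or stream it) and parse it yourself.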
def patch_firmware_running_firmware(self, moid, firmware_running_firmware,
**kwargs): # noqa: E501
"""Update a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_running_firmware(moid, firmware_running_firmware, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareRunningFirmware firmware_running_firmware: The 'firmware.RunningFirmware' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareRunningFirmware
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_firmware_running_firmware_with_http_info(
moid, firmware_running_firmware, **kwargs) # noqa: E501
def patch_firmware_running_firmware_with_http_info(
self, moid, firmware_running_firmware, **kwargs): # noqa: E501
"""Update a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_running_firmware_with_http_info(moid, firmware_running_firmware, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareRunningFirmware firmware_running_firmware: The 'firmware.RunningFirmware' resource to update. (required)
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareRunningFirmware, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_running_firmware'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_firmware_running_firmware" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `patch_firmware_running_firmware`"
) # noqa: E501
# verify the required parameter 'firmware_running_firmware' is set
if self.api_client.client_side_validation and (
'firmware_running_firmware' not in local_var_params
or # noqa: E501
local_var_params['firmware_running_firmware'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_running_firmware` when calling `patch_firmware_running_firmware`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_running_firmware' in local_var_params:
body_params = local_var_params['firmware_running_firmware']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/RunningFirmwares/{Moid}',
'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareRunningFirmware', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
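    # Illustrative usage sketch: the *_with_http_info variants defined in this
    # class return a (data, status_code, headers) tuple rather than only the
    # deserialized model, which is useful when the HTTP status or response headers
    # matter. Assumes a configured ApiClient; `moid` and `body` are placeholders.
    #
    #   data, status, headers = api.patch_firmware_running_firmware_with_http_info(
    #       moid, body)
    #   if status == 200:
    #       print(headers.get('Content-Type'), data.moid)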
def patch_firmware_server_configuration_utility_distributable(
self, moid, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_server_configuration_utility_distributable(moid, firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_firmware_server_configuration_utility_distributable_with_http_info(
moid, firmware_server_configuration_utility_distributable,
**kwargs) # noqa: E501
def patch_firmware_server_configuration_utility_distributable_with_http_info(
self, moid, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_server_configuration_utility_distributable_with_http_info(moid, firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to update. (required)
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'moid', 'firmware_server_configuration_utility_distributable'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_firmware_server_configuration_utility_distributable"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `patch_firmware_server_configuration_utility_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_server_configuration_utility_distributable' is set
if self.api_client.client_side_validation and (
'firmware_server_configuration_utility_distributable' not in
local_var_params or # noqa: E501
local_var_params[
'firmware_server_configuration_utility_distributable'] is
None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_server_configuration_utility_distributable` when calling `patch_firmware_server_configuration_utility_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_server_configuration_utility_distributable' in local_var_params:
body_params = local_var_params[
'firmware_server_configuration_utility_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables/{Moid}',
'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=
'FirmwareServerConfigurationUtilityDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
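    # Illustrative usage sketch: client-side validation in the methods above raises
    # ApiValueError when a required parameter is missing and ApiTypeError for an
    # unexpected keyword argument; transport and HTTP failures are expected to
    # surface from the api_client (commonly as an ApiException, which is an
    # assumption here, not something shown in this module).
    #
    #   try:
    #       api.patch_firmware_server_configuration_utility_distributable(None, body)
    #   except ApiValueError as err:
    #       print('missing required parameter:', err)
    #   except ApiTypeError as err:
    #       print('unexpected keyword argument:', err)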
def update_firmware_distributable(self, moid, firmware_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_distributable(moid, firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_firmware_distributable_with_http_info(
moid, firmware_distributable, **kwargs) # noqa: E501
def update_firmware_distributable_with_http_info(self, moid,
firmware_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_distributable_with_http_info(moid, firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to update. (required)
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method update_firmware_distributable" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `update_firmware_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_distributable' is set
if self.api_client.client_side_validation and (
'firmware_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_distributable` when calling `update_firmware_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_distributable' in local_var_params:
body_params = local_var_params['firmware_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables/{Moid}',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
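    # Illustrative note: as the call_api invocations show, the update_* methods in
    # this class send POST to the same '/{Moid}' endpoints that the patch_* methods
    # address with PATCH; both are described as "Update" in the docstrings. The
    # exact server-side semantics of POST versus PATCH are defined by the Intersight
    # service, so consult the service documentation when choosing between
    # update_firmware_distributable and patch_firmware_distributable.
    #
    #   api.update_firmware_distributable(moid, body)  # POST  /firmware/Distributables/{Moid}
    #   api.patch_firmware_distributable(moid, body)   # PATCH /firmware/Distributables/{Moid}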
def update_firmware_driver_distributable(self, moid,
firmware_driver_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_driver_distributable(moid, firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_firmware_driver_distributable_with_http_info(
moid, firmware_driver_distributable, **kwargs) # noqa: E501
def update_firmware_driver_distributable_with_http_info(
self, moid, firmware_driver_distributable, **kwargs): # noqa: E501
"""Update a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_driver_distributable_with_http_info(moid, firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to update. (required)
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_driver_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_firmware_driver_distributable" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `update_firmware_driver_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_driver_distributable' is set
if self.api_client.client_side_validation and (
'firmware_driver_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_driver_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_driver_distributable` when calling `update_firmware_driver_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_driver_distributable' in local_var_params:
body_params = local_var_params['firmware_driver_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables/{Moid}',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_firmware_running_firmware(self, moid, firmware_running_firmware,
**kwargs): # noqa: E501
"""Update a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_running_firmware(moid, firmware_running_firmware, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareRunningFirmware firmware_running_firmware: The 'firmware.RunningFirmware' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareRunningFirmware
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_firmware_running_firmware_with_http_info(
moid, firmware_running_firmware, **kwargs) # noqa: E501
def update_firmware_running_firmware_with_http_info(
self, moid, firmware_running_firmware, **kwargs): # noqa: E501
"""Update a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_running_firmware_with_http_info(moid, firmware_running_firmware, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareRunningFirmware firmware_running_firmware: The 'firmware.RunningFirmware' resource to update. (required)
        :param _return_http_data_only: return the response data only, without
                                       the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareRunningFirmware, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_running_firmware'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_firmware_running_firmware" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `update_firmware_running_firmware`"
) # noqa: E501
# verify the required parameter 'firmware_running_firmware' is set
if self.api_client.client_side_validation and (
'firmware_running_firmware' not in local_var_params
or # noqa: E501
local_var_params['firmware_running_firmware'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_running_firmware` when calling `update_firmware_running_firmware`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_running_firmware' in local_var_params:
body_params = local_var_params['firmware_running_firmware']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/RunningFirmwares/{Moid}',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareRunningFirmware', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_firmware_server_configuration_utility_distributable(
self, moid, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_server_configuration_utility_distributable(moid, firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_firmware_server_configuration_utility_distributable_with_http_info(
moid, firmware_server_configuration_utility_distributable,
**kwargs) # noqa: E501
def update_firmware_server_configuration_utility_distributable_with_http_info(
self, moid, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_server_configuration_utility_distributable_with_http_info(moid, firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to update. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'moid', 'firmware_server_configuration_utility_distributable'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_firmware_server_configuration_utility_distributable"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `update_firmware_server_configuration_utility_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_server_configuration_utility_distributable' is set
if self.api_client.client_side_validation and (
'firmware_server_configuration_utility_distributable' not in
local_var_params or # noqa: E501
local_var_params[
'firmware_server_configuration_utility_distributable'] is
None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_server_configuration_utility_distributable` when calling `update_firmware_server_configuration_utility_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_server_configuration_utility_distributable' in local_var_params:
body_params = local_var_params[
'firmware_server_configuration_utility_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables/{Moid}',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=
'FirmwareServerConfigurationUtilityDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2020 all rights reserved
#
"""
This package provides the implementation of a simple evaluation network.
There are three fundamental abstractions: variables, operators, and literals. Variables hold
the values computed by the evaluation network, operators compute their values by acting on the
values of other nodes, and literals encapsulate foreign objects, such as numeric constants.
These abstractions provide the machinery for representing arbitrary expressions as graphs.
The interesting aspect of this package is that nodal values get updated automatically when the
values of any of the nodes in their domain change. Nodes keep track of the set of dependents
that are interested in their values and post notifications when their values change.
In addition, this package provides {SymbolTable}, a simple manager for evaluation nodes. Beyond
node storage, {SymbolTable} enables the naming of nodes and can act as the name resolution
context for {Expression} nodes, which evaluate strings with arbitrary python expressions that
may involve the values of other nodes in the model. The other nodes provided here operate
independently of {SymbolTable}. However, it is a good idea to build some kind of container to
hold nodes while the evaluation graph is in use.
Simple examples of the use of the ideas in this package are provided in the unit tests. For a
somewhat more advanced example, take a look at {pyre.config.Configurator}, which is a
{Hierarchical} model that builds an evaluation network out of the traits of pyre components, so
that trait settings can refer to the values of other traits in the configuration files.
"""
# the node generator
from .Calculator import Calculator as calculator
# implementation note: these factories are functions (rather than a raw import of the
# corresponding constructor) in order to prevent the secondary {import} from happening when the
# package itself is first imported. this enables the package to override compile time settings
# and makes it possible to implement the {debug} capability
# factories
# model
def model(**kwds):
"""
Build a node container that specializes in names that have encoded hierarchical levels,
such as file paths or namespaces
"""
from .Hierarchical import Hierarchical
return Hierarchical(**kwds)
# nodes
def var(value=None, **kwds):
"""
Build a variable, i.e. a node that can hold an arbitrary value
"""
# get the base node
from .Node import Node
# build a variable and return it
return Node.variable(value=value, **kwds)
def expression(*, formula, model):
"""
Build a new node that evaluates a {formula} that involves the names of other nodes as
resolved in the symbol table {model}.
"""
# build the node and return it
return model.expression(value=formula)
def sequence(*operands):
"""
Build a node that holds a sequence of other nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.sequence(operands=operands)
def mapping(**operands):
"""
    Build a node that holds a mapping of other nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.mapping(operands=operands)
def average(*operands):
"""
Compute the average of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.average(operands=operands)
def count(*operands):
"""
Compute the length of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.count(operands=operands)
def max(*operands):
"""
    Compute the maximum of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.max(operands=operands)
def min(*operands):
"""
Compute the minimum of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.min(operands=operands)
def product(*operands):
"""
    Compute the product of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.product(operands=operands)
def sum(*operands):
"""
Compute the sum of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.sum(operands=list(operands))
def debug():
"""
Support for debugging the calc package
"""
# print(" ++ debugging 'pyre.calc'")
# attach {ExtentAware} as the metaclass of {Node} so we can verify that all instances of
# this class are properly garbage collected
from ..patterns.ExtentAware import ExtentAware
# get the normal metaclass
global calculator
# derive a new one
class counted(calculator, ExtentAware): pass
# and set it as the default
calculator = counted
# all done
return
# end of file
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# coding: utf-8
import os
import glob
import sfml as sf
class Animation:
"""
An animated texture.
"""
def __init__(self, frames, interval=0):
"""
:param frames: Iterable of sf.Texture objects
:param interval: Time between two frames (default: 0.0s)
"""
self.frames = frames
self.interval = interval
self.index = 0
self.time = 0
@classmethod
def load_from_dir(cls, path, interval=None):
"""
Load an animation from a directory. Directory must contain some image
files named by their index (e.g. "1.png", "2.png", etc...)
:param path: str object, path to the directory to load
:param interval: Time between two frames
:return: Animation
"""
if path[-1] not in (os.sep, '/'):
path += os.sep
frames = list()
for frame_path in glob.iglob(path + '[0-9].png'):
frame = sf.Texture.from_file(frame_path)
frames.append(frame)
if interval is None:
return cls(frames)
else:
return cls(frames, interval)
def get_frame(self, dt):
"""
Returns the texture of the entity.
:param dt: The time between the current and the previous frame.
:return: A sf.Texture instance
"""
self.time += dt
if self.time > self.interval:
self.time = 0
self.index += 1
self.index %= len(self.frames)
return self.frames[self.index]
def reset(self):
self.time = 0
self.index = 0
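# Minimal usage sketch (illustrative only; it assumes a directory "explosion/" holding frames
# named 0.png, 1.png, ... and a render loop that supplies the per-frame delta `dt`):
#
#   anim = Animation.load_from_dir("explosion/", interval=0.1)
#   ...
#   sprite.texture = anim.get_frame(dt)   # swap the sprite's texture each frame
#   anim.reset()                          # restart from the first frame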
|
nilq/baby-python
|
python
|
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from utils import get_args, get, print_args, get_seed, extract_seed_from_ckpt
from logger import set_logger
from vilmedic.executors import Trainor, Validator
def main():
# Get args and create seed
config, override = get_args()
seed = get_seed()
# Create checkpoint dir
config.ckpt_dir = os.path.join(config.ckpt_dir, config.name)
os.makedirs(config.ckpt_dir, exist_ok=True)
    # If ckpt is specified, we continue training. Let's extract the seed
if config.ckpt is not None:
config.ckpt = os.path.join(config.ckpt_dir, config.ckpt)
seed = extract_seed_from_ckpt(config.ckpt)
# Create logger according to seed
set_logger(config.ckpt_dir, seed)
# Nice print args
print_args(config, ['trainor', 'validator'], seed, override)
# Fetch args for training and validation
train_config = get(config, 'trainor')
val_config = get(config, 'validator')
# Trainor
trainor = Trainor(config=train_config, # train_config is all args but the other executors args
seed=seed)
# Evaluator
evaluator = Validator(config=val_config,
models=[trainor.model],
seed=seed,
from_training=True)
    # Let's be gentle, give the evaluator to the trainor
trainor.evaluator = evaluator
# Boom
trainor.start()
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
import h5py
import os
from ._core import hfile
from .. import utils
from .. import h5tree
def test_h5tree(hfile):
assert hfile is not None
assert os.path.exists(hfile)
assert not utils.isHdf5FileObject(hfile)
str_list = [
b"Q=1",
b"Q=0.1",
b"Q=0.01",
b"Q=0.001",
b"Q=0.0001",
b"Q=0.00001",
]
with h5py.File(hfile, "w") as f:
assert not utils.isHdf5FileObject(hfile)
assert f is not None
assert utils.isHdf5FileObject(f)
f.create_dataset("str_list", data=str_list)
f.create_dataset("title", data=b"this is the title")
f.create_dataset("subtitle", data=[b"<a subtitle>"])
f.create_dataset("names", data=[b"one", b"two"])
assert os.path.exists(hfile)
mc = h5tree.Hdf5TreeView(hfile)
assert mc is not None
assert len(mc.report()) == 5
|
nilq/baby-python
|
python
|
from uuid import uuid1
POSTGRES_MAX_TABLE_NAME_LEN_CHARS = 63
NULL_CHARACTER = "\\N"
def generate_table_name(source_table_name: str) -> str:
table_name_template = "loading_{source_table_name}_" + uuid1().hex
# postgres has a max table name length of 63 characters, so it's possible
# the staging table name could exceed the max table length. when this happens,
# use only the uuid portion of the staging table name to ensure that the
# table name is unique.
max_source_table_name_length = POSTGRES_MAX_TABLE_NAME_LEN_CHARS - len(
table_name_template.replace("{source_table_name}", "")
)
truncated_source_table_name = source_table_name[: max_source_table_name_length - 1]
return table_name_template.format(source_table_name=truncated_source_table_name)
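if __name__ == "__main__":
    # Quick illustration (not part of the library): even a very long source table name
    # yields a unique staging name that stays within Postgres' 63-character limit.
    example = generate_table_name("a_very_long_source_table_name_used_for_demonstration")
    print(example, len(example))
    assert len(example) <= POSTGRES_MAX_TABLE_NAME_LEN_CHARS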
|
nilq/baby-python
|
python
|
'''
black_scholes.py
Created on Oct 11, 2018
@author: William Quintano
'''
from scipy.stats import norm
import math
'''
Calculates the price of a stock option using the Black-Scholes model
:param s strike price
:param t remaining lifespan of option in years
:param u price of underlying stock (To get call buying or put selling price: u = highest bid for stock.
To get call selling or put buying price: u = asking price for stock.)
:param r risk-free-rate. This should be the rate of a US treasury bill/bond with a duration close to t
:param v volatility
:param c option type. True for call, false for put
'''
def black_scholes(s,t,u,r,v,c):
if(c):
sign=1
else:
sign=-1
d1 = sign*(math.log(u/s)+(r+.5*v**2)*t)/(v*t**.5)
    d2 = d1 - sign*(v*t**.5)  # i.e. sign*(d1_std - v*sqrt(t)); applying sign to d1 a second time would flip d2 incorrectly for puts
return sign*(u*norm.cdf(d1,0,1)) - sign*((s*norm.cdf(d2,0,1))/math.exp(r*t))
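# Illustrative check only (the parameter values below are made up): price a one-year
# at-the-money call and put on a stock trading at 100, with a 2% risk-free rate and 20%
# volatility. Put-call parity (call - put == u - s*exp(-r*t)) is a handy sanity check.
if __name__ == '__main__':
    call_price = black_scholes(s=100, t=1.0, u=100, r=0.02, v=0.2, c=True)
    put_price = black_scholes(s=100, t=1.0, u=100, r=0.02, v=0.2, c=False)
    print('call: %.4f  put: %.4f  parity gap: %.6f'
          % (call_price, put_price, (call_price - put_price) - (100 - 100 * math.exp(-0.02))))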
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
from argparse import ArgumentParser
from distutils.util import get_platform
from setuptools import find_packages, setup
parser = ArgumentParser()
parser.add_argument("--plat-name", type=str, default=get_platform())
args, unknown_args = parser.parse_known_args()
if args.plat_name == "win32":
source_path = "src/main/win32"
elif args.plat_name == "win-amd64":
source_path = "src/main/win-amd64"
else:
raise OSError("mosi-cbc does not support '%s' platform" % args.plat_name)
long_description = "!!! pypandoc and/or pandoc not found, long_description is bad, don't upload this to PyPI !!!"
if any(arg in unknown_args for arg in ["sdist", "bdist_wheel"]):
try:
# noinspection PyUnresolvedReferences
from pypandoc import convert, download_pandoc
download_pandoc()
long_description = convert("README.md", "rst")
except (ImportError, OSError):
pass
setup(
name="mosi-cbc",
version="0.0.1",
description="CBC solver plugin for the mosi package.",
long_description=long_description,
url="https://github.com/alexbahnisch/mosi-cbc",
author="Alex Bahnisch",
author_email="alexbahnisch@gmail.com",
license="MIT",
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5"
"Programming Language :: Python :: 3.6"
],
keywords="mosi cbc",
packages=find_packages(source_path),
package_dir={"": source_path},
package_data={"": ["cbc.exe"]},
install_requires=[
"mosi>=0.0.3"
],
setup_requires=[
"pypandoc>=1.4"
],
tests_require=[
"pytest>=3.2.3",
"pytest-runner>=2.12.1"
],
test_suite="src.tests"
)
|
nilq/baby-python
|
python
|
# Generated by Django 3.2.3 on 2021-06-06 17:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('asset', '0001_initial'),
('category', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Investment',
fields=[
('investment_id', models.AutoField(primary_key=True, serialize=False)),
('quantity', models.DecimalField(decimal_places=10, default=1.0, max_digits=15)),
('asset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='asset', to='asset.asset')),
],
options={
'db_table': 'ewallet_investment',
},
),
migrations.CreateModel(
name='Wallet',
fields=[
('wallet_id', models.AutoField(primary_key=True, serialize=False)),
('name', models.TextField(max_length=255)),
('describe', models.TextField(blank=True, default=None, max_length=255, null=True)),
('balance', models.DecimalField(decimal_places=10, default=0.0, max_digits=15)),
('category_id', models.ForeignKey(db_column='category_id', on_delete=django.db.models.deletion.CASCADE, to='category.category')),
('investment', models.ManyToManyField(through='wallet.Investment', to='asset.Asset')),
],
options={
'db_table': 'ewallet_wallet',
},
),
migrations.AddField(
model_name='investment',
name='wallet',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='wallet', to='wallet.wallet'),
),
]
|
nilq/baby-python
|
python
|
#!/usr/bin/env dls-python2.7
"""Write coordinated magnet moves to different outputs.
A PvWriter and a Simulation writer are available to take magnet_jogs.Moves
and apply them to their respective interfaces.
"""
from cothread.catools import caput
from controls import PvReferences, PvMonitors, Arrays
import magnet_jogs
class AbstractWriter(object):
"""
Abstract writer.
Takes coordinated magnet moves keys and writes the values to a location.
"""
def __init__(self):
self.magnet_coordinator = magnet_jogs.MagnetCoordinator()
def write(self, move, factor):
"""
Apply the requested move.
Args:
move (magnet_jogs.Move): which move to perform.
factor (float): scale factor to apply to move.
"""
raise NotImplementedError()
class PvWriter(AbstractWriter):
"""Write coordinated magnets moves to PV's on the machine."""
def __init__(self):
AbstractWriter.__init__(self)
self.scale_pvs = [ctrl + ':WFSCA' for ctrl in PvReferences.CTRLS]
self.set_scale_pvs = [name + ':SETWFSCA' for name in PvReferences.NAMES]
self.offset_pvs = [ctrl + ':OFFSET' for ctrl in PvReferences.CTRLS]
def write(self, move, factor):
if move == 'SCALE':
scale_jog_values = self.magnet_coordinator.jog(
PvMonitors.get_instance().get_scales(), move, factor)
set_scale_jog_values = self.magnet_coordinator.jog(
PvMonitors.get_instance().get_set_scales(), move, factor)
self.write_to_pvs(self.scale_pvs, scale_jog_values)
self.write_to_pvs(self.set_scale_pvs, set_scale_jog_values)
else:
offset_jog_values = self.magnet_coordinator.jog(
PvMonitors.get_instance().get_offsets(), move, factor)
self.write_to_pvs(self.offset_pvs, offset_jog_values)
def write_to_pvs(self, pvs, jog_values):
caput(pvs, jog_values)
class SimWriter(AbstractWriter):
"""Write coordinated magnets moves to the manual simulation controller."""
def __init__(self, controller):
"""
Class initialised with instance of the simulation controller.
Args:
controller (straight.SimModeController): write to the controller's
                stored scales and offsets
"""
AbstractWriter.__init__(self)
self.controller = controller
def write(self, move, factor):
if move == magnet_jogs.Moves.SCALE:
jog_values = self.magnet_coordinator.jog(
self.controller.scales, move, factor)
else:
jog_values = self.magnet_coordinator.jog(
self.controller.offsets, move, factor)
self.check_bounds(move, jog_values)
self.update_sim_values(move, jog_values)
def update_sim_values(self, key, jog_values):
"""Pass jog values to the controller."""
if key == magnet_jogs.Moves.SCALE:
self.controller.update_sim(Arrays.SCALES, jog_values)
else:
self.controller.update_sim(Arrays.OFFSETS, jog_values)
def reset(self):
"""Reset simulation with the PVs to reflect the real chicane."""
simulated_scales = PvMonitors.get_instance().get_scales()
self.controller.update_sim(Arrays.SCALES, simulated_scales)
simulated_offsets = PvMonitors.get_instance().get_offsets()
self.controller.update_sim(Arrays.OFFSETS, simulated_offsets)
def check_bounds(self, key, jog_values):
"""Raise exception if new value exceeds magnet current limit."""
pvm = PvMonitors.get_instance()
scales = self.controller.scales
offsets = self.controller.offsets
imaxs = pvm.get_max_currents()
imins = pvm.get_min_currents()
# Check errors on limits.
for idx, (max_val, min_val, offset, scale, new_val) in enumerate(
zip(imaxs, imins, offsets, scales, jog_values)):
if key == magnet_jogs.Moves.SCALE:
high = offset + new_val
low = offset - new_val
else:
high = new_val + scale
low = new_val - scale
if high > max_val or low < min_val:
raise magnet_jogs.OverCurrentException(idx)
|
nilq/baby-python
|
python
|
"""Test BrownianExcursion."""
import pytest
from stochastic.processes.continuous import BrownianExcursion
def test_brownian_excursion_str_repr(t):
instance = BrownianExcursion(t)
assert isinstance(repr(instance), str)
assert isinstance(str(instance), str)
def test_brownian_excursion_sample(t, n, threshold):
instance = BrownianExcursion(t)
s = instance.sample(n)
assert len(s) == n + 1
assert (s >= 0).all()
assert s[0] == pytest.approx(0, threshold)
assert s[-1] == pytest.approx(0, threshold)
def test_brownian_excursion_sample_at(t, times, threshold):
instance = BrownianExcursion(t)
s = instance.sample_at(times)
assert len(s) == len(times)
assert (s >= 0).all()
if times[0] == 0:
assert s[0] == pytest.approx(0, threshold)
assert s[-1] == pytest.approx(0, threshold)
|
nilq/baby-python
|
python
|
import random
class RandomFlip(object):
"""Flips node positions along a given axis randomly with a given
probability.
Args:
        axis (int): The axis along which node positions are flipped.
p (float, optional): Probability that node positions will be flipped.
(default: :obj:`0.5`)
.. testsetup::
import torch
from torch_geometric.data import Data
.. testcode::
from torch_geometric.transforms import RandomFlip
pos = torch.tensor([[-1, 1], [-3, 0], [2, -1]], dtype=torch.float)
data = Data(pos=pos)
data = RandomFlip(axis=0, p=1)(data)
print(data.pos)
.. testoutput::
tensor([[ 1., 1.],
[ 3., 0.],
[-2., -1.]])
"""
def __init__(self, axis, p=0.5):
self.axis = axis
self.p = p
def __call__(self, data):
if random.random() < self.p:
data.pos[:, self.axis] = -data.pos[:, self.axis]
return data
def __repr__(self):
return '{}(axis={}, p={})'.format(self.__class__.__name__, self.axis,
self.p)
|
nilq/baby-python
|
python
|
def is_lock_ness_monster(s):
return any(phrase in s for phrase in ["tree fiddy", "three fifty", "3.50"])
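# For example (illustrative): is_lock_ness_monster("I need about tree fiddy") -> True,
# while is_lock_ness_monster("Can I borrow a dollar?") -> False.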
|
nilq/baby-python
|
python
|
import json
import pandas as pd
import time
#################################
#
#with open('logs.json', 'r') as data:
# data = data.read()
#
#logs = json.loads(data)
#
########################
def get_data(file):
with open(file, 'r') as data:
data = data.read()
logs = json.loads(data)
#s = Sender('Test', '192.168.1.214')
#logs = s.list_logs()
df = pd.DataFrame(columns=['acquired_time'])
    length = len(logs)
    i = 0
    while i < length:
for x in logs[i]:
if x == "create_params_file:output":
stats = logs[i][6]
stats = stats.split(',')
acquired_time = stats[3].split('"')
acquired_time = acquired_time[3]
print(acquired_time)
df_temp = pd.DataFrame({'acquired_time': [acquired_time]})
df = pd.concat([df, df_temp])
i += 1
df = df.reset_index(drop=True)
return df
def write_logs_to_txt(logs):
    f = open("logs.txt", "a+")
    length = len(logs)
    i = 0
    while i < length:
for x in logs[i]:
f.write(str(x))
f.write('\r\n')
f.write('#################################################### \r\n')
i += 1
f.close()
#df = get_data('logs.json')
#
#print(df)
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
return fibonacci(n - 1) + fibonacci(n - 2)
def memoize(func):
cache = dict()
def memoized_func(*args):
if args in cache:
return cache[args]
result = func(*args)
cache[args] = result
return result
return memoized_func
memoized_fibonacci = memoize(fibonacci)
x = memoized_fibonacci(10)
#x = fibonacci(35)
x = memoized_fibonacci(12)
#x = fibonacci(35)
print(x)
|
nilq/baby-python
|
python
|
# coding: utf-8
import datetime
import random
from http import HTTPStatus
from unittest.mock import Mock
from django.test.client import RequestFactory
import pytest
from src.infrastructure.api.views.exchange_rate import (
CurrencyViewSet, CurrencyExchangeRateViewSet)
from tests.fixtures import currency, exchange_rate
@pytest.mark.unit
def test_currency_viewset_get(currency):
viewset = CurrencyViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.get.return_value = (
vars(currency),
HTTPStatus.OK.value
)
response = viewset.get(RequestFactory(), currency.code)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, dict)
@pytest.mark.unit
def test_currency_viewset_list(currency):
viewset = CurrencyViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.list.return_value = (
[vars(currency) for _ in range(random.randint(1, 10))],
HTTPStatus.OK.value
)
response = viewset.list(RequestFactory(), currency.code)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, list)
@pytest.mark.unit
def test_currency_exchange_rate_viewset_convert(exchange_rate):
viewset = CurrencyExchangeRateViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.convert.return_value = (
{
'exchanged_currency': exchange_rate.exchanged_currency,
'exchanged_amount': round(random.uniform(10, 100), 2),
'rate_value': round(random.uniform(0.5, 1.5), 6)
},
HTTPStatus.OK.value
)
request = RequestFactory()
request.query_params = {
'source_currency': exchange_rate.source_currency,
'exchanged_currency': exchange_rate.exchanged_currency,
'amount': round(random.uniform(10, 100), 2)
}
response = viewset.convert(request)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, dict)
@pytest.mark.unit
def test_currency_exchange_rate_viewset_list(exchange_rate):
series_length = random.randint(1, 10)
viewset = CurrencyExchangeRateViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.list.return_value = (
[exchange_rate for _ in range(series_length)],
HTTPStatus.OK.value
)
request = RequestFactory()
request.query_params = {
'source_currency': exchange_rate.source_currency,
'date_from': (
datetime.date.today() + datetime.timedelta(days=-series_length)
).strftime('%Y-%m-%d'),
'date_to': datetime.date.today().strftime('%Y-%m-%d'),
}
response = viewset.list(request)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, list)
@pytest.mark.unit
def test_currency_exchange_rate_viewset_calculate_twr(exchange_rate):
viewset = CurrencyExchangeRateViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.calculate_twr.return_value = (
{'time_weighted_rate': round(random.uniform(0.5, 1.5), 6)},
HTTPStatus.OK.value
)
request = RequestFactory()
request.query_params = {
'source_currency': exchange_rate.source_currency,
'exchanged_currency': exchange_rate.exchanged_currency,
'date_from': (
datetime.date.today() + datetime.timedelta(days=-5)
).strftime('%Y-%m-%d'),
'date_to': datetime.date.today().strftime('%Y-%m-%d'),
}
response = viewset.calculate_twr(request)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, dict)
|
nilq/baby-python
|
python
|
import mysql.connector
# Multicraft Cred
mydb = mysql.connector.connect(
host="",
user="",
password="",
database=""
)
mycursor = mydb.cursor()
# WHMCS Cred
mydb_whmcs = mysql.connector.connect(
host="",
user="",
password="",
database=""
)
mycursor_whmcs = mydb_whmcs.cursor()
|
nilq/baby-python
|
python
|
import argparse
import sys
import analyse
import calibration
from logginghelpers import configure_logging
ALL_CMD = 'all'
MUNICIPALITY_CMD = 'municipality'
def parse_settings(settings) -> dict:
result = {}
if settings is not None:
for setting in settings:
k, v = setting.split('=')
result[k] = v
return result
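# For example (illustrative): parse_settings(['LOG_LEVEL=INFO', 'DOWNLOAD_DELAY=2'])
# returns {'LOG_LEVEL': 'INFO', 'DOWNLOAD_DELAY': '2'}; values are kept as strings.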
def schedule_calibration_run(args):
settings = parse_settings(args.override_settings)
tags = args.tags
if tags is None:
tags = []
if args.calibrateSubCommand == ALL_CMD:
calibration.schedule_for_all_municipalities(args.settings_key, settings, args.force_all, args.limit, tags)
elif args.calibrateSubCommand == MUNICIPALITY_CMD:
calibration.schedule_for_single_municipality(args.municipality_id, args.settings_key, settings, tags)
def analyse_data(args):
tags = args.tags
if tags is None:
tags = []
if args.analyseSubCommand == ALL_CMD:
analyse.analyse_all_calibration_runs(args.output_file, args.limit, tags)
def add_settings_override_parser(parser):
parser.add_argument('-o', '--override-settings', type=str, nargs='*', dest='override_settings', metavar='KEY=VALUE', help='override individual scrapy settings')
def add_tags_parser(parser, help_text):
parser.add_argument('-t', '--tags', type=str, nargs='*', dest='tags', metavar='TAG_VALUE', help=help_text)
def add_settings_key_parser(parser, default):
parser.add_argument('-s', '--settings-key', type=str, default=default, dest='settings_key', help='use settings stored in default_scrapy_settings with this key')
def add_calibration_parser(subparsers):
schedule_calibration_parser = subparsers.add_parser('calibrate', help='schedule calibration runs')
schedule_calibration_parser.set_defaults(func=schedule_calibration_run)
calibration_subparsers = schedule_calibration_parser.add_subparsers(
help='schedule calibration run for:', required=True, dest='calibrateSubCommand'
)
all_subparser = calibration_subparsers.add_parser(ALL_CMD,
help='all municipalities')
all_subparser.add_argument(
'-f',
'--force',
default=False,
action='store_true',
dest='force_all',
help='Schedule calibration run for all municipalities even if they are already calibrated',
)
all_subparser.add_argument(
'-l',
'--limit',
type=int,
dest='limit',
help='limit amount of runs to be scheduled',
)
add_settings_key_parser(all_subparser, 'CALIBRATE')
add_settings_override_parser(all_subparser)
add_tags_parser(all_subparser, 'tags to attach to the created queue entries')
municipality_subparser = calibration_subparsers.add_parser(MUNICIPALITY_CMD,
help='given municipality id')
municipality_subparser.add_argument('municipality_id', type=int, help='id of the municipality')
add_settings_key_parser(municipality_subparser, 'CALIBRATE')
add_settings_override_parser(municipality_subparser)
add_tags_parser(municipality_subparser, 'tags to attach to the created queue entry')
def add_analyse_parser(subparsers):
analyse_parser = subparsers.add_parser('analyse', help='analyse data')
analyse_subparsers = analyse_parser.add_subparsers(help='analyse what')
parser = analyse_subparsers.add_parser('calibration', help='analyse calibration runs')
parser.set_defaults(func=analyse_data)
calibration_subparser = parser.add_subparsers(required=True, dest='analyseSubCommand', help='analyse calibration runs')
all_subparser = calibration_subparser.add_parser(ALL_CMD, help='analyse all finished calibration runs')
all_subparser.add_argument('-o', '--output-file', type=str, help='output data to this csv file', required=True)
all_subparser.add_argument(
'-l',
'--limit',
type=int,
dest='limit',
help='limit amount of runs to be analysed',
)
add_tags_parser(all_subparser, 'only entries which contain all of the specified tags')
def parse_args(args):
parser = argparse.ArgumentParser('scheduler.py')
subparsers = parser.add_subparsers(help='action types')
add_calibration_parser(subparsers)
add_analyse_parser(subparsers)
arguments = parser.parse_args(args)
return arguments
if __name__ == '__main__':
configure_logging()
parsed_args = parse_args(sys.argv[1:])
if hasattr(parsed_args, 'func'):
parsed_args.func(parsed_args)
else:
parse_args(['-h'])
|
nilq/baby-python
|
python
|
import sys
import time
import pygame
from pygame.locals import *
from classes.disk import Disk
from classes.robot import Robot
from classes.exit import Exit
"""
This class is used for the simulation.
It loops 60 times a second so it can draw and update the simulation.
"""
class Window:
def __init__(self):
"""Initialize PyGame"""
pygame.init()
"""Set the window Size"""
self.width = 600
self.height = 600
"""Create the Screen"""
self.font = pygame.font.SysFont("monospace", 20)
self.screen = pygame.display.set_mode((self.width, self.height))
"""Initialize classes as None"""
self.disk = None
self.r1 = None
self.r2 = None
self.exit = None
self.clock = None
def new(self, radius, r1StartPos, r2StartPos, exitPos,
startPointOnEdge, r1TravelToEdge, r2TravelToEdge,
r1TravelOnCircleEdge, r2TravelOnCircleEdge):
"""Create our classes"""
self.disk = Disk(radius, (300,300))
self.r1 = Robot(self.disk, r1StartPos, exitPos, startPointOnEdge, r1TravelToEdge, r1TravelOnCircleEdge, False)
self.r2 = Robot(self.disk, r2StartPos, exitPos, startPointOnEdge, r2TravelToEdge, r2TravelOnCircleEdge, True)
self.exit = Exit(exitPos)
self.clock = pygame.time.Clock()
def _draw(self):
self.screen.fill(pygame.Color(255,255,255))
self.disk.draw(self.screen)
self.exit.draw(self.screen)
self.r1.draw(self.screen, self.font)
self.r2.draw(self.screen, self.font)
pygame.display.update()
def _update(self):
elapsedTime = self.clock.tick_busy_loop(60)/1000 #Seconds since last update
self.r1.update(elapsedTime)
self.r2.update(elapsedTime)
def MainLoop(self):
"""This is the Main Draw Loop"""
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
if(None not in (self.r1, self.r2, self.disk, self.exit, self.clock)):
if(self.r1.getIsFinished() and self.r2.getIsFinished()):
time.sleep(2)
return
self._update()
self._draw()
|
nilq/baby-python
|
python
|
from os import name
from django.urls import path, include
from .views import *
urlpatterns = [
path('latest-products/', LatestProductsList.as_view(), name="latest-products"),
path('checkout/', checkout, name="checkout"),
path('orders/', OrdersList.as_view(), name="orders"),
path('products/search/', search, name="search"),
path('products/<slug:category_slug>/', CategoryDetail.as_view(), name="category-detail"),
path('products/<slug:category_slug>/<slug:product_slug>/', ProductDetail.as_view(), name="product-detail"),
]
|
nilq/baby-python
|
python
|
from Cartas import Baraja, Carta
import os
Line = '---------------------------'
def clear():
if os.name == "nt":
os.system("cls")
else:
os.system("clear")
class Jugador:
def __init__(self, nombre: str, mazo: Baraja):
self.nombre = nombre
self.cardSum = 0
self.acabar = False
self.mazo = mazo
self.baraja = Baraja()
def hit(self):
carta = self.mazo.getRandom()
self.baraja + carta
self.cardSum = self.baraja.valor
if self.cardSum > 21:
self.acabar = True
def Turnar(self, baraja: Baraja) -> Baraja:
self.mazo = baraja
self.cardSum = self.baraja.valor
while 1:
try:
print(f'Por medio del siguiente puede seleccionar las acciones que desea realizar')
print('1. Tomar')
print('2. Parar')
respuesta = int(input())
                if (not isinstance(respuesta, int)) or respuesta < 1 or respuesta > 2:
raise TypeError
if respuesta == 1:
self.hit()
elif respuesta == 2:
self.stay()
break
            except (TypeError, ValueError):  # "TypeError and ValueError" would only catch ValueError
print('El valor ingresado no es un número válido')
clear()
return self.mazo
def stay(self):
self.acabar = True
def mostrarCartas(self):
space = ' ' * (len(Line) - len(self.nombre) - 7)
print(f'{Line}--')
print(f'| [{self.baraja.valor}]\t{self.nombre}{space}|')
print(f'{Line}--')
print('|| ', end='')
for i in range(len(self.baraja)):
print(f'{str(self.baraja[i].valor.sign)}{self.baraja[i].simbolo}', end='')
if i < len(self.baraja) - 1:
print(' | ', end='')
print(f' ||')
print(f'{Line}--')
print()
def addCarta(self, carta: Carta) -> Baraja:
self.baraja + carta
for card in self.mazo.cartas:
if carta == card:
self.mazo.cartas.remove(card)
return self.mazo
class Crupier(Jugador):
def __init__(self, mazo: Baraja):
super().__init__('Crupier', mazo)
def hit(self):
carta = self.mazo.getRandom()
if carta.valor == 'A':
if carta.value + self.cardSum < 21:
self.cardSum = self.baraja.valor + 11
self.baraja + carta
else:
self.baraja + carta
self.cardSum = self.baraja.valor
else:
self.baraja + carta
self.cardSum = self.baraja.valor
if self.cardSum > 21:
self.acabar = True
def Turnar(self, baraja: Baraja) -> Baraja:
self.mazo = baraja
if self.cardSum < 16:
self.hit()
return self.mazo
def mostrarCartas(self, mostrar: bool = False):
space = ' '
space *= (len(Line) - len(self.nombre) - 7)
print(f'{Line}--')
if not mostrar:
print(f'| [{self.baraja[0].value}]\t{self.nombre}{space}|')
else:
print(f'| [{self.baraja.valor}]\t{self.nombre}{space}|')
print(f'{Line}--')
print('|| ', end='')
for i in range(len(self.baraja)):
if i > 0 and not mostrar:
print(f'??', end='')
else:
print(f'{str(self.baraja[i].valor.sign)}{self.baraja[i].simbolo}', end='')
if i < len(self.baraja) - 1:
print(' | ', end='')
print(f' ||')
print(f'{Line}--')
print()
|
nilq/baby-python
|
python
|
import time
import random
import itertools as it
from pathlib import Path
from collections import namedtuple
import numpy as np
import torch.nn.functional as F
from vizdoom import DoomGame, ScreenResolution, \
ScreenFormat, GameVariable, Mode, Button
from utils.helpers import get_logger
logger = get_logger(__file__)
class DoomEnvironment():
def __init__(self, cfgs):
scenario_name = cfgs['name']
filepath = Path(__file__).parent
config_file = filepath.joinpath('assets/{}.cfg'.format(scenario_name))
scenario_file = filepath.joinpath('assets/{}.wad'.format(scenario_name))
logger.info('Loading game config from {}'.format(config_file.name))
logger.info('Loading scenario config from {}'.format(scenario_file.name))
assert config_file.is_file(), \
"{} no such file".format(config_file)
assert scenario_file.is_file(), \
"{} no such file".format(scenario_file)
self.game = DoomGame()
self.game.load_config(config_file.as_posix())
self.game.set_doom_scenario_path(scenario_file.as_posix())
self.game.set_screen_resolution(ScreenResolution.RES_320X240)
self.game.set_screen_format(ScreenFormat.GRAY8)
# Enables depth buffer.
self.game.set_depth_buffer_enabled(True)
# Enables labeling of in game objects labeling.
self.game.set_labels_buffer_enabled(True)
# Enables buffer with top down map of the current episode/level.
self.game.set_automap_buffer_enabled(True)
# Sets other rendering options (all of these options except
# crosshair are enabled (set to True) by default)
self.game.set_render_hud(True)
self.game.set_render_minimal_hud(False) # If hud is enabled
self.game.set_render_crosshair(False)
self.game.set_render_weapon(True)
self.game.set_render_decals(False) # Bullet holes and blood on the walls
self.game.set_render_particles(False)
self.game.set_render_effects_sprites(False) # Smoke and blood
self.game.set_render_messages(False) # In-game messages
self.game.set_render_corpses(False)
# Effect upon taking damage or picking up items
self.game.set_render_screen_flashes(True)
# Makes episodes start after 10 tics (~after raising the weapon)
self.game.set_episode_start_time(10)
# Makes the window appear (turned on by default)
self.game.set_window_visible(True)
# Turns on the sound. (turned off by default)
self.game.set_sound_enabled(True)
# Sets ViZDoom mode (PLAYER, ASYNC_PLAYER, SPECTATOR, ASYNC_SPECTATOR,
# PLAYER mode is default)
self.game.set_mode(Mode.PLAYER)
self.game.init()
self.action_size = self.game.get_available_buttons_size()
self.actions = [a.tolist() for a in np.eye(self.action_size, dtype=bool)]
logger.debug('Action space size {}'.format(self.action_size))
logger.info('Environment setup')
def step(self, action):
reward = self.game.make_action(self.actions[action])
done = self.game.is_episode_finished()
next_state = self.get_frame()
return next_state, reward, done, {}
def reset(self):
self.game.new_episode()
return self.get_frame()
def get_total_reward(self):
return self.game.get_total_reward()
def close(self):
self.game.close()
def get_frame(self):
state = self.game.get_state()
return state.screen_buffer if state is not None else None
def update_env(self, update_fn, **kwargs):
pass
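# Rough usage sketch (illustrative only; it assumes vizdoom is installed and a scenario named
# 'basic' ships with matching basic.cfg / basic.wad files under assets/):
#
#   env = DoomEnvironment({'name': 'basic'})
#   state = env.reset()
#   next_state, reward, done, _ = env.step(0)   # the action is an index into env.actions
#   env.close()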
|
nilq/baby-python
|
python
|
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
import pandas as pd
import plotly.graph_objs as go
dataset_train = pd.read_csv('sg_train.csv')
dataset_train_copy = dataset_train.copy()
dataset_train_copy = dataset_train_copy.sort_values(by='Date')
dataset_test = pd.read_csv('sg_test_predict.csv')
dataset_test = dataset_test.drop(columns=['Unnamed: 0'])
fig = go.Figure()
fig.add_trace(go.Candlestick(x=pd.to_datetime(dataset_train_copy['Date']),open=dataset_train_copy['Open'],high=dataset_train_copy['High'],
low=dataset_train_copy['Low'],close=dataset_train_copy['Close'],increasing=dict(line=dict(color='blue')),
decreasing=dict(line=dict(color='red')),name='Real'))
fig.add_trace(go.Candlestick(x=pd.to_datetime(dataset_test['Date']),open=dataset_test['Open'],high=dataset_test['High'],
low=dataset_test['Low'],close=dataset_test['Close'],increasing=dict(line=dict(color='green')),
decreasing=dict(line=dict(color='pink')),name='Predicted'))
fig.update_layout(title='Stock Price Trend',title_x=0.5,xaxis=dict(tickformat='%Y-%m-%d',title='Date', nticks=10, tickangle=-45), yaxis_title='Stock Price')
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.CERULEAN])
server = app.server
app.layout = html.Div(
[
dbc.Row([
dbc.Col((html.H1('Stock Predictor',
style={'textAlign': 'center', 'color': 'white', 'marginTop': 90})), width=12)
], style={'background-color': '#87D3F8', 'marginBottom': 20, 'height': 200}),
html.Div([
dbc.Row([
dbc.Col(html.H2(html.B('Predictions for Open,High,Low and Close Prices'),
style={'textAlign': 'left', 'marginBottom': 30, 'marginLeft': 10}), width=12)])
]),
html.Div([
dbc.Row([
dbc.Col(html.H5('Select the Range of dates using the Range slider below the graph',style={'textAlign':'left','marginBottom':20,'marginLeft':10}),width=12)])
]),
dbc.Row([
dbc.Col(dcc.Graph(id='candle-stick-chart', figure=fig, config={'displayModeBar': False})),
]),
])
if __name__ == '__main__':
app.run_server(debug=False)
|
nilq/baby-python
|
python
|
import unittest
import xmlconfigparse
import xml.etree.ElementTree as ET
import xml.etree.ElementPath as EP
class XmlToDictTest(unittest.TestCase):
"""
"""
@classmethod
def setUpClass(cls):
"""Creates new xml file to test"""
# Creates xml file to be modified by test
root = ET.Element("root")
foo = ET.SubElement(root, "foo")
ET.SubElement(foo, "bar")
ET.SubElement(foo, "bar")
tree = ET.ElementTree(root)
tree.write("test.xml")
# Creates xml file to be tested against
test_root = ET.Element("root")
test_foo = ET.SubElement(test_root, "foo")
ET.SubElement(test_foo, "bar")
test_bar = ET.SubElement(test_foo, "bar")
test_name = ET.SubElement(test_bar, "name")
test_no = ET.SubElement(test_name, "no")
ET.SubElement(test_no, "more")
test_value = ET.SubElement(test_name, "value")
test_name.text = "test"
test_name.set("veg", "3")
test_value.text = "total"
test_tree = ET.ElementTree(test_root)
test_tree.write("testcase.xml")
def test_xmlinsert(self):
""" Test module
"""
xmlconfigparse.xmlinsert(
"name[@veg=3 text()=test][no/more]/value[text()=total]",
"test.xml",
tag="foo/bar[last()]",
)
try:
xmlroot = ET.parse("test.xml").getroot()
xmltestroot = ET.parse("testcase.xml").getroot()
except ET.ParseError:
self.fail(msg="Xml Parse Error")
xmlteststring = ET.tostring(xmlroot)
xmltempstring = ET.tostring(xmltestroot)
self.assertEqual(xmlteststring, xmltempstring, msg="Unexpected string returned")
def test_elementinset(self):
"""Test method insert subelements
"""
element_test = ET.Element("test")
element_temp = ET.Element("test")
new_temp = ET.SubElement(element_temp, "new")
ET.SubElement(new_temp, "insert")
token_iter = EP.xpath_tokenizer("new/insert")
xmlconfigparse.elementinsert(token_iter, element_test)
element_temp_string = ET.tostring(element_temp)
element_test_string = ET.tostring(element_test)
self.assertEqual(
element_test_string, element_temp_string, msg="Unexpected string returned"
)
def test_predicate(self):
"""Test predicate addition
"""
element_test = ET.Element("test")
element_temp = ET.Element("test")
element_temp.text = "Hey"
element_temp.set("val", "8")
ET.SubElement(element_temp, "ins")
token_iter = EP.xpath_tokenizer("@val=8]")
xmlconfigparse.add_predicate(token_iter, element_test)
token_iter = EP.xpath_tokenizer("text()=Hey]")
xmlconfigparse.add_predicate(token_iter, element_test)
token_iter = EP.xpath_tokenizer("ins/]")
xmlconfigparse.add_predicate(token_iter, element_test)
element_temp_string = ET.tostring(element_temp)
element_test_string = ET.tostring(element_test)
self.assertEqual(
element_test_string, element_temp_string, msg="Unexpected string returned"
)
def test_attribute(self):
"""Test attribute setting
"""
# template elements
attrib_element = ET.Element("test")
text_element = ET.Element("test")
attrib_element.set("val", "4")
text_element.text = "foo"
# testing elements
no_text_element = ET.Element("test")
no_attrib_element = ET.Element("test")
xmlconfigparse.set_xml_attribute(["@", "="], ["val", "4"], no_attrib_element)
xmlconfigparse.set_xml_attribute(["()", "="], ["text", "foo"], no_text_element)
element_attrib_string = ET.tostring(attrib_element)
element_text_string = ET.tostring(text_element)
no_attrib_string = ET.tostring(no_attrib_element)
no_text_string = ET.tostring(no_text_element)
self.assertEqual(
element_text_string, no_text_string, msg="Unexpected string returned"
)
self.assertEqual(
element_attrib_string, no_attrib_string, msg="Unexpected string returned"
)
if __name__ == "__main__":
unittest.main()
|
nilq/baby-python
|
python
|
from typing import Literal
foo: Literal[""] = ""
bar: Literal[''] = ''
|
nilq/baby-python
|
python
|
time_travel = int(input())
speed_travel = int(input())
liters = (speed_travel * time_travel) / 12
print('{:.3f}'.format(liters))
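# Worked example (illustrative): travelling for 3 hours at 120 km/h gives
# (120 * 3) / 12 = 30.000 litres.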
|
nilq/baby-python
|
python
|
import os
import re
import ssl
from operator import itemgetter
import requests
import urllib3
from bs4 import BeautifulSoup
from smart_open import open
ssl._create_default_https_context = ssl._create_unverified_context
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
MAIN_PAGE = "https://tff.org/default.aspx?pageID=545"
FIRST_VALID_SEASON_URL = "http://www.tff.org/default.aspx?pageID=561"
class Match:
def __init__(self, url_match):
self.match_url = url_match
self.match_soup = BeautifulSoup(requests.get(url_match, timeout=5, verify=ssl.CERT_NONE, headers={"User-agent":
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36"}).content,
"html.parser")
self.home_team = None
self.away_team = None
self.referee = None
self.day = None
self.time = None
self.goals = None
self.subs = None
self.red_cards = None
self.events_array = None
def parse_match_data(self):
self.parse_teams()
self.parse_referee()
self.parse_goals()
self.parse_date()
self.parse_subs()
self.parse_red_cards()
self.create_json()
def parse_teams(self):
teams_tag = self.match_soup.find_all('a', {'id': re.compile(r'Takim.$')})
team_one, team_two = teams_tag[0].text, teams_tag[1].text
self.home_team = team_one.strip()
self.away_team = team_two.strip()
def parse_referee(self):
ref_tag = self.match_soup.find('a', {'id': re.compile(r'Hakem')})
ref_name = ref_tag.text[:-7]
self.referee = ref_name
def parse_date(self):
date_tag = self.match_soup.find('span', id=lambda x: x and "Tarih" in x).text
date_splitted = date_tag.split("-")
try:
date, time = date_splitted[0], date_splitted[1]
except IndexError:
date = date_splitted[0]
time = 'Yok'
self.day = date
self.time = time
def parse_goals(self):
all_goals = []
goals = self.match_soup.findAll('a', id=lambda x: x and "Goller" in x)
for i in goals:
goal_info = i.text
team_id = re.search(r'Takim.', str(i)).group(0)[-1]
splitted_goal_info = goal_info.split(",")
scorer = splitted_goal_info[0]
scored_with = goal_info[-2]
try:
goal_minute = int(splitted_goal_info[1].split(".")[0])
except ValueError: # if goal scored in extra minutes
splitted_by_plus = splitted_goal_info[1].split(".")[0].split("+")
goal_minute = int(splitted_by_plus[0]) + int(splitted_by_plus[1])
if int(team_id) == 1:
goal_element = ['home', scorer, goal_minute, scored_with]
else:
goal_element = ['away', scorer, goal_minute, scored_with]
all_goals.append(goal_element)
self.goals = sorted(all_goals, key=itemgetter(2))
def parse_subs(self):
team_one_out, team_two_out = [], []
team_one_in, team_two_in = [], []
        subs_out_tags = self.match_soup.find_all(['a', "span"], {'id': re.compile(r'Takim._rptCikanlar')})  # Cikanlar: players going out
        team_id = 1
        for e, i in enumerate(subs_out_tags):
if e == 0:
continue
if i.text == "Oyundan Çıkanlar":
team_id = 2
continue
if team_id == 1:
team_one_out.append(i.text)
else:
team_two_out.append(i.text)
        subs_in_tags = self.match_soup.find_all(['a', "span"], {'id': re.compile(r'Takim._rptGirenler')})  # Girenler: players coming in
        team_id = 1
        for e, i in enumerate(subs_in_tags):
if e == 0:
continue
if i.text == "Oyuna Girenler":
team_id = 2
continue
if team_id == 1:
team_one_in.append(i.text)
else:
team_two_in.append(i.text)
team_one_all = [team_one_in, team_one_out]
team_two_all = [team_two_in, team_two_out]
all_subs = [team_one_all, team_two_all]
self.subs = all_subs
def parse_red_cards(self):
team_one_red, team_two_red = [], []
red_card_tags = self.match_soup.find_all('img', {'alt': ["Çift Sarı Kart", "Kırmızı Kart"]})
for i in red_card_tags:
team_id = re.search(r'Takim.', str(i)).group(0)[-1]
name_minute = i.text.strip()
name_minute_splitted = name_minute.split("\n")
# Discard cards seen after the match
if name_minute_splitted[-1] == "MS":
continue
if int(team_id) == 1:
team_one_red.extend(name_minute_splitted)
else:
team_two_red.extend(name_minute_splitted)
all_red_cards = [team_one_red, team_two_red]
self.red_cards = all_red_cards
def create_json(self):
constant_info = {
'match_start_date': self.day.strip(),
'match_start_time': self.time.strip(),
'home_team': self.home_team,
'away_team': self.away_team,
'referee': self.referee
}
events = []
if len(self.goals) > 0:
home_score, away_score = 0, 0
for goal in self.goals:
dict_merge = {}
if goal[0] == 'home':
home_score += 1
else:
away_score += 1
event_json = {'type': 'goal', 'scoring_team': goal[0], 'scorer': goal[1], 'event_minute': goal[2],
'scored_with': goal[3], 'current_home_score': home_score,
'current_away_score': away_score}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
sub_time = None
if self.subs is not None:
for home_sub_in, home_sub_out in zip(reversed(self.subs[0][0]), reversed(self.subs[0][1])):
if ".dk" in home_sub_in:
sub_time = home_sub_in.split(".")[0]
continue
dict_merge = {}
event_json = {'type': 'home_sub', 'event_minute': sub_time, 'sub_in': home_sub_in,
'sub_out': home_sub_out}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
for away_sub_in, away_sub_out in zip(reversed(self.subs[1][0]), reversed(self.subs[1][1])):
if ".dk" in away_sub_in:
sub_time = away_sub_in.split(".")[0]
continue
dict_merge = {}
event_json = {'type': 'away_sub', 'event_minute': sub_time, 'sub_in': away_sub_in,
'sub_out': away_sub_out}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
if self.red_cards is not None:
card_time = None
for home_red in reversed(self.red_cards[0]):
if ".dk" in home_red:
card_time = home_red.split(".")[0]
continue
dict_merge = {}
event_json = {'type': 'home_red', 'event_minute': card_time, 'info': home_red}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
for away_red in reversed(self.red_cards[1]):
if ".dk" in away_red:
card_time = away_red.split(".")[0]
continue
dict_merge = {}
event_json = {'type': 'away_red', 'event_minute': card_time, 'info': away_red}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
self.events_array = events
def __str__(self):
return_str = f"""
'Match date: ' {self.day}
'Match time: ' {self.time}
'Home team: ' {self.home_team}
'Away team: ' {self.away_team}
'Referee: ' {self.referee}
'Home team goals: ' {self.goals[0]}
'Away team goals: ' {self.goals[1]}
'Home team subs: ' {self.subs[0]}
'Away team subs: ' {self.subs[1]}
'Home team red cards: ' {self.red_cards[0]}
'Away team red cards: ' {self.red_cards[1]}
""".strip()
return return_str
def create_match_obj_list():
"""
read match_urls file from s3 and create match objects
"""
with open(f"s3://{os.environ['BUCKET_NAME']}/match_urls.txt") as f:
content = f.readlines()
all_matches = [x.strip() for x in content]
match_obj_list = []
for match_url in all_matches:
print("Parsing match url", match_url)
try:
match = Match(match_url)
match.parse_match_data()
match_obj_list.append(match)
except Exception as e:
print(str(e))
continue
return match_obj_list
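# Rough usage sketch (illustrative only; it assumes the BUCKET_NAME environment variable points
# to an S3 bucket containing match_urls.txt and that the tff.org match pages are reachable):
#
#   matches = create_match_obj_list()
#   for m in matches:
#       print(m.events_array)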
|
nilq/baby-python
|
python
|
# Copyright 2020 Dylan Baker
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ConstList.
These work by using mypy ignore statements for code that should break, and
relying on mypy to catch "useless ignore" statements
"""
from __future__ import annotations
import typing
from constprotocol import ConstList
class MyList(list):
pass
def test_const_list_paramater() -> None:
"""Test passing various kinds into a function that returns a ConstList."""
def func(l: ConstList[str]) -> None:
pass
# Plain list does work
func(['a', 'b'])
# A ConstList does work
c: ConstList[str] = ['1', '2']
func(c)
# Sequence doesn't work, because it doesn't implement add or copy
a: typing.Sequence[str] = ['str']
func(a) # type: ignore
# Set doesn't work
b = {'a', 'b'}
func(b) # type: ignore
# Dict keys don't work
func({'a': 'b'}.keys()) # type: ignore
def test_const_list_return() -> None:
"""Test returning a ConstList and using that value."""
def func() -> ConstList[str]:
return ['a', 'b', 'c']
a = func()
# Does not have an append method
a.append('d') # type: ignore
# Does not have an iadd method
a += ['b'] # type: ignore
# Works, since a is not mutated
b: ConstList[str] = a + ['a']
# Does not have an append method
b.append('d') # type: ignore
    # Works, since the value returned by func() is not mutated
c = func() + ['d']
c.append('e')
assert c == ['a', 'b', 'c', 'd', 'e']
# With a custom class
d: ConstList[str] = MyList(['a', 'b'])
e = d + ['c']
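# Illustrative note (not part of the original tests): these assertions only pay
# off when mypy is run with unused-ignore warnings enabled, e.g. something like
# the invocation below; the exact file path and project setup are assumptions.
#
#     mypy --warn-unused-ignores tests/test_const_list.py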
|
nilq/baby-python
|
python
|
# FinSim
#
# Copyright 2019 Carnegie Mellon University. All Rights Reserved.
#
# NO WARRANTY. THIS CARNEGIE MELLON UNIVERSITY AND SOFTWARE ENGINEERING INSTITUTE MATERIAL IS FURNISHED ON AN "AS-IS" BASIS. CARNEGIE MELLON UNIVERSITY MAKES NO WARRANTIES OF ANY KIND, EITHER EXPRESSED OR IMPLIED, AS TO ANY MATTER INCLUDING, BUT NOT LIMITED TO, WARRANTY OF FITNESS FOR PURPOSE OR MERCHANTABILITY, EXCLUSIVITY, OR RESULTS OBTAINED FROM USE OF THE MATERIAL. CARNEGIE MELLON UNIVERSITY DOES NOT MAKE ANY WARRANTY OF ANY KIND WITH RESPECT TO FREEDOM FROM PATENT, TRADEMARK, OR COPYRIGHT INFRINGEMENT.
#
# Released under a MIT (SEI)-style license, please see license.txt or contact permission@sei.cmu.edu for full terms.
#
# [DISTRIBUTION STATEMENT A] This material has been approved for public release and unlimited distribution. Please see Copyright notice for non-US Government use and distribution.
#
# This Software includes and/or makes use of the following Third-Party Software subject to its own license:
#
# 1. Django (https://www.djangoproject.com/foundation/faq/) Copyright 2005-2018 Django Software Foundation.
# 2. bootstrap (https://getbootstrap.com/docs/4.0/about/license/) Copyright 2018 Twitter.
# 3. glyphicons (https://www.glyphicons.com/license/) Copyright 2010-2018 GLYPHICONS.
# 4. jquery (https://jquery.org/license/) Copyright 2018 jquery foundation.
# 5. jquery tablesorter (https://mottie.github.io/tablesorter/docs/) Copyright 2007-2018 Christian Bach, Rob Garrison and Contributing Authors (see AUTHORS file).
# 6. jquery validate (https://jqueryvalidation.org/) Copyright 2006-2018 Jörn Zaefferer, Markus Staab, Brahim Arkni, and contributors.
# 7. jquery validate unobtrusive (https://github.com/aspnet/jquery-validation-unobtrusive/blob/master/LICENSE.txt) Copyright 2014-2018 .NET Foundation.
# 8. modernizr (https://github.com/Modernizr/Modernizr/blob/master/LICENSE) Copyright 2009-2018 https://github.com/Modernizr/Modernizr/graphs/contributors.
# 9. respond.js (https://github.com/scottjehl/Respond) Copyright 2011 Scott Jehl.
# 10. roboto fonts (https://fonts.google.com/specimen/Roboto) Copyright 2015-2018 Google, Inc..
# 11. xregexp (http://xregexp.com/) Copyright 2007-2012 Steven Levithan.
#
# DM19-0396
#
"""
Definition of forms.
"""
from django import forms
from django.contrib.auth.forms import AuthenticationForm
from django.utils.translation import ugettext_lazy as _
class BootstrapAuthenticationForm(AuthenticationForm): #Authenticate - username and password
"""Authentication form which uses boostrap CSS."""
username = forms.CharField(max_length=254,
widget=forms.TextInput({
'class': 'form-control',
'placeholder': 'User name'}))
password = forms.CharField(label=_("Password"),
widget=forms.PasswordInput({
'class': 'form-control',
'placeholder': 'Password'}))
# class BootstrapTemplateOfficialDatatableView(DatatableView): #JA comments
# model = CommercialAccounts
# datatable_options = {
# 'structure_template': "datatableview/bootstrap_structure.html",
# 'columns': [
# '',
# 'headline',
# 'blog',
# 'pub_date',
# ],
# }
|
nilq/baby-python
|
python
|
#Follow up for "Unique Paths":
#
#Now consider if some obstacles are added to the grids. How many unique paths would there be?
#
#An obstacle and empty space is marked as 1 and 0 respectively in the grid.
#
#For example,
#There is one obstacle in the middle of a 3x3 grid as illustrated below.
#
#[
# [0,0,0],
# [0,1,0],
# [0,0,0]
#]
#The total number of unique paths is 2.
#
#Note: m and n will be at most 100.
class Solution(object):
def uniquePathsWithObstacles(self, obstacleGrid):
"""
:type obstacleGrid: List[List[int]]
:rtype: int
"""
m,n=len(obstacleGrid),len(obstacleGrid[0])
l=[[0 for i in xrange(n)] for i in xrange(m)]
for i in xrange(m):
for j in xrange(n):
if obstacleGrid[i][j]==0:
if i==0 and j==0:
l[i][j]=1
elif i==0:
l[i][j]=l[i][j-1]
elif j==0:
l[i][j]=l[i-1][j]
else:
l[i][j]+=l[i-1][j]+l[i][j-1]
return l[m-1][n-1]
|
nilq/baby-python
|
python
|
# Given a binary tree, compute the length of its diameter. The diameter of a binary tree is the length of the longest path between any two nodes; this path may or may not pass through the root.
#
# Example:
# Given the binary tree
#
#           1
#          / \
#         2   3
#        / \
#       4   5
# Return 3, the length of the path [4,2,1,3] or [5,2,1,3].
#
# Note: the length of a path between two nodes is measured by the number of edges between them.
#
# Source: LeetCode (力扣)
# Link: https://leetcode-cn.com/problems/diameter-of-binary-tree
# Copyright belongs to LeetCode (领扣网络). Commercial reprints require official authorization; non-commercial reprints must cite the source.
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def __init__(self):
self.ans = 0
def depth(self, root: TreeNode) -> int:
if not root:
return 0
l = self.depth(root.left)
r = self.depth(root.right)
self.ans = max(self.ans, l + r + 1)
return max(l, r) + 1
def diameterOfBinaryTree(self, root: TreeNode) -> int:
if not root:
return 0
self.depth(root)
return self.ans - 1
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
"""
Creates MRI axial pictures for custom T-shirt.
Usage:
mripicture.py [options] <study>
mripicture.py [options] <study> [-s <subject>]...
mripicture.py [options] <study> [-s <subject>]... [-t <tag>]
Arguments:
<study> Nickname of the study to process
Options:
-s --subject Subjects
-o --output=FOLDER Output directory (default: /archive/data/{study}/data/tshirt)
-t --tag=TAG Scan tag [default: T1]
-f --force Force overwrite of output files [default: False]
-h --help Show this screen
-q, --quiet Show minimal output
-d, --debug Show debug messages
-v, --verbose Show intermediate steps
"""
import os
import glob
import logging
from nilearn import plotting
import numpy as np
from docopt import docopt
import datman.config
import datman.scan
logging.basicConfig(level=logging.WARN,
format="[%(name)s] %(levelname)s: %(message)s")
logger = logging.getLogger(os.path.basename(__file__))
def get_all_subjects(config):
nii_dir = config.get_path("nii")
subject_nii_dirs = glob.glob(os.path.join(nii_dir, "*"))
all_subs = [os.path.basename(path) for path in subject_nii_dirs]
return all_subs
def main():
arguments = docopt(__doc__)
study = arguments["<study>"]
outdir = arguments["--output"]
subs = arguments["<subject>"]
tag = arguments["--tag"]
force = arguments["--force"]
quiet = arguments["--quiet"]
debug = arguments["--debug"]
verbose = arguments["--verbose"]
config = datman.config.config(study=study)
# setup logging
if quiet:
logger.setLevel(logging.ERROR)
if verbose:
logger.setLevel(logging.INFO)
if debug:
logger.setLevel(logging.DEBUG)
if subs:
logger.info(
f"Creating pictures for subjects [ {', '.join(subs)} ] from "
f"{study} project using {tag} scans."
)
else:
subs = get_all_subjects(config)
logger.info(
f"Creating pictures for all {len(subs)} subjects from {study} "
f"project using {tag} scans."
)
if not outdir:
outdir = os.path.join(config.get_path("data"), "tshirt")
os.makedirs(outdir, exist_ok=True)
logger.debug(f"Output location set to: {outdir}")
if force:
logger.info("Overwriting existing files")
for subject in subs:
scan = datman.scan.Scan(subject, config)
tagged_scan = scan.get_tagged_nii(tag)
idx = np.argmax([ss.series_num for ss in tagged_scan])
# Set Path
imgpath = tagged_scan[idx].path
outpath = os.path.join(outdir, subject + "_T1.pdf")
if os.path.isfile(outpath) and not force:
logger.debug(f"Skipping subject {subject} as files already exist.")
else:
# Output Image
t1_pic = plotting.plot_anat(
imgpath,
cut_coords=(-20, -10, 2),
display_mode="x",
annotate=False,
draw_cross=False,
vmin=100,
vmax=1100,
threshold="auto",
)
t1_pic.savefig(outpath, dpi=1000)
logger.debug(
f"Created new brain pictures for subject {subject} from file "
f"{imgpath} and saved as {outpath}"
)
logger.info(f"Saved all output to: {outdir}")
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
from calendar import monthrange
year = int( input() )  # year appears to be given in the Buddhist Era (B.E.)
( startDay, endDay ) = monthrange( year-543, 2 )  # B.E. - 543 = Gregorian year; monthrange returns (weekday of the 1st, days in month)
print( endDay )  # number of days in February of that year
|
nilq/baby-python
|
python
|
#!/usr/bin/python3
import os
import time
from datetime import datetime as dt
import logging
#from .database.database import *
from threading import Thread
class BloquearSites:
def __init__(self):
logging.basicConfig(filename="C:\\ConectaIT\\modules" + '\\logs\\logBloquearSites.log')
#self.db = DataBase()
self.hosts_path = "C:\Windows\System32\drivers\etc\hosts"
self.redirect = "127.0.0.1"
self.stop = False
self.webSiteList = [
"facebook.com", "www.facebook.com", "gmail.com", "www.gmail.com", "topconecta.com.br",
"www.topconecta.com.br"
]
while True:
if dt(dt.now().year, dt.now().month, dt.now().day, 8) < dt.now() < dt(dt.now().year,
dt.now().month,
dt.now().day,
16):
logging.info("Woorking Hours")
with open(self.hosts_path, 'r+') as file:
self.content = file.read()
for website in self.webSiteList:
if website in self.content:
pass
else:
file.write(self.redirect+" "+website+"\n")
else:
with open(self.hosts_path, "r+") as file:
content = file.readlines()
file.seek(0)
for line in content:
if not any(website in line for website in self.webSiteList):
file.write(line)
file.truncate()
logging.info("Fun hours...")
time.sleep(5)
BloquearSites()
|
nilq/baby-python
|
python
|
#numeric integration using the 2-point trapezoidal rule
from math import *
EPSILON = .0001 #base length of trapezoids
def evaluate_function(func, a):
func_at_a = eval(func.replace('x', str(a)))
return func_at_a
# doesn't yet take into account the ability of the domain a,b to have b < a
def integrate(func, domain):
start_val = float(domain.strip().split(',')[0])
end_val = float(domain.strip().split(',')[1])
sum = 0.0
interval_len = float(end_val - start_val) # in case of flooring
evaluate_at = start_val
if start_val == end_val:
return 0
else:
while evaluate_at < end_val:
sum += EPSILON*(evaluate_function(func, evaluate_at) + evaluate_function(func, evaluate_at + EPSILON))*.5
evaluate_at += EPSILON
length_completed = evaluate_at - start_val
percentage_completed = (length_completed/interval_len)*100
per_comp = '%3d%%' % int(percentage_completed)
print '\b\b\b\b\b' + per_comp,
return '%.3f' % sum
while True: # main program loop
function = raw_input('enter function: ')
evaluate = raw_input('enter domain for integral: ')
if ',' in evaluate:
integral = integrate(function, evaluate)
print '\n'+str(integral)
elif evaluate:
domain = evaluate+','+evaluate
integral = integrate(function, domain)
print str(integral)
else: break
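# Illustrative sanity check (not part of the original script; it reuses the
# integrate() function defined above): the exact integral of x*x over [0, 1]
# is 1/3, so with EPSILON = .0001 the commented call below should print a
# value close to 0.333.
#
#     print integrate('x*x', '0,1')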
|
nilq/baby-python
|
python
|
__author__ = 'Jay Hennessy <tjay.hennessy@gmail.com>'
__license__ = 'MIT'
import os
__version__ = '0.0.1' # fantraxpy version
VERSION = '17.0.0' # fantrax version
URL = 'https://www.fantrax.com/fxpa/req'
FANTRAX_TOKEN = os.environ.get('FANTRAX_TOKEN', None)
|
nilq/baby-python
|
python
|
"""
This module contains tests for the utility functions in the test_mapping module.
"""
import pytest
from sqlalchemy import (
Column,
Index,
Integer,
UniqueConstraint,
)
from sqlalchemy.orm import registry
from galaxy.model import _HasTable
from . import (
collection_consists_of_objects,
has_index,
has_unique_constraint,
)
from ...testing_utils import (
get_stored_instance_by_id,
initialize_model,
persist,
)
def test_has_index(session):
assert has_index(Bar.__table__, ("field1",))
assert not has_index(Foo.__table__, ("field1",))
def test_has_unique_constraint(session):
assert has_unique_constraint(Bar.__table__, ("field2",))
assert not has_unique_constraint(Foo.__table__, ("field1",))
def test_collection_consists_of_objects(session):
# create objects
foo1 = Foo()
foo2 = Foo()
foo3 = Foo()
# store objects
persist(session, foo1)
persist(session, foo2)
persist(session, foo3)
# retrieve objects from storage
stored_foo1 = get_stored_instance_by_id(session, Foo, foo1.id)
stored_foo2 = get_stored_instance_by_id(session, Foo, foo2.id)
stored_foo3 = get_stored_instance_by_id(session, Foo, foo3.id)
# verify retrieved objects are not the same python objects as those we stored
assert stored_foo1 is not foo1
assert stored_foo2 is not foo2
assert stored_foo3 is not foo3
# trivial case
assert collection_consists_of_objects([stored_foo1, stored_foo2], foo1, foo2)
# empty collection and no objects
assert collection_consists_of_objects([])
# ordering in collection does not matter
assert collection_consists_of_objects([stored_foo2, stored_foo1], foo1, foo2)
# contains wrong object
assert not collection_consists_of_objects([stored_foo1, stored_foo3], foo1, foo2)
# contains wrong number of objects
assert not collection_consists_of_objects([stored_foo1, stored_foo1, stored_foo2], foo1, foo2)
# if an object's primary key is not set, it cannot be equal to another object
foo1.id, stored_foo1.id = None, None
assert not collection_consists_of_objects([stored_foo1], foo1)
# Test utilities
mapper_registry = registry()
@mapper_registry.mapped
class Foo(_HasTable):
__tablename__ = "foo"
id = Column(Integer, primary_key=True)
field1 = Column(Integer)
@mapper_registry.mapped
class Bar(_HasTable):
__tablename__ = "bar"
id = Column(Integer, primary_key=True)
field1 = Column(Integer)
field2 = Column(Integer)
__table_args__ = (
Index("ix", "field1"),
UniqueConstraint("field2"),
)
@pytest.fixture(scope="module")
def init_model(engine):
"""Create model objects in the engine's database."""
# Must use the same engine as the session fixture used by this module.
initialize_model(mapper_registry, engine)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-03-22 14:40
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('content', models.CharField(default='New Comment', max_length=1000)),
('time', models.DateTimeField(default=django.utils.timezone.now)),
('owner', models.CharField(default='User', max_length=100)),
],
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('title', models.CharField(max_length=50)),
('partition', models.CharField(choices=[('Chat', 'Chat'), ('Announcement', 'Announcement'), ('Transaction', 'Transaction'), ('Activity', 'Activity')], default='Chat', max_length=50)),
('owner', models.CharField(default='User', max_length=100)),
('content', models.CharField(blank=True, max_length=1000)),
('time', models.DateTimeField(default=django.utils.timezone.now)),
],
),
migrations.AddField(
model_name='comment',
name='post',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='forum.Post'),
),
]
|
nilq/baby-python
|
python
|
# Mod01.py
from MyCalc01 import Calc01
from MyCalc01.Calc01 import *
x= 100; y= 200
print(Calc01.__name__)
Calc01.Sum(x, y)
Mul(x,y)
|
nilq/baby-python
|
python
|
class ValidationException(Exception):
pass
class DataApiException(Exception):
"For errors raised when reading from data api"
|
nilq/baby-python
|
python
|
# Copyright (c) 2019-2020 SAP SE or an SAP affiliate company. All rights reserved. This file is
# licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ensure
import functools
import ci.log
import ci.util
import concourse.client.api
import ctx
import model.concourse
def lookup_cc_team_cfg(
concourse_cfg_name,
cfg_set,
team_name,
) -> model.concourse.ConcourseTeamConfig:
for cc_team_cfg in cfg_set._cfg_elements('concourse_team_cfg'):
if cc_team_cfg.team_name() != team_name:
continue
if concourse_cfg_name != cc_team_cfg.concourse_endpoint_name():
continue
return cc_team_cfg
raise KeyError(f'No concourse team config for team name {team_name} found')
@ensure.ensure_annotations
def client_from_parameters(
base_url: str,
password: str,
team_name: str,
username: str,
verify_ssl: bool = True,
concourse_api_version=None,
) -> concourse.client.api.ConcourseApiBase:
"""
    returns a concourse-client authorised with the given credentials.
    The returned client can perform operations in the concourse team whose name and
    credentials are passed to this function.
"""
concourse_api = concourse.client.api.ConcourseApiFactory.create_api(
base_url=base_url,
team_name=team_name,
verify_ssl=verify_ssl,
concourse_api_version=concourse_api_version,
)
concourse_api.login(
username=username,
passwd=password,
)
return concourse_api
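# Illustrative call (not from the original module; the endpoint, team and
# credentials below are placeholders):
#
#     api = client_from_parameters(
#         base_url='https://concourse.example.org',
#         username='ci-bot',
#         password='secret',
#         team_name='main',
#     )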
@functools.lru_cache()
@ensure.ensure_annotations
def client_from_cfg_name(
concourse_cfg_name: str,
team_name: str,
cfg_factory=None,
):
if not cfg_factory:
cfg_factory = ci.util.ctx().cfg_factory()
concourse_team_config = lookup_cc_team_cfg(
concourse_cfg_name=concourse_cfg_name,
cfg_set=cfg_factory,
team_name=team_name,
)
concourse_endpoint = cfg_factory.concourse_endpoint(
concourse_team_config.concourse_endpoint_name()
)
return client_from_parameters(
base_url=concourse_endpoint.base_url(),
password=concourse_team_config.password(),
team_name=team_name,
username=concourse_team_config.username(),
)
def client_from_env(
team_name: str=None,
):
cfg_set = ctx.cfg_set()
if not team_name:
team_name = ci.util.check_env('CONCOURSE_CURRENT_TEAM')
concourse_team_config = lookup_cc_team_cfg(
concourse_cfg_name=cfg_set.concourse().name(),
cfg_set=cfg_set,
team_name=team_name,
)
concourse_endpoint = cfg_set.concourse_endpoint(
concourse_team_config.concourse_endpoint_name()
)
return client_from_parameters(
base_url=concourse_endpoint.base_url(),
password=concourse_team_config.password(),
team_name=team_name,
username=concourse_team_config.username(),
)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Class for a multi-panel structure
"""
__version__ = '1.0'
__author__ = 'Noemie Fedon'
import sys
import numpy as np
sys.path.append(r'C:\BELLA')
from src.BELLA.parameters import Parameters
from src.BELLA.constraints import Constraints
from src.BELLA.panels import Panel
from src.BELLA.reduced_multipanels import ReducedMultiPanel
class MultiPanel():
"""
Class for multi-panel structures
"""
def __init__(self, panels, boundary_weights=None):
"""Create object for storing multi-panel structures information"""
# list of panels (classes)
self.panels = panels
if not isinstance(panels, list):
raise MultiPanelDefinitionError(
'Attention, panels must be a list!')
# total area of structure
self.area = sum([el.area for el in panels])
# total area for all patches
self.area_patches = sum([el.area * el.n_plies for el in panels])
# minimum ply count
self.n_plies_min = min((el.n_plies for el in panels))
# maximum ply count
self.n_plies_max = max((el.n_plies for el in panels))
self.is_thick_panels = [panel.n_plies == self.n_plies_max \
for panel in self.panels]
# number of panels
self.n_panels = len(panels)
# number of plies in the laminates
self.n_plies_in_panels = np.array([self.panels[ind_panel].n_plies \
for ind_panel in range(self.n_panels)])
self.has_a_middle_ply()
self.identify_one_thickest_panel()
self.calc_panel_boundary_dict(panels, boundary_weights)
def should_you_use_BELLA(self):
""" Tells the user when using LAYLA is better than employing BELLA
Displays a message when BELLA is employed to design a composite
laminate structure with one panel to indicate that LAYLA is better
suited for the task than BELLA.
Returns
-------
None.
"""
if self.n_panels == 1:
print("""
You are using BELLA to design a composite laminate structure with one panel.
LAYLA is better suited for this task than BELLA, please consider using LAYLA
instead of BELLA.""")
def filter_target_lampams(self, constraints, obj_func_param):
"""
filters applied to the lamination parameters to account for orthotropy
requirements
"""
for panel in self.panels:
panel.filter_target_lampams(constraints, obj_func_param)
def filter_lampam_weightings(self, constraints, obj_func_param):
"""
filter of the lamination-parameter weightings in the panel
objective function to account for the design guidelines
lampam_weightings_3: for blending steps 3 (contain penalty for
out-of-plane orthotropy and may contain penalty for balance)
lampam_weightings: for all other blending steps (contain penalty for
out-of-plane orthotropy and does not contain penalty for balance)
"""
for panel in self.panels:
panel.filter_lampam_weightings(constraints, obj_func_param)
def from_mp_to_blending_strip(self, constraints, n_plies_ref_panel=1):
"""
performs the blending step 2: maps the multi-panel structure to a
blending strip, i.e. a series a panels in a row
"""
self.reduced = ReducedMultiPanel(self, constraints, n_plies_ref_panel)
def calc_panel_boundary_dict(self, panels, boundary_weights):
"""
checks that all panels have a different ID
collates all the panel boundaries in self.boundaries
checks that all panels are connected
"""
## checks that all panels have a different ID
self.dict_ID_to_indices = dict()
for ind_panel, panel in enumerate(panels):
panel.ID_code = ind_panel
self.dict_ID_to_indices[panel.ID] = ind_panel
if len(self.dict_ID_to_indices) != self.n_panels:
raise MultiPanelDefinitionError("""
Several panels with the same index!""")
# print('dict_ID_to_indices', self.dict_ID_to_indices)
## create the dictionary of panel boundaries
self.boundaries = []
for ind_panel, panel in enumerate(panels):
neighbours = [self.dict_ID_to_indices[neighbour] \
for neighbour in panel.neighbour_panels]
for elem in neighbours:
self.boundaries.append(np.sort([ind_panel, elem]))
self.boundaries.append(np.flip(np.sort([ind_panel, elem])))
if len(self.boundaries) == 0:
self.boundaries = np.array((), int).reshape((0,2))
else:
self.boundaries = np.unique(self.boundaries, axis=0)
# print('boundaries', self.boundaries)
## checks that all panels are connected
visited_nodes = []
set_avail_nodes = set([0])
while len(set_avail_nodes) != 0 and len(visited_nodes) < self.n_panels:
current_node = set_avail_nodes.pop()
visited_nodes.append(current_node)
for elem in self.boundaries:
if elem[0] == current_node and elem[1] not in visited_nodes\
and elem[1] not in set_avail_nodes:
set_avail_nodes.add(elem[1])
# print('visited_nodes', visited_nodes)
if not len(visited_nodes) == self.n_panels:
raise MultiPanelDefinitionError("""
The panels of the multipanel-component are not all connected!""")
if len(self.boundaries) == 0:
self.boundaries = np.array((), int).reshape((0,2))
else:
self.boundaries = np.unique(
np.array([np.sort(elem) for elem in self.boundaries]), axis=0)
# print('boundaries', self.boundaries)
## dictionary with panel Ids
self.boundaries_in_IDs = np.empty((self.boundaries.shape[0], 2), int)
for ind_row, (first, second) in enumerate(self.boundaries):
self.boundaries_in_IDs[ind_row, 0] = self.panels[first].ID
self.boundaries_in_IDs[ind_row, 1] = self.panels[second].ID
## reorganise the boundary weightings
self.boundary_weights_in_IDs = dict()
self.boundary_weights = dict()
if boundary_weights is not None:
for weight in boundary_weights.values():
if weight < 0:
raise Exception(
'The boundary weightings should be positive.')
if len(boundary_weights) < self.boundaries.shape[0]:
print(len(boundary_weights), self.boundaries)
raise Exception(
'Insufficient number of boundary weightings.')
for ind_panel1, ind_panel2 in self.boundaries_in_IDs:
ind_panel1_mod = self.dict_ID_to_indices[ind_panel1]
ind_panel2_mod = self.dict_ID_to_indices[ind_panel2]
ind_panel1, ind_panel2 = sorted((ind_panel1, ind_panel2))
ind_panel1_mod, ind_panel2_mod = sorted((ind_panel1_mod,
ind_panel2_mod))
weight = boundary_weights.get((ind_panel1, ind_panel2), None)
if weight:
self.boundary_weights_in_IDs[
(ind_panel1, ind_panel2)] = weight
self.boundary_weights[
(ind_panel1_mod, ind_panel2_mod)] = weight
else:
weight = boundary_weights.get(
(ind_panel2, ind_panel1), None)
if not weight:
raise Exception('Missing boundary weightings.')
self.boundary_weights_in_IDs[
(ind_panel2, ind_panel1)] = weight
self.boundary_weights[
(ind_panel2_mod, ind_panel1_mod)] = weight
else: # all boundary weightings set to one
for ind_panel1, ind_panel2 in self.boundaries_in_IDs:
ind_panel1_mod = self.dict_ID_to_indices[ind_panel1]
ind_panel2_mod = self.dict_ID_to_indices[ind_panel2]
ind_panel1, ind_panel2 = sorted((ind_panel1, ind_panel2))
ind_panel1_mod, ind_panel2_mod = sorted((ind_panel1_mod,
ind_panel2_mod))
self.boundary_weights_in_IDs[(ind_panel1, ind_panel2)] = 1
self.boundary_weights[(ind_panel1_mod, ind_panel2_mod)] = 1
return 0
def has_a_middle_ply(self):
"""
returns:
- middle_ply_indices: the locations of middle plies per panel
(0 if no middle ply)
- has_middle_ply: True if one panel at least has a middle ply
- thick_panel_has_middle_ply: True if thickest panel has a middle
ply
"""
# locations of middle plies per panel (0 if no middle ply)
self.middle_ply_indices = np.array(
[self.panels[ind_panel].middle_ply_index \
for ind_panel in range(self.n_panels)])
self.has_middle_ply = bool(sum(self.middle_ply_indices))
if self.has_middle_ply and self.n_plies_max % 2:
self.thick_panel_has_middle_ply = True
else:
self.thick_panel_has_middle_ply = False
def calc_ply_drops(self, inner_step):
"""
returns a vector of the number of ply drops at each panel boundary of
the blending strip for the inner_step-eme group of plies
"""
n_ply_drops = np.zeros((self.reduced.n_panels,), dtype='int16')
for index, panel in enumerate(self.reduced.panels):
n_ply_drops[index] = self.reduced.n_plies_per_group[inner_step] \
- panel.n_plies_per_group[inner_step]
return n_ply_drops
def calc_weight(self, density_area):
"""
returns the weight of the multipanel structure
"""
return density_area*sum([panel.area*panel.n_plies \
for panel in self.panels])
def calc_weight_per_panel(self, density_area):
"""
returns the weight of the multipanel structure per panel
"""
self.weight_ref_per_panel = density_area * \
np.array([panel.area*panel.n_plies for panel in self.panels])
def calc_weight_from_sst(self, sst, density_area):
"""
returns the weight of the multipanel structure from a stacking sequence
table
"""
return density_area*sum([panel.area * sum(sst[ind_panel] != -1) \
for ind_panel,
panel in enumerate(self.panels)])
def identify_neighbour_panels(self):
"""
returns the indices of the neighbouring panels for each panel
"""
liste = []
for ind_panel in range(self.n_panels):
liste.append([])
for boundary in self.boundaries:
liste[boundary[0]].append(boundary[1])
liste[boundary[1]].append(boundary[0])
return liste
def identify_one_thickest_panel(self):
"""
returns the index of one of the thickest panels
"""
for ind_panel, panel in enumerate(self.panels):
if panel.n_plies == self.n_plies_max:
self.ind_thick = ind_panel
return 0
raise Exception("""
The maximum number of plies should be the ply count of a panel""")
def identify_thickest_panels(self, sym=False):
"""
returns the index of all of the thickest panels
"""
liste = []
        if sym and self.n_plies_max % 2 == 1: # middle ply in thickest panels
for ind_panel, panel in enumerate(self.panels):
if panel.n_plies == self.n_plies_max \
or panel.n_plies == self.n_plies_max - 1:
liste.append(ind_panel)
else:
for ind_panel, panel in enumerate(self.panels):
if panel.n_plies == self.n_plies_max:
liste.append(ind_panel)
if liste:
return liste
raise Exception("""
The maximum number of plies should be the ply count of a panel""")
def __repr__(self):
" Display object "
to_add = ''
# number of groups
if hasattr(self, 'n_groups'):
to_add = to_add + 'Number of groups : ' + str(self.n_groups) \
+ '\n'
# number of plies per group for thickest laminates
if hasattr(self, 'n_plies_per_group'):
to_add = to_add + 'Max number of plies per group : ' \
+ str(self.n_plies_per_group) + '\n'
# position of the group first plies for thickest laminates
if hasattr(self, 'n_first_plies'):
to_add = to_add + 'Position first plies : ' \
+ str(self.n_first_plies) + '\n'
return f"""
Number of panels : {self.n_panels}
Maximum number of plies in a panel: {self.n_plies_max}
Index of one of the thickest panels: {self.ind_thick}
Area : {self.area}
Area for all patches: {self.area_patches}
Panel boundary matrix : {self.boundaries_in_IDs}
""" + to_add
class MultiPanelDefinitionError(Exception):
" Errors during the definition of a multi-panel structure"
if __name__ == "__main__":
print('*** Test for the class MultiPanel ***\n')
constraints = Constraints(
sym=True,
dam_tol=False,
covering=False,
pdl_spacing=True,
min_drop=2)
parameters = Parameters(constraints=constraints, n_plies_ref_panel=48)
n_plies_target1 = 48
n_plies_target2 = 46
n_plies_target3 = 40
n_plies_target4 = 40
panel1 = Panel(ID=1,
n_plies=n_plies_target1,
constraints=constraints,
neighbour_panels=[2])
panel2 = Panel(ID=2,
n_plies=n_plies_target2,
constraints=constraints,
neighbour_panels=[1, 3])
panel3 = Panel(ID=3,
n_plies=n_plies_target3,
constraints=constraints,
neighbour_panels=[2, 4])
panel4 = Panel(ID=4,
n_plies=n_plies_target4,
constraints=constraints,
neighbour_panels=[3])
multipanel = MultiPanel([panel1, panel2, panel3, panel4])
print(multipanel)
from src.BELLA.divide_panels import divide_panels
divide_panels(multipanel, parameters, constraints)
print('multipanel.reduced.n_plies_in_panels', multipanel.reduced.n_plies_in_panels)
print('multipanel.calc_ply_drops(0)', multipanel.calc_ply_drops(0))
print('multipanel.reduced.n_plies_per_group', multipanel.reduced.n_plies_per_group)
print('multipanel.reduced.middle_ply_indices', multipanel.reduced.middle_ply_indices)
|
nilq/baby-python
|
python
|
import warnings
import dateutil.parser
from requests import Session
from time import sleep
from .config import ( # noqa
__version__,
API_ROOT,
DEFAULT_USER_AGENT,
API_KEY_ENV_VAR,
ENVIRON_API_KEY,
)
session = Session()
session.headers.update({"Accept": "application/json"})
session.headers.update({"User-Agent": DEFAULT_USER_AGENT})
if ENVIRON_API_KEY:
session.headers.update({"X-Api-Key": ENVIRON_API_KEY})
else:
warnings.warn(f"Warning: No API Key found, set {API_KEY_ENV_VAR}")
class APIError(RuntimeError):
"""
Raised when the Open States API returns an error
"""
pass
class NotFound(APIError):
"""Raised when the API cannot find the requested object"""
pass
def _make_params(**kwargs):
return {k: v for k, v in kwargs.items() if v is not None}
def _get(uri, params=None):
"""
An internal method for making API calls and error handling easy and
consistent
Args:
uri: API URI
params: GET parameters
Returns:
JSON as a Python dictionary
"""
def _convert_timestamps(result):
"""Converts a string timestamps from an api result API to a datetime"""
if isinstance(result, dict):
for key in result.keys():
if key in (
"created_at",
"updated_at",
"latest_people_update",
"latest_bill_update",
):
try:
result[key] = dateutil.parser.parse(result[key])
except ValueError:
pass
elif isinstance(result[key], dict):
result[key] = _convert_timestamps(result[key])
elif isinstance(result[key], list):
result[key] = [_convert_timestamps(r) for r in result[key]]
elif isinstance(result, list):
result = [_convert_timestamps(r) for r in result]
return result
def _convert(result):
"""Convert results to standard Python data structures"""
result = _convert_timestamps(result)
return result
url = f"{API_ROOT}/{uri}"
response = session.get(url, params=params)
if response.status_code != 200:
if response.status_code == 404:
raise NotFound(f"Not found: {response.url}")
else:
raise APIError(response.text)
return _convert(response.json())
def set_user_agent(user_agent):
"""Appends a custom string to the default User-Agent string
(e.g. ``pyopenstates/__version__ user_agent``)"""
session.headers.update({"User-Agent": f"{DEFAULT_USER_AGENT} {user_agent}"})
def set_api_key(apikey):
"""Sets API key. Can also be set as OPENSTATES_API_KEY environment
variable."""
session.headers["X-Api-Key"] = apikey
def get_metadata(state=None, include=None, fields=None):
"""
Returns a list of all states with data available, and basic metadata
about their status. Can also get detailed metadata for a particular
state.
Args:
state: The abbreviation of state to get detailed metadata on, or leave
as None to get high-level metadata on all states.
include: Additional includes.
fields: An optional list of fields to return; returns all fields by
default
Returns:
Dict: The requested :ref:`Metadata` as a dictionary
"""
uri = "jurisdictions"
params = dict()
if include:
params["include"] = _include_list(include)
if state:
uri += "/" + _jurisdiction_id(state)
state_response = _get(uri, params=params)
if fields is not None:
return {k: state_response[k] for k in fields}
else:
return state_response
else:
params["page"] = "1"
params["per_page"] = "52"
return _get(uri, params=params)["results"]
def get_organizations(state):
uri = "jurisdictions"
uri += "/" + _jurisdiction_id(state)
state_response = _get(uri, params={"include": "organizations"})
return state_response["organizations"]
def _alt_parameter(param, other_param, param_name, other_param_name):
"""ensure that only one name was specified"""
if param and other_param:
raise ValueError(
f"cannot specify both {param_name} and variant {other_param_name}"
)
elif other_param:
warnings.warn(f"{other_param_name} is deprecated, use {param_name}")
return other_param
return param
def search_bills(
jurisdiction=None,
identifier=None,
session=None,
chamber=None,
classification=None,
subject=None,
updated_since=None,
created_since=None,
action_since=None,
sponsor=None,
sponsor_classification=None,
q=None,
# control params
sort=None,
include=None,
page=1,
per_page=10,
all_pages=True,
# alternate names for other parameters
state=None,
):
"""
Find bills matching a given set of filters
For a list of each field, example values, etc. see
https://v3.openstates.org/docs#/bills/bills_search_bills_get
"""
uri = "bills/"
args = {}
jurisdiction = _alt_parameter(state, jurisdiction, "state", "jurisdiction")
if jurisdiction:
args["jurisdiction"] = jurisdiction
if session:
args["session"] = session
if chamber:
args["chamber"] = chamber
if classification:
args["classification"] = classification
if subject:
args["subject"] = subject
if updated_since:
args["updated_since"] = updated_since
if created_since:
args["created_since"] = created_since
if action_since:
args["action_since"] = action_since
if sponsor:
args["sponsor"] = sponsor
if sponsor_classification:
args["sponsor_classification"] = sponsor_classification
if q:
args["q"] = q
if sort:
args["sort"] = sort
if include:
args["include"] = include
results = []
if all_pages:
args["per_page"] = 20
args["page"] = 1
else:
args["per_page"] = per_page
args["page"] = page
resp = _get(uri, params=args)
results += resp["results"]
if all_pages:
while resp["pagination"]["page"] < resp["pagination"]["max_page"]:
args["page"] += 1
sleep(1)
resp = _get(uri, params=args)
results += resp["results"]
return results
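# A minimal usage sketch (illustrative; it assumes an API key has been set via
# set_api_key() or the environment variable, and the result field names shown
# here are assumptions about the v3 response shape):
#
#     bills = search_bills(jurisdiction="nc", q="education")
#     for bill in bills[:5]:
#         print(bill["identifier"], bill["title"])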
def get_bill(uid=None, state=None, session=None, bill_id=None, include=None):
"""
Returns details of a specific bill Can be identified by the Open States
unique bill id (uid), or by specifying the state, session, and
legislative bill ID
Args:
uid: The Open States unique bill ID
state: The postal code of the state
session: The legislative session (see state metadata)
        bill_id: The legislative bill ID (e.g. ``HR 42``)
        include: An optional list of additional data to include with the bill
Returns:
The :ref:`Bill` details as a dictionary
"""
args = {"include": include} if include else {}
if uid:
if state or session or bill_id:
raise ValueError(
"Must specify an Open States bill (uid), or the "
"state, session, and bill ID"
)
uid = _fix_id_string("ocd-bill/", uid)
return _get(f"bills/{uid}", params=args)
else:
if not state or not session or not bill_id:
raise ValueError(
"Must specify an Open States bill (uid), "
"or the state, session, and bill ID"
)
return _get(f"bills/{state.lower()}/{session}/{bill_id}", params=args)
def search_legislators(
jurisdiction=None,
name=None,
id_=None,
org_classification=None,
district=None,
include=None,
):
"""
Search for legislators.
Returns:
A list of matching :ref:`Legislator` dictionaries
"""
params = _make_params(
jurisdiction=jurisdiction,
name=name,
id=id_,
org_classification=org_classification,
district=district,
include=include,
)
return _get("people", params)["results"]
def get_legislator(leg_id):
"""
Gets a legislator's details
Args:
leg_id: The Legislator's Open States ID
Returns:
The requested :ref:`Legislator` details as a dictionary
"""
leg_id = _fix_id_string("ocd-person/", leg_id)
return _get("people/", params={"id": [leg_id]})["results"][0]
def locate_legislators(lat, lng, fields=None):
"""
Returns a list of legislators for the given latitude/longitude coordinates
Args:
lat: Latitude
        lng: Longitude
fields: An optional custom list of fields to return
Returns:
A list of matching :ref:`Legislator` dictionaries
"""
return _get(
"people.geo/", params=dict(lat=float(lat), lng=float(lng), fields=fields)
)["results"]
def search_districts(state, chamber):
"""
Search for districts
Args:
state: The state to search in
chamber: the upper or lower legislative chamber
Returns:
A list of matching :ref:`District` dictionaries
"""
if chamber:
chamber = chamber.lower()
if chamber not in ["upper", "lower"]:
raise ValueError('Chamber must be "upper" or "lower"')
organizations = get_organizations(state=state)
for org in organizations:
if org["classification"] == chamber:
return org["districts"]
def _fix_id_string(prefix, id):
if id.startswith(prefix):
return id
else:
return prefix + id
def _jurisdiction_id(state):
if state.startswith("ocd-jurisdiction/"):
return state
else:
return f"ocd-jurisdiction/country:us/state:{state.lower()}/government"
def _include_list(include):
if include is None:
return None
elif isinstance(include, str):
return [include]
elif isinstance(include, (list, tuple)):
return include
else:
raise ValueError("include must be a str or list")
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Distributed under the terms of the MIT License.
"""
Script to append window and cage COM's to a CIF.
Author: Andrew Tarzia
Date Created: 19 Feb 2019
"""
import sys
from ase.io import read
import pywindow as pw
import logging
import os
import atools
def main():
if (not len(sys.argv) == 3):
print("""
Usage: append_all_COM.py pdb ignore
pdb: file (.pdb) :
to analyze and add pseudo atoms to
('*.pdb' for all in working dir)
ignore (str) :
string to use to ignore certain files
(set NONE if not used)
""")
sys.exit()
if '*' in sys.argv[1]:
from glob import glob
if sys.argv[2] != 'NONE':
pdbs = sorted(
[i for i in glob(sys.argv[1]) if sys.argv[2] not in i]
)
else:
pdbs = sorted([i for i in glob(sys.argv[1])])
logging.info(f'{len(pdbs)} pdbs to analyze')
else:
pdbs = [sys.argv[1]]
count = 1
for file in pdbs:
# do not redo
if os.path.isfile(file.replace('.pdb', '_appended.cif')):
count += 1
continue
logging.info(f'doing {file}: {count} of {len(pdbs)}')
ASE_structure = read(file)
if ASE_structure is None:
count += 1
continue
pdb = file
if '_nosolv' in pdb:
# if solvent is removed and pdb is used, then this is
# already the rebuilt structure
struct = pw.MolecularSystem.load_file(pdb)
struct.make_modular()
else:
# rebuild system
struct = atools.modularize(file=pdb)
# print(struct)
if struct is None:
# handle pyWindow failure
sys.exit(f'pyWindow failure on {pdb}')
# run analysis
COM_dict = atools.analyze_rebuilt(
struct,
atom_limit=20,
file_prefix=file.replace('.pdb', ''),
verbose=False,
include_coms=True
)
# append atoms to ASE structure as pseudo atoms and write out
# new CIF
atools.append_and_write_COMs(
COM_dict,
ASE_structure,
file,
suffix='.pdb'
)
count += 1
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO, format='')
main()
|
nilq/baby-python
|
python
|
# Copyright (C) 2013 by Brian Neal.
# This file is part of m209, the M-209 simulation.
# m209 is released under the MIT License (see LICENSE.txt).
"""test_converter.py - Unit tests for the M209 class for the M-209 simulation."""
import unittest
from .. import M209Error
from ..converter import M209
# Data taken from Mark J. Blair's AA key list
AA_LUGS = '0-4 0-5*4 0-6*6 1-0*5 1-2 1-5*4 3-0*3 3-4 3-6 5-6'
AA_PIN_LIST = [
'FGIKOPRSUVWYZ',
'DFGKLMOTUY',
'ADEFGIORTUVX',
'ACFGHILMRSU',
'BCDEFJKLPS',
'EFGHIJLMNP'
]
AA_CHECK = 'QLRRN TPTFU TRPTN MWQTV JLIJE J'
class M209TestCase(unittest.TestCase):
def test_invalid_set_pins(self):
"""Ensure invalid inputs raise errors."""
m = M209()
pins = 'BFJKLOSTUWXZ'
self.assertRaises(M209Error, m.set_pins, -1, pins)
self.assertRaises(M209Error, m.set_pins, -2, pins)
self.assertRaises(M209Error, m.set_pins, 6, pins)
self.assertRaises(M209Error, m.set_pins, 7, pins)
self.assertRaises(M209Error, m.set_pins, 100, pins)
    def test_invalid_set_all_pins(self):
m = M209()
self.assertRaises(M209Error, m.set_all_pins, 'A')
bad_pins1 = AA_PIN_LIST * 2
self.assertRaises(M209Error, m.set_all_pins, bad_pins1)
bad_pins2 = ['ABCD', 'EFGH', 'XYZ']
self.assertRaises(M209Error, m.set_all_pins, bad_pins2)
def letter_check(self, lugs, pin_list, check):
"""Generic letter check routine"""
pt = 'A' * 26
ct = check
m = M209(lugs, pin_list)
result = m.encrypt(pt)
self.assertEqual(result, ct)
self.assertEqual(m.letter_counter, 26)
m.letter_counter = 0
m.set_key_wheels('A' * 6)
result = m.decrypt(ct)
self.assertEqual(result, pt)
self.assertEqual(m.letter_counter, 26)
def test_aa_letter_check(self):
"""See if we can pass a letter check using Mark J. Blair's AA key list."""
self.letter_check(AA_LUGS, AA_PIN_LIST, AA_CHECK)
def test_yl_letter_check(self):
"""See if we can pass a letter check using Mark J. Blair's YL key list."""
lugs = '1-0 2-0*4 0-3 0-4*3 0-5*3 0-6*11 2-5 2-6 3-4 4-5'
pin_list = [
'BFJKLOSTUWXZ',
'ABDJKLMORTUV',
'EHJKNPQRSX',
'ABCHIJLMPQR',
'BCDGJLNOPQS',
'AEFHIJP',
]
check = 'OZGPK AFVAJ JYRZW LRJEG MOVLU M'
self.letter_check(lugs, pin_list, check)
def test_fm_letter_check(self):
"""See if we can pass a letter check using Mark J. Blair's FM key list."""
lugs = '1-0 2-0*8 0-3*7 0-4*5 0-5*2 1-5 1-6 3-4 4-5'
pin_list = [
'BCEJOPSTUVXY',
'ACDHJLMNOQRUYZ',
'AEHJLOQRUV',
'DFGILMNPQS',
'CEHIJLNPS',
'ACDFHIMN'
]
check = 'TNMYS CRMKK UHLKW LDQHM RQOLW R'
self.letter_check(lugs, pin_list, check)
def test_no_group(self):
m = M209(AA_LUGS, AA_PIN_LIST)
result = m.encrypt('A' * 26, group=False)
expected = AA_CHECK.replace(' ', '')
self.assertEqual(result, expected)
    def test_encrypt_no_spaces(self):
m = M209()
self.assertRaises(M209Error, m.encrypt, 'ATTACK AT DAWN', spaces=False)
def test_encrypt_spaces(self):
m = M209(AA_LUGS, AA_PIN_LIST)
wheels = 'YGXREL'
m.set_key_wheels(wheels)
result1 = m.encrypt('ATTACK AT DAWN')
m.set_key_wheels(wheels)
result2 = m.encrypt('ATTACKZATZDAWN', spaces=False)
m.set_key_wheels(wheels)
result3 = m.encrypt('ATTACKZATZDAWN', spaces=True)
self.assertTrue(result1 == result2 == result3)
    def test_decrypt_no_spaces(self):
m = M209()
self.assertRaises(M209Error, m.decrypt, 'ATTACK AT DAWN', spaces=False)
def test_decrypt_no_z_sub(self):
m = M209(AA_LUGS, AA_PIN_LIST)
pt = 'ATTACK AT DAWN'
wheels = 'YGXREL'
m.set_key_wheels(wheels)
ct = m.encrypt(pt)
m.set_key_wheels(wheels)
result = m.decrypt(ct, z_sub=False)
self.assertEqual(pt.replace(' ', 'Z'), result)
def test_set_pins_vs_all_pins(self):
m1 = M209(AA_LUGS, AA_PIN_LIST)
pt = 'ATTACK AT DAWN'
wheels = 'YGXREL'
m1.set_key_wheels(wheels)
ct1 = m1.encrypt(pt)
m2 = M209()
m2.set_drum_lugs(AA_LUGS)
for n, pins in enumerate(AA_PIN_LIST):
m2.set_pins(n, pins)
m2.set_key_wheels(wheels)
ct2 = m2.encrypt(pt)
self.assertEqual(ct1, ct2)
def test_get_settings(self):
m = M209(AA_LUGS, AA_PIN_LIST)
settings = m.get_settings()
self.assertEqual(settings.lugs, AA_LUGS)
self.assertEqual(settings.pin_list, AA_PIN_LIST)
|
nilq/baby-python
|
python
|
from os import listdir
import json
from pymongo import MongoClient
prediction_output = '../data/prediction_output/' # Edit
def connect_db(mode, db_name):
client = MongoClient('localhost', 27017)
db = client[db_name]
collection = db['train'] if mode == 'train' else db['val']
return collection
if __name__ == '__main__':
mode = 'train' # Edit
db_name = 'geovisuals_bdd'
collection = connect_db(mode, db_name)
for file_name in listdir(prediction_output):
trip_id = file_name.split('.')[0]
output_file = prediction_output + file_name
with open(output_file) as json_file:
try:
output_data = json.load(json_file)
tcnn1 = output_data['tcnn1'] if 'tcnn1' in output_data else []
cnn_lstm = output_data['cnn_lstm'] if 'cnn_lstm' in output_data else []
fcn_lstm = output_data['fcn_lstm'] if 'fcn_lstm' in output_data else []
try:
res = collection.update({
'trip_id': trip_id
},{
'$set':{
'predict.tcnn1': tcnn1,
'predict.cnn_lstm': cnn_lstm,
'predict.fcn_lstm': fcn_lstm
}
}, upsert=True, multi=False)
print('Update response: ', res)
print(trip_id)
except Exception as e:
print('error: ' + str(e))
except Exception as e:
print('error: ' + str(e))
|
nilq/baby-python
|
python
|
from wsgiref.util import FileWrapper
from django.conf import settings
from django.http import Http404, HttpResponse, StreamingHttpResponse
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.urls import reverse
from wagtail.core import hooks
from wagtail.core.forms import PasswordViewRestrictionForm
from wagtail.core.models import CollectionViewRestriction
from wagtail.documents.models import document_served, get_document_model
from wagtail.utils import sendfile_streaming_backend
from wagtail.utils.sendfile import sendfile
def serve(request, document_id, document_filename):
Document = get_document_model()
doc = get_object_or_404(Document, id=document_id)
# We want to ensure that the document filename provided in the URL matches the one associated with the considered
# document_id. If not we can't be sure that the document the user wants to access is the one corresponding to the
# <document_id, document_filename> pair.
if doc.filename != document_filename:
raise Http404('This document does not match the given filename.')
for fn in hooks.get_hooks('before_serve_document'):
result = fn(doc, request)
if isinstance(result, HttpResponse):
return result
# Send document_served signal
document_served.send(sender=Document, instance=doc, request=request)
try:
local_path = doc.file.path
except NotImplementedError:
local_path = None
if local_path:
# Use wagtail.utils.sendfile to serve the file;
# this provides support for mimetypes, if-modified-since and django-sendfile backends
if hasattr(settings, 'SENDFILE_BACKEND'):
return sendfile(request, local_path, attachment=True, attachment_filename=doc.filename)
else:
# Fallback to streaming backend if user hasn't specified SENDFILE_BACKEND
return sendfile(
request,
local_path,
attachment=True,
attachment_filename=doc.filename,
backend=sendfile_streaming_backend.sendfile
)
else:
# We are using a storage backend which does not expose filesystem paths
# (e.g. storages.backends.s3boto.S3BotoStorage).
# Fall back on pre-sendfile behaviour of reading the file content and serving it
# as a StreamingHttpResponse
wrapper = FileWrapper(doc.file)
response = StreamingHttpResponse(wrapper, content_type='application/octet-stream')
response['Content-Disposition'] = 'attachment; filename=%s' % doc.filename
# FIXME: storage backends are not guaranteed to implement 'size'
response['Content-Length'] = doc.file.size
return response
def authenticate_with_password(request, restriction_id):
"""
    Handle a submission of PasswordViewRestrictionForm to grant view access over a
    collection that is protected by a CollectionViewRestriction
"""
restriction = get_object_or_404(CollectionViewRestriction, id=restriction_id)
if request.method == 'POST':
form = PasswordViewRestrictionForm(request.POST, instance=restriction)
if form.is_valid():
restriction.mark_as_passed(request)
return redirect(form.cleaned_data['return_url'])
else:
form = PasswordViewRestrictionForm(instance=restriction)
action_url = reverse('wagtaildocs_authenticate_with_password', args=[restriction.id])
password_required_template = getattr(settings, 'DOCUMENT_PASSWORD_REQUIRED_TEMPLATE', 'wagtaildocs/password_required.html')
context = {
'form': form,
'action_url': action_url
}
return TemplateResponse(request, password_required_template, context)
|
nilq/baby-python
|
python
|
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from datadog_api_client.v1.model_utils import (
ModelNormal,
cached_property,
)
def lazy_import():
from datadog_api_client.v1.model.agent_check import AgentCheck
from datadog_api_client.v1.model.host_meta_install_method import HostMetaInstallMethod
globals()["AgentCheck"] = AgentCheck
globals()["HostMetaInstallMethod"] = HostMetaInstallMethod
class HostMeta(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
validations = {}
@cached_property
def openapi_types():
lazy_import()
return {
"agent_checks": ([AgentCheck],),
"agent_version": (str,),
"cpu_cores": (int,),
"fbsd_v": ([str],),
"gohai": (str,),
"install_method": (HostMetaInstallMethod,),
"mac_v": ([str],),
"machine": (str,),
"nix_v": ([str],),
"platform": (str,),
"processor": (str,),
"python_v": (str,),
"socket_fqdn": (str,),
"socket_hostname": (str,),
"win_v": ([str],),
}
attribute_map = {
"agent_checks": "agent_checks",
"agent_version": "agent_version",
"cpu_cores": "cpuCores",
"fbsd_v": "fbsdV",
"gohai": "gohai",
"install_method": "install_method",
"mac_v": "macV",
"machine": "machine",
"nix_v": "nixV",
"platform": "platform",
"processor": "processor",
"python_v": "pythonV",
"socket_fqdn": "socket-fqdn",
"socket_hostname": "socket-hostname",
"win_v": "winV",
}
read_only_vars = {}
def __init__(self, *args, **kwargs):
"""HostMeta - a model defined in OpenAPI
Keyword Args:
agent_checks ([AgentCheck]): [optional] A list of Agent checks running on the host.
agent_version (str): [optional] The Datadog Agent version.
cpu_cores (int): [optional] The number of cores.
            fbsd_v ([str]): [optional] An array of FreeBSD versions.
gohai (str): [optional] JSON string containing system information.
install_method (HostMetaInstallMethod): [optional]
mac_v ([str]): [optional] An array of Mac versions.
machine (str): [optional] The machine architecture.
nix_v ([str]): [optional] Array of Unix versions.
platform (str): [optional] The OS platform.
processor (str): [optional] The processor.
python_v (str): [optional] The Python version.
socket_fqdn (str): [optional] The socket fqdn.
socket_hostname (str): [optional] The socket hostname.
win_v ([str]): [optional] An array of Windows versions.
"""
super().__init__(kwargs)
self._check_pos_args(args)
@classmethod
def _from_openapi_data(cls, *args, **kwargs):
"""Helper creating a new instance from a response."""
self = super(HostMeta, cls)._from_openapi_data(kwargs)
self._check_pos_args(args)
return self
|
nilq/baby-python
|
python
|
import demistomock as demisto
from CommonServerPython import * # noqa: E402 lgtm [py/polluting-import]
from CommonServerUserPython import * # noqa: E402 lgtm [py/polluting-import]
# IMPORTS
from typing import Tuple, Optional
import traceback
import dateparser
import httplib2
import urllib.parse
from oauth2client import service_account
from googleapiclient import discovery
import json
import requests
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
# CONSTANTS
SERVICE_NAME = "pubsub"
SERVICE_VERSION = "v1"
SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]
ISO_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
LAST_RUN_TIME_KEY = "fetch_time"
LAST_RUN_FETCHED_KEY = "fetched_ids"
""" HELPER CLASSES """
class GoogleNameParser:
"""
Used to easily transform Google Cloud Pub/Sub names
"""
FULL_PROJECT_PREFIX = "projects/{}"
FULL_TOPIC_PREFIX = "/topics/{}"
FULL_SUBSCRIPTION_PREFIX = "/subscriptions/{}"
FULL_SNAPSHOT_PREFIX = "/snapshots/{}"
@staticmethod
def get_project_name(project_id):
return GoogleNameParser.FULL_PROJECT_PREFIX.format(project_id)
@staticmethod
def get_topic_name(project_id, topic_id):
return GoogleNameParser.get_project_name(
project_id
) + GoogleNameParser.FULL_TOPIC_PREFIX.format(topic_id)
@staticmethod
def get_subscription_project_name(project_id, subscription_id):
return GoogleNameParser.get_project_name(
project_id
) + GoogleNameParser.FULL_SUBSCRIPTION_PREFIX.format(subscription_id)
@staticmethod
def get_subscription_topic_name(project_id, topic_id, subscription_id):
return GoogleNameParser.get_topic_name(
project_id, topic_id
) + GoogleNameParser.FULL_SUBSCRIPTION_PREFIX.format(subscription_id)
@staticmethod
def get_snapshot_project_name(project_id, snapshot_id):
return GoogleNameParser.get_project_name(
project_id
) + GoogleNameParser.FULL_SNAPSHOT_PREFIX.format(snapshot_id)
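# Illustrative examples (derived from the format strings above; the project and
# topic ids are placeholders):
#
#     GoogleNameParser.get_project_name("my-project")
#     # -> "projects/my-project"
#     GoogleNameParser.get_topic_name("my-project", "my-topic")
#     # -> "projects/my-project/topics/my-topic"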
# disable-secrets-detection-start
class BaseGoogleClient:
"""
A Client class to wrap the google cloud api library as a service.
"""
def __init__(
self,
service_name: str,
service_version: str,
client_secret: dict,
scopes: list,
proxy: bool,
insecure: bool,
**kwargs,
):
"""
:param service_name: The name of the service. You can find this and the service here
https://github.com/googleapis/google-api-python-client/blob/master/docs/dyn/index.md
:param service_version: The version of the API.
:param client_secret: A string of the generated credentials.json
:param scopes: The scope needed for the project. (i.e. ['https://www.googleapis.com/auth/cloud-platform'])
:param proxy: Proxy flag
:param insecure: Insecure flag
:param kwargs: Potential arguments dict
"""
credentials = service_account.ServiceAccountCredentials.from_json_keyfile_dict(
client_secret, scopes=scopes
)
if proxy or insecure:
http_client = credentials.authorize(
self.get_http_client_with_proxy(proxy, insecure)
)
self.service = discovery.build(
service_name, service_version, http=http_client
)
else:
self.service = discovery.build(
service_name, service_version, credentials=credentials
)
@staticmethod
def get_http_client_with_proxy(proxy, insecure):
"""
        Create an http client configured with a proxy, for use when a proxy is required.
:param proxy: Whether to use a proxy.
:param insecure: Whether to disable ssl and use an insecure connection.
:return:
"""
if proxy:
proxies = handle_proxy()
https_proxy = proxies.get("https")
http_proxy = proxies.get("http")
proxy_conf = https_proxy if https_proxy else http_proxy
# if no proxy_conf - ignore proxy
if proxy_conf:
if not proxy_conf.startswith("https") and not proxy_conf.startswith(
"http"
):
proxy_conf = "https://" + proxy_conf
parsed_proxy = urllib.parse.urlparse(proxy_conf)
proxy_info = httplib2.ProxyInfo(
proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
proxy_host=parsed_proxy.hostname,
proxy_port=parsed_proxy.port,
proxy_user=parsed_proxy.username,
proxy_pass=parsed_proxy.password,
)
return httplib2.Http(
proxy_info=proxy_info, disable_ssl_certificate_validation=insecure
)
return httplib2.Http(disable_ssl_certificate_validation=insecure)
# disable-secrets-detection-end
class PubSubClient(BaseGoogleClient):
def __init__(
self,
default_project,
default_subscription,
default_max_msgs,
client_secret,
**kwargs,
):
super().__init__(client_secret=client_secret, **kwargs)
self.default_project = default_project
if not default_project:
self.default_project = self._extract_project_from_client_secret(
client_secret
)
self.default_subscription = default_subscription
self.default_max_msgs = default_max_msgs
def _extract_project_from_client_secret(self, client_secret):
"""Extracts project name from a client secret json"""
project_id = client_secret.get("project_id")
if isinstance(project_id, list):
project_id = project_id[0]
return project_id
def _create_subscription_body(
self,
ack_deadline_seconds,
expiration_ttl,
labels,
message_retention_duration,
push_attributes,
push_endpoint,
retain_acked_messages,
topic_name,
):
"""Create a subscription body"""
if push_endpoint or push_attributes:
push_config = assign_params(
pushEndpoint=push_endpoint, attributes=push_attributes,
)
else:
push_config = None
body = assign_params(
topic=topic_name,
pushConfig=push_config,
ackDeadlineSeconds=ack_deadline_seconds,
retainAckedMessages=retain_acked_messages,
messageRetentionDuration=message_retention_duration,
labels=labels,
expirationPolicy=assign_params(ttl=expiration_ttl),
)
return body
def _create_topic_body(self, allowed_persistence_regions, kms_key_name, labels):
"""Create a topic body"""
message_storage_policy = assign_params(
allowedPersistenceRegions=allowed_persistence_regions
)
body = assign_params(
labels=labels,
messageStoragePolicy=message_storage_policy,
kmsKeyName=kms_key_name,
)
return body
def list_topic(self, project_id, page_size, page_token=None):
"""Get topic list from GoogleClient"""
return (
self.service.projects()
.topics()
.list(project=project_id, pageSize=page_size, pageToken=page_token)
.execute()
)
def list_topic_subs(self, topic_id, page_size, page_token=None):
"""Get topic subscriptions from GoogleClient"""
return (
self.service.projects()
.topics()
.subscriptions()
.list(topic=topic_id, pageSize=page_size, pageToken=page_token)
.execute()
)
def list_project_subs(self, project_id, page_size, page_token=None):
"""Get project subscriptions list from GoogleClient"""
return (
self.service.projects()
.subscriptions()
.list(project=project_id, pageSize=page_size, pageToken=page_token)
.execute()
)
def get_sub(self, sub_name):
"""Get subscription by name from GoogleClient"""
return (
self.service.projects().subscriptions().get(subscription=sub_name).execute()
)
def publish_message(self, project_id, topic_id, req_body):
"""Publish a topic message via GoogleClient"""
return (
self.service.projects()
.topics()
.publish(
topic=GoogleNameParser.get_topic_name(project_id, topic_id),
body=req_body,
)
.execute()
)
def pull_messages(self, sub_name, max_messages, ret_immediately=True):
"""
Pull messages for the subscription
:param sub_name: Subscription name
:param max_messages: The maximum number of messages to return for this request. Must be a positive integer
:param ret_immediately: when set to true will return immediately, otherwise will be async
:return: Messages
"""
req_body = {"returnImmediately": ret_immediately, "maxMessages": max_messages}
return (
self.service.projects()
.subscriptions()
.pull(subscription=sub_name, body=req_body)
.execute()
)
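    # For reference, the pull request body built above looks like
    # {"returnImmediately": True, "maxMessages": 50} (values hypothetical), and the
    # API responds with a dict containing a "receivedMessages" list when messages
    # are available.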
def ack_messages(self, sub_name, acks):
"""
Ack a list of messages
:param sub_name: subscription name
:param acks: ack ids to ack
:return:
"""
body = {"ackIds": acks}
return (
self.service.projects()
.subscriptions()
.acknowledge(subscription=sub_name, body=body)
.execute()
)
def create_subscription(
self,
sub_name,
topic_name,
push_endpoint,
push_attributes,
ack_deadline_seconds,
retain_acked_messages,
message_retention_duration,
labels,
expiration_ttl,
):
"""
Creates a subscription
:param sub_name: full sub name
:param topic_name: full topic name
:param push_endpoint: A URL locating the endpoint to which messages should be pushed.
        :param push_attributes: Input format: "key=val" pairs separated by ",".
:param ack_deadline_seconds: The amount of time Pub/Sub waits for the subscriber to ack.
:param retain_acked_messages: if 'true' then retain acknowledged messages
:param message_retention_duration: How long to retain unacknowledged messages
        :param labels: Input format: "key=val" pairs separated by ",".
:param expiration_ttl: The "time-to-live" duration for the subscription.
:return: Subscription
"""
body = self._create_subscription_body(
ack_deadline_seconds,
expiration_ttl,
labels,
message_retention_duration,
push_attributes,
push_endpoint,
retain_acked_messages,
topic_name,
)
return (
self.service.projects()
.subscriptions()
.create(name=sub_name, body=body)
.execute()
)
def update_subscription(
self,
sub_name,
topic_name,
update_mask,
push_endpoint,
push_attributes,
ack_deadline_seconds,
retain_acked_messages,
message_retention_duration,
labels,
expiration_ttl,
):
"""
Updates a subscription
:param sub_name: full sub name
:param topic_name: full topic name
:param update_mask: Indicates which fields in the provided subscription to update.
:param push_endpoint: A URL locating the endpoint to which messages should be pushed.
        :param push_attributes: Input format: "key=val" pairs separated by ",".
:param ack_deadline_seconds: The amount of time Pub/Sub waits for the subscriber to ack.
:param retain_acked_messages: if 'true' then retain acknowledged messages
:param message_retention_duration: How long to retain unacknowledged messages
        :param labels: Input format: "key=val" pairs separated by ",".
:param expiration_ttl: The "time-to-live" duration for the subscription.
:return: Subscription
"""
sub_body = self._create_subscription_body(
ack_deadline_seconds,
expiration_ttl,
labels,
message_retention_duration,
push_attributes,
push_endpoint,
retain_acked_messages,
topic_name,
)
body = assign_params(subscription=sub_body, updateMask=update_mask)
return (
self.service.projects()
.subscriptions()
.patch(name=sub_name, body=body)
.execute()
)
def create_topic(
self, topic_name, labels, allowed_persistence_regions, kms_key_name
):
"""
Create a topic in the project
:param topic_name: name of the topic to be created
        :param labels: "key=val" pairs separated by ","
        :param allowed_persistence_regions: a list of IDs of GCP regions
:param kms_key_name: The full name of the Cloud KMS CryptoKey to be used to restrict access on this topic.
:return: Topic
"""
body = self._create_topic_body(
allowed_persistence_regions, kms_key_name, labels
)
return (
self.service.projects()
.topics()
.create(name=topic_name, body=body)
.execute()
)
def delete_topic(self, topic_name):
"""
Deletes a topic in the project
:param topic_name: name of the topic to be created
:return: Delete response
"""
return self.service.projects().topics().delete(topic=topic_name).execute()
def update_topic(
self, topic_name, labels, allowed_persistence_regions, kms_key_name, update_mask
):
"""
Updates a topic in the project
:param topic_name: name of the topic to be updated
        :param labels: "key=val" pairs separated by ","
        :param allowed_persistence_regions: a list of IDs of GCP regions
:param kms_key_name: The full name of the Cloud KMS CryptoKey to be used to restrict access on this topic.
:param update_mask: Indicates which fields in the provided topic to update.
:return: Topic
"""
topic = self._create_topic_body(
allowed_persistence_regions, kms_key_name, labels
)
body = assign_params(topic=topic, updateMask=update_mask)
return (
self.service.projects().topics().patch(name=topic_name, body=body).execute()
)
def subscription_seek_message(self, subscription_name, time_string, snapshot=None):
"""
Seeks messages in subscription
:param subscription_name: Subscription to seek messages for
        :param time_string: A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
:param snapshot: The snapshot to seek to.
:return: Empty string if successful
"""
body = assign_params(time=time_string, snapshot=snapshot)
return (
self.service.projects()
.subscriptions()
.seek(subscription=subscription_name, body=body)
.execute()
)
def get_topic_snapshots_list(self, topic_name, page_size, page_token=None):
"""
Get snapshots list
:param topic_name: The name of the topic from which this snapshot is retaining messages.
:param page_size: Max number of results
:param page_token: Next page token as returned from the API.
:return:
"""
return (
self.service.projects()
.topics()
.snapshots()
.list(topic=topic_name, pageSize=page_size, pageToken=page_token)
.execute()
)
def get_project_snapshots_list(self, project_name, page_size, page_token):
"""
Get snapshots list
:param project_name: The name of the project from which this snapshot is retaining messages.
:param page_size: Max number of results
:param page_token: Next page token as returned from the API.
:return: Snapshot list
"""
return (
self.service.projects()
.snapshots()
.list(project=project_name, pageSize=page_size, pageToken=page_token)
.execute()
)
def create_snapshot(self, subscription_name, snapshot_name, labels):
"""
Create a snapshot
:param subscription_name: The subscription whose backlog the snapshot retain
:param snapshot_name: The name of the snapshot
:param labels: labels dict
:return: Snapshot
"""
body = assign_params(subscription=subscription_name, labels=labels)
return (
self.service.projects()
.snapshots()
.create(name=snapshot_name, body=body)
.execute()
)
def update_snapshot(
self, snapshot_name, topic_name, update_mask, expire_time, labels
):
"""
:param snapshot_name: The name of the snapshot
:param topic_name: The ID of the topic from which this snapshot is retaining messages.
:param update_mask: Indicates which fields in the provided snapshot to update.
:param expire_time: A timestamp in RFC3339 UTC "Zulu" format
:param labels: labels dict
:return: Snapshot
"""
snapshot = assign_params(
name=snapshot_name, topic=topic_name, expireTime=expire_time, labels=labels
)
body = assign_params(snapshot=snapshot, updateMask=update_mask)
return (
self.service.projects()
.snapshots()
.patch(name=snapshot_name, body=body)
.execute()
)
def delete_snapshot(self, snapshot_name):
"""
Delete a snapshot
:param snapshot_name: full snapshot name
:return: Empty response
"""
return (
self.service.projects().snapshots().delete(snapshot=snapshot_name).execute()
)
""" HELPER FUNCTIONS"""
def init_google_client(
service_account_json,
default_subscription,
default_project,
default_max_msgs,
insecure,
**kwargs,
) -> PubSubClient:
"""
Initializes google client
:param service_account_json: A string of the generated credentials.json
:param default_subscription: Default subscription to use
:param default_project: Default project to use
:param default_max_msgs: Max messages to pull per fetch
:param insecure: Flag - do not validate https certs
:param kwargs:
:return:
"""
try:
service_account_json = json.loads(service_account_json)
client = PubSubClient(
default_project=default_project,
default_subscription=default_subscription,
default_max_msgs=default_max_msgs,
service_name=SERVICE_NAME,
service_version=SERVICE_VERSION,
client_secret=service_account_json,
scopes=SCOPES,
insecure=insecure,
**kwargs,
)
return client
except ValueError as e:
return_error(
"Failed to parse Service Account Private Key in json format, please make sure you entered it correctly"
)
raise e
def message_to_incident(message):
"""
Create incident from a message
"""
published_time_dt = dateparser.parse(message.get("publishTime"))
incident = {
"name": f'Google PubSub Message {message.get("messageId")}',
"rawJSON": json.dumps(message),
"occurred": convert_datetime_to_iso_str(published_time_dt),
}
return incident
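# Illustrative sketch of the mapping above (hypothetical message values, not executed):
#   message_to_incident({"messageId": "123", "publishTime": "2020-01-01T10:00:00.000Z", "data": "..."})
# would yield an incident along the lines of:
#   {"name": "Google PubSub Message 123",
#    "rawJSON": "<the message serialized as JSON>",
#    "occurred": "2020-01-01T10:00:00.000000Z"}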
def get_messages_ids_and_max_publish_time(msgs):
"""
Get message IDs and max publish time from given pulled messages
"""
msg_ids = set()
max_publish_time = None
for msg in msgs:
msg_ids.add(msg.get("messageId"))
publish_time = msg.get("publishTime")
if publish_time:
publish_time = dateparser.parse(msg.get("publishTime"))
if not max_publish_time:
max_publish_time = publish_time
else:
max_publish_time = max(max_publish_time, publish_time)
if max_publish_time:
max_publish_time = convert_datetime_to_iso_str(max_publish_time)
return msg_ids, max_publish_time
def convert_datetime_to_iso_str(publish_time):
"""
Converts datetime to str in "%Y-%m-%dT%H:%M:%S.%fZ" format
:param publish_time: Datetime
:return: date str in "%Y-%m-%dT%H:%M:%S.%fZ" format
"""
try:
return publish_time.strftime(ISO_DATE_FORMAT)
except ValueError:
return publish_time.strftime("%Y-%m-%dT%H:%M:%SZ")
def attribute_pairs_to_dict(attrs_str: Optional[str], delim_char: str = ","):
"""
Transforms a string of multiple inputs to a dictionary list
    :param attrs_str: attributes as "key=val" pairs separated by the delimiter character
    :param delim_char: delimiter character between attribute pairs
:return:
"""
if not attrs_str:
return attrs_str
attrs = {}
regex = re.compile(r"(.*)=(.*)")
for f in attrs_str.split(delim_char):
match = regex.match(f)
if match is None:
raise ValueError(f"Could not parse field: {f}")
attrs.update({match.group(1): match.group(2)})
return attrs
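# A minimal sketch of the "key=val" parsing convention used by the commands below.
# The attribute names are hypothetical and the helper is never called at import time.
def _example_attribute_pairs_to_dict():
    attrs = attribute_pairs_to_dict("env=prod,team=sec")
    # attrs == {"env": "prod", "team": "sec"}
    return attrs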
""" COMMAND FUNCTIONS """
def test_module(client: PubSubClient, is_fetch: bool):
"""
Returning 'ok' indicates that the integration works like it is supposed to:
1. Connection to the service is successful.
2. Fetch incidents is configured properly
    :param client: GoogleClient
    :param is_fetch: Whether fetch incidents is enabled; if so, pulling messages is also tested
:return: 'ok' if test passed, anything else will fail the test.
"""
client.list_topic(
GoogleNameParser.get_project_name(client.default_project), page_size=1
)
if is_fetch:
client.pull_messages(
GoogleNameParser.get_subscription_project_name(
client.default_project, client.default_subscription
),
max_messages=1,
)
return "ok"
def topics_list_command(
client: PubSubClient,
project_id: str,
page_size: str = None,
page_token: str = None,
) -> Tuple[str, dict, dict]:
"""
Get topics list by project_id
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
:param project_id: project name
:param page_size: page size
:param page_token: page token, as returned from the api
:return: list of topics
"""
full_project_name = GoogleNameParser.get_project_name(project_id)
res = client.list_topic(full_project_name, page_size, page_token)
topics = list(res.get("topics", []))
next_page_token = res.get("nextPageToken")
readable_output = tableToMarkdown(
f"Topics for project {project_id}", topics, ["name"]
)
outputs = {"GoogleCloudPubSubTopics(val && val.name === obj.name)": topics}
if next_page_token:
outputs["GoogleCloudPubSub.Topics.nextPageToken"] = next_page_token
readable_output += f"**Next Page Token: {next_page_token}**"
return readable_output, outputs, res
def publish_message_command(
client: PubSubClient,
topic_id: str,
project_id: str,
data: str = None,
attributes: str = None,
) -> Tuple[str, dict, dict]:
"""
Publishes message in the topic
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param project_id: project name
:param topic_id: topic name without project name prefix
    :param attributes: message attributes as "key=val" pairs separated by ","
:param data: message data str
:param client: GoogleClient
    :return: the published message result
"""
body = get_publish_body(attributes, data)
published_messages = client.publish_message(project_id, topic_id, body)
output = []
for msg_id in published_messages.get("messageIds"):
output.append(
{
"topic": topic_id,
"messageId": msg_id,
"data": data,
"attributes": body.get("attributes"),
}
)
ec = {"GoogleCloudPubSubPublishedMessages(val.messageId === obj.messageId)": output}
return (
tableToMarkdown(
"Google Cloud PubSub has published the message successfully",
output,
removeNull=True,
headerTransform=pascalToSpace,
),
ec,
published_messages,
)
def get_publish_body(message_attributes, message_data):
"""
Creates publish messages body from given arguments
:param message_attributes: message attributes
:param message_data: message data
:return: publish message body
"""
message = {}
if message_data:
# convert to base64 string
message["data"] = str(base64.b64encode(message_data.encode("utf8")))[2:-1]
if message_attributes:
message["attributes"] = attribute_pairs_to_dict(message_attributes)
body = {"messages": [message]}
return body
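# A minimal sketch of the publish body built above (hypothetical data and attributes,
# never called at import time). "aGVsbG8=" is the base64 encoding of "hello".
def _example_get_publish_body():
    body = get_publish_body("severity=high", "hello")
    # body == {"messages": [{"data": "aGVsbG8=", "attributes": {"severity": "high"}}]}
    return body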
def pull_messages_command(
client: PubSubClient,
subscription_id: str,
project_id: str,
max_messages: str = None,
ack: str = None,
) -> Tuple[str, dict, list]:
"""
Pulls messages from the subscription
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
:param project_id: project name
:param subscription_id: Subscription name to pull messages from
:param max_messages: The maximum number of messages to return for this request. Must be a positive integer
:param ack: Acknowledge the messages pulled if set to true.
:return: list of messages
"""
full_subscription_name = GoogleNameParser.get_subscription_project_name(
project_id, subscription_id
)
raw_msgs = client.pull_messages(full_subscription_name, max_messages)
if "receivedMessages" in raw_msgs:
acknowledges, msgs = extract_acks_and_msgs(raw_msgs)
ec = {
"GoogleCloudPubSubPulledMessages(val && val.messageId === obj.messageId)": msgs
}
if ack == "true":
client.ack_messages(full_subscription_name, acknowledges)
hr = tableToMarkdown("Google Cloud PubSub Messages", msgs, removeNull=True)
return hr, ec, raw_msgs
else:
return "No new messages found", {}, raw_msgs
def ack_messages_command(
client: PubSubClient, ack_ids: str, subscription_id: str, project_id: str,
) -> Tuple[str, dict, list]:
"""
ACKs previously pulled messages using ack Ids
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
:param ack_ids: csv str with ack ids
:param project_id: project name
:param subscription_id: Subscription name to pull messages from
:return: Success message
"""
sub_name = GoogleNameParser.get_subscription_project_name(
project_id, subscription_id
)
ack_ids = argToList(ack_ids)
raw_res = client.ack_messages(sub_name, ack_ids)
title = f"Subscription {subscription_id} had the following ids acknowledged"
readable_output = tableToMarkdown(title, ack_ids, headers=["ACK ID"])
return readable_output, {}, raw_res
def extract_acks_and_msgs(raw_msgs, add_ack_to_msg=True):
"""
Extracts acknowledges and message data from raw_msgs
:param raw_msgs: Raw messages object
:param add_ack_to_msg: Boolean flag - if true, will add ack to message under "ackId"
:return:
"""
msg_list = []
acknowledges = []
if isinstance(raw_msgs, dict):
rcvd_msgs = raw_msgs.get("receivedMessages", [])
for raw_msg in rcvd_msgs:
msg = raw_msg.get("message", {})
decoded_data = str(msg.get("data", ""))
try:
decoded_data = str(base64.b64decode(decoded_data))[2:-1]
except Exception:
# display message with b64 value
pass
msg["data"] = decoded_data
ack_id = raw_msg.get("ackId")
if ack_id:
acknowledges.append(ack_id)
if add_ack_to_msg:
msg["ackId"] = ack_id
msg_list.append(msg)
return acknowledges, msg_list
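# A minimal sketch of the extraction above, using a hypothetical pull response
# (never called at import time). "aGVsbG8=" is base64 for "hello"; the ack id and
# message id are made up.
def _example_extract_acks_and_msgs():
    raw_msgs = {
        "receivedMessages": [
            {"ackId": "ACK-1", "message": {"messageId": "42", "data": "aGVsbG8="}}
        ]
    }
    acks, msgs = extract_acks_and_msgs(raw_msgs)
    # acks == ["ACK-1"]
    # msgs == [{"messageId": "42", "data": "hello", "ackId": "ACK-1"}]
    return acks, msgs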
def subscriptions_list_command(
client: PubSubClient,
project_id: str,
page_size: str = None,
page_token: str = None,
topic_id: str = None,
) -> Tuple[str, dict, dict]:
"""
Get subscription list by project_id or by topic_id
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
:param project_id: project name
:param page_size: page size
:param page_token: page token, as returned from the api
:param topic_id: topic name
:return: list of subscriptions
"""
title = "Subscriptions"
if topic_id:
full_topic_name = GoogleNameParser.get_topic_name(project_id, topic_id)
raw_response = client.list_topic_subs(full_topic_name, page_size, page_token)
subs = [{"name": sub} for sub in raw_response.get("subscriptions", [])]
next_page_token = raw_response.get("nextPageToken")
title += f" for topic {topic_id} in project {project_id}"
readable_output = tableToMarkdown(
title, subs, headers=["name"], headerTransform=pascalToSpace
)
else:
full_project_name = GoogleNameParser.get_project_name(project_id)
raw_response = client.list_project_subs(
full_project_name, page_size, page_token
)
subs = raw_response.get("subscriptions", "")
next_page_token = raw_response.get("nextPageToken")
title += f" in project {project_id}"
for sub in subs:
sub["deliveryType"] = "Push" if sub.get("pushConfig") else "Pull"
readable_output = tableToMarkdown(
title,
subs,
headers=["name", "topic", "ackDeadlineSeconds", "labels"],
headerTransform=pascalToSpace,
)
outputs = {"GoogleCloudPubSubSubscriptions(val && val.name === obj.name)": subs}
if next_page_token:
outputs["GoogleCloudPubSubSubscriptions.nextPageToken"] = next_page_token
readable_output += f"**Next Page Token: {next_page_token}**"
return readable_output, outputs, raw_response
def get_subscription_command(
client: PubSubClient, subscription_id: str, project_id: str
) -> Tuple[str, dict, dict]:
"""
    Get a single subscription by its full name
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param subscription_id:
:param client: GoogleClient
:param project_id: project name
:return: subscription
"""
full_sub_name = GoogleNameParser.get_subscription_project_name(
project_id, subscription_id
)
sub = client.get_sub(full_sub_name)
sub["deliveryType"] = "Push" if sub.get("pushConfig") else "Pull"
title = f"Subscription {subscription_id}"
readable_output = tableToMarkdown(title, sub, headerTransform=pascalToSpace)
outputs = {"GoogleCloudPubSubSubscriptions(val && val.name === obj.name)": sub}
return readable_output, outputs, sub
def create_subscription_command(
client: PubSubClient,
subscription_id: str,
topic_id: str,
project_id: str,
push_endpoint: str = "",
push_attributes: str = "",
ack_deadline_seconds: str = "",
retain_acked_messages: str = "",
message_retention_duration: str = "",
labels: str = "",
expiration_ttl: str = "",
) -> Tuple[str, dict, dict]:
"""
Creates a subscription
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
:param project_id: Name of the project from which the subscription is receiving messages.
:param subscription_id: Name of the created subscription.
:param topic_id: Name of the topic from which the subscription is receiving messages.
:param push_endpoint: A URL locating the endpoint to which messages should be pushed.
    :param push_attributes: Input format: "key=val" pairs separated by ",".
:param ack_deadline_seconds: The amount of time Pub/Sub waits for the subscriber to ack.
:param retain_acked_messages: if 'true' then retain acknowledged messages
:param message_retention_duration: How long to retain unacknowledged messages
    :param labels: Input format: "key=val" pairs separated by ",".
:param expiration_ttl: The "time-to-live" duration for the subscription.
:return: Created subscription
"""
full_sub_name = GoogleNameParser.get_subscription_project_name(
project_id, subscription_id
)
full_topic_name = GoogleNameParser.get_topic_name(project_id, topic_id)
labels = attribute_pairs_to_dict(labels)
push_attributes = attribute_pairs_to_dict(push_attributes)
raw_sub = client.create_subscription(
full_sub_name,
full_topic_name,
push_endpoint,
push_attributes,
ack_deadline_seconds,
retain_acked_messages,
message_retention_duration,
labels,
expiration_ttl,
)
sub = dict(raw_sub)
title = f"Subscription {subscription_id} was created successfully"
readable_output = tableToMarkdown(title, sub)
sub["projectName"] = project_id
sub["subscriptionName"] = subscription_id
sub["deliveryType"] = "Push" if sub.get("pushConfig") else "Pull"
outputs = {"GoogleCloudPubSubSubscriptions": sub}
return readable_output, outputs, raw_sub
def update_subscription_command(
client: PubSubClient,
subscription_id: str,
topic_id: str,
update_mask: str,
project_id: str,
push_endpoint: str = "",
push_attributes: str = "",
ack_deadline_seconds: str = "",
retain_acked_messages: str = "",
message_retention_duration: str = "",
labels: str = "",
expiration_ttl: str = "",
) -> Tuple[str, dict, dict]:
"""
    Updates a subscription
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
:param project_id: Name of the project from which the subscription is receiving messages.
:param subscription_id: Name of the created subscription.
:param topic_id: Name of the topic from which the subscription is receiving messages.
:param update_mask: Indicates which fields in the provided subscription to update.
:param push_endpoint: A URL locating the endpoint to which messages should be pushed.
    :param push_attributes: Input format: "key=val" pairs separated by ",".
:param ack_deadline_seconds: The amount of time Pub/Sub waits for the subscriber to ack.
:param retain_acked_messages: if 'true' then retain acknowledged messages
:param message_retention_duration: How long to retain unacknowledged messages
    :param labels: Input format: "key=val" pairs separated by ",".
:param expiration_ttl: The "time-to-live" duration for the subscription.
    :return: Updated subscription
"""
full_sub_name = GoogleNameParser.get_subscription_project_name(
project_id, subscription_id
)
full_topic_name = GoogleNameParser.get_topic_name(project_id, topic_id)
labels = attribute_pairs_to_dict(labels)
push_attributes = attribute_pairs_to_dict(push_attributes)
raw_sub = client.update_subscription(
full_sub_name,
full_topic_name,
update_mask,
push_endpoint,
push_attributes,
ack_deadline_seconds,
retain_acked_messages,
message_retention_duration,
labels,
expiration_ttl,
)
sub = dict(raw_sub)
title = f"Subscription {subscription_id} was updated successfully"
readable_output = tableToMarkdown(title, sub)
sub["projectName"] = project_id
sub["subscriptionName"] = subscription_id
sub["deliveryType"] = "Push" if sub.get("pushConfig") else "Pull"
outputs = {"GoogleCloudPubSubSubscriptions(val && val.name === obj.name)": sub}
return readable_output, outputs, raw_sub
def create_topic_command(
client: PubSubClient,
topic_id: str,
project_id: str,
allowed_persistence_regions: str = "",
kms_key_name: str = None,
labels: str = None,
) -> Tuple[str, dict, dict]:
"""
Creates a topic
:param client: PubSub client instance
:param project_id: project ID
:param topic_id: topic ID
    :param labels: "key=val" pairs separated by ","
    :param allowed_persistence_regions: a comma-separated string of GCP region IDs
:param kms_key_name: The full name of the Cloud KMS CryptoKey to be used to restrict access on this topic.
:return: Created topic
"""
topic_name = GoogleNameParser.get_topic_name(project_id, topic_id)
allowed_persistence_regions = argToList(allowed_persistence_regions)
labels = attribute_pairs_to_dict(labels)
raw_topic = client.create_topic(
topic_name, labels, allowed_persistence_regions, kms_key_name
)
title = f"Topic **{topic_id}** was created successfully"
readable_output = tableToMarkdown(title, raw_topic, headerTransform=pascalToSpace)
outputs = {"GoogleCloudPubSubTopics": raw_topic}
return readable_output, outputs, raw_topic
def delete_topic_command(
client: PubSubClient, project_id: str, topic_id: str
) -> Tuple[str, dict, dict]:
"""
Delete a topic
:param client: PubSub client instance
:param project_id: project ID
:param topic_id: topic ID
:return: Command success/error message
"""
topic_name = GoogleNameParser.get_topic_name(project_id, topic_id)
raw_topic = client.delete_topic(topic_name)
readable_output = f"Topic **{topic_id}** was deleted successfully"
return readable_output, {}, raw_topic
def update_topic_command(
client: PubSubClient,
project_id: str,
topic_id: str,
update_mask: str,
allowed_persistence_regions: str = "",
kms_key_name: str = None,
labels: str = None,
) -> Tuple[str, dict, dict]:
"""
    Updates a topic
:param client: PubSub client instance
:param project_id: project ID
:param topic_id: topic ID
    :param labels: "key=val" pairs separated by ","
    :param allowed_persistence_regions: a comma-separated string of GCP region IDs
:param kms_key_name: The full name of the Cloud KMS CryptoKey to be used to restrict access on this topic.
:param update_mask: Indicates which fields in the provided topic to update.
    :return: Updated topic
"""
topic_name = GoogleNameParser.get_topic_name(project_id, topic_id)
allowed_persistence_regions = argToList(allowed_persistence_regions)
labels = attribute_pairs_to_dict(labels)
raw_topic = client.update_topic(
topic_name, labels, allowed_persistence_regions, kms_key_name, update_mask
)
title = f"Topic {topic_id} was updated successfully"
readable_output = tableToMarkdown(title, raw_topic, headerTransform=pascalToSpace)
outputs = {"GoogleCloudPubSubTopics(val && val.name === obj.name)": raw_topic}
return readable_output, outputs, raw_topic
def seek_message_command(
client: PubSubClient,
project_id: str,
subscription_id: str,
time_string: str = None,
snapshot: str = None,
) -> Tuple[str, dict, dict]:
"""
    Seeks a subscription to a point in time or to a given snapshot
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
    :param project_id: ID of the project from which the subscription is receiving messages.
    :param subscription_id: ID of the subscription, without the project prefix.
    :param time_string: A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
:param snapshot: The snapshot to seek to.
    :return: Seek operation result
"""
if not time_string and not snapshot:
return_error("Please provide either a time_string or a snapshot")
sub_name = GoogleNameParser.get_subscription_project_name(
project_id, subscription_id
)
raw_res = client.subscription_seek_message(sub_name, time_string, snapshot)
readable_output = (
"Message seek was successful for **"
+ (f"time: {time_string}" if time_string else f"snapshot:{snapshot}")
+ "**"
)
return readable_output, {}, raw_res
def snapshot_list_command(
client: PubSubClient,
project_id: str,
topic_id: str = None,
page_size: str = None,
page_token: str = None,
) -> Tuple[str, dict, dict]:
"""
Get snapshots list by project_id or topic_id
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
:param project_id: project id
    :param topic_id: topic id to list snapshots for (optional)
:param page_size: page size
:param page_token: page token, as returned from the api
:return: list of snapshots
"""
if topic_id:
topic_name = GoogleNameParser.get_topic_name(project_id, topic_id)
res = client.get_topic_snapshots_list(topic_name, page_size, page_token)
title = f"Snapshots for topic {topic_id}"
else:
project_name = GoogleNameParser.get_project_name(project_id)
res = client.get_project_snapshots_list(project_name, page_size, page_token)
title = f"Snapshots for project {project_id}"
snapshots = list(res.get("snapshots", []))
next_page_token = res.get("nextPageToken")
readable_output = tableToMarkdown(title, snapshots, ["name"])
outputs = {"GoogleCloudPubSubSnapshots(val && val.name === obj.name)": snapshots}
if next_page_token:
outputs["GoogleCloudPubSub.Snapshots.nextPageToken"] = next_page_token
readable_output += f"**Next Page Token: {next_page_token}**"
return readable_output, outputs, res
def snapshot_create_command(
client: PubSubClient,
project_id: str,
subscription_id: str,
snapshot_id: str,
labels: str = None,
) -> Tuple[str, dict, dict]:
"""
Create a snapshot
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
:param project_id: project id
:param subscription_id: The subscription whose backlog the snapshot retains.
:param snapshot_id: The id of the snapshot.
    :param labels: Input format: "key=val" pairs separated by ",".
    :return: Created snapshot
"""
subscription_name = GoogleNameParser.get_subscription_project_name(
project_id, subscription_id
)
snapshot_name = GoogleNameParser.get_snapshot_project_name(project_id, snapshot_id)
labels = attribute_pairs_to_dict(labels)
raw_snapshot = client.create_snapshot(subscription_name, snapshot_name, labels)
title = f"Snapshot **{snapshot_id}** was created successfully"
readable_output = tableToMarkdown(
title, raw_snapshot, headerTransform=pascalToSpace
)
outputs = {"GoogleCloudPubSubSnapshots": raw_snapshot}
return readable_output, outputs, raw_snapshot
def snapshot_update_command(
client: PubSubClient,
project_id: str,
topic_id: str,
snapshot_id: str,
update_mask: str,
expire_time: str = None,
labels: str = None,
) -> Tuple[str, dict, dict]:
"""
Updates a snapshot
Requires one of the following OAuth scopes:
https://www.googleapis.com/auth/pubsub
https://www.googleapis.com/auth/cloud-platform
:param client: GoogleClient
:param project_id: ID of the project from which the subscription is receiving messages.
:param topic_id: The ID of the topic from which this snapshot is retaining messages.
:param snapshot_id: The id of the snapshot.
:param update_mask: Indicates which fields in the provided snapshot to update.
:param expire_time: The snapshot is guaranteed to exist up until this time
:param labels: An object containing a list of "key": value pairs
:return:
"""
snapshot_name = GoogleNameParser.get_snapshot_project_name(project_id, snapshot_id)
topic_name = GoogleNameParser.get_topic_name(project_id, topic_id)
labels = attribute_pairs_to_dict(labels)
raw_snapshot = client.update_snapshot(
snapshot_name, topic_name, update_mask, expire_time, labels
)
title = f"Snapshot **{snapshot_id}** was updated successfully"
readable_output = tableToMarkdown(
title, raw_snapshot, headerTransform=pascalToSpace
)
outputs = {
"GoogleCloudPubSubSnapshots(val && val.name === obj.name)": raw_snapshot
}
return readable_output, outputs, raw_snapshot
def snapshot_delete_command(
client: PubSubClient, project_id: str, snapshot_id: str
) -> Tuple[str, dict, dict]:
"""
Delete a topic
:param client: PubSub client instance
:param project_id: The ID of the project from which the subscription is receiving messages.
:param snapshot_id: The id of the snapshot.
:return: Command success/error message
"""
snapshot_name = GoogleNameParser.get_snapshot_project_name(project_id, snapshot_id)
raw_res = client.delete_snapshot(snapshot_name)
readable_output = f"Snapshot **{snapshot_id}** was deleted successfully"
return readable_output, {}, raw_res
def fetch_incidents(
client: PubSubClient, last_run: dict, first_fetch_time: str, ack_incidents: bool
):
"""
This function will execute each interval (default is 1 minute).
:param client: GoogleClient initialized with default_project, default_subscription and default_max_msgs
:param last_run: last run dict containing last run data
:param first_fetch_time: how long ago should the subscription seek in first fetch
:param ack_incidents: Boolean flag - when set to True will ack back the fetched messages
    :return: incidents that will be created in Demisto, and the updated last run dict
"""
sub_name = GoogleNameParser.get_subscription_project_name(
client.default_project, client.default_subscription
)
# Setup subscription for fetch
last_run_fetched_ids, last_run_time = setup_subscription_last_run(
client, first_fetch_time, last_run, sub_name, ack_incidents
)
# Pull unique messages if available
msgs, msg_ids, acknowledges, max_publish_time = try_pull_unique_messages(
client, sub_name, last_run_fetched_ids, last_run_time, retry_times=1
)
# Handle fetch results
return handle_fetch_results(
client,
sub_name,
last_run,
acknowledges,
last_run_time,
max_publish_time,
msg_ids,
msgs,
ack_incidents,
)
def setup_subscription_last_run(
client, first_fetch_time, last_run, sub_name, ack_incidents
):
"""
    Sets up the subscription last run data, and seeks the subscription to a previous time if relevant
:param client: PubSub client
:param first_fetch_time: First fetch time provided by the user
:param last_run: Last run dict
:param sub_name: Name of the subscription
:param ack_incidents: ACK flag - if true, will not use seek except for first time fetch
:return:
"""
last_run_fetched_ids = set()
# Handle first time fetch
if not last_run or LAST_RUN_TIME_KEY not in last_run:
last_run_time, _ = parse_date_range(first_fetch_time, ISO_DATE_FORMAT)
# Seek previous message state
client.subscription_seek_message(sub_name, last_run_time)
else:
last_run_time = last_run.get(LAST_RUN_TIME_KEY)
last_run_fetched_val = last_run.get(LAST_RUN_FETCHED_KEY)
if last_run_fetched_val:
last_run_fetched_ids = set(last_run_fetched_val)
if not ack_incidents:
# Seek previous message state
client.subscription_seek_message(sub_name, last_run_time)
return last_run_fetched_ids, last_run_time
def try_pull_unique_messages(
client, sub_name, previous_msg_ids, last_run_time, retry_times=0
):
"""
Tries to pull unique messages for the subscription
:param client: PubSub client
:param sub_name: Subscription name
:param previous_msg_ids: Previous message ids set
:param last_run_time: previous run time
:param retry_times: How many times to retry pulling
:return:
1. Unique list of messages
2. Unique set of message ids
3. Messages acks
4. max_publish_time
"""
res_msgs = None
res_msg_ids = None
res_acks = None
res_max_publish_time = None
raw_msgs = client.pull_messages(sub_name, client.default_max_msgs)
if "receivedMessages" in raw_msgs:
res_acks, msgs = extract_acks_and_msgs(raw_msgs)
# continue only if messages were extracted successfully
if msgs:
msg_ids, max_publish_time = get_messages_ids_and_max_publish_time(msgs)
new_msg_ids = msg_ids.difference(previous_msg_ids)
# all messages are unique - return as is
if len(new_msg_ids) == len(msg_ids):
return msgs, msg_ids, res_acks, max_publish_time
            # no new unique messages - retry with one less attempt
elif len(new_msg_ids) == 0 and retry_times > 0:
demisto.debug(
f"GCP_PUBSUB_MSG Duplicates with max_publish_time: {max_publish_time}"
)
                return try_pull_unique_messages(
                    client, sub_name, previous_msg_ids, last_run_time, retry_times - 1
                )
# clean non-unique ids from raw_msgs
else:
filtered_raw_msgs = filter_non_unique_messages(
raw_msgs, previous_msg_ids, last_run_time
)
res_acks, res_msgs = extract_acks_and_msgs(filtered_raw_msgs)
(
res_msg_ids,
res_max_publish_time,
) = get_messages_ids_and_max_publish_time(res_msgs)
return res_msgs, res_msg_ids, res_acks, res_max_publish_time
def is_unique_msg(msg, previous_msg_ids, previous_run_time):
"""
    Determines whether a message is unique: its id is not in previous_msg_ids and its publish time is later than the previous run time
:param msg: raw Message object
:param previous_msg_ids: set of previously fetched message ids
:param previous_run_time: previous run time string
:return: True if message is unique
"""
message_dict = msg.get("message", {})
if message_dict:
msg_id = message_dict.get("messageId")
msg_pub_time = message_dict.get("publishTime", "")
return msg_id not in previous_msg_ids and msg_pub_time > previous_run_time
return False
def filter_non_unique_messages(raw_msgs, previous_msg_ids, previous_run_time):
"""
Filters messages that appear in previous_msg_ids or are older than the previous_run_time
:param raw_msgs: Raw message object
    :param previous_msg_ids: set of message ids fetched in previous runs
    :param previous_run_time: previous run time string
    :return: raw messages dict containing only the unique received messages
"""
raw_msgs = raw_msgs.get("receivedMessages", [])
# filter messages using `previous_msg_ids` and `previous_run_time`
filtered_raw_msgs = list(
filter(
lambda msg: is_unique_msg(msg, previous_msg_ids, previous_run_time),
raw_msgs,
)
)
return {"receivedMessages": filtered_raw_msgs}
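# Illustrative behaviour of the de-duplication above (hypothetical values): a received
# message is kept only if its messageId is not in previous_msg_ids AND its publishTime
# string compares greater than previous_run_time, so a message published exactly at the
# previous run time (or earlier) is filtered out even if its id has not been seen.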
def handle_fetch_results(
client,
sub_name,
last_run,
acknowledges,
last_run_time,
max_publish_time,
pulled_msg_ids,
pulled_msgs,
ack_incidents,
):
"""
Handle the fetch results
:param client: PubSub Client
:param sub_name: Subscription name
:param last_run: last run dict
    :param acknowledges: ack ids to acknowledge when ack_incidents is True
:param last_run_time: last run time
:param max_publish_time: max publish time of pulled messages
:param pulled_msg_ids: pulled message ids
:param pulled_msgs: pulled messages
:param ack_incidents: ack incidents flag
:return: incidents and last run
"""
incidents = []
if pulled_msg_ids and max_publish_time:
if last_run_time <= max_publish_time:
# Create incidents
for msg in pulled_msgs:
incident = message_to_incident(msg)
incidents.append(incident)
# ACK messages if relevant
if ack_incidents:
client.ack_messages(sub_name, acknowledges)
# Recreate last run to return with new values
last_run = {
LAST_RUN_TIME_KEY: max_publish_time,
LAST_RUN_FETCHED_KEY: list(pulled_msg_ids),
}
    # We didn't manage to pull any unique messages, so we try to increment by a microsecond - not relevant for ack
elif not ack_incidents:
last_run_time_dt = dateparser.parse(
max_publish_time if max_publish_time else last_run_time
)
assert last_run_time_dt is not None
last_run_time = convert_datetime_to_iso_str(
last_run_time_dt + timedelta(microseconds=1)
)
# Update last run time
last_run[LAST_RUN_TIME_KEY] = last_run_time
return incidents, last_run
def main():
params = demisto.params()
client = init_google_client(**params)
command = demisto.command()
LOG(f"Command being called is {command}")
try:
commands = {
"gcp-pubsub-topic-publish-message": publish_message_command,
"gcp-pubsub-topic-messages-pull": pull_messages_command,
"gcp-pubsub-topic-ack-messages": ack_messages_command,
"gcp-pubsub-topic-subscriptions-list": subscriptions_list_command,
"gcp-pubsub-topic-subscription-get-by-name": get_subscription_command,
"gcp-pubsub-topic-subscription-create": create_subscription_command,
"gcp-pubsub-topic-subscription-update": update_subscription_command,
"gcp-pubsub-topics-list": topics_list_command,
"gcp-pubsub-topic-create": create_topic_command,
"gcp-pubsub-topic-delete": delete_topic_command,
"gcp-pubsub-topic-update": update_topic_command,
"gcp-pubsub-topic-messages-seek": seek_message_command,
"gcp-pubsub-topic-snapshots-list": snapshot_list_command,
"gcp-pubsub-topic-snapshot-create": snapshot_create_command,
"gcp-pubsub-topic-snapshot-update": snapshot_update_command,
"gcp-pubsub-topic-snapshot-delete": snapshot_delete_command,
}
if command == "test-module":
demisto.results(test_module(client, params.get("isFetch")))
elif command == "fetch-incidents":
ack_incidents = params.get("ack_incidents")
first_fetch_time = params.get("first_fetch_time").rstrip()
last_run = demisto.getLastRun()
incidents, last_run = fetch_incidents(
client, last_run, first_fetch_time, ack_incidents
)
demisto.incidents(incidents)
demisto.setLastRun(last_run)
else:
args = demisto.args()
            # project_id is expected in all commands. If not provided, it defaults to client.default_project
if "project_id" not in args:
args["project_id"] = client.default_project
return_outputs(*commands[command](client, **args)) # type: ignore[operator]
# Log exceptions
except Exception as e:
return_error(
f"Failed to execute {demisto.command()} command. Error: {str(e)} , traceback: {traceback.format_exc()}"
)
if __name__ in ("__main__", "__builtin__", "builtins"):
main()
|
nilq/baby-python
|
python
|
from django.shortcuts import render, redirect
from django import forms
from django.contrib import messages
from django.contrib.auth.forms import PasswordChangeForm
from django.contrib.auth import authenticate, login, update_session_auth_hash
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from django.core.mail import EmailMessage
from django.db.models import Count
from django.contrib.auth.views import LoginView
from django.contrib.messages.views import SuccessMessageMixin
from .forms import RegisterForm, ProfileEdit, NewRegister
from .token import account_activation_token
from applications.events_news.models import Event, Attendees
from applications.alumniprofile.models import Profile, Constants
from applications.news.models import News
from applications.gallery.models import Album
from applications.geolocation.views import addPoints
import datetime
from django.utils import timezone
from itertools import chain
# Create your views here.
class LoginFormView(SuccessMessageMixin, LoginView):
template_name = 'AlumniConnect/login.html'
redirect_authenticated_user = True
# success_url = '/'
success_message = "Logged in successfully!"
def index(request):
sname = None
if request.user.is_authenticated:
sname = request.user.get_short_name()
now = timezone.now()
events = Event.objects.filter(start_date__gte=now).order_by('start_date').annotate(
count=Count('attendees__user_id'))
events_completed = Event.objects.filter(end_date__lt=now).order_by('-start_date').annotate(
count=Count('attendees__user_id'))
# Add Check here
news = News.objects.filter().order_by('-date')
# messages.success(request, 'Your password was successfully updated!')
events_to_display = list(chain(events, events_completed))[:3]
albums_list = Album.objects.order_by('-created').annotate(images_count=Count('albumimage'))[:3]
return render(request, "AlumniConnect/index.html",
{'name': sname, 'events': events_to_display, 'news': news, 'albums': albums_list})
def alumniBody(request):
return render(request, "AlumniConnect/alumnibody.html")
def alumniCard(request):
return render(request, "AlumniConnect/alumnicard.html")
def gallery(request):
return render(request, "AlumniConnect/gallery.html")
def job_posting(request):
return render(request, "AlumniConnect/job_posting.html")
# def jobboard(request):
# return render(request, "env/Lib/site-packages/gallery.html")
def register(request):
check = False
l = None
if request.method == 'POST':
form = RegisterForm(request.POST)
print(request.POST)
if form.is_valid():
batch = form.cleaned_data.get('batch')
branch = form.cleaned_data.get('branch')
programme = form.cleaned_data.get('programme')
l = Profile.objects.filter(batch=batch, programme=programme, branch=branch)
print('Testing output\n')
print(l)
check = True
else:
form = RegisterForm()
return render(request, 'AlumniConnect/registration.html', {'form': form, 'check': check, 'l': l})
def reg_no_gen(degree_, spec_, year):
degree = {"B.Tech": "1", "B.Des": '2', "M.Tech": '3', "M.Des": '4', "PhD": '5'}
spec = {"NA": '00', "CSE": "01", "ECE": "02", "ME": "03", "MT": "04", "NS": "05", "DS": "06"}
last_reg_no = Profile.objects.filter(year_of_admission=year).order_by('user__date_joined').last()
# print(last_reg_no)
new_reg_no = (int(str(last_reg_no.reg_no)[-4:]) + 1) if last_reg_no else 1
return degree[degree_] + spec[spec_] + str(year)[2:] + str(convert_int(new_reg_no, 4))
def convert_int(number, decimals):
return str(number).zfill(decimals)
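# Illustrative example of the generated registration number (hypothetical data):
# for the first B.Tech/CSE alumnus admitted in 2016, reg_no_gen("B.Tech", "CSE", 2016)
# concatenates "1" (degree) + "01" (branch) + "16" (year) + "0001" (serial) -> "101160001".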
def new_register(request):
if request.method == 'POST':
form = NewRegister(request.POST, request.FILES)
# print (request.POST)
if form.is_valid():
try:
first_name, last_name = request.POST['name'].split(' ', 1)
            except ValueError:
first_name = request.POST['name']
last_name = ""
# print (form.cleaned_data.get('date_of_joining'))
profile = form.save(commit=False)
profile.reg_no = reg_no_gen(profile.programme, profile.branch, profile.year_of_admission)
profile.country = request.POST['country']
profile.state = request.POST['state']
profile.city = request.POST['city']
password = User.objects.make_random_password(length=10)
# password = '12345678'
user = User.objects.create_user(
username=str(form.cleaned_data.get('roll_no')),
first_name=first_name,
last_name=last_name,
email=str(form.cleaned_data.get('email')),
password=password,
is_active=True
)
profile.user = user
profile.save()
mappt = addPoints({'city': str(request.POST['city']), 'state': str(request.POST['state']),
'country': str(request.POST['country'])})
print('Adding Map Point Status: ' + str(mappt))
return render(request, 'AlumniConnect/confirm_email.html')
else:
form = NewRegister()
return render(request, 'AlumniConnect/profileedit.html', {'form': form, 'edit': False})
@login_required
def profileedit(request, id):
if request.user.username == id:
profile = Profile.objects.get(roll_no=id)
if request.method == 'POST':
form = ProfileEdit(request.POST, request.FILES, instance=profile)
if form.is_valid():
profile = form.save()
profile.save()
return HttpResponseRedirect('/profile/' + id)
else:
print("here")
form = ProfileEdit(instance=profile)
return render(request, 'AlumniConnect/profileedit.html',
{'form': form, 'C': profile.country, 's': profile.state, 'c': profile.city, 'edit': True})
else:
return HttpResponseRedirect('/')
def activate(request, uidb64, token):
print('inside activate')
try:
uid = urlsafe_base64_decode(uidb64)
print(uid)
u = User.objects.get(username=uid)
print(u)
    except (TypeError, ValueError, OverflowError, User.DoesNotExist):
u = None
if u is not None and account_activation_token.check_token(u, token):
u.is_active = True
u.save()
login(request, u)
# return HttpResponse('Thank you for your email confirmation. Now you can login your account.')
return HttpResponseRedirect('/password/')
else:
return HttpResponse('Activation link is invalid!')
return redirect('/')
@login_required
def change_password(request):
if request.method == 'POST':
form = PasswordChangeForm(request.user, request.POST)
if form.is_valid():
user = form.save()
update_session_auth_hash(request, user) # Important!
messages.success(request, 'Your password was successfully updated!')
return redirect('home')
else:
messages.error(request, 'Please correct the error below.')
else:
form = PasswordChangeForm(request.user)
return render(request, 'AlumniConnect/change_password.html', {'form': form})
|
nilq/baby-python
|
python
|
import math, itertools
from functools import lru_cache
def distance_squared(p1, p2):
x1, y1 = p1
x2, y2 = p2
dx, dy = x1 - x2, y2 - y1
return dx * dx + dy * dy
def points_up_tile_size_px(size):
return math.floor(size * math.sqrt(3)), size * 2
def flats_up_tile_size_px(size):
return size * 2, math.floor(size * math.sqrt(3))
@lru_cache(maxsize=128)
def points_up_tile_center_point(grid_position, width, height, offset):
x, y = grid_position
dx, dy = offset
height = math.floor(height * 3/4)
# stagger odd rows
if y % 2:
dx += width // 2
# diamond-shaped grid
x += y // 2
return (x * width + dx, y * height + dy)
@lru_cache(maxsize=128)
def flats_up_tile_center_point(grid_position, width, height, offset):
x, y = grid_position
dx, dy = offset
width = math.floor(width * 3/4)
# stagger odd columns
if x % 2:
dy += height // 2
# diamond-shaped grid
y += x // 2
return (x * width + dx, y * height + dy)
def points_up_tile_corner_point(radius, index, position_px):
theta = math.tau * index / 6 + math.tau / 12
x, y = position_px
return (radius * math.cos(theta) + x, radius * math.sin(theta) + y)
def flats_up_tile_corner_point(radius, index, position_px):
theta = math.tau * index / 6
x, y = position_px
return (radius * math.cos(theta) + x, radius * math.sin(theta) + y)
@lru_cache(maxsize=128)
def points_up_tile_corner_points(grid_position, width, height, offset):
radius = height // 2
position_px = points_up_tile_center_point(grid_position, width, height, offset)
return [points_up_tile_corner_point(radius, i, position_px) for i in range(6)]
@lru_cache(maxsize=128)
def flats_up_tile_corner_points(grid_position, width, height, offset):
radius = width // 2
position_px = flats_up_tile_center_point(grid_position, width, height, offset)
return [flats_up_tile_corner_point(radius, i, position_px) for i in range(6)]
class HexTile:
def __init__(self, grid_x, grid_y, size_px, points_up):
self.grid_position = (grid_x, grid_y)
self.neighbours = []
if points_up:
self.width, self.height = points_up_tile_size_px(size_px)
else:
self.width, self.height = flats_up_tile_size_px(size_px)
self.points_up = points_up
def __str__(self):
return f'{self.grid_position}'
def __repr__(self):
return f'HexTile{self.grid_position}'
def center_point(self, offset=0):
if self.points_up:
return points_up_tile_center_point(
self.grid_position,
self.width,
self.height,
offset)
else:
return flats_up_tile_center_point(
self.grid_position,
self.width,
self.height,
offset)
def corner_points(self, offset=0):
if self.points_up:
return points_up_tile_corner_points(
self.grid_position,
self.width,
self.height,
offset)
else:
return flats_up_tile_corner_points(
self.grid_position,
self.width,
self.height,
offset)
def distance_squared(self, position, offset):
return distance_squared(self.center_point(offset), position)
class HexGrid:
def __init__(self, width, height, tile_size, points_up):
self.width = width
self.height = height
self.tiles = {
(x,y): HexTile(x, y, tile_size, points_up)
for (x,y) in itertools.product(range(width), range(height)) }
for tile in self.tiles.values():
self.populate_neighbours(tile)
def populate_neighbours(self, tile):
x, y = tile.grid_position
if x > 0:
tile.neighbours.append(self.tiles[(x-1, y)])
if x < self.width-1:
tile.neighbours.append(self.tiles[(x+1, y)])
if y > 0:
tile.neighbours.append(self.tiles[(x, y-1)])
        if x < self.width-1 and y > 0:
tile.neighbours.append(self.tiles[(x+1, y-1)])
if y < self.height-1:
tile.neighbours.append(self.tiles[(x, y+1)])
        if x > 0 and y < self.height-1:
tile.neighbours.append(self.tiles[(x-1, y+1)])
def find_path(self, from_tile, to_tiles, filter, visited=None):
        if visited is None:
visited = []
if not filter(from_tile) or from_tile in visited:
return None
if from_tile in to_tiles:
return [from_tile]
visited.append(from_tile)
for neighbour in from_tile.neighbours:
result = self.find_path(neighbour, to_tiles, filter, visited)
            if result is not None:
result.append(from_tile)
return result
return None
def top_row(self):
return [self.tiles[(x, 0)] for x in range(self.width)]
def bottom_row(self):
return [self.tiles[(x, self.height-1)] for x in range(self.width)]
def left_column(self):
return [self.tiles[(0, y)] for y in range(self.height)]
def right_column(self):
return [self.tiles[(self.width-1, y)] for y in range(self.height)]
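# Illustrative usage sketch (grid dimensions and tile size below are arbitrary,
# not part of the module): build a small points-up grid and find any path from the
# top-left tile to the bottom row, treating every tile as walkable.
if __name__ == "__main__":
    grid = HexGrid(width=4, height=3, tile_size=16, points_up=True)
    start = grid.tiles[(0, 0)]
    path = grid.find_path(start, grid.bottom_row(), filter=lambda tile: True)
    # find_path returns the tiles from the goal back to the start
    print([tile.grid_position for tile in path])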
|
nilq/baby-python
|
python
|
class Board:
ROW_COL_BASE = 1
def __init__(self, board, N):
"""
Builds a Board from a list of blocks and the size of the board. The list must hold N^2 blocks.
:param board: the list with the blocks.
:param N: the size of the board (length = width = N)
:return: a new Board with the blocks setup as passed.
"""
self.board = board
self.N = N
@classmethod
def fromMatrix(cls, blocks):
"""
Builds a Board from a matrix of blocks, i.e. from a list of list of blocks.
:param blocks: the matrix with the blocks
:return: a new Board with the blocks setup as passed.
"""
l = []
for row in blocks:
for block in row:
l.append(block)
N = len(row)
return cls(l, N)
def dimension(self):
""" board dimension N """
return self.N
# public int hamming()
# number of blocks out of place
def manhattan(self):
'''
:return: the sum of Manhattan distances between blocks and goal
'''
manhval = 0
for idx, tile in enumerate(self.board):
if tile == 0: continue
            rm = (tile - 1) // self.N - idx // self.N
            cm = (tile - 1) % self.N - idx % self.N
manhval += abs(rm) + abs(cm)
return manhval
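    # Worked example (hypothetical 3x3 board): for Board([1, 2, 3, 4, 5, 6, 0, 7, 8], 3)
    # only tiles 7 and 8 are out of place, each one column away from its goal slot,
    # so manhattan() returns 2.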
def isgoal(self):
'''
is this board the goal board?
:return: a boolean, true if this board is the goal board
'''
NN = self.N * self.N
for idx, val in enumerate(self.board):
if (idx != (NN-1) and val != idx +1):
return False
return True
def _index(self, row, col):
return self.N * (row - self.ROW_COL_BASE) + (col - self.ROW_COL_BASE)
def twin(self):
"""
a board that is obtained by exchanging any pair of blocks
:return: Board
"""
twin = self.board[:]
idx11 = self._index(1, 1)
idx12 = self._index(1, 2)
idx21 = self._index(2, 1)
idx22 = self._index(2, 2)
if (twin[idx11] == 0):
twin[idx12] = self.board[idx22] # 0 A
twin[idx22] = self.board[idx12] # x B
else:
if (twin[idx12] == 0):
twin[idx11] = self.board[idx21] # A 0
twin[idx21] = self.board[idx11] # B x
else:
twin[idx11] = self.board[idx12] # A B
                twin[idx12] = self.board[idx11]  # ? ?
        return Board(twin, self.N)
def neighbors(self):
"""
all neighboring boards
:return: a list of neighbours Boards
"""
blankidx = self.board.index(0)
        brow = blankidx // self.N
bcol = blankidx % self.N
idx0 = self._index(brow + 1, bcol + 1)
nbrs = []
if brow > 0: # move space up
nbrs.append(self._neighborBoard(brow, bcol + 1, idx0))
if brow < self.N - 1: # move space down
nbrs.append(self._neighborBoard(brow + 2, bcol + 1, idx0))
if bcol > 0: # move space left
nbrs.append(self._neighborBoard(brow + 1, bcol, idx0))
if bcol < self.N - 1: # move space right
nbrs.append(self._neighborBoard(brow + 1, bcol + 2, idx0))
return nbrs
def _neighborBoard(self, brow, bcol, idx0):
idxdst = self._index(brow, bcol) # +1 is for 1 based row and cols
brd = self.board[:]
        brd[idx0] = self.board[idxdst]
        brd[idxdst] = self.board[idx0]
b = Board(brd, self.N)
return b
def __eq__(self, other):
if other is None: return False
if other is self: return True
if type(other) is type(self):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other):
if other is None: return True
return not self.__eq__(other)
def toString(self):
"""
:return: the string representation of this board.
"""
s = '{N}\n'.format(N = self.N)
for idx, val in enumerate(self.board):
s += '{0:2d}'.format(val)
if ((idx + 1) % self.N) == 0: s += '\n'
return s
# public static void main(String[] args) // unit tests (not graded)
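# A minimal usage sketch (an assumed unit test, not part of the original assignment code):
# build a 3x3 board that is one move away from the goal and check manhattan(), isgoal() and neighbors().
if __name__ == '__main__':
    almost_done = Board.fromMatrix([[1, 2, 3],
                                    [4, 5, 6],
                                    [7, 0, 8]])  # 0 is the blank tile
    print(almost_done.toString())
    print('manhattan:', almost_done.manhattan())  # 1: only tile 8 is one column off
    print('is goal:', almost_done.isgoal())       # False
    for nb in almost_done.neighbors():
        if nb.isgoal():
            print('goal reached in one move:')
            print(nb.toString())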
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
"""
Clean up (making tar) a single simulation directory after successful cybershake submissions
"""
import os
import glob
import shutil
import tarfile
import argparse
from qcore import utils
SUBMISSION_DIR_NAME = "submission_temp"
SUBMISSION_TAR = "submission.tar"
SUBMISSION_FILES = [
"flist_*",
"*_header.cfg",
"machine_*.json",
"submit.sh",
"*.template",
"*py",
"*.pyc",
]
SUBMISSION_SL_LOGS = ["*.sl", "*.err", "*.out"]
LF_DIR_NAME = "LF_temp"
LF_SUB_DIR_NAME = "OutBin"
LF_TAR = "LF.tar"
LF_FILES = ["Rlog", "Restart", "SlipOut"]
def tar_files(directory_to_tar, archive_name):
"""params: directory_to_tar:source dir of all files to tar
archive_name: output dir for the tar.gz
"""
if os.path.isfile(archive_name):
open_type = "a"
print("Adding files to existing tar")
else:
open_type = "w"
print("Start making new tar")
try:
with tarfile.open(archive_name, open_type) as tar:
tar.add(directory_to_tar, arcname=os.path.basename(directory_to_tar))
except Exception as e:
print("Failed to make tar with exception {}".format(e))
else:
print("Finished adding files to tar")
def move_files(sim_dir, dest_dir, file_patterns):
"""
move all files that match any of the specified file patterns from sim dir to dest dir
:param sim_dir: path to source realization folder, eg. /home/melody.zhu/Albury/Runs/Albury/Albury_HYP15-21_S1384
:param dest_dir: path to destination dir
    :param file_patterns: a list of files/file_patterns to move
:return:
"""
for f in file_patterns:
for p in glob.glob1(sim_dir, f):
try:
shutil.move(os.path.join(sim_dir, p), os.path.join(dest_dir, p))
except Exception as e:
print(
"error while copy ing file from {} to {}\n{}".format(
sim_dir, dest_dir, e
)
)
def create_temp_dirs(sim_dir, outer_dir_name, inner_dir_name=""):
"""
creates two nested temp dirs containing files to be tared
:param sim_dir: path to realization folder
:param outer_dir_name: name of temporary dir for storing submission/lf related files to be tared
:param inner_dir_name: name of sub_dir inside the temporary dir for storing submission/lf related files to be tared
:return: paths to outer_dir and inner dir
"""
outer_dir = os.path.join(sim_dir, outer_dir_name)
utils.setup_dir(outer_dir)
inner_dir = ""
if inner_dir_name is not "":
inner_dir = os.path.join(sim_dir, outer_dir_name, inner_dir_name)
utils.setup_dir(inner_dir)
return outer_dir, inner_dir
def clean_up_submission_lf_files(
sim_dir, submission_files_to_tar=[], lf_files_to_tar=[]
):
"""
    main function for moving and tarring submission/LF files and deleting any temporary dirs created
    :param sim_dir: path to the realization folder
    :param submission_files_to_tar: a list of additional submission related files to tar
    :param lf_files_to_tar: a list of additional LF related files to tar
    :return: creates submission.tar and LF.tar in the realization folder
"""
submission_files_to_tar += SUBMISSION_FILES + SUBMISSION_SL_LOGS
lf_files_to_tar += LF_FILES
# create temporary submission dir
submission_dir, _ = create_temp_dirs(sim_dir, SUBMISSION_DIR_NAME)
# create temporary lf dir
lf_dir, lf_sub_dir = create_temp_dirs(sim_dir, LF_DIR_NAME, LF_SUB_DIR_NAME)
# move files to submission dir
move_files(sim_dir, submission_dir, submission_files_to_tar)
tar_files(submission_dir, os.path.join(sim_dir, SUBMISSION_TAR))
# move files to lf dir
move_files(os.path.join(sim_dir, "LF"), lf_dir, lf_files_to_tar)
    # move e3d segments to lf sub dir
e3d_segs_dir = os.path.join(sim_dir, "LF", "OutBin")
for f in os.listdir(e3d_segs_dir):
if "-" in f: # e3d segments have '-' in the name
shutil.move(os.path.join(e3d_segs_dir, f), os.path.join(lf_sub_dir, f))
tar_files(lf_dir, os.path.join(sim_dir, LF_TAR))
# remove temporary submission and lf dir
shutil.rmtree(lf_dir)
shutil.rmtree(submission_dir)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"sim_dir",
help="path to realization dir eg./home/melody.zhu/Albury/Runs/Albury/Albury_HYP15-21_S1384",
)
parser.add_argument(
"-submission",
"--submission_files_to_tar",
nargs="+",
default=[],
help="Please specify additional submission related file(s)/file_pattern(with '*') to tar separated by a space(if more than one). Default is {}".format(
" ".join(SUBMISSION_FILES + SUBMISSION_SL_LOGS)
),
)
parser.add_argument(
"-lf",
"--lf_files_to_tar",
nargs="+",
default=[],
help="Please specify additional LF related file(s)/file_pattern(with '*')to tar separated by a space(if more than one). Default is {}".format(
" ".join(LF_FILES)
),
)
args = parser.parse_args()
clean_up_submission_lf_files(
args.sim_dir,
submission_files_to_tar=args.submission_files_to_tar,
lf_files_to_tar=args.lf_files_to_tar,
)
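# Example invocation (a sketch; the script filename here is assumed for illustration):
#   python clean_up_sim_dir.py /home/melody.zhu/Albury/Runs/Albury/Albury_HYP15-21_S1384
# Additional patterns can be archived with -submission / --submission_files_to_tar and
# -lf / --lf_files_to_tar, e.g. -submission "*.log" to also tar log files.
# The result is submission.tar and LF.tar written inside the realization directory.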
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Spyder Editor
Estimate liver fibrosis or cirrhosis from the APRI and FIB-4 scores
This is a temporary script file.
"""
import math
#APRI: AST to Platelet Ratio Index
#AST unit: iu/l
#PRI (platelet count) unit: 10**9/L
#If APRI > 2, cirrhosis is possible
def APRI(AST,upper_AST,PRI):
apri=((AST*1.0/upper_AST)*100)/PRI
return apri
#FIB-4: Fibrosis-4 index
#age unit: years
#AST and ALT unit: U/L (U/L and iu/L are generally interchangeable; the former is the Chinese convention, the latter the international one)
def FIB4(age,AST,ALT,PRI):
fib4=(age*AST)/(PRI*math.sqrt(ALT))
return fib4
#Estimate liver condition
def Liver_condition(apri,fib4):
    if apri>2:
        print("Cirrhosis may be present")
        print("For chronic hepatitis B carriers, antiviral drug treatment should be considered")
    if fib4<1.45:
        print("No obvious liver fibrosis, or fibrosis below grade 2 (mild fibrosis)")
    if fib4>3.25:
        print("Liver fibrosis of grade 3-4 or above")
#Warning message
def Print_warming():
    print("As the algorithm keeps improving, results are for reference only. Please follow up with an infectious disease or hepatology specialist")
def Print_unit():
    print("Biochemical indicators come from liver function and routine blood tests")
    print("AST unit: iu/l")
    print("ALT unit: U/L")
    print("PRI unit: 10**9/L")
    print("Age unit: years")
    print("U/L and iu/L are generally interchangeable; the former is the Chinese convention, the latter the international one")
#Show the warning
Print_warming()
#Print the units of the biochemical values
print("-"*30)
Print_unit()
print("-"*30)
print("")
print("")
#Input parameters
print("Please enter the following parameters (e.g. 10, 23.5, etc.):")
AST=float(input("Aspartate aminotransferase value (AST): "))
upper_AST=float(input("Upper limit of aspartate aminotransferase (AST): "))
ALT=float(input("Alanine aminotransferase value (ALT): "))
PRI=float(input("Platelet count value (PRI): "))
age=float(input("Age: "))
apri=APRI(AST,upper_AST,PRI)
fib4=FIB4(age,AST,ALT,PRI)
print("-"*30)
print("")
print("")
print("推测结果:")
#肝情况推测
Liver_condition(apri,fib4)
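#Worked example (a sketch with made-up values, not clinical advice):
#  AST = 80 iu/l, upper_AST = 40 iu/l, ALT = 60 U/L, PRI = 150 (10**9/L), age = 50
#  APRI  = ((80/40)*100)/150        = about 1.33 -> below the 2.0 cirrhosis cut-off
#  FIB-4 = (50*80)/(150*sqrt(60))   = about 3.44 -> above 3.25, suggests grade 3-4 fibrosis or higher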
|
nilq/baby-python
|
python
|
"""
:class:`Registrable` is a "mixin" for endowing
any base class with a named registry for its subclasses and a decorator
for registering them.
"""
import importlib
import logging
from collections import defaultdict
from typing import (
Callable,
ClassVar,
DefaultDict,
Dict,
List,
Optional,
Set,
Tuple,
Type,
TypeVar,
cast,
)
from .exceptions import ConfigurationError, IntegrationMissingError, RegistryKeyError
from .from_params import FromParams
from .util import (
could_be_class_name,
find_integrations,
find_submodules,
import_module_and_submodules,
)
logger = logging.getLogger(__name__)
_T = TypeVar("_T")
_RegistrableT = TypeVar("_RegistrableT", bound="Registrable")
_SubclassRegistry = Dict[str, Tuple[type, Optional[str]]]
class Registrable(FromParams):
"""
Any class that inherits from ``Registrable`` gains access to a named registry for its
subclasses. To register them, just decorate them with the classmethod
``@BaseClass.register(name)``.
After which you can call ``BaseClass.list_available()`` to get the keys for the
registered subclasses, and ``BaseClass.by_name(name)`` to get the corresponding subclass.
Note that the registry stores the subclasses themselves; not class instances.
In most cases you would then call :meth:`~tango.common.from_params.FromParams.from_params()`
on the returned subclass.
You can specify a default by setting ``BaseClass.default_implementation``.
If it is set, it will be the first element of :meth:`list_available()`.
Note that if you use this class to implement a new ``Registrable`` abstract class,
you must ensure that all subclasses of the abstract class are loaded when the module is
loaded, because the subclasses register themselves in their respective files. You can
achieve this by having the abstract class and all subclasses in the ``__init__.py`` of the
module in which they reside (as this causes any import of either the abstract class or
a subclass to load all other subclasses and the abstract class).
"""
_registry: ClassVar[DefaultDict[type, _SubclassRegistry]] = defaultdict(dict)
default_implementation: Optional[str] = None
@classmethod
def register(
cls, name: str, constructor: Optional[str] = None, exist_ok: bool = False
) -> Callable[[Type[_T]], Type[_T]]:
"""
Register a class under a particular name.
:param name:
The name to register the class under.
:param constructor:
The name of the method to use on the class to construct the object. If this is given,
we will use this method (which must be a ``@classmethod``) instead of the default
constructor.
:param exist_ok:
If True, overwrites any existing models registered under ``name``. Else,
throws an error if a model is already registered under ``name``.
Examples
--------
To use this class, you would typically have a base class that inherits from ``Registrable``::
class Vocabulary(Registrable):
...
Then, if you want to register a subclass, you decorate it like this::
@Vocabulary.register("my-vocabulary")
class MyVocabulary(Vocabulary):
def __init__(self, param1: int, param2: str):
...
Registering a class like this will let you instantiate a class from a config file, where you
give ``"type": "my-vocabulary"``, and keys corresponding to the parameters of the ``__init__``
method (note that for this to work, those parameters must have type annotations).
If you want to have the instantiation from a config file call a method other than the
constructor, either because you have several different construction paths that could be
taken for the same object (as we do in ``Vocabulary``) or because you have logic you want to
happen before you get to the constructor (as we do in ``Embedding``), you can register a
specific ``@classmethod`` as the constructor to use, like this::
@Vocabulary.register("my-vocabulary-from-instances", constructor="from_instances")
@Vocabulary.register("my-vocabulary-from-files", constructor="from_files")
class MyVocabulary(Vocabulary):
def __init__(self, some_params):
...
@classmethod
def from_instances(cls, some_other_params) -> MyVocabulary:
... # construct some_params from instances
return cls(some_params)
@classmethod
def from_files(cls, still_other_params) -> MyVocabulary:
... # construct some_params from files
return cls(some_params)
"""
registry = Registrable._registry[cls]
def add_subclass_to_registry(subclass: Type[_T]) -> Type[_T]:
# Add to registry, raise an error if key has already been used.
if name in registry:
def fullname(c: type) -> str:
return f"{c.__module__}.{c.__qualname__}"
already_in_use_for = registry[name][0]
if already_in_use_for.__module__ == "__main__":
# Sometimes the same class shows up under module.submodule.Class and __main__.Class, and we
# don't want to make a fuss in that case. We prefer the class without __main__, so we go
# ahead and overwrite the entry.
pass
elif subclass.__module__ == "__main__":
# We don't want to overwrite the entry because the new one comes from the __main__ module.
return already_in_use_for
elif exist_ok:
message = (
f"Registering {fullname(subclass)} as a {fullname(cls)} under the name {name} overwrites "
f"existing entry {fullname(already_in_use_for)}, which is fine because you said "
"exist_ok=True."
)
logger.info(message)
else:
message = (
f"Attempting to register {fullname(subclass)} as a {fullname(cls)} under the name "
f"'{name}' failed. {fullname(already_in_use_for)} is already registered under that name."
)
raise ConfigurationError(message)
registry[name] = (subclass, constructor)
return subclass
return add_subclass_to_registry
@classmethod
def by_name(cls: Type[_RegistrableT], name: str) -> Callable[..., _RegistrableT]:
"""
Returns a callable function that constructs an argument of the registered class. Because
you can register particular functions as constructors for specific names, this isn't
necessarily the ``__init__`` method of some class.
"""
logger.debug(f"instantiating registered subclass {name} of {cls}")
subclass, constructor = cls.resolve_class_name(name)
if not constructor:
return cast(Type[_RegistrableT], subclass)
else:
return cast(Callable[..., _RegistrableT], getattr(subclass, constructor))
@classmethod
def search_modules(cls: Type[_RegistrableT], name: str):
"""
Search for and import modules where ``name`` might be registered.
"""
if could_be_class_name(name) or name in Registrable._registry[cls]:
return None
def try_import(module):
try:
import_module_and_submodules(module)
except IntegrationMissingError:
pass
except ImportError as e:
if e.name != module:
raise
integrations = {m.split(".")[-1]: m for m in find_integrations()}
integrations_imported: Set[str] = set()
if name in integrations:
try_import(integrations[name])
integrations_imported.add(name)
if name in Registrable._registry[cls]:
return None
if "::" in name:
maybe_integration = name.split("::")[0]
if maybe_integration in integrations:
try_import(integrations[maybe_integration])
integrations_imported.add(maybe_integration)
if name in Registrable._registry[cls]:
return None
for module in find_submodules(exclude={"tango.integrations*"}, recursive=False):
try_import(module)
if name in Registrable._registry[cls]:
return None
# If we still haven't found the registered 'name', try importing all other integrations.
for integration_name, module in integrations.items():
if integration_name not in integrations_imported:
try_import(module)
integrations_imported.add(integration_name)
if name in Registrable._registry[cls]:
return None
@classmethod
def resolve_class_name(
cls: Type[_RegistrableT],
name: str,
search_modules: bool = True,
) -> Tuple[Type[_RegistrableT], Optional[str]]:
"""
Returns the subclass that corresponds to the given ``name``, along with the name of the
method that was registered as a constructor for that ``name``, if any.
This method also allows ``name`` to be a fully-specified module name, instead of a name that
was already added to the ``Registry``. In that case, you cannot use a separate function as
a constructor (as you need to call ``cls.register()`` in order to tell us what separate
function to use).
If the ``name`` given is not in the registry and ``search_modules`` is ``True``,
it will search for and import modules where the class might be defined according to
:meth:`search_modules()`.
"""
if name in Registrable._registry[cls]:
subclass, constructor = Registrable._registry[cls][name]
return subclass, constructor
elif could_be_class_name(name):
# This might be a fully qualified class name, so we'll try importing its "module"
# and finding it there.
parts = name.split(".")
submodule = ".".join(parts[:-1])
class_name = parts[-1]
try:
module = importlib.import_module(submodule)
except ModuleNotFoundError:
raise ConfigurationError(
f"tried to interpret {name} as a path to a class "
f"but unable to import module {submodule}"
)
try:
subclass = getattr(module, class_name)
constructor = None
return subclass, constructor
except AttributeError:
raise ConfigurationError(
f"tried to interpret {name} as a path to a class "
f"but unable to find class {class_name} in {submodule}"
)
else:
# is not a qualified class name
if search_modules:
cls.search_modules(name)
return cls.resolve_class_name(name, search_modules=False)
available = cls.list_available()
suggestion = _get_suggestion(name, available)
raise RegistryKeyError(
(
f"'{name}' is not a registered name for '{cls.__name__}'"
+ (". " if not suggestion else f", did you mean '{suggestion}'? ")
)
+ "If your registered class comes from custom code, you'll need to import "
"the corresponding modules. If you're using Tango or AllenNLP from the command-line, "
"this is done by using the '--include-package' flag, or by specifying your imports "
"in a '.allennlp_plugins' file. "
"Alternatively, you can specify your choices "
"""using fully-qualified paths, e.g. {"model": "my_module.models.MyModel"} """
"in which case they will be automatically imported correctly."
)
@classmethod
def list_available(cls) -> List[str]:
"""List default first if it exists"""
keys = list(Registrable._registry[cls].keys())
default = cls.default_implementation
if default is None:
return keys
if default not in keys:
cls.search_modules(default)
keys = list(Registrable._registry[cls].keys())
if default not in keys:
raise ConfigurationError(f"Default implementation '{default}' is not registered")
else:
return [default] + [k for k in keys if k != default]
def _get_suggestion(name: str, available: List[str]) -> Optional[str]:
# Check for simple mistakes like using '-' instead of '_', or vice-versa.
for ch, repl_ch in (("_", "-"), ("-", "_")):
suggestion = name.replace(ch, repl_ch)
if suggestion in available:
return suggestion
return None
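# A minimal usage sketch (the class names below are invented for illustration and
# would normally live in their own importable module):
#
#     class Optimizer(Registrable):
#         default_implementation = "sgd"
#
#     @Optimizer.register("sgd")
#     class SGD(Optimizer):
#         def __init__(self, lr: float = 0.1):
#             self.lr = lr
#
#     Optimizer.list_available()          # -> ["sgd"]
#     Optimizer.by_name("sgd")(lr=0.01)   # constructs an SGD instance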
|
nilq/baby-python
|
python
|
import functools
import os
import pickle
# decorator for pickle-caching the result of a function
def pickle_cache(cache_filename, compare_filename_time=None, overwrite=False):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
exists = os.path.exists(cache_filename)
needs_redo = overwrite
if exists and compare_filename_time is not None:
needs_redo |= os.path.getmtime(cache_filename) < os.path.getmtime(compare_filename_time)
if not exists or needs_redo:
result = func(*args, **kwargs)
pkl_save(result, cache_filename)
else:
result = pkl_load(cache_filename)
return result
return wrapper
return decorator
def pkl_save(obj, filename):
pathname = os.path.split(filename)[0]
if not os.path.exists(pathname):
os.makedirs(pathname)
with open(filename, 'wb') as f:
pickle.dump(obj, f, protocol=2)
def pkl_load(filename):
with open(filename, 'rb') as f:
result = pickle.load(f)
return result
def isnewer(src, dst):
if os.path.exists(dst):
return os.path.getmtime(src) > os.path.getmtime(dst)
else:
return True
def ensure_path(path):
if not os.path.exists(path):
os.makedirs(path)
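# A minimal usage sketch: cache an expensive computation on disk so it is only
# recomputed when the pickle is missing. The cache path is an illustrative choice.
if __name__ == '__main__':
    @pickle_cache('cache/expensive_result.pkl')
    def expensive_computation(n):
        # pretend this is slow
        return [i ** 2 for i in range(n)]

    print(expensive_computation(10))  # first run computes and writes the pickle
    print(expensive_computation(10))  # later runs load cache/expensive_result.pkl instead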
|
nilq/baby-python
|
python
|
# Generated by Django 3.0.4 on 2021-04-13 19:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('website', '0004_uploader_image_l'),
]
operations = [
migrations.AlterField(
model_name='uploader',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
|
nilq/baby-python
|
python
|
# "THE BEER-WARE LICENSE" (Revision 42):
# <flal@melix.net> wrote this file. As long as you retain this notice you can do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return
# read a json describing people, do the magic to pick two different people for each person
# to send gifts to (so that you never gift yourself or your partner), and send everyone a mail so that no-one else knows.
#
# expects SMTP config by environment variables (SMTP_HOST, SMTP_LOGIN, SMTP_PASS)
# usage : main.py data.json
#
# ex json
# [
# {"nom":"Alice",
# "blacklist": ["Bob"],
# "email": "alice@corp.com"},
# {"nom":"Bob",
# "blacklist": ["Alice", "Floflo"],
# "email": "alice@corp.com"},
# ..]
import itertools
import random
import smtplib
import sys, os
import json
with open(sys.argv[1], 'br') as input_data :
raw_dat = json.load(input_data)
persons = [d["nom"] for d in raw_dat]
email_addresses = {}
blacklist = {}
for p in raw_dat :
email_addresses[p["nom"]] = p["email"]
blacklist[p["nom"]] = p["blacklist"]
def filter_first_in_blacklist(c):
""" this to avoid aving a couple """
return not c[0] == blacklist[c[1]][0] or not c[1] == blacklist[c[0]][0]
tentatives = 0
while tentatives < 1000 :
# generate a list of all possible binomes, excluding blacklisted combinations
binomes = list(filter(filter_first_in_blacklist, itertools.combinations(persons, 2)))
random.shuffle(binomes)
result = {}
has_present = set()
for p in persons :
try :
# find the first pair NOT containing this person
not_me = filter(lambda b: not p in b, binomes)
not_blacklist = filter(lambda b: not b[0] in blacklist[p], not_me)
not_blacklist = filter(lambda b: not b[1] in blacklist[p], not_blacklist)
binome = next(not_blacklist)
except StopIteration :
# no solution
break
for target in binome :
if target in has_present :
                # this one already has 2 presents, remove every remaining pair
                # containing this person.
binomes = list(filter(lambda c: not target in c, binomes))
else :
has_present.add(target)
result[p] = binome
try :
binomes.remove(binome)
except ValueError:
# binome already removed
pass
if len(result) == len(persons):
break
print("failed", tentatives)
tentatives += 1
with open("result_kdo.json", 'w') as f :
f.write(json.dumps(result, ensure_ascii=False))
# check
counts = {k:0 for k in persons}
for k,(a,b) in result.items() :
counts[a] += 1
counts[b] += 1
assert(a != k)
assert(b != k)
assert(not a in blacklist[k])
assert(not b in blacklist[k])
for c in counts.values() :
assert(c == 2)
SMTP_HOST = os.environ['SMTP_HOST']
SMTP_PORT = 587
SMTP_LOGIN = os.environ['SMTP_LOGIN']
SMTP_PASS = os.environ['SMTP_PASS']
# sending emails
s = smtplib.SMTP(host=SMTP_HOST, port=SMTP_PORT)
s.starttls()
s.login(SMTP_LOGIN, SMTP_PASS)
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
message_template = "Salut {name}!\n\nTu as l'honneur et le privilège d'offrir un truc à {gift_one} et {gift_two} ! Chic.\n\nGros bisous\nLe robot super content de Noël ( https://github.com/flo-dhalluin/tirage-kdo-bot )"
for name, gifts in result.items():
msg = MIMEMultipart()
msg["From"] = "SuperContent <no-reply@flal.net>"
msg["To"] = email_addresses[name]
msg["Subject"] = "[Cadeaux Famille D'halluin] A qui va tu faire un cadeau à Noël ?"
msg.attach(MIMEText(message_template.format(name=name, gift_one=gifts[0], gift_two=gifts[1]), 'plain'))
s.send_message(msg)
|
nilq/baby-python
|
python
|