hexsha
stringlengths 40
40
| size
int64 2
1.02M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
245
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
245
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
245
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
1.02M
| avg_line_length
float64 1
417k
| max_line_length
int64 1
987k
| alphanum_fraction
float64 0
1
| content_no_comment
stringlengths 0
1.01M
| is_comment_constant_removed
bool 1
class | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f708eee42ccb35a6cc075bb1efa20fbf89f53cc0
| 5,340
|
py
|
Python
|
server/extensions/json/__init__.py
|
ojengwa/zapcore
|
f9eace7dc8ab4bc8bc3bb9c212ba43395e0459c1
|
[
"MIT"
] | null | null | null |
server/extensions/json/__init__.py
|
ojengwa/zapcore
|
f9eace7dc8ab4bc8bc3bb9c212ba43395e0459c1
|
[
"MIT"
] | 3
|
2020-09-05T08:03:34.000Z
|
2021-05-07T20:03:30.000Z
|
server/extensions/json/__init__.py
|
ojengwa/zapcore
|
f9eace7dc8ab4bc8bc3bb9c212ba43395e0459c1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import datetime
import decimal
import io
import uuid
from flask import current_app
from flask import json as _json
from flask import request
from sqlalchemy import types
import arrow
text_type = str
def _wrap_reader_for_text(fp, encoding):
if isinstance(fp.read(0), bytes):
fp = io.TextIOWrapper(io.BufferedReader(fp), encoding)
return fp
def _wrap_writer_for_text(fp, encoding):
try:
fp.write('')
except TypeError:
fp = io.TextIOWrapper(fp, encoding)
return fp
class JSONEncoder(_json.JSONEncoder):
    """JSON encoder with support for additional Python datatypes.

    Beyond the stock Flask encoder this handles:

    - ``decimal.Decimal``: rendered as a minimal decimal string with
      trailing zeros (and a bare trailing dot) stripped, e.g.:
      - 2.01 -> 2.01
      - 2.0 -> 2
      - 2.010 -> 2.01
      - 2.000 -> 2
    - ``sqlalchemy.types.TypeEngine``: rendered via ``str()``.
    - ``arrow.Arrow``: rendered via ``str()`` (ISO8601 with offset).
    - ``datetime.datetime``: ISO8601 datetime string; naive values are
      assumed to be UTC and suffixed with ``Z``.
    - ``datetime.date``: ISO8601 date string.
    - ``uuid.UUID``: rendered via ``str()``.
    - any other iterable: converted to a list.

    Extended from http://flask.pocoo.org/snippets/119.
    """
    def __init__(self, *args, **kwargs):
        """Initialise the encoder and disable simplejson's Decimal handling."""
        super(JSONEncoder, self).__init__(*args, **kwargs)
        # Decimals are rendered by default() below, not by simplejson.
        self.use_decimal = False
    def default(self, obj):
        """
        Return a JSON-serializable representation of ``obj``.

        Called by the base encoder for objects it cannot serialize itself.

        Args:
            obj (object): Any Python object we wish to convert to JSON.
        Returns:
            str: The stringified, valid JSON representation of our provided
                object.
        """
        if isinstance(obj, decimal.Decimal):
            rendered = format(obj, 'f')
            if '.' in rendered:
                # Drop trailing zeros, then a dangling decimal point.
                return rendered.rstrip('0').rstrip('.')
            return rendered
        if isinstance(obj, types.TypeEngine):
            return str(obj)
        if isinstance(obj, arrow.Arrow):
            return str(obj)
        if isinstance(obj, datetime.datetime):
            # Must be checked before date: datetime is a date subclass.
            if obj.tzinfo:
                # eg: '2015-09-25T23:14:42.588601+00:00'
                return obj.isoformat('T')
            # No timezone present - assume UTC.
            # eg: '2015-09-25T23:14:42.588601Z'
            return obj.isoformat('T') + 'Z'
        if isinstance(obj, datetime.date):
            return obj.isoformat()
        if isinstance(obj, uuid.UUID):
            return str(obj)
        try:
            return list(iter(obj))
        except TypeError:
            pass
        return super(JSONEncoder, self).default(obj)
def _dump_arg_defaults(kwargs):
    """Inject default serialization arguments into *kwargs* in place.

    Prefers the active application's encoder and config when an app
    context exists; otherwise falls back to module defaults.
    """
    if not current_app:
        # No application context: sorted keys, module-level encoder.
        kwargs.setdefault('sort_keys', True)
        kwargs.setdefault('cls', JSONEncoder)
        return
    kwargs.setdefault('cls', current_app.json_encoder)
    if not current_app.config['JSON_AS_ASCII']:
        kwargs.setdefault('ensure_ascii', False)
    kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS'])
def dumps(obj, **kwargs):
    """Serialize ``obj`` to a JSON formatted ``str``.

    Uses the application's configured encoder
    (:attr:`~flask.Flask.json_encoder`) when there is an application on
    the stack. Whether the result is ascii-only is controlled by the
    ``JSON_AS_ASCII`` configuration variable and can be overridden with
    the simplejson ``ensure_ascii`` parameter. An optional ``encoding``
    keyword encodes the resulting string to bytes.
    """
    _dump_arg_defaults(kwargs)
    encoding = kwargs.pop('encoding', None)
    serialized = _json.dumps(obj, **kwargs)
    if encoding is None or not isinstance(serialized, text_type):
        return serialized
    return serialized.encode(encoding)
def dump(obj, fp, **kwargs):
    """Like :func:`dumps` but writes into a file object ``fp``."""
    _dump_arg_defaults(kwargs)
    encoding = kwargs.pop('encoding', None)
    # Binary targets get an encoding text wrapper when requested.
    target = fp if encoding is None else _wrap_writer_for_text(fp, encoding)
    _json.dump(obj, target, **kwargs)
def jsonify(*args, **kwargs):
    """Build a JSON response; adapted from Flask's ``jsonify``.

    Pretty-prints when ``JSONIFY_PRETTYPRINT_REGULAR`` is enabled and the
    request is not an XMLHttpRequest.
    """
    pretty = (current_app.config['JSONIFY_PRETTYPRINT_REGULAR']
              and not request.is_xhr)
    if pretty:
        indent = 2
        separators = (', ', ': ')
    else:
        indent = None
        separators = (',', ':')
    if args and kwargs:
        raise TypeError(
            'jsonify() behavior undefined when passed both args and kwargs')
    if len(args) == 1:  # single args are passed directly to dumps()
        data = args[0]
    else:
        data = args or kwargs
    return current_app.response_class(
        (dumps(data, indent=indent, separators=separators), '\n'),
        mimetype=current_app.config['JSONIFY_MIMETYPE']
    )
| 29.502762
| 79
| 0.626966
|
from __future__ import absolute_import
import datetime
import decimal
import io
import uuid
from flask import current_app
from flask import json as _json
from flask import request
from sqlalchemy import types
import arrow
text_type = str
def _wrap_reader_for_text(fp, encoding):
if isinstance(fp.read(0), bytes):
fp = io.TextIOWrapper(io.BufferedReader(fp), encoding)
return fp
def _wrap_writer_for_text(fp, encoding):
try:
fp.write('')
except TypeError:
fp = io.TextIOWrapper(fp, encoding)
return fp
class JSONEncoder(_json.JSONEncoder):
def __init__(self, *args, **kwargs):
super(JSONEncoder, self).__init__(*args, **kwargs)
self.use_decimal = False
def default(self, obj):
if isinstance(obj, decimal.Decimal):
obj = format(obj, 'f')
str_digit = str(obj)
return (str_digit.rstrip('0').rstrip('.')
if '.' in str_digit
else str_digit)
elif isinstance(obj, types.TypeEngine):
return str(obj)
elif isinstance(obj, arrow.Arrow):
return str(obj)
if isinstance(obj, datetime.datetime):
if obj.tzinfo:
return obj.isoformat('T')
else:
return obj.isoformat('T') + 'Z'
if isinstance(obj, datetime.date):
return obj.isoformat()
elif isinstance(obj, uuid.UUID):
return str(obj)
try:
return list(iter(obj))
except TypeError:
pass
return super(JSONEncoder, self).default(obj)
def _dump_arg_defaults(kwargs):
if current_app:
kwargs.setdefault('cls', current_app.json_encoder)
if not current_app.config['JSON_AS_ASCII']:
kwargs.setdefault('ensure_ascii', False)
kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS'])
else:
kwargs.setdefault('sort_keys', True)
kwargs.setdefault('cls', JSONEncoder)
def dumps(obj, **kwargs):
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
rv = _json.dumps(obj, **kwargs)
if encoding is not None and isinstance(rv, text_type):
rv = rv.encode(encoding)
return rv
def dump(obj, fp, **kwargs):
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
if encoding is not None:
fp = _wrap_writer_for_text(fp, encoding)
_json.dump(obj, fp, **kwargs)
def jsonify(*args, **kwargs):
indent = None
separators = (',', ':')
if current_app.config['JSONIFY_PRETTYPRINT_REGULAR']\
and not request.is_xhr:
indent = 2
separators = (', ', ': ')
if args and kwargs:
raise TypeError(
'jsonify() behavior undefined when passed both args and kwargs')
elif len(args) == 1: data = args[0]
else:
data = args or kwargs
return current_app.response_class(
(dumps(data, indent=indent, separators=separators), '\n'),
mimetype=current_app.config['JSONIFY_MIMETYPE']
)
| true
| true
|
f708ef284ffde13b9c42794d15d4414bf0e90f92
| 5,765
|
py
|
Python
|
matchzoo/layers/matching_layer.py
|
songzy12/MatchZoo
|
a43dc3b1d43b3f2a1b43b11d3fc4009616507e23
|
[
"Apache-2.0"
] | null | null | null |
matchzoo/layers/matching_layer.py
|
songzy12/MatchZoo
|
a43dc3b1d43b3f2a1b43b11d3fc4009616507e23
|
[
"Apache-2.0"
] | null | null | null |
matchzoo/layers/matching_layer.py
|
songzy12/MatchZoo
|
a43dc3b1d43b3f2a1b43b11d3fc4009616507e23
|
[
"Apache-2.0"
] | null | null | null |
"""An implementation of Matching Layer."""
import typing
import tensorflow as tf
from tensorflow.keras import layers
class MatchingLayer(layers.Layer):
    """
    Layer that computes a matching matrix between samples in two tensors.

    :param normalize: Whether to L2-normalize samples along the
        dot product axis before taking the dot product.
        If set to True, then the output of the dot product
        is the cosine proximity between the two samples.
    :param matching_type: the similarity function for matching
    :param kwargs: Standard layer keyword arguments.

    Examples:
        >>> import matchzoo as mz
        >>> layer = mz.layers.MatchingLayer(matching_type='dot',
        ...                                 normalize=True)
        >>> num_batch, left_len, right_len, num_dim = 5, 3, 2, 10
        >>> layer.build([[num_batch, left_len, num_dim],
        ...              [num_batch, right_len, num_dim]])
    """

    def __init__(self, normalize: bool = False,
                 matching_type: str = 'dot', **kwargs):
        """:class:`MatchingLayer` constructor."""
        super().__init__(**kwargs)
        self._normalize = normalize
        # Validate before storing so a bad type fails fast at construction.
        self._validate_matching_type(matching_type)
        self._matching_type = matching_type
        self._shape1 = None
        self._shape2 = None

    @classmethod
    def _validate_matching_type(cls, matching_type: str = 'dot'):
        """Raise :exc:`ValueError` if ``matching_type`` is not supported."""
        valid_matching_type = ['dot', 'mul', 'plus', 'minus', 'concat']
        if matching_type not in valid_matching_type:
            raise ValueError(f"{matching_type} is not a valid matching type, "
                             f"{valid_matching_type} expected.")

    def build(self, input_shape: list):
        """
        Build the layer.

        :param input_shape: the shapes of the input tensors,
            for MatchingLayer we need two input tensors.
        """
        # Used purely for shape validation.
        if not isinstance(input_shape, list) or len(input_shape) != 2:
            raise ValueError('A `MatchingLayer` layer should be called '
                             'on a list of 2 inputs.')
        self._shape1 = input_shape[0]
        self._shape2 = input_shape[1]
        # Batch (axis 0) and embedding (axis 2) dimensions must agree;
        # sequence lengths (axis 1) may differ.
        for idx in 0, 2:
            if self._shape1[idx] != self._shape2[idx]:
                raise ValueError(
                    'Incompatible dimensions: '
                    f'{self._shape1[idx]} != {self._shape2[idx]}.'
                    f'Layer shapes: {self._shape1}, {self._shape2}.'
                )
        # Mark the layer as built (Keras convention).
        super().build(input_shape)

    def call(self, inputs: list, **kwargs) -> typing.Any:
        """
        The computation logic of MatchingLayer.

        :param inputs: two input tensors.
        :return: a matching tensor of rank 4; the last axis size depends
            on ``matching_type`` (1 for 'dot', embedding size for
            elementwise types, summed embedding sizes for 'concat').
        """
        x1 = inputs[0]
        x2 = inputs[1]
        if self._matching_type == 'dot':
            if self._normalize:
                # Cosine similarity: normalize along the embedding axis.
                x1 = tf.math.l2_normalize(x1, axis=2)
                x2 = tf.math.l2_normalize(x2, axis=2)
            return tf.expand_dims(tf.einsum('abd,acd->abc', x1, x2), 3)
        else:
            if self._matching_type == 'mul':
                def func(x, y):
                    return x * y
            elif self._matching_type == 'plus':
                def func(x, y):
                    return x + y
            elif self._matching_type == 'minus':
                def func(x, y):
                    return x - y
            elif self._matching_type == 'concat':
                def func(x, y):
                    return tf.concat([x, y], axis=3)
            else:
                # Fixed typos in the original message ("Mut be", missing
                # spaces between concatenated fragments).
                raise ValueError(f"Invalid matching type. "
                                 f"{self._matching_type} received. "
                                 f"Must be in `dot`, `mul`, `plus`, "
                                 f"`minus` and `concat`.")
            # Tile both inputs so every left position meets every right
            # position, then combine elementwise.
            x1_exp = tf.stack([x1] * self._shape2[1], 2)
            x2_exp = tf.stack([x2] * self._shape1[1], 1)
            return func(x1_exp, x2_exp)

    def compute_output_shape(self, input_shape: list) -> tuple:
        """
        Calculate the layer output shape.

        :param input_shape: the shapes of the input tensors,
            for MatchingLayer we need two input tensors.
        """
        if not isinstance(input_shape, list) or len(input_shape) != 2:
            raise ValueError('A `MatchingLayer` layer should be called '
                             'on a list of 2 inputs.')
        shape1 = list(input_shape[0])
        shape2 = list(input_shape[1])
        if len(shape1) != 3 or len(shape2) != 3:
            raise ValueError('A `MatchingLayer` layer should be called '
                             'on 2 inputs with 3 dimensions.')
        if shape1[0] != shape2[0] or shape1[2] != shape2[2]:
            raise ValueError('A `MatchingLayer` layer should be called '
                             'on 2 inputs with same 0,2 dimensions.')
        if self._matching_type in ['mul', 'plus', 'minus']:
            return shape1[0], shape1[1], shape2[1], shape1[2]
        elif self._matching_type == 'dot':
            return shape1[0], shape1[1], shape2[1], 1
        elif self._matching_type == 'concat':
            return shape1[0], shape1[1], shape2[1], shape1[2] + shape2[2]
        else:
            # Fixed spacing between concatenated fragments.
            raise ValueError(f"Invalid `matching_type`. "
                             f"{self._matching_type} received. "
                             f"Must be in `mul`, `plus`, `minus` "
                             f"`dot` and `concat`.")

    def get_config(self) -> dict:
        """Get the config dict of MatchingLayer."""
        config = {
            'normalize': self._normalize,
            'matching_type': self._matching_type,
        }
        base_config = super(MatchingLayer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
| 40.886525
| 78
| 0.542411
|
import typing
import tensorflow as tf
from tensorflow.keras import layers
class MatchingLayer(layers.Layer):
def __init__(self, normalize: bool = False,
matching_type: str = 'dot', **kwargs):
super().__init__(**kwargs)
self._normalize = normalize
self._validate_matching_type(matching_type)
self._matching_type = matching_type
self._shape1 = None
self._shape2 = None
@classmethod
def _validate_matching_type(cls, matching_type: str = 'dot'):
valid_matching_type = ['dot', 'mul', 'plus', 'minus', 'concat']
if matching_type not in valid_matching_type:
raise ValueError(f"{matching_type} is not a valid matching type, "
f"{valid_matching_type} expected.")
def build(self, input_shape: list):
if not isinstance(input_shape, list) or len(input_shape) != 2:
raise ValueError('A `MatchingLayer` layer should be called '
'on a list of 2 inputs.')
self._shape1 = input_shape[0]
self._shape2 = input_shape[1]
for idx in 0, 2:
if self._shape1[idx] != self._shape2[idx]:
raise ValueError(
'Incompatible dimensions: '
f'{self._shape1[idx]} != {self._shape2[idx]}.'
f'Layer shapes: {self._shape1}, {self._shape2}.'
)
def call(self, inputs: list, **kwargs) -> typing.Any:
x1 = inputs[0]
x2 = inputs[1]
if self._matching_type == 'dot':
if self._normalize:
x1 = tf.math.l2_normalize(x1, axis=2)
x2 = tf.math.l2_normalize(x2, axis=2)
return tf.expand_dims(tf.einsum('abd,acd->abc', x1, x2), 3)
else:
if self._matching_type == 'mul':
def func(x, y):
return x * y
elif self._matching_type == 'plus':
def func(x, y):
return x + y
elif self._matching_type == 'minus':
def func(x, y):
return x - y
elif self._matching_type == 'concat':
def func(x, y):
return tf.concat([x, y], axis=3)
else:
raise ValueError(f"Invalid matching type."
f"{self._matching_type} received."
f"Mut be in `dot`, `mul`, `plus`, "
f"`minus` and `concat`.")
x1_exp = tf.stack([x1] * self._shape2[1], 2)
x2_exp = tf.stack([x2] * self._shape1[1], 1)
return func(x1_exp, x2_exp)
def compute_output_shape(self, input_shape: list) -> tuple:
if not isinstance(input_shape, list) or len(input_shape) != 2:
raise ValueError('A `MatchingLayer` layer should be called '
'on a list of 2 inputs.')
shape1 = list(input_shape[0])
shape2 = list(input_shape[1])
if len(shape1) != 3 or len(shape2) != 3:
raise ValueError('A `MatchingLayer` layer should be called '
'on 2 inputs with 3 dimensions.')
if shape1[0] != shape2[0] or shape1[2] != shape2[2]:
raise ValueError('A `MatchingLayer` layer should be called '
'on 2 inputs with same 0,2 dimensions.')
if self._matching_type in ['mul', 'plus', 'minus']:
return shape1[0], shape1[1], shape2[1], shape1[2]
elif self._matching_type == 'dot':
return shape1[0], shape1[1], shape2[1], 1
elif self._matching_type == 'concat':
return shape1[0], shape1[1], shape2[1], shape1[2] + shape2[2]
else:
raise ValueError(f"Invalid `matching_type`."
f"{self._matching_type} received."
f"Must be in `mul`, `plus`, `minus` "
f"`dot` and `concat`.")
def get_config(self) -> dict:
config = {
'normalize': self._normalize,
'matching_type': self._matching_type,
}
base_config = super(MatchingLayer, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
| true
| true
|
f708ef89a08aef7d612c148ff2981aa853e4aef5
| 3,386
|
py
|
Python
|
test/noise.py
|
738844605/DualResidualNetworks
|
6d025e074d4c914fae86f51cd8b93569a2c05335
|
[
"MIT"
] | 144
|
2019-04-08T02:22:00.000Z
|
2022-02-13T09:11:33.000Z
|
test/noise.py
|
738844605/DualResidualNetworks
|
6d025e074d4c914fae86f51cd8b93569a2c05335
|
[
"MIT"
] | 14
|
2019-05-09T09:07:08.000Z
|
2020-07-20T15:45:41.000Z
|
test/noise.py
|
738844605/DualResidualNetworks
|
6d025e074d4c914fae86f51cd8b93569a2c05335
|
[
"MIT"
] | 27
|
2019-07-19T03:09:20.000Z
|
2021-12-13T07:48:57.000Z
|
# python 2.7, pytorch 0.3.1
import os, sys
sys.path.insert(1, '../')
import torch
import cv2
import shutil
import torchvision
import numpy as np
import itertools
import subprocess
import random
import matplotlib.pyplot as plt
import torch.nn as nn
import torch.optim as optim
import torchvision.transforms as transforms
import torch.nn.functional as F
from torch.autograd import Variable
from torch.utils.data import DataLoader
from PIL import Image
from pietorch import data_convertors
from pietorch.DuRN_P import cleaner as cleaner
from pietorch.DuRN_P_no_norm import cleaner as cleaner_no_norm
from pietorch.pytorch_ssim import ssim as ssim
from skimage.measure import compare_psnr as psnr
from skimage.measure import compare_ssim as ski_ssim
#------ Options -------
# Model/dataset selection. The two valid (tag, data_name) pairings are
# ('DuRN_P', 'BSD_gray') and ('DuRN_P_no_norm', 'RealNoiseHKPoly').
tag = 'DuRN_P_no_norm' # 'DuRN_P' or 'DuRN_P_no_norm' for gaussion or real-world noise removal
data_name = 'RealNoiseHKPoly' # 'BSD_gray' or 'RealNoiseHKPoly'
# Gaussian noise level. Comment it if you set data_name = 'RealNoiseHKPoly'.
# NOTE(review): noise_level MUST be uncommented when data_name == 'BSD_gray',
# otherwise the BSD_gray branches below raise NameError.
#noise_level = 70 # choose one from [30, 50, 70]
#----------------------
# Resolve the test-set image root and the file-list path for the dataset.
if data_name == 'BSD_gray':
    testroot = "../data/"+data_name+"/test/"
    test_list_pth = '../lists/'+data_name+'/testlist.txt'
else:
    testroot = "../data/"+data_name+"/test1/"
    test_list_pth = '../lists/'+data_name+'/test1_list.txt'
# Pretrained weights and output directory (created if missing).
Pretrained = '../trainedmodels/'+data_name+'/'+tag+'_model.pt'
show_dst = '../cleaned_images/'+data_name+'/'+tag+'/'
subprocess.check_output(['mkdir', '-p', show_dst])
# Make the transformer and the network
if data_name == 'BSD_gray':
    # The list form bundles the ToTensor transform with the noise level;
    # presumably ConvertImageSet adds synthetic Gaussian noise - confirm.
    transform = [transforms.ToTensor(), noise_level]
    cleaner = cleaner().cuda()
else:
    transform = transforms.ToTensor()
    cleaner = cleaner_no_norm().cuda()
cleaner.load_state_dict(torch.load(Pretrained))
cleaner.eval()
# Make the dataloader
convertor = data_convertors.ConvertImageSet(testroot, test_list_pth, data_name,
                                            transform=transform)
dataloader = DataLoader(convertor, batch_size=1, shuffle=False, num_workers=1)
# Running PSNR/SSIM accumulators, averaged over ct_num images at the end.
ave_psnr = 0
ave_ssim = 0
ct_num = 0
for i, data in enumerate(dataloader):
    ct_num+= 1.0
    im_input, label, im_name = data
    im_input = Variable(im_input, requires_grad=False).cuda()
    res = cleaner(im_input)
    # Clamp the network output to [0, 1] and rescale to 8-bit range.
    res = res.data.cpu().numpy()
    res[res>1] = 1
    res[res<0] = 0
    res*= 255
    if data_name == 'BSD_gray':
        # Grayscale: take the single channel of the single batch item.
        res = res.astype(np.uint8)[0,0]
        label = label.numpy()[0,0]
        label*= 255
        label = label.astype(np.uint8)
        cv2.imwrite(show_dst+im_name[0].split('.')[0]+'_'+str(noise_level)+'.png', res)
        ave_psnr+= psnr(res, label, data_range=255)
        ave_ssim+= ski_ssim(res, label, data_range=255, multichannel=False)
    elif data_name == 'RealNoiseHKPoly':
        # Color: CHW -> HWC for PIL/skimage.
        res = res.astype(np.uint8)[0]
        res = res.transpose((1,2,0))
        label = label.numpy()[0].transpose((1,2,0))
        label*= 255
        label = label.astype(np.uint8)
        Image.fromarray(res).save(show_dst+im_name[0].split('real')[0]+'.png')
        ave_psnr+= psnr(res, label, data_range=255)
        ave_ssim+= ski_ssim(res, label, data_range=255, multichannel=True)
    else:
        print('Unknown dataset name.')
print('psnr: '+str(ave_psnr/ct_num))
print('ssim: '+str(ave_ssim/ct_num))
print('Test done.')
| 33.196078
| 94
| 0.66775
|
import os, sys
sys.path.insert(1, '../')
import torch
import cv2
import shutil
import torchvision
import numpy as np
import itertools
import subprocess
import random
import matplotlib.pyplot as plt
import torch.nn as nn
import torch.optim as optim
import torchvision.transforms as transforms
import torch.nn.functional as F
from torch.autograd import Variable
from torch.utils.data import DataLoader
from PIL import Image
from pietorch import data_convertors
from pietorch.DuRN_P import cleaner as cleaner
from pietorch.DuRN_P_no_norm import cleaner as cleaner_no_norm
from pietorch.pytorch_ssim import ssim as ssim
from skimage.measure import compare_psnr as psnr
from skimage.measure import compare_ssim as ski_ssim
tag = 'DuRN_P_no_norm' data_name = 'RealNoiseHKPoly'
if data_name == 'BSD_gray':
testroot = "../data/"+data_name+"/test/"
test_list_pth = '../lists/'+data_name+'/testlist.txt'
else:
testroot = "../data/"+data_name+"/test1/"
test_list_pth = '../lists/'+data_name+'/test1_list.txt'
Pretrained = '../trainedmodels/'+data_name+'/'+tag+'_model.pt'
show_dst = '../cleaned_images/'+data_name+'/'+tag+'/'
subprocess.check_output(['mkdir', '-p', show_dst])
if data_name == 'BSD_gray':
transform = [transforms.ToTensor(), noise_level]
cleaner = cleaner().cuda()
else:
transform = transforms.ToTensor()
cleaner = cleaner_no_norm().cuda()
cleaner.load_state_dict(torch.load(Pretrained))
cleaner.eval()
convertor = data_convertors.ConvertImageSet(testroot, test_list_pth, data_name,
transform=transform)
dataloader = DataLoader(convertor, batch_size=1, shuffle=False, num_workers=1)
ave_psnr = 0
ave_ssim = 0
ct_num = 0
for i, data in enumerate(dataloader):
ct_num+= 1.0
im_input, label, im_name = data
im_input = Variable(im_input, requires_grad=False).cuda()
res = cleaner(im_input)
res = res.data.cpu().numpy()
res[res>1] = 1
res[res<0] = 0
res*= 255
if data_name == 'BSD_gray':
res = res.astype(np.uint8)[0,0]
label = label.numpy()[0,0]
label*= 255
label = label.astype(np.uint8)
cv2.imwrite(show_dst+im_name[0].split('.')[0]+'_'+str(noise_level)+'.png', res)
ave_psnr+= psnr(res, label, data_range=255)
ave_ssim+= ski_ssim(res, label, data_range=255, multichannel=False)
elif data_name == 'RealNoiseHKPoly':
res = res.astype(np.uint8)[0]
res = res.transpose((1,2,0))
label = label.numpy()[0].transpose((1,2,0))
label*= 255
label = label.astype(np.uint8)
Image.fromarray(res).save(show_dst+im_name[0].split('real')[0]+'.png')
ave_psnr+= psnr(res, label, data_range=255)
ave_ssim+= ski_ssim(res, label, data_range=255, multichannel=True)
else:
print('Unknown dataset name.')
print('psnr: '+str(ave_psnr/ct_num))
print('ssim: '+str(ave_ssim/ct_num))
print('Test done.')
| true
| true
|
f708f0c7bbe3d285c8aedcc6394fd2a3abb0e815
| 5,624
|
py
|
Python
|
main.py
|
sayabiws/simple-image-recommender
|
27162c544fc08b5774049039694f0fa7c7faac3f
|
[
"MIT"
] | null | null | null |
main.py
|
sayabiws/simple-image-recommender
|
27162c544fc08b5774049039694f0fa7c7faac3f
|
[
"MIT"
] | null | null | null |
main.py
|
sayabiws/simple-image-recommender
|
27162c544fc08b5774049039694f0fa7c7faac3f
|
[
"MIT"
] | null | null | null |
# Simple image recommender
#
# required:
# data/images: a folder containing your images dataset
# data/users: can be empty, but the folder needs to exist (for now ?)
#
# optional:
# data/tags.csv: a comma-separated list containing the names of your
# images and the corresponding semicolon-separated tags
# (eg. "37.png,sky;blue;cliff")
# Libraries import
from PIL import Image
from sklearn.cluster import MiniBatchKMeans
from operator import itemgetter
import pandas
from sklearn.ensemble import RandomForestClassifier
import numpy as np
import pandas as pd
import json
import math
import os
import json
import csv
# User data gathering
def user_data_gathering():
    """Prompt for a username, load their preference file, and let them
    edit their favourite/disliked image lists.

    Reads ``data/users/<name>.txt`` (line 1: favourites, line 2:
    dislikes, both comma-separated), creating it if absent, and writes
    the updated lists back.

    Returns:
        tuple: ``(user_favs, user_dislikes)`` as lists of image names.
    """
    name = input("Please enter your username: ")
    user_favs = []
    user_dislikes = []
    try:
        with open("data/users/" + name + ".txt", "r") as userfile:
            user_favs = userfile.readline().rstrip().split(",")
            user_dislikes = userfile.readline().rstrip().split(",")
    except FileNotFoundError:
        print("This user doesn't exist. Creating it...")
    if not user_favs:
        print("No favourite images defined!")
    if not user_dislikes:
        print("No disliked images defined!")
    do_fav = input("Would you like to define your favourite images? ([y]es/[n]o/[a]dd): ")
    if do_fav == "y":
        user_favs = input("Please enter your favourite images, separated by a comma: ").split(",")
    elif do_fav == "a":
        user_favs += input("Please enter the images you want to add, separated by a comma: ").split(",")
    elif do_fav == "n":
        pass
    else:
        print("Incorrect choice. Exiting")
        exit()
    do_dislike = input("Would you like to define your disliked images? ([y]es/[n]o/[a]dd): ")
    if do_dislike == "y":
        user_dislikes = input("Please enter your disliked images, separated by a comma: ").split(",")
    elif do_dislike == "a":
        user_dislikes += input("Please enter the images you want to add, separated by a comma: ").split(",")
    elif do_dislike == "n":
        pass
    else:
        print("Incorrect choice. Exiting")
        exit()
    # Persist with a context manager so the handle is closed even if a
    # write fails (the original leaked the open handle on error).
    with open("data/users/" + name + ".txt", "w+") as userfile:
        userfile.write(",".join(user_favs) + "\n")
        userfile.write(",".join(user_dislikes) + "\n")
    return user_favs, user_dislikes
# Get all images filenames in data/images/
def get_image_list():
    """Return the image filenames found in ``data/images/``.

    Returns:
        list: bare filenames (no directory prefix) whose extension is one
        of png/jpg/gif/tif/bmp, in ``os.listdir`` order.
    """
    # str.endswith accepts a tuple, replacing the original or-chain.
    image_exts = (".png", ".jpg", ".gif", ".tif", ".bmp")
    return [f for f in os.listdir("data/images") if f.endswith(image_exts)]
# Get color clusters per image
def get_clusters(filename, n_clusters):
    """Compute the ``n_clusters`` dominant colors of an image.

    Opens ``data/images/<filename>``, clusters its RGBA pixels with
    MiniBatchKMeans, and returns a list of (r, g, b) tuples ordered from
    least to most populated cluster (ceil of the cluster centers).
    """
    image = Image.open("data/images/" + filename).convert('RGBA')
    pixels = np.array(image.getdata(), np.uint8)
    kmeans = MiniBatchKMeans(n_clusters=n_clusters)
    kmeans.fit(pixels)
    bin_edges = np.arange(0, n_clusters + 1)
    counts, edges = np.histogram(kmeans.labels_, bins=bin_edges)
    # Pair each cluster label (the left bin edge) with its pixel count and
    # order by ascending count; zip truncates the extra right edge.
    ordered = sorted(zip(counts, edges), key=itemgetter(0))
    colors = []
    for _, label in ordered:
        center = kmeans.cluster_centers_[label]
        colors.append(
            (
                math.ceil(center[0]),
                math.ceil(center[1]),
                math.ceil(center[2])
            )
        )
    return colors
# Returns a pandas dataframe with the tags info
def get_tags(filename):
    """Return a pandas DataFrame with the tags info.

    The CSV's ``tags`` column is split on ``;`` into lists.

    Args:
        filename (str): path to the tags CSV file.
    Returns:
        pandas.DataFrame: tags table; empty (columns ``name``/``tags``)
        when the file does not exist.
    """
    try:
        tags_df = pd.read_csv(filename)
    except FileNotFoundError:
        print("No tags have been defined. Ignoring tags.")
        # Bug fix: the original fell through with tags_df unbound and
        # raised NameError on the next line; return an empty table instead.
        return pd.DataFrame(columns=["name", "tags"])
    tags_df["tags"] = tags_df.tags.str.split(";")
    return tags_df
# Clean the clusters data
def clean_data(clusters):
    """Pack each image's (r, g, b) color tuples into single 24-bit ints.

    Mutates ``clusters`` in place (each ``image["colors"]`` entry becomes
    ``r<<16 | g<<8 | b``) and returns the same list.

    Args:
        clusters (list): dicts with ``"name"`` and ``"colors"`` keys.
    Returns:
        list: the mutated ``clusters`` list.
    """
    for image in clusters:
        # Comprehension replaces the append loop; the original's trailing
        # `tmp = []` reset was dead code and has been removed.
        image["colors"] = [
            (color[0] << 16) | (color[1] << 8) | color[2]
            for color in image["colors"]
        ]
    return clusters
# The actual prediction algorithm
def predict(clusters, user_fav, user_dislikes):
    """Train a random forest on the user's favourites and print the
    image names predicted as favourites that the user has not yet rated.

    Note: the model is trained and evaluated on the same images, so only
    names absent from both ``user_fav`` and ``user_dislikes`` are printed.
    """
    images = sorted(clusters, key=lambda img: img['name'])
    # Feature matrix: one packed color int per cluster, per image.
    features = [img["colors"] for img in images]
    targets = [(img['name'] in user_fav) for img in images]
    feature_df = pandas.DataFrame(features, columns=['color1', 'color2', 'color3'])
    target_df = pandas.DataFrame(targets, columns=['favorite'])
    classifier = RandomForestClassifier(n_estimators=10, max_depth=10)
    classifier = classifier.fit(feature_df, target_df.values.ravel())
    predicted = classifier.predict([img['colors'] for img in images])
    print("# Predicted as favorites")
    for index, favorite in enumerate(predicted):
        name = images[index]['name']
        # Only print new images
        if favorite and name not in user_fav and name not in user_dislikes:
            print(name)
# Main function
def main():
    """Entry point: load images and cached clusters, gather user
    preferences, and print recommendations."""
    print("Loading...")
    print(" -- Looking up images...")
    imagelist = get_image_list()
    print(" -- Calculating color clusters (this can take some time if it has never been done before)...")
    n_clusters = 3
    try:
        # Context managers close the handles the original leaked.
        with open("data/clusters.json", "r") as clusters_file:
            clusters = json.load(clusters_file)
    except (FileNotFoundError, ValueError):
        # Missing or corrupt cache: recompute and rewrite it.
        # (json.JSONDecodeError subclasses ValueError; the original bare
        # `except:` also hid unrelated errors such as KeyboardInterrupt.)
        clusters = [{"name": filename, "colors": get_clusters(filename, n_clusters)}
                    for filename in imagelist]
        with open("data/clusters.json", "w") as clusters_file:
            clusters_file.write(json.dumps(clusters))
    print(" -- Extracting tags...")
    tags = get_tags("data/tags.csv")
    print("Loading done!")
    # Gathering user data
    print("Gathering user data...")
    (user_favs, user_dislikes) = user_data_gathering()
    # Recommendation system
    print("Computing recommendation...")
    cleanedclusters = clean_data(clusters)
    predict(cleanedclusters, user_favs, user_dislikes)


if __name__ == "__main__":
    main()
| 29.914894
| 127
| 0.707681
|
from PIL import Image
from sklearn.cluster import MiniBatchKMeans
from operator import itemgetter
import pandas
from sklearn.ensemble import RandomForestClassifier
import numpy as np
import pandas as pd
import json
import math
import os
import json
import csv
def user_data_gathering():
name = input("Please enter your username: ")
user_favs = []
user_dislikes = []
try:
with open("data/users/" + name + ".txt", "r") as userfile:
user_favs = userfile.readline().rstrip().split(",")
user_dislikes = userfile.readline().rstrip().split(",")
except FileNotFoundError:
print("This user doesn't exist. Creating it...")
if not user_favs:
print("No favourite images defined!")
if not user_dislikes:
print("No disliked images defined!")
do_fav = input("Would you like to define your favourite images? ([y]es/[n]o/[a]dd): ")
if do_fav == "y":
user_favs = input("Please enter your favourite images, separated by a comma: ").split(",")
elif do_fav == "a":
user_favs += input("Please enter the images you want to add, separated by a comma: ").split(",")
elif do_fav == "n":
pass
else:
print("Incorrect choice. Exiting")
exit()
do_dislike = input("Would you like to define your disliked images? ([y]es/[n]o/[a]dd): ")
if do_dislike == "y":
user_dislikes = input("Please enter your disliked images, separated by a comma: ").split(",")
elif do_dislike == "a":
user_dislikes += input("Please enter the images you want to add, separated by a comma: ").split(",")
elif do_dislike == "n":
pass
else:
print("Incorrect choice. Exiting")
exit()
userfile = open("data/users/" + name + ".txt", "w+")
userfile.write(",".join(user_favs) + "\n")
userfile.write(",".join(user_dislikes) + "\n")
userfile.close()
return user_favs,user_dislikes
# Get all images filenames in data/images/
def get_image_list():
imagelist = []
for file in os.listdir("data/images"):
if file.endswith(".png") or file.endswith(".jpg") or file.endswith(".gif") or file.endswith(".tif") or file.endswith(".bmp"):
imagelist.append(file)
return imagelist
# Get color clusters per image
def get_clusters(filename, n_clusters):
imgfile = Image.open("data/images/" + filename).convert('RGBA')
numarray = np.array(imgfile.getdata(), np.uint8)
clusters = MiniBatchKMeans(n_clusters=n_clusters)
clusters.fit(numarray)
npbins = np.arange(0, n_clusters + 1)
histogram = np.histogram(clusters.labels_, bins=npbins)
# Sort histogram
pairs = sorted(zip(histogram[0], histogram[1]), key=itemgetter(0))
histogram = (np.array([v for v, i in pairs]),
np.array([i for v, i in pairs]))
colors = []
for i in range(n_clusters):
j = histogram[1][i]
colors.append(
(
math.ceil(clusters.cluster_centers_[j][0]),
math.ceil(clusters.cluster_centers_[j][1]),
math.ceil(clusters.cluster_centers_[j][2])
)
)
return colors
# Returns a pandas dataframe with the tags info
def get_tags(filename):
    """Load image tags from a CSV file.

    The CSV is expected to contain a ``tags`` column whose values are
    ``;``-separated tag strings; they are split into Python lists.

    Args:
        filename: path to the tags CSV file.

    Returns:
        DataFrame with the ``tags`` column split into lists, or an empty
        DataFrame with a ``tags`` column when the file does not exist.
    """
    try:
        tags_df = pd.read_csv(filename)
    except FileNotFoundError:
        # Bug fix: the original fell through with ``tags_df`` undefined and
        # raised a NameError on the next line; return an empty frame instead.
        print("No tags have been defined. Ignoring tags.")
        return pd.DataFrame({"tags": []})
    tags_df["tags"] = tags_df.tags.str.split(";")
    return tags_df
# Clean the clusters data
def clean_data(clusters):
    """Pack every (r, g, b) cluster color into one 24-bit integer.

    The dicts are modified in place and the same list is returned.
    """
    for entry in clusters:
        entry["colors"] = [
            (r << 16) | (g << 8) | b for (r, g, b) in entry["colors"]
        ]
    return clusters
# The actual prediction algorithm
def predict(clusters, user_fav, user_dislikes):
    """Train a classifier on the user's favorites and print predicted ones.

    Args:
        clusters: list of ``{"name": ..., "colors": [c1, c2, c3]}`` dicts,
            with colors already packed into single ints (see clean_data).
        user_fav: image names the user marked as favorites (positive labels).
        user_dislikes: image names the user dislikes; only used to suppress
            output, not during training.
    """
    images = sorted(clusters, key=lambda x: x['name'])
    color_clusters = [image["colors"] for image in images]
    # Build training data
    training_data = color_clusters
    # Label an image True when it is among the user's favorites.
    result_data = [(image['name'] in user_fav) for image in images]
    # Build dataframes
    training_df = pandas.DataFrame(training_data, columns=['color1', 'color2', 'color3'])
    result_df = pandas.DataFrame(result_data, columns=['favorite'])
    # Train decision tree
    classifier = RandomForestClassifier(n_estimators=10, max_depth=10)
    classifier = classifier.fit(training_df, result_df.values.ravel())
    # NOTE(review): prediction runs on the same rows used for training, so
    # this surfaces images whose colors resemble existing favorites.
    predicted = classifier.predict(list(map(lambda x: x['colors'], images)))
    print("# Predicted as favorites")
    for index, favorite in enumerate(predicted):
        name = images[index]['name']
        # Only print new images
        if favorite and name not in user_fav and name not in user_dislikes:
            print(name)
# Main function
def main():
    """Entry point: load image data, gather user preferences, and print
    color-based image recommendations."""
    print("Loading...")
    print(" -- Looking up images...")
    imagelist = get_image_list()
    print(" -- Calculating color clusters (this can take some time if it has never been done before)...")
    n_clusters = 3
    try:
        # Reuse previously computed clusters when a valid cache exists.
        with open("data/clusters.json", "r") as clusters_file:
            clusters = json.load(clusters_file)
    except (OSError, ValueError):
        # Bug fix: the original bare ``except:`` hid every error and the file
        # handles were never closed. Catch only a missing/unreadable or
        # corrupt cache (json.JSONDecodeError is a ValueError), then
        # recompute and persist for next time.
        clusters = [{"name": filename, "colors": get_clusters(filename, n_clusters)}
                    for filename in imagelist]
        with open("data/clusters.json", "w") as clusters_file:
            json.dump(clusters, clusters_file)
    print(" -- Extracting tags...")
    tags = get_tags("data/tags.csv")  # loaded but currently unused below
    print("Loading done!")
    # Gathering user data
    print("Gathering user data...")
    (user_favs, user_dislikes) = user_data_gathering()
    # Recommendation system
    print("Computing recommendation...")
    cleanedclusters = clean_data(clusters)
    predict(cleanedclusters, user_favs, user_dislikes)
if __name__ == "__main__":
    main()
| true
| true
|
f708f23f4476ea85a0d78e7a4200c72925111a1e
| 3,509
|
py
|
Python
|
tensorflow/python/pywrap_tensorflow.py
|
Nickmeagan70/tensorflow
|
6bfedde8466daced9f40a0e11840f5ce274abc7d
|
[
"Apache-2.0"
] | 7
|
2022-03-04T21:14:47.000Z
|
2022-03-22T23:07:39.000Z
|
tensorflow/python/pywrap_tensorflow.py
|
Nickmeagan70/tensorflow
|
6bfedde8466daced9f40a0e11840f5ce274abc7d
|
[
"Apache-2.0"
] | 1
|
2022-03-08T18:28:46.000Z
|
2022-03-08T18:37:20.000Z
|
tensorflow/python/pywrap_tensorflow.py
|
Nickmeagan70/tensorflow
|
6bfedde8466daced9f40a0e11840f5ce274abc7d
|
[
"Apache-2.0"
] | 1
|
2022-03-22T00:45:15.000Z
|
2022-03-22T00:45:15.000Z
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""A Python wrapper that loads _pywrap_tensorflow_internal.so."""
import ctypes
import sys
import traceback
from tensorflow.python.platform import self_check
# TODO(mdan): Cleanup antipattern: import for side effects.
# Perform pre-load sanity checks in order to produce a more actionable error.
self_check.preload_check()

# pylint: disable=wildcard-import,g-import-not-at-top,unused-import,line-too-long

try:
  # This import is expected to fail if there is an explicit shared object
  # dependency (with_framework_lib=true), since we do not need RTLD_GLOBAL.
  from tensorflow.python import pywrap_dlopen_global_flags
  _use_dlopen_global_flags = True
except ImportError:
  _use_dlopen_global_flags = False

# On UNIX-based platforms, pywrap_tensorflow is a python library that
# dynamically loads _pywrap_tensorflow.so.
_can_set_rtld_local = (
    hasattr(sys, 'getdlopenflags') and hasattr(sys, 'setdlopenflags'))
if _can_set_rtld_local:
  # Remember the interpreter's original flags so they can be restored below.
  _default_dlopen_flags = sys.getdlopenflags()

try:
  if _use_dlopen_global_flags:
    pywrap_dlopen_global_flags.set_dlopen_flags()
  elif _can_set_rtld_local:
    # Ensure RTLD_LOCAL behavior for platforms where it isn't the default
    # (macOS). On Linux RTLD_LOCAL is 0, so this does nothing (and would not
    # override an RTLD_GLOBAL in _default_dlopen_flags).
    sys.setdlopenflags(_default_dlopen_flags | ctypes.RTLD_LOCAL)

  # Python2.7 does not have a ModuleNotFoundError.
  try:
    ModuleNotFoundError
  except NameError:
    ModuleNotFoundError = ImportError  # pylint: disable=redefined-builtin

  # pylint: disable=wildcard-import,g-import-not-at-top,line-too-long,undefined-variable
  try:
    from tensorflow.python._pywrap_tensorflow_internal import *
  # This try catch logic is because there is no bazel equivalent for py_extension.
  # Externally in opensource we must enable exceptions to load the shared object
  # by exposing the PyInit symbols with pybind. This error will only be
  # caught internally or if someone changes the name of the target _pywrap_tensorflow_internal.
  # This logic is used in other internal projects using py_extension.
  except ModuleNotFoundError:
    pass

  if _use_dlopen_global_flags:
    pywrap_dlopen_global_flags.reset_dlopen_flags()
  elif _can_set_rtld_local:
    # Restore the interpreter's original dlopen flags.
    sys.setdlopenflags(_default_dlopen_flags)
except ImportError:
  raise ImportError(
      f'{traceback.format_exc()}'
      f'\n\nFailed to load the native TensorFlow runtime.\n'
      f'See https://www.tensorflow.org/install/errors '
      f'for some common causes and solutions.\n'
      f'If you need help, create an issue '
      f'at https://github.com/tensorflow/tensorflow/issues '
      f'and include the entire stack trace above this error message.')
# pylint: enable=wildcard-import,g-import-not-at-top,unused-import,line-too-long
| 40.333333
| 95
| 0.756341
|
import ctypes
import sys
import traceback
from tensorflow.python.platform import self_check
self_check.preload_check()
try:
from tensorflow.python import pywrap_dlopen_global_flags
_use_dlopen_global_flags = True
except ImportError:
_use_dlopen_global_flags = False
_can_set_rtld_local = (
hasattr(sys, 'getdlopenflags') and hasattr(sys, 'setdlopenflags'))
if _can_set_rtld_local:
_default_dlopen_flags = sys.getdlopenflags()
try:
if _use_dlopen_global_flags:
pywrap_dlopen_global_flags.set_dlopen_flags()
elif _can_set_rtld_local:
# (macOS). On Linux RTLD_LOCAL is 0, so this does nothing (and would not
# override an RTLD_GLOBAL in _default_dlopen_flags).
sys.setdlopenflags(_default_dlopen_flags | ctypes.RTLD_LOCAL)
# Python2.7 does not have a ModuleNotFoundError.
try:
ModuleNotFoundError
except NameError:
ModuleNotFoundError = ImportError # pylint: disable=redefined-builtin
# pylint: disable=wildcard-import,g-import-not-at-top,line-too-long,undefined-variable
try:
from tensorflow.python._pywrap_tensorflow_internal import *
# This try catch logic is because there is no bazel equivalent for py_extension.
# Externally in opensource we must enable exceptions to load the shared object
# by exposing the PyInit symbols with pybind. This error will only be
# caught internally or if someone changes the name of the target _pywrap_tensorflow_internal.
# This logic is used in other internal projects using py_extension.
except ModuleNotFoundError:
pass
if _use_dlopen_global_flags:
pywrap_dlopen_global_flags.reset_dlopen_flags()
elif _can_set_rtld_local:
sys.setdlopenflags(_default_dlopen_flags)
except ImportError:
raise ImportError(
f'{traceback.format_exc()}'
f'\n\nFailed to load the native TensorFlow runtime.\n'
f'See https://www.tensorflow.org/install/errors '
f'for some common causes and solutions.\n'
f'If you need help, create an issue '
f'at https://github.com/tensorflow/tensorflow/issues '
f'and include the entire stack trace above this error message.')
# pylint: enable=wildcard-import,g-import-not-at-top,unused-import,line-too-long
| true
| true
|
f708f2f0b553930c894e78d06a8b5edf7efb032f
| 6,398
|
py
|
Python
|
src/scipp/plotting/tools.py
|
nvaytet/scipp
|
f14f56ed19cccb4162d55b1123df7225eeedb395
|
[
"BSD-3-Clause"
] | 43
|
2019-04-08T14:13:11.000Z
|
2022-02-08T06:09:35.000Z
|
src/scipp/plotting/tools.py
|
nvaytet/scipp
|
f14f56ed19cccb4162d55b1123df7225eeedb395
|
[
"BSD-3-Clause"
] | 1,342
|
2019-03-30T07:06:08.000Z
|
2022-03-28T13:12:47.000Z
|
src/scipp/plotting/tools.py
|
nvaytet/scipp
|
f14f56ed19cccb4162d55b1123df7225eeedb395
|
[
"BSD-3-Clause"
] | 12
|
2019-06-13T08:56:12.000Z
|
2021-11-04T08:24:18.000Z
|
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2021 Scipp contributors (https://github.com/scipp)
# @author Neil Vaytet
from .. import config
from ..core import concatenate, values, dtype, units, nanmin, nanmax, histogram, \
full_like
from ..core import Variable, DataArray
from ..core import abs as abs_
import numpy as np
from copy import copy
import io
def get_line_param(name=None, index=None):
    """
    Look up the default line property `name` from the plot config.
    The configured list is cycled through using `index`.
    """
    options = getattr(config.plot, name)
    return options[index % len(options)]
def to_bin_centers(x, dim):
    """
    Convert bin-edge coordinates to bin centers by averaging neighbouring
    edges along `dim`.
    """
    lower = x[dim, :-1]
    upper = x[dim, 1:]
    return 0.5 * (upper + lower)
def to_bin_edges(x, dim):
    """
    Convert array centers to edges
    """
    idim = x.dims.index(dim)
    if x.shape[idim] < 2:
        # With fewer than two centers the spacing is unknown: fall back to a
        # symmetric +/- 1 interval (in the coordinate's unit) around the
        # single center.
        one = 1.0 * x.unit
        return concatenate(x[dim, 0:1] - one, x[dim, 0:1] + one, dim)
    else:
        center = to_bin_centers(x, dim)
        # Note: use range of 0:1 to keep dimension dim in the slice to avoid
        # switching round dimension order in concatenate step.
        # The outer edges are extrapolated from the spacing of the first and
        # last pairs of centers.
        left = center[dim, 0:1] - (x[dim, 1] - x[dim, 0])
        right = center[dim, -1] + (x[dim, -1] - x[dim, -2])
        return concatenate(concatenate(left, center, dim), right, dim)
def parse_params(params=None, defaults=None, globs=None, array=None):
    """
    Construct the colorbar settings using default and input values.

    Precedence, lowest to highest: global config params, `defaults`,
    `globs` (only non-None entries), `params`. The resolved "norm" string is
    replaced by a matplotlib normalizer instance and "color"/"cmap" by a
    matplotlib colormap. `array` is accepted but not used in this function.
    """
    from matplotlib.colors import Normalize, LogNorm, LinearSegmentedColormap
    from matplotlib import cm
    parsed = dict(config.plot.params)
    if defaults is not None:
        for key, val in defaults.items():
            parsed[key] = val
    if globs is not None:
        for key, val in globs.items():
            # Global parameters need special treatment because by default they
            # are set to None, and we don't want to overwrite the defaults.
            if val is not None:
                parsed[key] = val
    if params is not None:
        # A bare boolean is shorthand for {"show": bool}.
        if isinstance(params, bool):
            params = {"show": params}
        for key, val in params.items():
            parsed[key] = val
    if parsed["norm"] == "log":
        norm = LogNorm
    elif parsed["norm"] == "linear":
        norm = Normalize
    else:
        raise RuntimeError("Unknown norm. Expected 'linear' or 'log', "
                           "got {}.".format(parsed["norm"]))
    vmin = parsed["vmin"]
    vmax = parsed["vmax"]
    # vmin/vmax carry a .value attribute when set; matplotlib needs the raw
    # numbers, not the wrapping objects.
    parsed["norm"] = norm(vmin=vmin.value if vmin is not None else None,
                          vmax=vmax.value if vmax is not None else None)
    # Convert color into custom colormap
    if parsed["color"] is not None:
        # A single color becomes a flat two-point colormap.
        parsed["cmap"] = LinearSegmentedColormap.from_list(
            "tmp", [parsed["color"], parsed["color"]])
    else:
        # Copy so the shared matplotlib colormap is not mutated below.
        parsed["cmap"] = copy(cm.get_cmap(parsed["cmap"]))
    if parsed["under_color"] is None:
        parsed["cmap"].set_under(parsed["cmap"](0.0))
    else:
        parsed["cmap"].set_under(parsed["under_color"])
    if parsed["over_color"] is None:
        parsed["cmap"].set_over(parsed["cmap"](1.0))
    else:
        parsed["cmap"].set_over(parsed["over_color"])
    return parsed
def vars_to_err(v):
    """
    Convert variances to errors (standard deviations).
    Negative variances yield NaN under sqrt and are zeroed out.
    """
    with np.errstate(invalid="ignore"):
        err = np.sqrt(v)
    np.nan_to_num(err, copy=False)
    return err
def find_log_limits(x):
    """
    To find log scale limits, we histogram the data between 1.0-30
    and 1.0e+30 and include only bins that are non-zero.
    """
    from .. import flatten, ones
    volume = np.product(x.shape)
    # Flatten the data into a single 'pixel' dimension of float64 values.
    pixel = flatten(values(x.astype(dtype.float64)), to='pixel')
    weights = ones(dims=['pixel'], shape=[volume], unit='counts')
    # Histogram the values over 60 logarithmically spaced bins spanning
    # 1e-30 to 1e+30 in the data's own unit.
    hist = histogram(DataArray(data=weights, coords={'order': pixel}),
                     bins=Variable(dims=['order'],
                                   values=np.geomspace(1e-30, 1e30, num=61),
                                   unit=x.unit))
    # Find the first and the last non-zero bins
    inds = np.nonzero((hist.data > 0.0 * units.counts).values)
    ar = np.arange(hist.data.shape[0])[inds]
    # Safety check in case there are no values in range 1.0e-30:1.0e+30:
    # fall back to the linear method and replace with arbitrary values if the
    # limits are negative.
    if len(ar) == 0:
        [vmin, vmax] = find_linear_limits(x)
        if vmin.value <= 0.0:
            if vmax.value <= 0.0:
                vmin = full_like(vmin, 0.1)
                vmax = full_like(vmax, 1.0)
            else:
                vmin = 1.0e-3 * vmax
    else:
        # Use the lower edge of the first occupied bin and the upper edge of
        # the last occupied bin as limits.
        vmin = hist.coords['order']['order', ar.min()]
        vmax = hist.coords['order']['order', ar.max() + 1]
    return [vmin, vmax]
def find_linear_limits(x):
    """
    Find variable min and max.
    """
    return [
        values(reducer(x).astype(dtype.float64)) for reducer in (nanmin, nanmax)
    ]
def find_limits(x, scale=None, flip=False):
    """
    Find sensible limits, depending on linear or log scale.
    When `scale` is None, limits for both scales are returned.
    """
    if scale == "log":
        lims = {"log": find_log_limits(x)}
    elif scale is not None:
        lims = {"linear": find_linear_limits(x)}
    else:
        lims = {"log": find_log_limits(x), "linear": find_linear_limits(x)}
    if flip:
        lims = {key: np.flip(val).copy() for key, val in lims.items()}
    return lims
def fix_empty_range(lims, replacement=None):
    """
    Widen a degenerate range where the lower and upper limits coincide.
    """
    lo, hi = lims
    delta = 0.0 * lo.unit
    if lo.value == hi.value:
        if replacement is not None:
            delta = 0.5 * replacement
        elif lo.value == 0.0:
            delta = 0.5 * lo.unit
        else:
            delta = 0.5 * abs_(lo)
    return [lo - delta, hi + delta]
def fig_to_pngbytes(fig):
    """
    Render a figure to PNG image bytes, closing it afterwards so it does
    not get displayed again in cells further down the notebook.
    """
    import matplotlib.pyplot as plt
    buffer = io.BytesIO()
    fig.savefig(buffer, format='png')
    plt.close(fig)
    return buffer.getvalue()
def to_dict(meta):
    """
    Convert a coords, meta, attrs or masks object to a python dict.
    """
    return dict(meta.items())
| 31.058252
| 82
| 0.586902
|
from .. import config
from ..core import concatenate, values, dtype, units, nanmin, nanmax, histogram, \
full_like
from ..core import Variable, DataArray
from ..core import abs as abs_
import numpy as np
from copy import copy
import io
def get_line_param(name=None, index=None):
param = getattr(config.plot, name)
return param[index % len(param)]
def to_bin_centers(x, dim):
return 0.5 * (x[dim, 1:] + x[dim, :-1])
def to_bin_edges(x, dim):
idim = x.dims.index(dim)
if x.shape[idim] < 2:
one = 1.0 * x.unit
return concatenate(x[dim, 0:1] - one, x[dim, 0:1] + one, dim)
else:
center = to_bin_centers(x, dim)
left = center[dim, 0:1] - (x[dim, 1] - x[dim, 0])
right = center[dim, -1] + (x[dim, -1] - x[dim, -2])
return concatenate(concatenate(left, center, dim), right, dim)
def parse_params(params=None, defaults=None, globs=None, array=None):
from matplotlib.colors import Normalize, LogNorm, LinearSegmentedColormap
from matplotlib import cm
parsed = dict(config.plot.params)
if defaults is not None:
for key, val in defaults.items():
parsed[key] = val
if globs is not None:
for key, val in globs.items():
if val is not None:
parsed[key] = val
if params is not None:
if isinstance(params, bool):
params = {"show": params}
for key, val in params.items():
parsed[key] = val
if parsed["norm"] == "log":
norm = LogNorm
elif parsed["norm"] == "linear":
norm = Normalize
else:
raise RuntimeError("Unknown norm. Expected 'linear' or 'log', "
"got {}.".format(parsed["norm"]))
vmin = parsed["vmin"]
vmax = parsed["vmax"]
parsed["norm"] = norm(vmin=vmin.value if vmin is not None else None,
vmax=vmax.value if vmax is not None else None)
# Convert color into custom colormap
if parsed["color"] is not None:
parsed["cmap"] = LinearSegmentedColormap.from_list(
"tmp", [parsed["color"], parsed["color"]])
else:
parsed["cmap"] = copy(cm.get_cmap(parsed["cmap"]))
if parsed["under_color"] is None:
parsed["cmap"].set_under(parsed["cmap"](0.0))
else:
parsed["cmap"].set_under(parsed["under_color"])
if parsed["over_color"] is None:
parsed["cmap"].set_over(parsed["cmap"](1.0))
else:
parsed["cmap"].set_over(parsed["over_color"])
return parsed
def vars_to_err(v):
with np.errstate(invalid="ignore"):
v = np.sqrt(v)
np.nan_to_num(v, copy=False)
return v
def find_log_limits(x):
from .. import flatten, ones
volume = np.product(x.shape)
pixel = flatten(values(x.astype(dtype.float64)), to='pixel')
weights = ones(dims=['pixel'], shape=[volume], unit='counts')
hist = histogram(DataArray(data=weights, coords={'order': pixel}),
bins=Variable(dims=['order'],
values=np.geomspace(1e-30, 1e30, num=61),
unit=x.unit))
# Find the first and the last non-zero bins
inds = np.nonzero((hist.data > 0.0 * units.counts).values)
ar = np.arange(hist.data.shape[0])[inds]
# Safety check in case there are no values in range 1.0e-30:1.0e+30:
# fall back to the linear method and replace with arbitrary values if the
# limits are negative.
if len(ar) == 0:
[vmin, vmax] = find_linear_limits(x)
if vmin.value <= 0.0:
if vmax.value <= 0.0:
vmin = full_like(vmin, 0.1)
vmax = full_like(vmax, 1.0)
else:
vmin = 1.0e-3 * vmax
else:
vmin = hist.coords['order']['order', ar.min()]
vmax = hist.coords['order']['order', ar.max() + 1]
return [vmin, vmax]
def find_linear_limits(x):
return [
values(nanmin(x).astype(dtype.float64)),
values(nanmax(x).astype(dtype.float64))
]
def find_limits(x, scale=None, flip=False):
if scale is not None:
if scale == "log":
lims = {"log": find_log_limits(x)}
else:
lims = {"linear": find_linear_limits(x)}
else:
lims = {"log": find_log_limits(x), "linear": find_linear_limits(x)}
if flip:
for key in lims:
lims[key] = np.flip(lims[key]).copy()
return lims
def fix_empty_range(lims, replacement=None):
dx = 0.0 * lims[0].unit
if lims[0].value == lims[1].value:
if replacement is not None:
dx = 0.5 * replacement
elif lims[0].value == 0.0:
dx = 0.5 * lims[0].unit
else:
dx = 0.5 * abs_(lims[0])
return [lims[0] - dx, lims[1] + dx]
def fig_to_pngbytes(fig):
import matplotlib.pyplot as plt
buf = io.BytesIO()
fig.savefig(buf, format='png')
plt.close(fig)
buf.seek(0)
return buf.getvalue()
def to_dict(meta):
return {name: var for name, var in meta.items()}
| true
| true
|
f708f31fda5291b510a4b006df811bd66c465bd9
| 85,439
|
bzl
|
Python
|
tensorflow/tensorflow.bzl
|
ShaunHeNJU/DeepRec-1
|
e280fb19de179f03dc05e1d8e3f4f7459796d96e
|
[
"Apache-2.0"
] | 1
|
2021-12-24T06:04:16.000Z
|
2021-12-24T06:04:16.000Z
|
tensorflow/tensorflow.bzl
|
ShaunHeNJU/DeepRec-1
|
e280fb19de179f03dc05e1d8e3f4f7459796d96e
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/tensorflow.bzl
|
ShaunHeNJU/DeepRec-1
|
e280fb19de179f03dc05e1d8e3f4f7459796d96e
|
[
"Apache-2.0"
] | 1
|
2022-02-28T08:28:25.000Z
|
2022-02-28T08:28:25.000Z
|
# -*- Python -*-
# Return the options to use for a C++ library or binary build.
# Uses the ":optmode" config_setting to pick the options.
load(
"//tensorflow/core/platform:default/build_config_root.bzl",
"if_dynamic_kernels",
"if_static",
"tf_additional_grpc_deps_py",
"tf_additional_xla_deps_py",
"tf_cuda_tests_tags",
"tf_exec_compatible_with",
"tf_gpu_tests_tags",
"tf_sycl_tests_tags",
)
load(
"@local_config_tensorrt//:build_defs.bzl",
"if_tensorrt",
)
load(
"//tensorflow/core/platform:default/cuda_build_defs.bzl",
"if_cuda_is_configured",
)
load(
"@local_config_cuda//cuda:build_defs.bzl",
"cuda_default_copts",
"if_cuda",
)
load(
"@local_config_rocm//rocm:build_defs.bzl",
"if_rocm",
"if_rocm_is_configured",
"rocm_copts",
"rocm_default_copts",
)
load(
"//third_party/mkl:build_defs.bzl",
"if_enable_mkl",
"if_mkl",
"if_mkl_lnx_x64",
"if_mkl_ml",
"mkl_deps",
)
load(
"//third_party/mkl_dnn:build_defs.bzl",
"if_mkl_open_source_only",
"if_mkldnn_threadpool",
)
load(
"//third_party/ngraph:build_defs.bzl",
"if_ngraph",
)
def register_extension_info(**kwargs):
pass
# version for the shared libraries, can
# not contain rc or alpha, only numbers.
# Also update tensorflow/core/public/version.h
# and tensorflow/tools/pip_package/setup.py
VERSION = "1.15.5"
VERSION_MAJOR = VERSION.split(".")[0]
def if_v2(a):
return select({
clean_dep("//tensorflow:api_version_2"): a,
"//conditions:default": [],
})
def if_not_v2(a):
return select({
clean_dep("//tensorflow:api_version_2"): [],
"//conditions:default": a,
})
def if_cuda_is_configured_compat(x):
return if_cuda_is_configured(x)
# Given a source file, generate a test name.
# i.e. "common_runtime/direct_session_test.cc" becomes
# "common_runtime_direct_session_test"
def src_to_test_name(src):
    """Derives a flat test target name from a source file path."""
    sanitized = src.replace("/", "_").replace(":", "_")
    return sanitized.partition(".")[0]
def full_path(relative_paths):
return [native.package_name() + "/" + relative for relative in relative_paths]
def _add_tfcore_prefix(src):
    """Qualifies a relative path with the //tensorflow/core package label;
    absolute labels are returned unchanged."""
    return src if src.startswith("//") else "//tensorflow/core:" + src
# List of proto files for android builds
def tf_android_core_proto_sources(core_proto_sources_relative):
return [
_add_tfcore_prefix(p)
for p in core_proto_sources_relative
]
# Returns the list of pb.h and proto.h headers that are generated for
# tf_android_core_proto_sources().
def tf_android_core_proto_headers(core_proto_sources_relative):
    """Maps each .proto source to its generated .pb.h and .proto.h headers
    (all .pb.h paths first, then all .proto.h paths)."""
    headers = []
    for suffix in [".pb.h", ".proto.h"]:
        headers += [
            _add_tfcore_prefix(p).replace(":", "/").replace(".proto", suffix)
            for p in core_proto_sources_relative
        ]
    return headers
# Wrapper for portable protos which currently just creates an empty rule.
def tf_portable_proto_library(name, proto_deps, **kwargs):
_ignore = [kwargs]
native.cc_library(name = name, deps = proto_deps)
# Sanitize a dependency so that it works correctly from code that includes
# TensorFlow as a submodule.
def clean_dep(dep):
    # Label() resolves the string relative to the repository containing this
    # .bzl file, so "//tensorflow:x" remains correct when TensorFlow is
    # nested inside another workspace.
    return str(Label(dep))
def if_android_x86(a):
return select({
clean_dep("//tensorflow:android_x86"): a,
clean_dep("//tensorflow:android_x86_64"): a,
"//conditions:default": [],
})
def if_android_arm(a):
return select({
clean_dep("//tensorflow:android_arm"): a,
"//conditions:default": [],
})
def if_android_arm64(a):
return select({
clean_dep("//tensorflow:android_arm64"): a,
"//conditions:default": [],
})
def if_android_mips(a):
return select({
clean_dep("//tensorflow:android_mips"): a,
"//conditions:default": [],
})
def if_not_android(a):
return select({
clean_dep("//tensorflow:android"): [],
"//conditions:default": a,
})
def if_not_android_mips_and_mips64(a):
return select({
clean_dep("//tensorflow:android_mips"): [],
clean_dep("//tensorflow:android_mips64"): [],
"//conditions:default": a,
})
def if_android(a):
return select({
clean_dep("//tensorflow:android"): a,
"//conditions:default": [],
})
def if_emscripten(a):
return select({
clean_dep("//tensorflow:emscripten"): a,
"//conditions:default": [],
})
def if_macos(a, otherwise = []):
return select({
clean_dep("//tensorflow:macos"): a,
"//conditions:default": otherwise,
})
def if_ios(a):
return select({
clean_dep("//tensorflow:ios"): a,
"//conditions:default": [],
})
def if_ios_x86_64(a):
return select({
clean_dep("//tensorflow:ios_x86_64"): a,
"//conditions:default": [],
})
def if_mobile(a):
return select({
clean_dep("//tensorflow:android"): a,
clean_dep("//tensorflow:ios"): a,
"//conditions:default": [],
})
def if_not_mobile(a):
return select({
clean_dep("//tensorflow:android"): [],
clean_dep("//tensorflow:ios"): [],
"//conditions:default": a,
})
# Config setting selector used when building for products
# which requires restricted licenses to be avoided.
def if_not_lgpl_restricted(a):
_ = (a,)
return select({
"//conditions:default": [],
})
def if_not_windows(a):
return select({
clean_dep("//tensorflow:windows"): [],
"//conditions:default": a,
})
def if_windows(a, otherwise = []):
return select({
clean_dep("//tensorflow:windows"): a,
"//conditions:default": otherwise,
})
def if_windows_cuda(a, otherwise = []):
return select({
clean_dep("//tensorflow:with_cuda_support_windows_override"): a,
"//conditions:default": otherwise,
})
def if_linux_x86_64(a):
return select({
clean_dep("//tensorflow:linux_x86_64"): a,
"//conditions:default": [],
})
def if_override_eigen_strong_inline(a):
return select({
clean_dep("//tensorflow:override_eigen_strong_inline"): a,
"//conditions:default": [],
})
def if_nccl(if_true, if_false = []):
    # NCCL is never available on Windows and can be explicitly disabled
    # through the no_nccl_support config setting.
    return select({
        "//tensorflow:no_nccl_support": if_false,
        "//tensorflow:windows": if_false,
        "//conditions:default": if_true,
    })
def get_win_copts(is_external = False):
    """Returns the MSVC compiler options used for Windows builds.

    External (open-source) builds import the framework library symbols,
    internal builds export them.
    """
    copts = [
        "/DPLATFORM_WINDOWS",
        "/DEIGEN_HAS_C99_MATH",
        "/DTENSORFLOW_USE_EIGEN_THREADPOOL",
        "/DEIGEN_AVOID_STL_ARRAY",
        "/Iexternal/gemmlowp",
        "/wd4018",  # -Wno-sign-compare
        # Bazel's CROSSTOOL currently pass /EHsc to enable exception by
        # default. We can't pass /EHs-c- to disable exception, otherwise
        # we will get a waterfall of flag conflict warnings. Wait for
        # Bazel to fix this.
        # "/D_HAS_EXCEPTIONS=0",
        # "/EHs-c-",
        "/wd4577",
        "/DNOGDI",
    ]
    if is_external:
        return copts + ["/UTF_COMPILE_LIBRARY"]
    return copts + ["/DTF_COMPILE_LIBRARY"]
# LINT.IfChange
def tf_copts(
        android_optimization_level_override = "-O2",
        is_external = False,
        allow_exceptions = False):
    # For compatibility reasons, android_optimization_level_override
    # is currently only being set for Android.
    # To clear this value, and allow the CROSSTOOL default
    # to be used, pass android_optimization_level_override=None
    android_copts = [
        "-DTF_LEAN_BINARY",
        "-Wno-narrowing",
        "-fomit-frame-pointer",
    ]
    if android_optimization_level_override:
        android_copts.append(android_optimization_level_override)
    return (
        # Options shared by every non-Windows platform.
        if_not_windows([
            "-DEIGEN_AVOID_STL_ARRAY",
            "-Iexternal/gemmlowp",
            "-Wno-sign-compare",
            "-ftemplate-depth=900",
        ]) +
        # Exceptions are disabled unless the caller explicitly allows them.
        (if_not_windows(["-fno-exceptions"]) if not allow_exceptions else []) +
        # Feature defines, each gated on its own config setting.
        if_cuda(["-DGOOGLE_CUDA=1"]) +
        if_tensorrt(["-DGOOGLE_TENSORRT=1"]) +
        if_nccl(["-DGOOGLE_NCCL=1"]) +
        if_mkl(["-DINTEL_MKL=1", "-DENABLE_MKLDNN_V1", "-DENABLE_INTEL_MKL_BFLOAT16"]) +
        if_mkl_open_source_only(["-DINTEL_MKL_DNN_ONLY"]) +
        if_mkldnn_threadpool(["-DENABLE_MKLDNN_THREADPOOL"]) +
        if_enable_mkl(["-DENABLE_MKL"]) +
        if_ngraph(["-DINTEL_NGRAPH=1"]) +
        if_android_arm(["-mfpu=neon"]) +
        if_linux_x86_64(["-msse3"]) +
        if_ios_x86_64(["-msse4.1"]) +
        select({
            clean_dep("//tensorflow:framework_shared_object"): [],
            "//conditions:default": ["-DTENSORFLOW_MONOLITHIC_BUILD"],
        }) +
        # Platform-specific options; Android additionally receives the
        # optimization override computed above.
        select({
            clean_dep("//tensorflow:android"): android_copts,
            clean_dep("//tensorflow:macos"): [],
            clean_dep("//tensorflow:windows"): get_win_copts(is_external),
            clean_dep("//tensorflow:ios"): [],
            clean_dep("//tensorflow:no_lgpl_deps"): ["-D__TENSORFLOW_NO_LGPL_DEPS__", "-pthread"],
            "//conditions:default": ["-pthread"],
        })
    )
def tf_openmp_copts():
return (if_mkl_lnx_x64(["-fopenmp"]) + if_mkldnn_threadpool(["-fno-openmp"]))
def tfe_xla_copts():
return select({
"//tensorflow:with_xla_support": ["-DTENSORFLOW_EAGER_USE_XLA"],
"//conditions:default": [],
})
def tf_opts_nortti_if_android():
return if_android([
"-fno-rtti",
"-DGOOGLE_PROTOBUF_NO_RTTI",
"-DGOOGLE_PROTOBUF_NO_STATIC_INITIALIZER",
])
# LINT.ThenChange(//tensorflow/contrib/android/cmake/CMakeLists.txt)
def tf_opts_nortti_if_emscripten():
return if_emscripten([
"-fno-rtti",
"-DGOOGLE_PROTOBUF_NO_RTTI",
"-DGOOGLE_PROTOBUF_NO_STATIC_INITIALIZER",
])
def tf_features_nomodules_if_android():
return if_android(["-use_header_modules"])
def tf_features_nomodules_if_emscripten():
return if_emscripten(["-use_header_modules"])
# Given a list of "op_lib_names" (a list of files in the ops directory
# without their .cc extensions), generate a library for that file.
def tf_gen_op_libs(op_lib_names, deps = None, is_external = True):
    # Make library out of each op so it can also be used to generate wrappers
    # for various languages.
    if not deps:
        deps = []
    for n in op_lib_names:
        native.cc_library(
            name = n + "_op_lib",
            copts = tf_copts(is_external = is_external),
            srcs = ["ops/" + n + ".cc"],
            deps = deps + [clean_dep("//tensorflow/core:framework")],
            visibility = ["//visibility:public"],
            # alwayslink keeps op-registration side effects alive even though
            # nothing references the library's symbols directly.
            alwayslink = 1,
            linkstatic = 1,
        )
def _make_search_paths(prefix, levels_to_root):
    """Builds a comma-separated list of -rpath entries, one per directory
    level from the current package up to the repository root."""
    entries = []
    for depth in range(levels_to_root + 1):
        relative = "/".join([".."] * depth)
        entries.append("-rpath,%s/%s" % (prefix, relative))
    return ",".join(entries)
def _rpath_linkopts(name):
    # Search parent directories up to the TensorFlow root directory for shared
    # object dependencies, even if this op shared object is deeply nested
    # (e.g. tensorflow/contrib/package:python/ops/_op_lib.so). tensorflow/ is then
    # the root and tensorflow/libtensorflow_framework.so should exist when
    # deployed. Other shared object dependencies (e.g. shared between contrib/
    # ops) are picked up as long as they are in either the same or a parent
    # directory in the tensorflow/ tree.
    levels_to_root = native.package_name().count("/") + name.count("/")
    return select({
        # macOS resolves rpaths relative to @loader_path instead of $ORIGIN.
        clean_dep("//tensorflow:macos"): [
            "-Wl,%s" % (_make_search_paths("@loader_path", levels_to_root),),
        ],
        # Windows has no rpath mechanism.
        clean_dep("//tensorflow:windows"): [],
        "//conditions:default": [
            "-Wl,%s" % (_make_search_paths("$$ORIGIN", levels_to_root),),
        ],
    })
# Bazel-generated shared objects which must be linked into TensorFlow binaries
# to define symbols from //tensorflow/core:framework and //tensorflow/core:lib.
def tf_binary_additional_srcs(fullversion = False):
if fullversion:
suffix = "." + VERSION
else:
suffix = "." + VERSION_MAJOR
return if_static(
extra_deps = [],
macos = [
clean_dep("//tensorflow:libtensorflow_framework%s.dylib" % suffix),
],
otherwise = [
clean_dep("//tensorflow:libtensorflow_framework.so%s" % suffix),
],
)
def tf_binary_additional_data_deps():
return if_static(
extra_deps = [],
macos = [
clean_dep("//tensorflow:libtensorflow_framework.dylib"),
clean_dep("//tensorflow:libtensorflow_framework.%s.dylib" % VERSION_MAJOR),
clean_dep("//tensorflow:libtensorflow_framework.%s.dylib" % VERSION),
],
otherwise = [
clean_dep("//tensorflow:libtensorflow_framework.so"),
clean_dep("//tensorflow:libtensorflow_framework.so.%s" % VERSION_MAJOR),
clean_dep("//tensorflow:libtensorflow_framework.so.%s" % VERSION),
],
)
def tf_binary_pybind_deps():
return select({
clean_dep("//tensorflow:macos"): [
clean_dep(
"//tensorflow/python:_pywrap_tensorflow_internal_macos",
),
],
clean_dep("//tensorflow:windows"): [
clean_dep(
"//tensorflow/python:_pywrap_tensorflow_internal_windows",
),
],
"//conditions:default": [
clean_dep(
"//tensorflow/python:_pywrap_tensorflow_internal_linux",
),
],
})
# Helper function for the per-OS tensorflow libraries and their version symlinks
def tf_shared_library_deps():
return select({
clean_dep("//tensorflow:macos_with_framework_shared_object"): [
clean_dep("//tensorflow:libtensorflow.dylib"),
clean_dep("//tensorflow:libtensorflow.%s.dylib" % VERSION_MAJOR),
clean_dep("//tensorflow:libtensorflow.%s.dylib" % VERSION),
],
clean_dep("//tensorflow:macos"): [],
clean_dep("//tensorflow:windows"): [
clean_dep("//tensorflow:tensorflow.dll"),
clean_dep("//tensorflow:tensorflow_dll_import_lib"),
],
clean_dep("//tensorflow:framework_shared_object"): [
clean_dep("//tensorflow:libtensorflow.so"),
clean_dep("//tensorflow:libtensorflow.so.%s" % VERSION_MAJOR),
clean_dep("//tensorflow:libtensorflow.so.%s" % VERSION),
],
"//conditions:default": [],
}) + tf_binary_additional_srcs()
# Helper functions to add kernel dependencies to tf binaries when using dynamic
# kernel linking.
def tf_binary_dynamic_kernel_dsos():
return if_dynamic_kernels(
extra_deps = [
"//tensorflow/core/kernels:libtfkernel_all_kernels.so",
],
otherwise = [],
)
# Helper functions to add kernel dependencies to tf binaries when using static
# kernel linking.
def tf_binary_dynamic_kernel_deps(kernels):
return if_dynamic_kernels(
extra_deps = [],
otherwise = kernels,
)
# Shared libraries have different name pattern on different platforms,
# but cc_binary cannot output correct artifact name yet,
# so we generate multiple cc_binary targets with all name patterns when necessary.
# TODO(pcloudy): Remove this workaround when https://github.com/bazelbuild/bazel/issues/4570
# is done and cc_shared_library is available.
SHARED_LIBRARY_NAME_PATTERNS = [
"lib%s.so%s", # On Linux, shared libraries are usually named as libfoo.so
"lib%s%s.dylib", # On macos, shared libraries are usually named as libfoo.dylib
"%s%s.dll", # On Windows, shared libraries are usually named as foo.dll
]
def tf_cc_shared_object(
        name,
        srcs = [],
        deps = [],
        data = [],
        linkopts = [],
        framework_so = tf_binary_additional_srcs(),
        soversion = None,
        kernels = [],
        per_os_targets = False,  # Generate targets with SHARED_LIBRARY_NAME_PATTERNS
        visibility = None,
        **kwargs):
    """Configure the shared object (.so) file for TensorFlow.

    Creates one versioned cc_binary per platform name pattern (when
    per_os_targets is True), symlink genrules for the unversioned and
    major-version names, and finally a filegroup aliasing `name` to the
    platform-appropriate artifact.
    """
    if soversion != None:
        # suffix holds the major version (e.g. ".2"); longsuffix the full
        # version string (e.g. ".2.1.0").
        suffix = "." + str(soversion).split(".")[0]
        longsuffix = "." + str(soversion)
    else:
        suffix = ""
        longsuffix = ""
    if per_os_targets:
        names = [
            (
                pattern % (name, ""),
                pattern % (name, suffix),
                pattern % (name, longsuffix),
            )
            for pattern in SHARED_LIBRARY_NAME_PATTERNS
        ]
    else:
        names = [(
            name,
            name + suffix,
            name + longsuffix,
        )]
    for name_os, name_os_major, name_os_full in names:
        # Windows DLLs can't be versioned
        if name_os.endswith(".dll"):
            name_os_major = name_os
            name_os_full = name_os
        if name_os != name_os_major:
            # Symlink chain: unversioned name -> major-version artifact, and
            # major-version name -> fully-versioned artifact.
            native.genrule(
                name = name_os + "_sym",
                outs = [name_os],
                srcs = [name_os_major],
                output_to_bindir = 1,
                cmd = "ln -sf $$(basename $<) $@",
            )
            native.genrule(
                name = name_os_major + "_sym",
                outs = [name_os_major],
                srcs = [name_os_full],
                output_to_bindir = 1,
                cmd = "ln -sf $$(basename $<) $@",
            )
        soname = name_os_major.split("/")[-1]
        data_extra = []
        if framework_so != []:
            data_extra = tf_binary_additional_data_deps()
        native.cc_binary(
            name = name_os_full,
            srcs = srcs + framework_so,
            deps = deps,
            linkshared = 1,
            data = data + data_extra,
            linkopts = linkopts + _rpath_linkopts(name_os_full) + select({
                clean_dep("//tensorflow:macos"): [
                    "-Wl,-install_name,@rpath/" + soname,
                ],
                clean_dep("//tensorflow:windows"): [],
                "//conditions:default": [
                    "-Wl,-soname," + soname,
                ],
            }),
            visibility = visibility,
            **kwargs
        )
    flat_names = [item for sublist in names for item in sublist]
    if name not in flat_names:
        # Expose `name` itself as an alias for the per-OS artifact.
        native.filegroup(
            name = name,
            srcs = select({
                "//tensorflow:windows": [":%s.dll" % (name)],
                "//tensorflow:macos": [":lib%s%s.dylib" % (name, longsuffix)],
                "//conditions:default": [":lib%s.so%s" % (name, longsuffix)],
            }),
            visibility = visibility,
        )
register_extension_info(
    extension_name = "tf_cc_shared_object",
    label_regex_for_dep = "{extension_name}",
)
# Links in the framework shared object
# (//third_party/tensorflow:libtensorflow_framework.so) when not building
# statically. Also adds linker options (rpaths) so that the framework shared
# object can be found.
def tf_cc_binary(
        name,
        srcs = [],
        deps = [],
        data = [],
        linkopts = [],
        copts = tf_copts(),
        kernels = [],
        per_os_targets = False,  # Generate targets with SHARED_LIBRARY_NAME_PATTERNS
        visibility = None,
        **kwargs):
    """cc_binary wrapper adding TF framework srcs, kernel deps, and rpaths."""
    if kernels:
        added_data_deps = tf_binary_dynamic_kernel_dsos()
    else:
        added_data_deps = []
    if per_os_targets:
        names = [pattern % (name, "") for pattern in SHARED_LIBRARY_NAME_PATTERNS]
    else:
        names = [name]
    for name_os in names:
        native.cc_binary(
            name = name_os,
            copts = copts,
            srcs = srcs + tf_binary_additional_srcs(),
            deps = deps + tf_binary_dynamic_kernel_deps(kernels) + if_mkl_ml(
                [
                    clean_dep("//third_party/mkl:intel_binary_blob"),
                ],
            ),
            data = depset(data + added_data_deps),
            linkopts = linkopts + _rpath_linkopts(name_os),
            visibility = visibility,
            **kwargs
        )
    if name not in names:
        # Alias `name` to the platform-specific artifact generated above.
        native.filegroup(
            name = name,
            srcs = select({
                "//tensorflow:windows": [":%s.dll" % name],
                "//tensorflow:macos": [":lib%s.dylib" % name],
                "//conditions:default": [":lib%s.so" % name],
            }),
            visibility = visibility,
        )
register_extension_info(
    extension_name = "tf_cc_binary",
    label_regex_for_dep = "{extension_name}.*",
)
# A simple wrap around native.cc_binary rule.
# When using this rule, you should realize it doesn't link to any tensorflow
# dependencies by default.
def tf_native_cc_binary(
        name,
        copts = tf_copts(),
        linkopts = [],
        **kwargs):
    """cc_binary with TF copts plus per-platform math/pthread link flags."""
    native.cc_binary(
        name = name,
        copts = copts,
        linkopts = select({
            clean_dep("//tensorflow:windows"): [],
            clean_dep("//tensorflow:macos"): [
                "-lm",
            ],
            "//conditions:default": [
                "-lpthread",
                "-lm",
            ],
        }) + linkopts + _rpath_linkopts(name),
        **kwargs
    )
register_extension_info(
    extension_name = "tf_native_cc_binary",
    label_regex_for_dep = "{extension_name}.*",
)
def tf_gen_op_wrapper_cc(
        name,
        out_ops_file,
        pkg = "",
        op_gen = clean_dep("//tensorflow/cc:cc_op_gen_main"),
        deps = None,
        include_internal_ops = 0,
        # ApiDefs will be loaded in the order specified in this list.
        api_def_srcs = []):
    """Generates C++ op wrapper sources for one op library.

    Builds a one-off generator binary from `op_gen` plus the op library, then
    runs it in a genrule to emit <out_ops_file>{.h,.cc} and the matching
    _internal variants.
    """
    # Construct an op generator binary for these ops.
    tool = out_ops_file + "_gen_cc"
    if deps == None:
        deps = [pkg + ":" + name + "_op_lib"]
    tf_cc_binary(
        name = tool,
        copts = tf_copts(),
        linkopts = if_not_windows(["-lm", "-Wl,-ldl"]),
        linkstatic = 1,  # Faster to link this one-time-use binary dynamically
        deps = [op_gen] + deps,
    )
    srcs = api_def_srcs[:]
    if not api_def_srcs:
        # NOTE(review): "," appears to be the generator's sentinel for "no
        # ApiDef directories" — confirm against the generator's CLI.
        api_def_args_str = ","
    else:
        api_def_args = []
        for api_def_src in api_def_srcs:
            # Add directory of the first ApiDef source to args.
            # We are assuming all ApiDefs in a single api_def_src are in the
            # same directory.
            api_def_args.append(
                " $$(dirname $$(echo $(locations " + api_def_src +
                ") | cut -d\" \" -f1))",
            )
        api_def_args_str = ",".join(api_def_args)
    native.genrule(
        name = name + "_genrule",
        outs = [
            out_ops_file + ".h",
            out_ops_file + ".cc",
            out_ops_file + "_internal.h",
            out_ops_file + "_internal.cc",
        ],
        srcs = srcs,
        tools = [":" + tool] + tf_binary_additional_srcs(),
        cmd = ("$(location :" + tool + ") $(location :" + out_ops_file + ".h) " +
               "$(location :" + out_ops_file + ".cc) " +
               str(include_internal_ops) + " " + api_def_args_str),
    )
# Given a list of "op_lib_names" (a list of files in the ops directory
# without their .cc extensions), generate individual C++ .cc and .h
# files for each of the ops files mentioned, and then generate a
# single cc_library called "name" that combines all the
# generated C++ code.
#
# For example, for:
# tf_gen_op_wrappers_cc("tf_ops_lib", [ "array_ops", "math_ops" ])
#
#
# This will ultimately generate ops/* files and a library like:
#
# cc_library(name = "tf_ops_lib",
# srcs = [ "ops/array_ops.cc",
# "ops/math_ops.cc" ],
# hdrs = [ "ops/array_ops.h",
# "ops/math_ops.h" ],
# deps = [ ... ])
#
# Plus a private library for the "hidden" ops.
# cc_library(name = "tf_ops_lib_internal",
# srcs = [ "ops/array_ops_internal.cc",
# "ops/math_ops_internal.cc" ],
# hdrs = [ "ops/array_ops_internal.h",
# "ops/math_ops_internal.h" ],
# deps = [ ... ])
# TODO(joshl): Cleaner approach for hidden ops.
def tf_gen_op_wrappers_cc(
        name,
        op_lib_names = [],
        other_srcs = [],
        other_hdrs = [],
        other_srcs_internal = [],
        other_hdrs_internal = [],
        pkg = "",
        deps = [
            clean_dep("//tensorflow/cc:ops"),
            clean_dep("//tensorflow/cc:scope"),
            clean_dep("//tensorflow/cc:const_op"),
        ],
        deps_internal = [],
        op_gen = clean_dep("//tensorflow/cc:cc_op_gen_main"),
        include_internal_ops = 0,
        visibility = None,
        # ApiDefs will be loaded in the order specified in this list.
        api_def_srcs = [],
        # Any extra dependencies that the wrapper generator might need.
        extra_gen_deps = []):
    """Generates C++ wrappers for several op libraries and bundles them.

    Runs tf_gen_op_wrapper_cc for each entry of op_lib_names, then builds two
    cc_libraries: `name` from the public wrappers and `name`_internal from
    the hidden-op wrappers.  See the comment block above for an example.
    """
    subsrcs = other_srcs[:]
    subhdrs = other_hdrs[:]
    internalsrcs = other_srcs_internal[:]
    internalhdrs = other_hdrs_internal[:]
    for n in op_lib_names:
        tf_gen_op_wrapper_cc(
            n,
            "ops/" + n,
            api_def_srcs = api_def_srcs,
            include_internal_ops = include_internal_ops,
            op_gen = op_gen,
            pkg = pkg,
            deps = [pkg + ":" + n + "_op_lib"] + extra_gen_deps,
        )
        subsrcs += ["ops/" + n + ".cc"]
        subhdrs += ["ops/" + n + ".h"]
        internalsrcs += ["ops/" + n + "_internal.cc"]
        internalhdrs += ["ops/" + n + "_internal.h"]
    native.cc_library(
        name = name,
        srcs = subsrcs,
        hdrs = subhdrs,
        deps = deps + if_not_android([
            clean_dep("//tensorflow/core:core_cpu"),
            clean_dep("//tensorflow/core:framework"),
            clean_dep("//tensorflow/core:lib"),
            clean_dep("//tensorflow/core:ops"),
            clean_dep("//tensorflow/core:protos_all_cc"),
        ]) + if_android([
            clean_dep("//tensorflow/core:android_tensorflow_lib"),
        ]),
        copts = tf_copts(),
        alwayslink = 1,
        visibility = visibility,
    )
    native.cc_library(
        name = name + "_internal",
        srcs = internalsrcs,
        hdrs = internalhdrs,
        deps = deps + deps_internal + if_not_android([
            clean_dep("//tensorflow/core:core_cpu"),
            clean_dep("//tensorflow/core:framework"),
            clean_dep("//tensorflow/core:lib"),
            clean_dep("//tensorflow/core:ops"),
            clean_dep("//tensorflow/core:protos_all_cc"),
        ]) + if_android([
            clean_dep("//tensorflow/core:android_tensorflow_lib"),
        ]),
        copts = tf_copts(),
        alwayslink = 1,
        # The hidden-op wrappers are only visible inside TensorFlow.
        visibility = [clean_dep("//tensorflow:internal")],
    )
# Generates a Python library target wrapping the ops registered in "deps".
#
# Args:
# name: used as the name of the generated target and as a name component of
# the intermediate files.
# out: name of the python file created by this rule. If None, then
# "ops/gen_{name}.py" is used.
# hidden: Optional list of ops names to make private in the Python module.
# It is invalid to specify both "hidden" and "op_whitelist".
# visibility: passed to py_library.
# deps: list of dependencies for the intermediate tool used to generate the
# python target. NOTE these `deps` are not applied to the final python
# library target itself.
# require_shape_functions: leave this as False.
# hidden_file: optional file that contains a list of op names to make private
# in the generated Python module. Each op name should be on a line by
# itself. Lines that start with characters that are invalid op name
# starting characters are treated as comments and ignored.
# generated_target_name: name of the generated target (overrides the
# "name" arg)
# op_whitelist: if not empty, only op names in this list will be wrapped. It
# is invalid to specify both "hidden" and "op_whitelist".
# cc_linkopts: Optional linkopts to be added to tf_cc_binary that contains the
# specified ops.
def tf_gen_op_wrapper_py(
        name,
        out = None,
        hidden = None,
        visibility = None,
        deps = [],
        require_shape_functions = False,
        hidden_file = None,
        generated_target_name = None,
        op_whitelist = [],
        cc_linkopts = [],
        api_def_srcs = []):
    """Generates a py_library wrapping the ops registered in `deps`.

    See the comment block above this macro for full argument documentation.
    Builds a one-off generator cc_binary, runs it in a genrule to produce
    `out`, and wraps the result in a py_library.
    """
    if (hidden or hidden_file) and op_whitelist:
        fail("Cannot specify both hidden and op_whitelist.")

    # Construct a cc_binary containing the specified ops.
    tool_name = "gen_" + name + "_py_wrappers_cc"
    if not deps:
        deps = [str(Label("//tensorflow/core:" + name + "_op_lib"))]
    tf_cc_binary(
        name = tool_name,
        copts = tf_copts(),
        linkopts = if_not_windows(["-lm", "-Wl,-ldl"]) + cc_linkopts,
        linkstatic = 1,  # Faster to link this one-time-use binary dynamically
        visibility = [clean_dep("//tensorflow:internal")],
        deps = ([
            clean_dep("//tensorflow/core:framework"),
            clean_dep("//tensorflow/python:python_op_gen_main"),
        ] + deps),
    )

    # Invoke the previous cc_binary to generate a python file.
    if not out:
        out = "ops/gen_" + name + ".py"

    # op_list_arg is either the comma-joined hidden list or whitelist;
    # op_list_is_whitelist tells the generator how to interpret it.
    if hidden:
        op_list_arg = ",".join(hidden)
        op_list_is_whitelist = False
    elif op_whitelist:
        op_list_arg = ",".join(op_whitelist)
        op_list_is_whitelist = True
    else:
        op_list_arg = "''"
        op_list_is_whitelist = False

    # Prepare ApiDef directories to pass to the genrule.
    if not api_def_srcs:
        api_def_args_str = ","
    else:
        api_def_args = []
        for api_def_src in api_def_srcs:
            # Add directory of the first ApiDef source to args.
            # We are assuming all ApiDefs in a single api_def_src are in the
            # same directory.
            api_def_args.append(
                "$$(dirname $$(echo $(locations " + api_def_src +
                ") | cut -d\" \" -f1))",
            )
        api_def_args_str = ",".join(api_def_args)

    if hidden_file:
        # `hidden_file` is file containing a list of op names to be hidden in the
        # generated module.
        native.genrule(
            name = name + "_pygenrule",
            outs = [out],
            srcs = api_def_srcs + [hidden_file],
            tools = [tool_name] + tf_binary_additional_srcs(),
            cmd = ("$(location " + tool_name + ") " + api_def_args_str +
                   " @$(location " + hidden_file + ") " +
                   ("1" if require_shape_functions else "0") + " > $@"),
        )
    else:
        native.genrule(
            name = name + "_pygenrule",
            outs = [out],
            srcs = api_def_srcs,
            tools = [tool_name] + tf_binary_additional_srcs(),
            cmd = ("$(location " + tool_name + ") " + api_def_args_str + " " +
                   op_list_arg + " " +
                   ("1" if require_shape_functions else "0") + " " +
                   ("1" if op_list_is_whitelist else "0") + " > $@"),
        )

    # Make a py_library out of the generated python file.
    if not generated_target_name:
        generated_target_name = name
    native.py_library(
        name = generated_target_name,
        srcs = [out],
        srcs_version = "PY2AND3",
        visibility = visibility,
        deps = [
            clean_dep("//tensorflow/python:framework_for_generated_wrappers_v2"),
        ],
        # Instruct build_cleaner to try to avoid using this rule; typically ops
        # creators will provide their own tf_custom_op_py_library based target
        # that wraps this one.
        tags = ["avoid_dep"],
    )
# Define a bazel macro that creates cc_test for tensorflow.
#
# Links in the framework shared object
# (//third_party/tensorflow:libtensorflow_framework.so) when not building
# statically. Also adds linker options (rpaths) so that the framework shared
# object can be found.
#
# TODO(opensource): we need to enable this to work around the hidden symbol
# __cudaRegisterFatBinary error. Need more investigations.
def tf_cc_test(
        name,
        srcs,
        deps,
        data = [],
        linkstatic = 0,
        extra_copts = [],
        suffix = "",
        linkopts = [],
        kernels = [],
        **kwargs):
    """cc_test wrapper adding TF copts, framework srcs, and rpath linkopts."""
    native.cc_test(
        name = "%s%s" % (name, suffix),
        srcs = srcs + tf_binary_additional_srcs(),
        copts = tf_copts() + extra_copts,
        linkopts = select({
            clean_dep("//tensorflow:android"): [
                "-pie",
            ],
            clean_dep("//tensorflow:windows"): [],
            clean_dep("//tensorflow:macos"): [
                "-lm",
            ],
            "//conditions:default": [
                "-lpthread",
                "-lm",
            ],
        }) + linkopts + _rpath_linkopts(name),
        deps = deps + tf_binary_dynamic_kernel_deps(kernels) + if_mkl_ml(
            [
                clean_dep("//third_party/mkl:intel_binary_blob"),
            ],
        ),
        data = data +
               tf_binary_dynamic_kernel_dsos() +
               tf_binary_additional_srcs(),
        exec_compatible_with = tf_exec_compatible_with(kwargs),
        # Nested select() statements seem not to be supported when passed to
        # linkstatic, and we already have a cuda select() passed in to this
        # function.
        linkstatic = linkstatic or select({
            # cc_tests with ".so"s in srcs incorrectly link on Darwin unless
            # linkstatic=1 (https://github.com/bazelbuild/bazel/issues/3450).
            # TODO(allenl): Remove Mac static linking when Bazel 0.6 is out.
            clean_dep("//tensorflow:macos"): 1,
            "//conditions:default": 0,
        }),
        **kwargs
    )
register_extension_info(
    extension_name = "tf_cc_test",
    label_regex_for_dep = "{extension_name}.*",
)
# Part of the testing workflow requires a distinguishable name for the build
# rules that involve a GPU, even if otherwise identical to the base rule.
def tf_cc_test_gpu(
        name,
        srcs,
        deps,
        linkstatic = 0,
        tags = [],
        data = [],
        size = "medium",
        suffix = "",
        args = None):
    """tf_cc_test under a GPU-distinguishable macro name; adds no GPU deps."""
    tf_cc_test(
        name,
        srcs,
        deps,
        size = size,
        args = args,
        data = data,
        linkstatic = linkstatic,
        suffix = suffix,
        tags = tags,
    )
register_extension_info(
    extension_name = "tf_cc_test_gpu",
    label_regex_for_dep = "{extension_name}",
)
def tf_gpu_cc_test(
        name,
        srcs = [],
        deps = [],
        tags = [],
        data = [],
        size = "medium",
        extra_copts = [],
        linkstatic = 0,
        args = [],
        kernels = [],
        linkopts = []):
    """Creates a CPU test variant and a "_gpu"-suffixed GPU test variant."""
    # CPU variant. NOTE(review): tagged "manual", so it is not run by default
    # test wildcards — confirm this is intended.
    tf_cc_test(
        name = name,
        size = size,
        srcs = srcs,
        args = args,
        data = data,
        extra_copts = extra_copts + if_cuda(["-DNV_CUDNN_DISABLE_EXCEPTION"]),
        kernels = kernels,
        linkopts = linkopts,
        linkstatic = linkstatic,
        tags = tags + ["manual"],
        deps = deps,
    )
    # GPU variant: "_gpu" suffix, GPU test tags, and the GPU runtime dep.
    tf_cc_test(
        name = name,
        size = size,
        srcs = srcs,
        args = args,
        data = data,
        extra_copts = extra_copts + if_cuda(["-DNV_CUDNN_DISABLE_EXCEPTION"]),
        kernels = kernels,
        linkopts = linkopts,
        linkstatic = select({
            # TODO(allenl): Remove Mac static linking when Bazel 0.6 is out.
            clean_dep("//tensorflow:macos"): 1,
            "@local_config_cuda//cuda:using_nvcc": 1,
            "@local_config_cuda//cuda:using_clang": 1,
            "//conditions:default": 0,
        }),
        suffix = "_gpu",
        tags = tags + tf_gpu_tests_tags(),
        deps = deps + if_cuda_is_configured([
            clean_dep("//tensorflow/core:gpu_runtime"),
        ]) + if_rocm_is_configured([
            clean_dep("//tensorflow/core:gpu_runtime"),
        ]),
    )
register_extension_info(
    extension_name = "tf_gpu_cc_test",
    label_regex_for_dep = "{extension_name}",
)
# terminology changes: saving tf_cuda_* definition for compatibility
def tf_cuda_cc_test(*args, **kwargs):
    """Deprecated alias for tf_gpu_cc_test."""
    tf_gpu_cc_test(*args, **kwargs)
register_extension_info(
    extension_name = "tf_cuda_cc_test",
    label_regex_for_dep = "{extension_name}",
)
def tf_gpu_only_cc_test(
        name,
        srcs = [],
        deps = [],
        tags = [],
        data = [],
        size = "medium",
        linkstatic = 0,
        args = [],
        kernels = [],
        linkopts = []):
    """Creates a "_gpu"-suffixed cc_test that only exists as a GPU test."""
    tags = tags + tf_gpu_tests_tags()
    native.cc_test(
        name = "%s%s" % (name, "_gpu"),
        srcs = srcs + tf_binary_additional_srcs(),
        size = size,
        args = args,
        copts = _cuda_copts() + rocm_copts() + tf_copts(),
        features = if_cuda(["-use_header_modules"]),
        data = data + tf_binary_dynamic_kernel_dsos(),
        deps = deps + tf_binary_dynamic_kernel_deps(kernels) + if_cuda_is_configured([
            clean_dep("//tensorflow/core:cuda"),
            clean_dep("//tensorflow/core:gpu_lib"),
        ]) + if_rocm_is_configured([
            clean_dep("//tensorflow/core:gpu_lib"),
        ]),
        linkopts = if_not_windows(["-lpthread", "-lm"]) + linkopts + _rpath_linkopts(name),
        linkstatic = linkstatic or select({
            # cc_tests with ".so"s in srcs incorrectly link on Darwin
            # unless linkstatic=1.
            # TODO(allenl): Remove Mac static linking when Bazel 0.6 is out.
            clean_dep("//tensorflow:macos"): 1,
            "//conditions:default": 0,
        }),
        tags = tags,
        exec_compatible_with = tf_exec_compatible_with({"tags": tags}),
    )
register_extension_info(
    extension_name = "tf_gpu_only_cc_test",
    label_regex_for_dep = "{extension_name}_gpu",
)
# terminology changes: saving tf_cuda_* definition for compatibility
def tf_cuda_only_cc_test(*args, **kwargs):
    """Deprecated alias for tf_gpu_only_cc_test."""
    tf_gpu_only_cc_test(*args, **kwargs)
register_extension_info(
    extension_name = "tf_cuda_only_cc_test",
    label_regex_for_dep = "{extension_name}_gpu",
)
# Create a cc_test for each of the tensorflow tests listed in "tests"
def tf_cc_tests(
        srcs,
        deps,
        name = "",
        linkstatic = 0,
        tags = [],
        size = "medium",
        args = None,
        linkopts = [],
        kernels = []):
    """Creates one tf_cc_test per source file.

    `name` is accepted for call-site uniformity but unused: each test is
    named from its source file via src_to_test_name.
    """
    for src in srcs:
        tf_cc_test(
            name = src_to_test_name(src),
            size = size,
            srcs = [src],
            args = args,
            kernels = kernels,
            linkopts = linkopts,
            linkstatic = linkstatic,
            tags = tags,
            deps = deps,
        )
def tf_cc_test_mkl(
        srcs,
        deps,
        name = "",
        data = [],
        linkstatic = 0,
        tags = [],
        size = "medium",
        kernels = [],
        args = None):
    """Creates one MKL-enabled cc_test per source file; `name` is unused."""
    # -fno-exceptions in nocopts breaks compilation if header modules are enabled.
    disable_header_modules = ["-use_header_modules"]
    for src in srcs:
        native.cc_test(
            name = src_to_test_name(src),
            # Sources are only included when MKL is enabled.
            srcs = if_mkl([src]) + tf_binary_additional_srcs(),
            copts = tf_copts(allow_exceptions = True) + tf_openmp_copts(),
            linkopts = select({
                clean_dep("//tensorflow:android"): [
                    "-pie",
                ],
                clean_dep("//tensorflow:windows"): [],
                "//conditions:default": [
                    "-lpthread",
                    "-lm",
                ],
            }) + _rpath_linkopts(src_to_test_name(src)),
            deps = deps + tf_binary_dynamic_kernel_deps(kernels) + if_mkl_ml(["//third_party/mkl:intel_binary_blob"]),
            data = data + tf_binary_dynamic_kernel_dsos(),
            exec_compatible_with = tf_exec_compatible_with({"tags": tags}),
            linkstatic = linkstatic,
            tags = tags,
            size = size,
            args = args,
            features = disable_header_modules,
        )
def tf_cc_tests_gpu(
        srcs,
        deps,
        name = "",
        linkstatic = 0,
        tags = [],
        size = "medium",
        kernels = [],
        args = None):
    """Creates one tf_cc_test per source file, same as tf_cc_tests.

    `name` is accepted for call-site uniformity but unused: each test is
    named from its source file by tf_cc_tests.
    """
    # Pass linkstatic by keyword: positionally it would land in tf_cc_tests'
    # third parameter, which is `name`, silently dropping the linkstatic
    # setting.
    tf_cc_tests(srcs, deps, linkstatic = linkstatic, size = size, args = args, kernels = kernels, tags = tags)
def tf_gpu_cc_tests(
        srcs,
        deps,
        name = "",
        tags = [],
        size = "medium",
        linkstatic = 0,
        args = None,
        kernels = [],
        linkopts = []):
    """Creates one tf_gpu_cc_test per source file; `name` is unused."""
    for src in srcs:
        tf_gpu_cc_test(
            name = src_to_test_name(src),
            size = size,
            srcs = [src],
            args = args,
            kernels = kernels,
            linkopts = linkopts,
            linkstatic = linkstatic,
            tags = tags,
            deps = deps,
        )
# terminology changes: saving tf_cuda_* definition for compatibility
def tf_cuda_cc_tests(*args, **kwargs):
    """Deprecated alias for tf_gpu_cc_tests."""
    tf_gpu_cc_tests(*args, **kwargs)
def tf_java_test(
        name,
        srcs = [],
        deps = [],
        kernels = [],
        *args,
        **kwargs):
    """java_test that bundles the TF native libraries into its deps."""
    native.java_test(
        name = name,
        srcs = srcs,
        # fullversion = True: NOTE(review) presumably the JNI loader needs the
        # fully-versioned native library name — confirm against the Java loader.
        deps = deps + tf_binary_additional_srcs(fullversion = True) + tf_binary_dynamic_kernel_dsos() + tf_binary_dynamic_kernel_deps(kernels),
        *args,
        **kwargs
    )
register_extension_info(
    extension_name = "tf_java_test",
    label_regex_for_dep = "{extension_name}",
)
def _cuda_copts(opts = []):
    """Gets the appropriate set of copts for (maybe) CUDA compilation.

    If we're doing CUDA compilation, returns copts for our particular CUDA
    compiler. If we're not doing CUDA compilation, returns an empty list.
    `opts` are appended only when CUDA is configured.
    """
    return cuda_default_copts() + select({
        "//conditions:default": [],
        "@local_config_cuda//cuda:using_nvcc": ([
            "-nvcc_options=relaxed-constexpr",
            "-nvcc_options=ftz=true",
        ]),
        "@local_config_cuda//cuda:using_clang": ([
            "-fcuda-flush-denormals-to-zero",
        ]),
    }) + if_cuda_is_configured_compat(opts)
# Build defs for TensorFlow kernels
# When this target is built using --config=cuda, a cc_library is built
# that passes -DGOOGLE_CUDA=1 and '-x cuda', linking in additional
# libraries needed by GPU kernels.
#
# When this target is built using --config=rocm, a cc_library is built
# that passes -DTENSORFLOW_USE_ROCM and '-x rocm', linking in additional
# libraries needed by GPU kernels.
def tf_gpu_kernel_library(
        srcs,
        copts = [],
        cuda_copts = [],
        deps = [],
        hdrs = [],
        **kwargs):
    """cc_library for GPU kernel sources with CUDA/ROCm copts and deps."""
    copts = copts + tf_copts() + _cuda_copts(opts = cuda_copts) + rocm_copts(opts = cuda_copts)
    # Header modules conflict with the GPU compile flags.
    kwargs["features"] = kwargs.get("features", []) + ["-use_header_modules"]
    native.cc_library(
        srcs = srcs,
        hdrs = hdrs,
        copts = copts,
        deps = deps + if_cuda_is_configured_compat([
            clean_dep("//tensorflow/stream_executor/cuda:cudart_stub"),
            clean_dep("//tensorflow/core:gpu_lib"),
        ]) + if_rocm_is_configured([
            clean_dep("//tensorflow/core:gpu_lib"),
        ]),
        # Kernel registrations are side effects; keep the objects linked in.
        alwayslink = 1,
        **kwargs
    )
register_extension_info(
    extension_name = "tf_gpu_kernel_library",
    label_regex_for_dep = "{extension_name}",
)
def tf_gpu_library(deps = None, cuda_deps = None, copts = tf_copts(), **kwargs):
    """Generate a cc_library with a conditional set of CUDA dependencies.

    When the library is built with --config=cuda:
    - Both deps and cuda_deps are used as dependencies.
    - The cuda runtime is added as a dependency (if necessary).
    - The library additionally passes -DGOOGLE_CUDA=1 to the list of copts.
    - In addition, when the library is also built with TensorRT enabled, it
      additionally passes -DGOOGLE_TENSORRT=1 to the list of copts. Likewise
      for NCCL and -DGOOGLE_NCCL=1.

    Args:
      - cuda_deps: BUILD dependencies which will be linked if and only if:
          '--config=cuda' is passed to the bazel command line.
      - deps: dependencies which will always be linked.
      - copts: copts always passed to the cc_library.
      - kwargs: Any other argument to cc_library.
    """
    if not deps:
        deps = []
    if not cuda_deps:
        cuda_deps = []

    kwargs["features"] = kwargs.get("features", []) + ["-use_header_modules"]
    native.cc_library(
        deps = deps + if_cuda_is_configured_compat(cuda_deps + [
            clean_dep("//tensorflow/stream_executor/cuda:cudart_stub"),
            "@local_config_cuda//cuda:cuda_headers",
        ]) + if_rocm_is_configured(cuda_deps + [
            "@local_config_rocm//rocm:rocm_headers",
        ]),
        copts = (copts + if_cuda(["-DGOOGLE_CUDA=1", "-DNV_CUDNN_DISABLE_EXCEPTION"]) + if_rocm(["-DTENSORFLOW_USE_ROCM=1"]) + if_mkl(["-DINTEL_MKL=1"]) + if_mkl_open_source_only(["-DINTEL_MKL_DNN_ONLY"]) + if_enable_mkl(["-DENABLE_MKL"]) + if_tensorrt(["-DGOOGLE_TENSORRT=1"]) + if_nccl(["-DGOOGLE_NCCL=1"])),
        **kwargs
    )
register_extension_info(
    extension_name = "tf_gpu_library",
    label_regex_for_dep = "{extension_name}",
)
# terminology changes: saving tf_cuda_* definition for compatibility
def tf_cuda_library(*args, **kwargs):
    """Deprecated alias for tf_gpu_library."""
    tf_gpu_library(*args, **kwargs)
register_extension_info(
    extension_name = "tf_cuda_library",
    label_regex_for_dep = "{extension_name}",
)
def tf_kernel_library(
        name,
        prefix = None,
        srcs = None,
        gpu_srcs = None,
        hdrs = None,
        deps = None,
        alwayslink = 1,
        copts = None,
        gpu_copts = None,
        is_external = False,
        **kwargs):
    """A rule to build a TensorFlow OpKernel.

    May either specify srcs/hdrs or prefix.  Similar to tf_gpu_library,
    but with alwayslink=1 by default.  If prefix is specified:
      * prefix*.cc (except *.cu.cc) is added to srcs
      * prefix*.h (except *.cu.h) is added to hdrs
      * prefix*.cu.cc and prefix*.h (including *.cu.h) are added to gpu_srcs.
    With the exception that test files are excluded.
    For example, with prefix = "cast_op",
      * srcs = ["cast_op.cc"]
      * hdrs = ["cast_op.h"]
      * gpu_srcs = ["cast_op_gpu.cu.cc", "cast_op.h"]
      * "cast_op_test.cc" is excluded
    With prefix = "cwise_op"
      * srcs = ["cwise_op_abs.cc", ..., "cwise_op_tanh.cc"],
      * hdrs = ["cwise_ops.h", "cwise_ops_common.h"],
      * gpu_srcs = ["cwise_op_gpu_abs.cu.cc", ..., "cwise_op_gpu_tanh.cu.cc",
                    "cwise_ops.h", "cwise_ops_common.h",
                    "cwise_ops_gpu_common.cu.h"]
      * "cwise_ops_test.cc" is excluded
    """
    if not srcs:
        srcs = []
    if not hdrs:
        hdrs = []
    if not deps:
        deps = []
    if not copts:
        copts = []
    if not gpu_copts:
        gpu_copts = []
    textual_hdrs = []
    copts = copts + tf_copts(is_external = is_external) + if_cuda(["-DNV_CUDNN_DISABLE_EXCEPTION"])
    # Override EIGEN_STRONG_INLINE to inline when
    # --define=override_eigen_strong_inline=true to avoid long compiling time.
    # See https://github.com/tensorflow/tensorflow/issues/10521
    copts = copts + if_override_eigen_strong_inline(["/DEIGEN_STRONG_INLINE=inline"])
    if prefix:
        # Expand the prefix into srcs/hdrs/gpu_srcs globs, excluding tests.
        if native.glob([prefix + "*.cu.cc"], exclude = ["*test*"]):
            if not gpu_srcs:
                gpu_srcs = []
            gpu_srcs = gpu_srcs + native.glob(
                [prefix + "*.cu.cc", prefix + "*.h"],
                exclude = [prefix + "*test*"],
            )
        srcs = srcs + native.glob(
            [prefix + "*.cc"],
            exclude = [prefix + "*test*", prefix + "*.cu.cc"],
        )
        hdrs = hdrs + native.glob(
            [prefix + "*.h"],
            exclude = [prefix + "*test*", prefix + "*.cu.h", prefix + "*impl.h"],
        )
        # *impl.h headers are exposed as textual headers only.
        textual_hdrs = native.glob(
            [prefix + "*impl.h"],
            exclude = [prefix + "*test*", prefix + "*.cu.h"],
        )
    cuda_deps = [clean_dep("//tensorflow/core:gpu_lib")]
    if gpu_srcs:
        for gpu_src in gpu_srcs:
            if gpu_src.endswith(".cc") and not gpu_src.endswith(".cu.cc"):
                fail("{} not allowed in gpu_srcs. .cc sources must end with .cu.cc"
                    .format(gpu_src))
        tf_gpu_kernel_library(
            name = name + "_gpu",
            srcs = gpu_srcs,
            deps = deps,
            copts = gpu_copts,
            **kwargs
        )
        cuda_deps.extend([":" + name + "_gpu"])
    kwargs["tags"] = kwargs.get("tags", []) + [
        "req_dep=%s" % clean_dep("//tensorflow/core:gpu_lib"),
        "req_dep=@local_config_cuda//cuda:cuda_headers",
    ]
    tf_gpu_library(
        name = name,
        srcs = srcs,
        hdrs = hdrs,
        textual_hdrs = textual_hdrs,
        copts = copts,
        cuda_deps = cuda_deps,
        linkstatic = 1,  # Needed since alwayslink is broken in bazel b/27630669
        alwayslink = alwayslink,
        deps = deps,
        **kwargs
    )
    # TODO(gunan): CUDA dependency not clear here. Fix it.
    tf_cc_shared_object(
        name = "libtfkernel_%s.so" % name,
        srcs = srcs + hdrs,
        copts = copts,
        tags = ["manual", "notap"],
        deps = deps,
    )
register_extension_info(
    extension_name = "tf_kernel_library",
    label_regex_for_dep = "{extension_name}(_gpu)?",
)
def tf_mkl_kernel_library(
        name,
        prefix = None,
        srcs = None,
        hdrs = None,
        deps = None,
        alwayslink = 1,
        copts = tf_copts(allow_exceptions = True) + tf_openmp_copts()):
    """A rule to build MKL-based TensorFlow kernel libraries.

    As with tf_kernel_library, `prefix` expands into srcs/hdrs globs
    (excluding tests); sources are compiled only when MKL is enabled.
    """
    if not bool(srcs):
        srcs = []
    if not bool(hdrs):
        hdrs = []
    if prefix:
        srcs = srcs + native.glob(
            [prefix + "*.cc"],
            exclude = [prefix + "*test*"],
        )
        hdrs = hdrs + native.glob(
            [prefix + "*.h"],
            exclude = [prefix + "*test*"],
        )
    # -fno-exceptions in nocopts breaks compilation if header modules are enabled.
    disable_header_modules = ["-use_header_modules"]
    native.cc_library(
        name = name,
        srcs = if_mkl(srcs),
        hdrs = hdrs,
        deps = deps,
        alwayslink = alwayslink,
        copts = copts,
        features = disable_header_modules,
    )
register_extension_info(
    extension_name = "tf_mkl_kernel_library",
    label_regex_for_dep = "{extension_name}",
)
def _get_transitive_headers(hdrs, deps):
    """Obtain the header files for a target and its transitive dependencies.

    Args:
      hdrs: a list of header files
      deps: a list of targets that are direct dependencies

    Returns:
      a depset of the transitive headers
    """
    return depset(
        hdrs,
        transitive = [dep[CcInfo].compilation_context.headers for dep in deps],
    )
# Bazel rules for building swig files.
def _py_wrap_cc_impl(ctx):
    """Rule implementation: runs SWIG once to emit C++ and Python wrappers."""
    srcs = ctx.files.srcs
    if len(srcs) != 1:
        fail("Exactly one SWIG source file label must be specified.", "srcs")
    module_name = ctx.attr.module_name
    src = ctx.files.srcs[0]
    # Inputs: the .swig file, its includes, dep headers, SWIG's own library
    # files, and toolchain files.
    inputs = _get_transitive_headers([src] + ctx.files.swig_includes, ctx.attr.deps)
    inputs = depset(ctx.files._swiglib, transitive = [inputs])
    inputs = depset(ctx.files.toolchain_deps, transitive = [inputs])
    # Include paths: repository roots of all inputs plus the SWIG lib dirs.
    swig_include_dirs = depset(_get_repository_roots(ctx, inputs))
    swig_include_dirs = depset(sorted([f.dirname for f in ctx.files._swiglib]), transitive = [swig_include_dirs])
    args = [
        "-c++",
        "-python",
        "-module",
        module_name,
        "-o",
        ctx.outputs.cc_out.path,
        "-outdir",
        ctx.outputs.py_out.dirname,
    ]
    args += ["-l" + f.path for f in ctx.files.swig_includes]
    args += ["-I" + i for i in swig_include_dirs.to_list()]
    args += [src.path]
    outputs = [ctx.outputs.cc_out, ctx.outputs.py_out]
    ctx.actions.run(
        executable = ctx.executable._swig,
        arguments = args,
        inputs = inputs.to_list(),
        outputs = outputs,
        mnemonic = "PythonSwig",
        progress_message = "SWIGing " + src.path,
    )
    return struct(files = depset(outputs))
# Rule wrapping _py_wrap_cc_impl; outputs <module_name>.cc and
# <py_module_name>.py generated by SWIG.
_py_wrap_cc = rule(
    attrs = {
        "srcs": attr.label_list(
            mandatory = True,
            allow_files = True,
        ),
        "swig_includes": attr.label_list(
            allow_files = True,
        ),
        "deps": attr.label_list(
            allow_files = True,
            providers = [CcInfo],
        ),
        "toolchain_deps": attr.label_list(
            allow_files = True,
        ),
        "module_name": attr.string(mandatory = True),
        "py_module_name": attr.string(mandatory = True),
        # The SWIG binary itself, built for the host configuration.
        "_swig": attr.label(
            default = Label("@swig//:swig"),
            executable = True,
            cfg = "host",
        ),
        "_swiglib": attr.label(
            default = Label("@swig//:templates"),
            allow_files = True,
        ),
    },
    outputs = {
        "cc_out": "%{module_name}.cc",
        "py_out": "%{py_module_name}.py",
    },
    implementation = _py_wrap_cc_impl,
)
def _get_repository_roots(ctx, files):
    """Returns abnormal root directories under which files reside.

    When running a ctx.action, source files within the main repository are all
    relative to the current directory; however, files that are generated or exist
    in remote repositories will have their root directory be a subdirectory,
    e.g. bazel-out/local-fastbuild/genfiles/external/jpeg_archive. This function
    returns the set of these devious directories, ranked and sorted by popularity
    in order to hopefully minimize the number of I/O system calls within the
    compiler, because includes have quadratic complexity.
    """
    # Negative counts so that a plain ascending sort yields most-popular first.
    counts = {}
    for f in files.to_list():
        prefix = f.root.path
        if prefix:
            counts[prefix] = counts.get(prefix, 0) - 1
        workspace = f.owner.workspace_root
        if workspace:
            prefix = (prefix + "/" + workspace) if prefix else workspace
        # Count the combined prefix as well (this re-counts a bare root when
        # there is no workspace component, matching the ranking intent).
        if prefix:
            counts[prefix] = counts.get(prefix, 0) - 1
    return [k for _, k in sorted([(v, k) for k, v in counts.items()])]
# Bazel rule for collecting the header files that a target depends on.
def _transitive_hdrs_impl(ctx):
    """Rule implementation: outputs all transitive headers of `deps`."""
    outputs = _get_transitive_headers([], ctx.attr.deps)
    return struct(files = outputs)
_transitive_hdrs = rule(
    attrs = {
        "deps": attr.label_list(
            allow_files = True,
            providers = [CcInfo],
        ),
    },
    implementation = _transitive_hdrs_impl,
)
def transitive_hdrs(name, deps = [], **kwargs):
    """Creates filegroup `name` holding all headers transitively used by deps."""
    _transitive_hdrs(name = name + "_gather", deps = deps)
    native.filegroup(name = name, srcs = [":" + name + "_gather"])
# Create a header only library that includes all the headers exported by
# the libraries in deps.
def cc_header_only_library(name, deps = [], includes = [], extra_deps = [], **kwargs):
    """cc_library exposing only the transitive headers of `deps`."""
    _transitive_hdrs(name = name + "_gather", deps = deps)
    native.cc_library(
        name = name,
        hdrs = [":" + name + "_gather"],
        includes = includes,
        deps = extra_deps,
        **kwargs
    )
def tf_custom_op_library_additional_deps():
    """Header-only deps needed when compiling a custom op library."""
    return [
        "@com_google_protobuf//:protobuf_headers",
        clean_dep("//third_party/eigen3"),
        clean_dep("//tensorflow/core:framework_headers_lib"),
    ] + if_windows(["//tensorflow/python:pywrap_tensorflow_import_lib"])
# A list of targets that contains the implementation of
# tf_custom_op_library_additional_deps. It's used to generate a DEF file for
# exporting symbols from _pywrap_tensorflow.dll on Windows.
def tf_custom_op_library_additional_deps_impl():
    """Implementation targets backing tf_custom_op_library_additional_deps."""
    return [
        "@com_google_protobuf//:protobuf",
        "@nsync//:nsync_cpp",
        # for //third_party/eigen3
        clean_dep("//third_party/eigen3"),
        # for //tensorflow/core:framework_headers_lib
        clean_dep("//tensorflow/core:framework"),
        clean_dep("//tensorflow/core:reader_base"),
    ]
# Traverse the dependency graph along the "deps" attribute of the
# target and return a struct with one field called 'tf_collected_deps'.
# tf_collected_deps will be the union of the deps of the current target
# and the tf_collected_deps of the dependencies of this target.
# Aspect implementation: accumulate the union of this target's direct dep
# labels and each dep's own tf_collected_deps (attached by this same aspect).
def _collect_deps_aspect_impl(target, ctx):
    alldeps = depset()
    if hasattr(ctx.rule.attr, "deps"):
        for dep in ctx.rule.attr.deps:
            # Record the dep itself, then merge in whatever it collected.
            alldeps = depset([dep.label], transitive = [alldeps])
            if hasattr(dep, "tf_collected_deps"):
                alldeps = depset(transitive = [alldeps, dep.tf_collected_deps])
    return struct(tf_collected_deps = alldeps)
# Aspect propagated along "deps"; used by check_deps below.
collect_deps_aspect = aspect(
    attr_aspects = ["deps"],
    implementation = _collect_deps_aspect_impl,
)
def _dep_label(dep):
    """Format a dependency's Label as "package:name" for failure messages."""
    lbl = dep.label
    return "%s:%s" % (lbl.package, lbl.name)
# This rule checks that the transitive dependencies of targets listed
# in the 'deps' attribute don't depend on the targets listed in
# the 'disallowed_deps' attribute.
# Implementation of check_deps: fail the build when any target in `deps`
# transitively depends on a target listed in `disallowed_deps`.
def _check_deps_impl(ctx):
    disallowed_deps = ctx.attr.disallowed_deps
    for input_dep in ctx.attr.deps:
        # tf_collected_deps is attached by collect_deps_aspect; skip targets
        # the aspect did not annotate.
        if not hasattr(input_dep, "tf_collected_deps"):
            continue
        for dep in input_dep.tf_collected_deps.to_list():
            for disallowed_dep in disallowed_deps:
                if dep == disallowed_dep.label:
                    fail(
                        _dep_label(input_dep) + " cannot depend on " + _dep_label(
                            disallowed_dep,
                        ),
                    )
    return struct()
# Rule wrapper around _check_deps_impl; both attrs are mandatory.
check_deps = rule(
    _check_deps_impl,
    attrs = {
        "deps": attr.label_list(
            aspects = [collect_deps_aspect],
            mandatory = True,
            allow_files = True,
        ),
        "disallowed_deps": attr.label_list(
            mandatory = True,
            allow_files = True,
        ),
    },
)
def tf_custom_op_library(name, srcs = [], gpu_srcs = [], deps = [], linkopts = [], copts = [], **kwargs):
    """Helper to build a dynamic library (.so) from the sources containing implementations of custom ops and kernels.

    Args:
      name: name of the generated shared-object target.
      srcs: CPU sources.
      gpu_srcs: optional GPU sources, built into a companion <name>_gpu cc_library.
      deps: extra deps, in addition to tf_custom_op_library_additional_deps().
      linkopts: extra linker options for the shared object.
      copts: extra compiler options.
      **kwargs: forwarded to the generated cc_library / tf_cc_shared_object.
    """
    # Header-only CUDA / ROCm deps; the _gpu library is appended below when
    # gpu_srcs is given.
    cuda_deps = [
        clean_dep("//tensorflow/core:stream_executor_headers_lib"),
        "@local_config_cuda//cuda:cuda_headers",
        "@local_config_cuda//cuda:cudart_static",
    ]
    rocm_deps = [
        clean_dep("//tensorflow/core:stream_executor_headers_lib"),
    ]
    deps = deps + tf_custom_op_library_additional_deps()
    # Override EIGEN_STRONG_INLINE to inline when
    # --define=override_eigen_strong_inline=true to avoid long compiling time.
    # See https://github.com/tensorflow/tensorflow/issues/10521
    copts = copts + if_override_eigen_strong_inline(["/DEIGEN_STRONG_INLINE=inline"]) + if_cuda(["-DNV_CUDNN_DISABLE_EXCEPTION"])
    if gpu_srcs:
        basename = name.split(".")[0]
        native.cc_library(
            name = basename + "_gpu",
            srcs = gpu_srcs,
            copts = copts + _cuda_copts() + if_tensorrt(["-DGOOGLE_TENSORRT=1"]) + if_nccl(["-DGOOGLE_NCCL=1"]),
            features = if_cuda(["-use_header_modules"]),
            deps = deps + if_cuda_is_configured_compat(cuda_deps) + if_rocm_is_configured(rocm_deps),
            **kwargs
        )
        cuda_deps.extend([":" + basename + "_gpu"])
        rocm_deps.extend([":" + basename + "_gpu"])
    # Custom-op libraries must not statically link the TF framework/lib;
    # check_deps fails the build if those sneak into the transitive deps.
    check_deps(
        name = name + "_check_deps",
        disallowed_deps = [
            clean_dep("//tensorflow/core:framework"),
            clean_dep("//tensorflow/core:lib"),
        ],
        deps = deps + if_cuda_is_configured_compat(cuda_deps) + if_rocm_is_configured(rocm_deps),
    )
    tf_cc_shared_object(
        name = name,
        srcs = srcs,
        deps = deps + if_cuda_is_configured_compat(cuda_deps) + if_rocm_is_configured(rocm_deps),
        # The check runs as a data dep only in static builds.
        data = if_static([name + "_check_deps"]),
        copts = copts + tf_copts(is_external = True),
        features = ["windows_export_all_symbols"],
        linkopts = linkopts + select({
            "//conditions:default": [
                "-lm",
            ],
            clean_dep("//tensorflow:windows"): [],
            clean_dep("//tensorflow:macos"): [],
        }),
        **kwargs
    )
# No-op in OSS (see register_extension_info); kept for internal tooling.
register_extension_info(
    extension_name = "tf_custom_op_library",
    label_regex_for_dep = "{extension_name}",
)
# py_library wrapper for custom ops: the compiled .so files go in via `dso`
# (as data). `kernels` is accepted for call-site compatibility but unused.
def tf_custom_op_py_library(
        name,
        srcs = [],
        dso = [],
        kernels = [],
        srcs_version = "PY2AND3",
        visibility = None,
        deps = []):
    _ignore = [kernels]
    native.py_library(
        name = name,
        data = dso,
        srcs = srcs,
        srcs_version = srcs_version,
        visibility = visibility,
        deps = deps,
    )
# No-op in OSS (see register_extension_info); kept for internal tooling.
register_extension_info(
    extension_name = "tf_custom_op_py_library",
    label_regex_for_dep = "{extension_name}",
)
# In tf_py_wrap_cc generated libraries
# module init functions are not exported unless
# they contain one of the keywords in the version file
# this prevents custom python modules.
# This function attempts to append init_module_name to list of
# exported functions in version script
# Implementation: rewrite the version-script / exported-symbol-list template
# so the module's init symbols (init_<mod>, _init_<mod>, PyInit_*) are
# exported from the wrapped shared library.
def _append_init_to_versionscript_impl(ctx):
    mod_name = ctx.attr.module_name
    if ctx.attr.is_version_script:
        # ld version script: splice the symbols into the "global:" section.
        ctx.actions.expand_template(
            template = ctx.file.template_file,
            output = ctx.outputs.versionscript,
            substitutions = {
                "global:": "global:\n     init_%s;\n     _init_%s;\n     PyInit_*;\n     _PyInit_*;" % (mod_name, mod_name),
            },
            is_executable = False,
        )
    else:
        # macOS exported-symbols list: append after the *tensorflow* glob.
        ctx.actions.expand_template(
            template = ctx.file.template_file,
            output = ctx.outputs.versionscript,
            substitutions = {
                "*tensorflow*": "*tensorflow*\ninit_%s\n_init_%s\nPyInit_*\n_PyInit_*\n" % (mod_name, mod_name),
            },
            is_executable = False,
        )
# Rule producing <name>.lds from template_file with init symbols appended.
_append_init_to_versionscript = rule(
    attrs = {
        "module_name": attr.string(mandatory = True),
        "template_file": attr.label(
            allow_single_file = True,
            mandatory = True,
        ),
        "is_version_script": attr.bool(
            default = True,
            doc = "whether target is a ld version script or exported symbol list",
            mandatory = False,
        ),
    },
    outputs = {"versionscript": "%{name}.lds"},
    implementation = _append_init_to_versionscript_impl,
)
def tf_py_wrap_cc(
        name,
        srcs,
        swig_includes = [],
        deps = [],
        copts = [],
        version_script = None,
        **kwargs):
    """Builds a Python extension module.

    Runs SWIG over `srcs` (via _py_wrap_cc), links the generated C++ into a
    shared object with the module's init symbols exported, and wraps it all
    in a py_library named `name`.
    """
    module_name = name.split("/")[-1]
    # Convert a rule name such as foo/bar/baz to foo/bar/_baz.so
    # and use that as the name for the rule producing the .so file.
    cc_library_base = "/".join(name.split("/")[:-1] + ["_" + module_name])
    # TODO(b/137885063): tf_cc_shared_object needs to be cleaned up; we really
    # shouldn't be passing a name qualified with .so here.
    cc_library_name = cc_library_base + ".so"
    cc_library_pyd_name = "/".join(
        name.split("/")[:-1] + ["_" + module_name + ".pyd"],
    )
    extra_deps = []
    # SWIG wrapping step; produces <module_name>.cc used below.
    _py_wrap_cc(
        name = name + "_py_wrap",
        srcs = srcs,
        module_name = module_name,
        py_module_name = name,
        swig_includes = swig_includes,
        toolchain_deps = ["@bazel_tools//tools/cpp:current_cc_toolchain"],
        deps = deps + extra_deps,
    )
    # Default export-control file: symbol list on macOS, version script
    # elsewhere.
    if not version_script:
        version_script = select({
            "@local_config_cuda//cuda:darwin": clean_dep("//tensorflow:tf_exported_symbols.lds"),
            "//conditions:default": clean_dep("//tensorflow:tf_version_script.lds"),
        })
    vscriptname = name + "_versionscript"
    # Append this module's init symbols to the export-control file.
    _append_init_to_versionscript(
        name = vscriptname,
        is_version_script = select({
            "@local_config_cuda//cuda:darwin": False,
            "//conditions:default": True,
        }),
        module_name = module_name,
        template_file = version_script,
    )
    extra_linkopts = select({
        "@local_config_cuda//cuda:darwin": [
            "-Wl,-exported_symbols_list,$(location %s.lds)" % vscriptname,
        ],
        clean_dep("//tensorflow:windows"): [],
        "//conditions:default": [
            "-Wl,--version-script",
            "$(location %s.lds)" % vscriptname,
        ],
    })
    extra_deps += select({
        "@local_config_cuda//cuda:darwin": [
            "%s.lds" % vscriptname,
        ],
        clean_dep("//tensorflow:windows"): [],
        "//conditions:default": [
            "%s.lds" % vscriptname,
        ],
    })
    tf_cc_shared_object(
        name = cc_library_name,
        srcs = [module_name + ".cc"],
        copts = copts + if_not_windows([
            "-Wno-self-assign",
            "-Wno-sign-compare",
            "-Wno-write-strings",
        ]),
        linkopts = extra_linkopts,
        linkstatic = 1,
        deps = deps + extra_deps,
        **kwargs
    )
    # When a non-versioned .so is added as a 'src' to a bazel target, it uses
    # -l%(so_name) instead of -l:%(so_file) during linking. When -l%(so_name)
    # is passed to ld, it will look for an associated file with the schema
    # lib%(so_name).so. Since pywrap_tensorflow is not explicitly versioned
    # and is not prefixed with lib_, we add a rule for the creation of an .so
    # file with the canonical lib schema (e.g. libNAME.so), so that
    # -l%(so_name) is resolved during linking.
    #
    # See: https://github.com/bazelbuild/bazel/blob/7a6808260a733d50983c1adf0cf5a7493472267f/src/main/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollector.java#L319
    for pattern in SHARED_LIBRARY_NAME_PATTERNS:
        name_os = pattern % (cc_library_base, "")
        native.genrule(
            name = name_os + "_rule",
            srcs = [":" + cc_library_name],
            outs = [name_os],
            cmd = "cp $< $@",
        )
    # Windows variant: copy the .so to a .pyd so Python can import it.
    native.genrule(
        name = "gen_" + cc_library_pyd_name,
        srcs = [":" + cc_library_name],
        outs = [cc_library_pyd_name],
        cmd = "cp $< $@",
    )
    native.py_library(
        name = name,
        srcs = [":" + name + ".py"],
        srcs_version = "PY2AND3",
        data = select({
            clean_dep("//tensorflow:windows"): [":" + cc_library_pyd_name],
            "//conditions:default": [":" + cc_library_name],
        }),
    )
# This macro is for running python tests against system installed pip package
# on Windows.
#
# py_test is built as an executable python zip file on Windows, which contains all
# dependencies of the target. Because of the C++ extensions, it would be very
# inefficient if the py_test zips all runfiles, plus we don't need them when running
# tests against system installed pip package. So we'd like to get rid of the deps
# of py_test in this case.
#
# In order to trigger the tests without bazel clean after getting rid of deps,
# we introduce the following :
# 1. When --define=no_tensorflow_py_deps=true, the py_test depends on a marker
# file of the pip package, the test gets to rerun when the pip package change.
# Note that this only works on Windows. See the definition of
# //third_party/tensorflow/tools/pip_package:win_pip_package_marker for specific reasons.
# 2. When --define=no_tensorflow_py_deps=false (by default), it's a normal py_test.
# py_test wrapper: with --define=no_tensorflow_py_deps=true the deps are
# dropped and the test instead depends on the pip-package marker file (see
# the comment block above for the rationale).
def py_test(deps = [], data = [], kernels = [], **kwargs):
    # Python version placeholder
    native.py_test(
        # TODO(jlebar): Ideally we'd use tcmalloc here.,
        deps = select({
            "//conditions:default": deps,
            clean_dep("//tensorflow:no_tensorflow_py_deps"): [],
        }),
        data = data + select({
            "//conditions:default": [],
            clean_dep("//tensorflow:no_tensorflow_py_deps"): ["//tensorflow/tools/pip_package:win_pip_package_marker"],
        }) + tf_binary_dynamic_kernel_dsos(),
        exec_compatible_with = tf_exec_compatible_with(kwargs),
        **kwargs
    )
# No-op in OSS (see register_extension_info); kept for internal tooling.
register_extension_info(
    extension_name = "py_test",
    label_regex_for_dep = "{extension_name}",
)
# Similar to py_test above, this macro is used to exclude dependencies for some py_binary
# targets in order to reduce the size of //tensorflow/tools/pip_package:simple_console_windows.
# See https://github.com/tensorflow/tensorflow/issues/22390
# py_binary wrapper that can drop all deps under
# --define=no_tensorflow_py_deps=true (see comment above for why).
def py_binary(name, deps = [], **kwargs):
    # Add an extra target for dependencies to avoid nested select statement.
    native.py_library(
        name = name + "_deps",
        deps = deps,
    )
    # Python version placeholder
    native.py_binary(
        name = name,
        deps = select({
            "//conditions:default": [":" + name + "_deps"],
            clean_dep("//tensorflow:no_tensorflow_py_deps"): [],
        }),
        **kwargs
    )
# No-op in OSS (see register_extension_info); kept for internal tooling.
register_extension_info(
    extension_name = "py_binary",
    label_regex_for_dep = "{extension_name}",
)
def tf_py_test(
        name,
        srcs,
        size = "medium",
        data = [],
        main = None,
        args = [],
        tags = [],
        shard_count = 1,
        additional_deps = [],
        additional_visibility = [],
        kernels = [],
        flaky = 0,
        xla_enable_strict_auto_jit = False,
        xla_enabled = False,
        grpc_enabled = False,
        **kwargs):
    """Create one or more python tests with extra tensorflow dependencies."""
    xla_test_true_list = []
    # xla_enable_strict_auto_jit is used to run Tensorflow unit tests with all XLA compilable
    # kernels compiled with XLA.
    if xla_enable_strict_auto_jit:
        xla_enabled = True
        xla_test_true_list += ["//tensorflow/python:is_xla_test_true"]
    if xla_enabled:
        additional_deps = additional_deps + tf_additional_xla_deps_py()
    if grpc_enabled:
        additional_deps = additional_deps + tf_additional_grpc_deps_py()
    # Python version placeholder
    py_test(
        name = name,
        size = size,
        srcs = srcs,
        args = args,
        data = data,
        flaky = flaky,
        kernels = kernels,
        main = main,
        shard_count = shard_count,
        srcs_version = "PY2AND3",
        tags = tags,
        visibility = [clean_dep("//tensorflow:internal")] +
                     additional_visibility,
        # depset also deduplicates the extra deps added above.
        deps = depset([
            clean_dep("//tensorflow/python:extra_py_tests_deps"),
            clean_dep("//tensorflow/python:gradient_checker"),
        ] + additional_deps + xla_test_true_list),
        **kwargs
    )
# No-op in OSS (see register_extension_info); kept for internal tooling.
register_extension_info(
    extension_name = "tf_py_test",
    label_regex_map = {"additional_deps": "deps:{extension_name}"},
)
# Create a CPU test <name> and a GPU test <name>_gpu from the same sources.
def gpu_py_test(
        name,
        srcs,
        size = "medium",
        data = [],
        main = None,
        args = [],
        shard_count = 1,
        additional_deps = [],
        kernels = [],
        tags = [],
        flaky = 0,
        xla_enable_strict_auto_jit = False,
        xla_enabled = False,
        grpc_enabled = False):
    # TODO(b/122522101): Don't ignore xla_enable_strict_auto_jit and enable additional
    # XLA tests once enough compute resources are available.
    _ignored = [xla_enable_strict_auto_jit]
    if main == None:
        main = name + ".py"
    for config in ["cpu", "gpu"]:
        test_name = name
        test_tags = tags
        if config == "gpu":
            test_name += "_gpu"
            test_tags = test_tags + tf_gpu_tests_tags()
        tf_py_test(
            name = test_name,
            size = size,
            srcs = srcs,
            additional_deps = additional_deps,
            args = args,
            data = data,
            flaky = flaky,
            grpc_enabled = grpc_enabled,
            kernels = kernels,
            main = main,
            shard_count = shard_count,
            tags = test_tags,
            xla_enabled = xla_enabled,
            xla_enable_strict_auto_jit = False,
        )
# No-op in OSS (see register_extension_info); kept for internal tooling.
register_extension_info(
    extension_name = "gpu_py_test",
    label_regex_map = {"additional_deps": "additional_deps:{extension_name}"},
)
# terminology changes: saving cuda_* definition for compatibility
def cuda_py_test(*args, **kwargs):
    gpu_py_test(*args, **kwargs)
register_extension_info(
    extension_name = "cuda_py_test",
    label_regex_map = {"additional_deps": "additional_deps:{extension_name}"},
)
# tf_py_test variant tagged for SYCL execution (adds tf_sycl_tests_tags()).
def sycl_py_test(
        name,
        srcs,
        size = "medium",
        data = [],
        main = None,
        args = [],
        shard_count = 1,
        additional_deps = [],
        kernels = [],
        tags = [],
        flaky = 0,
        xla_enabled = False,
        grpc_enabled = False):
    test_tags = tags + tf_sycl_tests_tags()
    tf_py_test(
        name = name,
        size = size,
        srcs = srcs,
        additional_deps = additional_deps,
        args = args,
        data = data,
        flaky = flaky,
        grpc_enabled = grpc_enabled,
        kernels = kernels,
        main = main,
        shard_count = shard_count,
        tags = test_tags,
        xla_enabled = xla_enabled,
    )
# No-op in OSS (see register_extension_info); kept for internal tooling.
register_extension_info(
    extension_name = "sycl_py_test",
    label_regex_map = {"additional_deps": "additional_deps:{extension_name}"},
)
# Create one tf_py_test per source file; each test is named after its file
# (optionally prefixed with "<prefix>_").
def py_tests(
        name,
        srcs,
        size = "medium",
        additional_deps = [],
        kernels = [],
        data = [],
        tags = [],
        shard_count = 1,
        prefix = "",
        xla_enable_strict_auto_jit = False,
        xla_enabled = False,
        grpc_enabled = False):
    for src in srcs:
        # "dir/foo_test.py" -> "foo_test"
        test_name = src.split("/")[-1].split(".")[0]
        if prefix:
            test_name = "%s_%s" % (prefix, test_name)
        tf_py_test(
            name = test_name,
            size = size,
            srcs = [src],
            additional_deps = additional_deps,
            data = data,
            grpc_enabled = grpc_enabled,
            kernels = kernels,
            main = src,
            shard_count = shard_count,
            tags = tags,
            xla_enabled = xla_enabled,
            xla_enable_strict_auto_jit = xla_enable_strict_auto_jit,
        )
# py_tests variant tagged for GPU execution (adds tf_gpu_tests_tags()).
def gpu_py_tests(
        name,
        srcs,
        size = "medium",
        additional_deps = [],
        kernels = [],
        data = [],
        shard_count = 1,
        tags = [],
        prefix = "",
        xla_enable_strict_auto_jit = False,
        xla_enabled = False,
        grpc_enabled = False):
    # TODO(b/122522101): Don't ignore xla_enable_strict_auto_jit and enable additional
    # XLA tests once enough compute resources are available.
    _ignored = [xla_enable_strict_auto_jit]
    test_tags = tags + tf_gpu_tests_tags()
    py_tests(
        name = name,
        size = size,
        srcs = srcs,
        additional_deps = additional_deps,
        data = data,
        grpc_enabled = grpc_enabled,
        kernels = kernels,
        prefix = prefix,
        shard_count = shard_count,
        tags = test_tags,
        xla_enabled = xla_enabled,
        xla_enable_strict_auto_jit = False,
    )
# terminology changes: saving cuda_* definition for compatibility
def cuda_py_tests(*args, **kwargs):
    gpu_py_tests(*args, **kwargs)
# Creates a genrule named <name> for running tools/proto_text's generator to
# make the proto_text functions, for the protos passed in <srcs>.
#
# Return a struct with fields (hdrs, srcs) containing the names of the
# generated files.
# Run tools/proto_text's generator over `srcs`, producing .pb_text.{h,cc} and
# .pb_text-impl.h files, plus a filegroup of the headers and a cc_library
# named <name> built from the generated sources.
def tf_generate_proto_text_sources(name, srcs_relative_dir, srcs, protodeps = [], deps = [], visibility = None):
    out_hdrs = (
        [
            p.replace(".proto", ".pb_text.h")
            for p in srcs
        ] + [p.replace(".proto", ".pb_text-impl.h") for p in srcs]
    )
    out_srcs = [p.replace(".proto", ".pb_text.cc") for p in srcs]
    native.genrule(
        name = name + "_srcs",
        srcs = srcs + protodeps + [clean_dep("//tensorflow/tools/proto_text:placeholder.txt")],
        outs = out_hdrs + out_srcs,
        visibility = visibility,
        cmd =
            "$(location //tensorflow/tools/proto_text:gen_proto_text_functions) " +
            "$(@D) " + srcs_relative_dir + " $(SRCS)",
        tools = [
            clean_dep("//tensorflow/tools/proto_text:gen_proto_text_functions"),
        ],
    )
    native.filegroup(
        name = name + "_hdrs",
        srcs = out_hdrs,
        visibility = visibility,
    )
    native.cc_library(
        name = name,
        srcs = out_srcs,
        hdrs = out_hdrs,
        visibility = visibility,
        deps = deps,
    )
def tf_genrule_cmd_append_to_srcs(to_append):
    """Genrule cmd: concatenate $(SRCS) into $@, then append `to_append` on a fresh line."""
    return "cat $(SRCS) > $(@) && echo >> $(@) && echo %s >> $(@)" % to_append
# Genrule producing util/version_info.cc from the git state captured by
# @local_config_git; GIT_TAG_OVERRIDE can override the tag at build time.
def tf_version_info_genrule():
    native.genrule(
        name = "version_info_gen",
        srcs = [
            clean_dep("@local_config_git//:gen/spec.json"),
            clean_dep("@local_config_git//:gen/head"),
            clean_dep("@local_config_git//:gen/branch_ref"),
        ],
        outs = ["util/version_info.cc"],
        cmd =
            "$(location //tensorflow/tools/git:gen_git_source) --generate $(SRCS) \"$@\" --git_tag_override=$${GIT_TAG_OVERRIDE:-}",
        # local = 1 because the tool reads the environment (GIT_TAG_OVERRIDE).
        local = 1,
        tools = [clean_dep("//tensorflow/tools/git:gen_git_source")],
    )
# Genrule producing platform/build_info.py with CUDA/ROCm configuration and,
# on Windows, the runtime DLL names derived from TF_CUDA/CUDNN_VERSION.
def tf_py_build_info_genrule():
    native.genrule(
        name = "py_build_info_gen",
        outs = ["platform/build_info.py"],
        cmd =
            "$(location //tensorflow/tools/build_info:gen_build_info) --raw_generate \"$@\" " +
            "  --is_config_cuda " + if_cuda("True", "False") +
            "  --is_config_rocm " + if_rocm("True", "False") +
            "  --key_value " +
            if_cuda(" cuda_version_number=$${TF_CUDA_VERSION:-} cudnn_version_number=$${TF_CUDNN_VERSION:-} ", "") +
            if_windows(" msvcp_dll_name=msvcp140.dll ", "") +
            if_windows_cuda(" ".join([
                "nvcuda_dll_name=nvcuda.dll",
                "cudart_dll_name=cudart64_$$(echo $${TF_CUDA_VERSION:-} | sed \"s/\\.//\").dll",
                "cudnn_dll_name=cudnn64_$${TF_CUDNN_VERSION:-}.dll",
            ]), ""),
        # local = 1 because the tool reads TF_* environment variables.
        local = 1,
        tools = [clean_dep("//tensorflow/tools/build_info:gen_build_info")],
    )
# cc_library whose deps differ by platform: `deps` off-Android, `android_deps`
# on Android, `common_deps` always.
def cc_library_with_android_deps(
        deps,
        android_deps = [],
        common_deps = [],
        copts = tf_copts(),
        **kwargs):
    deps = if_not_android(deps) + if_android(android_deps) + common_deps
    native.cc_library(deps = deps, copts = copts, **kwargs)
# No-op in OSS (see register_extension_info); kept for internal tooling.
register_extension_info(
    extension_name = "cc_library_with_android_deps",
    label_regex_for_dep = "{extension_name}",
)
def tensorflow_opensource_extra_deps():
    """Extra dependencies for the open-source build (none in OSS)."""
    return list()
# buildozer: disable=function-docstring-args
def pybind_extension(
        name,
        srcs,
        module_name,
        hdrs = [],
        features = [],
        srcs_version = "PY2AND3",
        data = [],
        copts = None,
        linkopts = [],
        deps = [],
        visibility = None,
        testonly = None,
        licenses = None,
        compatible_with = None,
        restricted_to = None,
        deprecation = None):
    """Builds a generic Python extension module.

    Produces <name>.so (copied to <name>.pyd for Windows) exporting only the
    module's init symbols, wrapped in a py_library named `name`.
    """
    _ignore = [module_name]
    # Split "pkg/sub/mod" into prefix "pkg/sub/" and short name "mod".
    p = name.rfind("/")
    if p == -1:
        sname = name
        prefix = ""
    else:
        sname = name[p + 1:]
        prefix = name[:p + 1]
    so_file = "%s%s.so" % (prefix, sname)
    pyd_file = "%s%s.pyd" % (prefix, sname)
    # Python 2 and Python 3 module-init entry points.
    symbol = "init%s" % sname
    symbol2 = "init_%s" % sname
    symbol3 = "PyInit_%s" % sname
    exported_symbols_file = "%s-exported-symbols.lds" % name
    version_script_file = "%s-version-script.lds" % name
    # macOS exported-symbols list (symbols carry a leading underscore there).
    native.genrule(
        name = name + "_exported_symbols",
        outs = [exported_symbols_file],
        cmd = "echo '_%s\n_%s\n_%s' >$@" % (symbol, symbol2, symbol3),
        output_licenses = ["unencumbered"],
        visibility = ["//visibility:private"],
        testonly = testonly,
    )
    # ELF version script: export only the init symbols, hide everything else.
    native.genrule(
        name = name + "_version_script",
        outs = [version_script_file],
        cmd = "echo '{global:\n %s;\n %s;\n %s;\n local: *;};' >$@" % (symbol, symbol2, symbol3),
        output_licenses = ["unencumbered"],
        visibility = ["//visibility:private"],
        testonly = testonly,
    )
    native.cc_binary(
        name = so_file,
        srcs = srcs + hdrs,
        data = data,
        copts = copts,
        linkopts = linkopts + _rpath_linkopts(name) + select({
            "@local_config_cuda//cuda:darwin": [
                "-Wl,-exported_symbols_list,$(location %s)" % exported_symbols_file,
            ],
            clean_dep("//tensorflow:windows"): [],
            "//conditions:default": [
                "-Wl,--version-script",
                "$(location %s)" % version_script_file,
            ],
        }),
        # The .lds files are deps so $(location ...) above resolves.
        deps = deps + [
            exported_symbols_file,
            version_script_file,
        ],
        features = features,
        linkshared = 1,
        testonly = testonly,
        licenses = licenses,
        visibility = visibility,
        deprecation = deprecation,
        restricted_to = restricted_to,
        compatible_with = compatible_with,
    )
    # Windows import name: copy the .so to a .pyd.
    native.genrule(
        name = name + "_pyd_copy",
        srcs = [so_file],
        outs = [pyd_file],
        cmd = "cp $< $@",
        output_to_bindir = True,
        visibility = visibility,
        deprecation = deprecation,
        restricted_to = restricted_to,
        compatible_with = compatible_with,
    )
    native.py_library(
        name = name,
        data = select({
            "@org_tensorflow//tensorflow:windows": [pyd_file],
            "//conditions:default": [so_file],
        }),
        srcs_version = srcs_version,
        licenses = licenses,
        testonly = testonly,
        visibility = visibility,
        deprecation = deprecation,
        restricted_to = restricted_to,
        compatible_with = compatible_with,
    )
# buildozer: enable=function-docstring-args
def tf_python_pybind_extension(
        name,
        srcs,
        module_name,
        hdrs = [],
        features = [],
        copts = None,
        deps = []):
    """A wrapper macro for pybind_extension that is used in tensorflow/python/BUILD.

    It is used for targets under //third_party/tensorflow/python that link
    against libtensorflow_framework.so and pywrap_tensorflow_internal.so.
    """
    pybind_extension(
        name,
        # Include the framework shared objects alongside the sources.
        srcs + tf_binary_additional_srcs(),
        module_name,
        hdrs = hdrs,
        features = features,
        copts = copts,
        deps = deps + tf_binary_pybind_deps() + if_mkl_ml(["//third_party/mkl:intel_binary_blob"]),
    )
def if_cuda_or_rocm(if_true, if_false = []):
    """Shorthand for select()'ing whether to build for either CUDA or ROCm.

    Returns a select statement which evaluates to
       if_true if we're building with either CUDA or ROCm enabled.
       if_false, otherwise.

    Sometimes a target has additional CUDA or ROCm specific dependencies.
    The `if_cuda` / `if_rocm` functions are used to specify these additional
    dependencies. For eg, see the `//tensorflow/core/kernels:bias_op` target

    If the same additional dependency is needed for both CUDA and ROCm
    (for eg. `reduction_ops` dependency for the `bias_op` target above),
    then specifying that dependency in both `if_cuda` and `if_rocm` will
    result in both those functions returning a select statement, which contains
    the same dependency, which then leads to a duplicate dependency bazel error.

    In order to work around this error, any additional dependency that is common
    to both the CUDA and ROCm platforms, should be specified using this function.
    Doing so will eliminate the cause of the bazel error (i.e. the same
    dependency showing up in two different select statements)
    """
    return select({
        "@local_config_cuda//cuda:using_nvcc": if_true,
        "@local_config_cuda//cuda:using_clang": if_true,
        "@local_config_rocm//rocm:using_hipcc": if_true,
        "//conditions:default": if_false,
    })
def tf_jit_compilation_passes_extra_deps():
    """Extra deps for JIT compilation passes (none in the OSS build)."""
    return list()
# select() helper: `if_true` when built with //tensorflow:with_mlir_support,
# else `if_false`.
def if_mlir(if_true, if_false = []):
    return select({
        "//conditions:default": if_false,
        "//tensorflow:with_mlir_support": if_true,
    })
# TODO(b/138724071): Remove when build is stable.
def if_mlir_tflite(if_true, if_false = []):
    """TFLite always builds with MLIR internally, so unconditionally pick if_true."""
    _ignored = [if_false]  # parameter kept only for call-site compatibility
    return if_true
def tfcompile_extra_flags():
    """Extra command-line flags for tfcompile (none in the OSS build)."""
    return ""
def tf_grpc_dependency():
    """Label of the gRPC (C) dependency used throughout TensorFlow."""
    return "//tensorflow:grpc"
def tf_grpc_cc_dependency():
    """Label of the gRPC C++ dependency used throughout TensorFlow."""
    return "//tensorflow:grpc++"
# --- dataset-extraction artifact (avg_line_length / max_line_length /
# --- alphanum_fraction metadata); not part of the original tensorflow.bzl ---
load(
"//tensorflow/core/platform:default/build_config_root.bzl",
"if_dynamic_kernels",
"if_static",
"tf_additional_grpc_deps_py",
"tf_additional_xla_deps_py",
"tf_cuda_tests_tags",
"tf_exec_compatible_with",
"tf_gpu_tests_tags",
"tf_sycl_tests_tags",
)
load(
"@local_config_tensorrt//:build_defs.bzl",
"if_tensorrt",
)
load(
"//tensorflow/core/platform:default/cuda_build_defs.bzl",
"if_cuda_is_configured",
)
load(
"@local_config_cuda//cuda:build_defs.bzl",
"cuda_default_copts",
"if_cuda",
)
load(
"@local_config_rocm//rocm:build_defs.bzl",
"if_rocm",
"if_rocm_is_configured",
"rocm_copts",
"rocm_default_copts",
)
load(
"//third_party/mkl:build_defs.bzl",
"if_enable_mkl",
"if_mkl",
"if_mkl_lnx_x64",
"if_mkl_ml",
"mkl_deps",
)
load(
"//third_party/mkl_dnn:build_defs.bzl",
"if_mkl_open_source_only",
"if_mkldnn_threadpool",
)
load(
"//third_party/ngraph:build_defs.bzl",
"if_ngraph",
)
def register_extension_info(**kwargs):
    """No-op stand-in for the internal extension-registration hook.

    Accepts and ignores all keyword arguments so call sites shared with the
    internal codebase keep working in the open-source build.
    """
    pass
VERSION = "1.15.5"
VERSION_MAJOR = VERSION.split(".")[0]
# select() helper: `a` only when building the v2 API
# (//tensorflow:api_version_2), else [].
def if_v2(a):
    return select({
        clean_dep("//tensorflow:api_version_2"): a,
        "//conditions:default": [],
    })
# select() helper: `a` only when NOT building the v2 API.
def if_not_v2(a):
    return select({
        clean_dep("//tensorflow:api_version_2"): [],
        "//conditions:default": a,
    })
# Compatibility shim: forwards directly to if_cuda_is_configured.
def if_cuda_is_configured_compat(x):
    return if_cuda_is_configured(x)
def src_to_test_name(src):
    """Derive a test target name from a source path: '/' and ':' become '_', extension dropped."""
    sanitized = src.replace("/", "_")
    sanitized = sanitized.replace(":", "_")
    return sanitized.partition(".")[0]
# Prefix each relative path with the current Bazel package's path.
def full_path(relative_paths):
    return [native.package_name() + "/" + relative for relative in relative_paths]
def _add_tfcore_prefix(src):
    """Qualify a bare target name with //tensorflow/core; leave absolute labels unchanged."""
    return src if src.startswith("//") else "//tensorflow/core:" + src
# Map relative proto paths to absolute //tensorflow/core labels.
def tf_android_core_proto_sources(core_proto_sources_relative):
    return [
        _add_tfcore_prefix(p)
        for p in core_proto_sources_relative
    ]
# For each relative proto path, the paths of the generated .pb.h headers
# followed by the generated .proto.h headers.
def tf_android_core_proto_headers(core_proto_sources_relative):
    return ([
        _add_tfcore_prefix(p).replace(":", "/").replace(".proto", ".pb.h")
        for p in core_proto_sources_relative
    ] + [
        _add_tfcore_prefix(p).replace(":", "/").replace(".proto", ".proto.h")
        for p in core_proto_sources_relative
    ])
# Portable builds simply wrap the proto deps in a cc_library; any extra
# keyword arguments are accepted for compatibility and ignored.
def tf_portable_proto_library(name, proto_deps, **kwargs):
    _ignore = [kwargs]
    native.cc_library(name = name, deps = proto_deps)
# Canonicalize a label string via Label() so the repository context of this
# .bzl file does not leak into callers.
def clean_dep(dep):
    return str(Label(dep))
# select() helper: `a` on Android x86/x86_64, else [].
def if_android_x86(a):
    return select({
        clean_dep("//tensorflow:android_x86"): a,
        clean_dep("//tensorflow:android_x86_64"): a,
        "//conditions:default": [],
    })
# select() helper: `a` on Android ARM (32-bit), else [].
def if_android_arm(a):
    return select({
        clean_dep("//tensorflow:android_arm"): a,
        "//conditions:default": [],
    })
# select() helper: `a` on Android ARM64, else [].
def if_android_arm64(a):
    return select({
        clean_dep("//tensorflow:android_arm64"): a,
        "//conditions:default": [],
    })
# select() helper: `a` on Android MIPS, else [].
def if_android_mips(a):
    return select({
        clean_dep("//tensorflow:android_mips"): a,
        "//conditions:default": [],
    })
# select() helper: `a` everywhere except Android.
def if_not_android(a):
    return select({
        clean_dep("//tensorflow:android"): [],
        "//conditions:default": a,
    })
# select() helper: `a` everywhere except Android MIPS/MIPS64.
def if_not_android_mips_and_mips64(a):
    return select({
        clean_dep("//tensorflow:android_mips"): [],
        clean_dep("//tensorflow:android_mips64"): [],
        "//conditions:default": a,
    })
# select() helper: `a` on Android, else [].
def if_android(a):
    return select({
        clean_dep("//tensorflow:android"): a,
        "//conditions:default": [],
    })
# select() helper: `a` on Emscripten, else [].
def if_emscripten(a):
    return select({
        clean_dep("//tensorflow:emscripten"): a,
        "//conditions:default": [],
    })
# select() helper: `a` on macOS, else `otherwise`.
def if_macos(a, otherwise = []):
    return select({
        clean_dep("//tensorflow:macos"): a,
        "//conditions:default": otherwise,
    })
# select() helper: `a` on iOS, else [].
def if_ios(a):
    return select({
        clean_dep("//tensorflow:ios"): a,
        "//conditions:default": [],
    })
# select() helper: `a` on iOS x86_64 (simulator), else [].
def if_ios_x86_64(a):
    return select({
        clean_dep("//tensorflow:ios_x86_64"): a,
        "//conditions:default": [],
    })
# select() helper: `a` on mobile platforms (Android or iOS), else [].
def if_mobile(a):
    return select({
        clean_dep("//tensorflow:android"): a,
        clean_dep("//tensorflow:ios"): a,
        "//conditions:default": [],
    })
# select() helper: `a` everywhere except mobile (Android/iOS).
def if_not_mobile(a):
    return select({
        clean_dep("//tensorflow:android"): [],
        clean_dep("//tensorflow:ios"): [],
        "//conditions:default": a,
    })
# Always resolves to []; the argument is intentionally discarded in OSS.
def if_not_lgpl_restricted(a):
    _ = (a,)
    return select({
        "//conditions:default": [],
    })
# select() helper: `a` everywhere except Windows.
def if_not_windows(a):
    return select({
        clean_dep("//tensorflow:windows"): [],
        "//conditions:default": a,
    })
# select() helper: `a` on Windows, else `otherwise`.
def if_windows(a, otherwise = []):
    return select({
        clean_dep("//tensorflow:windows"): a,
        "//conditions:default": otherwise,
    })
# select() helper: `a` when building Windows with CUDA support, else `otherwise`.
def if_windows_cuda(a, otherwise = []):
    return select({
        clean_dep("//tensorflow:with_cuda_support_windows_override"): a,
        "//conditions:default": otherwise,
    })
# select() helper: `a` on Linux x86_64, else [].
def if_linux_x86_64(a):
    return select({
        clean_dep("//tensorflow:linux_x86_64"): a,
        "//conditions:default": [],
    })
# select() helper: `a` when --define=override_eigen_strong_inline=true.
def if_override_eigen_strong_inline(a):
    return select({
        clean_dep("//tensorflow:override_eigen_strong_inline"): a,
        "//conditions:default": [],
    })
# select() helper: `if_true` when NCCL is supported (not disabled, not Windows).
def if_nccl(if_true, if_false = []):
    return select({
        "//tensorflow:no_nccl_support": if_false,
        "//tensorflow:windows": if_false,
        "//conditions:default": if_true,
    })
def get_win_copts(is_external = False):
    """MSVC copts for Windows builds.

    The final entry selects the TF_COMPILE_LIBRARY define: external (custom-op)
    builds undefine it (/U), internal builds define it (/D).
    """
    common = [
        "/DPLATFORM_WINDOWS",
        "/DEIGEN_HAS_C99_MATH",
        "/DTENSORFLOW_USE_EIGEN_THREADPOOL",
        "/DEIGEN_AVOID_STL_ARRAY",
        "/Iexternal/gemmlowp",
        "/wd4018",  # signed/unsigned mismatch warning, disabled by default
        "/wd4577",
        "/DNOGDI",
    ]
    library_define = "/UTF_COMPILE_LIBRARY" if is_external else "/DTF_COMPILE_LIBRARY"
    return common + [library_define]
# Common TensorFlow compiler options: platform defines, feature defines
# (CUDA/TensorRT/NCCL/MKL/nGraph), and per-platform tuning flags.
def tf_copts(
        android_optimization_level_override = "-O2",
        is_external = False,
        allow_exceptions = False):
    # For compatibility reasons, android_optimization_level_override
    # is currently only being set for Android.
    android_copts = [
        "-DTF_LEAN_BINARY",
        "-Wno-narrowing",
        "-fomit-frame-pointer",
    ]
    if android_optimization_level_override:
        android_copts.append(android_optimization_level_override)
    return (
        if_not_windows([
            "-DEIGEN_AVOID_STL_ARRAY",
            "-Iexternal/gemmlowp",
            "-Wno-sign-compare",
            "-ftemplate-depth=900",
        ]) +
        # Exceptions are disabled unless explicitly allowed by the caller.
        (if_not_windows(["-fno-exceptions"]) if not allow_exceptions else []) +
        if_cuda(["-DGOOGLE_CUDA=1"]) +
        if_tensorrt(["-DGOOGLE_TENSORRT=1"]) +
        if_nccl(["-DGOOGLE_NCCL=1"]) +
        if_mkl(["-DINTEL_MKL=1", "-DENABLE_MKLDNN_V1", "-DENABLE_INTEL_MKL_BFLOAT16"]) +
        if_mkl_open_source_only(["-DINTEL_MKL_DNN_ONLY"]) +
        if_mkldnn_threadpool(["-DENABLE_MKLDNN_THREADPOOL"]) +
        if_enable_mkl(["-DENABLE_MKL"]) +
        if_ngraph(["-DINTEL_NGRAPH=1"]) +
        if_android_arm(["-mfpu=neon"]) +
        if_linux_x86_64(["-msse3"]) +
        if_ios_x86_64(["-msse4.1"]) +
        select({
            clean_dep("//tensorflow:framework_shared_object"): [],
            "//conditions:default": ["-DTENSORFLOW_MONOLITHIC_BUILD"],
        }) +
        select({
            clean_dep("//tensorflow:android"): android_copts,
            clean_dep("//tensorflow:macos"): [],
            clean_dep("//tensorflow:windows"): get_win_copts(is_external),
            clean_dep("//tensorflow:ios"): [],
            clean_dep("//tensorflow:no_lgpl_deps"): ["-D__TENSORFLOW_NO_LGPL_DEPS__", "-pthread"],
            "//conditions:default": ["-pthread"],
        })
    )
# OpenMP flags: enabled for MKL on Linux x86_64, disabled for the
# MKL-DNN threadpool build.
def tf_openmp_copts():
    return (if_mkl_lnx_x64(["-fopenmp"]) + if_mkldnn_threadpool(["-fno-openmp"]))
# Eager/XLA define, only with --config=xla (//tensorflow:with_xla_support).
def tfe_xla_copts():
    return select({
        "//tensorflow:with_xla_support": ["-DTENSORFLOW_EAGER_USE_XLA"],
        "//conditions:default": [],
    })
# Disable RTTI (and protobuf's use of it) on Android to shrink binaries.
def tf_opts_nortti_if_android():
    return if_android([
        "-fno-rtti",
        "-DGOOGLE_PROTOBUF_NO_RTTI",
        "-DGOOGLE_PROTOBUF_NO_STATIC_INITIALIZER",
    ])
# Same RTTI-disabling flags, but for Emscripten builds.
def tf_opts_nortti_if_emscripten():
    return if_emscripten([
        "-fno-rtti",
        "-DGOOGLE_PROTOBUF_NO_RTTI",
        "-DGOOGLE_PROTOBUF_NO_STATIC_INITIALIZER",
    ])
# Disable the header-modules feature on Android builds.
def tf_features_nomodules_if_android():
    return if_android(["-use_header_modules"])
# Disable the header-modules feature on Emscripten builds.
def tf_features_nomodules_if_emscripten():
    return if_emscripten(["-use_header_modules"])
# For each name N in op_lib_names, build cc_library N_op_lib from ops/N.cc.
# alwayslink keeps the op registrations even though nothing references them
# directly.
def tf_gen_op_libs(op_lib_names, deps = None, is_external = True):
    # Make library out of each op so it can also be used to generate wrappers
    # for various languages.
    if not deps:
        deps = []
    for n in op_lib_names:
        native.cc_library(
            name = n + "_op_lib",
            copts = tf_copts(is_external = is_external),
            srcs = ["ops/" + n + ".cc"],
            deps = deps + [clean_dep("//tensorflow/core:framework")],
            visibility = ["//visibility:public"],
            alwayslink = 1,
            linkstatic = 1,
        )
def _make_search_paths(prefix, levels_to_root):
    """Comma-joined -rpath entries for `prefix` at every depth from 0 up to levels_to_root."""
    entries = []
    for depth in range(levels_to_root + 1):
        entries.append("-rpath,%s/%s" % (prefix, "/".join([".."] * depth)))
    return ",".join(entries)
# Linker rpath options so a binary at `name` finds shared libraries placed at
# the workspace root; depth is derived from the package path plus any '/' in
# the target name. @loader_path on macOS, $ORIGIN elsewhere; none on Windows.
def _rpath_linkopts(name):
    levels_to_root = native.package_name().count("/") + name.count("/")
    return select({
        clean_dep("//tensorflow:macos"): [
            "-Wl,%s" % (_make_search_paths("@loader_path", levels_to_root),),
        ],
        clean_dep("//tensorflow:windows"): [],
        "//conditions:default": [
            "-Wl,%s" % (_make_search_paths("$$ORIGIN", levels_to_root),),
        ],
    })
# In dynamic (non-static) builds, the framework shared object that binaries
# must link: versioned with the full VERSION when fullversion=True, otherwise
# just the major version; .dylib on macOS, .so elsewhere.
def tf_binary_additional_srcs(fullversion = False):
    if fullversion:
        suffix = "." + VERSION
    else:
        suffix = "." + VERSION_MAJOR
    return if_static(
        extra_deps = [],
        macos = [
            clean_dep("//tensorflow:libtensorflow_framework%s.dylib" % suffix),
        ],
        otherwise = [
            clean_dep("//tensorflow:libtensorflow_framework.so%s" % suffix),
        ],
    )
# All runtime names of the framework library (unversioned, major-versioned,
# fully-versioned) needed as data deps in dynamic builds.
def tf_binary_additional_data_deps():
    return if_static(
        extra_deps = [],
        macos = [
            clean_dep("//tensorflow:libtensorflow_framework.dylib"),
            clean_dep("//tensorflow:libtensorflow_framework.%s.dylib" % VERSION_MAJOR),
            clean_dep("//tensorflow:libtensorflow_framework.%s.dylib" % VERSION),
        ],
        otherwise = [
            clean_dep("//tensorflow:libtensorflow_framework.so"),
            clean_dep("//tensorflow:libtensorflow_framework.so.%s" % VERSION_MAJOR),
            clean_dep("//tensorflow:libtensorflow_framework.so.%s" % VERSION),
        ],
    )
def tf_binary_pybind_deps():
    """Selects the platform-specific _pywrap_tensorflow_internal dependency."""
    pywrap = "//tensorflow/python:_pywrap_tensorflow_internal_"
    return select({
        clean_dep("//tensorflow:macos"): [
            clean_dep(pywrap + "macos"),
        ],
        clean_dep("//tensorflow:windows"): [
            clean_dep(pywrap + "windows"),
        ],
        "//conditions:default": [
            clean_dep(pywrap + "linux"),
        ],
    })
def tf_shared_library_deps():
    """Returns the libtensorflow C-API shared libraries for this platform.

    Picks the versioned library names appropriate to the OS / shared-object
    configuration, and always appends the framework libraries from
    tf_binary_additional_srcs().
    """
    return select({
        clean_dep("//tensorflow:macos_with_framework_shared_object"): [
            clean_dep("//tensorflow:libtensorflow.dylib"),
            clean_dep("//tensorflow:libtensorflow.%s.dylib" % VERSION_MAJOR),
            clean_dep("//tensorflow:libtensorflow.%s.dylib" % VERSION),
        ],
        clean_dep("//tensorflow:macos"): [],
        clean_dep("//tensorflow:windows"): [
            clean_dep("//tensorflow:tensorflow.dll"),
            clean_dep("//tensorflow:tensorflow_dll_import_lib"),
        ],
        clean_dep("//tensorflow:framework_shared_object"): [
            clean_dep("//tensorflow:libtensorflow.so"),
            clean_dep("//tensorflow:libtensorflow.so.%s" % VERSION_MAJOR),
            clean_dep("//tensorflow:libtensorflow.so.%s" % VERSION),
        ],
        "//conditions:default": [],
    }) + tf_binary_additional_srcs()
def tf_binary_dynamic_kernel_dsos():
    """The all-kernels DSO, only when building with dynamically loaded kernels."""
    dynamic_kernel_dsos = [
        "//tensorflow/core/kernels:libtfkernel_all_kernels.so",
    ]
    return if_dynamic_kernels(extra_deps = dynamic_kernel_dsos, otherwise = [])
def tf_binary_dynamic_kernel_deps(kernels):
    """Links kernels statically unless dynamic kernel loading is enabled."""
    return if_dynamic_kernels(otherwise = kernels, extra_deps = [])
# Shared-library file-name templates per OS, filled with
# (basename, version-suffix): Linux .so, macOS .dylib, Windows .dll.
SHARED_LIBRARY_NAME_PATTERNS = [
    "lib%s.so%s", "lib%s%s.dylib", "%s%s.dll", ]
def tf_cc_shared_object(
        name,
        srcs = [],
        deps = [],
        data = [],
        linkopts = [],
        framework_so = tf_binary_additional_srcs(),
        soversion = None,
        kernels = [],
        per_os_targets = False, visibility = None,
        **kwargs):
    """Builds a versioned shared library plus symlink-style alias targets.

    For soversion X.Y.Z this builds lib<name>.so.X.Y.Z as the real
    cc_binary, then genrules symlinking lib<name>.so.X and lib<name>.so to
    it. With per_os_targets, one such family is created per pattern in
    SHARED_LIBRARY_NAME_PATTERNS. `kernels` is accepted but unused here.
    """
    if soversion != None:
        # suffix = "." + major version; longsuffix = "." + full version.
        suffix = "." + str(soversion).split(".")[0]
        longsuffix = "." + str(soversion)
    else:
        suffix = ""
        longsuffix = ""
    if per_os_targets:
        # One (unversioned, major-versioned, fully-versioned) triple per OS pattern.
        names = [
            (
                pattern % (name, ""),
                pattern % (name, suffix),
                pattern % (name, longsuffix),
            )
            for pattern in SHARED_LIBRARY_NAME_PATTERNS
        ]
    else:
        names = [(
            name,
            name + suffix,
            name + longsuffix,
        )]
    for name_os, name_os_major, name_os_full in names:
        # Windows DLLs are not versioned: collapse the triple to one name.
        if name_os.endswith(".dll"):
            name_os_major = name_os
            name_os_full = name_os
        if name_os != name_os_major:
            # Symlink: unversioned name -> major-versioned name.
            native.genrule(
                name = name_os + "_sym",
                outs = [name_os],
                srcs = [name_os_major],
                output_to_bindir = 1,
                cmd = "ln -sf $$(basename $<) $@",
            )
            # Symlink: major-versioned name -> fully-versioned library.
            native.genrule(
                name = name_os_major + "_sym",
                outs = [name_os_major],
                srcs = [name_os_full],
                output_to_bindir = 1,
                cmd = "ln -sf $$(basename $<) $@",
            )
        soname = name_os_major.split("/")[-1]
        data_extra = []
        if framework_so != []:
            data_extra = tf_binary_additional_data_deps()
        native.cc_binary(
            name = name_os_full,
            srcs = srcs + framework_so,
            deps = deps,
            linkshared = 1,
            data = data + data_extra,
            # Embed the major-versioned soname so consumers bind against it.
            linkopts = linkopts + _rpath_linkopts(name_os_full) + select({
                clean_dep("//tensorflow:macos"): [
                    "-Wl,-install_name,@rpath/" + soname,
                ],
                clean_dep("//tensorflow:windows"): [],
                "//conditions:default": [
                    "-Wl,-soname," + soname,
                ],
            }),
            visibility = visibility,
            **kwargs
        )
    flat_names = [item for sublist in names for item in sublist]
    # If per_os_targets renamed every output, keep `name` as a usable alias
    # that resolves to the current platform's library.
    if name not in flat_names:
        native.filegroup(
            name = name,
            srcs = select({
                "//tensorflow:windows": [":%s.dll" % (name)],
                "//tensorflow:macos": [":lib%s%s.dylib" % (name, longsuffix)],
                "//conditions:default": [":lib%s.so%s" % (name, longsuffix)],
            }),
            visibility = visibility,
        )
register_extension_info(
    extension_name = "tf_cc_shared_object",
    label_regex_for_dep = "{extension_name}",
)
def tf_cc_binary(
        name,
        srcs = [],
        deps = [],
        data = [],
        linkopts = [],
        copts = tf_copts(),
        kernels = [],
        per_os_targets = False, visibility = None,
        **kwargs):
    """cc_binary linked against the TF framework shared object.

    With per_os_targets, one binary per SHARED_LIBRARY_NAME_PATTERNS entry
    is built and `name` becomes a filegroup aliasing the current platform's
    output. Non-empty `kernels` adds the dynamic-kernel DSOs as data.
    """
    if kernels:
        added_data_deps = tf_binary_dynamic_kernel_dsos()
    else:
        added_data_deps = []
    if per_os_targets:
        names = [pattern % (name, "") for pattern in SHARED_LIBRARY_NAME_PATTERNS]
    else:
        names = [name]
    for name_os in names:
        native.cc_binary(
            name = name_os,
            copts = copts,
            srcs = srcs + tf_binary_additional_srcs(),
            deps = deps + tf_binary_dynamic_kernel_deps(kernels) + if_mkl_ml(
                [
                    clean_dep("//third_party/mkl:intel_binary_blob"),
                ],
            ),
            data = depset(data + added_data_deps),
            linkopts = linkopts + _rpath_linkopts(name_os),
            visibility = visibility,
            **kwargs
        )
    # Keep `name` addressable when per_os_targets renamed the outputs.
    if name not in names:
        native.filegroup(
            name = name,
            srcs = select({
                "//tensorflow:windows": [":%s.dll" % name],
                "//tensorflow:macos": [":lib%s.dylib" % name],
                "//conditions:default": [":lib%s.so" % name],
            }),
            visibility = visibility,
        )
register_extension_info(
    extension_name = "tf_cc_binary",
    label_regex_for_dep = "{extension_name}.*",
)
# A cc_binary wrapper that, unlike tf_cc_binary, does not link in the
# TensorFlow framework dependencies by default.
def tf_native_cc_binary(
        name,
        copts = tf_copts(),
        linkopts = [],
        **kwargs):
    """cc_binary built with TF copts but without the TF framework library."""
    platform_linkopts = select({
        clean_dep("//tensorflow:windows"): [],
        clean_dep("//tensorflow:macos"): ["-lm"],
        "//conditions:default": ["-lpthread", "-lm"],
    })
    native.cc_binary(
        name = name,
        copts = copts,
        linkopts = platform_linkopts + linkopts + _rpath_linkopts(name),
        **kwargs
    )

register_extension_info(
    extension_name = "tf_native_cc_binary",
    label_regex_for_dep = "{extension_name}.*",
)
def tf_gen_op_wrapper_cc(
        name,
        out_ops_file,
        pkg = "",
        op_gen = clean_dep("//tensorflow/cc:cc_op_gen_main"),
        deps = None,
        include_internal_ops = 0,
        # ApiDefs will be loaded in the order specified in this list.
        api_def_srcs = []):
    """Generates C++ op wrapper sources for one op library.

    Builds a one-shot generator binary from op_gen + deps, then runs it in
    a genrule producing <out_ops_file>.h/.cc and the _internal variants.
    """
    # Construct an op generator binary for these ops.
    tool = out_ops_file + "_gen_cc"
    if deps == None:
        deps = [pkg + ":" + name + "_op_lib"]
    tf_cc_binary(
        name = tool,
        copts = tf_copts(),
        linkopts = if_not_windows(["-lm", "-Wl,-ldl"]),
        linkstatic = 1,  # Faster to link this one-time-use binary dynamically
        deps = [op_gen] + deps,
    )
    srcs = api_def_srcs[:]
    if not api_def_srcs:
        # "," is the generator's marker for "no ApiDef directories".
        api_def_args_str = ","
    else:
        api_def_args = []
        for api_def_src in api_def_srcs:
            # Add directory of the first ApiDef source to args.
            # We are assuming all ApiDefs in a single api_def_src are in the
            # same directory.
            api_def_args.append(
                " $$(dirname $$(echo $(locations " + api_def_src +
                ") | cut -d\" \" -f1))",
            )
        api_def_args_str = ",".join(api_def_args)
    native.genrule(
        name = name + "_genrule",
        outs = [
            out_ops_file + ".h",
            out_ops_file + ".cc",
            out_ops_file + "_internal.h",
            out_ops_file + "_internal.cc",
        ],
        srcs = srcs,
        tools = [":" + tool] + tf_binary_additional_srcs(),
        cmd = ("$(location :" + tool + ") $(location :" + out_ops_file + ".h) " +
               "$(location :" + out_ops_file + ".cc) " +
               str(include_internal_ops) + " " + api_def_args_str),
    )
# Given a list of "op_lib_names" (a list of files in the ops directory
# without their .cc extensions), generate individual C++ .cc and .h
# files for each of the ops files mentioned, and then generate a
# single cc_library called "name" that combines all the
# generated C++ code.
#
# For example, for:
# tf_gen_op_wrappers_cc("tf_ops_lib", [ "array_ops", "math_ops" ])
#
#
# This will ultimately generate ops/* files and a library like:
#
# cc_library(name = "tf_ops_lib",
# srcs = [ "ops/array_ops.cc",
# "ops/math_ops.cc" ],
# hdrs = [ "ops/array_ops.h",
# "ops/math_ops.h" ],
# deps = [ ... ])
#
# Plus a private library for the "hidden" ops.
# cc_library(name = "tf_ops_lib_internal",
# srcs = [ "ops/array_ops_internal.cc",
# "ops/math_ops_internal.cc" ],
# hdrs = [ "ops/array_ops_internal.h",
# "ops/math_ops_internal.h" ],
# deps = [ ... ])
# TODO(joshl): Cleaner approach for hidden ops.
def tf_gen_op_wrappers_cc(
        name,
        op_lib_names = [],
        other_srcs = [],
        other_hdrs = [],
        other_srcs_internal = [],
        other_hdrs_internal = [],
        pkg = "",
        deps = [
            clean_dep("//tensorflow/cc:ops"),
            clean_dep("//tensorflow/cc:scope"),
            clean_dep("//tensorflow/cc:const_op"),
        ],
        deps_internal = [],
        op_gen = clean_dep("//tensorflow/cc:cc_op_gen_main"),
        include_internal_ops = 0,
        visibility = None,
        # ApiDefs will be loaded in the order specified in this list.
        api_def_srcs = [],
        # Any extra dependencies that the wrapper generator might need.
        extra_gen_deps = []):
    """Generates wrappers for each op lib and bundles them into two libraries.

    Produces cc_library `name` (public ops) and `name`_internal (hidden
    ops, TF-internal visibility). See the comment block above this macro
    for a worked example.
    """
    subsrcs = other_srcs[:]
    subhdrs = other_hdrs[:]
    internalsrcs = other_srcs_internal[:]
    internalhdrs = other_hdrs_internal[:]
    for n in op_lib_names:
        # Generate ops/<n>.{h,cc} and ops/<n>_internal.{h,cc}.
        tf_gen_op_wrapper_cc(
            n,
            "ops/" + n,
            api_def_srcs = api_def_srcs,
            include_internal_ops = include_internal_ops,
            op_gen = op_gen,
            pkg = pkg,
            deps = [pkg + ":" + n + "_op_lib"] + extra_gen_deps,
        )
        subsrcs += ["ops/" + n + ".cc"]
        subhdrs += ["ops/" + n + ".h"]
        internalsrcs += ["ops/" + n + "_internal.cc"]
        internalhdrs += ["ops/" + n + "_internal.h"]
    native.cc_library(
        name = name,
        srcs = subsrcs,
        hdrs = subhdrs,
        deps = deps + if_not_android([
            clean_dep("//tensorflow/core:core_cpu"),
            clean_dep("//tensorflow/core:framework"),
            clean_dep("//tensorflow/core:lib"),
            clean_dep("//tensorflow/core:ops"),
            clean_dep("//tensorflow/core:protos_all_cc"),
        ]) + if_android([
            clean_dep("//tensorflow/core:android_tensorflow_lib"),
        ]),
        copts = tf_copts(),
        alwayslink = 1,
        visibility = visibility,
    )
    # Companion library for the "hidden" (internal) ops.
    native.cc_library(
        name = name + "_internal",
        srcs = internalsrcs,
        hdrs = internalhdrs,
        deps = deps + deps_internal + if_not_android([
            clean_dep("//tensorflow/core:core_cpu"),
            clean_dep("//tensorflow/core:framework"),
            clean_dep("//tensorflow/core:lib"),
            clean_dep("//tensorflow/core:ops"),
            clean_dep("//tensorflow/core:protos_all_cc"),
        ]) + if_android([
            clean_dep("//tensorflow/core:android_tensorflow_lib"),
        ]),
        copts = tf_copts(),
        alwayslink = 1,
        visibility = [clean_dep("//tensorflow:internal")],
    )
# Generates a Python library target wrapping the ops registered in "deps".
#
# Args:
# name: used as the name of the generated target and as a name component of
# the intermediate files.
# out: name of the python file created by this rule. If None, then
# "ops/gen_{name}.py" is used.
# hidden: Optional list of ops names to make private in the Python module.
# It is invalid to specify both "hidden" and "op_whitelist".
# visibility: passed to py_library.
# deps: list of dependencies for the intermediate tool used to generate the
# python target. NOTE these `deps` are not applied to the final python
# library target itself.
# require_shape_functions: leave this as False.
# hidden_file: optional file that contains a list of op names to make private
# in the generated Python module. Each op name should be on a line by
# itself. Lines that start with characters that are invalid op name
# starting characters are treated as comments and ignored.
# generated_target_name: name of the generated target (overrides the
# "name" arg)
# op_whitelist: if not empty, only op names in this list will be wrapped. It
# is invalid to specify both "hidden" and "op_whitelist".
# cc_linkopts: Optional linkopts to be added to tf_cc_binary that contains the
# specified ops.
def tf_gen_op_wrapper_py(
        name,
        out = None,
        hidden = None,
        visibility = None,
        deps = [],
        require_shape_functions = False,
        hidden_file = None,
        generated_target_name = None,
        op_whitelist = [],
        cc_linkopts = [],
        api_def_srcs = []):
    """Generates a py_library wrapping the ops registered in `deps`.

    See the comment block above this macro for the full argument
    description. `hidden`/`hidden_file` and `op_whitelist` are mutually
    exclusive.
    """
    if (hidden or hidden_file) and op_whitelist:
        # Fixed error message: previously read "Cannot pass specify both ...".
        fail("Cannot specify both hidden and op_whitelist.")
    # Construct a cc_binary containing the specified ops.
    tool_name = "gen_" + name + "_py_wrappers_cc"
    if not deps:
        deps = [str(Label("//tensorflow/core:" + name + "_op_lib"))]
    tf_cc_binary(
        name = tool_name,
        copts = tf_copts(),
        linkopts = if_not_windows(["-lm", "-Wl,-ldl"]) + cc_linkopts,
        linkstatic = 1,  # Faster to link this one-time-use binary dynamically
        visibility = [clean_dep("//tensorflow:internal")],
        deps = ([
            clean_dep("//tensorflow/core:framework"),
            clean_dep("//tensorflow/python:python_op_gen_main"),
        ] + deps),
    )
    # Invoke the previous cc_binary to generate a python file.
    if not out:
        out = "ops/gen_" + name + ".py"
    if hidden:
        op_list_arg = ",".join(hidden)
        op_list_is_whitelist = False
    elif op_whitelist:
        op_list_arg = ",".join(op_whitelist)
        op_list_is_whitelist = True
    else:
        op_list_arg = "''"
        op_list_is_whitelist = False
    # Prepare ApiDef directories to pass to the genrule.
    if not api_def_srcs:
        api_def_args_str = ","
    else:
        api_def_args = []
        for api_def_src in api_def_srcs:
            # Add directory of the first ApiDef source to args.
            # We are assuming all ApiDefs in a single api_def_src are in the
            # same directory.
            api_def_args.append(
                "$$(dirname $$(echo $(locations " + api_def_src +
                ") | cut -d\" \" -f1))",
            )
        api_def_args_str = ",".join(api_def_args)
    if hidden_file:
        # `hidden_file` is file containing a list of op names to be hidden in the
        # generated module.
        native.genrule(
            name = name + "_pygenrule",
            outs = [out],
            srcs = api_def_srcs + [hidden_file],
            tools = [tool_name] + tf_binary_additional_srcs(),
            cmd = ("$(location " + tool_name + ") " + api_def_args_str +
                   " @$(location " + hidden_file + ") " +
                   ("1" if require_shape_functions else "0") + " > $@"),
        )
    else:
        native.genrule(
            name = name + "_pygenrule",
            outs = [out],
            srcs = api_def_srcs,
            tools = [tool_name] + tf_binary_additional_srcs(),
            cmd = ("$(location " + tool_name + ") " + api_def_args_str + " " +
                   op_list_arg + " " +
                   ("1" if require_shape_functions else "0") + " " +
                   ("1" if op_list_is_whitelist else "0") + " > $@"),
        )
    # Make a py_library out of the generated python file.
    if not generated_target_name:
        generated_target_name = name
    native.py_library(
        name = generated_target_name,
        srcs = [out],
        srcs_version = "PY2AND3",
        visibility = visibility,
        deps = [
            clean_dep("//tensorflow/python:framework_for_generated_wrappers_v2"),
        ],
        # Instruct build_cleaner to try to avoid using this rule; typically ops
        # creators will provide their own tf_custom_op_py_library based target
        # that wraps this one.
        tags = ["avoid_dep"],
    )
# Define a bazel macro that creates cc_test for tensorflow.
#
# Links in the framework shared object
# (//third_party/tensorflow:libtensorflow_framework.so) when not building
# statically. Also adds linker options (rpaths) so that the framework shared
# object can be found.
#
# TODO(opensource): we need to enable this to work around the hidden symbol
# __cudaRegisterFatBinary error. Need more investigations.
def tf_cc_test(
        name,
        srcs,
        deps,
        data = [],
        linkstatic = 0,
        extra_copts = [],
        suffix = "",
        linkopts = [],
        kernels = [],
        **kwargs):
    """cc_test linked against the TF framework shared object.

    Adds platform link flags plus rpaths so the framework .so is found at
    runtime; `suffix` is appended to the test name.
    """
    native.cc_test(
        name = "%s%s" % (name, suffix),
        srcs = srcs + tf_binary_additional_srcs(),
        copts = tf_copts() + extra_copts,
        linkopts = select({
            clean_dep("//tensorflow:android"): [
                "-pie",
            ],
            clean_dep("//tensorflow:windows"): [],
            clean_dep("//tensorflow:macos"): [
                "-lm",
            ],
            "//conditions:default": [
                "-lpthread",
                "-lm",
            ],
        }) + linkopts + _rpath_linkopts(name),
        deps = deps + tf_binary_dynamic_kernel_deps(kernels) + if_mkl_ml(
            [
                clean_dep("//third_party/mkl:intel_binary_blob"),
            ],
        ),
        data = data +
               tf_binary_dynamic_kernel_dsos() +
               tf_binary_additional_srcs(),
        exec_compatible_with = tf_exec_compatible_with(kwargs),
        # Nested select() statements seem not to be supported when passed to
        # linkstatic, and we already have a cuda select() passed in to this
        # function.
        linkstatic = linkstatic or select({
            # cc_tests with ".so"s in srcs incorrectly link on Darwin unless
            # linkstatic=1 (https://github.com/bazelbuild/bazel/issues/3450).
            # TODO(allenl): Remove Mac static linking when Bazel 0.6 is out.
            clean_dep("//tensorflow:macos"): 1,
            "//conditions:default": 0,
        }),
        **kwargs
    )
register_extension_info(
    extension_name = "tf_cc_test",
    label_regex_for_dep = "{extension_name}.*",
)
# Part of the testing workflow requires a distinguishable name for the build
# rules that involve a GPU, even if otherwise identical to the base rule.
def tf_cc_test_gpu(
        name,
        srcs,
        deps,
        linkstatic = 0,
        tags = [],
        data = [],
        size = "medium",
        suffix = "",
        args = None):
    """GPU-named variant of tf_cc_test; currently just forwards to it.

    Exists so GPU-involving rules have a distinguishable name even when
    otherwise identical to the base rule (see comment above).
    """
    tf_cc_test(
        name,
        srcs,
        deps,
        size = size,
        args = args,
        data = data,
        linkstatic = linkstatic,
        suffix = suffix,
        tags = tags,
    )
register_extension_info(
    extension_name = "tf_cc_test_gpu",
    label_regex_for_dep = "{extension_name}",
)
def tf_gpu_cc_test(
        name,
        srcs = [],
        deps = [],
        tags = [],
        data = [],
        size = "medium",
        extra_copts = [],
        linkstatic = 0,
        args = [],
        kernels = [],
        linkopts = []):
    """Creates a CPU test (`name`, tagged manual) and a GPU test (`name`_gpu)."""
    # CPU-only build of the test; tagged "manual" so it is not run by default.
    tf_cc_test(
        name = name,
        size = size,
        srcs = srcs,
        args = args,
        data = data,
        extra_copts = extra_copts + if_cuda(["-DNV_CUDNN_DISABLE_EXCEPTION"]),
        kernels = kernels,
        linkopts = linkopts,
        linkstatic = linkstatic,
        tags = tags + ["manual"],
        deps = deps,
    )
    # GPU build: "_gpu" suffix, GPU test tags, and the GPU runtime linked in.
    tf_cc_test(
        name = name,
        size = size,
        srcs = srcs,
        args = args,
        data = data,
        extra_copts = extra_copts + if_cuda(["-DNV_CUDNN_DISABLE_EXCEPTION"]),
        kernels = kernels,
        linkopts = linkopts,
        linkstatic = select({
            # TODO(allenl): Remove Mac static linking when Bazel 0.6 is out.
            clean_dep("//tensorflow:macos"): 1,
            "@local_config_cuda//cuda:using_nvcc": 1,
            "@local_config_cuda//cuda:using_clang": 1,
            "//conditions:default": 0,
        }),
        suffix = "_gpu",
        tags = tags + tf_gpu_tests_tags(),
        deps = deps + if_cuda_is_configured([
            clean_dep("//tensorflow/core:gpu_runtime"),
        ]) + if_rocm_is_configured([
            clean_dep("//tensorflow/core:gpu_runtime"),
        ]),
    )
register_extension_info(
    extension_name = "tf_gpu_cc_test",
    label_regex_for_dep = "{extension_name}",
)
# terminology changes: saving tf_cuda_* definition for compatibility
def tf_cuda_cc_test(*args, **kwargs):
    """Compatibility alias (old tf_cuda_* naming); forwards to tf_gpu_cc_test."""
    return tf_gpu_cc_test(*args, **kwargs)

register_extension_info(
    extension_name = "tf_cuda_cc_test",
    label_regex_for_dep = "{extension_name}",
)
def tf_gpu_only_cc_test(
        name,
        srcs = [],
        deps = [],
        tags = [],
        data = [],
        size = "medium",
        linkstatic = 0,
        args = [],
        kernels = [],
        linkopts = []):
    """cc_test that is built with GPU copts and only has a "_gpu" variant."""
    tags = tags + tf_gpu_tests_tags()
    native.cc_test(
        name = "%s%s" % (name, "_gpu"),
        srcs = srcs + tf_binary_additional_srcs(),
        size = size,
        args = args,
        copts = _cuda_copts() + rocm_copts() + tf_copts(),
        features = if_cuda(["-use_header_modules"]),
        data = data + tf_binary_dynamic_kernel_dsos(),
        deps = deps + tf_binary_dynamic_kernel_deps(kernels) + if_cuda_is_configured([
            clean_dep("//tensorflow/core:cuda"),
            clean_dep("//tensorflow/core:gpu_lib"),
        ]) + if_rocm_is_configured([
            clean_dep("//tensorflow/core:gpu_lib"),
        ]),
        linkopts = if_not_windows(["-lpthread", "-lm"]) + linkopts + _rpath_linkopts(name),
        linkstatic = linkstatic or select({
            # cc_tests with ".so"s in srcs incorrectly link on Darwin
            # unless linkstatic=1.
            # TODO(allenl): Remove Mac static linking when Bazel 0.6 is out.
            clean_dep("//tensorflow:macos"): 1,
            "//conditions:default": 0,
        }),
        tags = tags,
        exec_compatible_with = tf_exec_compatible_with({"tags": tags}),
    )
register_extension_info(
    extension_name = "tf_gpu_only_cc_test",
    label_regex_for_dep = "{extension_name}_gpu",
)
# terminology changes: saving tf_cuda_* definition for compatibility
def tf_cuda_only_cc_test(*args, **kwargs):
    """Compatibility alias (old tf_cuda_* naming); forwards to tf_gpu_only_cc_test."""
    return tf_gpu_only_cc_test(*args, **kwargs)

register_extension_info(
    extension_name = "tf_cuda_only_cc_test",
    label_regex_for_dep = "{extension_name}_gpu",
)
# Create a cc_test for each of the tensorflow tests listed in "tests"
def tf_cc_tests(
        srcs,
        deps,
        name = "",
        linkstatic = 0,
        tags = [],
        size = "medium",
        args = None,
        linkopts = [],
        kernels = []):
    """Creates one tf_cc_test per file in srcs, named via src_to_test_name.

    `name` is accepted for BUILD-file symmetry but not used here.
    """
    for src in srcs:
        tf_cc_test(
            name = src_to_test_name(src),
            size = size,
            srcs = [src],
            args = args,
            kernels = kernels,
            linkopts = linkopts,
            linkstatic = linkstatic,
            tags = tags,
            deps = deps,
        )
def tf_cc_test_mkl(
        srcs,
        deps,
        name = "",
        data = [],
        linkstatic = 0,
        tags = [],
        size = "medium",
        kernels = [],
        args = None):
    """Creates one cc_test per src, built only when MKL is enabled.

    `name` is accepted for BUILD-file symmetry but not used here.
    """
    # -fno-exceptions in nocopts breaks compilation if header modules are enabled.
    disable_header_modules = ["-use_header_modules"]
    for src in srcs:
        native.cc_test(
            name = src_to_test_name(src),
            srcs = if_mkl([src]) + tf_binary_additional_srcs(),
            copts = tf_copts(allow_exceptions = True) + tf_openmp_copts(),
            linkopts = select({
                clean_dep("//tensorflow:android"): [
                    "-pie",
                ],
                clean_dep("//tensorflow:windows"): [],
                "//conditions:default": [
                    "-lpthread",
                    "-lm",
                ],
            }) + _rpath_linkopts(src_to_test_name(src)),
            deps = deps + tf_binary_dynamic_kernel_deps(kernels) + if_mkl_ml(["//third_party/mkl:intel_binary_blob"]),
            data = data + tf_binary_dynamic_kernel_dsos(),
            exec_compatible_with = tf_exec_compatible_with({"tags": tags}),
            linkstatic = linkstatic,
            tags = tags,
            size = size,
            args = args,
            features = disable_header_modules,
        )
def tf_cc_tests_gpu(
        srcs,
        deps,
        name = "",
        linkstatic = 0,
        tags = [],
        size = "medium",
        kernels = [],
        args = None):
    """Creates one cc_test per src; currently identical to tf_cc_tests.

    `name` is accepted for BUILD-file symmetry but not used here.
    """
    # Forward by keyword: the previous call passed `linkstatic` positionally
    # into tf_cc_tests' third parameter (`name`), silently dropping it.
    tf_cc_tests(
        srcs,
        deps,
        linkstatic = linkstatic,
        size = size,
        args = args,
        kernels = kernels,
        tags = tags,
    )
def tf_gpu_cc_tests(
        srcs,
        deps,
        name = "",
        tags = [],
        size = "medium",
        linkstatic = 0,
        args = None,
        kernels = [],
        linkopts = []):
    """Creates one tf_gpu_cc_test per file in srcs.

    `name` is accepted for BUILD-file symmetry but not used here.
    """
    for test_src in srcs:
        tf_gpu_cc_test(
            name = src_to_test_name(test_src),
            srcs = [test_src],
            size = size,
            args = args,
            deps = deps,
            tags = tags,
            kernels = kernels,
            linkopts = linkopts,
            linkstatic = linkstatic,
        )
# terminology changes: saving tf_cuda_* definition for compatibility
def tf_cuda_cc_tests(*args, **kwargs):
    """Compatibility alias (old tf_cuda_* naming); forwards to tf_gpu_cc_tests."""
    return tf_gpu_cc_tests(*args, **kwargs)
def tf_java_test(
        name,
        srcs = [],
        deps = [],
        kernels = [],
        *args,
        **kwargs):
    """java_test with the fully-versioned TF framework and kernel DSOs on deps."""
    native.java_test(
        name = name,
        srcs = srcs,
        deps = deps + tf_binary_additional_srcs(fullversion = True) + tf_binary_dynamic_kernel_dsos() + tf_binary_dynamic_kernel_deps(kernels),
        *args,
        **kwargs
    )
register_extension_info(
    extension_name = "tf_java_test",
    label_regex_for_dep = "{extension_name}",
)
def _cuda_copts(opts = []):
    """Default CUDA copts, keyed on the active CUDA compiler.

    `opts` is appended only when CUDA is actually configured.
    """
    compiler_flags = select({
        "//conditions:default": [],
        "@local_config_cuda//cuda:using_nvcc": [
            "-nvcc_options=relaxed-constexpr",
            "-nvcc_options=ftz=true",
        ],
        "@local_config_cuda//cuda:using_clang": [
            "-fcuda-flush-denormals-to-zero",
        ],
    })
    return cuda_default_copts() + compiler_flags + if_cuda_is_configured_compat(opts)
# Build defs for TensorFlow kernels
# When this target is built using --config=cuda, a cc_library is built
# that passes -DGOOGLE_CUDA=1 and '-x cuda', linking in additional
# libraries needed by GPU kernels.
#
# When this target is built using --config=rocm, a cc_library is built
# that passes -DTENSORFLOW_USE_ROCM and '-x rocm', linking in additional
# libraries needed by GPU kernels.
def tf_gpu_kernel_library(
        srcs,
        copts = [],
        cuda_copts = [],
        deps = [],
        hdrs = [],
        **kwargs):
    """alwayslink cc_library for GPU kernels, with CUDA/ROCm copts and deps."""
    copts = copts + tf_copts() + _cuda_copts(opts = cuda_copts) + rocm_copts(opts = cuda_copts)
    # Header modules conflict with GPU compilation; disable them.
    kwargs["features"] = kwargs.get("features", []) + ["-use_header_modules"]
    native.cc_library(
        srcs = srcs,
        hdrs = hdrs,
        copts = copts,
        deps = deps + if_cuda_is_configured_compat([
            clean_dep("//tensorflow/stream_executor/cuda:cudart_stub"),
            clean_dep("//tensorflow/core:gpu_lib"),
        ]) + if_rocm_is_configured([
            clean_dep("//tensorflow/core:gpu_lib"),
        ]),
        alwayslink = 1,
        **kwargs
    )
register_extension_info(
    extension_name = "tf_gpu_kernel_library",
    label_regex_for_dep = "{extension_name}",
)
def tf_gpu_library(deps = None, cuda_deps = None, copts = tf_copts(), **kwargs):
    """cc_library whose cuda_deps and GPU defines apply only when GPU is configured.

    Args:
      deps: dependencies always linked in.
      cuda_deps: dependencies added only under CUDA/ROCm configurations.
      copts: base copts; GPU/MKL/TensorRT/NCCL defines are appended per config.
    """
    if not deps:
        deps = []
    if not cuda_deps:
        cuda_deps = []
    # Header modules conflict with GPU compilation; disable them.
    kwargs["features"] = kwargs.get("features", []) + ["-use_header_modules"]
    native.cc_library(
        deps = deps + if_cuda_is_configured_compat(cuda_deps + [
            clean_dep("//tensorflow/stream_executor/cuda:cudart_stub"),
            "@local_config_cuda//cuda:cuda_headers",
        ]) + if_rocm_is_configured(cuda_deps + [
            "@local_config_rocm//rocm:rocm_headers",
        ]),
        copts = (copts + if_cuda(["-DGOOGLE_CUDA=1", "-DNV_CUDNN_DISABLE_EXCEPTION"]) + if_rocm(["-DTENSORFLOW_USE_ROCM=1"]) + if_mkl(["-DINTEL_MKL=1"]) + if_mkl_open_source_only(["-DINTEL_MKL_DNN_ONLY"]) + if_enable_mkl(["-DENABLE_MKL"]) + if_tensorrt(["-DGOOGLE_TENSORRT=1"]) + if_nccl(["-DGOOGLE_NCCL=1"])),
        **kwargs
    )
register_extension_info(
    extension_name = "tf_gpu_library",
    label_regex_for_dep = "{extension_name}",
)
# terminology changes: saving tf_cuda_* definition for compatibility
def tf_cuda_library(*args, **kwargs):
    """Compatibility alias (old tf_cuda_* naming); forwards to tf_gpu_library."""
    return tf_gpu_library(*args, **kwargs)

register_extension_info(
    extension_name = "tf_cuda_library",
    label_regex_for_dep = "{extension_name}",
)
def tf_kernel_library(
        name,
        prefix = None,
        srcs = None,
        gpu_srcs = None,
        hdrs = None,
        deps = None,
        alwayslink = 1,
        copts = None,
        gpu_copts = None,
        is_external = False,
        **kwargs):
    """Defines a TF kernel library: CPU library, optional GPU sub-library,
    and a standalone libtfkernel_<name>.so for dynamic kernel loading.

    When `prefix` is given, srcs/hdrs/gpu_srcs are augmented by globbing
    prefix*.cc / prefix*.h (tests excluded; *.cu.cc go to gpu_srcs,
    *impl.h become textual_hdrs).
    """
    if not srcs:
        srcs = []
    if not hdrs:
        hdrs = []
    if not deps:
        deps = []
    if not copts:
        copts = []
    if not gpu_copts:
        gpu_copts = []
    textual_hdrs = []
    copts = copts + tf_copts(is_external = is_external) + if_cuda(["-DNV_CUDNN_DISABLE_EXCEPTION"])
    # Override EIGEN_STRONG_INLINE to inline when
    # --define=override_eigen_strong_inline=true to avoid long compiling time.
    # See https://github.com/tensorflow/tensorflow/issues/10521
    copts = copts + if_override_eigen_strong_inline(["/DEIGEN_STRONG_INLINE=inline"])
    if prefix:
        # Any *.cu.cc under the prefix means this kernel has a GPU part.
        if native.glob([prefix + "*.cu.cc"], exclude = ["*test*"]):
            if not gpu_srcs:
                gpu_srcs = []
            gpu_srcs = gpu_srcs + native.glob(
                [prefix + "*.cu.cc", prefix + "*.h"],
                exclude = [prefix + "*test*"],
            )
        srcs = srcs + native.glob(
            [prefix + "*.cc"],
            exclude = [prefix + "*test*", prefix + "*.cu.cc"],
        )
        hdrs = hdrs + native.glob(
            [prefix + "*.h"],
            exclude = [prefix + "*test*", prefix + "*.cu.h", prefix + "*impl.h"],
        )
        textual_hdrs = native.glob(
            [prefix + "*impl.h"],
            exclude = [prefix + "*test*", prefix + "*.cu.h"],
        )
    cuda_deps = [clean_dep("//tensorflow/core:gpu_lib")]
    if gpu_srcs:
        for gpu_src in gpu_srcs:
            if gpu_src.endswith(".cc") and not gpu_src.endswith(".cu.cc"):
                fail("{} not allowed in gpu_srcs. .cc sources must end with .cu.cc"
                    .format(gpu_src))
        # Separate GPU sub-library; the main library depends on it via cuda_deps.
        tf_gpu_kernel_library(
            name = name + "_gpu",
            srcs = gpu_srcs,
            deps = deps,
            copts = gpu_copts,
            **kwargs
        )
        cuda_deps.extend([":" + name + "_gpu"])
    kwargs["tags"] = kwargs.get("tags", []) + [
        "req_dep=%s" % clean_dep("//tensorflow/core:gpu_lib"),
        "req_dep=@local_config_cuda//cuda:cuda_headers",
    ]
    tf_gpu_library(
        name = name,
        srcs = srcs,
        hdrs = hdrs,
        textual_hdrs = textual_hdrs,
        copts = copts,
        cuda_deps = cuda_deps,
        linkstatic = 1,  # Needed since alwayslink is broken in bazel b/27630669
        alwayslink = alwayslink,
        deps = deps,
        **kwargs
    )
    # TODO(gunan): CUDA dependency not clear here. Fix it.
    tf_cc_shared_object(
        name = "libtfkernel_%s.so" % name,
        srcs = srcs + hdrs,
        copts = copts,
        tags = ["manual", "notap"],
        deps = deps,
    )
register_extension_info(
    extension_name = "tf_kernel_library",
    label_regex_for_dep = "{extension_name}(_gpu)?",
)
def tf_mkl_kernel_library(
        name,
        prefix = None,
        srcs = None,
        hdrs = None,
        deps = None,
        alwayslink = 1,
        copts = tf_copts(allow_exceptions = True) + tf_openmp_copts()):
    """cc_library for MKL kernels; srcs compile only when MKL is enabled.

    With `prefix`, srcs/hdrs are augmented by globbing prefix*.cc /
    prefix*.h (tests excluded).
    """
    if not bool(srcs):
        srcs = []
    if not bool(hdrs):
        hdrs = []
    if prefix:
        srcs = srcs + native.glob(
            [prefix + "*.cc"],
            exclude = [prefix + "*test*"],
        )
        hdrs = hdrs + native.glob(
            [prefix + "*.h"],
            exclude = [prefix + "*test*"],
        )
    # -fno-exceptions in nocopts breaks compilation if header modules are enabled.
    disable_header_modules = ["-use_header_modules"]
    native.cc_library(
        name = name,
        srcs = if_mkl(srcs),
        hdrs = hdrs,
        deps = deps,
        alwayslink = alwayslink,
        copts = copts,
        features = disable_header_modules,
    )
register_extension_info(
    extension_name = "tf_mkl_kernel_library",
    label_regex_for_dep = "{extension_name}",
)
def _get_transitive_headers(hdrs, deps):
    """Depset of `hdrs` plus every header transitively exported by `deps`."""
    dep_headers = [dep[CcInfo].compilation_context.headers for dep in deps]
    return depset(hdrs, transitive = dep_headers)
# Bazel rules for building swig files.
def _py_wrap_cc_impl(ctx):
    """Rule implementation: runs SWIG over one .i file to emit C++ and Python.

    Collects transitive headers and SWIG library files as inputs, builds
    the SWIG command line, and registers a single run action producing
    cc_out and py_out.
    """
    srcs = ctx.files.srcs
    if len(srcs) != 1:
        fail("Exactly one SWIG source file label must be specified.", "srcs")
    module_name = ctx.attr.module_name
    src = ctx.files.srcs[0]
    # Inputs: the .i file, its includes, transitive C++ headers, the SWIG
    # template library, and toolchain files.
    inputs = _get_transitive_headers([src] + ctx.files.swig_includes, ctx.attr.deps)
    inputs = depset(ctx.files._swiglib, transitive = [inputs])
    inputs = depset(ctx.files.toolchain_deps, transitive = [inputs])
    swig_include_dirs = depset(_get_repository_roots(ctx, inputs))
    swig_include_dirs = depset(sorted([f.dirname for f in ctx.files._swiglib]), transitive = [swig_include_dirs])
    args = [
        "-c++",
        "-python",
        "-module",
        module_name,
        "-o",
        ctx.outputs.cc_out.path,
        "-outdir",
        ctx.outputs.py_out.dirname,
    ]
    args += ["-l" + f.path for f in ctx.files.swig_includes]
    args += ["-I" + i for i in swig_include_dirs.to_list()]
    args += [src.path]
    outputs = [ctx.outputs.cc_out, ctx.outputs.py_out]
    ctx.actions.run(
        executable = ctx.executable._swig,
        arguments = args,
        inputs = inputs.to_list(),
        outputs = outputs,
        mnemonic = "PythonSwig",
        progress_message = "SWIGing " + src.path,
    )
    return struct(files = depset(outputs))
# Rule wrapping _py_wrap_cc_impl: produces %{module_name}.cc and
# %{py_module_name}.py from a single SWIG .i source.
_py_wrap_cc = rule(
    attrs = {
        "srcs": attr.label_list(
            mandatory = True,
            allow_files = True,
        ),
        "swig_includes": attr.label_list(
            allow_files = True,
        ),
        # C++ deps whose transitive headers SWIG may include.
        "deps": attr.label_list(
            allow_files = True,
            providers = [CcInfo],
        ),
        "toolchain_deps": attr.label_list(
            allow_files = True,
        ),
        "module_name": attr.string(mandatory = True),
        "py_module_name": attr.string(mandatory = True),
        # The SWIG binary, built for the host.
        "_swig": attr.label(
            default = Label("@swig//:swig"),
            executable = True,
            cfg = "host",
        ),
        # SWIG's bundled template/library files.
        "_swiglib": attr.label(
            default = Label("@swig//:templates"),
            allow_files = True,
        ),
    },
    outputs = {
        "cc_out": "%{module_name}.cc",
        "py_out": "%{py_module_name}.py",
    },
    implementation = _py_wrap_cc_impl,
)
def _get_repository_roots(ctx, files):
    """Returns the distinct repository/workspace roots of `files`.

    Roots are ordered by how many files they contain (most first), ties
    broken alphabetically; counts are stored negated so a plain ascending
    sort yields that order.
    """
    result = {}
    for f in files.to_list():
        root = f.root.path
        if root:
            if root not in result:
                result[root] = 0
            result[root] -= 1
        work = f.owner.workspace_root
        if work:
            if root:
                root += "/"
            root += work
        # Count the (possibly workspace-qualified) root as well.
        if root:
            if root not in result:
                result[root] = 0
            result[root] -= 1
    return [k for v, k in sorted([(v, k) for k, v in result.items()])]
# Bazel rule for collecting the header files that a target depends on.
def _transitive_hdrs_impl(ctx):
    """Rule implementation: exposes all headers transitively exported by deps."""
    outputs = _get_transitive_headers([], ctx.attr.deps)
    return struct(files = outputs)
# Rule collecting the transitive header files of its `deps`.
_transitive_hdrs = rule(
    attrs = {
        "deps": attr.label_list(
            allow_files = True,
            providers = [CcInfo],
        ),
    },
    implementation = _transitive_hdrs_impl,
)
def transitive_hdrs(name, deps = [], **kwargs):
    """Filegroup `name` containing every header transitively exported by deps."""
    gather_target = name + "_gather"
    _transitive_hdrs(name = gather_target, deps = deps)
    native.filegroup(name = name, srcs = [":" + gather_target])
# Create a header only library that includes all the headers exported by
# the libraries in deps.
def cc_header_only_library(name, deps = [], includes = [], extra_deps = [], **kwargs):
    """Header-only cc_library exposing all headers exported by `deps`."""
    gather_target = name + "_gather"
    _transitive_hdrs(name = gather_target, deps = deps)
    native.cc_library(
        name = name,
        hdrs = [":" + gather_target],
        includes = includes,
        deps = extra_deps,
        **kwargs
    )
def tf_custom_op_library_additional_deps():
    """Header-only deps every custom-op library needs to compile."""
    header_deps = [
        "@com_google_protobuf//:protobuf_headers",
        clean_dep("//third_party/eigen3"),
        clean_dep("//tensorflow/core:framework_headers_lib"),
    ]
    return header_deps + if_windows(["//tensorflow/python:pywrap_tensorflow_import_lib"])
# A list of targets that contains the implementation of
# tf_custom_op_library_additional_deps. It's used to generate a DEF file
# for exporting symbols on Windows.
def tf_custom_op_library_additional_deps_impl():
    """Implementation targets backing tf_custom_op_library_additional_deps."""
    impl_deps = [
        "@com_google_protobuf//:protobuf",
        "@nsync//:nsync_cpp",
        clean_dep("//third_party/eigen3"),
        clean_dep("//tensorflow/core:framework"),
        clean_dep("//tensorflow/core:reader_base"),
    ]
    return impl_deps
def _collect_deps_aspect_impl(target, ctx):
    """Aspect implementation: accumulates the transitive `deps` labels.

    Exposes them as the `tf_collected_deps` provider field on each target.
    """
    alldeps = depset()
    if hasattr(ctx.rule.attr, "deps"):
        for dep in ctx.rule.attr.deps:
            alldeps = depset([dep.label], transitive = [alldeps])
            # Merge in what the dep itself already collected.
            if hasattr(dep, "tf_collected_deps"):
                alldeps = depset(transitive = [alldeps, dep.tf_collected_deps])
    return struct(tf_collected_deps = alldeps)
# Aspect that walks the `deps` edges collecting all transitive dep labels.
collect_deps_aspect = aspect(
    attr_aspects = ["deps"],
    implementation = _collect_deps_aspect_impl,
)
def _dep_label(dep):
    """Formats a dependency's label as "package:name" for error messages."""
    return "%s:%s" % (dep.label.package, dep.label.name)
# Rule that fails if any target in 'deps' transitively depends on a target
# listed in the 'disallowed_deps' attribute.
def _check_deps_impl(ctx):
    """Rule implementation: fails if any collected dep is disallowed.

    Relies on collect_deps_aspect having populated `tf_collected_deps`
    on each entry of ctx.attr.deps.
    """
    disallowed_deps = ctx.attr.disallowed_deps
    for input_dep in ctx.attr.deps:
        if not hasattr(input_dep, "tf_collected_deps"):
            continue
        for dep in input_dep.tf_collected_deps.to_list():
            for disallowed_dep in disallowed_deps:
                if dep == disallowed_dep.label:
                    fail(
                        _dep_label(input_dep) + " cannot depend on " + _dep_label(
                            disallowed_dep,
                        ),
                    )
    return struct()
# Rule enforcing that `deps` do not transitively reach `disallowed_deps`.
check_deps = rule(
    _check_deps_impl,
    attrs = {
        # Targets to check; the aspect collects their transitive deps.
        "deps": attr.label_list(
            aspects = [collect_deps_aspect],
            mandatory = True,
            allow_files = True,
        ),
        "disallowed_deps": attr.label_list(
            mandatory = True,
            allow_files = True,
        ),
    },
)
def tf_custom_op_library(name, srcs = [], gpu_srcs = [], deps = [], linkopts = [], copts = [], **kwargs):
    """Builds a shared object for a custom TensorFlow op.

    Args:
      name: name of the output shared-object target.
      srcs: C++ sources of the op library.
      gpu_srcs: device sources; compiled into a "<name>_gpu" cc_library that
        the shared object links in when CUDA/ROCm is configured.
      deps: extra deps; the standard custom-op header deps are appended.
      linkopts / copts / **kwargs: forwarded to the underlying rules.
    """
    cuda_deps = [
        clean_dep("//tensorflow/core:stream_executor_headers_lib"),
        "@local_config_cuda//cuda:cuda_headers",
        "@local_config_cuda//cuda:cudart_static",
    ]
    rocm_deps = [
        clean_dep("//tensorflow/core:stream_executor_headers_lib"),
    ]
    deps = deps + tf_custom_op_library_additional_deps()
    # Override EIGEN_STRONG_INLINE to inline when
    # --define=override_eigen_strong_inline=true to avoid long compiling time.
    # See https://github.com/tensorflow/tensorflow/issues/10521
    copts = copts + if_override_eigen_strong_inline(["/DEIGEN_STRONG_INLINE=inline"]) + if_cuda(["-DNV_CUDNN_DISABLE_EXCEPTION"])
    if gpu_srcs:
        basename = name.split(".")[0]
        native.cc_library(
            name = basename + "_gpu",
            srcs = gpu_srcs,
            copts = copts + _cuda_copts() + if_tensorrt(["-DGOOGLE_TENSORRT=1"]) + if_nccl(["-DGOOGLE_NCCL=1"]),
            features = if_cuda(["-use_header_modules"]),
            deps = deps + if_cuda_is_configured_compat(cuda_deps) + if_rocm_is_configured(rocm_deps),
            **kwargs
        )
        cuda_deps.extend([":" + basename + "_gpu"])
        rocm_deps.extend([":" + basename + "_gpu"])
    # Fail if the op library transitively pulls in core:framework or core:lib.
    check_deps(
        name = name + "_check_deps",
        disallowed_deps = [
            clean_dep("//tensorflow/core:framework"),
            clean_dep("//tensorflow/core:lib"),
        ],
        deps = deps + if_cuda_is_configured_compat(cuda_deps) + if_rocm_is_configured(rocm_deps),
    )
    tf_cc_shared_object(
        name = name,
        srcs = srcs,
        deps = deps + if_cuda_is_configured_compat(cuda_deps) + if_rocm_is_configured(rocm_deps),
        data = if_static([name + "_check_deps"]),
        copts = copts + tf_copts(is_external = True),
        features = ["windows_export_all_symbols"],
        linkopts = linkopts + select({
            "//conditions:default": [
                "-lm",
            ],
            clean_dep("//tensorflow:windows"): [],
            clean_dep("//tensorflow:macos"): [],
        }),
        **kwargs
    )

register_extension_info(
    extension_name = "tf_custom_op_library",
    label_regex_for_dep = "{extension_name}",
)
def tf_custom_op_py_library(
        name,
        srcs = [],
        dso = [],
        kernels = [],
        srcs_version = "PY2AND3",
        visibility = None,
        deps = []):
    """py_library wrapper for a custom op: the DSO is attached as data.

    `kernels` is accepted for call-site compatibility but ignored here.
    """
    _ignore = [kernels]
    native.py_library(
        name = name,
        data = dso,
        srcs = srcs,
        srcs_version = srcs_version,
        visibility = visibility,
        deps = deps,
    )

register_extension_info(
    extension_name = "tf_custom_op_py_library",
    label_regex_for_dep = "{extension_name}",
)
# In tf_py_wrap_cc generated libraries
# module init functions are not exported unless
# they contain one of the keywords in the version file
# this prevents custom python modules.
# This function attempts to append init_module_name to list of
# exported functions in version script
def _append_init_to_versionscript_impl(ctx):
    """Expands the version-script / exported-symbols template, splicing in the
    module's Python init symbols (init_<mod>, _init_<mod>, PyInit_*)."""
    mod_name = ctx.attr.module_name
    if ctx.attr.is_version_script:
        # GNU ld version script: extend the "global:" section.
        ctx.actions.expand_template(
            template = ctx.file.template_file,
            output = ctx.outputs.versionscript,
            substitutions = {
                "global:": "global:\n init_%s;\n _init_%s;\n PyInit_*;\n _PyInit_*;" % (mod_name, mod_name),
            },
            is_executable = False,
        )
    else:
        # Darwin exported-symbols list: append after the "*tensorflow*" entry.
        ctx.actions.expand_template(
            template = ctx.file.template_file,
            output = ctx.outputs.versionscript,
            substitutions = {
                "*tensorflow*": "*tensorflow*\ninit_%s\n_init_%s\nPyInit_*\n_PyInit_*\n" % (mod_name, mod_name),
            },
            is_executable = False,
        )

_append_init_to_versionscript = rule(
    attrs = {
        "module_name": attr.string(mandatory = True),
        "template_file": attr.label(
            allow_single_file = True,
            mandatory = True,
        ),
        "is_version_script": attr.bool(
            default = True,
            doc = "whether target is a ld version script or exported symbol list",
            mandatory = False,
        ),
    },
    outputs = {"versionscript": "%{name}.lds"},
    implementation = _append_init_to_versionscript_impl,
)
def tf_py_wrap_cc(
        name,
        srcs,
        swig_includes = [],
        deps = [],
        copts = [],
        version_script = None,
        **kwargs):
    """Builds a SWIG-wrapped Python extension module plus its py_library.

    Generates: the SWIG wrapper sources, a shared object (with init symbols
    exported via a patched version script / symbol list), platform-named
    copies (.pyd on Windows), and a py_library named `name` exposing them.
    """
    module_name = name.split("/")[-1]
    # Convert a rule name such as foo/bar/baz to foo/bar/_baz.so
    # and use that as the name for the rule producing the .so file.
    cc_library_base = "/".join(name.split("/")[:-1] + ["_" + module_name])
    # TODO(b/137885063): tf_cc_shared_object needs to be cleaned up; we really
    # shouldn't be passing a name qualified with .so here.
    cc_library_name = cc_library_base + ".so"
    cc_library_pyd_name = "/".join(
        name.split("/")[:-1] + ["_" + module_name + ".pyd"],
    )
    extra_deps = []
    _py_wrap_cc(
        name = name + "_py_wrap",
        srcs = srcs,
        module_name = module_name,
        py_module_name = name,
        swig_includes = swig_includes,
        toolchain_deps = ["@bazel_tools//tools/cpp:current_cc_toolchain"],
        deps = deps + extra_deps,
    )
    if not version_script:
        # Darwin uses an exported-symbols list; everything else a version script.
        version_script = select({
            "@local_config_cuda//cuda:darwin": clean_dep("//tensorflow:tf_exported_symbols.lds"),
            "//conditions:default": clean_dep("//tensorflow:tf_version_script.lds"),
        })
    vscriptname = name + "_versionscript"
    _append_init_to_versionscript(
        name = vscriptname,
        is_version_script = select({
            "@local_config_cuda//cuda:darwin": False,
            "//conditions:default": True,
        }),
        module_name = module_name,
        template_file = version_script,
    )
    extra_linkopts = select({
        "@local_config_cuda//cuda:darwin": [
            "-Wl,-exported_symbols_list,$(location %s.lds)" % vscriptname,
        ],
        clean_dep("//tensorflow:windows"): [],
        "//conditions:default": [
            "-Wl,--version-script",
            "$(location %s.lds)" % vscriptname,
        ],
    })
    extra_deps += select({
        "@local_config_cuda//cuda:darwin": [
            "%s.lds" % vscriptname,
        ],
        clean_dep("//tensorflow:windows"): [],
        "//conditions:default": [
            "%s.lds" % vscriptname,
        ],
    })
    tf_cc_shared_object(
        name = cc_library_name,
        srcs = [module_name + ".cc"],
        copts = copts + if_not_windows([
            "-Wno-self-assign",
            "-Wno-sign-compare",
            "-Wno-write-strings",
        ]),
        linkopts = extra_linkopts,
        linkstatic = 1,
        deps = deps + extra_deps,
        **kwargs
    )
    # Copy the shared object to the per-platform names (e.g. .pyd on Windows).
    for pattern in SHARED_LIBRARY_NAME_PATTERNS:
        name_os = pattern % (cc_library_base, "")
        native.genrule(
            name = name_os + "_rule",
            srcs = [":" + cc_library_name],
            outs = [name_os],
            cmd = "cp $< $@",
        )
    native.genrule(
        name = "gen_" + cc_library_pyd_name,
        srcs = [":" + cc_library_name],
        outs = [cc_library_pyd_name],
        cmd = "cp $< $@",
    )
    native.py_library(
        name = name,
        srcs = [":" + name + ".py"],
        srcs_version = "PY2AND3",
        data = select({
            clean_dep("//tensorflow:windows"): [":" + cc_library_pyd_name],
            "//conditions:default": [":" + cc_library_name],
        }),
    )
# tests against system installed pip package. So we'd like to get rid of the deps
def py_test(deps = [], data = [], kernels = [], **kwargs):
    """native.py_test wrapper that drops TF python deps when testing against a
    system-installed pip package (//tensorflow:no_tensorflow_py_deps)."""
    # Python version placeholder
    native.py_test(
        # TODO(jlebar): Ideally we'd use tcmalloc here.,
        deps = select({
            "//conditions:default": deps,
            clean_dep("//tensorflow:no_tensorflow_py_deps"): [],
        }),
        data = data + select({
            "//conditions:default": [],
            clean_dep("//tensorflow:no_tensorflow_py_deps"): ["//tensorflow/tools/pip_package:win_pip_package_marker"],
        }) + tf_binary_dynamic_kernel_dsos(),
        exec_compatible_with = tf_exec_compatible_with(kwargs),
        **kwargs
    )

register_extension_info(
    extension_name = "py_test",
    label_regex_for_dep = "{extension_name}",
)
def py_binary(name, deps = [], **kwargs):
    """native.py_binary wrapper: deps go through an intermediate "<name>_deps"
    py_library so they can be dropped under no_tensorflow_py_deps."""
    native.py_library(
        name = name + "_deps",
        deps = deps,
    )
    native.py_binary(
        name = name,
        deps = select({
            "//conditions:default": [":" + name + "_deps"],
            clean_dep("//tensorflow:no_tensorflow_py_deps"): [],
        }),
        **kwargs
    )

register_extension_info(
    extension_name = "py_binary",
    label_regex_for_dep = "{extension_name}",
)
def tf_py_test(
        name,
        srcs,
        size = "medium",
        data = [],
        main = None,
        args = [],
        tags = [],
        shard_count = 1,
        additional_deps = [],
        additional_visibility = [],
        kernels = [],
        flaky = 0,
        xla_enable_strict_auto_jit = False,
        xla_enabled = False,
        grpc_enabled = False,
        **kwargs):
    """Creates a single TF Python test target.

    xla_enable_strict_auto_jit implies xla_enabled and adds the
    is_xla_test_true marker dep; xla_enabled / grpc_enabled append the
    corresponding extra deps.
    """
    xla_test_true_list = []
    if xla_enable_strict_auto_jit:
        xla_enabled = True
        xla_test_true_list += ["//tensorflow/python:is_xla_test_true"]
    if xla_enabled:
        additional_deps = additional_deps + tf_additional_xla_deps_py()
    if grpc_enabled:
        additional_deps = additional_deps + tf_additional_grpc_deps_py()
    py_test(
        name = name,
        size = size,
        srcs = srcs,
        args = args,
        data = data,
        flaky = flaky,
        kernels = kernels,
        main = main,
        shard_count = shard_count,
        srcs_version = "PY2AND3",
        tags = tags,
        visibility = [clean_dep("//tensorflow:internal")] +
                     additional_visibility,
        deps = depset([
            clean_dep("//tensorflow/python:extra_py_tests_deps"),
            clean_dep("//tensorflow/python:gradient_checker"),
        ] + additional_deps + xla_test_true_list),
        **kwargs
    )

register_extension_info(
    extension_name = "tf_py_test",
    label_regex_map = {"additional_deps": "deps:{extension_name}"},
)
def gpu_py_test(
        name,
        srcs,
        size = "medium",
        data = [],
        main = None,
        args = [],
        shard_count = 1,
        additional_deps = [],
        kernels = [],
        tags = [],
        flaky = 0,
        xla_enable_strict_auto_jit = False,
        xla_enabled = False,
        grpc_enabled = False):
    """Creates a cpu variant (named `name`) and a gpu variant ("<name>_gpu",
    with GPU test tags) of the same test via tf_py_test."""
    # TODO(b/122522101): Don't ignore xla_enable_strict_auto_jit and enable
    # XLA tests once enough compute resources are available.
    _ignored = [xla_enable_strict_auto_jit]
    if main == None:
        main = name + ".py"
    for config in ["cpu", "gpu"]:
        test_name = name
        test_tags = tags
        if config == "gpu":
            test_name += "_gpu"
            test_tags = test_tags + tf_gpu_tests_tags()
        tf_py_test(
            name = test_name,
            size = size,
            srcs = srcs,
            additional_deps = additional_deps,
            args = args,
            data = data,
            flaky = flaky,
            grpc_enabled = grpc_enabled,
            kernels = kernels,
            main = main,
            shard_count = shard_count,
            tags = test_tags,
            xla_enabled = xla_enabled,
            xla_enable_strict_auto_jit = False,
        )

register_extension_info(
    extension_name = "gpu_py_test",
    label_regex_map = {"additional_deps": "additional_deps:{extension_name}"},
)
# terminology changes: saving cuda_* definition for compatibility
def cuda_py_test(*args, **kwargs):
    """Compatibility alias for gpu_py_test."""
    gpu_py_test(*args, **kwargs)

register_extension_info(
    extension_name = "cuda_py_test",
    label_regex_map = {"additional_deps": "additional_deps:{extension_name}"},
)
def sycl_py_test(
        name,
        srcs,
        size = "medium",
        data = [],
        main = None,
        args = [],
        shard_count = 1,
        additional_deps = [],
        kernels = [],
        tags = [],
        flaky = 0,
        xla_enabled = False,
        grpc_enabled = False):
    """tf_py_test variant tagged with the SYCL test tags."""
    test_tags = tags + tf_sycl_tests_tags()
    tf_py_test(
        name = name,
        size = size,
        srcs = srcs,
        additional_deps = additional_deps,
        args = args,
        data = data,
        flaky = flaky,
        grpc_enabled = grpc_enabled,
        kernels = kernels,
        main = main,
        shard_count = shard_count,
        tags = test_tags,
        xla_enabled = xla_enabled,
    )

register_extension_info(
    extension_name = "sycl_py_test",
    label_regex_map = {"additional_deps": "additional_deps:{extension_name}"},
)
def py_tests(
        name,
        srcs,
        size = "medium",
        additional_deps = [],
        kernels = [],
        data = [],
        tags = [],
        shard_count = 1,
        prefix = "",
        xla_enable_strict_auto_jit = False,
        xla_enabled = False,
        grpc_enabled = False):
    """Creates one tf_py_test per source file, named after the file's basename
    (optionally "<prefix>_<basename>")."""
    for src in srcs:
        test_name = src.split("/")[-1].split(".")[0]
        if prefix:
            test_name = "%s_%s" % (prefix, test_name)
        tf_py_test(
            name = test_name,
            size = size,
            srcs = [src],
            additional_deps = additional_deps,
            data = data,
            grpc_enabled = grpc_enabled,
            kernels = kernels,
            main = src,
            shard_count = shard_count,
            tags = tags,
            xla_enabled = xla_enabled,
            xla_enable_strict_auto_jit = xla_enable_strict_auto_jit,
        )
def gpu_py_tests(
        name,
        srcs,
        size = "medium",
        additional_deps = [],
        kernels = [],
        data = [],
        shard_count = 1,
        tags = [],
        prefix = "",
        xla_enable_strict_auto_jit = False,
        xla_enabled = False,
        grpc_enabled = False):
    """py_tests with GPU test tags added to every generated target."""
    # TODO(b/122522101): Don't ignore xla_enable_strict_auto_jit and enable additional
    _ignored = [xla_enable_strict_auto_jit]
    test_tags = tags + tf_gpu_tests_tags()
    py_tests(
        name = name,
        size = size,
        srcs = srcs,
        additional_deps = additional_deps,
        data = data,
        grpc_enabled = grpc_enabled,
        kernels = kernels,
        prefix = prefix,
        shard_count = shard_count,
        tags = test_tags,
        xla_enabled = xla_enabled,
        xla_enable_strict_auto_jit = False,
    )

# Compatibility alias (see cuda_py_test above).
def cuda_py_tests(*args, **kwargs):
    gpu_py_tests(*args, **kwargs)
# make the proto_text functions, for the protos passed in <srcs>.
#
# Return a struct with fields (hdrs, srcs) containing the names of the
# generated files.
def tf_generate_proto_text_sources(name, srcs_relative_dir, srcs, protodeps = [], deps = [], visibility = None):
    """Generates .pb_text.{h,cc} / .pb_text-impl.h for each .proto in `srcs`
    and wraps them in a cc_library named `name` (plus a "<name>_hdrs"
    filegroup)."""
    out_hdrs = (
        [
            p.replace(".proto", ".pb_text.h")
            for p in srcs
        ] + [p.replace(".proto", ".pb_text-impl.h") for p in srcs]
    )
    out_srcs = [p.replace(".proto", ".pb_text.cc") for p in srcs]
    native.genrule(
        name = name + "_srcs",
        srcs = srcs + protodeps + [clean_dep("//tensorflow/tools/proto_text:placeholder.txt")],
        outs = out_hdrs + out_srcs,
        visibility = visibility,
        cmd =
            "$(location //tensorflow/tools/proto_text:gen_proto_text_functions) " +
            "$(@D) " + srcs_relative_dir + " $(SRCS)",
        tools = [
            clean_dep("//tensorflow/tools/proto_text:gen_proto_text_functions"),
        ],
    )
    native.filegroup(
        name = name + "_hdrs",
        srcs = out_hdrs,
        visibility = visibility,
    )
    native.cc_library(
        name = name,
        srcs = out_srcs,
        hdrs = out_hdrs,
        visibility = visibility,
        deps = deps,
    )
def tf_genrule_cmd_append_to_srcs(to_append):
    """Returns a genrule cmd that concatenates $(SRCS) into $(@), then appends
    a newline and the literal text `to_append`."""
    return "cat $(SRCS) > $(@) && echo >> $(@) && echo " + to_append + " >> $(@)"
def tf_version_info_genrule():
    """Generates util/version_info.cc from the repo's git state (spec/head/
    branch_ref produced by @local_config_git)."""
    native.genrule(
        name = "version_info_gen",
        srcs = [
            clean_dep("@local_config_git//:gen/spec.json"),
            clean_dep("@local_config_git//:gen/head"),
            clean_dep("@local_config_git//:gen/branch_ref"),
        ],
        outs = ["util/version_info.cc"],
        cmd =
            "$(location //tensorflow/tools/git:gen_git_source) --generate $(SRCS) \"$@\" --git_tag_override=$${GIT_TAG_OVERRIDE:-}",
        local = 1,
        tools = [clean_dep("//tensorflow/tools/git:gen_git_source")],
    )
def tf_py_build_info_genrule():
    """Generates platform/build_info.py recording CUDA/ROCm config and, on
    Windows, the CUDA/cuDNN DLL names derived from TF_CUDA/CUDNN_VERSION."""
    native.genrule(
        name = "py_build_info_gen",
        outs = ["platform/build_info.py"],
        cmd =
            "$(location //tensorflow/tools/build_info:gen_build_info) --raw_generate \"$@\" " +
            " --is_config_cuda " + if_cuda("True", "False") +
            " --is_config_rocm " + if_rocm("True", "False") +
            " --key_value " +
            if_cuda(" cuda_version_number=$${TF_CUDA_VERSION:-} cudnn_version_number=$${TF_CUDNN_VERSION:-} ", "") +
            if_windows(" msvcp_dll_name=msvcp140.dll ", "") +
            if_windows_cuda(" ".join([
                "nvcuda_dll_name=nvcuda.dll",
                "cudart_dll_name=cudart64_$$(echo $${TF_CUDA_VERSION:-} | sed \"s/\\.//\").dll",
                "cudnn_dll_name=cudnn64_$${TF_CUDNN_VERSION:-}.dll",
            ]), ""),
        local = 1,
        tools = [clean_dep("//tensorflow/tools/build_info:gen_build_info")],
    )
def cc_library_with_android_deps(
        deps,
        android_deps = [],
        common_deps = [],
        copts = tf_copts(),
        **kwargs):
    """cc_library with platform-split deps: `deps` off-Android, `android_deps`
    on Android, `common_deps` always."""
    deps = if_not_android(deps) + if_android(android_deps) + common_deps
    native.cc_library(deps = deps, copts = copts, **kwargs)

register_extension_info(
    extension_name = "cc_library_with_android_deps",
    label_regex_for_dep = "{extension_name}",
)
def tensorflow_opensource_extra_deps():
    """Extra deps for the open-source build; none here."""
    return []
# buildozer: disable=function-docstring-args
def pybind_extension(
        name,
        srcs,
        module_name,
        hdrs = [],
        features = [],
        srcs_version = "PY2AND3",
        data = [],
        copts = None,
        linkopts = [],
        deps = [],
        visibility = None,
        testonly = None,
        licenses = None,
        compatible_with = None,
        restricted_to = None,
        deprecation = None):
    """Builds a Python extension module: a linkshared cc_binary named
    "<dir>/<base>.so" (with the init/PyInit symbols exported via a generated
    version script or exported-symbols list), a ".pyd" copy for Windows, and
    a py_library named `name` exposing the right one per platform.

    `module_name` is accepted for API compatibility but unused.
    """
    _ignore = [module_name]
    # Split "pkg/dir/mod" into prefix "pkg/dir/" and basename "mod".
    p = name.rfind("/")
    if p == -1:
        sname = name
        prefix = ""
    else:
        sname = name[p + 1:]
        prefix = name[:p + 1]
    so_file = "%s%s.so" % (prefix, sname)
    pyd_file = "%s%s.pyd" % (prefix, sname)
    symbol = "init%s" % sname
    symbol2 = "init_%s" % sname
    symbol3 = "PyInit_%s" % sname
    exported_symbols_file = "%s-exported-symbols.lds" % name
    version_script_file = "%s-version-script.lds" % name
    native.genrule(
        name = name + "_exported_symbols",
        outs = [exported_symbols_file],
        cmd = "echo '_%s\n_%s\n_%s' >$@" % (symbol, symbol2, symbol3),
        output_licenses = ["unencumbered"],
        visibility = ["//visibility:private"],
        testonly = testonly,
    )
    native.genrule(
        name = name + "_version_script",
        outs = [version_script_file],
        cmd = "echo '{global:\n %s;\n %s;\n %s;\n local: *;};' >$@" % (symbol, symbol2, symbol3),
        output_licenses = ["unencumbered"],
        visibility = ["//visibility:private"],
        testonly = testonly,
    )
    native.cc_binary(
        name = so_file,
        srcs = srcs + hdrs,
        data = data,
        copts = copts,
        linkopts = linkopts + _rpath_linkopts(name) + select({
            "@local_config_cuda//cuda:darwin": [
                "-Wl,-exported_symbols_list,$(location %s)" % exported_symbols_file,
            ],
            clean_dep("//tensorflow:windows"): [],
            "//conditions:default": [
                "-Wl,--version-script",
                "$(location %s)" % version_script_file,
            ],
        }),
        deps = deps + [
            exported_symbols_file,
            version_script_file,
        ],
        features = features,
        linkshared = 1,
        testonly = testonly,
        licenses = licenses,
        visibility = visibility,
        deprecation = deprecation,
        restricted_to = restricted_to,
        compatible_with = compatible_with,
    )
    native.genrule(
        name = name + "_pyd_copy",
        srcs = [so_file],
        outs = [pyd_file],
        cmd = "cp $< $@",
        output_to_bindir = True,
        visibility = visibility,
        deprecation = deprecation,
        restricted_to = restricted_to,
        compatible_with = compatible_with,
    )
    native.py_library(
        name = name,
        data = select({
            "@org_tensorflow//tensorflow:windows": [pyd_file],
            "//conditions:default": [so_file],
        }),
        srcs_version = srcs_version,
        licenses = licenses,
        testonly = testonly,
        visibility = visibility,
        deprecation = deprecation,
        restricted_to = restricted_to,
        compatible_with = compatible_with,
    )
# buildozer: enable=function-docstring-args
def tf_python_pybind_extension(
        name,
        srcs,
        module_name,
        hdrs = [],
        features = [],
        copts = None,
        deps = []):
    """pybind_extension preconfigured for TF Python: adds the TF binary srcs,
    the pybind deps, and the MKL blob when MKL-ML is enabled."""
    pybind_extension(
        name,
        srcs + tf_binary_additional_srcs(),
        module_name,
        hdrs = hdrs,
        features = features,
        copts = copts,
        deps = deps + tf_binary_pybind_deps() + if_mkl_ml(["//third_party/mkl:intel_binary_blob"]),
    )
def if_cuda_or_rocm(if_true, if_false = []):
    """select() returning `if_true` when building with CUDA (nvcc or clang)
    or ROCm (hipcc); `if_false` otherwise."""
    return select({
        "@local_config_cuda//cuda:using_nvcc": if_true,
        "@local_config_cuda//cuda:using_clang": if_true,
        "@local_config_rocm//rocm:using_hipcc": if_true,
        "//conditions:default": if_false,
    })
def tf_jit_compilation_passes_extra_deps():
    """Extra deps for JIT compilation passes; none in this build."""
    return []
def if_mlir(if_true, if_false = []):
    """select() on //tensorflow:with_mlir_support."""
    return select({
        "//conditions:default": if_false,
        "//tensorflow:with_mlir_support": if_true,
    })
def if_mlir_tflite(if_true, if_false = []):
    """Unconditionally selects the MLIR branch.

    `if_false` is accepted for call-site symmetry with if_mlir but is never
    returned: TFLite is always built with MLIR here.
    """
    # TODO(b/138724071): Remove when build is stable.
    return if_true
def tfcompile_extra_flags():
    """Extra flags for tfcompile; empty in this build."""
    return ""
def tf_grpc_dependency():
    """Label of the gRPC C dependency used by this build."""
    return "//tensorflow:grpc"
def tf_grpc_cc_dependency():
    """Label of the gRPC C++ dependency used by this build."""
    return "//tensorflow:grpc++"
| true
| true
|
f708f38157a3bbf5a76937de0696b8f45e77f048
| 5,275
|
py
|
Python
|
controllers/assessments.py
|
dgerod/cb4oru
|
b5fb3bd52193ab21b30b6917232a799ac41b6c32
|
[
"Apache-2.0"
] | 1
|
2018-01-22T20:23:27.000Z
|
2018-01-22T20:23:27.000Z
|
controllers/assessments.py
|
dgerod/cb4oru
|
b5fb3bd52193ab21b30b6917232a799ac41b6c32
|
[
"Apache-2.0"
] | null | null | null |
controllers/assessments.py
|
dgerod/cb4oru
|
b5fb3bd52193ab21b30b6917232a799ac41b6c32
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and methods to manage all aspects of student assessments."""
__author__ = 'pgbovine@google.com (Philip Guo)'
import datetime
import json
from models import models
from models import utils
from models.models import Student
from models.models import StudentAnswersEntity
from utils import BaseHandler
from google.appengine.ext import db
def store_score(student, assessment_type, score):
    """Stores a student's score on a particular assessment.
    Args:
        student: the student whose data is stored.
        assessment_type: the type of the assessment.
        score: the student's score on this assessment.
    Returns:
        the (possibly modified) assessment_type, which the caller can
        use to render an appropriate response page.
    """
    # FIXME: Course creators can edit this code to implement custom
    # assessment scoring and storage behavior
    # TODO(pgbovine): Note that the latest version of answers are always saved,
    # but scores are only saved if they're higher than the previous attempt.
    # This can lead to unexpected analytics behavior. Resolve this.
    existing_score = utils.get_score(student, assessment_type)
    # remember to cast to int for comparison
    # (scores come back from storage as strings/None)
    if (existing_score is None) or (score > int(existing_score)):
        utils.set_score(student, assessment_type, score)
    # special handling for computing final score:
    if assessment_type == 'postcourse':
        midcourse_score = utils.get_score(student, 'midcourse')
        if midcourse_score is None:
            midcourse_score = 0
        else:
            midcourse_score = int(midcourse_score)
        if existing_score is None:
            postcourse_score = score
        else:
            # Best-of: keep the higher of the stored and current attempt.
            postcourse_score = int(existing_score)
            if score > postcourse_score:
                postcourse_score = score
        # Calculate overall score based on a formula
        # (weighted 30% midcourse, 70% postcourse; pass threshold is 70).
        overall_score = int((0.3 * midcourse_score) + (0.7 * postcourse_score))
        # TODO(pgbovine): this changing of assessment_type is ugly ...
        if overall_score >= 70:
            assessment_type = 'postcourse_pass'
        else:
            assessment_type = 'postcourse_fail'
        utils.set_score(student, 'overall_score', overall_score)
    return assessment_type
class AnswerHandler(BaseHandler):
    """Handler for saving assessment answers."""

    # Find student entity and save answers
    @db.transactional(xg=True)
    def update_assessment_transaction(
        self, email, assessment_type, new_answers, score):
        """Stores answer and updates user scores.

        Runs in a cross-group datastore transaction so the Student and
        StudentAnswersEntity writes commit atomically.
        """
        student = Student.get_by_email(email)
        # It may be that old Student entities don't have user_id set; fix it.
        if not student.user_id:
            student.user_id = self.get_user().user_id()
        # Latest answers always overwrite; keyed by the student's user_id.
        answers = StudentAnswersEntity.get_by_key_name(student.user_id)
        if not answers:
            answers = StudentAnswersEntity(key_name=student.user_id)
        answers.updated_on = datetime.datetime.now()
        utils.set_answer(answers, assessment_type, new_answers)
        # store_score may rewrite assessment_type (e.g. postcourse_pass/fail).
        assessment_type = store_score(student, assessment_type, score)
        student.put()
        answers.put()
        # Also record the event, which is useful for tracking multiple
        # submissions and history.
        models.EventEntity.record(
            'submit-assessment', self.get_user(), json.dumps({
                'type': 'assessment-%s' % assessment_type,
                'values': new_answers, 'location': 'AnswerHandler'}))
        return (student, assessment_type)

    def post(self):
        """Handles POST requests."""
        # Only enrolled students may submit; also enforces XSRF protection.
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return
        if not self.assert_xsrf_token_or_fail(self.request, 'assessment-post'):
            return
        assessment_type = self.request.get('assessment_type')
        # Convert answers from JSON to dict.
        answers = self.request.get('answers')
        if answers:
            answers = json.loads(answers)
        else:
            answers = []
        # TODO(pgbovine): consider storing as float for better precision
        score = int(round(float(self.request.get('score'))))
        # Record score.
        (student, assessment_type) = self.update_assessment_transaction(
            student.key().name(), assessment_type, answers, score)
        self.template_value['navbar'] = {'course': True}
        self.template_value['assessment'] = assessment_type
        self.template_value['student_score'] = utils.get_score(
            student, 'overall_score')
        self.render('test_confirmation.html')
| 36.631944
| 79
| 0.676209
|
__author__ = 'pgbovine@google.com (Philip Guo)'
import datetime
import json
from models import models
from models import utils
from models.models import Student
from models.models import StudentAnswersEntity
from utils import BaseHandler
from google.appengine.ext import db
def store_score(student, assessment_type, score):
# This can lead to unexpected analytics behavior. Resolve this.
existing_score = utils.get_score(student, assessment_type)
# remember to cast to int for comparison
if (existing_score is None) or (score > int(existing_score)):
utils.set_score(student, assessment_type, score)
# special handling for computing final score:
if assessment_type == 'postcourse':
midcourse_score = utils.get_score(student, 'midcourse')
if midcourse_score is None:
midcourse_score = 0
else:
midcourse_score = int(midcourse_score)
if existing_score is None:
postcourse_score = score
else:
postcourse_score = int(existing_score)
if score > postcourse_score:
postcourse_score = score
# Calculate overall score based on a formula
overall_score = int((0.3 * midcourse_score) + (0.7 * postcourse_score))
# TODO(pgbovine): this changing of assessment_type is ugly ...
if overall_score >= 70:
assessment_type = 'postcourse_pass'
else:
assessment_type = 'postcourse_fail'
utils.set_score(student, 'overall_score', overall_score)
return assessment_type
class AnswerHandler(BaseHandler):
# Find student entity and save answers
@db.transactional(xg=True)
def update_assessment_transaction(
self, email, assessment_type, new_answers, score):
student = Student.get_by_email(email)
# It may be that old Student entities don't have user_id set; fix it.
if not student.user_id:
student.user_id = self.get_user().user_id()
answers = StudentAnswersEntity.get_by_key_name(student.user_id)
if not answers:
answers = StudentAnswersEntity(key_name=student.user_id)
answers.updated_on = datetime.datetime.now()
utils.set_answer(answers, assessment_type, new_answers)
assessment_type = store_score(student, assessment_type, score)
student.put()
answers.put()
models.EventEntity.record(
'submit-assessment', self.get_user(), json.dumps({
'type': 'assessment-%s' % assessment_type,
'values': new_answers, 'location': 'AnswerHandler'}))
return (student, assessment_type)
def post(self):
student = self.personalize_page_and_get_enrolled()
if not student:
return
if not self.assert_xsrf_token_or_fail(self.request, 'assessment-post'):
return
assessment_type = self.request.get('assessment_type')
answers = self.request.get('answers')
if answers:
answers = json.loads(answers)
else:
answers = []
score = int(round(float(self.request.get('score'))))
(student, assessment_type) = self.update_assessment_transaction(
student.key().name(), assessment_type, answers, score)
self.template_value['navbar'] = {'course': True}
self.template_value['assessment'] = assessment_type
self.template_value['student_score'] = utils.get_score(
student, 'overall_score')
self.render('test_confirmation.html')
| true
| true
|
f708f4f7a69c96ae81bce8dac525d2845164097e
| 1,608
|
py
|
Python
|
modules/party/forms.py
|
BurraAbhishek/VirtualElections_v2
|
db95f58d09ee5ed9755a3910aebcbfb48302b04e
|
[
"Apache-2.0"
] | 1
|
2022-01-30T19:55:47.000Z
|
2022-01-30T19:55:47.000Z
|
modules/party/forms.py
|
BurraAbhishek/VirtualElections_v2
|
db95f58d09ee5ed9755a3910aebcbfb48302b04e
|
[
"Apache-2.0"
] | null | null | null |
modules/party/forms.py
|
BurraAbhishek/VirtualElections_v2
|
db95f58d09ee5ed9755a3910aebcbfb48302b04e
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
from django.forms.widgets import PasswordInput
from modules.common.id_choicefield import IdentificationField
class PartyForm(forms.Form):
    """Registration form for a contesting party and its candidate.

    Collects party details, the candidate's identity, an optional manifesto
    and symbol image, and a password entered twice for confirmation.
    """
    error_messages = {
        'password_mismatch': (
            'The confirmation was different from that you chose.'
        ),
    }
    party_name = forms.CharField(label="Name of the contesting party")
    cname = forms.CharField(label="Candidate's Name")
    age = forms.IntegerField(min_value=0, label="Candidate's Age")
    citype = IdentificationField(label="Identity Proof of the Candidate")
    cidno = forms.CharField(label="Passport / ID Number")
    party_manifesto = forms.CharField(
        widget=forms.Textarea,
        required=False
    )
    party_symbol = forms.ImageField(
        required=False,
        help_text="The maximum size permitted is 2.5 MB"
    )
    cpd1 = forms.CharField(
        widget=PasswordInput,
        label="Enter your password",
        strip=False,
    )
    cpd2 = forms.CharField(
        widget=PasswordInput,
        label="Confirm Password",
        strip=False,
        help_text=("Enter the same password as before, for verification")
    )
    show_profile = forms.ChoiceField(
        choices=[
            (True, "Show Profile to public"),
            (False, "Hide profile from public")
        ],
        help_text="The Election Commission can override this setting."
    )

    def clean_cpd2(self):
        """Validate that the password confirmation matches the password.

        Bug fix: the 'password_mismatch' message declared in error_messages
        was never used — mismatched passwords were silently accepted.
        """
        cpd1 = self.cleaned_data.get("cpd1")
        cpd2 = self.cleaned_data.get("cpd2")
        if cpd1 and cpd2 and cpd1 != cpd2:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'],
                code='password_mismatch',
            )
        return cpd2
class PartyEditForm(forms.Form):
    """Minimal edit form for a party: its name, plus a password field."""
    party_name = forms.CharField()
    cpass = forms.CharField(
        label="Enter your password",
        strip=False,
        widget=PasswordInput,
    )
| 24.738462
| 73
| 0.643657
|
from django import forms
from django.forms.widgets import PasswordInput
from modules.common.id_choicefield import IdentificationField
class PartyForm(forms.Form):
error_messages = {
'password_mismatch': (
'The confirmation was different from that you chose.'
),
}
party_name = forms.CharField(label="Name of the contesting party")
cname = forms.CharField(label="Candidate's Name")
age = forms.IntegerField(min_value=0, label="Candidate's Age")
citype = IdentificationField(label="Identity Proof of the Candidate")
cidno = forms.CharField(label="Passport / ID Number")
party_manifesto = forms.CharField(
widget=forms.Textarea,
required=False
)
party_symbol = forms.ImageField(
required=False,
help_text="The maximum size permitted is 2.5 MB"
)
cpd1 = forms.CharField(
widget=PasswordInput,
label="Enter your password",
strip=False,
)
cpd2 = forms.CharField(
widget=PasswordInput,
label="Confirm Password",
strip=False,
help_text=("Enter the same password as before, for verification")
)
show_profile = forms.ChoiceField(
choices=[
(True, "Show Profile to public"),
(False, "Hide profile from public")
],
help_text="The Election Commission can override this setting."
)
class PartyEditForm(forms.Form):
party_name = forms.CharField()
cpass = forms.CharField(
widget=PasswordInput,
label="Enter your password",
strip=False,
)
| true
| true
|
f708f5aace0d5437314d23389d4db32b009a8935
| 8,265
|
py
|
Python
|
horizon/workflows/views.py
|
Hodorable/0602
|
3b1e4cb7458e4f456bfebc52fc2902205c36cc15
|
[
"Apache-2.0"
] | 1
|
2019-09-11T11:56:19.000Z
|
2019-09-11T11:56:19.000Z
|
tools/dockerize/webportal/usr/lib/python2.7/site-packages/horizon/workflows/views.py
|
foruy/openflow-multiopenstack
|
74140b041ac25ed83898ff3998e8dcbed35572bb
|
[
"Apache-2.0"
] | null | null | null |
tools/dockerize/webportal/usr/lib/python2.7/site-packages/horizon/workflows/views.py
|
foruy/openflow-multiopenstack
|
74140b041ac25ed83898ff3998e8dcbed35572bb
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
from django import forms
from django import http
from django import shortcuts
from django.views import generic
import six
from horizon import exceptions
from horizon.forms import views as hz_views
from horizon.forms.views import ADD_TO_FIELD_HEADER # noqa
from horizon import messages
class WorkflowView(hz_views.ModalBackdropMixin, generic.TemplateView):
    """A generic class-based view which handles the intricacies of workflow
    processing with minimal user configuration.

    .. attribute:: workflow_class

        The :class:`~horizon.workflows.Workflow` class which this view handles.
        Required.

    .. attribute:: template_name

        The template to use when rendering this view via standard HTTP
        requests. Required.

    .. attribute:: ajax_template_name

        The template to use when rendering the workflow for AJAX requests.
        In general the default common template should be used. Defaults to
        ``"horizon/common/_workflow.html"``.

    .. attribute:: context_object_name

        The key which should be used for the workflow object in the template
        context. Defaults to ``"workflow"``.
    """
    workflow_class = None
    template_name = 'horizon/common/_workflow_base.html'
    context_object_name = "workflow"
    ajax_template_name = 'horizon/common/_workflow.html'
    # Kept at class level for backward compatibility only; __init__ shadows
    # it with a per-instance dict (see below).
    step_errors = {}

    def __init__(self):
        super(WorkflowView, self).__init__()
        if not self.workflow_class:
            raise AttributeError("You must set the workflow_class attribute "
                                 "on %s." % self.__class__.__name__)
        # Bug fix: ``step_errors`` was a mutable class attribute shared by
        # every instance (and therefore across requests); give each instance
        # its own dict so recorded errors cannot leak between views.
        self.step_errors = {}

    def get_initial(self):
        """Returns initial data for the workflow. Defaults to using the GET
        parameters to allow pre-seeding of the workflow context values.
        """
        return copy.copy(self.request.GET)

    def get_workflow(self):
        """Returns the instantiated workflow class."""
        extra_context = self.get_initial()
        entry_point = self.request.GET.get("step", None)
        workflow = self.workflow_class(self.request,
                                       context_seed=extra_context,
                                       entry_point=entry_point)
        return workflow

    def get_context_data(self, **kwargs):
        """Returns the template context, including the workflow class.

        This method should be overridden in subclasses to provide additional
        context data to the template.
        """
        context = super(WorkflowView, self).get_context_data(**kwargs)
        workflow = self.get_workflow()
        context[self.context_object_name] = workflow
        # NOTE(review): request.REQUEST was deprecated in Django 1.7 and
        # removed in 1.9; switch to request.GET/request.POST when upgrading.
        next = self.request.REQUEST.get(workflow.redirect_param_name, None)
        context['REDIRECT_URL'] = next
        context['layout'] = self.get_layout()
        # For consistency with Workflow class
        context['modal'] = 'modal' in context['layout']
        if ADD_TO_FIELD_HEADER in self.request.META:
            context['add_to_field'] = self.request.META[ADD_TO_FIELD_HEADER]
        return context

    def get_layout(self):
        """Returns CSS classes for the workflow element in the template,
        based on the request type (AJAX modal vs. static page) and the
        workflow's fullscreen/wizard flags.
        """
        if self.request.is_ajax():
            layout = ['modal', ]
            if self.workflow_class.fullscreen:
                layout += ['fullscreen', ]
        else:
            layout = ['static_page', ]
        if self.workflow_class.wizard:
            layout += ['wizard', ]
        return layout

    def get_template_names(self):
        """Returns the template name to use for this request."""
        if self.request.is_ajax():
            template = self.ajax_template_name
        else:
            template = self.template_name
        return template

    def get_object_id(self, obj):
        """Returns the id reported back to "add to field" AJAX callers."""
        return getattr(obj, "id", None)

    def get_object_display(self, obj):
        """Returns the name reported back to "add to field" AJAX callers."""
        return getattr(obj, "name", None)

    def add_error_to_step(self, error_msg, step):
        """Records an error message to be attached to ``step`` on render."""
        self.step_errors[step] = error_msg

    def set_workflow_step_errors(self, context):
        """Copies recorded per-step errors onto the workflow object."""
        workflow = context['workflow']
        for step in self.step_errors:
            error_msg = self.step_errors[step]
            workflow.add_error_to_step(error_msg, step)

    def get(self, request, *args, **kwargs):
        """Handler for HTTP GET requests."""
        context = self.get_context_data(**kwargs)
        self.set_workflow_step_errors(context)
        return self.render_to_response(context)

    def validate_steps(self, request, workflow, start, end):
        """Validates the workflow steps from ``start`` to ``end``, inclusive.

        Returns a dict describing the validation state of the workflow.
        """
        errors = {}
        for step in workflow.steps[start:end + 1]:
            if not step.action.is_valid():
                # Bug fix: the inner comprehension previously shadowed the
                # outer ``errors`` dict and called the Python 2-only
                # ``unicode`` builtin; use distinct names and six.text_type
                # so this also works on Python 3.
                errors[step.slug] = dict(
                    (field, [six.text_type(msg) for msg in field_errors])
                    for (field, field_errors)
                    in six.iteritems(step.action.errors))
        return {
            'has_errors': bool(errors),
            'workflow_slug': workflow.slug,
            'errors': errors,
        }

    def post(self, request, *args, **kwargs):
        """Handler for HTTP POST requests."""
        context = self.get_context_data(**kwargs)
        workflow = context[self.context_object_name]
        try:
            # Check for the VALIDATE_STEP* headers, if they are present
            # and valid integers, return validation results as JSON,
            # otherwise proceed normally.
            validate_step_start = int(self.request.META.get(
                'HTTP_X_HORIZON_VALIDATE_STEP_START', ''))
            validate_step_end = int(self.request.META.get(
                'HTTP_X_HORIZON_VALIDATE_STEP_END', ''))
        except ValueError:
            # No VALIDATE_STEP* headers, or invalid values. Just proceed
            # with normal workflow handling for POSTs.
            pass
        else:
            # There are valid VALIDATE_STEP* headers, so only do validation
            # for the specified steps and return results.
            data = self.validate_steps(request, workflow,
                                       validate_step_start,
                                       validate_step_end)
            return http.HttpResponse(json.dumps(data),
                                     content_type="application/json")
        if not workflow.is_valid():
            return self.render_to_response(context)
        try:
            success = workflow.finalize()
        except forms.ValidationError:
            return self.render_to_response(context)
        except Exception:
            success = False
            exceptions.handle(request)
        if success:
            msg = workflow.format_status_message(workflow.success_message)
            messages.success(request, msg)
        else:
            msg = workflow.format_status_message(workflow.failure_message)
            messages.error(request, msg)
        if "HTTP_X_HORIZON_ADD_TO_FIELD" in self.request.META:
            field_id = self.request.META["HTTP_X_HORIZON_ADD_TO_FIELD"]
            response = http.HttpResponse()
            if workflow.object:
                data = [self.get_object_id(workflow.object),
                        self.get_object_display(workflow.object)]
                response.content = json.dumps(data)
            response["X-Horizon-Add-To-Field"] = field_id
            return response
        next_url = self.request.REQUEST.get(workflow.redirect_param_name, None)
        return shortcuts.redirect(next_url or workflow.get_success_url())
| 38.44186
| 79
| 0.632184
|
import copy
import json
from django import forms
from django import http
from django import shortcuts
from django.views import generic
import six
from horizon import exceptions
from horizon.forms import views as hz_views
from horizon.forms.views import ADD_TO_FIELD_HEADER from horizon import messages
class WorkflowView(hz_views.ModalBackdropMixin, generic.TemplateView):
workflow_class = None
template_name = 'horizon/common/_workflow_base.html'
context_object_name = "workflow"
ajax_template_name = 'horizon/common/_workflow.html'
step_errors = {}
def __init__(self):
super(WorkflowView, self).__init__()
if not self.workflow_class:
raise AttributeError("You must set the workflow_class attribute "
"on %s." % self.__class__.__name__)
def get_initial(self):
return copy.copy(self.request.GET)
def get_workflow(self):
extra_context = self.get_initial()
entry_point = self.request.GET.get("step", None)
workflow = self.workflow_class(self.request,
context_seed=extra_context,
entry_point=entry_point)
return workflow
def get_context_data(self, **kwargs):
context = super(WorkflowView, self).get_context_data(**kwargs)
workflow = self.get_workflow()
context[self.context_object_name] = workflow
next = self.request.REQUEST.get(workflow.redirect_param_name, None)
context['REDIRECT_URL'] = next
context['layout'] = self.get_layout()
context['modal'] = 'modal' in context['layout']
if ADD_TO_FIELD_HEADER in self.request.META:
context['add_to_field'] = self.request.META[ADD_TO_FIELD_HEADER]
return context
def get_layout(self):
if self.request.is_ajax():
layout = ['modal', ]
if self.workflow_class.fullscreen:
layout += ['fullscreen', ]
else:
layout = ['static_page', ]
if self.workflow_class.wizard:
layout += ['wizard', ]
return layout
def get_template_names(self):
if self.request.is_ajax():
template = self.ajax_template_name
else:
template = self.template_name
return template
def get_object_id(self, obj):
return getattr(obj, "id", None)
def get_object_display(self, obj):
return getattr(obj, "name", None)
def add_error_to_step(self, error_msg, step):
self.step_errors[step] = error_msg
def set_workflow_step_errors(self, context):
workflow = context['workflow']
for step in self.step_errors:
error_msg = self.step_errors[step]
workflow.add_error_to_step(error_msg, step)
def get(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
self.set_workflow_step_errors(context)
return self.render_to_response(context)
def validate_steps(self, request, workflow, start, end):
errors = {}
for step in workflow.steps[start:end + 1]:
if not step.action.is_valid():
errors[step.slug] = dict(
(field, [unicode(error) for error in errors])
for (field, errors) in six.iteritems(step.action.errors))
return {
'has_errors': bool(errors),
'workflow_slug': workflow.slug,
'errors': errors,
}
def post(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
workflow = context[self.context_object_name]
try:
validate_step_start = int(self.request.META.get(
'HTTP_X_HORIZON_VALIDATE_STEP_START', ''))
validate_step_end = int(self.request.META.get(
'HTTP_X_HORIZON_VALIDATE_STEP_END', ''))
except ValueError:
pass
else:
data = self.validate_steps(request, workflow,
validate_step_start,
validate_step_end)
return http.HttpResponse(json.dumps(data),
content_type="application/json")
if not workflow.is_valid():
return self.render_to_response(context)
try:
success = workflow.finalize()
except forms.ValidationError:
return self.render_to_response(context)
except Exception:
success = False
exceptions.handle(request)
if success:
msg = workflow.format_status_message(workflow.success_message)
messages.success(request, msg)
else:
msg = workflow.format_status_message(workflow.failure_message)
messages.error(request, msg)
if "HTTP_X_HORIZON_ADD_TO_FIELD" in self.request.META:
field_id = self.request.META["HTTP_X_HORIZON_ADD_TO_FIELD"]
response = http.HttpResponse()
if workflow.object:
data = [self.get_object_id(workflow.object),
self.get_object_display(workflow.object)]
response.content = json.dumps(data)
response["X-Horizon-Add-To-Field"] = field_id
return response
next_url = self.request.REQUEST.get(workflow.redirect_param_name, None)
return shortcuts.redirect(next_url or workflow.get_success_url())
| true
| true
|
f708f675d7c4e19130ddf6f100485d7b50c26946
| 711
|
py
|
Python
|
PygFW/Builtin/Events/EntityClickEvent.py
|
shauncameron/PygFW
|
970541d0c3fc6e1f306fe527d90834a620694804
|
[
"MIT"
] | null | null | null |
PygFW/Builtin/Events/EntityClickEvent.py
|
shauncameron/PygFW
|
970541d0c3fc6e1f306fe527d90834a620694804
|
[
"MIT"
] | 4
|
2021-04-15T00:12:14.000Z
|
2021-04-18T20:46:09.000Z
|
build/lib/PygFW/Builtin/Events/EntityClickEvent.py
|
shauncameron/PygFW
|
970541d0c3fc6e1f306fe527d90834a620694804
|
[
"MIT"
] | null | null | null |
from PygFW import Event
import pygame
class EntityClickEvent(Event):
    """Routes pygame mouse-button-down events to clickable scene entities."""

    def __init__(self, scene_surface):
        # Register this handler for MOUSEBUTTONDOWN on the scene surface.
        Event.__init__(self, scene_surface, pygame.MOUSEBUTTONDOWN)

    def executor(self, scene, event):
        """Call ``click`` on each clickable entity under the cursor."""
        for candidate in scene.entities._list_:
            if not candidate.clickable:
                continue
            if candidate.collides_with([event.pos]):
                candidate.click(scene, event)
class EntityUnclickEvent(Event):
    """Routes pygame mouse-button-up events to entities accepting un-clicks."""

    def __init__(self, scene_surface):
        # Register this handler for MOUSEBUTTONUP on the scene surface.
        Event.__init__(self, scene_surface, pygame.MOUSEBUTTONUP)

    def executor(self, scene, event):
        """Call ``un_click`` on every entity flagged ``un_clickable``."""
        for candidate in scene.entities._list_:
            if not candidate.un_clickable:
                continue
            candidate.un_click(scene, event)
| 21.545455
| 67
| 0.651195
|
from PygFW import Event
import pygame
class EntityClickEvent(Event):
def __init__(self, scene_surface):
Event.__init__(self, scene_surface, pygame.MOUSEBUTTONDOWN)
def executor(self, scene, event):
for entity in scene.entities._list_:
if entity.clickable:
if entity.collides_with([event.pos]):
entity.click(scene, event)
class EntityUnclickEvent(Event):
def __init__(self, scene_surface):
Event.__init__(self, scene_surface, pygame.MOUSEBUTTONUP)
def executor(self, scene, event):
for entity in scene.entities._list_:
if entity.un_clickable:
entity.un_click(scene, event)
| true
| true
|
f708f82a32b1ca8094a66b729c31827022f652e8
| 1,690
|
py
|
Python
|
examples/wait_terminated.py
|
gridengine/drmaa2-python
|
36e84e8dc0079c9e3d772c1536f07ecb1e435684
|
[
"Apache-2.0"
] | 10
|
2019-05-28T23:17:39.000Z
|
2022-01-14T08:52:54.000Z
|
examples/wait_terminated.py
|
iamh2o/drmaa2-python
|
36e84e8dc0079c9e3d772c1536f07ecb1e435684
|
[
"Apache-2.0"
] | 5
|
2019-11-01T10:50:19.000Z
|
2021-12-13T11:56:19.000Z
|
examples/wait_terminated.py
|
iamh2o/drmaa2-python
|
36e84e8dc0079c9e3d772c1536f07ecb1e435684
|
[
"Apache-2.0"
] | 2
|
2019-02-26T16:36:07.000Z
|
2019-10-29T02:02:06.000Z
|
#!/usr/bin/env python
# ___INFO__MARK_BEGIN__
#######################################################################################
# Copyright 2008-2021 Univa Corporation (acquired and owned by Altair Engineering Inc.)
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################################
# ___INFO__MARK_END__
import time
from drmaa2 import JobSession
from drmaa2 import JobInfo
from drmaa2 import Time
if __name__ == '__main__':
    # Demonstrates DRMAA2 job submission plus the two wait primitives:
    # a bounded wait_started and an unbounded wait_terminated.
    js = JobSession('js-01')
    print('Created job session: %s' % js.name)
    # Submit a job that simply sleeps for 100 seconds.
    j = js.run_job({'remote_command': '/bin/sleep', 'args': ['100']})
    print('Submitted job: %s, waiting on start' % j)
    t1 = time.time()
    # Wait up to 10 seconds for the job to leave the queued state.
    j.wait_started(10)
    t2 = time.time()
    print('Wait on job start is over after %s seconds' % (t2 - t1))
    ji = j.get_info()
    print('Retrieved job info: %s' % ji)
    print('Waiting on job %s termination' % j.id)
    t1 = time.time()
    # Block indefinitely until the job finishes.
    j.wait_terminated(Time.INFINITE_TIME.value)
    t2 = time.time()
    print('Job terminated, wait is over after %s seconds' % (t2 - t1))
    ji = j.get_info()
    print('Retrieved job info after termination: %s' % ji)
| 38.409091
| 87
| 0.623669
|
import time
from drmaa2 import JobSession
from drmaa2 import JobInfo
from drmaa2 import Time
if __name__ == '__main__':
js = JobSession('js-01')
print('Created job session: %s' % js.name)
j = js.run_job({'remote_command': '/bin/sleep', 'args': ['100']})
print('Submitted job: %s, waiting on start' % j)
t1 = time.time()
j.wait_started(10)
t2 = time.time()
print('Wait on job start is over after %s seconds' % (t2 - t1))
ji = j.get_info()
print('Retrieved job info: %s' % ji)
print('Waiting on job %s termination' % j.id)
t1 = time.time()
j.wait_terminated(Time.INFINITE_TIME.value)
t2 = time.time()
print('Job terminated, wait is over after %s seconds' % (t2 - t1))
ji = j.get_info()
print('Retrieved job info after termination: %s' % ji)
| true
| true
|
f708f85ee111c98d70db5adf69d738fcf803c184
| 1,991
|
py
|
Python
|
tensorflow/predict.py
|
alishameli/CS231n-Sample-Code-1
|
e47e593026c80530f7c387c4feca24f88c1618a2
|
[
"BSD-2-Clause"
] | null | null | null |
tensorflow/predict.py
|
alishameli/CS231n-Sample-Code-1
|
e47e593026c80530f7c387c4feca24f88c1618a2
|
[
"BSD-2-Clause"
] | null | null | null |
tensorflow/predict.py
|
alishameli/CS231n-Sample-Code-1
|
e47e593026c80530f7c387c4feca24f88c1618a2
|
[
"BSD-2-Clause"
] | null | null | null |
import argparse
import os
import numpy as np
import tensorflow as tf
from matplotlib import pyplot as plt
from PIL import Image
import models
def predict(model_data_path, image_path):
    """Run depth prediction on one image and plot the resulting depth map.

    :param model_data_path: path to the converted parameter file consumed
        by ``net.load``.
    :param image_path: path to the input image.
    :returns: the raw network output from ``sess.run`` (indexed as
        ``pred[0, :, :, 0]`` below, so a 4-D batch of single-channel maps).
    """
    # Default input size
    height = 228
    width = 304
    channels = 3
    batch_size = 1

    # Read image and scale it to the fixed network input size.
    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10; use
    # Image.LANCZOS if Pillow is ever upgraded.
    img = Image.open(image_path)
    img = img.resize([width,height], Image.ANTIALIAS)
    img = np.array(img).astype('float32')
    img = np.expand_dims(np.asarray(img), axis = 0)

    # Create a placeholder for the input image
    input_node = tf.placeholder(tf.float32, shape=(None, height, width, channels))

    # Construct the network
    net = models.ResNet50UpProj({'data': input_node}, batch_size)

    with tf.Session() as sess:

        # Load the converted parameters
        print('Loading the model')
        net.load(model_data_path, sess)

        # Find variables the checkpoint did not initialize by probing each
        # one with sess.run and catching the failure, then initialize them.
        uninitialized_vars = []
        for var in tf.global_variables():
            try:
                sess.run(var)
            except tf.errors.FailedPreconditionError:
                uninitialized_vars.append(var)

        init_new_vars_op = tf.variables_initializer(uninitialized_vars)
        sess.run(init_new_vars_op)

        # Evalute the network for the given image
        pred = sess.run(net.get_output(), feed_dict={input_node: img})

        # Plot result (blocks until the window is closed)
        fig = plt.figure()
        ii = plt.imshow(pred[0,:,:,0], interpolation='nearest')
        fig.colorbar(ii)
        plt.show()

        return pred
def main():
    """Parse command-line arguments and run prediction on the given input."""
    # Parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('model_path', help='Converted parameters for the model')
    parser.add_argument('image_paths', help='Directory of images to predict')
    args = parser.parse_args()

    # Predict the image
    pred = predict(args.model_path, args.image_paths)

    # Hard exit to avoid hanging on TensorFlow background threads.
    os._exit(0)

if __name__ == '__main__':
    main()
| 25.857143
| 82
| 0.616775
|
import argparse
import os
import numpy as np
import tensorflow as tf
from matplotlib import pyplot as plt
from PIL import Image
import models
def predict(model_data_path, image_path):
height = 228
width = 304
channels = 3
batch_size = 1
img = Image.open(image_path)
img = img.resize([width,height], Image.ANTIALIAS)
img = np.array(img).astype('float32')
img = np.expand_dims(np.asarray(img), axis = 0)
input_node = tf.placeholder(tf.float32, shape=(None, height, width, channels))
net = models.ResNet50UpProj({'data': input_node}, batch_size)
with tf.Session() as sess:
print('Loading the model')
net.load(model_data_path, sess)
uninitialized_vars = []
for var in tf.global_variables():
try:
sess.run(var)
except tf.errors.FailedPreconditionError:
uninitialized_vars.append(var)
init_new_vars_op = tf.variables_initializer(uninitialized_vars)
sess.run(init_new_vars_op)
pred = sess.run(net.get_output(), feed_dict={input_node: img})
fig = plt.figure()
ii = plt.imshow(pred[0,:,:,0], interpolation='nearest')
fig.colorbar(ii)
plt.show()
return pred
def main():
parser = argparse.ArgumentParser()
parser.add_argument('model_path', help='Converted parameters for the model')
parser.add_argument('image_paths', help='Directory of images to predict')
args = parser.parse_args()
pred = predict(args.model_path, args.image_paths)
os._exit(0)
if __name__ == '__main__':
main()
| true
| true
|
f708f8c29a2b935211377999a17a11f8c119f77c
| 168
|
py
|
Python
|
tests/model_control/detailed/transf_Logit/model_control_one_enabled_Logit_MovingAverage_Seasonal_DayOfMonth_LSTM.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
tests/model_control/detailed/transf_Logit/model_control_one_enabled_Logit_MovingAverage_Seasonal_DayOfMonth_LSTM.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | 1
|
2019-11-30T23:39:38.000Z
|
2019-12-01T04:34:35.000Z
|
tests/model_control/detailed/transf_Logit/model_control_one_enabled_Logit_MovingAverage_Seasonal_DayOfMonth_LSTM.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod

# Build one ozone model with a single enabled option per component.
# Presumably the lists are (transformations, trends, periodics, AR models)
# — confirm against testmod.build_model's signature.
testmod.build_model( ['Logit'] , ['MovingAverage'] , ['Seasonal_DayOfMonth'] , ['LSTM'] );
| 42
| 90
| 0.761905
|
import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Logit'] , ['MovingAverage'] , ['Seasonal_DayOfMonth'] , ['LSTM'] );
| true
| true
|
f708f8e5cdfa0014f95318bafc336ec46675c827
| 12,690
|
py
|
Python
|
src/hpc/autoscale/job/demandprinter.py
|
hmeiland/cyclecloud-scalelib
|
f246737ddea631c7378d716a51431857eb6b06b3
|
[
"MIT"
] | null | null | null |
src/hpc/autoscale/job/demandprinter.py
|
hmeiland/cyclecloud-scalelib
|
f246737ddea631c7378d716a51431857eb6b06b3
|
[
"MIT"
] | null | null | null |
src/hpc/autoscale/job/demandprinter.py
|
hmeiland/cyclecloud-scalelib
|
f246737ddea631c7378d716a51431857eb6b06b3
|
[
"MIT"
] | null | null | null |
import inspect
import io
import json
import logging as logginglib
import sys
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional, Set, TextIO, Tuple
from typing_extensions import Literal
from hpc.autoscale import hpclogging as logging
from hpc.autoscale.codeanalysis import hpcwrapclass
from hpc.autoscale.hpctypes import Hostname
from hpc.autoscale.job.demand import DemandResult
from hpc.autoscale.node.node import Node
OutputFormat = Literal["json", "table", "table_headerless"]
@hpcwrapclass
class DemandPrinter:
    """Formats a DemandResult's compute nodes as a table or JSON document.

    Column expressions support several decorations:
      * ``name:default`` - value used when the column resolves to None
      * ``*name``        - read from ``node.available``
      * ``/name``        - render as ``available/total``
      * ``name[slice]``  - apply a Python slice to the rendered value
      * ``short@long``   - display header ``short`` for attribute ``long``
    """

    def __init__(
        self,
        column_names: Optional[List[str]] = None,
        stream: Optional[TextIO] = None,
        output_format: OutputFormat = "table",
        long: bool = False,
    ) -> None:
        column_names_list: List[str] = []
        if column_names:
            column_names_list = column_names
        # Maps column -> default value parsed from "column:default" syntax.
        # Slice expressions may contain ":", so "[" columns are skipped.
        # NOTE(review): column_names_list aliases the caller's list, so the
        # ":default" suffixes are stripped in place — confirm callers expect
        # their list to be modified.
        self.__defaults = {}
        for n in range(len(column_names_list)):
            expr = column_names_list[n]
            if ":" in expr and "[" not in expr:
                column, default_value = expr.split(":", 1)
                column_names_list[n] = column
                self.__defaults[column] = default_value
        self.column_names = [x.lower() for x in column_names_list]
        self.stream = stream or sys.stdout
        self.output_format = output_format
        # When True, slice expressions are ignored and full values printed.
        self.long = long

    def _calc_width(self, columns: List[str], rows: List[List[str]]) -> Tuple[int, ...]:
        """Return per-column widths: the widest of header and any cell."""
        maxes = [len(c) for c in columns]
        for row in rows:
            for n in range(len(row)):
                maxes[n] = max(len(row[n]), maxes[n])
        return tuple(maxes)

    def _get_all_columns(self, compute_nodes: List[Node]) -> List[str]:
        """Collect every printable column: non-callable public Node
        attributes plus every key seen in any node's ``available`` dict,
        sorted alphabetically."""
        columns = []
        for attr_name in dir(Node):
            if not attr_name[0].isalpha():
                continue
            attr = getattr(Node, attr_name)
            if hasattr(attr, "__call__"):
                continue
            columns.append(attr_name)
        if compute_nodes:
            all_available: Set[str] = set()
            for n in compute_nodes:
                all_available.update(n.available.keys())
            columns += list(all_available)
        assert None not in columns
        columns = sorted(columns)
        return columns

    def print_columns(self, demand_result: Optional[DemandResult] = None) -> None:
        """Print only the header row (no node data) to the stream."""
        columns = self.column_names
        if not columns:
            columns = self._get_all_columns(
                demand_result.compute_nodes if demand_result else []
            )
        # hostname_required is internal bookkeeping, never displayed.
        columns = [c for c in columns if c != "hostname_required"]
        widths = self._calc_width(columns, [])
        formats = " ".join(["{:%d}" % x for x in widths])
        assert len(widths) == len(columns), "{} != {}".format(len(widths), len(columns))
        print(formats.format(*columns), file=self.stream)
        self.stream.flush()

    def print_demand(self, demand_result: DemandResult) -> None:
        """Render demand_result's compute nodes through print_rows.

        Resolves each column expression per node (see class docstring),
        normalizes values for the chosen output format, and sorts nodes
        by private IP (falling back to nodearray/name ordering).
        """
        rows = []
        columns = self.column_names
        if not columns:
            columns = self._get_all_columns(demand_result.compute_nodes)
        if self.output_format == "json":
            columns = [c for c in columns if c not in ["hostname_required"]]
        else:
            # Tables additionally drop the verbose available/node columns.
            columns = [
                c
                for c in columns
                if c not in ["available", "node", "hostname_required"]
            ]
        columns = ["job_ids" if c == "assigned_job_ids" else c for c in columns]
        # "name" is always the leading column when present.
        if "name" in columns:
            columns.remove("name")
            columns.insert(0, "name")
        # short = display header (before '@'); long = attribute to resolve.
        short_columns = [c.split("@")[0] for c in columns]
        long_columns = [c.split("@")[-1] for c in columns]

        # sort by private ip or the node name
        def sort_by_ip_or_name(node: Node) -> Any:
            if node.private_ip:
                return tuple(map(int, node.private_ip.split(".")))
            name_toks = node.name.split("-")
            if name_toks[-1].isdigit():
                node_index = int(name_toks[-1])
                nodearray_ord = [ord(x) for x in node.nodearray]
                # 2**31 to make these come after private ips
                # then nodearray name, then index
                return tuple([2 ** 31] + nodearray_ord + [node_index])
            return tuple([-1] + name_toks)

        ordered_nodes = sorted(demand_result.compute_nodes, key=sort_by_ip_or_name)
        for node in ordered_nodes:
            row: List[str] = []
            rows.append(row)
            for column in long_columns:
                # TODO justify - this is a printing function, so this value could be lots of things etc.
                value: Any = None
                is_from_available = column.startswith("*")
                is_ratio = column.startswith("/")
                is_slice = "[" in column
                if is_from_available or is_ratio:
                    column = column[1:]

                def _slice(v: str) -> str:
                    # Identity default when no slice expression applies.
                    return v

                slice = _slice
                if is_slice:
                    slice_expr = column[column.index("[") :]
                    column = column.split("[")[0]
                    # TODO maybe parse this instead of eval-ing a lambda
                    # (eval here only sees operator-supplied column specs,
                    # not external input).
                    if self.long:
                        slice = lambda v: v  # noqa: E731
                    else:
                        slice = eval(
                            "lambda v: v%s if v is not None else v" % slice_expr
                        )
                if column == "hostname":
                    # Fall back to private IP, then "tbd", for nodes that
                    # do not exist yet or have no hostname assigned.
                    hostname = node.hostname
                    if not node.exists or not hostname:
                        if node.private_ip:
                            hostname = Hostname(str(node.private_ip))
                        else:
                            hostname = Hostname("tbd")
                    value = hostname
                elif column == "hostname_required":
                    continue
                elif column == "job_ids":
                    value = node.assignments
                elif hasattr(node, column):
                    value = getattr(node, column)
                else:
                    # Resolution order: available / ratio / resources / metadata.
                    if is_from_available:
                        value = node.available.get(column)
                    elif is_ratio:
                        value = "{}/{}".format(
                            node.available.get(column), node.resources.get(column)
                        )
                    elif column in node.resources:
                        value = node.resources.get(column)
                    else:
                        value = node.metadata.get(column)
                if value is None:
                    value = self.__defaults.get(column)
                # convert sets to lists, as sets are not json serializable
                if isinstance(value, set):
                    value = list(value)
                elif isinstance(value, datetime):
                    value = value.isoformat()
                # for json, we support lists, null, numbers etc.
                # for table* we will output a string for every value.
                if self.output_format != "json":
                    if isinstance(value, list):
                        value = ",".join(sorted(value))
                    elif isinstance(value, set):
                        value = ",".join(sorted(list(value)))
                    elif value is None:
                        value = ""
                    elif isinstance(value, float):
                        value = "{:.1f}".format(value)
                    elif not isinstance(value, str):
                        value = str(value)
                else:
                    if hasattr(value, "to_json"):
                        value = value.to_json()
                    elif hasattr(value, "keys"):
                        value = dict(value)
                row.append(slice(value))
        # remove / and slice expressions
        stripped_short_names = [c.lstrip("/").split("[")[0] for c in short_columns]
        if self.output_format != "json":
            stripped_short_names = [x.upper() for x in stripped_short_names]
        print_rows(stripped_short_names, rows, self.stream, self.output_format)

    def __str__(self) -> str:
        return "DemandPrinter(columns={}, output_format={}, stream={})".format(
            str(self.column_names), self.output_format, self.stream
        )

    def __repr__(self) -> str:
        return str(self)
def print_columns(
    demand_result: DemandResult,
    stream: Optional[TextIO] = None,
    output_format: OutputFormat = "table",
    long: bool = False,
) -> None:
    """Print only the header row for *demand_result* (all columns).

    Thin convenience wrapper around DemandPrinter.print_columns.
    """
    header_printer = DemandPrinter(
        None, stream=stream, output_format=output_format, long=long
    )
    header_printer.print_columns(demand_result)
def print_demand(
    columns: List[str],
    demand_result: DemandResult,
    stream: Optional[TextIO] = None,
    output_format: OutputFormat = "table",
    log: bool = False,
    long: bool = False,
) -> None:
    """Render *demand_result* with the given column expressions.

    When *log* is True the output stream is teed through logging_stream
    so every line is also mirrored to the "demand" logger.
    """
    if log:
        stream = logging_stream(stream or sys.stdout)
    DemandPrinter(
        columns, stream=stream, output_format=output_format, long=long
    ).print_demand(demand_result)
def wrap_text_io(clz: Any) -> Callable[[TextIO, Optional[str]], TextIO]:
    """Build a subclass of *clz* whose missing TextIO members delegate to
    ``self.wrapped``.

    Any public TextIO attribute (plus the iterator/context-manager dunders)
    that *clz* does not already define is generated: methods forward the
    call, non-functions become read-only properties. The returned type is
    named "LoggingStream".
    """
    members: Dict[str, Any] = {}
    for attr in dir(TextIO):
        # Only proxy public names and the four whitelisted dunders.
        if not attr[0].islower() and attr not in [
            "__enter__",
            "__exit__",
            "__iter__",
            "__next__",
        ]:
            continue
        # Respect anything clz implements itself.
        if attr in dir(clz):
            continue
        def make_member(mem_name: str) -> Any:
            # Factory function so each generated member closes over its own
            # mem_name (avoids the classic late-binding loop bug).
            is_function = inspect.isfunction(getattr(TextIO, mem_name))
            if is_function:
                return lambda *args: getattr(args[0].wrapped, mem_name)(*args[1:])
            else:
                return property(lambda *args: getattr(args[0].wrapped, mem_name))
        members[attr] = make_member(attr)
    return type("LoggingStream", (clz,), members)
class _LoggingStream:
    """Stream wrapper that tees writes to a wrapped stream and, on flush,
    replays complete lines to a logger.

    Combined with wrap_text_io(), which generates the remaining TextIO
    members by delegating to ``wrapped``.
    """

    def __init__(self, wrapped: TextIO, logger_name: Optional[str] = None) -> None:
        # Accumulates text written since the last flush().
        self.line_buffer = io.StringIO()
        self.wrapped = wrapped
        self.logger_name = logger_name

    def write(self, s: str) -> int:
        """Write *s* to the wrapped stream and buffer it for logging."""
        self.line_buffer.write(s)
        return self.wrapped.write(s)

    def flush(self) -> None:
        """Emit each buffered line as a log record, then reset the buffer.

        Records are built via the installed record factory with the name
        "demandprinter" so handlers can exclude them (see
        ExcludeDemandPrinterFilter). All lines from one flush share the
        first record's ``created`` timestamp.
        NOTE(review): the wrapped stream itself is not flushed here —
        confirm whether that is intentional.
        """
        buf = self.line_buffer.getvalue()
        if not buf:
            return
        fact = logginglib.getLogRecordFactory()
        logger = logging.getLogger(self.logger_name)
        created = None
        for line in buf.splitlines(keepends=False):
            record = fact(
                name="demandprinter",
                level=logging.INFO,
                pathname=__file__,
                lineno=1,
                msg=line,
                args=(),
                exc_info=None,
                created=created,
            )
            created = created or record.created
            logger.handle(record)
        self.line_buffer = io.StringIO()

    def close(self) -> None:
        """Flush any buffered text, then close the wrapped stream."""
        self.flush()
        self.wrapped.close()
# Concrete stream type: _LoggingStream plus generated TextIO delegation.
LoggingStream = wrap_text_io(_LoggingStream)


def logging_stream(wrapped: TextIO, logger_name: Optional[str] = None) -> TextIO:
    """Return *wrapped* teed through a LoggingStream so that written text
    is also mirrored to the named logger (default name: "demand")."""
    return LoggingStream(wrapped, logger_name or "demand")
class ExcludeDemandPrinterFilter(logginglib.Filter):
    """Logging filter that drops records emitted under the synthetic
    "demandprinter" logger name, so handlers fed by logging_stream do not
    duplicate the demand table."""

    def __init__(self, name: str = "") -> None:
        super().__init__(name)

    def filter(self, record: logginglib.LogRecord) -> bool:
        # Keep everything except the mirrored demandprinter records.
        return "demandprinter" != record.name
def calculate_column_widths(
    columns: List[str], rows: List[List[str]]
) -> Tuple[int, ...]:
    """Return one width per column: the longer of the display header
    (the part of the column name before any '@') and the widest cell."""
    widths = [len(header.split("@")[0]) for header in columns]
    for row in rows:
        for idx, cell in enumerate(row):
            if len(cell) > widths[idx]:
                widths[idx] = len(cell)
    return tuple(widths)
def print_rows(
    columns: List[str],
    rows: List[List[str]],
    stream: Optional[TextIO] = None,
    output_format: str = "table",
) -> None:
    """Render *rows* to *stream* as JSON or an aligned text table.

    Column names may carry an '@'-suffixed alias; only the part before the
    first '@' is displayed. "table_headerless" omits the header row.
    """
    fmt_choice = output_format or "table"
    out = stream or sys.stdout
    headers = [col.split("@")[0] for col in columns]
    if fmt_choice.lower() == "json":
        records = [dict(zip(headers, row)) for row in rows]
        json.dump(records, out, indent=2)
        return
    # Per-column width = max(header length, widest cell) — inlined from
    # calculate_column_widths (headers are already '@'-stripped here).
    widths = [len(h) for h in headers]
    for row in rows:
        for idx in range(len(row)):
            widths[idx] = max(widths[idx], len(row[idx]))
    template = " ".join("{:%d}" % w for w in widths)
    # Header suppression is a case-sensitive check, matching the original.
    if fmt_choice == "table":
        print(template.format(*headers), file=out)
    for row in rows:
        print(template.format(*[str(cell) for cell in row]), file=out)
    out.flush()
| 33.660477
| 104
| 0.545469
|
import inspect
import io
import json
import logging as logginglib
import sys
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional, Set, TextIO, Tuple
from typing_extensions import Literal
from hpc.autoscale import hpclogging as logging
from hpc.autoscale.codeanalysis import hpcwrapclass
from hpc.autoscale.hpctypes import Hostname
from hpc.autoscale.job.demand import DemandResult
from hpc.autoscale.node.node import Node
OutputFormat = Literal["json", "table", "table_headerless"]
@hpcwrapclass
class DemandPrinter:
def __init__(
self,
column_names: Optional[List[str]] = None,
stream: Optional[TextIO] = None,
output_format: OutputFormat = "table",
long: bool = False,
) -> None:
column_names_list: List[str] = []
if column_names:
column_names_list = column_names
self.__defaults = {}
for n in range(len(column_names_list)):
expr = column_names_list[n]
if ":" in expr and "[" not in expr:
column, default_value = expr.split(":", 1)
column_names_list[n] = column
self.__defaults[column] = default_value
self.column_names = [x.lower() for x in column_names_list]
self.stream = stream or sys.stdout
self.output_format = output_format
self.long = long
def _calc_width(self, columns: List[str], rows: List[List[str]]) -> Tuple[int, ...]:
maxes = [len(c) for c in columns]
for row in rows:
for n in range(len(row)):
maxes[n] = max(len(row[n]), maxes[n])
return tuple(maxes)
def _get_all_columns(self, compute_nodes: List[Node]) -> List[str]:
columns = []
for attr_name in dir(Node):
if not attr_name[0].isalpha():
continue
attr = getattr(Node, attr_name)
if hasattr(attr, "__call__"):
continue
columns.append(attr_name)
if compute_nodes:
all_available: Set[str] = set()
for n in compute_nodes:
all_available.update(n.available.keys())
columns += list(all_available)
assert None not in columns
columns = sorted(columns)
return columns
def print_columns(self, demand_result: DemandResult = None) -> None:
columns = self.column_names
if not columns:
columns = self._get_all_columns(
demand_result.compute_nodes if demand_result else []
)
columns = [c for c in columns if c != "hostname_required"]
widths = self._calc_width(columns, [])
formats = " ".join(["{:%d}" % x for x in widths])
assert len(widths) == len(columns), "{} != {}".format(len(widths), len(columns))
print(formats.format(*columns), file=self.stream)
self.stream.flush()
def print_demand(self, demand_result: DemandResult) -> None:
rows = []
columns = self.column_names
if not columns:
columns = self._get_all_columns(demand_result.compute_nodes)
if self.output_format == "json":
columns = [c for c in columns if c not in ["hostname_required"]]
else:
columns = [
c
for c in columns
if c not in ["available", "node", "hostname_required"]
]
columns = ["job_ids" if c == "assigned_job_ids" else c for c in columns]
if "name" in columns:
columns.remove("name")
columns.insert(0, "name")
short_columns = [c.split("@")[0] for c in columns]
long_columns = [c.split("@")[-1] for c in columns]
def sort_by_ip_or_name(node: Node) -> Any:
if node.private_ip:
return tuple(map(int, node.private_ip.split(".")))
name_toks = node.name.split("-")
if name_toks[-1].isdigit():
node_index = int(name_toks[-1])
nodearray_ord = [ord(x) for x in node.nodearray]
return tuple([2 ** 31] + nodearray_ord + [node_index])
return tuple([-1] + name_toks)
ordered_nodes = sorted(demand_result.compute_nodes, key=sort_by_ip_or_name)
for node in ordered_nodes:
row: List[str] = []
rows.append(row)
for column in long_columns:
value: Any = None
is_from_available = column.startswith("*")
is_ratio = column.startswith("/")
is_slice = "[" in column
if is_from_available or is_ratio:
column = column[1:]
def _slice(v: str) -> str:
return v
slice = _slice
if is_slice:
slice_expr = column[column.index("[") :]
column = column.split("[")[0]
if self.long:
slice = lambda v: v else:
slice = eval(
"lambda v: v%s if v is not None else v" % slice_expr
)
if column == "hostname":
hostname = node.hostname
if not node.exists or not hostname:
if node.private_ip:
hostname = Hostname(str(node.private_ip))
else:
hostname = Hostname("tbd")
value = hostname
elif column == "hostname_required":
continue
elif column == "job_ids":
value = node.assignments
elif hasattr(node, column):
value = getattr(node, column)
else:
if is_from_available:
value = node.available.get(column)
elif is_ratio:
value = "{}/{}".format(
node.available.get(column), node.resources.get(column)
)
elif column in node.resources:
value = node.resources.get(column)
else:
value = node.metadata.get(column)
if value is None:
value = self.__defaults.get(column)
if isinstance(value, set):
value = list(value)
elif isinstance(value, datetime):
value = value.isoformat()
if self.output_format != "json":
if isinstance(value, list):
value = ",".join(sorted(value))
elif isinstance(value, set):
value = ",".join(sorted(list(value)))
elif value is None:
value = ""
elif isinstance(value, float):
value = "{:.1f}".format(value)
elif not isinstance(value, str):
value = str(value)
else:
if hasattr(value, "to_json"):
value = value.to_json()
elif hasattr(value, "keys"):
value = dict(value)
row.append(slice(value))
stripped_short_names = [c.lstrip("/").split("[")[0] for c in short_columns]
if self.output_format != "json":
stripped_short_names = [x.upper() for x in stripped_short_names]
print_rows(stripped_short_names, rows, self.stream, self.output_format)
def __str__(self) -> str:
return "DemandPrinter(columns={}, output_format={}, stream={})".format(
str(self.column_names), self.output_format, self.stream
)
    def __repr__(self) -> str:
        # repr mirrors __str__; this object is purely a formatting helper.
        return str(self)
def print_columns(
    demand_result: DemandResult,
    stream: Optional[TextIO] = None,
    output_format: OutputFormat = "table",
    long: bool = False,
) -> None:
    """Print just the header row for ``demand_result`` via a throwaway printer."""
    DemandPrinter(
        None, stream=stream, output_format=output_format, long=long
    ).print_columns(demand_result)
def print_demand(
    columns: List[str],
    demand_result: DemandResult,
    stream: Optional[TextIO] = None,
    output_format: OutputFormat = "table",
    log: bool = False,
    long: bool = False,
) -> None:
    """Print one row per compute node; optionally tee the output into the log."""
    out = stream
    if log:
        # wrap the stream so every flushed line is also emitted as a log record
        out = logging_stream(out or sys.stdout)
    printer = DemandPrinter(columns, stream=out, output_format=output_format, long=long)
    printer.print_demand(demand_result)
def wrap_text_io(clz: Any) -> Callable[[TextIO, Optional[str]], TextIO]:
    # Builds a subclass of `clz` that forwards every TextIO attribute/method
    # the class does not define itself to `self.wrapped`. This lets
    # _LoggingStream implement only write/flush/close and still present the
    # full TextIO interface.
    members: Dict[str, Any] = {}
    for attr in dir(TextIO):
        # skip private/dunder names, except the iteration and context-manager
        # protocol members, which callers commonly rely on
        if not attr[0].islower() and attr not in [
            "__enter__",
            "__exit__",
            "__iter__",
            "__next__",
        ]:
            continue
        if attr in dir(clz):
            # the wrapped class already provides this member - keep its version
            continue

        def make_member(mem_name: str) -> Any:
            # mem_name is bound per call, avoiding the late-binding closure trap
            is_function = inspect.isfunction(getattr(TextIO, mem_name))
            if is_function:
                # delegate method calls to the wrapped stream
                return lambda *args: getattr(args[0].wrapped, mem_name)(*args[1:])
            else:
                # expose non-callables (e.g. attributes) as read-only properties
                return property(lambda *args: getattr(args[0].wrapped, mem_name))

        members[attr] = make_member(attr)
    return type("LoggingStream", (clz,), members)
class _LoggingStream:
    """Stream wrapper that mirrors written text into python logging.

    Writes pass straight through to the wrapped stream while also being
    buffered; on flush(), each buffered line is emitted as an INFO record
    named "demandprinter" through the logger ``logger_name``.
    """

    def __init__(self, wrapped: TextIO, logger_name: Optional[str] = None) -> None:
        self.line_buffer = io.StringIO()
        self.wrapped = wrapped
        self.logger_name = logger_name

    def write(self, s: str) -> int:
        # remember the text for flush() and forward it unchanged
        self.line_buffer.write(s)
        return self.wrapped.write(s)

    def flush(self) -> None:
        pending = self.line_buffer.getvalue()
        if not pending:
            return
        factory = logginglib.getLogRecordFactory()
        target = logging.getLogger(self.logger_name)
        created = None
        for line in pending.splitlines(keepends=False):
            record = factory(
                name="demandprinter",
                level=logging.INFO,
                pathname=__file__,
                lineno=1,
                msg=line,
                args=(),
                exc_info=None,
                created=created,
            )
            # stamp every line from this flush with the first record's timestamp
            created = created or record.created
            target.handle(record)
        self.line_buffer = io.StringIO()

    def close(self) -> None:
        self.flush()
        self.wrapped.close()
# Concrete wrapper type: _LoggingStream plus TextIO pass-throughs for every
# member it does not define itself (built by wrap_text_io).
LoggingStream = wrap_text_io(_LoggingStream)
def logging_stream(wrapped: TextIO, logger_name: Optional[str] = None) -> TextIO:
    """Wrap ``wrapped`` so flushed output is also logged (default logger: "demand")."""
    return LoggingStream(wrapped, logger_name or "demand")
class ExcludeDemandPrinterFilter(logginglib.Filter):
    """Logging filter that drops records named "demandprinter".

    Those records are the ones _LoggingStream.flush emits, so attaching this
    filter prevents the same table rows from appearing twice.
    """

    def __init__(self, name: str = "") -> None:
        super().__init__(name)

    def filter(self, record: logginglib.LogRecord) -> bool:
        # keep everything except demand-printer mirror records
        return record.name != "demandprinter"
def calculate_column_widths(
    columns: List[str], rows: List[List[str]]
) -> Tuple[int, ...]:
    """Per column, return the widest of the displayed header and every cell.

    Headers may carry an "@alias" suffix; only the part before the "@"
    is displayed, so only that part counts toward the width.
    """
    widths = [len(header.split("@")[0]) for header in columns]
    for row in rows:
        for index, cell in enumerate(row):
            widths[index] = max(widths[index], len(cell))
    return tuple(widths)
def print_rows(
    columns: List[str],
    rows: List[List[str]],
    stream: Optional[TextIO] = None,
    output_format: str = "table",
) -> None:
    """Write rows as a JSON list of dicts or as aligned text columns.

    Only the exact format "table" prints a header line; any other non-json
    format prints rows only. Column specs may carry an "@alias" suffix,
    which is stripped from the displayed header.
    """
    fmt = output_format or "table"
    out = stream or sys.stdout
    display_names = [spec.split("@")[0] for spec in columns]
    if fmt.lower() == "json":
        records = [dict(zip(display_names, row)) for row in rows]
        json.dump(records, out, indent=2)
    else:
        widths = calculate_column_widths(display_names, rows)
        line_format = " ".join("{:%d}" % width for width in widths)
        if fmt == "table":
            print(line_format.format(*display_names), file=out)
        for row in rows:
            print(line_format.format(*[str(cell) for cell in row]), file=out)
    out.flush()
| true
| true
|
f708f90566cd4035c61dccabe262ed5ac91bc040
| 418
|
py
|
Python
|
setup.py
|
sluedtke/borg_hydro
|
ef856784191e21e98e7fe8dd906c0dd9f82fd4ff
|
[
"MIT"
] | null | null | null |
setup.py
|
sluedtke/borg_hydro
|
ef856784191e21e98e7fe8dd906c0dd9f82fd4ff
|
[
"MIT"
] | null | null | null |
setup.py
|
sluedtke/borg_hydro
|
ef856784191e21e98e7fe8dd906c0dd9f82fd4ff
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# setuptools packaging script for the borg_hydro distribution.
from setuptools import setup, find_packages

# Embed the full LICENSE file's text as the package license metadata.
# NOTE(review): `license` shadows the builtin name; harmless in a setup script.
with open('LICENSE') as f:
    license = f.read()

setup(
    name='borg_hydro',
    version='0.1.0',
    author='Stefan Lüdtke',
    url='https://git.gfz-potsdam.de:sluedtke/borg_hydro.git',
    packages=find_packages(),
    license=license,
    include_package_data=True,
    tests_require=['pytest'],
    install_requires=['pandas', 'numpy']
)
| 20.9
| 61
| 0.655502
|
from setuptools import setup, find_packages
with open('LICENSE') as f:
license = f.read()
setup(
name='borg_hydro',
version='0.1.0',
author='Stefan Lüdtke',
url='https://git.gfz-potsdam.de:sluedtke/borg_hydro.git',
packages=find_packages(),
license=license,
include_package_data=True,
tests_require=['pytest'],
install_requires=['pandas', 'numpy']
)
| true
| true
|
f708f90948cf6c550c9741e8d27b736df78d0e44
| 83,746
|
py
|
Python
|
evennia/objects/objects.py
|
zeitkunst/evennia
|
1f254b2542fbefe400c114b3d7029522cdcb37b7
|
[
"BSD-3-Clause"
] | null | null | null |
evennia/objects/objects.py
|
zeitkunst/evennia
|
1f254b2542fbefe400c114b3d7029522cdcb37b7
|
[
"BSD-3-Clause"
] | null | null | null |
evennia/objects/objects.py
|
zeitkunst/evennia
|
1f254b2542fbefe400c114b3d7029522cdcb37b7
|
[
"BSD-3-Clause"
] | null | null | null |
"""
This module defines the basic `DefaultObject` and its children
`DefaultCharacter`, `DefaultAccount`, `DefaultRoom` and `DefaultExit`.
These are the (default) starting points for all in-game visible
entities.
"""
import time
import inflect
from builtins import object
from future.utils import with_metaclass
from collections import defaultdict
from django.conf import settings
from evennia.typeclasses.models import TypeclassBase
from evennia.typeclasses.attributes import NickHandler
from evennia.objects.manager import ObjectManager
from evennia.objects.models import ObjectDB
from evennia.scripts.scripthandler import ScriptHandler
from evennia.commands import cmdset, command
from evennia.commands.cmdsethandler import CmdSetHandler
from evennia.commands import cmdhandler
from evennia.utils import search
from evennia.utils import logger
from evennia.utils import ansi
from evennia.utils.utils import (variable_from_module, lazy_property,
make_iter, to_unicode, is_iter, list_to_string,
to_str)
from django.utils.translation import ugettext as _
_INFLECT = inflect.engine()
_MULTISESSION_MODE = settings.MULTISESSION_MODE
_ScriptDB = None
_SESSIONS = None
_AT_SEARCH_RESULT = variable_from_module(*settings.SEARCH_AT_RESULT.rsplit('.', 1))
# the sessid_max is based on the length of the db_sessid csv field (excluding commas)
_SESSID_MAX = 16 if _MULTISESSION_MODE in (1, 3) else 1
class ObjectSessionHandler(object):
    """
    Handles the get/setting of the sessid
    comma-separated integer field
    """

    def __init__(self, obj):
        """
        Initializes the handler.
        Args:
            obj (Object): The object on which the handler is defined.
        """
        self.obj = obj
        # in-memory list of int session ids mirroring obj.db_sessid (a CSV string)
        self._sessid_cache = []
        self._recache()

    def _recache(self):
        # Rebuild the cache from the db_sessid CSV field, dropping any ids no
        # longer known to the global session handler and persisting that
        # cleanup back to the database.
        global _SESSIONS
        if not _SESSIONS:
            # deferred import; _SESSIONS is a module-level singleton cache
            from evennia.server.sessionhandler import SESSIONS as _SESSIONS
        self._sessid_cache = list(set(int(val) for val in (self.obj.db_sessid or "").split(",") if val))
        if any(sessid for sessid in self._sessid_cache if sessid not in _SESSIONS):
            # cache is out of sync with sessionhandler! Only retain the ones in the handler.
            self._sessid_cache = [sessid for sessid in self._sessid_cache if sessid in _SESSIONS]
            self.obj.db_sessid = ",".join(str(val) for val in self._sessid_cache)
            self.obj.save(update_fields=["db_sessid"])

    def get(self, sessid=None):
        """
        Get the sessions linked to this Object.
        Args:
            sessid (int, optional): A specific session id.
        Returns:
            sessions (list): The sessions connected to this object. If `sessid` is given,
                this is a list of one (or zero) elements.
        Notes:
            Aliased to `self.all()`.
        """
        global _SESSIONS
        if not _SESSIONS:
            from evennia.server.sessionhandler import SESSIONS as _SESSIONS
        if sessid:
            # None acts as a placeholder for a cached id missing from the handler
            sessions = [_SESSIONS[sessid] if sessid in _SESSIONS else None] if sessid in self._sessid_cache else []
        else:
            sessions = [_SESSIONS[ssid] if ssid in _SESSIONS else None for ssid in self._sessid_cache]
        if None in sessions:
            # this happens only if our cache has gone out of sync with the SessionHandler.
            self._recache()
            return self.get(sessid=sessid)
        return sessions

    def all(self):
        """
        Alias to get(), returning all sessions.
        Returns:
            sessions (list): All sessions.
        """
        return self.get()

    def add(self, session):
        """
        Add session to handler.
        Args:
            session (Session or int): Session or session id to add.
        Notes:
            We will only add a session/sessid if this actually also exists
            in the the core sessionhandler.
        """
        global _SESSIONS
        if not _SESSIONS:
            from evennia.server.sessionhandler import SESSIONS as _SESSIONS
        try:
            sessid = session.sessid
        except AttributeError:
            sessid = session
        sessid_cache = self._sessid_cache
        if sessid in _SESSIONS and sessid not in sessid_cache:
            # _SESSID_MAX caps how many sessions may puppet one object
            if len(sessid_cache) >= _SESSID_MAX:
                return
            sessid_cache.append(sessid)
            self.obj.db_sessid = ",".join(str(val) for val in sessid_cache)
            self.obj.save(update_fields=["db_sessid"])

    def remove(self, session):
        """
        Remove session from handler.
        Args:
            session (Session or int): Session or session id to remove.
        """
        try:
            sessid = session.sessid
        except AttributeError:
            sessid = session
        sessid_cache = self._sessid_cache
        if sessid in sessid_cache:
            sessid_cache.remove(sessid)
            self.obj.db_sessid = ",".join(str(val) for val in sessid_cache)
            self.obj.save(update_fields=["db_sessid"])

    def clear(self):
        """
        Clear all handled sessids.
        """
        self._sessid_cache = []
        self.obj.db_sessid = None
        self.obj.save(update_fields=["db_sessid"])

    def count(self):
        """
        Get amount of sessions connected.
        Returns:
            sesslen (int): Number of sessions handled.
        """
        return len(self._sessid_cache)
#
# Base class to inherit from.
class DefaultObject(with_metaclass(TypeclassBase, ObjectDB)):
"""
This is the root typeclass object, representing all entities that
have an actual presence in-game. DefaultObjects generally have a
location. They can also be manipulated and looked at. Game
entities you define should inherit from DefaultObject at some distance.
It is recommended to create children of this class using the
`evennia.create_object()` function rather than to initialize the class
directly - this will both set things up and efficiently save the object
without `obj.save()` having to be called explicitly.
"""
objects = ObjectManager()
# on-object properties
    @lazy_property
    def cmdset(self):
        # lazily instantiated handler managing this object's command sets
        # (second arg presumably triggers immediate init - TODO confirm
        # against CmdSetHandler)
        return CmdSetHandler(self, True)
    @lazy_property
    def scripts(self):
        # lazily instantiated handler for scripts attached to this object
        return ScriptHandler(self)
    @lazy_property
    def nicks(self):
        # lazily instantiated handler for nickname replacements
        return NickHandler(self)
    @lazy_property
    def sessions(self):
        # lazily instantiated handler tracking sessions puppeting this object
        return ObjectSessionHandler(self)
    @property
    def is_connected(self):
        """Delegate connection status to the controlling account (False if none)."""
        # we get an error for objects subscribed to channels without this
        if self.account:  # seems sane to pass on the account
            return self.account.is_connected
        else:
            return False
    @property
    def has_account(self):
        """
        Convenience property for checking if an active account is
        currently connected to this object.
        """
        # returns the session count (an int), so this is truthy when connected
        return self.sessions.count()
    @property
    def is_superuser(self):
        """
        Check if user has an account, and if so, if it is a superuser.
        """
        # superuser status is suppressed while the account quells its
        # permissions (the "_quell" attribute)
        return self.db_account and self.db_account.is_superuser \
            and not self.db_account.attributes.get("_quell")
    def contents_get(self, exclude=None):
        """
        Returns the contents of this object, i.e. all
        objects that has this object set as its location.
        This should be publically available.
        Args:
            exclude (Object): Object to exclude from returned
                contents list
        Returns:
            contents (list): List of contents of this Object.
        Notes:
            Also available as the `contents` property.
        """
        con = self.contents_cache.get(exclude=exclude)
        # print "contents_get:", self, con, id(self), calledby() # DEBUG
        return con

    # read-only property alias; obj.contents == obj.contents_get()
    contents = property(contents_get)
    @property
    def exits(self):
        """
        Returns all exits from this object, i.e. all objects at this
        location having the property destination != `None`.
        """
        return [exi for exi in self.contents if exi.destination]
# main methods
def get_display_name(self, looker, **kwargs):
"""
Displays the name of the object in a viewer-aware manner.
Args:
looker (TypedObject): The object or account that is looking
at/getting inforamtion for this object.
Returns:
name (str): A string containing the name of the object,
including the DBREF if this user is privileged to control
said object.
Notes:
This function could be extended to change how object names
appear to users in character, but be wary. This function
does not change an object's keys or aliases when
searching, and is expected to produce something useful for
builders.
"""
if self.locks.check_lockstring(looker, "perm(Builder)"):
return "{}(#{})".format(self.name, self.id)
return self.name
    def get_numbered_name(self, count, looker, **kwargs):
        """
        Return the numbered (singular, plural) forms of this object's key. This is by default called
        by return_appearance and is used for grouping multiple same-named of this object. Note that
        this will be called on *every* member of a group even though the plural name will be only
        shown once. Also the singular display version, such as 'an apple', 'a tree' is determined
        from this method.
        Args:
            count (int): Number of objects of this type
            looker (Object): Onlooker. Not used by default.
        Kwargs:
            key (str): Optional key to pluralize, if given, use this instead of the object's key.
        Returns:
            singular (str): The singular form to display.
            plural (str): The determined plural form of the key, including the count.
        """
        key = kwargs.get("key", self.key)
        key = ansi.ANSIString(key)  # this is needed to allow inflection of colored names
        plural = _INFLECT.plural(key, 2)
        # counts above the threshold render as digits rather than words
        plural = "%s %s" % (_INFLECT.number_to_words(count, threshold=12), plural)
        singular = _INFLECT.an(key)
        if not self.aliases.get(plural, category="plural_key"):
            # we need to wipe any old plurals/an/a in case key changed in the interrim
            self.aliases.clear(category="plural_key")
            self.aliases.add(plural, category="plural_key")
            # save the singular form as an alias here too so we can display "an egg" and also
            # look at 'an egg'.
            self.aliases.add(singular, category="plural_key")
        return singular, plural
    def search(self, searchdata,
               global_search=False,
               use_nicks=True,
               typeclass=None,
               location=None,
               attribute_name=None,
               quiet=False,
               exact=False,
               candidates=None,
               nofound_string=None,
               multimatch_string=None,
               use_dbref=None):
        """
        Returns an Object matching a search string/condition
        Perform a standard object search in the database, handling
        multiple results and lack thereof gracefully. By default, only
        objects in the current `location` of `self` or its inventory are searched for.
        Args:
            searchdata (str or obj): Primary search criterion. Will be matched
                against `object.key` (with `object.aliases` second) unless
                the keyword attribute_name specifies otherwise.
                **Special strings:**
                - `#<num>`: search by unique dbref. This is always
                   a global search.
                - `me,self`: self-reference to this object
                - `<num>-<string>` - can be used to differentiate
                   between multiple same-named matches
            global_search (bool): Search all objects globally. This is overruled
                by `location` keyword.
            use_nicks (bool): Use nickname-replace (nicktype "object") on `searchdata`.
            typeclass (str or Typeclass, or list of either): Limit search only
                to `Objects` with this typeclass. May be a list of typeclasses
                for a broader search.
            location (Object or list): Specify a location or multiple locations
                to search. Note that this is used to query the *contents* of a
                location and will not match for the location itself -
                if you want that, don't set this or use `candidates` to specify
                exactly which objects should be searched.
            attribute_name (str): Define which property to search. If set, no
                key+alias search will be performed. This can be used
                to search database fields (db_ will be automatically
                prepended), and if that fails, it will try to return
                objects having Attributes with this name and value
                equal to searchdata. A special use is to search for
                "key" here if you want to do a key-search without
                including aliases.
            quiet (bool): don't display default error messages - this tells the
                search method that the user wants to handle all errors
                themselves. It also changes the return value type, see
                below.
            exact (bool): if unset (default) - prefers to match to beginning of
                string rather than not matching at all. If set, requires
                exact matching of entire string.
            candidates (list of objects): this is an optional custom list of objects
                to search (filter) between. It is ignored if `global_search`
                is given. If not set, this list will automatically be defined
                to include the location, the contents of location and the
                caller's contents (inventory).
            nofound_string (str): optional custom string for not-found error message.
            multimatch_string (str): optional custom string for multimatch error header.
            use_dbref (bool or None, optional): If `True`, allow to enter e.g. a query "#123"
                to find an object (globally) by its database-id 123. If `False`, the string "#123"
                will be treated like a normal string. If `None` (default), the ability to query by
                #dbref is turned on if `self` has the permission 'Builder' and is turned off
                otherwise.
        Returns:
            match (Object, None or list): will return an Object/None if `quiet=False`,
                otherwise it will return a list of 0, 1 or more matches.
        Notes:
            To find Accounts, use eg. `evennia.account_search`. If
            `quiet=False`, error messages will be handled by
            `settings.SEARCH_AT_RESULT` and echoed automatically (on
            error, return will be `None`). If `quiet=True`, the error
            messaging is assumed to be handled by the caller.
        """
        is_string = isinstance(searchdata, basestring)
        if is_string:
            # searchdata is a string; wrap some common self-references
            if searchdata.lower() in ("here", ):
                return [self.location] if quiet else self.location
            if searchdata.lower() in ("me", "self",):
                return [self] if quiet else self
        if use_dbref is None:
            # default #dbref lookup permission follows the Builder lock
            use_dbref = self.locks.check_lockstring(self, "_dummy:perm(Builder)")
        if use_nicks:
            # do nick-replacement on search
            searchdata = self.nicks.nickreplace(searchdata, categories=("object", "account"), include_account=True)
        if (global_search or (is_string and searchdata.startswith("#") and
                              len(searchdata) > 1 and searchdata[1:].isdigit())):
            # only allow exact matching if searching the entire database
            # or unique #dbrefs
            exact = True
            candidates = None
        elif candidates is None:
            # no custom candidates given - get them automatically
            if location:
                # location(s) were given
                candidates = []
                for obj in make_iter(location):
                    candidates.extend(obj.contents)
            else:
                # local search. Candidates are taken from
                # self.contents, self.location and
                # self.location.contents
                location = self.location
                candidates = self.contents
                if location:
                    candidates = candidates + [location] + location.contents
                else:
                    # normally we don't need this since we are
                    # included in location.contents
                    candidates.append(self)
        results = ObjectDB.objects.object_search(searchdata,
                                                 attribute_name=attribute_name,
                                                 typeclass=typeclass,
                                                 candidates=candidates,
                                                 exact=exact,
                                                 use_dbref=use_dbref)
        if quiet:
            return results
        # delegate no-match/multimatch messaging to the configured handler
        return _AT_SEARCH_RESULT(results, self, query=searchdata,
                                 nofound_string=nofound_string, multimatch_string=multimatch_string)
    def search_account(self, searchdata, quiet=False):
        """
        Simple shortcut wrapper to search for accounts, not characters.
        Args:
            searchdata (str): Search criterion - the key or dbref of the account
                to search for. If this is "here" or "me", search
                for the account connected to this object.
            quiet (bool): Returns the results as a list rather than
                echo eventual standard error messages. Default `False`.
        Returns:
            result (Account, None or list): Just what is returned depends on
                the `quiet` setting:
                - `quiet=False`: No match or multimatch auto-echoes errors
                  to self.msg, then returns `None`. The results are passed
                  through `settings.SEARCH_AT_RESULT` and
                  `settings.SEARCH_AT_MULTIMATCH_INPUT`. If there is a
                  unique match, this will be returned.
                - `quiet=True`: No automatic error messaging is done, and
                  what is returned is always a list with 0, 1 or more
                  matching Accounts.
        """
        if isinstance(searchdata, basestring):
            # searchdata is a string; wrap some common self-references
            if searchdata.lower() in ("me", "self",):
                return [self.account] if quiet else self.account
        results = search.search_account(searchdata)
        if quiet:
            return results
        return _AT_SEARCH_RESULT(results, self, query=searchdata)
    def execute_cmd(self, raw_string, session=None, **kwargs):
        """
        Do something as this object. This is never called normally,
        it's only used when wanting specifically to let an object be
        the caller of a command. It makes use of nicks of eventual
        connected accounts as well.
        Args:
            raw_string (string): Raw command input
            session (Session, optional): Session to
                return results to
        Kwargs:
            Other keyword arguments will be added to the found command
            object instance as variables before it executes. This is
            unused by default Evennia but may be used to set flags and
            change operating parameters for commands at run-time.
        Returns:
            defer (Deferred): This is an asynchronous Twisted object that
                will not fire until the command has actually finished
                executing. To overload this one needs to attach
                callback functions to it, with addCallback(function).
                This function will be called with an eventual return
                value from the command execution. This return is not
                used at all by Evennia by default, but might be useful
                for coders intending to implement some sort of nested
                command structure.
        """
        # nick replacement - we require full-word matching.
        # do text encoding conversion
        raw_string = to_unicode(raw_string)
        raw_string = self.nicks.nickreplace(raw_string, categories=("inputline", "channel"), include_account=True)
        return cmdhandler.cmdhandler(self, raw_string, callertype="object", session=session, **kwargs)
    def msg(self, text=None, from_obj=None, session=None, options=None, **kwargs):
        """
        Emits something to a session attached to the object.
        Args:
            text (str or tuple, optional): The message to send. This
                is treated internally like any send-command, so its
                value can be a tuple if sending multiple arguments to
                the `text` oob command.
            from_obj (obj or list, optional): object that is sending. If
                given, at_msg_send will be called. This value will be
                passed on to the protocol. If iterable, will execute hook
                on all entities in it.
            session (Session or list, optional): Session or list of
                Sessions to relay data to, if any. If set, will force send
                to these sessions. If unset, who receives the message
                depends on the MULTISESSION_MODE.
            options (dict, optional): Message-specific option-value
                pairs. These will be applied at the protocol level.
        Kwargs:
            any (string or tuples): All kwarg keys not listed above
                will be treated as send-command names and their arguments
                (which can be a string or a tuple).
        Notes:
            `at_msg_receive` will be called on this Object.
            All extra kwargs will be passed on to the protocol.
        """
        # try send hooks
        if from_obj:
            for obj in make_iter(from_obj):
                try:
                    obj.at_msg_send(text=text, to_obj=self, **kwargs)
                except Exception:
                    # a failing sender hook must not stop delivery
                    logger.log_trace()
        kwargs["options"] = options
        try:
            if not self.at_msg_receive(text=text, **kwargs):
                # if at_msg_receive returns false, we abort message to this object
                return
        except Exception:
            logger.log_trace()
        if text is not None:
            if not (isinstance(text, basestring) or isinstance(text, tuple)):
                # sanitize text before sending across the wire
                try:
                    text = to_str(text, force_string=True)
                except Exception:
                    text = repr(text)
            kwargs['text'] = text

        # relay to session(s)
        sessions = make_iter(session) if session else self.sessions.all()
        for session in sessions:
            session.data_out(**kwargs)
def for_contents(self, func, exclude=None, **kwargs):
"""
Runs a function on every object contained within this one.
Args:
func (callable): Function to call. This must have the
formal call sign func(obj, **kwargs), where obj is the
object currently being processed and `**kwargs` are
passed on from the call to `for_contents`.
exclude (list, optional): A list of object not to call the
function on.
Kwargs:
Keyword arguments will be passed to the function for all objects.
"""
contents = self.contents
if exclude:
exclude = make_iter(exclude)
contents = [obj for obj in contents if obj not in exclude]
for obj in contents:
func(obj, **kwargs)
    def msg_contents(self, text=None, exclude=None, from_obj=None, mapping=None, **kwargs):
        """
        Emits a message to all objects inside this object.
        Args:
            text (str or tuple): Message to send. If a tuple, this should be
                on the valid OOB outmessage form `(message, {kwargs})`,
                where kwargs are optional data passed to the `text`
                outputfunc.
            exclude (list, optional): A list of objects not to send to.
            from_obj (Object, optional): An object designated as the
                "sender" of the message. See `DefaultObject.msg()` for
                more info.
            mapping (dict, optional): A mapping of formatting keys
                `{"key":<object>, "key2":<object2>,...}. The keys
                must match `{key}` markers in the `text` if this is a string or
                in the internal `message` if `text` is a tuple. These
                formatting statements will be
                replaced by the return of `<object>.get_display_name(looker)`
                for every looker in contents that receives the
                message. This allows for every object to potentially
                get its own customized string.
        Kwargs:
            Keyword arguments will be passed on to `obj.msg()` for all
            messaged objects.
        Notes:
            The `mapping` argument is required if `message` contains
            {}-style format syntax. The keys of `mapping` should match
            named format tokens, and its values will have their
            `get_display_name()` function called for each object in
            the room before substitution. If an item in the mapping does
            not have `get_display_name()`, its string value will be used.
        Example:
            Say Char is a Character object and Npc is an NPC object:
            char.location.msg_contents(
                "{attacker} kicks {defender}",
                mapping=dict(attacker=char, defender=npc), exclude=(char, npc))
            This will result in everyone in the room seeing 'Char kicks NPC'
            where everyone may potentially see different results for Char and Npc
            depending on the results of `char.get_display_name(looker)` and
            `npc.get_display_name(looker)` for each particular onlooker
        """
        # we also accept an outcommand on the form (message, {kwargs})
        is_outcmd = text and is_iter(text)
        inmessage = text[0] if is_outcmd else text
        outkwargs = text[1] if is_outcmd and len(text) > 1 else {}

        contents = self.contents
        if exclude:
            exclude = make_iter(exclude)
            contents = [obj for obj in contents if obj not in exclude]
        for obj in contents:
            if mapping:
                # per-receiver substitutions: each onlooker may see different names
                substitutions = {t: sub.get_display_name(obj)
                                 if hasattr(sub, 'get_display_name')
                                 else str(sub) for t, sub in mapping.items()}
                outmessage = inmessage.format(**substitutions)
            else:
                outmessage = inmessage
            obj.msg(text=(outmessage, outkwargs), from_obj=from_obj, **kwargs)
    def move_to(self, destination, quiet=False,
                emit_to_obj=None, use_destination=True, to_none=False, move_hooks=True,
                **kwargs):
        """
        Moves this object to a new location.
        Args:
            destination (Object): Reference to the object to move to. This
                can also be an exit object, in which case the
                destination property is used as destination.
            quiet (bool): If true, turn off the calling of the emit hooks
                (announce_move_to/from etc)
            emit_to_obj (Object): object to receive error messages
            use_destination (bool): Default is for objects to use the "destination"
                 property of destinations as the target to move to. Turning off this
                 keyword allows objects to move "inside" exit objects.
            to_none (bool): Allow destination to be None. Note that no hooks are run when
                 moving to a None location. If you want to run hooks, run them manually
                 (and make sure they can manage None locations).
            move_hooks (bool): If False, turn off the calling of move-related hooks
                (at_before/after_move etc) with quiet=True, this is as quiet a move
                as can be done.
        Kwargs:
          Passed on to announce_move_to and announce_move_from hooks.
        Returns:
            result (bool): True/False depending on if there were problems with the move.
                    This method may also return various error messages to the
                    `emit_to_obj`.
        Notes:
            No access checks are done in this method, these should be handled before
            calling `move_to`.
            The `DefaultObject` hooks called (if `move_hooks=True`) are, in order:
             1. `self.at_before_move(destination)` (if this returns False, move is aborted)
             2. `source_location.at_object_leave(self, destination)`
             3. `self.announce_move_from(destination)`
             4. (move happens here)
             5. `self.announce_move_to(source_location)`
             6. `destination.at_object_receive(self, source_location)`
             7. `self.at_after_move(source_location)`
        """
        def logerr(string="", err=None):
            """Simple log helper method"""
            logger.log_trace()
            self.msg("%s%s" % (string, "" if err is None else " (%s)" % err))
            return

        errtxt = _("Couldn't perform move ('%s'). Contact an admin.")
        if not emit_to_obj:
            emit_to_obj = self

        if not destination:
            if to_none:
                # immediately move to None. There can be no hooks called since
                # there is no destination to call them with.
                self.location = None
                return True
            emit_to_obj.msg(_("The destination doesn't exist."))
            return False
        if destination.destination and use_destination:
            # traverse exits
            destination = destination.destination

        # Before the move, call eventual pre-commands.
        if move_hooks:
            try:
                if not self.at_before_move(destination):
                    return False
            except Exception as err:
                logerr(errtxt % "at_before_move()", err)
                return False

        # Save the old location
        source_location = self.location

        # Call hook on source location
        if move_hooks and source_location:
            try:
                source_location.at_object_leave(self, destination)
            except Exception as err:
                logerr(errtxt % "at_object_leave()", err)
                return False

        if not quiet:
            # tell the old room we are leaving
            try:
                self.announce_move_from(destination, **kwargs)
            except Exception as err:
                logerr(errtxt % "at_announce_move()", err)
                return False

        # Perform move
        try:
            self.location = destination
        except Exception as err:
            logerr(errtxt % "location change", err)
            return False

        if not quiet:
            # Tell the new room we are there.
            try:
                self.announce_move_to(source_location, **kwargs)
            except Exception as err:
                logerr(errtxt % "announce_move_to()", err)
                return False

        if move_hooks:
            # Perform eventual extra commands on the receiving location
            # (the object has already arrived at this point)
            try:
                destination.at_object_receive(self, source_location)
            except Exception as err:
                logerr(errtxt % "at_object_receive()", err)
                return False

        # Execute eventual extra commands on this object after moving it
        # (usually calling 'look')
        if move_hooks:
            try:
                self.at_after_move(source_location)
            except Exception as err:
                logerr(errtxt % "at_after_move", err)
                return False
        return True
def clear_exits(self):
"""
Destroys all of the exits and any exits pointing to this
object as a destination.
"""
for out_exit in [exi for exi in ObjectDB.objects.get_contents(self) if exi.db_destination]:
out_exit.delete()
for in_exit in ObjectDB.objects.filter(db_destination=self):
in_exit.delete()
def clear_contents(self):
"""
Moves all objects (accounts/things) to their home location or
to default home.
"""
# Gather up everything that thinks this is its location.
default_home_id = int(settings.DEFAULT_HOME.lstrip("#"))
try:
default_home = ObjectDB.objects.get(id=default_home_id)
if default_home.dbid == self.dbid:
# we are deleting default home!
default_home = None
except Exception:
string = _("Could not find default home '(#%d)'.")
logger.log_err(string % default_home_id)
default_home = None
for obj in self.contents:
home = obj.home
# Obviously, we can't send it back to here.
if not home or (home and home.dbid == self.dbid):
obj.home = default_home
home = default_home
# If for some reason it's still None...
if not home:
string = "Missing default home, '%s(#%d)' "
string += "now has a null location."
obj.location = None
obj.msg(_("Something went wrong! You are dumped into nowhere. Contact an admin."))
logger.log_err(string % (obj.name, obj.dbid))
return
if obj.has_account:
if home:
string = "Your current location has ceased to exist,"
string += " moving you to %s(#%d)."
obj.msg(_(string) % (home.name, home.dbid))
else:
# Famous last words: The account should never see this.
string = "This place should not exist ... contact an admin."
obj.msg(_(string))
obj.move_to(home)
def copy(self, new_key=None):
"""
Makes an identical copy of this object, identical except for a
new dbref in the database. If you want to customize the copy
by changing some settings, use ObjectDB.object.copy_object()
directly.
Args:
new_key (string): New key/name of copied object. If new_key is not
specified, the copy will be named <old_key>_copy by default.
Returns:
copy (Object): A copy of this object.
"""
def find_clone_key():
"""
Append 01, 02 etc to obj.key. Checks next higher number in the
same location, then adds the next number available
returns the new clone name on the form keyXX
"""
key = self.key
num = sum(1 for obj in self.location.contents
if obj.key.startswith(key) and obj.key.lstrip(key).isdigit())
return "%s%03i" % (key, num)
new_key = new_key or find_clone_key()
return ObjectDB.objects.copy_object(self, new_key=new_key)
def delete(self):
"""
Deletes this object. Before deletion, this method makes sure
to move all contained objects to their respective home
locations, as well as clean up all exits to/from the object.
Returns:
noerror (bool): Returns whether or not the delete completed
successfully or not.
"""
global _ScriptDB
if not _ScriptDB:
from evennia.scripts.models import ScriptDB as _ScriptDB
if not self.pk or not self.at_object_delete():
# This object has already been deleted,
# or the pre-delete check return False
return False
# See if we need to kick the account off.
for session in self.sessions.all():
session.msg(_("Your character %s has been destroyed.") % self.key)
# no need to disconnect, Account just jumps to OOC mode.
# sever the connection (important!)
if self.account:
for session in self.sessions.all():
self.account.unpuppet_object(session)
self.account = None
for script in _ScriptDB.objects.get_all_scripts_on_obj(self):
script.stop()
# Destroy any exits to and from this room, if any
self.clear_exits()
# Clear out any non-exit objects located within the object
self.clear_contents()
self.attributes.clear()
self.nicks.clear()
self.aliases.clear()
self.location = None # this updates contents_cache for our location
# Perform the deletion of the object
super(DefaultObject, self).delete()
return True
def access(self, accessing_obj, access_type='read', default=False, no_superuser_bypass=False, **kwargs):
"""
Determines if another object has permission to access this object
in whatever way.
Args:
accessing_obj (Object): Object trying to access this one.
access_type (str, optional): Type of access sought.
default (bool, optional): What to return if no lock of access_type was found.
no_superuser_bypass (bool, optional): If `True`, don't skip
lock check for superuser (be careful with this one).
Kwargs:
Passed on to the at_access hook along with the result of the access check.
"""
result = super(DefaultObject, self).access(accessing_obj, access_type=access_type,
default=default, no_superuser_bypass=no_superuser_bypass)
self.at_access(result, accessing_obj, access_type, **kwargs)
return result
#
# Hook methods
#
def at_first_save(self):
"""
This is called by the typeclass system whenever an instance of
this class is saved for the first time. It is a generic hook
for calling the startup hooks for the various game entities.
When overloading you generally don't overload this but
overload the hooks called by this method.
"""
self.basetype_setup()
self.at_object_creation()
if hasattr(self, "_createdict"):
# this will only be set if the utils.create function
# was used to create the object. We want the create
# call's kwargs to override the values set by hooks.
cdict = self._createdict
updates = []
if not cdict.get("key"):
if not self.db_key:
self.db_key = "#%i" % self.dbid
updates.append("db_key")
elif self.key != cdict.get("key"):
updates.append("db_key")
self.db_key = cdict["key"]
if cdict.get("location") and self.location != cdict["location"]:
self.db_location = cdict["location"]
updates.append("db_location")
if cdict.get("home") and self.home != cdict["home"]:
self.home = cdict["home"]
updates.append("db_home")
if cdict.get("destination") and self.destination != cdict["destination"]:
self.destination = cdict["destination"]
updates.append("db_destination")
if updates:
self.save(update_fields=updates)
if cdict.get("permissions"):
self.permissions.batch_add(*cdict["permissions"])
if cdict.get("locks"):
self.locks.add(cdict["locks"])
if cdict.get("aliases"):
self.aliases.batch_add(*cdict["aliases"])
if cdict.get("location"):
cdict["location"].at_object_receive(self, None)
self.at_after_move(None)
if cdict.get("tags"):
# this should be a list of tags, tuples (key, category) or (key, category, data)
self.tags.batch_add(*cdict["tags"])
if cdict.get("attributes"):
# this should be tuples (key, val, ...)
self.attributes.batch_add(*cdict["attributes"])
if cdict.get("nattributes"):
# this should be a dict of nattrname:value
for key, value in cdict["nattributes"]:
self.nattributes.add(key, value)
del self._createdict
self.basetype_posthook_setup()
# hooks called by the game engine #
def basetype_setup(self):
"""
This sets up the default properties of an Object, just before
the more general at_object_creation.
You normally don't need to change this unless you change some
fundamental things like names of permission groups.
"""
# the default security setup fallback for a generic
# object. Overload in child for a custom setup. Also creation
# commands may set this (create an item and you should be its
# controller, for example)
self.locks.add(";".join([
"control:perm(Developer)", # edit locks/permissions, delete
"examine:perm(Builder)", # examine properties
"view:all()", # look at object (visibility)
"edit:perm(Admin)", # edit properties/attributes
"delete:perm(Admin)", # delete object
"get:all()", # pick up object
"call:true()", # allow to call commands on this object
"tell:perm(Admin)", # allow emits to this object
"puppet:pperm(Developer)"])) # lock down puppeting only to staff by default
def basetype_posthook_setup(self):
"""
Called once, after basetype_setup and at_object_creation. This
should generally not be overloaded unless you are redefining
how a room/exit/object works. It allows for basetype-like
setup after the object is created. An example of this is
EXITs, who need to know keys, aliases, locks etc to set up
their exit-cmdsets.
"""
pass
def at_object_creation(self):
"""
Called once, when this object is first created. This is the
normal hook to overload for most object types.
"""
pass
def at_object_delete(self):
"""
Called just before the database object is permanently
delete()d from the database. If this method returns False,
deletion is aborted.
"""
return True
def at_init(self):
"""
This is always called whenever this object is initiated --
that is, whenever it its typeclass is cached from memory. This
happens on-demand first time the object is used or activated
in some way after being created but also after each server
restart or reload.
"""
pass
def at_cmdset_get(self, **kwargs):
"""
Called just before cmdsets on this object are requested by the
command handler. If changes need to be done on the fly to the
cmdset before passing them on to the cmdhandler, this is the
place to do it. This is called also if the object currently
have no cmdsets.
Kwargs:
caller (Session, Object or Account): The caller requesting
this cmdset.
"""
pass
def at_pre_puppet(self, account, session=None, **kwargs):
"""
Called just before an Account connects to this object to puppet
it.
Args:
account (Account): This is the connecting account.
session (Session): Session controlling the connection.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_post_puppet(self, **kwargs):
"""
Called just after puppeting has been completed and all
Account<->Object links have been established.
Args:
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Note:
You can use `self.account` and `self.sessions.get()` to get
account and sessions at this point; the last entry in the
list from `self.sessions.get()` is the latest Session
puppeting this Object.
"""
self.account.db._last_puppet = self
def at_pre_unpuppet(self, **kwargs):
"""
Called just before beginning to un-connect a puppeting from
this Account.
Args:
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Note:
You can use `self.account` and `self.sessions.get()` to get
account and sessions at this point; the last entry in the
list from `self.sessions.get()` is the latest Session
puppeting this Object.
"""
pass
def at_post_unpuppet(self, account, session=None, **kwargs):
"""
Called just after the Account successfully disconnected from
this object, severing all connections.
Args:
account (Account): The account object that just disconnected
from this object.
session (Session): Session id controlling the connection that
just disconnected.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_server_reload(self):
"""
This hook is called whenever the server is shutting down for
restart/reboot. If you want to, for example, save non-persistent
properties across a restart, this is the place to do it.
"""
pass
def at_server_shutdown(self):
"""
This hook is called whenever the server is shutting down fully
(i.e. not for a restart).
"""
pass
def at_access(self, result, accessing_obj, access_type, **kwargs):
"""
This is called with the result of an access call, along with
any kwargs used for that call. The return of this method does
not affect the result of the lock check. It can be used e.g. to
customize error messages in a central location or other effects
based on the access result.
Args:
result (bool): The outcome of the access call.
accessing_obj (Object or Account): The entity trying to gain access.
access_type (str): The type of access that was requested.
Kwargs:
Not used by default, added for possible expandability in a
game.
"""
pass
# hooks called when moving the object
def at_before_move(self, destination, **kwargs):
"""
Called just before starting to move this object to
destination.
Args:
destination (Object): The object we are moving to
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Returns:
shouldmove (bool): If we should move or not.
Notes:
If this method returns False/None, the move is cancelled
before it is even started.
"""
# return has_perm(self, destination, "can_move")
return True
def announce_move_from(self, destination, msg=None, mapping=None, **kwargs):
"""
Called if the move is to be announced. This is
called while we are still standing in the old
location.
Args:
destination (Object): The place we are going to.
msg (str, optional): a replacement message.
mapping (dict, optional): additional mapping objects.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
You can override this method and call its parent with a
message to simply change the default message. In the string,
you can use the following as mappings (between braces):
object: the object which is moving.
exit: the exit from which the object is moving (if found).
origin: the location of the object before the move.
destination: the location of the object after moving.
"""
if not self.location:
return
if msg:
string = msg
else:
string = "{object} is leaving {origin}, heading for {destination}."
location = self.location
exits = [o for o in location.contents if o.location is location and o.destination is destination]
if not mapping:
mapping = {}
mapping.update({
"object": self,
"exit": exits[0] if exits else "somewhere",
"origin": location or "nowhere",
"destination": destination or "nowhere",
})
location.msg_contents(string, exclude=(self, ), mapping=mapping)
def announce_move_to(self, source_location, msg=None, mapping=None, **kwargs):
"""
Called after the move if the move was not quiet. At this point
we are standing in the new location.
Args:
source_location (Object): The place we came from
msg (str, optional): the replacement message if location.
mapping (dict, optional): additional mapping objects.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Notes:
You can override this method and call its parent with a
message to simply change the default message. In the string,
you can use the following as mappings (between braces):
object: the object which is moving.
exit: the exit from which the object is moving (if found).
origin: the location of the object before the move.
destination: the location of the object after moving.
"""
if not source_location and self.location.has_account:
# This was created from nowhere and added to an account's
# inventory; it's probably the result of a create command.
string = "You now have %s in your possession." % self.get_display_name(self.location)
self.location.msg(string)
return
if source_location:
if msg:
string = msg
else:
string = "{object} arrives to {destination} from {origin}."
else:
string = "{object} arrives to {destination}."
origin = source_location
destination = self.location
exits = []
if origin:
exits = [o for o in destination.contents if o.location is destination and o.destination is origin]
if not mapping:
mapping = {}
mapping.update({
"object": self,
"exit": exits[0] if exits else "somewhere",
"origin": origin or "nowhere",
"destination": destination or "nowhere",
})
destination.msg_contents(string, exclude=(self, ), mapping=mapping)
def at_after_move(self, source_location, **kwargs):
"""
Called after move has completed, regardless of quiet mode or
not. Allows changes to the object due to the location it is
now in.
Args:
source_location (Object): Wwhere we came from. This may be `None`.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_object_leave(self, moved_obj, target_location, **kwargs):
"""
Called just before an object leaves from inside this object
Args:
moved_obj (Object): The object leaving
target_location (Object): Where `moved_obj` is going.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_object_receive(self, moved_obj, source_location, **kwargs):
"""
Called after an object has been moved into this object.
Args:
moved_obj (Object): The object moved into this one
source_location (Object): Where `moved_object` came from.
Note that this could be `None`.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_traverse(self, traversing_object, target_location, **kwargs):
"""
This hook is responsible for handling the actual traversal,
normally by calling
`traversing_object.move_to(target_location)`. It is normally
only implemented by Exit objects. If it returns False (usually
because `move_to` returned False), `at_after_traverse` below
should not be called and instead `at_failed_traverse` should be
called.
Args:
traversing_object (Object): Object traversing us.
target_location (Object): Where target is going.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_after_traverse(self, traversing_object, source_location, **kwargs):
"""
Called just after an object successfully used this object to
traverse to another object (i.e. this object is a type of
Exit)
Args:
traversing_object (Object): The object traversing us.
source_location (Object): Where `traversing_object` came from.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Notes:
The target location should normally be available as `self.destination`.
"""
pass
def at_failed_traverse(self, traversing_object, **kwargs):
"""
This is called if an object fails to traverse this object for
some reason.
Args:
traversing_object (Object): The object that failed traversing us.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Notes:
Using the default exits, this hook will not be called if an
Attribute `err_traverse` is defined - this will in that case be
read for an error string instead.
"""
pass
def at_msg_receive(self, text=None, from_obj=None, **kwargs):
"""
This hook is called whenever someone sends a message to this
object using the `msg` method.
Note that from_obj may be None if the sender did not include
itself as an argument to the obj.msg() call - so you have to
check for this. .
Consider this a pre-processing method before msg is passed on
to the user session. If this method returns False, the msg
will not be passed on.
Args:
text (str, optional): The message received.
from_obj (any, optional): The object sending the message.
Kwargs:
This includes any keywords sent to the `msg` method.
Returns:
receive (bool): If this message should be received.
Notes:
If this method returns False, the `msg` operation
will abort without sending the message.
"""
return True
def at_msg_send(self, text=None, to_obj=None, **kwargs):
"""
This is a hook that is called when *this* object sends a
message to another object with `obj.msg(text, to_obj=obj)`.
Args:
text (str, optional): Text to send.
to_obj (any, optional): The object to send to.
Kwargs:
Keywords passed from msg()
Notes:
Since this method is executed by `from_obj`, if no `from_obj`
was passed to `DefaultCharacter.msg` this hook will never
get called.
"""
pass
# hooks called by the default cmdset.
def return_appearance(self, looker, **kwargs):
"""
This formats a description. It is the hook a 'look' command
should call.
Args:
looker (Object): Object doing the looking.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
if not looker:
return ""
# get and identify all objects
visible = (con for con in self.contents if con != looker and
con.access(looker, "view"))
exits, users, things = [], [], defaultdict(list)
for con in visible:
key = con.get_display_name(looker)
if con.destination:
exits.append(key)
elif con.has_account:
users.append("|c%s|n" % key)
else:
# things can be pluralized
things[key].append(con)
# get description, build string
string = "|c%s|n\n" % self.get_display_name(looker)
desc = self.db.desc
if desc:
string += "%s" % desc
if exits:
string += "\n|wExits:|n " + list_to_string(exits)
if users or things:
# handle pluralization of things (never pluralize users)
thing_strings = []
for key, itemlist in sorted(things.iteritems()):
nitem = len(itemlist)
if nitem == 1:
key, _ = itemlist[0].get_numbered_name(nitem, looker, key=key)
else:
key = [item.get_numbered_name(nitem, looker, key=key)[1] for item in itemlist][0]
thing_strings.append(key)
string += "\n|wYou see:|n " + list_to_string(users + thing_strings)
return string
def at_look(self, target, **kwargs):
"""
Called when this object performs a look. It allows to
customize just what this means. It will not itself
send any data.
Args:
target (Object): The target being looked at. This is
commonly an object or the current location. It will
be checked for the "view" type access.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Returns:
lookstring (str): A ready-processed look string
potentially ready to return to the looker.
"""
if not target.access(self, "view"):
try:
return "Could not view '%s'." % target.get_display_name(self)
except AttributeError:
return "Could not view '%s'." % target.key
description = target.return_appearance(self)
# the target's at_desc() method.
# this must be the last reference to target so it may delete itself when acted on.
target.at_desc(looker=self)
return description
def at_desc(self, looker=None, **kwargs):
"""
This is called whenever someone looks at this object.
Args:
looker (Object, optional): The object requesting the description.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_before_get(self, getter, **kwargs):
"""
Called by the default `get` command before this object has been
picked up.
Args:
getter (Object): The object about to get this object.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Returns:
shouldget (bool): If the object should be gotten or not.
Notes:
If this method returns False/None, the getting is cancelled
before it is even started.
"""
return True
def at_get(self, getter, **kwargs):
"""
Called by the default `get` command when this object has been
picked up.
Args:
getter (Object): The object getting this object.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Notes:
This hook cannot stop the pickup from happening. Use
permissions or the at_before_get() hook for that.
"""
pass
def at_before_give(self, giver, getter, **kwargs):
"""
Called by the default `give` command before this object has been
given.
Args:
giver (Object): The object about to give this object.
getter (Object): The object about to get this object.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Returns:
shouldgive (bool): If the object should be given or not.
Notes:
If this method returns False/None, the giving is cancelled
before it is even started.
"""
return True
def at_give(self, giver, getter, **kwargs):
"""
Called by the default `give` command when this object has been
given.
Args:
giver (Object): The object giving this object.
getter (Object): The object getting this object.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Notes:
This hook cannot stop the give from happening. Use
permissions or the at_before_give() hook for that.
"""
pass
def at_before_drop(self, dropper, **kwargs):
"""
Called by the default `drop` command before this object has been
dropped.
Args:
dropper (Object): The object which will drop this object.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Returns:
shoulddrop (bool): If the object should be dropped or not.
Notes:
If this method returns False/None, the dropping is cancelled
before it is even started.
"""
return True
def at_drop(self, dropper, **kwargs):
"""
Called by the default `drop` command when this object has been
dropped.
Args:
dropper (Object): The object which just dropped this object.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Notes:
This hook cannot stop the drop from happening. Use
permissions or the at_before_drop() hook for that.
"""
pass
def at_before_say(self, message, **kwargs):
"""
Before the object says something.
This hook is by default used by the 'say' and 'whisper'
commands as used by this command it is called before the text
is said/whispered and can be used to customize the outgoing
text from the object. Returning `None` aborts the command.
Args:
message (str): The suggested say/whisper text spoken by self.
Kwargs:
whisper (bool): If True, this is a whisper rather than
a say. This is sent by the whisper command by default.
Other verbal commands could use this hook in similar
ways.
receivers (Object or iterable): If set, this is the target or targets for the say/whisper.
Returns:
message (str): The (possibly modified) text to be spoken.
"""
return message
def at_say(self, message, msg_self=None, msg_location=None,
receivers=None, msg_receivers=None, **kwargs):
"""
Display the actual say (or whisper) of self.
This hook should display the actual say/whisper of the object in its
location. It should both alert the object (self) and its
location that some text is spoken. The overriding of messages or
`mapping` allows for simple customization of the hook without
re-writing it completely.
Args:
message (str): The message to convey.
msg_self (bool or str, optional): If boolean True, echo `message` to self. If a string,
return that message. If False or unset, don't echo to self.
msg_location (str, optional): The message to echo to self's location.
receivers (Object or iterable, optional): An eventual receiver or receivers of the message
(by default only used by whispers).
msg_receivers(str): Specific message to pass to the receiver(s). This will parsed
with the {receiver} placeholder replaced with the given receiver.
Kwargs:
whisper (bool): If this is a whisper rather than a say. Kwargs
can be used by other verbal commands in a similar way.
mapping (dict): Pass an additional mapping to the message.
Notes:
Messages can contain {} markers. These are substituted against the values
passed in the `mapping` argument.
msg_self = 'You say: "{speech}"'
msg_location = '{object} says: "{speech}"'
msg_receivers = '{object} whispers: "{speech}"'
Supported markers by default:
{self}: text to self-reference with (default 'You')
{speech}: the text spoken/whispered by self.
{object}: the object speaking.
{receiver}: replaced with a single receiver only for strings meant for a specific
receiver (otherwise 'None').
{all_receivers}: comma-separated list of all receivers,
if more than one, otherwise same as receiver
{location}: the location where object is.
"""
msg_type = 'say'
if kwargs.get("whisper", False):
# whisper mode
msg_type = 'whisper'
msg_self = '{self} whisper to {all_receivers}, "{speech}"' if msg_self is True else msg_self
msg_receivers = '{object} whispers: "{speech}"'
msg_receivers = msg_receivers or '{object} whispers: "{speech}"'
msg_location = None
else:
msg_self = '{self} say, "{speech}"' if msg_self is True else msg_self
msg_location = msg_location or '{object} says, "{speech}"'
msg_receivers = msg_receivers or message
custom_mapping = kwargs.get('mapping', {})
receivers = make_iter(receivers) if receivers else None
location = self.location
if msg_self:
self_mapping = {"self": "You",
"object": self.get_display_name(self),
"location": location.get_display_name(self) if location else None,
"receiver": None,
"all_receivers": ", ".join(
recv.get_display_name(self)
for recv in receivers) if receivers else None,
"speech": message}
self_mapping.update(custom_mapping)
self.msg(text=(msg_self.format(**self_mapping), {"type": msg_type}), from_obj=self)
if receivers and msg_receivers:
receiver_mapping = {"self": "You",
"object": None,
"location": None,
"receiver": None,
"all_receivers": None,
"speech": message}
for receiver in make_iter(receivers):
individual_mapping = {"object": self.get_display_name(receiver),
"location": location.get_display_name(receiver),
"receiver": receiver.get_display_name(receiver),
"all_receivers": ", ".join(
recv.get_display_name(recv)
for recv in receivers) if receivers else None}
receiver_mapping.update(individual_mapping)
receiver_mapping.update(custom_mapping)
receiver.msg(text=(msg_receivers.format(**receiver_mapping),
{"type": msg_type}), from_obj=self)
if self.location and msg_location:
location_mapping = {"self": "You",
"object": self,
"location": location,
"all_receivers": ", ".join(str(recv) for recv in receivers) if receivers else None,
"receiver": None,
"speech": message}
location_mapping.update(custom_mapping)
exclude = []
if msg_self:
exclude.append(self)
if receivers:
exclude.extend(receivers)
self.location.msg_contents(text=(msg_location, {"type": msg_type}),
from_obj=self,
exclude=exclude,
mapping=location_mapping)
#
# Base Character object
#
class DefaultCharacter(DefaultObject):
"""
This implements an Object puppeted by a Session - that is,
a character avatar controlled by an account.
"""
def basetype_setup(self):
"""
Setup character-specific security.
You should normally not need to overload this, but if you do,
make sure to reproduce at least the two last commands in this
method (unless you want to fundamentally change how a
Character object works).
"""
super(DefaultCharacter, self).basetype_setup()
self.locks.add(";".join(["get:false()", # noone can pick up the character
"call:false()"])) # no commands can be called on character from outside
# add the default cmdset
self.cmdset.add_default(settings.CMDSET_CHARACTER, permanent=True)
def at_after_move(self, source_location, **kwargs):
"""
We make sure to look around after a move.
"""
if self.location.access(self, "view"):
self.msg(self.at_look(self.location))
def at_pre_puppet(self, account, session=None, **kwargs):
"""
Return the character from storage in None location in `at_post_unpuppet`.
Args:
account (Account): This is the connecting account.
session (Session): Session controlling the connection.
"""
if self.location is None: # Make sure character's location is never None before being puppeted.
# Return to last location (or home, which should always exist),
self.location = self.db.prelogout_location if self.db.prelogout_location else self.home
self.location.at_object_receive(self, None) # and trigger the location's reception hook.
if self.location: # If the character is verified to be somewhere,
self.db.prelogout_location = self.location # save location again to be sure.
else:
account.msg("|r%s has no location and no home is set.|n" % self, session=session) # Note to set home.
def at_post_puppet(self, **kwargs):
"""
Called just after puppeting has been completed and all
Account<->Object links have been established.
Args:
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Note:
You can use `self.account` and `self.sessions.get()` to get
account and sessions at this point; the last entry in the
list from `self.sessions.get()` is the latest Session
puppeting this Object.
"""
# NOTE: commenting out extraneous info
#self.msg("\nYou become |c%s|n.\n" % self.name)
self.msg((self.at_look(self.location), {'type':'look'}), options = None)
def message(obj, from_obj):
obj.msg("%s has entered the game." % self.get_display_name(obj), from_obj=from_obj)
self.location.for_contents(message, exclude=[self], from_obj=self)
def at_post_unpuppet(self, account, session=None, **kwargs):
    """
    Stow the character away when the account goes OOC or logs off, so
    the character object does not linger "headless" in the room after
    the account has disconnected.

    Args:
        account (Account): The account object that just disconnected
            from this object.
        session (Session): Session controlling the connection that
            just disconnected.
        **kwargs (dict): Arbitrary, optional arguments for users
            overriding the call (unused by default).
    """
    if self.sessions.count():
        # other sessions still control this character - leave it in place.
        return
    if not self.location:
        return

    def announce_departure(obj, from_obj):
        obj.msg("%s has left the game." % self.get_display_name(obj), from_obj=from_obj)

    self.location.for_contents(announce_departure, exclude=[self], from_obj=self)
    # remember where we were so at_pre_puppet can bring us back here
    self.db.prelogout_location = self.location
    self.location = None
@property
def idle_time(self):
    """
    Idle time of the *least* idle connected session, in seconds.
    Returns `None` when no sessions are connected.
    """
    timestamps = [sess.cmd_last_visible for sess in self.sessions.all()]
    if not timestamps:
        return None
    # most recent visible command across all sessions
    return time.time() - float(max(timestamps))
@property
def connection_time(self):
    """
    Longest connection time among all connected sessions, in seconds.
    Returns `None` when no sessions are connected.
    """
    start_times = [sess.conn_time for sess in self.sessions.all()]
    if not start_times:
        return None
    # oldest connection gives the maximum connection time
    return time.time() - float(min(start_times))
#
# Base Room object
class DefaultRoom(DefaultObject):
    """
    Base room typeclass. A room behaves like any Object except that
    its location is always `None`.
    """

    def basetype_setup(self):
        """
        Room-specific lock setup, making sure the room can neither be
        picked up nor puppeted.
        """
        super(DefaultRoom, self).basetype_setup()
        # picking up or puppeting a room would make no sense
        self.locks.add("get:false();puppet:false()")
        # rooms are the top of the location hierarchy
        self.location = None
#
# Default Exit command, used by the base exit object
#
class ExitCommand(command.Command):
    """
    Command that simply makes its caller traverse the exit object it
    is attached to.
    """
    obj = None

    def func(self):
        """
        Default exit traverse if no syscommand is defined.
        """
        if not self.obj.access(self.caller, 'traverse'):
            # exit is locked - prefer a custom error message if one is set
            err = self.obj.db.err_traverse
            if err:
                self.caller.msg(err)
            else:
                # no shorthand error message; call the hook instead
                self.obj.at_failed_traverse(self.caller)
            return
        # access granted - perform the traversal
        self.obj.at_traverse(self.caller, self.obj.destination)

    def get_extra_info(self, caller, **kwargs):
        """
        Show a bit of information on where the exit leads.

        Args:
            caller (Object): The object (usually a character) that entered an ambiguous command.
            **kwargs (dict): Arbitrary, optional arguments for users
                overriding the call (unused by default).

        Returns:
            A string with identifying information to disambiguate the command, conventionally with a preceding space.
        """
        destination = self.obj.destination
        if destination:
            return " (exit to %s)" % destination.get_display_name(caller)
        return " (%s)" % self.obj.get_display_name(caller)
#
# Base Exit object
class DefaultExit(DefaultObject):
    """
    This is the base exit object - it connects a location to another.
    This is done by the exit assigning a "command" on itself with the
    same name as the exit object (to do this we need to remember to
    re-create the command when the object is cached since it must be
    created dynamically depending on what the exit is called). This
    command (which has a high priority) will thus allow us to traverse
    exits simply by giving the exit-object's name on its own.
    """
    exit_command = ExitCommand
    priority = 101

    # Helper classes and methods to implement the Exit. These need not
    # be overloaded unless one want to change the foundation for how
    # Exits work. See the end of the class for hook methods to overload.

    def create_exit_cmdset(self, exidbobj):
        """
        Helper function for creating an exit command set + command.

        The command of this cmdset has the same name as the Exit
        object and allows the exit to react when the account enter the
        exit's name, triggering the movement between rooms.

        Args:
            exidbobj (Object): The DefaultExit object to base the command on.
        """
        # create an exit command. We give the properties here,
        # to always trigger metaclass preparations
        cmd = self.exit_command(key=exidbobj.db_key.strip().lower(),
                                aliases=exidbobj.aliases.all(),
                                locks=str(exidbobj.locks),
                                auto_help=False,
                                destination=exidbobj.db_destination,
                                arg_regex=r"^$",
                                is_exit=True,
                                obj=exidbobj)
        # create a cmdset
        exit_cmdset = cmdset.CmdSet(None)
        exit_cmdset.key = 'ExitCmdSet'
        exit_cmdset.priority = self.priority
        exit_cmdset.duplicates = True
        # add command to cmdset
        exit_cmdset.add(cmd)
        return exit_cmdset

    # Command hooks

    def basetype_setup(self):
        """
        Setup exit-security

        You should normally not need to overload this - if you do make
        sure you include all the functionality in this method.
        """
        super(DefaultExit, self).basetype_setup()

        # setting default locks (overload these in at_object_creation()
        self.locks.add(";".join(["puppet:false()",   # would be weird to puppet an exit ...
                                 "traverse:all()",   # who can pass through exit by default
                                 "get:false()"]))    # noone can pick up the exit

        # an exit should have a destination (this is replaced at creation time)
        if self.location:
            self.destination = self.location

    def at_cmdset_get(self, **kwargs):
        """
        Called just before cmdsets on this object are requested by the
        command handler. If changes need to be done on the fly to the
        cmdset before passing them on to the cmdhandler, this is the
        place to do it. This is called also if the object currently
        has no cmdsets.

        Kwargs:
            force_init (bool): If `True`, force a re-build of the cmdset
                (for example to update aliases).
        """
        if "force_init" in kwargs or not self.cmdset.has_cmdset("ExitCmdSet", must_be_default=True):
            # we are resetting, or no exit-cmdset was set. Create one dynamically.
            self.cmdset.add_default(self.create_exit_cmdset(self), permanent=False)

    def at_init(self):
        """
        This is called when this objects is re-loaded from cache. When
        that happens, we make sure to remove any old ExitCmdSet cmdset
        (this most commonly occurs when renaming an existing exit)
        """
        self.cmdset.remove_default()

    def at_traverse(self, traversing_object, target_location, **kwargs):
        """
        This implements the actual traversal. The traverse lock has
        already been checked (in the Exit command) at this point.

        Args:
            traversing_object (Object): Object traversing us.
            target_location (Object): Where target is going.
            **kwargs (dict): Arbitrary, optional arguments for users
                overriding the call (unused by default).
        """
        source_location = traversing_object.location
        if traversing_object.move_to(target_location):
            self.at_after_traverse(traversing_object, source_location)
        else:
            if self.db.err_traverse:
                # if exit has a better error message, let's use it.
                # BUGFIX: the exit object itself has no `caller` attribute
                # (only Commands do), so the previous `self.caller.msg(...)`
                # raised AttributeError whenever a custom err_traverse was
                # set. The message belongs to the object that failed to move.
                traversing_object.msg(self.db.err_traverse)
            else:
                # No shorthand error message. Call hook.
                self.at_failed_traverse(traversing_object)

    def at_failed_traverse(self, traversing_object, **kwargs):
        """
        Overloads the default hook to implement a simple default error message.

        Args:
            traversing_object (Object): The object that failed traversing us.
            **kwargs (dict): Arbitrary, optional arguments for users
                overriding the call (unused by default).

        Notes:
            Using the default exits, this hook will not be called if an
            Attribute `err_traverse` is defined - this will in that case be
            read for an error string instead.
        """
        traversing_object.msg("You cannot go there.")
| 39.042424
| 117
| 0.5889
|
import time
import inflect
from builtins import object
from future.utils import with_metaclass
from collections import defaultdict
from django.conf import settings
from evennia.typeclasses.models import TypeclassBase
from evennia.typeclasses.attributes import NickHandler
from evennia.objects.manager import ObjectManager
from evennia.objects.models import ObjectDB
from evennia.scripts.scripthandler import ScriptHandler
from evennia.commands import cmdset, command
from evennia.commands.cmdsethandler import CmdSetHandler
from evennia.commands import cmdhandler
from evennia.utils import search
from evennia.utils import logger
from evennia.utils import ansi
from evennia.utils.utils import (variable_from_module, lazy_property,
make_iter, to_unicode, is_iter, list_to_string,
to_str)
from django.utils.translation import ugettext as _
_INFLECT = inflect.engine()
_MULTISESSION_MODE = settings.MULTISESSION_MODE
_ScriptDB = None
_SESSIONS = None
_AT_SEARCH_RESULT = variable_from_module(*settings.SEARCH_AT_RESULT.rsplit('.', 1))
_SESSID_MAX = 16 if _MULTISESSION_MODE in (1, 3) else 1
class ObjectSessionHandler(object):
def __init__(self, obj):
self.obj = obj
self._sessid_cache = []
self._recache()
def _recache(self):
global _SESSIONS
if not _SESSIONS:
from evennia.server.sessionhandler import SESSIONS as _SESSIONS
self._sessid_cache = list(set(int(val) for val in (self.obj.db_sessid or "").split(",") if val))
if any(sessid for sessid in self._sessid_cache if sessid not in _SESSIONS):
self._sessid_cache = [sessid for sessid in self._sessid_cache if sessid in _SESSIONS]
self.obj.db_sessid = ",".join(str(val) for val in self._sessid_cache)
self.obj.save(update_fields=["db_sessid"])
def get(self, sessid=None):
global _SESSIONS
if not _SESSIONS:
from evennia.server.sessionhandler import SESSIONS as _SESSIONS
if sessid:
sessions = [_SESSIONS[sessid] if sessid in _SESSIONS else None] if sessid in self._sessid_cache else []
else:
sessions = [_SESSIONS[ssid] if ssid in _SESSIONS else None for ssid in self._sessid_cache]
if None in sessions:
self._recache()
return self.get(sessid=sessid)
return sessions
def all(self):
return self.get()
def add(self, session):
global _SESSIONS
if not _SESSIONS:
from evennia.server.sessionhandler import SESSIONS as _SESSIONS
try:
sessid = session.sessid
except AttributeError:
sessid = session
sessid_cache = self._sessid_cache
if sessid in _SESSIONS and sessid not in sessid_cache:
if len(sessid_cache) >= _SESSID_MAX:
return
sessid_cache.append(sessid)
self.obj.db_sessid = ",".join(str(val) for val in sessid_cache)
self.obj.save(update_fields=["db_sessid"])
def remove(self, session):
try:
sessid = session.sessid
except AttributeError:
sessid = session
sessid_cache = self._sessid_cache
if sessid in sessid_cache:
sessid_cache.remove(sessid)
self.obj.db_sessid = ",".join(str(val) for val in sessid_cache)
self.obj.save(update_fields=["db_sessid"])
def clear(self):
self._sessid_cache = []
self.obj.db_sessid = None
self.obj.save(update_fields=["db_sessid"])
def count(self):
return len(self._sessid_cache)
class DefaultObject(with_metaclass(TypeclassBase, ObjectDB)):
objects = ObjectManager()
@lazy_property
def cmdset(self):
return CmdSetHandler(self, True)
@lazy_property
def scripts(self):
return ScriptHandler(self)
@lazy_property
def nicks(self):
return NickHandler(self)
@lazy_property
def sessions(self):
return ObjectSessionHandler(self)
@property
def is_connected(self):
if self.account: return self.account.is_connected
else:
return False
@property
def has_account(self):
return self.sessions.count()
@property
def is_superuser(self):
return self.db_account and self.db_account.is_superuser \
and not self.db_account.attributes.get("_quell")
def contents_get(self, exclude=None):
con = self.contents_cache.get(exclude=exclude)
return con
contents = property(contents_get)
@property
def exits(self):
return [exi for exi in self.contents if exi.destination]
def get_display_name(self, looker, **kwargs):
if self.locks.check_lockstring(looker, "perm(Builder)"):
return "{}(#{})".format(self.name, self.id)
return self.name
def get_numbered_name(self, count, looker, **kwargs):
key = kwargs.get("key", self.key)
key = ansi.ANSIString(key) plural = _INFLECT.plural(key, 2)
plural = "%s %s" % (_INFLECT.number_to_words(count, threshold=12), plural)
singular = _INFLECT.an(key)
if not self.aliases.get(plural, category="plural_key"):
self.aliases.clear(category="plural_key")
self.aliases.add(plural, category="plural_key")
self.aliases.add(singular, category="plural_key")
return singular, plural
def search(self, searchdata,
global_search=False,
use_nicks=True,
typeclass=None,
location=None,
attribute_name=None,
quiet=False,
exact=False,
candidates=None,
nofound_string=None,
multimatch_string=None,
use_dbref=None):
is_string = isinstance(searchdata, basestring)
if is_string:
if searchdata.lower() in ("here", ):
return [self.location] if quiet else self.location
if searchdata.lower() in ("me", "self",):
return [self] if quiet else self
if use_dbref is None:
use_dbref = self.locks.check_lockstring(self, "_dummy:perm(Builder)")
if use_nicks:
searchdata = self.nicks.nickreplace(searchdata, categories=("object", "account"), include_account=True)
if (global_search or (is_string and searchdata.startswith("#") and
len(searchdata) > 1 and searchdata[1:].isdigit())):
exact = True
candidates = None
elif candidates is None:
if location:
candidates = []
for obj in make_iter(location):
candidates.extend(obj.contents)
else:
location = self.location
candidates = self.contents
if location:
candidates = candidates + [location] + location.contents
else:
# included in location.contents
candidates.append(self)
results = ObjectDB.objects.object_search(searchdata,
attribute_name=attribute_name,
typeclass=typeclass,
candidates=candidates,
exact=exact,
use_dbref=use_dbref)
if quiet:
return results
return _AT_SEARCH_RESULT(results, self, query=searchdata,
nofound_string=nofound_string, multimatch_string=multimatch_string)
def search_account(self, searchdata, quiet=False):
if isinstance(searchdata, basestring):
# searchdata is a string; wrap some common self-references
if searchdata.lower() in ("me", "self",):
return [self.account] if quiet else self.account
results = search.search_account(searchdata)
if quiet:
return results
return _AT_SEARCH_RESULT(results, self, query=searchdata)
def execute_cmd(self, raw_string, session=None, **kwargs):
# nick replacement - we require full-word matching.
# do text encoding conversion
raw_string = to_unicode(raw_string)
raw_string = self.nicks.nickreplace(raw_string, categories=("inputline", "channel"), include_account=True)
return cmdhandler.cmdhandler(self, raw_string, callertype="object", session=session, **kwargs)
def msg(self, text=None, from_obj=None, session=None, options=None, **kwargs):
# try send hooks
if from_obj:
for obj in make_iter(from_obj):
try:
obj.at_msg_send(text=text, to_obj=self, **kwargs)
except Exception:
logger.log_trace()
kwargs["options"] = options
try:
if not self.at_msg_receive(text=text, **kwargs):
# if at_msg_receive returns false, we abort message to this object
return
except Exception:
logger.log_trace()
if text is not None:
if not (isinstance(text, basestring) or isinstance(text, tuple)):
# sanitize text before sending across the wire
try:
text = to_str(text, force_string=True)
except Exception:
text = repr(text)
kwargs['text'] = text
# relay to session(s)
sessions = make_iter(session) if session else self.sessions.all()
for session in sessions:
session.data_out(**kwargs)
def for_contents(self, func, exclude=None, **kwargs):
contents = self.contents
if exclude:
exclude = make_iter(exclude)
contents = [obj for obj in contents if obj not in exclude]
for obj in contents:
func(obj, **kwargs)
def msg_contents(self, text=None, exclude=None, from_obj=None, mapping=None, **kwargs):
# we also accept an outcommand on the form (message, {kwargs})
is_outcmd = text and is_iter(text)
inmessage = text[0] if is_outcmd else text
outkwargs = text[1] if is_outcmd and len(text) > 1 else {}
contents = self.contents
if exclude:
exclude = make_iter(exclude)
contents = [obj for obj in contents if obj not in exclude]
for obj in contents:
if mapping:
substitutions = {t: sub.get_display_name(obj)
if hasattr(sub, 'get_display_name')
else str(sub) for t, sub in mapping.items()}
outmessage = inmessage.format(**substitutions)
else:
outmessage = inmessage
obj.msg(text=(outmessage, outkwargs), from_obj=from_obj, **kwargs)
def move_to(self, destination, quiet=False,
emit_to_obj=None, use_destination=True, to_none=False, move_hooks=True,
**kwargs):
def logerr(string="", err=None):
logger.log_trace()
self.msg("%s%s" % (string, "" if err is None else " (%s)" % err))
return
errtxt = _("Couldn't perform move ('%s'). Contact an admin.")
if not emit_to_obj:
emit_to_obj = self
if not destination:
if to_none:
self.location = None
return True
emit_to_obj.msg(_("The destination doesn't exist."))
return False
if destination.destination and use_destination:
# traverse exits
destination = destination.destination
# Before the move, call eventual pre-commands.
if move_hooks:
try:
if not self.at_before_move(destination):
return False
except Exception as err:
logerr(errtxt % "at_before_move()", err)
return False
# Save the old location
source_location = self.location
# Call hook on source location
if move_hooks and source_location:
try:
source_location.at_object_leave(self, destination)
except Exception as err:
logerr(errtxt % "at_object_leave()", err)
return False
if not quiet:
# tell the old room we are leaving
try:
self.announce_move_from(destination, **kwargs)
except Exception as err:
logerr(errtxt % "at_announce_move()", err)
return False
# Perform move
try:
self.location = destination
except Exception as err:
logerr(errtxt % "location change", err)
return False
if not quiet:
# Tell the new room we are there.
try:
self.announce_move_to(source_location, **kwargs)
except Exception as err:
logerr(errtxt % "announce_move_to()", err)
return False
if move_hooks:
# Perform eventual extra commands on the receiving location
# (the object has already arrived at this point)
try:
destination.at_object_receive(self, source_location)
except Exception as err:
logerr(errtxt % "at_object_receive()", err)
return False
# Execute eventual extra commands on this object after moving it
# (usually calling 'look')
if move_hooks:
try:
self.at_after_move(source_location)
except Exception as err:
logerr(errtxt % "at_after_move", err)
return False
return True
def clear_exits(self):
for out_exit in [exi for exi in ObjectDB.objects.get_contents(self) if exi.db_destination]:
out_exit.delete()
for in_exit in ObjectDB.objects.filter(db_destination=self):
in_exit.delete()
def clear_contents(self):
# Gather up everything that thinks this is its location.
default_home_id = int(settings.DEFAULT_HOME.lstrip("#"))
try:
default_home = ObjectDB.objects.get(id=default_home_id)
if default_home.dbid == self.dbid:
# we are deleting default home!
default_home = None
except Exception:
string = _("Could not find default home '(#%d)'.")
logger.log_err(string % default_home_id)
default_home = None
for obj in self.contents:
home = obj.home
# Obviously, we can't send it back to here.
if not home or (home and home.dbid == self.dbid):
obj.home = default_home
home = default_home
if not home:
string = "Missing default home, '%s(#%d)' "
string += "now has a null location."
obj.location = None
obj.msg(_("Something went wrong! You are dumped into nowhere. Contact an admin."))
logger.log_err(string % (obj.name, obj.dbid))
return
if obj.has_account:
if home:
string = "Your current location has ceased to exist,"
string += " moving you to %s(#%d)."
obj.msg(_(string) % (home.name, home.dbid))
else:
# Famous last words: The account should never see this.
string = "This place should not exist ... contact an admin."
obj.msg(_(string))
obj.move_to(home)
def copy(self, new_key=None):
def find_clone_key():
key = self.key
num = sum(1 for obj in self.location.contents
if obj.key.startswith(key) and obj.key.lstrip(key).isdigit())
return "%s%03i" % (key, num)
new_key = new_key or find_clone_key()
return ObjectDB.objects.copy_object(self, new_key=new_key)
def delete(self):
global _ScriptDB
if not _ScriptDB:
from evennia.scripts.models import ScriptDB as _ScriptDB
if not self.pk or not self.at_object_delete():
# This object has already been deleted,
# or the pre-delete check return False
return False
# See if we need to kick the account off.
for session in self.sessions.all():
session.msg(_("Your character %s has been destroyed.") % self.key)
# no need to disconnect, Account just jumps to OOC mode.
# sever the connection (important!)
if self.account:
for session in self.sessions.all():
self.account.unpuppet_object(session)
self.account = None
for script in _ScriptDB.objects.get_all_scripts_on_obj(self):
script.stop()
# Destroy any exits to and from this room, if any
self.clear_exits()
# Clear out any non-exit objects located within the object
self.clear_contents()
self.attributes.clear()
self.nicks.clear()
self.aliases.clear()
self.location = None # this updates contents_cache for our location
# Perform the deletion of the object
super(DefaultObject, self).delete()
return True
def access(self, accessing_obj, access_type='read', default=False, no_superuser_bypass=False, **kwargs):
result = super(DefaultObject, self).access(accessing_obj, access_type=access_type,
default=default, no_superuser_bypass=no_superuser_bypass)
self.at_access(result, accessing_obj, access_type, **kwargs)
return result
#
# Hook methods
#
def at_first_save(self):
self.basetype_setup()
self.at_object_creation()
if hasattr(self, "_createdict"):
# this will only be set if the utils.create function
# was used to create the object. We want the create
# call's kwargs to override the values set by hooks.
cdict = self._createdict
updates = []
if not cdict.get("key"):
if not self.db_key:
self.db_key = "#%i" % self.dbid
updates.append("db_key")
elif self.key != cdict.get("key"):
updates.append("db_key")
self.db_key = cdict["key"]
if cdict.get("location") and self.location != cdict["location"]:
self.db_location = cdict["location"]
updates.append("db_location")
if cdict.get("home") and self.home != cdict["home"]:
self.home = cdict["home"]
updates.append("db_home")
if cdict.get("destination") and self.destination != cdict["destination"]:
self.destination = cdict["destination"]
updates.append("db_destination")
if updates:
self.save(update_fields=updates)
if cdict.get("permissions"):
self.permissions.batch_add(*cdict["permissions"])
if cdict.get("locks"):
self.locks.add(cdict["locks"])
if cdict.get("aliases"):
self.aliases.batch_add(*cdict["aliases"])
if cdict.get("location"):
cdict["location"].at_object_receive(self, None)
self.at_after_move(None)
if cdict.get("tags"):
self.tags.batch_add(*cdict["tags"])
if cdict.get("attributes"):
self.attributes.batch_add(*cdict["attributes"])
if cdict.get("nattributes"):
for key, value in cdict["nattributes"]:
self.nattributes.add(key, value)
del self._createdict
self.basetype_posthook_setup()
def basetype_setup(self):
self.locks.add(";".join([
"control:perm(Developer)", "examine:perm(Builder)", "view:all()", "edit:perm(Admin)", "delete:perm(Admin)", "get:all()", "call:true()", "tell:perm(Admin)", "puppet:pperm(Developer)"]))
def basetype_posthook_setup(self):
pass
def at_object_creation(self):
pass
def at_object_delete(self):
return True
def at_init(self):
pass
def at_cmdset_get(self, **kwargs):
pass
def at_pre_puppet(self, account, session=None, **kwargs):
pass
def at_post_puppet(self, **kwargs):
self.account.db._last_puppet = self
def at_pre_unpuppet(self, **kwargs):
pass
def at_post_unpuppet(self, account, session=None, **kwargs):
pass
def at_server_reload(self):
pass
def at_server_shutdown(self):
pass
def at_access(self, result, accessing_obj, access_type, **kwargs):
pass
def at_before_move(self, destination, **kwargs):
return True
def announce_move_from(self, destination, msg=None, mapping=None, **kwargs):
if not self.location:
return
if msg:
string = msg
else:
string = "{object} is leaving {origin}, heading for {destination}."
location = self.location
exits = [o for o in location.contents if o.location is location and o.destination is destination]
if not mapping:
mapping = {}
mapping.update({
"object": self,
"exit": exits[0] if exits else "somewhere",
"origin": location or "nowhere",
"destination": destination or "nowhere",
})
location.msg_contents(string, exclude=(self, ), mapping=mapping)
def announce_move_to(self, source_location, msg=None, mapping=None, **kwargs):
if not source_location and self.location.has_account:
# inventory; it's probably the result of a create command.
string = "You now have %s in your possession." % self.get_display_name(self.location)
self.location.msg(string)
return
if source_location:
if msg:
string = msg
else:
string = "{object} arrives to {destination} from {origin}."
else:
string = "{object} arrives to {destination}."
origin = source_location
destination = self.location
exits = []
if origin:
exits = [o for o in destination.contents if o.location is destination and o.destination is origin]
if not mapping:
mapping = {}
mapping.update({
"object": self,
"exit": exits[0] if exits else "somewhere",
"origin": origin or "nowhere",
"destination": destination or "nowhere",
})
destination.msg_contents(string, exclude=(self, ), mapping=mapping)
def at_after_move(self, source_location, **kwargs):
pass
def at_object_leave(self, moved_obj, target_location, **kwargs):
pass
def at_object_receive(self, moved_obj, source_location, **kwargs):
pass
def at_traverse(self, traversing_object, target_location, **kwargs):
pass
def at_after_traverse(self, traversing_object, source_location, **kwargs):
pass
def at_failed_traverse(self, traversing_object, **kwargs):
pass
def at_msg_receive(self, text=None, from_obj=None, **kwargs):
return True
def at_msg_send(self, text=None, to_obj=None, **kwargs):
pass
def return_appearance(self, looker, **kwargs):
if not looker:
return ""
visible = (con for con in self.contents if con != looker and
con.access(looker, "view"))
exits, users, things = [], [], defaultdict(list)
for con in visible:
key = con.get_display_name(looker)
if con.destination:
exits.append(key)
elif con.has_account:
users.append("|c%s|n" % key)
else:
things[key].append(con)
string = "|c%s|n\n" % self.get_display_name(looker)
desc = self.db.desc
if desc:
string += "%s" % desc
if exits:
string += "\n|wExits:|n " + list_to_string(exits)
if users or things:
thing_strings = []
for key, itemlist in sorted(things.iteritems()):
nitem = len(itemlist)
if nitem == 1:
key, _ = itemlist[0].get_numbered_name(nitem, looker, key=key)
else:
key = [item.get_numbered_name(nitem, looker, key=key)[1] for item in itemlist][0]
thing_strings.append(key)
string += "\n|wYou see:|n " + list_to_string(users + thing_strings)
return string
def at_look(self, target, **kwargs):
if not target.access(self, "view"):
try:
return "Could not view '%s'." % target.get_display_name(self)
except AttributeError:
return "Could not view '%s'." % target.key
description = target.return_appearance(self)
# this must be the last reference to target so it may delete itself when acted on.
target.at_desc(looker=self)
return description
def at_desc(self, looker=None, **kwargs):
pass
def at_before_get(self, getter, **kwargs):
return True
def at_get(self, getter, **kwargs):
pass
def at_before_give(self, giver, getter, **kwargs):
return True
def at_give(self, giver, getter, **kwargs):
pass
def at_before_drop(self, dropper, **kwargs):
return True
def at_drop(self, dropper, **kwargs):
pass
def at_before_say(self, message, **kwargs):
return message
def at_say(self, message, msg_self=None, msg_location=None,
receivers=None, msg_receivers=None, **kwargs):
msg_type = 'say'
if kwargs.get("whisper", False):
# whisper mode
msg_type = 'whisper'
msg_self = '{self} whisper to {all_receivers}, "{speech}"' if msg_self is True else msg_self
msg_receivers = '{object} whispers: "{speech}"'
msg_receivers = msg_receivers or '{object} whispers: "{speech}"'
msg_location = None
else:
msg_self = '{self} say, "{speech}"' if msg_self is True else msg_self
msg_location = msg_location or '{object} says, "{speech}"'
msg_receivers = msg_receivers or message
custom_mapping = kwargs.get('mapping', {})
receivers = make_iter(receivers) if receivers else None
location = self.location
if msg_self:
self_mapping = {"self": "You",
"object": self.get_display_name(self),
"location": location.get_display_name(self) if location else None,
"receiver": None,
"all_receivers": ", ".join(
recv.get_display_name(self)
for recv in receivers) if receivers else None,
"speech": message}
self_mapping.update(custom_mapping)
self.msg(text=(msg_self.format(**self_mapping), {"type": msg_type}), from_obj=self)
if receivers and msg_receivers:
receiver_mapping = {"self": "You",
"object": None,
"location": None,
"receiver": None,
"all_receivers": None,
"speech": message}
for receiver in make_iter(receivers):
individual_mapping = {"object": self.get_display_name(receiver),
"location": location.get_display_name(receiver),
"receiver": receiver.get_display_name(receiver),
"all_receivers": ", ".join(
recv.get_display_name(recv)
for recv in receivers) if receivers else None}
receiver_mapping.update(individual_mapping)
receiver_mapping.update(custom_mapping)
receiver.msg(text=(msg_receivers.format(**receiver_mapping),
{"type": msg_type}), from_obj=self)
if self.location and msg_location:
location_mapping = {"self": "You",
"object": self,
"location": location,
"all_receivers": ", ".join(str(recv) for recv in receivers) if receivers else None,
"receiver": None,
"speech": message}
location_mapping.update(custom_mapping)
exclude = []
if msg_self:
exclude.append(self)
if receivers:
exclude.extend(receivers)
self.location.msg_contents(text=(msg_location, {"type": msg_type}),
from_obj=self,
exclude=exclude,
mapping=location_mapping)
#
# Base Character object
#
class DefaultCharacter(DefaultObject):
def basetype_setup(self):
super(DefaultCharacter, self).basetype_setup()
self.locks.add(";".join(["get:false()", # noone can pick up the character
"call:false()"])) # no commands can be called on character from outside
# add the default cmdset
self.cmdset.add_default(settings.CMDSET_CHARACTER, permanent=True)
def at_after_move(self, source_location, **kwargs):
if self.location.access(self, "view"):
self.msg(self.at_look(self.location))
def at_pre_puppet(self, account, session=None, **kwargs):
if self.location is None: # Make sure character's location is never None before being puppeted.
self.location = self.db.prelogout_location if self.db.prelogout_location else self.home
self.location.at_object_receive(self, None) if self.location: # If the character is verified to be somewhere,
self.db.prelogout_location = self.location # save location again to be sure.
else:
account.msg("|r%s has no location and no home is set.|n" % self, session=session) # Note to set home.
def at_post_puppet(self, **kwargs):
# NOTE: commenting out extraneous info
#self.msg("\nYou become |c%s|n.\n" % self.name)
self.msg((self.at_look(self.location), {'type':'look'}), options = None)
def message(obj, from_obj):
obj.msg("%s has entered the game." % self.get_display_name(obj), from_obj=from_obj)
self.location.for_contents(message, exclude=[self], from_obj=self)
def at_post_unpuppet(self, account, session=None, **kwargs):
if not self.sessions.count():
# only remove this char from grid if no sessions control it anymore.
if self.location:
def message(obj, from_obj):
obj.msg("%s has left the game." % self.get_display_name(obj), from_obj=from_obj)
self.location.for_contents(message, exclude=[self], from_obj=self)
self.db.prelogout_location = self.location
self.location = None
@property
def idle_time(self):
idle = [session.cmd_last_visible for session in self.sessions.all()]
if idle:
return time.time() - float(max(idle))
return None
@property
def connection_time(self):
conn = [session.conn_time for session in self.sessions.all()]
if conn:
return time.time() - float(min(conn))
return None
#
# Base Room object
class DefaultRoom(DefaultObject):
def basetype_setup(self):
super(DefaultRoom, self).basetype_setup()
self.locks.add(";".join(["get:false()",
"puppet:false()"])) # would be weird to puppet a room ...
self.location = None
#
# Default Exit command, used by the base exit object
#
class ExitCommand(command.Command):
obj = None
def func(self):
if self.obj.access(self.caller, 'traverse'):
# we may traverse the exit.
self.obj.at_traverse(self.caller, self.obj.destination)
else:
# exit is locked
if self.obj.db.err_traverse:
# if exit has a better error message, let's use it.
self.caller.msg(self.obj.db.err_traverse)
else:
self.obj.at_failed_traverse(self.caller)
def get_extra_info(self, caller, **kwargs):
if self.obj.destination:
return " (exit to %s)" % self.obj.destination.get_display_name(caller)
else:
return " (%s)" % self.obj.get_display_name(caller)
class DefaultExit(DefaultObject):
    """Base exit typeclass: an object linking two locations.

    The exit dynamically builds a command named after itself, so typing
    the exit's name (or an alias) moves the caller to ``destination``.
    """

    exit_command = ExitCommand
    # High priority so exit names win over same-named ordinary commands.
    priority = 101

    def create_exit_cmdset(self, exidbobj):
        """Build the cmdset holding the single traversal command for this exit.

        Args:
            exidbobj: the exit object (usually ``self``) the command acts on.
        Returns:
            A CmdSet containing one ExitCommand keyed to the exit's name.
        """
        cmd = self.exit_command(key=exidbobj.db_key.strip().lower(),
                                aliases=exidbobj.aliases.all(),
                                locks=str(exidbobj.locks),
                                auto_help=False,
                                destination=exidbobj.db_destination,
                                arg_regex=r"^$",
                                is_exit=True,
                                obj=exidbobj)
        exit_cmdset = cmdset.CmdSet(None)
        exit_cmdset.key = 'ExitCmdSet'
        exit_cmdset.priority = self.priority
        # Allow several same-named exits to coexist without clashing.
        exit_cmdset.duplicates = True
        exit_cmdset.add(cmd)
        return exit_cmdset

    def basetype_setup(self):
        """Set exit locks: traversable by all, never taken or puppeted."""
        super(DefaultExit, self).basetype_setup()
        self.locks.add(";".join(["puppet:false()", "traverse:all()", "get:false()"]))
        # A freshly created exit defaults to leading back to where it sits.
        if self.location:
            self.destination = self.location

    def at_cmdset_get(self, **kwargs):
        """(Re)build the exit cmdset on demand unless it is already cached."""
        if "force_init" in kwargs or not self.cmdset.has_cmdset("ExitCmdSet", must_be_default=True):
            self.cmdset.add_default(self.create_exit_cmdset(self), permanent=False)

    def at_init(self):
        """Drop the cached cmdset so it is rebuilt lazily after a reload."""
        self.cmdset.remove_default()

    def at_traverse(self, traversing_object, target_location, **kwargs):
        """Move ``traversing_object`` through this exit to ``target_location``.

        On a failed move, show ``db.err_traverse`` if set, otherwise call
        the ``at_failed_traverse`` hook.
        """
        source_location = traversing_object.location
        if traversing_object.move_to(target_location):
            self.at_after_traverse(traversing_object, source_location)
        else:
            if self.db.err_traverse:
                # BUGFIX: message the traversing object. The exit object has
                # no ``caller`` attribute (that lives on the Command), so the
                # previous ``self.caller.msg(...)`` raised AttributeError.
                traversing_object.msg(self.db.err_traverse)
            else:
                # No shorthand error message. Call hook.
                self.at_failed_traverse(traversing_object)

    def at_failed_traverse(self, traversing_object, **kwargs):
        """Fallback feedback when traversal fails and no custom error is set."""
        traversing_object.msg("You cannot go there.")
| true
| true
|
f708f96bbfaa16617380f5df256668d0302deda9
| 204
|
py
|
Python
|
pickeats/admin.py
|
PatrickKan/PickEats
|
9d82a5fc1dfd0d329bf16f7fc60f1c3e7e676d53
|
[
"MIT"
] | 1
|
2020-05-03T04:28:57.000Z
|
2020-05-03T04:28:57.000Z
|
pickeats/admin.py
|
PatrickKan/PickEats
|
9d82a5fc1dfd0d329bf16f7fc60f1c3e7e676d53
|
[
"MIT"
] | null | null | null |
pickeats/admin.py
|
PatrickKan/PickEats
|
9d82a5fc1dfd0d329bf16f7fc60f1c3e7e676d53
|
[
"MIT"
] | null | null | null |
from django.contrib import admin

from .models import Preference, Profile, Allergy, Goal

# Expose every core model in the Django admin site.
for model in (Preference, Profile, Allergy, Goal):
    admin.site.register(model)
| 29.142857
| 54
| 0.823529
|
from django.contrib import admin
from .models import Preference, Profile, Allergy, Goal
admin.site.register(Preference)
admin.site.register(Profile)
admin.site.register(Allergy)
admin.site.register(Goal)
| true
| true
|
f708f9c9fe0eba5341025819686777bb36f2107c
| 1,440
|
py
|
Python
|
contents/serializers.py
|
omaralbeik/omaralbeik.com-api
|
03ce663fe2b3c52363520437d0f5b09cfcb121db
|
[
"MIT"
] | null | null | null |
contents/serializers.py
|
omaralbeik/omaralbeik.com-api
|
03ce663fe2b3c52363520437d0f5b09cfcb121db
|
[
"MIT"
] | 1
|
2018-04-05T13:44:13.000Z
|
2018-04-05T14:45:32.000Z
|
contents/serializers.py
|
omaralbeik/omaralbeik.com-api
|
03ce663fe2b3c52363520437d0f5b09cfcb121db
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
import markdown2
from .models import Content
from omaralbeik import server_variables as sv
class ContentSerializer(serializers.ModelSerializer):
    """Serializer for Content: adds rendered HTML, tags, web URL and SEO meta."""

    tags = serializers.SerializerMethodField()
    html_text = serializers.SerializerMethodField()
    website_url = serializers.SerializerMethodField()
    meta = serializers.SerializerMethodField()

    class Meta:
        model = Content
        fields = (
            "id",
            "title",
            "slug",
            "image_url",
            "summary",
            "text",
            "html_text",
            "website_url",
            "tags",
            "meta",
        )

    def get_website_url(self, content):
        """Return the content's canonical URL on the production client."""
        return "{}/{}".format(sv.CLIENT_PROD_URL, content.slug)

    def get_html_text(self, content):
        """Render the content's markdown source to HTML."""
        markdown_extras = ["target-blank-links", "fenced-code-blocks"]
        return markdown2.markdown(content.text, extras=markdown_extras)

    def get_tags(self, content):
        """Return the content's tags as name/slug value dicts."""
        return content.tags.all().values("name", "slug")

    def get_meta(self, content):
        """Return SEO meta fields derived from this content."""
        tag_names = [tag.name for tag in content.tags.all()]
        return {
            "title": content.title,
            "description": content.summary,
            "keywords": ", ".join(tag_names),
            "canonical": self.get_website_url(content),
        }
| 28.8
| 77
| 0.596528
|
from rest_framework import serializers
import markdown2
from .models import Content
from omaralbeik import server_variables as sv
class ContentSerializer(serializers.ModelSerializer):
    """Serializer for Content: adds rendered HTML, tags, web URL and SEO meta."""
    tags = serializers.SerializerMethodField()
    html_text = serializers.SerializerMethodField()
    website_url = serializers.SerializerMethodField()
    meta = serializers.SerializerMethodField()
    class Meta:
        model = Content
        fields = (
            "id",
            "title",
            "slug",
            "image_url",
            "summary",
            "text",
            "html_text",
            "website_url",
            "tags",
            "meta",
        )
    # return content's canonical web URL on the production client.
    def get_website_url(self, content):
        return "{}/{}".format(sv.CLIENT_PROD_URL, content.slug)
    # return content's markdown text rendered as HTML.
    def get_html_text(self, content):
        return markdown2.markdown(
            content.text, extras=["target-blank-links", "fenced-code-blocks"]
        )
    # return content's tags as name/slug dicts.
    def get_tags(self, content):
        return content.tags.all().values("name", "slug")
    # return content's SEO meta fields (title/description/keywords/canonical).
    def get_meta(self, content):
        return {
            "title": content.title,
            "description": content.summary,
            "keywords": ", ".join([tag.name for tag in content.tags.all()]),
            "canonical": self.get_website_url(content),
        }
| true
| true
|
f708fae5236a29c52f4f67d0421e7b9ff03707cb
| 3,962
|
py
|
Python
|
Scripts/GenCode_Explore_106.py
|
ShepherdCode/Soars2021
|
ab4f304eaa09e52d260152397a6c53d7a05457da
|
[
"MIT"
] | 1
|
2021-08-16T14:49:04.000Z
|
2021-08-16T14:49:04.000Z
|
Scripts/GenCode_Explore_106.py
|
ShepherdCode/Soars2021
|
ab4f304eaa09e52d260152397a6c53d7a05457da
|
[
"MIT"
] | null | null | null |
Scripts/GenCode_Explore_106.py
|
ShepherdCode/Soars2021
|
ab4f304eaa09e52d260152397a6c53d7a05457da
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# # GenCode Explore
#
# Explore the human RNA sequences from GenCode.
#
# Assume user downloaded files from GenCode 38 [FTP](http://ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/release_38/)
# to a subdirectory called data.
#
# Move the GenCodeLoader class to its own python module. Compare to 105.
# In[1]:
import time
def show_time():
    """Print the current local wall-clock time as 'YYYY-MM-DD HH:MM:SS TZ'."""
    now = time.localtime(time.time())
    print(time.strftime('%Y-%m-%d %H:%M:%S %Z', now))
show_time()
# In[2]:
import numpy as np
import pandas as pd
import sys
try:
from google.colab import drive
IN_COLAB = True
print("On Google CoLab, mount cloud-local file, get our code from GitHub.")
PATH='/content/drive/'
#drive.mount(PATH,force_remount=True) # hardly ever need this
drive.mount(PATH) # Google will require login credentials
DATAPATH=PATH+'My Drive/data/' # must end in "/"
import requests
s = requests.get('https://raw.githubusercontent.com/ShepherdCode/Soars2021/master/SimTools/RNA_describe.py')
with open('RNA_describe.py', 'w') as f:
f.write(s.text) # writes to cloud local, delete the file later?
from RNA_describe import ORF_counter
from RNA_describe import assert_imported_RNA_describe
from GenCodeTools import GenCodeLoader
except:
print("CoLab not working. On my PC, use relative paths.")
IN_COLAB = False
DATAPATH='../data/' # must end in "/"
sys.path.append("..") # append parent dir in order to use sibling dirs
from SimTools.RNA_describe import ORF_counter
from SimTools.RNA_describe import assert_imported_RNA_describe
from SimTools.GenCodeTools import GenCodeLoader
MODELPATH="BestModel" # saved on cloud instance and lost after logout
#MODELPATH=DATAPATH+MODELPATH # saved on Google Drive but requires login
if not assert_imported_RNA_describe():
print("ERROR: Cannot use RNA_describe.")
# In[3]:
PC_FILENAME='gencode.v38.pc_transcripts.fa.gz'
NC_FILENAME='gencode.v38.lncRNA_transcripts.fa.gz'
# ## Load the GenCode data.
# Warning: GenCode has
# over 100K protein-coding RNA (mRNA)
# and almost 50K non-coding RNA (lncRNA).
# In[4]:
# Full GenCode ver 38 human is 106143 pc + 48752 nc and loads in 7 sec.
# Expect fewer transcripts if special filtering is used.
# Build absolute paths to the GenCode FASTA archives.
PC_FULLPATH=DATAPATH+PC_FILENAME
NC_FULLPATH=DATAPATH+NC_FILENAME
loader=GenCodeLoader()
show_time()
# Protein-coding transcripts: label 1, require annotated UTRs.
loader.set_label(1)
loader.set_check_list(None)
loader.set_check_utr(True)
pcdf=loader.load_file(PC_FULLPATH)
print("PC seqs loaded:",len(pcdf))
show_time()
# Non-coding transcripts: label 0, no UTR filtering.
loader.set_label(0)
loader.set_check_list(None)
loader.set_check_utr(False)
ncdf=loader.load_file(NC_FULLPATH)
print("NC seqs loaded:",len(ncdf))
show_time()
# In[5]:
print("Sorting PC...")
pcdf.sort_values('seqlen', ascending=True, inplace=True)
print("Sorting NC...")
ncdf.sort_values('seqlen', ascending=True, inplace=True)
# In[6]:
ncdf
# ## Look for short ORFs
# In[7]:
def show_short(df, too_short):
    """Count transcripts longer than 200 nt whose longest ORF is <= too_short.

    Expects a DataFrame with 'sequence', 'seqlen' and 'tid' columns; prints a
    running tally every 10000 rows and a final summary line.

    :param df: pandas DataFrame of transcripts.
    :param too_short: inclusive ORF-length threshold.
    """
    oc = ORF_counter()
    count = len(df)
    shorties = 0
    for pos in range(0, count):
        sequence = df.iloc[pos]['sequence']
        seqlen = df.iloc[pos]['seqlen']
        oc.set_sequence(sequence)
        orflen = oc.get_max_orf_len()
        # BUGFIX: compare against the too_short parameter, not the module-level
        # TOO_SHORT global - the function previously ignored its own argument.
        # (Also dropped a redundant second read of 'seqlen'.)
        if seqlen > 200 and orflen <= too_short:
            shorties += 1
        if pos % 10000 == 0:
            print("Up to position %d, we have %d shorter than %d" % (pos, shorties, too_short))
    print("After all %d, we have %d shorter than %d" % (count, shorties, too_short))
TOO_SHORT=60
show_short(pcdf,TOO_SHORT)
# In[8]:
show_short(ncdf,TOO_SHORT)
# ## Conclusion
# With TOO_SHORT=30
# NON-CODING
# We have 589 shorter than 30, with most of them (504) shorter than 10000
#
# CODING
# Using check_utr and check_list on pcdf, we have 0 shorter than 30.
# Using check_utr only, we have 0 shorter than 30.
#
| 25.895425
| 122
| 0.702171
|
import time
def show_time():
    """Print the current wall-clock time in 'YYYY-MM-DD HH:MM:SS TZ' form."""
    stamp = time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime(time.time()))
    print(stamp)
show_time()
import numpy as np
import pandas as pd
import sys
try:
from google.colab import drive
IN_COLAB = True
print("On Google CoLab, mount cloud-local file, get our code from GitHub.")
PATH='/content/drive/'
drive.mount(PATH) DATAPATH=PATH+'My Drive/data/' import requests
s = requests.get('https://raw.githubusercontent.com/ShepherdCode/Soars2021/master/SimTools/RNA_describe.py')
with open('RNA_describe.py', 'w') as f:
f.write(s.text) from RNA_describe import ORF_counter
from RNA_describe import assert_imported_RNA_describe
from GenCodeTools import GenCodeLoader
except:
print("CoLab not working. On my PC, use relative paths.")
IN_COLAB = False
DATAPATH='../data/' sys.path.append("..") from SimTools.RNA_describe import ORF_counter
from SimTools.RNA_describe import assert_imported_RNA_describe
from SimTools.GenCodeTools import GenCodeLoader
MODELPATH="BestModel"
if not assert_imported_RNA_describe():
print("ERROR: Cannot use RNA_describe.")
PC_FILENAME='gencode.v38.pc_transcripts.fa.gz'
NC_FILENAME='gencode.v38.lncRNA_transcripts.fa.gz'
PC_FULLPATH=DATAPATH+PC_FILENAME
NC_FULLPATH=DATAPATH+NC_FILENAME
loader=GenCodeLoader()
show_time()
loader.set_label(1)
loader.set_check_list(None)
loader.set_check_utr(True)
pcdf=loader.load_file(PC_FULLPATH)
print("PC seqs loaded:",len(pcdf))
show_time()
loader.set_label(0)
loader.set_check_list(None)
loader.set_check_utr(False)
ncdf=loader.load_file(NC_FULLPATH)
print("NC seqs loaded:",len(ncdf))
show_time()
print("Sorting PC...")
pcdf.sort_values('seqlen', ascending=True, inplace=True)
print("Sorting NC...")
ncdf.sort_values('seqlen', ascending=True, inplace=True)
ncdf
def show_short(df, too_short):
    """Count transcripts >200 nt whose longest ORF is <= too_short, with progress output.

    :param df: pandas DataFrame with 'sequence', 'seqlen' and 'tid' columns.
    :param too_short: inclusive ORF-length threshold.
    """
    oc = ORF_counter()
    count = len(df)
    shorties = 0
    for pos in range(0, count):
        sequence = df.iloc[pos]['sequence']
        seqlen = df.iloc[pos]['seqlen']
        oc.set_sequence(sequence)
        orflen = oc.get_max_orf_len()
        # BUGFIX: use the too_short parameter instead of the TOO_SHORT global;
        # the parameter was previously ignored. Unused 'seqid' local removed.
        if seqlen > 200 and orflen <= too_short:
            shorties += 1
        if pos % 10000 == 0:
            print("Up to position %d, we have %d shorter than %d" % (pos, shorties, too_short))
    print("After all %d, we have %d shorter than %d" % (count, shorties, too_short))
TOO_SHORT=60
show_short(pcdf,TOO_SHORT)
show_short(ncdf,TOO_SHORT)
| true
| true
|
f708fb900227794707bf957d23d33551a3309da5
| 896
|
py
|
Python
|
python/mxnet/gluon/contrib/__init__.py
|
Vikas-kum/incubator-mxnet
|
ba02bf2fe2da423caa59ddb3fd5e433b90b730bf
|
[
"Apache-2.0"
] | 64
|
2021-05-02T14:42:34.000Z
|
2021-05-06T01:35:03.000Z
|
python/mxnet/gluon/contrib/__init__.py
|
Vikas-kum/incubator-mxnet
|
ba02bf2fe2da423caa59ddb3fd5e433b90b730bf
|
[
"Apache-2.0"
] | 187
|
2018-03-16T23:44:43.000Z
|
2021-12-14T21:19:54.000Z
|
python/mxnet/gluon/contrib/__init__.py
|
Vikas-kum/incubator-mxnet
|
ba02bf2fe2da423caa59ddb3fd5e433b90b730bf
|
[
"Apache-2.0"
] | 51
|
2019-07-12T05:10:25.000Z
|
2021-07-28T16:19:06.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
"""Contrib neural network module."""
from . import nn
from . import rnn
from . import data
| 34.461538
| 62
| 0.761161
|
from . import nn
from . import rnn
from . import data
| true
| true
|
f708fca358a07d2554ea49c3a9e960e30af22afa
| 4,693
|
py
|
Python
|
spektral/datasets/delaunay.py
|
dbusbridge/spektral
|
83eaa381a263d0a217692b6f1018388946e85c45
|
[
"MIT"
] | 1
|
2020-06-25T03:29:30.000Z
|
2020-06-25T03:29:30.000Z
|
spektral/datasets/delaunay.py
|
kprzybylapara/pylint
|
a95807603c2bb96c80f34d326f663273c72ca3fc
|
[
"MIT"
] | null | null | null |
spektral/datasets/delaunay.py
|
kprzybylapara/pylint
|
a95807603c2bb96c80f34d326f663273c72ca3fc
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
import numpy as np
from scipy.spatial import Delaunay
from spektral.utils import label_to_one_hot, numpy_to_nx
RETURN_TYPES = {'numpy', 'networkx'}
MAX_K = 7 # Maximum number of nodes in a graph
def generate_data(return_type='networkx', classes=0, n_samples_in_class=1000,
                  n_nodes=7, support_low=0., support_high=10., drift_amount=1.0,
                  one_hot_labels=True, support=None, seed=None):
    """
    Generates a dataset of Delaunay triangulations as described by
    [Zambon et al. (2017)](https://arxiv.org/abs/1706.06941).
    Note that this function is basically deprecated and will change soon.
    :param return_type: `'networkx'` or `'numpy'`, data format to return;
    :param classes: indices of the classes to load (integer, or list of integers
    between 0 and 20);
    :param n_samples_in_class: number of generated samples per class;
    :param n_nodes: number of nodes in a graph;
    :param support_low: lower bound of the uniform distribution from which the
    support is generated;
    :param support_high: upper bound of the uniform distribution from which the
    support is generated;
    :param drift_amount: coefficient to control the amount of change between
    classes;
    :param one_hot_labels: one-hot encode dataset labels;
    :param support: custom support to use instead of generating it randomly;
    :param seed: random numpy seed;
    :return: if `return_type='networkx'`, a list of graphs in Networkx format,
    and an array containing labels; if `return_type='numpy'`, the adjacency
    matrix, node features, and an array containing labels.
    """
    if return_type not in RETURN_TYPES:
        raise ValueError('Possible return_type: {}'.format(RETURN_TYPES))
    if isinstance(classes, int):
        classes = [classes]
    if max(classes) > 20 or min(classes) < 0:
        raise ValueError('Class indices must be between 0 and 20')
    # Process classes in reverse, keeping class 0 (the undrifted one) first.
    r_classes = list(reversed(classes))
    if r_classes[-1] == 0:
        r_classes.insert(0, r_classes.pop(-1))

    # Support points shared by every sample.
    np.random.seed(seed)
    if support is None:
        support = np.random.uniform(support_low, support_high, (1, n_nodes, 2))
    else:
        try:
            assert support.shape == (1, n_nodes, 2)
        except AssertionError:
            print('The given support doesn\'t have shape (1, n_nodes, 2) as'
                  'expected. Attempting to reshape.')
            support = support.reshape(1, n_nodes, 2)

    # Compute node features.
    node_features = []
    for idx, i in enumerate(r_classes):
        if i == 0:
            # Class 0: gaussian noise around the unperturbed support.
            concept_0 = np.repeat(support, n_samples_in_class, 0)
            noise_0 = np.random.normal(0, 1, (n_samples_in_class, n_nodes, 2))
            class_0 = concept_0 + noise_0
            node_features.append(class_0)
        else:
            # Drifted classes: move each support point along a random direction,
            # with a radius shrinking geometrically in the class index.
            radius = 10. * ((2. / 3.) ** (drift_amount * (i - 1)))
            phase = np.random.uniform(0, 2 * np.pi, (n_nodes, 1))
            perturb_i_x = radius * np.cos(phase)
            perturb_i_y = radius * np.sin(phase)
            perturb_i = np.concatenate((perturb_i_x, perturb_i_y), axis=-1)
            support_i = support + perturb_i
            concept_i = np.repeat(support_i, n_samples_in_class, 0)
            noise_i = np.random.normal(0, 1, (n_samples_in_class, n_nodes, 2))
            class_i = concept_i + noise_i
            node_features.append(class_i)
    node_features = np.array(node_features).reshape((-1, n_nodes, 2))

    # One adjacency matrix per sample, from its Delaunay triangulation.
    adjacency = np.array([compute_adj(nf) for nf in node_features])

    # Compute labels.
    labels = np.repeat(classes, n_samples_in_class)
    if one_hot_labels:
        labels = label_to_one_hot(labels, labels=classes)

    # BUGFIX: compare strings with '==', not 'is'. Identity comparison of str
    # literals is implementation-dependent and raises a SyntaxWarning.
    if return_type == 'numpy':
        return adjacency, node_features, labels
    elif return_type == 'networkx':
        graphs = numpy_to_nx(adjacency, node_features=node_features, nf_name='coords')
        return graphs, labels
    else:
        raise NotImplementedError
def compute_adj(x):
    """
    Computes the Delaunay triangulation of the given points
    :param x: array of shape (num_nodes, 2)
    :return: the computed adjacency matrix
    """
    tri = Delaunay(x)
    # FIX: Delaunay.vertices was deprecated (and later removed) by SciPy in
    # favour of the equivalent .simplices attribute.
    simplices = tri.simplices
    # Every pair of vertices sharing a triangle gets an edge (three pairs
    # per simplex: columns 0-1, 1-2 and 0-2).
    edges_explicit = np.concatenate((simplices[:, :2],
                                     simplices[:, 1:],
                                     simplices[:, ::2]), axis=0)
    adj = np.zeros((x.shape[0], x.shape[0]))
    adj[edges_explicit[:, 0], edges_explicit[:, 1]] = 1.
    # Symmetrize and clip to a binary undirected adjacency matrix.
    return np.clip(adj + adj.T, 0, 1)
| 39.436975
| 86
| 0.644364
|
from __future__ import absolute_import
import numpy as np
from scipy.spatial import Delaunay
from spektral.utils import label_to_one_hot, numpy_to_nx
RETURN_TYPES = {'numpy', 'networkx'}
MAX_K = 7
def generate_data(return_type='networkx', classes=0, n_samples_in_class=1000,
                  n_nodes=7, support_low=0., support_high=10., drift_amount=1.0,
                  one_hot_labels=True, support=None, seed=None):
    """Generate a dataset of Delaunay-triangulation graphs (Zambon et al., 2017).

    :param return_type: 'networkx' or 'numpy', output format;
    :param classes: class index or list of indices in [0, 20];
    :param n_samples_in_class: samples generated per class;
    :param n_nodes: nodes per graph;
    :param support_low: lower bound of the uniform support distribution;
    :param support_high: upper bound of the uniform support distribution;
    :param drift_amount: controls the drift between consecutive classes;
    :param one_hot_labels: one-hot encode the labels;
    :param support: optional pre-built support of shape (1, n_nodes, 2);
    :param seed: numpy random seed;
    :return: (graphs, labels) for 'networkx', or
        (adjacency, node_features, labels) for 'numpy'.
    """
    if return_type not in RETURN_TYPES:
        raise ValueError('Possible return_type: {}'.format(RETURN_TYPES))
    if isinstance(classes, int):
        classes = [classes]
    if max(classes) > 20 or min(classes) < 0:
        raise ValueError('Class indices must be between 0 and 20')
    # Process classes in reverse, keeping class 0 (the undrifted one) first.
    r_classes = list(reversed(classes))
    if r_classes[-1] == 0:
        r_classes.insert(0, r_classes.pop(-1))
    # Build (or validate) the support points shared by all samples.
    np.random.seed(seed)
    if support is None:
        support = np.random.uniform(support_low, support_high, (1, n_nodes, 2))
    else:
        try:
            assert support.shape == (1, n_nodes, 2)
        except AssertionError:
            print('The given support doesn\'t have shape (1, n_nodes, 2) as'
                  'expected. Attempting to reshape.')
            support = support.reshape(1, n_nodes, 2)
    node_features = []
    for idx, i in enumerate(r_classes):
        if i == 0:
            # Class 0: gaussian noise around the unperturbed support.
            concept_0 = np.repeat(support, n_samples_in_class, 0)
            noise_0 = np.random.normal(0, 1, (n_samples_in_class, n_nodes, 2))
            node_features.append(concept_0 + noise_0)
        else:
            # Drifted class: move each support point along a random direction.
            radius = 10. * ((2. / 3.) ** (drift_amount * (i - 1)))
            phase = np.random.uniform(0, 2 * np.pi, (n_nodes, 1))
            perturb_i_x = radius * np.cos(phase)
            perturb_i_y = radius * np.sin(phase)
            perturb_i = np.concatenate((perturb_i_x, perturb_i_y), axis=-1)
            support_i = support + perturb_i
            concept_i = np.repeat(support_i, n_samples_in_class, 0)
            noise_i = np.random.normal(0, 1, (n_samples_in_class, n_nodes, 2))
            node_features.append(concept_i + noise_i)
    node_features = np.array(node_features).reshape((-1, n_nodes, 2))
    # One adjacency matrix per sample, from its Delaunay triangulation.
    adjacency = np.array([compute_adj(nf) for nf in node_features])
    labels = np.repeat(classes, n_samples_in_class)
    if one_hot_labels:
        labels = label_to_one_hot(labels, labels=classes)
    # BUGFIX: string comparison must use '==', not identity ('is').
    if return_type == 'numpy':
        return adjacency, node_features, labels
    elif return_type == 'networkx':
        graphs = numpy_to_nx(adjacency, node_features=node_features, nf_name='coords')
        return graphs, labels
    else:
        raise NotImplementedError
def compute_adj(x):
    """Compute the Delaunay-triangulation adjacency matrix of the given points.

    :param x: array of shape (num_nodes, 2)
    :return: binary symmetric adjacency matrix of shape (num_nodes, num_nodes)
    """
    tri = Delaunay(x)
    # FIX: Delaunay.vertices was deprecated (and later removed) by SciPy in
    # favour of the equivalent .simplices attribute.
    simplices = tri.simplices
    # Each simplex contributes its three vertex pairs as edges.
    edges_explicit = np.concatenate((simplices[:, :2],
                                     simplices[:, 1:],
                                     simplices[:, ::2]), axis=0)
    adj = np.zeros((x.shape[0], x.shape[0]))
    adj[edges_explicit[:, 0], edges_explicit[:, 1]] = 1.
    # Symmetrize and clip to a binary undirected adjacency matrix.
    return np.clip(adj + adj.T, 0, 1)
| true
| true
|
f708fe9ca7fe20dd9c734aeeb55a3dff1eb26bab
| 12,442
|
py
|
Python
|
.tox/scenario/lib/python2.7/site-packages/testrepository/ui/cli.py
|
bdrich/neutron-lbaas
|
b4711abfe0207c4fdd5d7fb7ecbf017e753abbfd
|
[
"Apache-2.0"
] | null | null | null |
.tox/scenario/lib/python2.7/site-packages/testrepository/ui/cli.py
|
bdrich/neutron-lbaas
|
b4711abfe0207c4fdd5d7fb7ecbf017e753abbfd
|
[
"Apache-2.0"
] | null | null | null |
.tox/scenario/lib/python2.7/site-packages/testrepository/ui/cli.py
|
bdrich/neutron-lbaas
|
b4711abfe0207c4fdd5d7fb7ecbf017e753abbfd
|
[
"Apache-2.0"
] | 1
|
2020-07-21T02:18:23.000Z
|
2020-07-21T02:18:23.000Z
|
#
# Copyright (c) 2009 Testrepository Contributors
#
# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
# license at the users choice. A copy of both licenses are available in the
# project source as Apache-2.0 and BSD. You may not use this file except in
# compliance with one of these two licences.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# license you chose for the specific language governing permissions and
# limitations under that license.
"""A command line UI for testrepository."""
import io
import os
import signal
import subunit
import sys
from extras import try_import
v2_avail = try_import('subunit.ByteStreamToStreamResult')
import testtools
from testtools import ExtendedToStreamDecorator, StreamToExtendedDecorator
from testtools.compat import unicode_output_stream, _u
from testrepository import ui
from testrepository.commands import get_command_parser
class CLITestResult(ui.BaseUITestResult):
    """A TestResult for the CLI."""
    def __init__(self, ui, get_id, stream, previous_run=None, filter_tags=None):
        """Construct a CLITestResult writing to stream.
        :param filter_tags: Tags that should be used to filter tests out. When
            a tag in this set is present on a test outcome, the test is not
            counted towards the test run count. If the test errors, then it is
            still counted and the error is still shown.
        """
        super(CLITestResult, self).__init__(ui, get_id, previous_run)
        # Wrap the raw stream so unicode is always safe to write.
        self.stream = unicode_output_stream(stream)
        # Separator lines used when formatting error blocks.
        self.sep1 = _u('=' * 70 + '\n')
        self.sep2 = _u('-' * 70 + '\n')
        self.filter_tags = filter_tags or frozenset()
        # Only these outcome states may be filtered out by tag; failures and
        # errors are always counted (see status()).
        self.filterable_states = set(['success', 'uxsuccess', 'xfail', 'skip'])
    def _format_error(self, label, test, error_text, test_tags=None):
        """Return one formatted error block for display.
        :param label: heading label, e.g. 'FAIL'.
        :param test: the test whose id() is shown in the heading.
        :param error_text: the error/traceback text to include.
        :param test_tags: optional tags shown under the heading.
        """
        test_tags = test_tags or ()
        tags = _u(' ').join(test_tags)
        if tags:
            tags = _u('tags: %s\n') % tags
        return _u('').join([
            self.sep1,
            _u('%s: %s\n') % (label, test.id()),
            tags,
            self.sep2,
            error_text,
            ])
    def status(self, test_id=None, test_status=None, test_tags=None,
        runnable=True, file_name=None, file_bytes=None, eof=False,
        mime_type=None, route_code=None, timestamp=None):
        """Handle one StreamResult status event.
        Failures are written to the stream immediately; outcomes in
        self.filterable_states that carry a filtered tag are subtracted
        from the run count afterwards.
        """
        super(CLITestResult, self).status(test_id=test_id,
            test_status=test_status, test_tags=test_tags, runnable=runnable,
            file_name=file_name, file_bytes=file_bytes, eof=eof,
            mime_type=mime_type, route_code=route_code, timestamp=timestamp)
        if test_status == 'fail':
            self.stream.write(
                self._format_error(_u('FAIL'), *(self._summary.errors[-1]),
                    test_tags=test_tags))
        if test_status not in self.filterable_states:
            return
        if test_tags and test_tags.intersection(self.filter_tags):
            self._summary.testsRun -= 1
class UI(ui.AbstractUI):
    """A command line user interface."""
    def __init__(self, argv, stdin, stdout, stderr):
        """Create a command line UI.
        :param argv: Arguments from the process invocation.
        :param stdin: The stream for stdin.
        :param stdout: The stream for stdout.
        :param stderr: The stream for stderr.
        """
        self._argv = argv
        self._stdin = stdin
        self._stdout = stdout
        self._stderr = stderr
        # Lazily-created binary wrapper around stdout (see output_stream).
        self._binary_stdout = None
    def _iter_streams(self, stream_type):
        """Yield the input stream(s) matching stream_type, if any."""
        # Only the first stream declared in a command can be accepted at the
        # moment - as there is only one stdin and alternate streams are not yet
        # configurable in the CLI.
        first_stream_type = self.cmd.input_streams[0]
        if (stream_type != first_stream_type
            and stream_type != first_stream_type[:-1]):
            return
        yield subunit.make_stream_binary(self._stdin)
    def make_result(self, get_id, test_command, previous_run=None):
        """Return a (result, summary) pair for reporting a test run.
        With --subunit, the raw stream is forwarded verbatim and the summary
        is an empty always-successful one; otherwise a CLITestResult applies
        the command's tag filters and renders failures for humans.
        """
        if getattr(self.options, 'subunit', False):
            if v2_avail:
                serializer = subunit.StreamResultToBytes(self._stdout)
            else:
                serializer = StreamToExtendedDecorator(
                    subunit.TestProtocolClient(self._stdout))
            # By pass user transforms - just forward it all,
            result = serializer
            # and interpret everything as success.
            summary = testtools.StreamSummary()
            summary.startTestRun()
            summary.stopTestRun()
            return result, summary
        else:
            # Apply user defined transforms.
            filter_tags = test_command.get_filter_tags()
            output = CLITestResult(self, get_id, self._stdout, previous_run,
                filter_tags=filter_tags)
            summary = output._summary
            return output, summary
    def output_error(self, error_tuple):
        """Report an exception (sys.exc_info-style tuple) to the user.
        If TESTR_PDB is set in the environment, the full traceback is shown
        and a pdb post-mortem session is started first.
        """
        if 'TESTR_PDB' in os.environ:
            import traceback
            self._stderr.write(_u('').join(traceback.format_tb(error_tuple[2])))
            self._stderr.write(_u('\n'))
            # This is terrible: it is because on Python2.x pdb writes bytes to
            # its pipes, and the test suite uses io.StringIO that refuse bytes.
            import pdb;
            if sys.version_info[0]==2:
                if isinstance(self._stdout, io.StringIO):
                    write = self._stdout.write
                    def _write(text):
                        return write(text.decode('utf8'))
                    self._stdout.write = _write
            p = pdb.Pdb(stdin=self._stdin, stdout=self._stdout)
            p.reset()
            p.interaction(None, error_tuple[2])
        error_type = str(error_tuple[1])
        # XX: Python2.
        if type(error_type) is bytes:
            error_type = error_type.decode('utf8')
        self._stderr.write(error_type + _u('\n'))
    def output_rest(self, rest_string):
        """Write a ReST document to stdout, ensuring a trailing newline."""
        self._stdout.write(rest_string)
        if not rest_string.endswith('\n'):
            self._stdout.write(_u('\n'))
    def output_stream(self, stream):
        """Copy a binary stream to stdout in 64KiB chunks."""
        if not self._binary_stdout:
            self._binary_stdout = subunit.make_stream_binary(self._stdout)
        contents = stream.read(65536)
        assert type(contents) is bytes, \
            "Bad stream contents %r" % type(contents)
        # If there are unflushed bytes in the text wrapper, we need to sync..
        self._stdout.flush()
        while contents:
            self._binary_stdout.write(contents)
            contents = stream.read(65536)
        self._binary_stdout.flush()
    def output_table(self, table):
        """Render a table (first row is the header) as aligned text columns."""
        # stringify
        contents = []
        for row in table:
            new_row = []
            for column in row:
                new_row.append(str(column))
            contents.append(new_row)
        if not contents:
            return
        # Column widths are the widest cell seen in each column.
        widths = [0] * len(contents[0])
        for row in contents:
            for idx, column in enumerate(row):
                if widths[idx] < len(column):
                    widths[idx] = len(column)
        # Show a row
        outputs = []
        def show_row(row):
            for idx, column in enumerate(row):
                outputs.append(column)
                if idx == len(row) - 1:
                    outputs.append('\n')
                    return
                # spacers for the next column
                outputs.append(' '*(widths[idx]-len(column)))
                outputs.append(' ')
        show_row(contents[0])
        # title spacer
        for idx, width in enumerate(widths):
            outputs.append('-'*width)
            if idx == len(widths) - 1:
                outputs.append('\n')
                continue
            outputs.append(' ')
        for row in contents[1:]:
            show_row(row)
        self._stdout.write(_u('').join(outputs))
    def output_tests(self, tests):
        """Write one test id per line to stdout."""
        for test in tests:
            # On Python 2.6 id() returns bytes.
            id_str = test.id()
            if type(id_str) is bytes:
                id_str = id_str.decode('utf8')
            self._stdout.write(id_str)
            self._stdout.write(_u('\n'))
    def output_values(self, values):
        """Write (label, value) pairs as a single 'k=v, k=v' line."""
        outputs = []
        for label, value in values:
            outputs.append('%s=%s' % (label, value))
        self._stdout.write(_u('%s\n' % ', '.join(outputs)))
    def _format_summary(self, successful, tests, tests_delta,
                        time, time_delta, values):
        """Build the human-readable summary line for a run.
        :param successful: whether the run passed overall.
        :param tests: test count (may be falsy to omit).
        :param tests_delta: change in test count vs the previous run.
        :param time: run duration in seconds (may be falsy to omit).
        :param time_delta: change in duration vs the previous run.
        :param values: iterable of (name, value, delta) extra counters.
        """
        # We build the string by appending to a list of strings and then
        # joining trivially at the end. Avoids expensive string concatenation.
        summary = []
        a = summary.append
        if tests:
            a("Ran %s" % (tests,))
            if tests_delta:
                a(" (%+d)" % (tests_delta,))
            a(" tests")
        if time:
            if not summary:
                a("Ran tests")
            a(" in %0.3fs" % (time,))
            if time_delta:
                a(" (%+0.3fs)" % (time_delta,))
        if summary:
            a("\n")
        if successful:
            a('PASSED')
        else:
            a('FAILED')
        if values:
            a(' (')
            values_strings = []
            for name, value, delta in values:
                value_str = '%s=%s' % (name, value)
                if delta:
                    value_str += ' (%+d)' % (delta,)
                values_strings.append(value_str)
            a(', '.join(values_strings))
            a(')')
        return _u('').join(summary)
    def output_summary(self, successful, tests, tests_delta,
                       time, time_delta, values):
        """Write the formatted run summary (see _format_summary) to stdout."""
        self._stdout.write(
            self._format_summary(
                successful, tests, tests_delta, time, time_delta, values))
        self._stdout.write(_u('\n'))
    def _check_cmd(self):
        """Parse self._argv for the current command.
        Sets self.here, self.options and self.arguments as side effects.
        :return: True if parsing succeeded and no unexpected arguments remain.
        """
        parser = get_command_parser(self.cmd)
        parser.add_option("-d", "--here", dest="here",
            help="Set the directory or url that a command should run from. "
            "This affects all default path lookups but does not affect paths "
            "supplied to the command.", default=os.getcwd(), type=str)
        parser.add_option("-q", "--quiet", action="store_true", default=False,
            help="Turn off output other than the primary output for a command "
            "and any errors.")
        # yank out --, as optparse makes it silly hard to just preserve it.
        try:
            where_dashdash = self._argv.index('--')
            opt_argv = self._argv[:where_dashdash]
            other_args = self._argv[where_dashdash:]
        except ValueError:
            opt_argv = self._argv
            other_args = []
        if '-h' in opt_argv or '--help' in opt_argv or '-?' in opt_argv:
            self.output_rest(parser.format_help())
            # Fugly, but its what optparse does: we're just overriding the
            # output path.
            raise SystemExit(0)
        options, args = parser.parse_args(opt_argv)
        args += other_args
        self.here = options.here
        self.options = options
        parsed_args = {}
        failed = False
        for arg in self.cmd.args:
            try:
                parsed_args[arg.name] = arg.parse(args)
            except ValueError:
                exc_info = sys.exc_info()
                failed = True
                self._stderr.write(_u("%s\n") % str(exc_info[1]))
                break
        if not failed:
            self.arguments = parsed_args
            if args != []:
                self._stderr.write(_u("Unexpected arguments: %r\n") % args)
        return not failed and args == []
    def _clear_SIGPIPE(self):
        """Clear SIGPIPE : child processes expect the default handler."""
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
    def subprocess_Popen(self, *args, **kwargs):
        """Wrapper for subprocess.Popen that resets SIGPIPE on POSIX."""
        import subprocess
        if os.name == "posix":
            # GZ 2010-12-04: Should perhaps check for existing preexec_fn and
            # combine so both will get called.
            kwargs['preexec_fn'] = self._clear_SIGPIPE
        return subprocess.Popen(*args, **kwargs)
| 39.003135
| 80
| 0.578364
|
import io
import os
import signal
import subunit
import sys
from extras import try_import
v2_avail = try_import('subunit.ByteStreamToStreamResult')
import testtools
from testtools import ExtendedToStreamDecorator, StreamToExtendedDecorator
from testtools.compat import unicode_output_stream, _u
from testrepository import ui
from testrepository.commands import get_command_parser
class CLITestResult(ui.BaseUITestResult):
    """A TestResult for the CLI: renders failures and applies tag filters."""
    def __init__(self, ui, get_id, stream, previous_run=None, filter_tags=None):
        """Construct a CLITestResult writing to stream.
        :param filter_tags: tags whose (non-error) outcomes are excluded
            from the test run count.
        """
        super(CLITestResult, self).__init__(ui, get_id, previous_run)
        # Wrap the raw stream so unicode is always safe to write.
        self.stream = unicode_output_stream(stream)
        # Separator lines used when formatting error blocks.
        self.sep1 = _u('=' * 70 + '\n')
        self.sep2 = _u('-' * 70 + '\n')
        self.filter_tags = filter_tags or frozenset()
        # Only these outcome states may be filtered out by tag.
        self.filterable_states = set(['success', 'uxsuccess', 'xfail', 'skip'])
    def _format_error(self, label, test, error_text, test_tags=None):
        """Return one formatted error block headed by label and the test id."""
        test_tags = test_tags or ()
        tags = _u(' ').join(test_tags)
        if tags:
            tags = _u('tags: %s\n') % tags
        return _u('').join([
            self.sep1,
            _u('%s: %s\n') % (label, test.id()),
            tags,
            self.sep2,
            error_text,
            ])
    def status(self, test_id=None, test_status=None, test_tags=None,
        runnable=True, file_name=None, file_bytes=None, eof=False,
        mime_type=None, route_code=None, timestamp=None):
        """Handle one StreamResult status event.
        Failures are written immediately; filterable outcomes carrying a
        filtered tag are subtracted from the run count.
        """
        super(CLITestResult, self).status(test_id=test_id,
            test_status=test_status, test_tags=test_tags, runnable=runnable,
            file_name=file_name, file_bytes=file_bytes, eof=eof,
            mime_type=mime_type, route_code=route_code, timestamp=timestamp)
        if test_status == 'fail':
            self.stream.write(
                self._format_error(_u('FAIL'), *(self._summary.errors[-1]),
                    test_tags=test_tags))
        if test_status not in self.filterable_states:
            return
        if test_tags and test_tags.intersection(self.filter_tags):
            self._summary.testsRun -= 1
class UI(ui.AbstractUI):
    """A command line user interface.

    Reads input streams from stdin, writes reports/tables/summaries to
    stdout and errors to stderr.
    """

    def __init__(self, argv, stdin, stdout, stderr):
        """Create a command line UI.

        :param argv: Arguments from the process invocation.
        :param stdin: The stream for stdin.
        :param stdout: The stream for stdout.
        :param stderr: The stream for stderr.
        """
        self._argv = argv
        self._stdin = stdin
        self._stdout = stdout
        self._stderr = stderr
        # Binary view of stdout, created on demand by output_stream().
        self._binary_stdout = None

    def _iter_streams(self, stream_type):
        # Only the first stream type declared by the command can be supplied:
        # everything arrives on the single stdin stream.
        first_stream_type = self.cmd.input_streams[0]
        # Accept the exact type name or the name with its final character
        # stripped (presumably a multiplicity marker — TODO confirm).
        if (stream_type != first_stream_type
                and stream_type != first_stream_type[:-1]):
            return
        yield subunit.make_stream_binary(self._stdin)

    def make_result(self, get_id, test_command, previous_run=None):
        """Return a (result, summary) pair for reporting a test run.

        With the --subunit option set, the raw subunit stream is serialized
        to stdout; otherwise a human-readable CLITestResult is used.
        """
        if getattr(self.options, 'subunit', False):
            if v2_avail:
                serializer = subunit.StreamResultToBytes(self._stdout)
            else:
                serializer = StreamToExtendedDecorator(
                    subunit.TestProtocolClient(self._stdout))
            result = serializer
            # A side-channel summary so the CLI can still tell whether the
            # (passed-through) run succeeded.
            summary = testtools.StreamSummary()
            summary.startTestRun()
            summary.stopTestRun()
            return result, summary
        else:
            filter_tags = test_command.get_filter_tags()
            output = CLITestResult(self, get_id, self._stdout, previous_run,
                filter_tags=filter_tags)
            summary = output._summary
            return output, summary

    def output_error(self, error_tuple):
        """Write a (type, value, traceback) tuple to stderr.

        When TESTR_PDB is set in the environment, first print the traceback
        and drop into an interactive pdb session on it.
        """
        if 'TESTR_PDB' in os.environ:
            import traceback
            self._stderr.write(_u('').join(traceback.format_tb(error_tuple[2])))
            self._stderr.write(_u('\n'))
            import pdb;
            # On Python 2 a StringIO stdout only accepts unicode; wrap its
            # write() so pdb's byte output is decoded first.
            if sys.version_info[0]==2:
                if isinstance(self._stdout, io.StringIO):
                    write = self._stdout.write
                    def _write(text):
                        return write(text.decode('utf8'))
                    self._stdout.write = _write
            p = pdb.Pdb(stdin=self._stdin, stdout=self._stdout)
            p.reset()
            p.interaction(None, error_tuple[2])
        error_type = str(error_tuple[1])
        # str() may hand back bytes on Python 2; normalise to unicode.
        if type(error_type) is bytes:
            error_type = error_type.decode('utf8')
        self._stderr.write(error_type + _u('\n'))

    def output_rest(self, rest_string):
        """Write a ReST document to stdout, ensuring a trailing newline."""
        self._stdout.write(rest_string)
        if not rest_string.endswith('\n'):
            self._stdout.write(_u('\n'))

    def output_stream(self, stream):
        """Copy a binary stream to stdout in 64KiB chunks."""
        if not self._binary_stdout:
            self._binary_stdout = subunit.make_stream_binary(self._stdout)
        contents = stream.read(65536)
        assert type(contents) is bytes, \
            "Bad stream contents %r" % type(contents)
        # Flush any buffered text output before raw bytes are written to the
        # underlying stream.
        self._stdout.flush()
        while contents:
            self._binary_stdout.write(contents)
            contents = stream.read(65536)
        self._binary_stdout.flush()

    def output_table(self, table):
        """Render rows (first row is the header) as aligned plain text."""
        # Stringify every cell up front so column widths can be measured.
        contents = []
        for row in table:
            new_row = []
            for column in row:
                new_row.append(str(column))
            contents.append(new_row)
        if not contents:
            return
        widths = [0] * len(contents[0])
        for row in contents:
            for idx, column in enumerate(row):
                if widths[idx] < len(column):
                    widths[idx] = len(column)
        outputs = []
        def show_row(row):
            # Emit one row, padding every column but the last to its width.
            for idx, column in enumerate(row):
                outputs.append(column)
                if idx == len(row) - 1:
                    outputs.append('\n')
                    return
                outputs.append(' '*(widths[idx]-len(column)))
                outputs.append(' ')
        show_row(contents[0])
        # Underline the header with dashes, one run per column.
        for idx, width in enumerate(widths):
            outputs.append('-'*width)
            if idx == len(widths) - 1:
                outputs.append('\n')
                continue
            outputs.append(' ')
        for row in contents[1:]:
            show_row(row)
        self._stdout.write(_u('').join(outputs))

    def output_tests(self, tests):
        """Write one test id per line to stdout."""
        for test in tests:
            id_str = test.id()
            if type(id_str) is bytes:
                id_str = id_str.decode('utf8')
            self._stdout.write(id_str)
            self._stdout.write(_u('\n'))

    def output_values(self, values):
        """Write 'label=value' pairs, comma separated, on one line."""
        outputs = []
        for label, value in values:
            outputs.append('%s=%s' % (label, value))
        self._stdout.write(_u('%s\n' % ', '.join(outputs)))

    def _format_summary(self, successful, tests, tests_delta,
                        time, time_delta, values):
        """Build the run-summary string (counts, timing, PASSED/FAILED)."""
        # First line, e.g. "Ran 5 (+2) tests in 1.234s (-0.1s)".
        summary = []
        a = summary.append
        if tests:
            a("Ran %s" % (tests,))
            if tests_delta:
                a(" (%+d)" % (tests_delta,))
            a(" tests")
        if time:
            if not summary:
                a("Ran tests")
            a(" in %0.3fs" % (time,))
            if time_delta:
                a(" (%+0.3fs)" % (time_delta,))
        if summary:
            a("\n")
        # Second line, e.g. "FAILED (failures=1 (+1))".
        if successful:
            a('PASSED')
        else:
            a('FAILED')
        if values:
            a(' (')
            values_strings = []
            for name, value, delta in values:
                value_str = '%s=%s' % (name, value)
                if delta:
                    value_str += ' (%+d)' % (delta,)
                values_strings.append(value_str)
            a(', '.join(values_strings))
            a(')')
        return _u('').join(summary)

    def output_summary(self, successful, tests, tests_delta,
                       time, time_delta, values):
        """Write the formatted run summary plus a newline to stdout."""
        self._stdout.write(
            self._format_summary(
                successful, tests, tests_delta, time, time_delta, values))
        self._stdout.write(_u('\n'))

    def _check_cmd(self):
        """Parse self._argv for the current command.

        Sets self.here, self.options and self.arguments as side effects and
        returns True when parsing succeeded with no leftover arguments.
        """
        parser = get_command_parser(self.cmd)
        parser.add_option("-d", "--here", dest="here",
            help="Set the directory or url that a command should run from. "
            "This affects all default path lookups but does not affect paths "
            "supplied to the command.", default=os.getcwd(), type=str)
        parser.add_option("-q", "--quiet", action="store_true", default=False,
            help="Turn off output other than the primary output for a command "
            "and any errors.")
        # Anything after a literal '--' is never parsed as an option.
        try:
            where_dashdash = self._argv.index('--')
            opt_argv = self._argv[:where_dashdash]
            other_args = self._argv[where_dashdash:]
        except ValueError:
            opt_argv = self._argv
            other_args = []
        if '-h' in opt_argv or '--help' in opt_argv or '-?' in opt_argv:
            self.output_rest(parser.format_help())
            # Exit like optparse does for --help; we only override the
            # output path.
            raise SystemExit(0)
        options, args = parser.parse_args(opt_argv)
        args += other_args
        self.here = options.here
        self.options = options
        parsed_args = {}
        failed = False
        for arg in self.cmd.args:
            try:
                parsed_args[arg.name] = arg.parse(args)
            except ValueError:
                exc_info = sys.exc_info()
                failed = True
                self._stderr.write(_u("%s\n") % str(exc_info[1]))
                break
        if not failed:
            self.arguments = parsed_args
            if args != []:
                self._stderr.write(_u("Unexpected arguments: %r\n") % args)
        return not failed and args == []

    def _clear_SIGPIPE(self):
        """Clear SIGPIPE: child processes expect the default handler."""
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)

    def subprocess_Popen(self, *args, **kwargs):
        """subprocess.Popen wrapper that resets SIGPIPE in POSIX children."""
        import subprocess
        if os.name == "posix":
            # GZ 2010-12-04: Should perhaps check for existing preexec_fn and
            # combine so both will get called.
            kwargs['preexec_fn'] = self._clear_SIGPIPE
        return subprocess.Popen(*args, **kwargs)
| true
| true
|
f708ff0c051d3ee67c01661715510a72136a41d7
| 2,372
|
py
|
Python
|
TWLight/users/migrations/0076_auto_20210624_1015.py
|
aacaldwell/TWLight
|
68e6d0d81ddd52596025f15d2c9a75dcdf504734
|
[
"MIT"
] | 67
|
2017-12-14T22:27:48.000Z
|
2022-03-13T18:21:31.000Z
|
TWLight/users/migrations/0076_auto_20210624_1015.py
|
aacaldwell/TWLight
|
68e6d0d81ddd52596025f15d2c9a75dcdf504734
|
[
"MIT"
] | 433
|
2017-03-24T22:51:23.000Z
|
2022-03-31T19:36:22.000Z
|
TWLight/users/migrations/0076_auto_20210624_1015.py
|
Mahuton/TWLight
|
90b299d07b0479f21dc90e17b8d05f5a221b0de1
|
[
"MIT"
] | 105
|
2017-06-23T03:53:41.000Z
|
2022-03-30T17:24:29.000Z
|
# Generated by Django 3.1.12 on 2021-06-24 10:15
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration (Django 3.1.12): updates the set of
    # interface-language choices offered by UserProfile.lang.

    dependencies = [
        ("users", "0075_auto_20210607_1312"),
    ]

    operations = [
        migrations.AlterField(
            model_name="userprofile",
            name="lang",
            field=models.CharField(
                blank=True,
                # (language code, autonym) pairs.
                choices=[
                    ("ar", "العربية"),
                    ("as", "অসমীয়া"),
                    ("bcl", "Bikol Central"),
                    ("br", "brezhoneg"),
                    ("da", "dansk"),
                    ("dag", "dagbanli"),
                    ("de", "Deutsch"),
                    ("diq", "Zazaki"),
                    ("en", "English"),
                    ("en-gb", "British English"),
                    ("eo", "Esperanto"),
                    ("es", "español"),
                    ("fa", "فارسی"),
                    ("fi", "suomi"),
                    ("fr", "français"),
                    ("gu", "ગુજરાતી"),
                    ("guw", "gungbe"),
                    ("he", "עברית"),
                    ("hi", "हिन्दी"),
                    ("hy", "հայերեն"),
                    ("id", "Bahasa Indonesia"),
                    ("io", "Ido"),
                    ("it", "italiano"),
                    ("ja", "日本語"),
                    ("ko", "한국어"),
                    ("lv", "latviešu"),
                    ("mk", "македонски"),
                    ("mnw", "ဘာသာ မန်"),
                    ("mr", "मराठी"),
                    ("ms", "Bahasa Melayu"),
                    ("my", "မြန်မာဘာသာ"),
                    ("pl", "polski"),
                    ("pt", "português"),
                    ("pt-br", "português do Brasil"),
                    ("ro", "română"),
                    ("ru", "русский"),
                    ("scn", "sicilianu"),
                    ("sr-ec", "sr-cyrl"),
                    ("sv", "svenska"),
                    ("ta", "தமிழ்"),
                    ("tr", "Türkçe"),
                    ("uk", "українська"),
                    ("vi", "Tiếng Việt"),
                    ("zh-hans", "中文(简体)"),
                    ("zh-hant", "中文(繁體)"),
                ],
                help_text="Language",
                max_length=128,
                null=True,
            ),
        ),
    ]
| 33.408451
| 53
| 0.29511
|
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: updates the set of interface-language
    # choices offered by UserProfile.lang.

    dependencies = [
        ("users", "0075_auto_20210607_1312"),
    ]

    operations = [
        migrations.AlterField(
            model_name="userprofile",
            name="lang",
            field=models.CharField(
                blank=True,
                # (language code, autonym) pairs.
                choices=[
                    ("ar", "العربية"),
                    ("as", "অসমীয়া"),
                    ("bcl", "Bikol Central"),
                    ("br", "brezhoneg"),
                    ("da", "dansk"),
                    ("dag", "dagbanli"),
                    ("de", "Deutsch"),
                    ("diq", "Zazaki"),
                    ("en", "English"),
                    ("en-gb", "British English"),
                    ("eo", "Esperanto"),
                    ("es", "español"),
                    ("fa", "فارسی"),
                    ("fi", "suomi"),
                    ("fr", "français"),
                    ("gu", "ગુજરાતી"),
                    ("guw", "gungbe"),
                    ("he", "עברית"),
                    ("hi", "हिन्दी"),
                    ("hy", "հայերեն"),
                    ("id", "Bahasa Indonesia"),
                    ("io", "Ido"),
                    ("it", "italiano"),
                    ("ja", "日本語"),
                    ("ko", "한국어"),
                    ("lv", "latviešu"),
                    ("mk", "македонски"),
                    ("mnw", "ဘာသာ မန်"),
                    ("mr", "मराठी"),
                    ("ms", "Bahasa Melayu"),
                    ("my", "မြန်မာဘာသာ"),
                    ("pl", "polski"),
                    ("pt", "português"),
                    ("pt-br", "português do Brasil"),
                    ("ro", "română"),
                    ("ru", "русский"),
                    ("scn", "sicilianu"),
                    ("sr-ec", "sr-cyrl"),
                    ("sv", "svenska"),
                    ("ta", "தமிழ்"),
                    ("tr", "Türkçe"),
                    ("uk", "українська"),
                    ("vi", "Tiếng Việt"),
                    ("zh-hans", "中文(简体)"),
                    ("zh-hant", "中文(繁體)"),
                ],
                help_text="Language",
                max_length=128,
                null=True,
            ),
        ),
    ]
| true
| true
|
f708ff486b81166cc40bc29b8b4461414fe460e6
| 5,443
|
py
|
Python
|
samples/snippets/conftest.py
|
LaudateCorpus1/python-bigquery-datatransfer
|
babbaf7c6d4bb0c7485eb077b90303d99b32da30
|
[
"Apache-2.0"
] | 58
|
2020-03-05T16:06:45.000Z
|
2022-03-28T18:20:46.000Z
|
samples/snippets/conftest.py
|
LaudateCorpus1/python-bigquery-datatransfer
|
babbaf7c6d4bb0c7485eb077b90303d99b32da30
|
[
"Apache-2.0"
] | 120
|
2020-02-05T09:56:10.000Z
|
2022-03-23T00:19:09.000Z
|
samples/snippets/conftest.py
|
LaudateCorpus1/python-bigquery-datatransfer
|
babbaf7c6d4bb0c7485eb077b90303d99b32da30
|
[
"Apache-2.0"
] | 21
|
2020-02-05T23:11:23.000Z
|
2022-01-29T08:07:36.000Z
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import os
import random
import uuid
from google.api_core import client_options
import google.api_core.exceptions
import google.auth
from google.cloud import bigquery
from google.cloud import bigquery_datatransfer
from google.cloud import pubsub_v1
import pytest
# Shared layout of generated resource names: prefix + UTC timestamp + nonce.
RESOURCE_PREFIX = "python_bigquery_datatransfer_samples_snippets"
RESOURCE_DATE_FORMAT = "%Y%m%d%H%M%S"
RESOURCE_DATE_LENGTH = 4 + 2 + 2 + 2 + 2 + 2


def resource_prefix() -> str:
    """Return a unique resource name: prefix, UTC timestamp, random hex."""
    stamp = datetime.datetime.utcnow().strftime(RESOURCE_DATE_FORMAT)
    nonce = "%x" % random.randrange(1000000)
    return "_".join((RESOURCE_PREFIX, stamp, nonce))
def resource_name_to_date(resource_name: str):
    """Recover the creation timestamp embedded in a generated resource name."""
    offset = len(RESOURCE_PREFIX) + 1  # skip the prefix and its '_' separator
    stamp = resource_name[offset:offset + RESOURCE_DATE_LENGTH]
    return datetime.datetime.strptime(stamp, RESOURCE_DATE_FORMAT)
@pytest.fixture(scope="session", autouse=True)
def cleanup_pubsub_topics(pubsub_client: pubsub_v1.PublisherClient, project_id):
    """Delete sample-created Pub/Sub topics that are more than a day old."""
    cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=1)
    for topic in pubsub_client.list_topics(project=f"projects/{project_id}"):
        _, _, topic_id = topic.name.rpartition("/")
        if not topic_id.startswith(RESOURCE_PREFIX):
            continue
        if resource_name_to_date(topic_id) < cutoff:
            pubsub_client.delete_topic(topic=topic.name)
def temp_suffix():
    """Return 'YYYYmmddHHMMSS_xxxxxxxx': local timestamp plus 8 random hex chars."""
    stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    token = uuid.uuid4().hex[:8]
    return stamp + "_" + token
@pytest.fixture(scope="session")
def bigquery_client(default_credentials):
    """Session-wide BigQuery client built from the default credentials."""
    credentials, project_id = default_credentials
    return bigquery.Client(credentials=credentials, project=project_id)
@pytest.fixture(scope="session")
def pubsub_client(default_credentials):
    """Session-wide Pub/Sub publisher client."""
    credentials, _ = default_credentials
    return pubsub_v1.PublisherClient(credentials=credentials)
@pytest.fixture(scope="session")
def pubsub_topic(pubsub_client: pubsub_v1.PublisherClient, project_id):
    """Create a uniquely named Pub/Sub topic for the session; delete it after."""
    path = pubsub_v1.PublisherClient.topic_path(project_id, resource_prefix())
    pubsub_client.create_topic(name=path)
    yield path
    # Teardown: remove the topic once the session finishes.
    pubsub_client.delete_topic(topic=path)
@pytest.fixture(scope="session")
def dataset_id(bigquery_client, project_id):
    """Create a throwaway BigQuery dataset for the session; drop it after."""
    ds_id = f"bqdts_{temp_suffix()}"
    bigquery_client.create_dataset(f"{project_id}.{ds_id}")
    yield ds_id
    # Teardown: drop the dataset together with any tables created in it.
    bigquery_client.delete_dataset(ds_id, delete_contents=True)
@pytest.fixture(scope="session")
def default_credentials():
    """(credentials, project) from ADC, scoped to cloud-platform."""
    return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"])
@pytest.fixture(scope="session")
def project_id():
    """Project id taken from the GOOGLE_CLOUD_PROJECT environment variable."""
    return os.environ["GOOGLE_CLOUD_PROJECT"]
@pytest.fixture(scope="session")
def service_account_name(default_credentials):
    """E-mail of the active service account, or None for user credentials."""
    credentials, _ = default_credentials
    # The service_account_email attribute is not available when running with
    # user account credentials, but should be available when running from our
    # continuous integration tests.
    return getattr(credentials, "service_account_email", None)
@pytest.fixture(scope="session")
def transfer_client(default_credentials, project_id):
    """Data Transfer client with its quota attributed to project_id.

    Also monkeypatches the client class so sample code that constructs its
    own client reuses this configured instance.
    """
    credentials, _ = default_credentials
    options = client_options.ClientOptions(quota_project_id=project_id)
    transfer_client = bigquery_datatransfer.DataTransferServiceClient(
        credentials=credentials, client_options=options
    )
    # Ensure quota is always attributed to the correct project.
    bigquery_datatransfer.DataTransferServiceClient = lambda: transfer_client
    return transfer_client
@pytest.fixture(scope="session")
def transfer_config_name(transfer_client, project_id, dataset_id, service_account_name):
    """Create one scheduled-query transfer config for the whole session.

    Yields the config resource name; the config is deleted at teardown.
    """
    from . import manage_transfer_configs, scheduled_query

    # Use the transfer_client fixture so we know quota is attributed to the
    # correct project.
    assert transfer_client is not None

    # To conserve limited BQ-DTS quota, this fixture creates only one transfer
    # config for a whole session and is used to test the scheduled_query.py and
    # the delete operation in manage_transfer_configs.py.
    transfer_config = scheduled_query.create_scheduled_query(
        {
            "project_id": project_id,
            "dataset_id": dataset_id,
            "service_account_name": service_account_name,
        }
    )
    yield transfer_config.name
    manage_transfer_configs.delete_config(
        {"transfer_config_name": transfer_config.name}
    )
@pytest.fixture
def to_delete_configs(transfer_client):
    """Yield a list; any transfer config names appended are deleted after."""
    pending = []
    yield pending
    for name in pending:
        try:
            transfer_client.delete_transfer_config(name=name)
        except google.api_core.exceptions.GoogleAPICallError:
            # Best-effort cleanup: the config may already be gone.
            pass
| 34.01875
| 88
| 0.756752
|
import datetime
import os
import random
import uuid
from google.api_core import client_options
import google.api_core.exceptions
import google.auth
from google.cloud import bigquery
from google.cloud import bigquery_datatransfer
from google.cloud import pubsub_v1
import pytest
# Shared layout of generated resource names: prefix + UTC timestamp + nonce.
RESOURCE_PREFIX = "python_bigquery_datatransfer_samples_snippets"
RESOURCE_DATE_FORMAT = "%Y%m%d%H%M%S"
RESOURCE_DATE_LENGTH = 4 + 2 + 2 + 2 + 2 + 2  # length of a formatted timestamp
def resource_prefix() -> str:
    """Return a unique resource name: prefix, UTC timestamp, random hex."""
    timestamp = datetime.datetime.utcnow().strftime(RESOURCE_DATE_FORMAT)
    random_string = hex(random.randrange(1000000))[2:]
    return f"{RESOURCE_PREFIX}_{timestamp}_{random_string}"
def resource_name_to_date(resource_name: str):
    """Parse the timestamp embedded in a name made by resource_prefix()."""
    start_date = len(RESOURCE_PREFIX) + 1  # skip prefix and '_' separator
    date_string = resource_name[start_date : start_date + RESOURCE_DATE_LENGTH]
    parsed_date = datetime.datetime.strptime(date_string, RESOURCE_DATE_FORMAT)
    return parsed_date
@pytest.fixture(scope="session", autouse=True)
def cleanup_pubsub_topics(pubsub_client: pubsub_v1.PublisherClient, project_id):
    """Delete sample-created Pub/Sub topics that are more than a day old."""
    yesterday = datetime.datetime.utcnow() - datetime.timedelta(days=1)
    for topic in pubsub_client.list_topics(project=f"projects/{project_id}"):
        topic_id = topic.name.split("/")[-1]
        if (
            topic_id.startswith(RESOURCE_PREFIX)
            and resource_name_to_date(topic_id) < yesterday
        ):
            pubsub_client.delete_topic(topic=topic.name)
def temp_suffix():
    """Return a 'YYYYmmddHHMMSS_xxxxxxxx' local-time + random-hex suffix."""
    now = datetime.datetime.now()
    return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}"
@pytest.fixture(scope="session")
def bigquery_client(default_credentials):
    """Session-wide BigQuery client built from the default credentials."""
    credentials, project_id = default_credentials
    return bigquery.Client(credentials=credentials, project=project_id)
@pytest.fixture(scope="session")
def pubsub_client(default_credentials):
    """Session-wide Pub/Sub publisher client."""
    credentials, _ = default_credentials
    return pubsub_v1.PublisherClient(credentials=credentials)
@pytest.fixture(scope="session")
def pubsub_topic(pubsub_client: pubsub_v1.PublisherClient, project_id):
    """Create a uniquely named topic for the session; delete it at teardown."""
    topic_id = resource_prefix()
    topic_path = pubsub_v1.PublisherClient.topic_path(project_id, topic_id)
    pubsub_client.create_topic(name=topic_path)
    yield topic_path
    pubsub_client.delete_topic(topic=topic_path)
@pytest.fixture(scope="session")
def dataset_id(bigquery_client, project_id):
    """Create a throwaway BigQuery dataset for the session; drop at teardown."""
    dataset_id = f"bqdts_{temp_suffix()}"
    bigquery_client.create_dataset(f"{project_id}.{dataset_id}")
    yield dataset_id
    bigquery_client.delete_dataset(dataset_id, delete_contents=True)
@pytest.fixture(scope="session")
def default_credentials():
    """(credentials, project) from ADC, scoped to cloud-platform."""
    return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"])
@pytest.fixture(scope="session")
def project_id():
    """Project id taken from the GOOGLE_CLOUD_PROJECT environment variable."""
    return os.environ["GOOGLE_CLOUD_PROJECT"]
@pytest.fixture(scope="session")
def service_account_name(default_credentials):
    """E-mail of the active service account, or None for user credentials."""
    credentials, _ = default_credentials
    # service_account_email only exists on service-account credentials.
    return getattr(credentials, "service_account_email", None)
@pytest.fixture(scope="session")
def transfer_client(default_credentials, project_id):
    """Data Transfer client with its quota attributed to project_id.

    Also monkeypatches the client class so sample code that constructs its
    own client reuses this configured instance.
    """
    credentials, _ = default_credentials
    options = client_options.ClientOptions(quota_project_id=project_id)
    transfer_client = bigquery_datatransfer.DataTransferServiceClient(
        credentials=credentials, client_options=options
    )
    # Ensure quota is always attributed to the correct project.
    bigquery_datatransfer.DataTransferServiceClient = lambda: transfer_client
    return transfer_client
@pytest.fixture(scope="session")
def transfer_config_name(transfer_client, project_id, dataset_id, service_account_name):
    """Create one scheduled-query transfer config per session; delete after."""
    from . import manage_transfer_configs, scheduled_query
    # Depending on transfer_client ensures quota attribution is configured.
    assert transfer_client is not None
    # A single config conserves limited BQ-DTS quota; it serves both the
    # scheduled_query sample and the delete-operation sample.
    transfer_config = scheduled_query.create_scheduled_query(
        {
            "project_id": project_id,
            "dataset_id": dataset_id,
            "service_account_name": service_account_name,
        }
    )
    yield transfer_config.name
    manage_transfer_configs.delete_config(
        {"transfer_config_name": transfer_config.name}
    )
@pytest.fixture
def to_delete_configs(transfer_client):
    """Yield a list; any transfer config names appended are deleted after."""
    to_delete = []
    yield to_delete
    for config_name in to_delete:
        try:
            transfer_client.delete_transfer_config(name=config_name)
        except google.api_core.exceptions.GoogleAPICallError:
            # Best-effort cleanup: the config may already be gone.
            pass
| true
| true
|
f708ff6f7bc2862d008b44cf3e33e780b4f3c6fa
| 6,442
|
py
|
Python
|
pypeit/scripts/flux_setup.py
|
finagle29/PypeIt
|
418d6d24d24054ad590d2f06c0b4688ea18f492e
|
[
"BSD-3-Clause"
] | null | null | null |
pypeit/scripts/flux_setup.py
|
finagle29/PypeIt
|
418d6d24d24054ad590d2f06c0b4688ea18f492e
|
[
"BSD-3-Clause"
] | null | null | null |
pypeit/scripts/flux_setup.py
|
finagle29/PypeIt
|
418d6d24d24054ad590d2f06c0b4688ea18f492e
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import argparse
import os,time
import numpy as np
from astropy.io import fits
from astropy.table import Table
from pypeit import msgs
from pypeit.par.util import make_pypeit_file
class SmartFormatter(argparse.HelpFormatter):
    """Help formatter that preserves manual line breaks in 'R|'-prefixed text."""

    def _split_lines(self, text, width):
        if not text.startswith('R|'):
            # No marker: defer to the standard wrapping behaviour.
            return argparse.HelpFormatter._split_lines(self, text, width)
        # Marker present: drop 'R|' and keep the author's own line breaks.
        return text[2:].splitlines()
def parser(options=None):
    """Build and run the pypeit_flux_setup argument parser.

    :param options: Optional list of command-line tokens; when None,
        sys.argv[1:] is parsed instead.
    :return: The parsed argparse namespace.
    """
    parser = argparse.ArgumentParser(description='Parse', formatter_class=SmartFormatter)
    parser.add_argument("sci_path", type=str, help="Path for Science folder")
    parser.add_argument("--objmodel", type=str, default='qso', choices=['qso', 'star', 'poly'],
                        help="R|Science object model used in the telluric fitting.\n"
                        "The options are:\n"
                        "\n"
                        " qso = For quasars. You might need to set redshift, bal_wv_min_mx in the tell file.\n"
                        "\n"
                        " star = For stars. You need to set star_type, star_ra, star_dec, and star_mag in the tell_file.\n"
                        "\n"
                        " poly = For other type object, You might need to set fit_wv_min_mx, \n"
                        " and norder in the tell_file."
                        )
    if options is None:
        args = parser.parse_args()
    else:
        args = parser.parse_args(options)
    return args
def _retarget_pypeit_file(filename, sci_path, block_label):
    """Rewrite a generated PypeIt file in place.

    Prefixes 'spec1d_' entries with sci_path and renames the generic 'data'
    block to block_label.  The two plain substring replaces are applied in
    this exact order on purpose (matches the historical behaviour).
    """
    with open(filename, "rt") as fin:
        data = fin.read()
    data = data.replace('spec1d_', os.path.join(sci_path, 'spec1d_'))
    data = data.replace('data', block_label)
    with open(filename, "wt") as fout:
        fout.write(data)


def main(args):
    """
    This setups PypeIt files for fluxing, coadding and telluric corrections.
    It will produce three files named as your_spectragraph.flux, your_spectragraph.coadd1d,
    and your_spectragraph.tell
    """
    # Classify the science-folder contents; sort for deterministic output.
    allfiles = np.sort(os.listdir(args.sci_path))
    spec1dfiles = []
    spec2dfiles = []
    spec1dinfos = []
    for ifile in allfiles:
        if ('spec1d' in ifile) and ('.fits' in ifile):
            spec1dfiles.append(ifile)
        elif ('spec2d' in ifile) and ('.fits' in ifile):
            spec2dfiles.append(ifile)
        elif ('spec1d' in ifile) and ('.txt' in ifile):
            spec1dinfos.append(ifile)
        else:
            msgs.warn('{:} is not a standard PypeIt output.'.format(ifile))
    if len(spec2dfiles) > len(spec1dfiles):
        msgs.warn('The following exposures do not have 1D extractions:')
        for ii in range(len(spec2dfiles)):
            if not os.path.exists(os.path.join(args.sci_path,
                                               spec2dfiles[ii].replace('spec2d', 'spec1d'))):
                msgs.info('\t {:}'.format(spec2dfiles[ii]))

    if len(spec1dfiles) > 0:
        # Read instrument/pipeline identity from the first spec1d header.
        # Context manager fixes the file-handle leak of the original code.
        with fits.open(os.path.join(args.sci_path, spec1dfiles[0])) as hdul:
            spectrograph = hdul[0].header['PYP_SPEC']
            pypeline = hdul[0].header['PYPELINE']

        ## fluxing pypeit file
        flux_file = '{:}.flux'.format(spectrograph)
        cfg_lines = ['[fluxcalib]']
        cfg_lines += [' extinct_correct = False # Set to True if your SENSFUNC derived with the UVIS algorithm\n']
        cfg_lines += ['# Please add your SENSFUNC file name below before running pypeit_flux_calib']
        make_pypeit_file(flux_file, spectrograph, spec1dfiles, cfg_lines=cfg_lines, setup_mode=True)
        _retarget_pypeit_file(flux_file, args.sci_path, 'flux')

        ## coadd1d pypeit file
        coadd1d_file = '{:}.coadd1d'.format(spectrograph)
        cfg_lines = ['[coadd1d]']
        cfg_lines += [' coaddfile = YOUR_OUTPUT_FILE_NAME # Please set your output file name']
        cfg_lines += [' sensfuncfile = YOUR_SENSFUNC_FILE # Please set your SENSFUNC file name']
        if pypeline == 'Echelle':
            cfg_lines += [' wave_method = velocity # creates a uniformly space grid in log10(lambda)\n']
        else:
            cfg_lines += [' wave_method = linear # creates a uniformly space grid in lambda\n']
        cfg_lines += ['# This file includes all extracted objects. You need to figure out which object you want to \n'+\
                      '# coadd before running pypeit_coadd_1dspec!!!']
        # One "spec1d_file objname" entry per unique extracted object.
        spec1d_info = []
        for ii in range(len(spec1dfiles)):
            meta_tbl = Table.read(os.path.join(args.sci_path, spec1dfiles[ii]).replace('.fits', '.txt'),
                                  format='ascii.fixed_width')
            _, indx = np.unique(meta_tbl['name'], return_index=True)
            objects = meta_tbl[indx]
            for jj in range(len(objects)):
                spec1d_info.append(spec1dfiles[ii] + ' ' + objects['name'][jj])
        make_pypeit_file(coadd1d_file, spectrograph, spec1d_info, cfg_lines=cfg_lines, setup_mode=True)
        _retarget_pypeit_file(coadd1d_file, args.sci_path, 'coadd1d')

        ## tellfit pypeit file
        tellfit_file = '{:}.tell'.format(spectrograph)
        cfg_lines = ['[tellfit]']
        if args.objmodel == 'qso':
            cfg_lines += [' objmodel = qso']
            cfg_lines += [' redshift = 0.0']
            cfg_lines += [' bal_wv_min_max = 10000.,11000.']
        elif args.objmodel == 'star':
            cfg_lines += [' objmodel = star']
            cfg_lines += [' star_type = A0']
            cfg_lines += [' star_mag = 0.0']
        elif args.objmodel == 'poly':
            cfg_lines += [' objmodel = poly']
            cfg_lines += [' polyorder = 5']
            cfg_lines += [' fit_wv_min_max = 17000.0,22000.0']
        with open(tellfit_file, 'w') as f:
            f.write('# Auto-generated PypeIt file\n')
            f.write('# {0}\n'.format(time.strftime("%a %d %b %Y %H:%M:%S", time.localtime())))
            f.write("\n")
            f.write("# User-defined execution parameters\n")
            f.write("# This is only an example. Make sure to change the following parameters accordingly.\n")
            f.write('\n'.join(cfg_lines))
            f.write('\n')
            f.write('\n')
        msgs.info('PypeIt file written to: {0}'.format(tellfit_file))
| 44.427586
| 127
| 0.586153
|
import argparse
import os,time
import numpy as np
from astropy.io import fits
from astropy.table import Table
from pypeit import msgs
from pypeit.par.util import make_pypeit_file
class SmartFormatter(argparse.HelpFormatter):
    """argparse help formatter honoring explicit newlines in 'R|' text."""

    def _split_lines(self, text, width):
        # An 'R|' prefix means: keep the author's line breaks verbatim.
        if text.startswith('R|'):
            return text[2:].splitlines()
        # Otherwise wrap to width like the default formatter.
        return argparse.HelpFormatter._split_lines(self, text, width)
def parser(options=None):
    """Build and run the pypeit_flux_setup argument parser.

    :param options: Optional list of command-line tokens; when None,
        sys.argv[1:] is parsed instead.
    :return: The parsed argparse namespace.
    """
    parser = argparse.ArgumentParser(description='Parse', formatter_class=SmartFormatter)
    parser.add_argument("sci_path", type=str, help="Path for Science folder")
    parser.add_argument("--objmodel", type=str, default='qso', choices=['qso', 'star', 'poly'],
                        help="R|Science object model used in the telluric fitting.\n"
                        "The options are:\n"
                        "\n"
                        " qso = For quasars. You might need to set redshift, bal_wv_min_mx in the tell file.\n"
                        "\n"
                        " star = For stars. You need to set star_type, star_ra, star_dec, and star_mag in the tell_file.\n"
                        "\n"
                        " poly = For other type object, You might need to set fit_wv_min_mx, \n"
                        " and norder in the tell_file."
                        )
    if options is None:
        args = parser.parse_args()
    else:
        args = parser.parse_args(options)
    return args
def main(args):
    """Set up PypeIt files for fluxing, coadding and telluric correction.

    Produces three files named <spectrograph>.flux, <spectrograph>.coadd1d
    and <spectrograph>.tell in the current directory.
    """
    # Classify the science-folder contents by PypeIt output type.
    allfiles = os.listdir(args.sci_path)
    allfiles = np.sort(allfiles)
    spec1dfiles = []
    spec2dfiles = []
    spec1dinfos = []
    for ifile in allfiles:
        if ('spec1d' in ifile) and ('.fits' in ifile):
            spec1dfiles.append(ifile)
        elif ('spec2d' in ifile) and ('.fits' in ifile):
            spec2dfiles.append(ifile)
        elif ('spec1d' in ifile) and ('.txt' in ifile):
            spec1dinfos.append(ifile)
        else:
            msgs.warn('{:} is not a standard PypeIt output.'.format(ifile))
    # Report spec2d exposures that have no matching spec1d extraction.
    if len(spec2dfiles) > len(spec1dfiles):
        msgs.warn('The following exposures do not have 1D extractions:')
        for ii in range(len(spec2dfiles)):
            if not os.path.exists(os.path.join(args.sci_path, spec2dfiles[ii].replace('spec2d','spec1d'))):
                msgs.info('\t {:}'.format(spec2dfiles[ii]))
    if len(spec1dfiles) > 0:
        # Read instrument/pipeline identity from the first spec1d header.
        par = fits.open(os.path.join(args.sci_path, spec1dfiles[0]))
        # Fluxing pypeit file.
        spectrograph = par[0].header['PYP_SPEC']
        pypeline = par[0].header['PYPELINE']
        flux_file = '{:}.flux'.format(spectrograph)
        cfg_lines = ['[fluxcalib]']
        cfg_lines += [' extinct_correct = False # Set to True if your SENSFUNC derived with the UVIS algorithm\n']
        cfg_lines += ['# Please add your SENSFUNC file name below before running pypeit_flux_calib']
        make_pypeit_file(flux_file, spectrograph, spec1dfiles, cfg_lines=cfg_lines, setup_mode=True)
        # Point the spec1d entries at the science path, relabel the block.
        fin = open(flux_file, "rt")
        data = fin.read()
        data = data.replace('spec1d_', os.path.join(args.sci_path,'spec1d_'))
        data = data.replace('data', 'flux')
        fin.close()
        fin = open(flux_file, "wt")
        fin.write(data)
        fin.close()
        # Coadd1d pypeit file.
        coadd1d_file = '{:}.coadd1d'.format(spectrograph)
        cfg_lines = ['[coadd1d]']
        cfg_lines += [' coaddfile = YOUR_OUTPUT_FILE_NAME # Please set your output file name']
        cfg_lines += [' sensfuncfile = YOUR_SENSFUNC_FILE # Please set your SENSFUNC file name']
        if pypeline == 'Echelle':
            cfg_lines += [' wave_method = velocity # creates a uniformly space grid in log10(lambda)\n']
        else:
            cfg_lines += [' wave_method = linear # creates a uniformly space grid in lambda\n']
        cfg_lines += ['# This file includes all extracted objects. You need to figure out which object you want to \n'+\
                      '# coadd before running pypeit_coadd_1dspec!!!']
        # One "spec1d_file objname" entry per unique extracted object.
        spec1d_info = []
        for ii in range(len(spec1dfiles)):
            meta_tbl = Table.read(os.path.join(args.sci_path, spec1dfiles[ii]).replace('.fits', '.txt'),
                                  format='ascii.fixed_width')
            _, indx = np.unique(meta_tbl['name'],return_index=True)
            objects = meta_tbl[indx]
            for jj in range(len(objects)):
                spec1d_info.append(spec1dfiles[ii] + ' '+ objects['name'][jj])
        make_pypeit_file(coadd1d_file, spectrograph, spec1d_info, cfg_lines=cfg_lines, setup_mode=True)
        fin = open(coadd1d_file, "rt")
        data = fin.read()
        data = data.replace('spec1d_', os.path.join(args.sci_path,'spec1d_'))
        data = data.replace('data', 'coadd1d')
        fin.close()
        fin = open(coadd1d_file, "wt")
        fin.write(data)
        fin.close()
        # Tellfit pypeit file: object-model dependent parameters.
        tellfit_file = '{:}.tell'.format(spectrograph)
        cfg_lines = ['[tellfit]']
        if args.objmodel == 'qso':
            cfg_lines += [' objmodel = qso']
            cfg_lines += [' redshift = 0.0']
            cfg_lines += [' bal_wv_min_max = 10000.,11000.']
        elif args.objmodel == 'star':
            cfg_lines += [' objmodel = star']
            cfg_lines += [' star_type = A0']
            cfg_lines += [' star_mag = 0.0']
        elif args.objmodel == 'poly':
            cfg_lines += [' objmodel = poly']
            cfg_lines += [' polyorder = 5']
            cfg_lines += [' fit_wv_min_max = 17000.0,22000.0']
        with open(tellfit_file, 'w') as f:
            f.write('# Auto-generated PypeIt file\n')
            f.write('# {0}\n'.format(time.strftime("%a %d %b %Y %H:%M:%S", time.localtime())))
            f.write("\n")
            f.write("# User-defined execution parameters\n")
            f.write("# This is only an example. Make sure to change the following parameters accordingly.\n")
            f.write('\n'.join(cfg_lines))
            f.write('\n')
            f.write('\n')
        msgs.info('PypeIt file written to: {0}'.format(tellfit_file))
| true
| true
|
f70900170d8c9fcf57f9fec29511f5b14e33da5a
| 8,192
|
py
|
Python
|
doc/source/conf.py
|
bswartz/cinder
|
6cfecade9e2ee86bbb7d95c3c401c9e4c70f6a96
|
[
"Apache-2.0"
] | null | null | null |
doc/source/conf.py
|
bswartz/cinder
|
6cfecade9e2ee86bbb7d95c3c401c9e4c70f6a96
|
[
"Apache-2.0"
] | null | null | null |
doc/source/conf.py
|
bswartz/cinder
|
6cfecade9e2ee86bbb7d95c3c401c9e4c70f6a96
|
[
"Apache-2.0"
] | null | null | null |
# cinder documentation build configuration file, created by
# sphinx-quickstart on Sat May 1 15:17:47 2010.
#
# This file is execfile()d with the current directory set
# to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import subprocess
import sys
import warnings
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings.
# They can be extensions coming with Sphinx (named 'sphinx.ext.*')
# or your custom ones.
extensions = ['sphinx.ext.autodoc',
'ext.cinder_todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.graphviz',
'oslosphinx',
'stevedore.sphinxext',
'oslo_config.sphinxconfiggen',
]
config_generator_config_file = '../../cinder/config/cinder-config-generator.conf'
sample_config_basename = '_static/cinder'
# autodoc generation is a bit aggressive and a nuisance
# when doing heavy text edit cycles. Execute "export SPHINX_DEBUG=1"
# in your terminal to disable
if not os.getenv('SPHINX_DEBUG'):
extensions += ['ext.cinder_autodoc']
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
# Changing the path so that the Hudson build output contains GA code
# and the source docs do not contain the code so local, offline sphinx builds
# are "clean."
templates_path = []
if os.getenv('HUDSON_PUBLISH_DOCS'):
templates_path = ['_ga', '_templates']
else:
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'cinder'
copyright = u'2010-present, OpenStack Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
from cinder.version import version_info
# The full version, including alpha/beta/rc tags.
release = version_info.release_string()
# The short X.Y version.
version = version_info.version_string()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
unused_docs = [
'api_ext/rst_extension_template',
'installer',
]
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use
# for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['cinder.']
# -- Options for man page output ----------------------------------------------
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
man_pages = [
('man/cinder-manage', 'cinder-manage', u'Cloud controller fabric',
[u'OpenStack'], 1)
]
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
"-n1"]
try:
html_last_updated_fmt = subprocess.Popen(
git_cmd, stdout=subprocess.PIPE).communicate()[0]
except Exception:
warnings.warn('Cannot get last updated time from git repository. '
'Not setting "html_last_updated_fmt".')
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'cinderdoc'
# -- Options for LaTeX output -------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'Cinder.tex', u'Cinder Documentation',
u'Anso Labs, LLC', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| 32.63745
| 81
| 0.708252
|
import os
import subprocess
import sys
import warnings
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
extensions = ['sphinx.ext.autodoc',
'ext.cinder_todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.graphviz',
'oslosphinx',
'stevedore.sphinxext',
'oslo_config.sphinxconfiggen',
]
config_generator_config_file = '../../cinder/config/cinder-config-generator.conf'
sample_config_basename = '_static/cinder'
if not os.getenv('SPHINX_DEBUG'):
extensions += ['ext.cinder_autodoc']
todo_include_todos = True
templates_path = []
if os.getenv('HUDSON_PUBLISH_DOCS'):
templates_path = ['_ga', '_templates']
else:
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'cinder'
copyright = u'2010-present, OpenStack Foundation'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
from cinder.version import version_info
# The full version, including alpha/beta/rc tags.
release = version_info.release_string()
# The short X.Y version.
version = version_info.version_string()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
unused_docs = [
'api_ext/rst_extension_template',
'installer',
]
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use
# for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['cinder.']
# -- Options for man page output ----------------------------------------------
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
man_pages = [
('man/cinder-manage', 'cinder-manage', u'Cloud controller fabric',
[u'OpenStack'], 1)
]
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
"-n1"]
try:
html_last_updated_fmt = subprocess.Popen(
git_cmd, stdout=subprocess.PIPE).communicate()[0]
except Exception:
warnings.warn('Cannot get last updated time from git repository. '
'Not setting "html_last_updated_fmt".')
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'cinderdoc'
# -- Options for LaTeX output -------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'Cinder.tex', u'Cinder Documentation',
u'Anso Labs, LLC', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| true
| true
|
f70900c8f30868cb2768c4bd7aab9e34ca82c6c6
| 3,322
|
py
|
Python
|
Agrus/settings.py
|
GlugovGrGlib/CtrlStorageApp
|
0da23c2d5e6d565ffecaa75e1deb47a789a37ff1
|
[
"MIT"
] | null | null | null |
Agrus/settings.py
|
GlugovGrGlib/CtrlStorageApp
|
0da23c2d5e6d565ffecaa75e1deb47a789a37ff1
|
[
"MIT"
] | null | null | null |
Agrus/settings.py
|
GlugovGrGlib/CtrlStorageApp
|
0da23c2d5e6d565ffecaa75e1deb47a789a37ff1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Django settings for Agrus project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
from pytz import timezone
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'gz6=&2*879yuym!pf(d8kch*30ow*eh=ybb-f0qsg+%c4+$@3c'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'Agrus',
'kurs',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_extensions',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Agrus.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Agrus.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': 'xe',
'USER': 'django',
'PASSWORD': 'djangooracle',
'HOST': 'localhost',
'PORT': '1521'
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
LOGIN_URL = '/login/'
LOGOUT_URL = '/logout'
| 25.166667
| 91
| 0.681818
|
from pytz import timezone
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'gz6=&2*879yuym!pf(d8kch*30ow*eh=ybb-f0qsg+%c4+$@3c'
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'Agrus',
'kurs',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_extensions',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Agrus.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Agrus.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': 'xe',
'USER': 'django',
'PASSWORD': 'djangooracle',
'HOST': 'localhost',
'PORT': '1521'
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
LOGIN_URL = '/login/'
LOGOUT_URL = '/logout'
| true
| true
|
f70901151625154350ab62aebeb639896e2c320e
| 975
|
py
|
Python
|
awards/urls.py
|
yareyaska/awwards
|
e9ccae3ea2fd9b36a6d6d0c3933de121ef5bf5ed
|
[
"MIT"
] | null | null | null |
awards/urls.py
|
yareyaska/awwards
|
e9ccae3ea2fd9b36a6d6d0c3933de121ef5bf5ed
|
[
"MIT"
] | 5
|
2020-06-05T22:47:45.000Z
|
2021-09-08T01:16:30.000Z
|
awards/urls.py
|
yareyaska/awwards
|
e9ccae3ea2fd9b36a6d6d0c3933de121ef5bf5ed
|
[
"MIT"
] | null | null | null |
"""awards URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url,include
from django.contrib import admin
from django.contrib.auth import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^',include('award.urls')),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^logout/$', views.logout, {"next_page": '/'}),
]
| 37.5
| 79
| 0.692308
|
from django.conf.urls import url,include
from django.contrib import admin
from django.contrib.auth import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^',include('award.urls')),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^logout/$', views.logout, {"next_page": '/'}),
]
| true
| true
|
f70901607d092422ea629b22f328383db82a6532
| 1,384
|
py
|
Python
|
library/aiohttp/__init__.py
|
RouganStriker/BDOBot
|
51fddfab4c06c8593d1d63543285fdf32b26c4f8
|
[
"MIT"
] | 5
|
2019-06-10T10:42:22.000Z
|
2019-07-10T14:05:13.000Z
|
library/aiohttp/__init__.py
|
RouganStriker/BDOBot
|
51fddfab4c06c8593d1d63543285fdf32b26c4f8
|
[
"MIT"
] | 3
|
2018-08-29T01:15:46.000Z
|
2018-08-29T15:12:38.000Z
|
library/aiohttp/__init__.py
|
RouganStriker/BDOBot
|
51fddfab4c06c8593d1d63543285fdf32b26c4f8
|
[
"MIT"
] | 2
|
2018-08-30T14:36:20.000Z
|
2019-06-17T13:07:18.000Z
|
__version__ = '1.0.5'
# Deprecated, keep it here for a while for backward compatibility.
import multidict # noqa
# This relies on each of the submodules having an __all__ variable.
from multidict import * # noqa
from . import hdrs # noqa
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .client_reqrep import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
from .multipart import * # noqa
from .client_ws import ClientWebSocketResponse # noqa
from ._ws_impl import WSMsgType, WSCloseCode, WSMessage, WebSocketError # noqa
from .file_sender import FileSender # noqa
from .cookiejar import CookieJar # noqa
from .resolver import * # noqa
MsgType = WSMsgType # backward compatibility
__all__ = (client.__all__ + # noqa
client_reqrep.__all__ + # noqa
errors.__all__ + # noqa
helpers.__all__ + # noqa
parsers.__all__ + # noqa
protocol.__all__ + # noqa
connector.__all__ + # noqa
streams.__all__ + # noqa
multidict.__all__ + # noqa
multipart.__all__ + # noqa
('hdrs', 'FileSender', 'WSMsgType', 'MsgType', 'WSCloseCode',
'WebSocketError', 'WSMessage',
'ClientWebSocketResponse', 'CookieJar'))
| 32.952381
| 79
| 0.657514
|
__version__ = '1.0.5'
import multidict
from multidict import * from . import hdrs from .protocol import * from .connector import * from .client import * from .client_reqrep import * from .errors import * from .helpers import * from .parsers import * from .streams import * from .multipart import * from .client_ws import ClientWebSocketResponse from ._ws_impl import WSMsgType, WSCloseCode, WSMessage, WebSocketError from .file_sender import FileSender from .cookiejar import CookieJar from .resolver import *
MsgType = WSMsgType
__all__ = (client.__all__ + client_reqrep.__all__ + errors.__all__ + helpers.__all__ + parsers.__all__ + protocol.__all__ + connector.__all__ + streams.__all__ + multidict.__all__ + multipart.__all__ + ('hdrs', 'FileSender', 'WSMsgType', 'MsgType', 'WSCloseCode',
'WebSocketError', 'WSMessage',
'ClientWebSocketResponse', 'CookieJar'))
| true
| true
|
f7090240b3aff921b5983ce0da0f77c0d2b72c2b
| 311
|
py
|
Python
|
python/p153.py
|
forewing/lc
|
314468a1a3bb7d38eccf1f34b0d1b7da04a34784
|
[
"CC0-1.0"
] | null | null | null |
python/p153.py
|
forewing/lc
|
314468a1a3bb7d38eccf1f34b0d1b7da04a34784
|
[
"CC0-1.0"
] | null | null | null |
python/p153.py
|
forewing/lc
|
314468a1a3bb7d38eccf1f34b0d1b7da04a34784
|
[
"CC0-1.0"
] | null | null | null |
class Solution:
def findMin(self, nums: List[int]) -> int:
l = 0
r = len(nums) - 1
while r - l > 3:
m = (l + r) // 2
if nums[m] > nums[l] and nums[m] > nums[r]:
l = m + 1
else:
r = m
return min(nums[l:r+1])
| 25.916667
| 55
| 0.366559
|
class Solution:
def findMin(self, nums: List[int]) -> int:
l = 0
r = len(nums) - 1
while r - l > 3:
m = (l + r) // 2
if nums[m] > nums[l] and nums[m] > nums[r]:
l = m + 1
else:
r = m
return min(nums[l:r+1])
| true
| true
|
f709056acf3cbbb016b307580078e37cb63de811
| 5,247
|
py
|
Python
|
lib/django-1.4/django/contrib/gis/admin/options.py
|
MiCHiLU/google_appengine_sdk
|
3da9f20d7e65e26c4938d2c4054bc4f39cbc5522
|
[
"Apache-2.0"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
AppServer/lib/django-1.4/django/contrib/gis/admin/options.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
AppServer/lib/django-1.4/django/contrib/gis/admin/options.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
from django.contrib.admin import ModelAdmin
from django.contrib.gis.admin.widgets import OpenLayersWidget
from django.contrib.gis.gdal import OGRGeomType
from django.contrib.gis.db import models
class GeoModelAdmin(ModelAdmin):
"""
The administration options class for Geographic models. Map settings
may be overloaded from their defaults to create custom maps.
"""
# The default map settings that may be overloaded -- still subject
# to API changes.
default_lon = 0
default_lat = 0
default_zoom = 4
display_wkt = False
display_srid = False
extra_js = []
num_zoom = 18
max_zoom = False
min_zoom = False
units = False
max_resolution = False
max_extent = False
modifiable = True
mouse_position = True
scale_text = True
layerswitcher = True
scrollable = True
map_width = 600
map_height = 400
map_srid = 4326
map_template = 'gis/admin/openlayers.html'
openlayers_url = 'http://openlayers.org/api/2.11/OpenLayers.js'
point_zoom = num_zoom - 6
wms_url = 'http://vmap0.tiles.osgeo.org/wms/vmap0'
wms_layer = 'basic'
wms_name = 'OpenLayers WMS'
debug = False
widget = OpenLayersWidget
@property
def media(self):
"Injects OpenLayers JavaScript into the admin."
media = super(GeoModelAdmin, self).media
media.add_js([self.openlayers_url])
media.add_js(self.extra_js)
return media
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Overloaded from ModelAdmin so that an OpenLayersWidget is used
for viewing/editing GeometryFields.
"""
if isinstance(db_field, models.GeometryField):
request = kwargs.pop('request', None)
# Setting the widget with the newly defined widget.
kwargs['widget'] = self.get_map_widget(db_field)
return db_field.formfield(**kwargs)
else:
return super(GeoModelAdmin, self).formfield_for_dbfield(db_field, **kwargs)
def get_map_widget(self, db_field):
"""
Returns a subclass of the OpenLayersWidget (or whatever was specified
in the `widget` attribute) using the settings from the attributes set
in this class.
"""
is_collection = db_field.geom_type in ('MULTIPOINT', 'MULTILINESTRING', 'MULTIPOLYGON', 'GEOMETRYCOLLECTION')
if is_collection:
if db_field.geom_type == 'GEOMETRYCOLLECTION': collection_type = 'Any'
else: collection_type = OGRGeomType(db_field.geom_type.replace('MULTI', ''))
else:
collection_type = 'None'
class OLMap(self.widget):
template = self.map_template
geom_type = db_field.geom_type
params = {'default_lon' : self.default_lon,
'default_lat' : self.default_lat,
'default_zoom' : self.default_zoom,
'display_wkt' : self.debug or self.display_wkt,
'geom_type' : OGRGeomType(db_field.geom_type),
'field_name' : db_field.name,
'is_collection' : is_collection,
'scrollable' : self.scrollable,
'layerswitcher' : self.layerswitcher,
'collection_type' : collection_type,
'is_linestring' : db_field.geom_type in ('LINESTRING', 'MULTILINESTRING'),
'is_polygon' : db_field.geom_type in ('POLYGON', 'MULTIPOLYGON'),
'is_point' : db_field.geom_type in ('POINT', 'MULTIPOINT'),
'num_zoom' : self.num_zoom,
'max_zoom' : self.max_zoom,
'min_zoom' : self.min_zoom,
'units' : self.units, #likely shoud get from object
'max_resolution' : self.max_resolution,
'max_extent' : self.max_extent,
'modifiable' : self.modifiable,
'mouse_position' : self.mouse_position,
'scale_text' : self.scale_text,
'map_width' : self.map_width,
'map_height' : self.map_height,
'point_zoom' : self.point_zoom,
'srid' : self.map_srid,
'display_srid' : self.display_srid,
'wms_url' : self.wms_url,
'wms_layer' : self.wms_layer,
'wms_name' : self.wms_name,
'debug' : self.debug,
}
return OLMap
from django.contrib.gis import gdal
if gdal.HAS_GDAL:
# Use the official spherical mercator projection SRID on versions
# of GDAL that support it; otherwise, fallback to 900913.
if gdal.GDAL_VERSION >= (1, 7):
spherical_mercator_srid = 3857
else:
spherical_mercator_srid = 900913
class OSMGeoAdmin(GeoModelAdmin):
map_template = 'gis/admin/osm.html'
num_zoom = 20
map_srid = spherical_mercator_srid
max_extent = '-20037508,-20037508,20037508,20037508'
max_resolution = '156543.0339'
point_zoom = num_zoom - 6
units = 'm'
| 40.361538
| 117
| 0.593673
|
from django.contrib.admin import ModelAdmin
from django.contrib.gis.admin.widgets import OpenLayersWidget
from django.contrib.gis.gdal import OGRGeomType
from django.contrib.gis.db import models
class GeoModelAdmin(ModelAdmin):
default_lon = 0
default_lat = 0
default_zoom = 4
display_wkt = False
display_srid = False
extra_js = []
num_zoom = 18
max_zoom = False
min_zoom = False
units = False
max_resolution = False
max_extent = False
modifiable = True
mouse_position = True
scale_text = True
layerswitcher = True
scrollable = True
map_width = 600
map_height = 400
map_srid = 4326
map_template = 'gis/admin/openlayers.html'
openlayers_url = 'http://openlayers.org/api/2.11/OpenLayers.js'
point_zoom = num_zoom - 6
wms_url = 'http://vmap0.tiles.osgeo.org/wms/vmap0'
wms_layer = 'basic'
wms_name = 'OpenLayers WMS'
debug = False
widget = OpenLayersWidget
@property
def media(self):
media = super(GeoModelAdmin, self).media
media.add_js([self.openlayers_url])
media.add_js(self.extra_js)
return media
def formfield_for_dbfield(self, db_field, **kwargs):
if isinstance(db_field, models.GeometryField):
request = kwargs.pop('request', None)
kwargs['widget'] = self.get_map_widget(db_field)
return db_field.formfield(**kwargs)
else:
return super(GeoModelAdmin, self).formfield_for_dbfield(db_field, **kwargs)
def get_map_widget(self, db_field):
is_collection = db_field.geom_type in ('MULTIPOINT', 'MULTILINESTRING', 'MULTIPOLYGON', 'GEOMETRYCOLLECTION')
if is_collection:
if db_field.geom_type == 'GEOMETRYCOLLECTION': collection_type = 'Any'
else: collection_type = OGRGeomType(db_field.geom_type.replace('MULTI', ''))
else:
collection_type = 'None'
class OLMap(self.widget):
template = self.map_template
geom_type = db_field.geom_type
params = {'default_lon' : self.default_lon,
'default_lat' : self.default_lat,
'default_zoom' : self.default_zoom,
'display_wkt' : self.debug or self.display_wkt,
'geom_type' : OGRGeomType(db_field.geom_type),
'field_name' : db_field.name,
'is_collection' : is_collection,
'scrollable' : self.scrollable,
'layerswitcher' : self.layerswitcher,
'collection_type' : collection_type,
'is_linestring' : db_field.geom_type in ('LINESTRING', 'MULTILINESTRING'),
'is_polygon' : db_field.geom_type in ('POLYGON', 'MULTIPOLYGON'),
'is_point' : db_field.geom_type in ('POINT', 'MULTIPOINT'),
'num_zoom' : self.num_zoom,
'max_zoom' : self.max_zoom,
'min_zoom' : self.min_zoom,
'units' : self.units, 'max_resolution' : self.max_resolution,
'max_extent' : self.max_extent,
'modifiable' : self.modifiable,
'mouse_position' : self.mouse_position,
'scale_text' : self.scale_text,
'map_width' : self.map_width,
'map_height' : self.map_height,
'point_zoom' : self.point_zoom,
'srid' : self.map_srid,
'display_srid' : self.display_srid,
'wms_url' : self.wms_url,
'wms_layer' : self.wms_layer,
'wms_name' : self.wms_name,
'debug' : self.debug,
}
return OLMap
from django.contrib.gis import gdal
if gdal.HAS_GDAL:
if gdal.GDAL_VERSION >= (1, 7):
spherical_mercator_srid = 3857
else:
spherical_mercator_srid = 900913
class OSMGeoAdmin(GeoModelAdmin):
map_template = 'gis/admin/osm.html'
num_zoom = 20
map_srid = spherical_mercator_srid
max_extent = '-20037508,-20037508,20037508,20037508'
max_resolution = '156543.0339'
point_zoom = num_zoom - 6
units = 'm'
| true
| true
|
f709057a2d026d1629ccdc2f418c49b8caa8ddab
| 21,226
|
py
|
Python
|
sdk/python/pulumi_aws/imagebuilder/distribution_configuration.py
|
RafalSumislawski/pulumi-aws
|
7c8a335d327c173aa32c8b3d98816e760db329fa
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-11-10T16:33:40.000Z
|
2021-11-10T16:33:40.000Z
|
sdk/python/pulumi_aws/imagebuilder/distribution_configuration.py
|
RafalSumislawski/pulumi-aws
|
7c8a335d327c173aa32c8b3d98816e760db329fa
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/imagebuilder/distribution_configuration.py
|
RafalSumislawski/pulumi-aws
|
7c8a335d327c173aa32c8b3d98816e760db329fa
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DistributionConfigurationArgs', 'DistributionConfiguration']
@pulumi.input_type
class DistributionConfigurationArgs:
def __init__(__self__, *,
distributions: pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]],
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a DistributionConfiguration resource.
:param pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]] distributions: One or more configuration blocks with distribution settings. Detailed below.
:param pulumi.Input[str] description: Description to apply to the distributed AMI.
:param pulumi.Input[str] name: Name to apply to the distributed AMI.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags for the distribution configuration. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
"""
pulumi.set(__self__, "distributions", distributions)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def distributions(self) -> pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]:
"""
One or more configuration blocks with distribution settings. Detailed below.
"""
return pulumi.get(self, "distributions")
@distributions.setter
def distributions(self, value: pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]):
pulumi.set(self, "distributions", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description to apply to the distributed AMI.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name to apply to the distributed AMI.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Key-value map of resource tags for the distribution configuration. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _DistributionConfigurationState:
def __init__(__self__, *,
arn: Optional[pulumi.Input[str]] = None,
date_created: Optional[pulumi.Input[str]] = None,
date_updated: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
distributions: Optional[pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering DistributionConfiguration resources.
:param pulumi.Input[str] arn: (Required) Amazon Resource Name (ARN) of the distribution configuration.
:param pulumi.Input[str] date_created: Date the distribution configuration was created.
:param pulumi.Input[str] date_updated: Date the distribution configuration was updated.
:param pulumi.Input[str] description: Description to apply to the distributed AMI.
:param pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]] distributions: One or more configuration blocks with distribution settings. Detailed below.
:param pulumi.Input[str] name: Name to apply to the distributed AMI.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags for the distribution configuration. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider .
"""
if arn is not None:
pulumi.set(__self__, "arn", arn)
if date_created is not None:
pulumi.set(__self__, "date_created", date_created)
if date_updated is not None:
pulumi.set(__self__, "date_updated", date_updated)
if description is not None:
pulumi.set(__self__, "description", description)
if distributions is not None:
pulumi.set(__self__, "distributions", distributions)
if name is not None:
pulumi.set(__self__, "name", name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tags_all is not None:
pulumi.set(__self__, "tags_all", tags_all)
@property
@pulumi.getter
def arn(self) -> Optional[pulumi.Input[str]]:
"""
(Required) Amazon Resource Name (ARN) of the distribution configuration.
"""
return pulumi.get(self, "arn")
@arn.setter
def arn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "arn", value)
@property
@pulumi.getter(name="dateCreated")
def date_created(self) -> Optional[pulumi.Input[str]]:
"""
Date the distribution configuration was created.
"""
return pulumi.get(self, "date_created")
@date_created.setter
def date_created(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "date_created", value)
@property
@pulumi.getter(name="dateUpdated")
def date_updated(self) -> Optional[pulumi.Input[str]]:
"""
Date the distribution configuration was updated.
"""
return pulumi.get(self, "date_updated")
@date_updated.setter
def date_updated(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "date_updated", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description to apply to the distributed AMI.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def distributions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]]:
"""
One or more configuration blocks with distribution settings. Detailed below.
"""
return pulumi.get(self, "distributions")
@distributions.setter
def distributions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]]):
pulumi.set(self, "distributions", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name to apply to the distributed AMI.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Key-value map of resource tags for the distribution configuration. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A map of tags assigned to the resource, including those inherited from the provider .
"""
return pulumi.get(self, "tags_all")
@tags_all.setter
def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags_all", value)
class DistributionConfiguration(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
distributions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DistributionConfigurationDistributionArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Manages an Image Builder Distribution Configuration.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.imagebuilder.DistributionConfiguration("example", distributions=[aws.imagebuilder.DistributionConfigurationDistributionArgs(
ami_distribution_configuration=aws.imagebuilder.DistributionConfigurationDistributionAmiDistributionConfigurationArgs(
ami_tags={
"CostCenter": "IT",
},
launch_permission=aws.imagebuilder.DistributionConfigurationDistributionAmiDistributionConfigurationLaunchPermissionArgs(
user_ids=["123456789012"],
),
name="example-{{ imagebuilder:buildDate }}",
),
region="us-east-1",
)])
```
## Import
`aws_imagebuilder_distribution_configurations` resources can be imported by using the Amazon Resource Name (ARN), e.g.,
```sh
$ pulumi import aws:imagebuilder/distributionConfiguration:DistributionConfiguration example arn:aws:imagebuilder:us-east-1:123456789012:distribution-configuration/example
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: Description to apply to the distributed AMI.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DistributionConfigurationDistributionArgs']]]] distributions: One or more configuration blocks with distribution settings. Detailed below.
:param pulumi.Input[str] name: Name to apply to the distributed AMI.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags for the distribution configuration. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DistributionConfigurationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages an Image Builder Distribution Configuration.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.imagebuilder.DistributionConfiguration("example", distributions=[aws.imagebuilder.DistributionConfigurationDistributionArgs(
ami_distribution_configuration=aws.imagebuilder.DistributionConfigurationDistributionAmiDistributionConfigurationArgs(
ami_tags={
"CostCenter": "IT",
},
launch_permission=aws.imagebuilder.DistributionConfigurationDistributionAmiDistributionConfigurationLaunchPermissionArgs(
user_ids=["123456789012"],
),
name="example-{{ imagebuilder:buildDate }}",
),
region="us-east-1",
)])
```
## Import
`aws_imagebuilder_distribution_configurations` resources can be imported by using the Amazon Resource Name (ARN), e.g.,
```sh
$ pulumi import aws:imagebuilder/distributionConfiguration:DistributionConfiguration example arn:aws:imagebuilder:us-east-1:123456789012:distribution-configuration/example
```
:param str resource_name: The name of the resource.
:param DistributionConfigurationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DistributionConfigurationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
distributions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DistributionConfigurationDistributionArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DistributionConfigurationArgs.__new__(DistributionConfigurationArgs)
__props__.__dict__["description"] = description
if distributions is None and not opts.urn:
raise TypeError("Missing required property 'distributions'")
__props__.__dict__["distributions"] = distributions
__props__.__dict__["name"] = name
__props__.__dict__["tags"] = tags
__props__.__dict__["arn"] = None
__props__.__dict__["date_created"] = None
__props__.__dict__["date_updated"] = None
__props__.__dict__["tags_all"] = None
super(DistributionConfiguration, __self__).__init__(
'aws:imagebuilder/distributionConfiguration:DistributionConfiguration',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
arn: Optional[pulumi.Input[str]] = None,
date_created: Optional[pulumi.Input[str]] = None,
date_updated: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
distributions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DistributionConfigurationDistributionArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'DistributionConfiguration':
"""
Get an existing DistributionConfiguration resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] arn: (Required) Amazon Resource Name (ARN) of the distribution configuration.
:param pulumi.Input[str] date_created: Date the distribution configuration was created.
:param pulumi.Input[str] date_updated: Date the distribution configuration was updated.
:param pulumi.Input[str] description: Description to apply to the distributed AMI.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DistributionConfigurationDistributionArgs']]]] distributions: One or more configuration blocks with distribution settings. Detailed below.
:param pulumi.Input[str] name: Name to apply to the distributed AMI.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags for the distribution configuration. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider .
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DistributionConfigurationState.__new__(_DistributionConfigurationState)
__props__.__dict__["arn"] = arn
__props__.__dict__["date_created"] = date_created
__props__.__dict__["date_updated"] = date_updated
__props__.__dict__["description"] = description
__props__.__dict__["distributions"] = distributions
__props__.__dict__["name"] = name
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
return DistributionConfiguration(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
"""
(Required) Amazon Resource Name (ARN) of the distribution configuration.
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter(name="dateCreated")
def date_created(self) -> pulumi.Output[str]:
"""
Date the distribution configuration was created.
"""
return pulumi.get(self, "date_created")
@property
@pulumi.getter(name="dateUpdated")
def date_updated(self) -> pulumi.Output[str]:
"""
Date the distribution configuration was updated.
"""
return pulumi.get(self, "date_updated")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Description to apply to the distributed AMI.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def distributions(self) -> pulumi.Output[Sequence['outputs.DistributionConfigurationDistribution']]:
"""
One or more configuration blocks with distribution settings. Detailed below.
"""
return pulumi.get(self, "distributions")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name to apply to the distributed AMI.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Key-value map of resource tags for the distribution configuration. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
"""
A map of tags assigned to the resource, including those inherited from the provider .
"""
return pulumi.get(self, "tags_all")
| 46.143478
| 284
| 0.664751
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DistributionConfigurationArgs', 'DistributionConfiguration']
@pulumi.input_type
class DistributionConfigurationArgs:
def __init__(__self__, *,
distributions: pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]],
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
pulumi.set(__self__, "distributions", distributions)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def distributions(self) -> pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]:
return pulumi.get(self, "distributions")
@distributions.setter
def distributions(self, value: pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]):
pulumi.set(self, "distributions", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _DistributionConfigurationState:
def __init__(__self__, *,
arn: Optional[pulumi.Input[str]] = None,
date_created: Optional[pulumi.Input[str]] = None,
date_updated: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
distributions: Optional[pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
if arn is not None:
pulumi.set(__self__, "arn", arn)
if date_created is not None:
pulumi.set(__self__, "date_created", date_created)
if date_updated is not None:
pulumi.set(__self__, "date_updated", date_updated)
if description is not None:
pulumi.set(__self__, "description", description)
if distributions is not None:
pulumi.set(__self__, "distributions", distributions)
if name is not None:
pulumi.set(__self__, "name", name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tags_all is not None:
pulumi.set(__self__, "tags_all", tags_all)
@property
@pulumi.getter
def arn(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "arn")
@arn.setter
def arn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "arn", value)
@property
@pulumi.getter(name="dateCreated")
def date_created(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "date_created")
@date_created.setter
def date_created(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "date_created", value)
@property
@pulumi.getter(name="dateUpdated")
def date_updated(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "date_updated")
@date_updated.setter
def date_updated(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "date_updated", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def distributions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]]:
return pulumi.get(self, "distributions")
@distributions.setter
def distributions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DistributionConfigurationDistributionArgs']]]]):
pulumi.set(self, "distributions", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "tags_all")
@tags_all.setter
def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags_all", value)
class DistributionConfiguration(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
distributions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DistributionConfigurationDistributionArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
...
@overload
def __init__(__self__,
resource_name: str,
args: DistributionConfigurationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DistributionConfigurationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
distributions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DistributionConfigurationDistributionArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DistributionConfigurationArgs.__new__(DistributionConfigurationArgs)
__props__.__dict__["description"] = description
if distributions is None and not opts.urn:
raise TypeError("Missing required property 'distributions'")
__props__.__dict__["distributions"] = distributions
__props__.__dict__["name"] = name
__props__.__dict__["tags"] = tags
__props__.__dict__["arn"] = None
__props__.__dict__["date_created"] = None
__props__.__dict__["date_updated"] = None
__props__.__dict__["tags_all"] = None
super(DistributionConfiguration, __self__).__init__(
'aws:imagebuilder/distributionConfiguration:DistributionConfiguration',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
arn: Optional[pulumi.Input[str]] = None,
date_created: Optional[pulumi.Input[str]] = None,
date_updated: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
distributions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DistributionConfigurationDistributionArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'DistributionConfiguration':
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DistributionConfigurationState.__new__(_DistributionConfigurationState)
__props__.__dict__["arn"] = arn
__props__.__dict__["date_created"] = date_created
__props__.__dict__["date_updated"] = date_updated
__props__.__dict__["description"] = description
__props__.__dict__["distributions"] = distributions
__props__.__dict__["name"] = name
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
return DistributionConfiguration(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
return pulumi.get(self, "arn")
@property
@pulumi.getter(name="dateCreated")
def date_created(self) -> pulumi.Output[str]:
return pulumi.get(self, "date_created")
@property
@pulumi.getter(name="dateUpdated")
def date_updated(self) -> pulumi.Output[str]:
return pulumi.get(self, "date_updated")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "description")
@property
@pulumi.getter
def distributions(self) -> pulumi.Output[Sequence['outputs.DistributionConfigurationDistribution']]:
return pulumi.get(self, "distributions")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
return pulumi.get(self, "tags_all")
| true
| true
|
f70906aedaf542b287443373bffe8401a5d3591e
| 882
|
py
|
Python
|
pyx12/map_override.py
|
arenius/pyx12
|
537493deaa0b8e18a3fa72eb1b3eeae9ef043b11
|
[
"BSD-3-Clause"
] | 120
|
2015-01-30T07:17:26.000Z
|
2022-03-25T16:42:15.000Z
|
pyx12/map_override.py
|
arenius/pyx12
|
537493deaa0b8e18a3fa72eb1b3eeae9ef043b11
|
[
"BSD-3-Clause"
] | 43
|
2015-02-12T18:42:26.000Z
|
2021-12-12T22:22:20.000Z
|
pyx12/map_override.py
|
arenius/pyx12
|
537493deaa0b8e18a3fa72eb1b3eeae9ef043b11
|
[
"BSD-3-Clause"
] | 85
|
2015-02-12T16:44:28.000Z
|
2022-03-24T20:20:46.000Z
|
######################################################################
# Copyright
# John Holland <john@zoner.org>
# All rights reserved.
#
# This software is licensed as described in the file LICENSE.txt, which
# you should have received as part of this distribution.
#
######################################################################
"""
Apply local overrides to the current map.
Overrides defined in a xml document.
NOT IMPLEMENTED
"""
class map_override(object):
"""
Apply local overrides to the current map. Overrides defined in a xml document.
"""
def __init__(self, map_root, override_file, icvn, vriic, fic):
pass
def _set_value(self, map_root, path, variable, value):
pass
def _append_value(self, map_root, path, variable, value):
pass
def _reset_list(self, map_root, path, variable, value):
pass
| 25.941176
| 82
| 0.578231
|
class map_override(object):
def __init__(self, map_root, override_file, icvn, vriic, fic):
pass
def _set_value(self, map_root, path, variable, value):
pass
def _append_value(self, map_root, path, variable, value):
pass
def _reset_list(self, map_root, path, variable, value):
pass
| true
| true
|
f7090725fa81666eade7bf5f5d55c5e7ff18d8ce
| 3,586
|
bzl
|
Python
|
haskell/private/dependencies.bzl
|
joneshf/rules_haskell
|
77cf42c30a424f7e701c72c5feb31f49b5230351
|
[
"Apache-2.0"
] | null | null | null |
haskell/private/dependencies.bzl
|
joneshf/rules_haskell
|
77cf42c30a424f7e701c72c5feb31f49b5230351
|
[
"Apache-2.0"
] | null | null | null |
haskell/private/dependencies.bzl
|
joneshf/rules_haskell
|
77cf42c30a424f7e701c72c5feb31f49b5230351
|
[
"Apache-2.0"
] | null | null | null |
load(
"//haskell:providers.bzl",
"HaskellInfo",
"HaskellLibraryInfo",
)
load(":private/set.bzl", "set")
def gather_dep_info(ctx, deps):
"""Collapse dependencies into a single `HaskellInfo`.
Args:
ctx: Rule context.
deps: deps attribute.
Returns:
HaskellInfo: Unified information about all dependencies.
"""
package_databases = depset(transitive = [
dep[HaskellInfo].package_databases
for dep in deps
if HaskellInfo in dep
])
static_libraries = depset(transitive = [
dep[HaskellInfo].static_libraries
for dep in deps
if HaskellInfo in dep
])
dynamic_libraries = depset(transitive = [
dep[HaskellInfo].dynamic_libraries
for dep in deps
if HaskellInfo in dep
])
interface_dirs = depset(transitive = [
dep[HaskellInfo].interface_dirs
for dep in deps
if HaskellInfo in dep
])
source_files = depset(transitive = [
dep[HaskellInfo].source_files
for dep in deps
if HaskellInfo in dep
])
import_dirs = set.empty()
for dep in deps:
if HaskellInfo in dep:
import_dirs = set.mutable_union(import_dirs, dep[HaskellInfo].import_dirs)
extra_source_files = depset(transitive = [
dep[HaskellInfo].extra_source_files
for dep in deps
if HaskellInfo in dep
])
compile_flags = []
for dep in deps:
if HaskellInfo in dep:
compile_flags.extend(dep[HaskellInfo].compile_flags)
acc = HaskellInfo(
package_databases = package_databases,
version_macros = set.empty(),
static_libraries = static_libraries,
dynamic_libraries = dynamic_libraries,
interface_dirs = interface_dirs,
source_files = source_files,
import_dirs = import_dirs,
extra_source_files = extra_source_files,
compile_flags = compile_flags,
)
for dep in deps:
if HaskellInfo in dep:
binfo = dep[HaskellInfo]
if HaskellLibraryInfo not in dep:
fail("Target {0} cannot depend on binary".format(ctx.attr.name))
acc = HaskellInfo(
package_databases = acc.package_databases,
version_macros = set.mutable_union(acc.version_macros, binfo.version_macros),
static_libraries = depset(transitive = [acc.static_libraries, binfo.static_libraries]),
dynamic_libraries = acc.dynamic_libraries,
interface_dirs = acc.interface_dirs,
import_dirs = import_dirs,
compile_flags = compile_flags,
extra_source_files = extra_source_files,
source_files = source_files,
)
elif CcInfo in dep and HaskellInfo not in dep:
# The final link of a binary must include all static libraries we
# depend on, including transitives ones. Theses libs are provided
# in the `CcInfo` provider.
acc = HaskellInfo(
package_databases = acc.package_databases,
version_macros = acc.version_macros,
import_dirs = acc.import_dirs,
source_files = acc.source_files,
compile_flags = acc.compile_flags,
static_libraries = acc.static_libraries,
dynamic_libraries = acc.dynamic_libraries,
extra_source_files = acc.extra_source_files,
interface_dirs = acc.interface_dirs,
)
return acc
| 33.514019
| 103
| 0.617959
|
load(
"//haskell:providers.bzl",
"HaskellInfo",
"HaskellLibraryInfo",
)
load(":private/set.bzl", "set")
def gather_dep_info(ctx, deps):
package_databases = depset(transitive = [
dep[HaskellInfo].package_databases
for dep in deps
if HaskellInfo in dep
])
static_libraries = depset(transitive = [
dep[HaskellInfo].static_libraries
for dep in deps
if HaskellInfo in dep
])
dynamic_libraries = depset(transitive = [
dep[HaskellInfo].dynamic_libraries
for dep in deps
if HaskellInfo in dep
])
interface_dirs = depset(transitive = [
dep[HaskellInfo].interface_dirs
for dep in deps
if HaskellInfo in dep
])
source_files = depset(transitive = [
dep[HaskellInfo].source_files
for dep in deps
if HaskellInfo in dep
])
import_dirs = set.empty()
for dep in deps:
if HaskellInfo in dep:
import_dirs = set.mutable_union(import_dirs, dep[HaskellInfo].import_dirs)
extra_source_files = depset(transitive = [
dep[HaskellInfo].extra_source_files
for dep in deps
if HaskellInfo in dep
])
compile_flags = []
for dep in deps:
if HaskellInfo in dep:
compile_flags.extend(dep[HaskellInfo].compile_flags)
acc = HaskellInfo(
package_databases = package_databases,
version_macros = set.empty(),
static_libraries = static_libraries,
dynamic_libraries = dynamic_libraries,
interface_dirs = interface_dirs,
source_files = source_files,
import_dirs = import_dirs,
extra_source_files = extra_source_files,
compile_flags = compile_flags,
)
for dep in deps:
if HaskellInfo in dep:
binfo = dep[HaskellInfo]
if HaskellLibraryInfo not in dep:
fail("Target {0} cannot depend on binary".format(ctx.attr.name))
acc = HaskellInfo(
package_databases = acc.package_databases,
version_macros = set.mutable_union(acc.version_macros, binfo.version_macros),
static_libraries = depset(transitive = [acc.static_libraries, binfo.static_libraries]),
dynamic_libraries = acc.dynamic_libraries,
interface_dirs = acc.interface_dirs,
import_dirs = import_dirs,
compile_flags = compile_flags,
extra_source_files = extra_source_files,
source_files = source_files,
)
elif CcInfo in dep and HaskellInfo not in dep:
acc = HaskellInfo(
package_databases = acc.package_databases,
version_macros = acc.version_macros,
import_dirs = acc.import_dirs,
source_files = acc.source_files,
compile_flags = acc.compile_flags,
static_libraries = acc.static_libraries,
dynamic_libraries = acc.dynamic_libraries,
extra_source_files = acc.extra_source_files,
interface_dirs = acc.interface_dirs,
)
return acc
| true
| true
|
f709097e17db2a904f99b4edcbc7bf034dbc3f63
| 2,118
|
py
|
Python
|
cvat/apps/engine/admin.py
|
netanelbarel/improvedCvat
|
ff2894d3b3757a5e080d3130d6875cfd14201bf5
|
[
"MIT"
] | null | null | null |
cvat/apps/engine/admin.py
|
netanelbarel/improvedCvat
|
ff2894d3b3757a5e080d3130d6875cfd14201bf5
|
[
"MIT"
] | 6
|
2020-03-25T11:49:12.000Z
|
2020-06-06T01:35:38.000Z
|
cvat/apps/engine/admin.py
|
netanelbarel/improvedCvat
|
ff2894d3b3757a5e080d3130d6875cfd14201bf5
|
[
"MIT"
] | 1
|
2020-03-25T11:40:48.000Z
|
2020-03-25T11:40:48.000Z
|
# Copyright (C) 2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
from django.contrib import admin
from .models import Task, Segment, Job, Label, AttributeSpec
class JobInline(admin.TabularInline):
model = Job
can_delete = False
# Don't show extra lines to add an object
def has_add_permission(self, request, object=None):
return False
class SegmentInline(admin.TabularInline):
model = Segment
show_change_link = True
readonly_fields = ('start_frame', 'stop_frame')
can_delete = False
# Don't show extra lines to add an object
def has_add_permission(self, request, object=None):
return False
class AttributeSpecInline(admin.TabularInline):
model = AttributeSpec
extra = 0
max_num = None
class LabelInline(admin.TabularInline):
model = Label
show_change_link = True
extra = 0
max_num = None
class LabelAdmin(admin.ModelAdmin):
# Don't show on admin index page
def has_module_permission(self, request):
return False
inlines = [
AttributeSpecInline
]
class SegmentAdmin(admin.ModelAdmin):
# Don't show on admin index page
def has_module_permission(self, request):
return False
inlines = [
JobInline
]
class TaskAdmin(admin.ModelAdmin):
date_hierarchy = 'updated_date'
readonly_fields = ('size', 'path', 'created_date', 'updated_date',
'overlap', 'flipped')
list_display = ('name', 'mode', 'owner', 'assignee', 'created_date', 'updated_date')
search_fields = ('name', 'mode', 'owner__username', 'owner__first_name',
'owner__last_name', 'owner__email', 'assignee__username', 'assignee__first_name',
'assignee__last_name')
inlines = [
SegmentInline,
LabelInline
]
# Don't allow to add a task because it isn't trivial operation
def has_add_permission(self, request):
return False
admin.site.register(Task, TaskAdmin)
admin.site.register(Segment, SegmentAdmin)
admin.site.register(Label, LabelAdmin)
| 27.506494
| 90
| 0.663362
|
from django.contrib import admin
from .models import Task, Segment, Job, Label, AttributeSpec
class JobInline(admin.TabularInline):
model = Job
can_delete = False
def has_add_permission(self, request, object=None):
return False
class SegmentInline(admin.TabularInline):
model = Segment
show_change_link = True
readonly_fields = ('start_frame', 'stop_frame')
can_delete = False
# Don't show extra lines to add an object
def has_add_permission(self, request, object=None):
return False
class AttributeSpecInline(admin.TabularInline):
model = AttributeSpec
extra = 0
max_num = None
class LabelInline(admin.TabularInline):
model = Label
show_change_link = True
extra = 0
max_num = None
class LabelAdmin(admin.ModelAdmin):
def has_module_permission(self, request):
return False
inlines = [
AttributeSpecInline
]
class SegmentAdmin(admin.ModelAdmin):
# Don't show on admin index page
def has_module_permission(self, request):
return False
inlines = [
JobInline
]
class TaskAdmin(admin.ModelAdmin):
date_hierarchy = 'updated_date'
readonly_fields = ('size', 'path', 'created_date', 'updated_date',
'overlap', 'flipped')
list_display = ('name', 'mode', 'owner', 'assignee', 'created_date', 'updated_date')
search_fields = ('name', 'mode', 'owner__username', 'owner__first_name',
'owner__last_name', 'owner__email', 'assignee__username', 'assignee__first_name',
'assignee__last_name')
inlines = [
SegmentInline,
LabelInline
]
def has_add_permission(self, request):
return False
admin.site.register(Task, TaskAdmin)
admin.site.register(Segment, SegmentAdmin)
admin.site.register(Label, LabelAdmin)
| true
| true
|
f70909e149d95c858ea8bf08919cce01258289e8
| 24
|
py
|
Python
|
me/maurer/__init__.py
|
amaurer/alarmdecoder-implementation
|
9b090064fd23f20ec707a8d388dc910dd260621a
|
[
"Apache-2.0"
] | 1
|
2018-03-08T04:25:31.000Z
|
2018-03-08T04:25:31.000Z
|
me/maurer/__init__.py
|
amaurer/alarmdecoder-implementation
|
9b090064fd23f20ec707a8d388dc910dd260621a
|
[
"Apache-2.0"
] | null | null | null |
me/maurer/__init__.py
|
amaurer/alarmdecoder-implementation
|
9b090064fd23f20ec707a8d388dc910dd260621a
|
[
"Apache-2.0"
] | null | null | null |
__all__ = ['ZoneMapper']
| 24
| 24
| 0.708333
|
__all__ = ['ZoneMapper']
| true
| true
|
f7090a9af584868b539a64c9c814074daf94e5d1
| 5,854
|
py
|
Python
|
python/dazl/model/core.py
|
DACH-NY/dazl-client
|
56c8b1be047415b2bcb35b6558de4a780a402458
|
[
"Apache-2.0"
] | null | null | null |
python/dazl/model/core.py
|
DACH-NY/dazl-client
|
56c8b1be047415b2bcb35b6558de4a780a402458
|
[
"Apache-2.0"
] | null | null | null |
python/dazl/model/core.py
|
DACH-NY/dazl-client
|
56c8b1be047415b2bcb35b6558de4a780a402458
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2017-2022 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
"""
This module has been relocated to ``dazl.client``, ``dazl.damlast``, ``dazl.protocols``, or
``dazl.query``.
"""
from typing import TYPE_CHECKING, TypeVar, Union
import warnings
from ..client.errors import ConfigurationError, DazlPartyMissingError, UnknownTemplateWarning
from ..client.state import (
ContractContextualData,
ContractContextualDataCollection,
ContractsHistoricalState,
ContractsState,
)
from ..damlast.daml_lf_1 import TypeConName
from ..damlast.pkgfile import Dar
from ..prim import ContractData, ContractId as ContractId_, DazlError, DazlWarning, Party
from ..prim.errors import DazlImportError
from ..protocols.errors import ConnectionTimeoutError, UserTerminateRequest
from ..query import ContractMatch
from ..util.proc_util import ProcessDiedException
if TYPE_CHECKING:
from .types import Type, TypeReference
T = TypeVar("T")
__all__ = [
"ConfigurationError",
"ConnectionTimeoutError",
"ContractContextualData",
"ContractContextualDataCollection",
"ContractData",
"ContractId",
"ContractMatch",
"ContractsHistoricalState",
"ContractsState",
"Dar",
"DazlError",
"DazlImportError",
"DazlPartyMissingError",
"DazlWarning",
"Party",
"ProcessDiedException",
"UnknownTemplateWarning",
"UserTerminateRequest",
]
class ContractId(ContractId_):
__slots__ = ("_value_type_deprecated",)
_value_type_deprecated: "TypeReference"
def __init__(self, contract_id: str, template_id: "Union[str, Type, TypeConName]"):
warnings.warn(
"dazl.model.core.ContractId is deprecated; use dazl.prim.ContractId instead.",
DeprecationWarning,
stacklevel=2,
)
from ..damlast.compat import parse_template
if not isinstance(contract_id, str):
raise ValueError("contract_id must be a string")
value = contract_id
value_type, value_type_deprecated = parse_template(template_id)
super().__init__(value_type, value)
object.__setattr__(self, "_value_type_deprecated", value_type_deprecated)
@property
def contract_id(self) -> str:
"""
Get the raw contract ID value (for example, ``"#4:1"``).
"""
warnings.warn(
"ContractId.contract_id is deprecated; use ContractId.value instead.",
DeprecationWarning,
stacklevel=2,
)
return self.value
@property
def template_id(self) -> "TypeReference":
"""
Get the type of template that is pointed to by this :class:`ContractId` as a
:class:`TypeReference`. Note that usage of :class:`Type` and :class:`TypeReference` are
deprecated, and :meth:`value_type` should be used instead.
As of dazl 7.3.0, the :class:`TemplateId` is always normalized to a :class:`TypeReference`,
regardless of what the :class:`ContractId` was constructed with.
"""
warnings.warn(
"ContractId.template_id is deprecated; use ContractId.value_type instead.",
DeprecationWarning,
stacklevel=2,
)
return self._value_type_deprecated
def exercise(self, choice_name, arguments=None):
"""
Create an :class:`ExerciseCommand` that represents the result of exercising a choice on this
contract with the specified choice.
:param choice_name:
The name of the choice to exercise.
:param arguments:
(optional) A ``dict`` of named values to send as parameters to the choice exercise.
"""
warnings.warn(
"ContractId.exercise is deprecated; prefer calling dazl.ledger.Connection.exercise or "
"dazl.client.PartyClient.submit_exercise, or use dazl.ledger.ExerciseCommand instead.",
DeprecationWarning,
stacklevel=2,
)
from .writing import ExerciseCommand
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
return ExerciseCommand(self, choice_name, arguments=arguments)
def replace(self, contract_id=None, template_id=None):
"""
Return a new :class:`ContractId` instance replacing specified fields with values.
"""
warnings.warn(
"ContractId.replace is deprecated; simply construct a ContractId with the desired "
"values instead.",
DeprecationWarning,
stacklevel=2,
)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
return ContractId(
contract_id if contract_id is not None else self.value,
template_id if template_id is not None else self.value_type,
)
def for_json(self):
"""
Return the JSON representation of this contract. This is currently just the contract ID
string itself.
"""
return self.value
class CommandTimeoutError(DazlError):
"""
Raised when a corresponding event for a command was not seen in the appropriate time window.
"""
def __init__(self):
warnings.warn(
"This error is never raised; this symbol will be removed in dazl v9",
DeprecationWarning,
stacklevel=2,
)
class ConnectionClosedError(DazlError):
"""
Raised when trying to do something that requires a connection after connection pools have been
closed.
"""
def __init__(self):
warnings.warn(
"This error is never raised; this symbol will be removed in dazl v9",
DeprecationWarning,
stacklevel=2,
)
| 33.261364
| 102
| 0.65972
|
from typing import TYPE_CHECKING, TypeVar, Union
import warnings
from ..client.errors import ConfigurationError, DazlPartyMissingError, UnknownTemplateWarning
from ..client.state import (
ContractContextualData,
ContractContextualDataCollection,
ContractsHistoricalState,
ContractsState,
)
from ..damlast.daml_lf_1 import TypeConName
from ..damlast.pkgfile import Dar
from ..prim import ContractData, ContractId as ContractId_, DazlError, DazlWarning, Party
from ..prim.errors import DazlImportError
from ..protocols.errors import ConnectionTimeoutError, UserTerminateRequest
from ..query import ContractMatch
from ..util.proc_util import ProcessDiedException
if TYPE_CHECKING:
from .types import Type, TypeReference
T = TypeVar("T")
__all__ = [
"ConfigurationError",
"ConnectionTimeoutError",
"ContractContextualData",
"ContractContextualDataCollection",
"ContractData",
"ContractId",
"ContractMatch",
"ContractsHistoricalState",
"ContractsState",
"Dar",
"DazlError",
"DazlImportError",
"DazlPartyMissingError",
"DazlWarning",
"Party",
"ProcessDiedException",
"UnknownTemplateWarning",
"UserTerminateRequest",
]
class ContractId(ContractId_):
__slots__ = ("_value_type_deprecated",)
_value_type_deprecated: "TypeReference"
def __init__(self, contract_id: str, template_id: "Union[str, Type, TypeConName]"):
warnings.warn(
"dazl.model.core.ContractId is deprecated; use dazl.prim.ContractId instead.",
DeprecationWarning,
stacklevel=2,
)
from ..damlast.compat import parse_template
if not isinstance(contract_id, str):
raise ValueError("contract_id must be a string")
value = contract_id
value_type, value_type_deprecated = parse_template(template_id)
super().__init__(value_type, value)
object.__setattr__(self, "_value_type_deprecated", value_type_deprecated)
@property
def contract_id(self) -> str:
warnings.warn(
"ContractId.contract_id is deprecated; use ContractId.value instead.",
DeprecationWarning,
stacklevel=2,
)
return self.value
@property
def template_id(self) -> "TypeReference":
warnings.warn(
"ContractId.template_id is deprecated; use ContractId.value_type instead.",
DeprecationWarning,
stacklevel=2,
)
return self._value_type_deprecated
def exercise(self, choice_name, arguments=None):
warnings.warn(
"ContractId.exercise is deprecated; prefer calling dazl.ledger.Connection.exercise or "
"dazl.client.PartyClient.submit_exercise, or use dazl.ledger.ExerciseCommand instead.",
DeprecationWarning,
stacklevel=2,
)
from .writing import ExerciseCommand
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
return ExerciseCommand(self, choice_name, arguments=arguments)
def replace(self, contract_id=None, template_id=None):
warnings.warn(
"ContractId.replace is deprecated; simply construct a ContractId with the desired "
"values instead.",
DeprecationWarning,
stacklevel=2,
)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
return ContractId(
contract_id if contract_id is not None else self.value,
template_id if template_id is not None else self.value_type,
)
def for_json(self):
return self.value
class CommandTimeoutError(DazlError):
def __init__(self):
warnings.warn(
"This error is never raised; this symbol will be removed in dazl v9",
DeprecationWarning,
stacklevel=2,
)
class ConnectionClosedError(DazlError):
def __init__(self):
warnings.warn(
"This error is never raised; this symbol will be removed in dazl v9",
DeprecationWarning,
stacklevel=2,
)
| true
| true
|
f7090b2aa80fae95376267bfcca6f9bcf9a9ac8a
| 956
|
py
|
Python
|
hand_net/unet/unet_model.py
|
clearsky767/examples
|
d6c744061ba5ed56088af43edb171990c6942efd
|
[
"BSD-3-Clause"
] | null | null | null |
hand_net/unet/unet_model.py
|
clearsky767/examples
|
d6c744061ba5ed56088af43edb171990c6942efd
|
[
"BSD-3-Clause"
] | null | null | null |
hand_net/unet/unet_model.py
|
clearsky767/examples
|
d6c744061ba5ed56088af43edb171990c6942efd
|
[
"BSD-3-Clause"
] | null | null | null |
# full assembly of the sub-parts to form the complete net
import torch.nn.functional as F
from .unet_parts import *
class UNet(nn.Module):
def __init__(self, n_channels, n_classes):
super(UNet, self).__init__()
self.inc = inconv(n_channels, 64)
self.down1 = down(64, 128)
self.down2 = down(128, 256)
self.down3 = down(256, 512)
self.down4 = down(512, 512)
self.up1 = up(1024, 256)
self.up2 = up(512, 128)
self.up3 = up(256, 64)
self.up4 = up(128, 64)
self.outc = outconv(64, n_classes)
self.sig = nn.Sigmoid()
def forward(self, x):
x1 = self.inc(x)
x2 = self.down1(x1)
x3 = self.down2(x2)
x4 = self.down3(x3)
x5 = self.down4(x4)
x = self.up1(x5, x4)
x = self.up2(x, x3)
x = self.up3(x, x2)
x = self.up4(x, x1)
x = self.outc(x)
x = self.sig(x)
return x
| 27.314286
| 57
| 0.535565
|
import torch.nn.functional as F
from .unet_parts import *
class UNet(nn.Module):
def __init__(self, n_channels, n_classes):
super(UNet, self).__init__()
self.inc = inconv(n_channels, 64)
self.down1 = down(64, 128)
self.down2 = down(128, 256)
self.down3 = down(256, 512)
self.down4 = down(512, 512)
self.up1 = up(1024, 256)
self.up2 = up(512, 128)
self.up3 = up(256, 64)
self.up4 = up(128, 64)
self.outc = outconv(64, n_classes)
self.sig = nn.Sigmoid()
def forward(self, x):
x1 = self.inc(x)
x2 = self.down1(x1)
x3 = self.down2(x2)
x4 = self.down3(x3)
x5 = self.down4(x4)
x = self.up1(x5, x4)
x = self.up2(x, x3)
x = self.up3(x, x2)
x = self.up4(x, x1)
x = self.outc(x)
x = self.sig(x)
return x
| true
| true
|
f7090b86c5ae735d7f01c2e87f2bb1515113d44d
| 4,104
|
py
|
Python
|
adafruit_register/i2c_struct.py
|
jepler/Adafruit_CircuitPython_Register
|
9f86b5179936bcb81d9765de2fe25c140b42036f
|
[
"MIT"
] | 1
|
2020-09-27T20:08:57.000Z
|
2020-09-27T20:08:57.000Z
|
adafruit_register/i2c_struct.py
|
jepler/Adafruit_CircuitPython_Register
|
9f86b5179936bcb81d9765de2fe25c140b42036f
|
[
"MIT"
] | null | null | null |
adafruit_register/i2c_struct.py
|
jepler/Adafruit_CircuitPython_Register
|
9f86b5179936bcb81d9765de2fe25c140b42036f
|
[
"MIT"
] | null | null | null |
# The MIT License (MIT)
#
# Copyright (c) 2016 Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# pylint: disable=too-few-public-methods
"""
`adafruit_register.i2c_struct`
====================================================
Generic structured registers based on `struct`
* Author(s): Scott Shawcroft
"""
try:
import struct
except ImportError:
import ustruct as struct
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_Register.git"
class Struct:
"""
Arbitrary structure register that is readable and writeable.
Values are tuples that map to the values in the defined struct. See struct
module documentation for struct format string and its possible value types.
:param int register_address: The register address to read the bit from
:param type struct_format: The struct format string for this register.
"""
def __init__(self, register_address, struct_format):
self.format = struct_format
self.buffer = bytearray(1 + struct.calcsize(self.format))
self.buffer[0] = register_address
def __get__(self, obj, objtype=None):
with obj.i2c_device as i2c:
i2c.write_then_readinto(self.buffer, self.buffer, out_end=1, in_start=1)
return struct.unpack_from(self.format, memoryview(self.buffer)[1:])
def __set__(self, obj, value):
struct.pack_into(self.format, self.buffer, 1, *value)
with obj.i2c_device as i2c:
i2c.write(self.buffer)
class UnaryStruct:
"""
Arbitrary single value structure register that is readable and writeable.
Values map to the first value in the defined struct. See struct
module documentation for struct format string and its possible value types.
:param int register_address: The register address to read the bit from
:param type struct_format: The struct format string for this register.
"""
def __init__(self, register_address, struct_format):
self.format = struct_format
self.address = register_address
def __get__(self, obj, objtype=None):
buf = bytearray(1 + struct.calcsize(self.format))
buf[0] = self.address
with obj.i2c_device as i2c:
i2c.write_then_readinto(buf, buf, out_end=1, in_start=1)
return struct.unpack_from(self.format, buf, 1)[0]
def __set__(self, obj, value):
buf = bytearray(1 + struct.calcsize(self.format))
buf[0] = self.address
struct.pack_into(self.format, buf, 1, value)
with obj.i2c_device as i2c:
i2c.write(buf)
class ROUnaryStruct(UnaryStruct):
"""
Arbitrary single value structure register that is read-only.
Values map to the first value in the defined struct. See struct
module documentation for struct format string and its possible value types.
:param int register_address: The register address to read the bit from
:param type struct_format: The struct format string for this register.
"""
def __set__(self, obj, value):
raise AttributeError()
| 36.972973
| 84
| 0.711014
|
try:
import struct
except ImportError:
import ustruct as struct
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_Register.git"
class Struct:
def __init__(self, register_address, struct_format):
self.format = struct_format
self.buffer = bytearray(1 + struct.calcsize(self.format))
self.buffer[0] = register_address
def __get__(self, obj, objtype=None):
with obj.i2c_device as i2c:
i2c.write_then_readinto(self.buffer, self.buffer, out_end=1, in_start=1)
return struct.unpack_from(self.format, memoryview(self.buffer)[1:])
def __set__(self, obj, value):
struct.pack_into(self.format, self.buffer, 1, *value)
with obj.i2c_device as i2c:
i2c.write(self.buffer)
class UnaryStruct:
def __init__(self, register_address, struct_format):
self.format = struct_format
self.address = register_address
def __get__(self, obj, objtype=None):
buf = bytearray(1 + struct.calcsize(self.format))
buf[0] = self.address
with obj.i2c_device as i2c:
i2c.write_then_readinto(buf, buf, out_end=1, in_start=1)
return struct.unpack_from(self.format, buf, 1)[0]
def __set__(self, obj, value):
buf = bytearray(1 + struct.calcsize(self.format))
buf[0] = self.address
struct.pack_into(self.format, buf, 1, value)
with obj.i2c_device as i2c:
i2c.write(buf)
class ROUnaryStruct(UnaryStruct):
def __set__(self, obj, value):
raise AttributeError()
| true
| true
|
f7090cb598007accc88c9bed1656ea701bed8a44
| 2,682
|
py
|
Python
|
dbaas/physical/tests/test_commands.py
|
amintasvrp/database-as-a-service
|
8221df604f9252ddf877cd2216bdf1e3f76220ba
|
[
"BSD-3-Clause"
] | 303
|
2015-01-08T10:35:54.000Z
|
2022-02-28T08:54:06.000Z
|
dbaas/physical/tests/test_commands.py
|
nouraellm/database-as-a-service
|
5e655c9347bea991b7218a01549f5e44f161d7be
|
[
"BSD-3-Clause"
] | 124
|
2015-01-14T12:56:15.000Z
|
2022-03-22T20:45:11.000Z
|
dbaas/physical/tests/test_commands.py
|
nouraellm/database-as-a-service
|
5e655c9347bea991b7218a01549f5e44f161d7be
|
[
"BSD-3-Clause"
] | 110
|
2015-01-02T11:59:48.000Z
|
2022-02-28T08:54:06.000Z
|
from unittest import TestCase
from model_mommy import mommy
from physical.commands import HostCommandOL6, HostCommandOL7
class CommandsBaseTestCase(object):
OS_VERSION = ''
HOST_COMMAND_CLASS = None
EXPECTED_CMD_TMPL = ''
def setUp(self):
self.host = mommy.make(
'Host',
os_description='OL {}'.format(self.OS_VERSION)
)
self.instance = mommy.make('Instance', hostname=self.host)
def test_is_instance(self):
self.assertTrue(
isinstance(self.host.commands, self.HOST_COMMAND_CLASS)
)
def test_start(self):
cmd = self.host.commands.exec_service_command(
service_name='fake_service_name',
action='fake_start'
)
self.assertEqual(
cmd,
self.EXPECTED_CMD_TMPL.format(
service_name='fake_service_name',
action='fake_start'
)
)
def test_stop(self):
cmd = self.host.commands.exec_service_command(
service_name='fake_service_name',
action='fake_stop'
)
self.assertEqual(
cmd,
self.EXPECTED_CMD_TMPL.format(
service_name='fake_service_name',
action='fake_stop'
)
)
def test_start_no_output(self):
cmd = self.host.commands.exec_service_command(
service_name='fake_service_name',
action='fake_start',
no_output=True
)
expected_cmd = '{} > /dev/null'.format(
self.EXPECTED_CMD_TMPL.format(
service_name='fake_service_name',
action='fake_start'
)
)
self.assertEqual(
cmd,
expected_cmd
)
def test_stop_no_output(self):
cmd = self.host.commands.exec_service_command(
service_name='fake_service_name',
action='fake_stop',
no_output=True
)
expected_cmd = '{} > /dev/null'.format(
self.EXPECTED_CMD_TMPL.format(
service_name='fake_service_name',
action='fake_stop'
)
)
self.assertEqual(
cmd,
expected_cmd
)
class CustomCommandOL6TestCase(CommandsBaseTestCase, TestCase):
OS_VERSION = '6.10'
HOST_COMMAND_CLASS = HostCommandOL6
EXPECTED_CMD_TMPL = '/etc/init.d/{service_name} {action}'
class CustomCommandOL7TestCase(CommandsBaseTestCase, TestCase):
OS_VERSION = '7.10'
HOST_COMMAND_CLASS = HostCommandOL7
EXPECTED_CMD_TMPL = 'sudo systemctl {action} {service_name}.service'
| 27.090909
| 72
| 0.58352
|
from unittest import TestCase
from model_mommy import mommy
from physical.commands import HostCommandOL6, HostCommandOL7
class CommandsBaseTestCase(object):
OS_VERSION = ''
HOST_COMMAND_CLASS = None
EXPECTED_CMD_TMPL = ''
def setUp(self):
self.host = mommy.make(
'Host',
os_description='OL {}'.format(self.OS_VERSION)
)
self.instance = mommy.make('Instance', hostname=self.host)
def test_is_instance(self):
self.assertTrue(
isinstance(self.host.commands, self.HOST_COMMAND_CLASS)
)
def test_start(self):
cmd = self.host.commands.exec_service_command(
service_name='fake_service_name',
action='fake_start'
)
self.assertEqual(
cmd,
self.EXPECTED_CMD_TMPL.format(
service_name='fake_service_name',
action='fake_start'
)
)
def test_stop(self):
cmd = self.host.commands.exec_service_command(
service_name='fake_service_name',
action='fake_stop'
)
self.assertEqual(
cmd,
self.EXPECTED_CMD_TMPL.format(
service_name='fake_service_name',
action='fake_stop'
)
)
def test_start_no_output(self):
cmd = self.host.commands.exec_service_command(
service_name='fake_service_name',
action='fake_start',
no_output=True
)
expected_cmd = '{} > /dev/null'.format(
self.EXPECTED_CMD_TMPL.format(
service_name='fake_service_name',
action='fake_start'
)
)
self.assertEqual(
cmd,
expected_cmd
)
def test_stop_no_output(self):
cmd = self.host.commands.exec_service_command(
service_name='fake_service_name',
action='fake_stop',
no_output=True
)
expected_cmd = '{} > /dev/null'.format(
self.EXPECTED_CMD_TMPL.format(
service_name='fake_service_name',
action='fake_stop'
)
)
self.assertEqual(
cmd,
expected_cmd
)
class CustomCommandOL6TestCase(CommandsBaseTestCase, TestCase):
OS_VERSION = '6.10'
HOST_COMMAND_CLASS = HostCommandOL6
EXPECTED_CMD_TMPL = '/etc/init.d/{service_name} {action}'
class CustomCommandOL7TestCase(CommandsBaseTestCase, TestCase):
OS_VERSION = '7.10'
HOST_COMMAND_CLASS = HostCommandOL7
EXPECTED_CMD_TMPL = 'sudo systemctl {action} {service_name}.service'
| true
| true
|
f7090d5e70f2d61e6c69b57be05d5e8ab3aee55e
| 3,566
|
py
|
Python
|
test/test_art_resize.py
|
atavakoulnia/beets
|
006d24c02e805bcabb4b99c7cf9945e3b109df15
|
[
"MIT"
] | 1
|
2020-03-03T05:46:47.000Z
|
2020-03-03T05:46:47.000Z
|
test/test_art_resize.py
|
atavakoulnia/beets
|
006d24c02e805bcabb4b99c7cf9945e3b109df15
|
[
"MIT"
] | 3
|
2020-07-12T01:22:23.000Z
|
2020-07-12T01:22:25.000Z
|
test/test_art_resize.py
|
atavakoulnia/beets
|
006d24c02e805bcabb4b99c7cf9945e3b109df15
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2020, David Swarbrick.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Tests for image resizing based on filesize."""
from __future__ import division, absolute_import, print_function
import unittest
import os
from test import _common
from test.helper import TestHelper
from beets.util import syspath
from beets.util.artresizer import (
pil_resize,
im_resize,
get_im_version,
get_pil_version,
)
class ArtResizerFileSizeTest(_common.TestCase, TestHelper):
"""Unittest test case for Art Resizer to a specific filesize."""
IMG_225x225 = os.path.join(_common.RSRC, b"abbey.jpg")
IMG_225x225_SIZE = os.stat(syspath(IMG_225x225)).st_size
def setUp(self):
"""Called before each test, setting up beets."""
self.setup_beets()
def tearDown(self):
"""Called after each test, unloading all plugins."""
self.teardown_beets()
def _test_img_resize(self, resize_func):
"""Test resizing based on file size, given a resize_func."""
# Check quality setting unaffected by new parameter
im_95_qual = resize_func(
225,
self.IMG_225x225,
quality=95,
max_filesize=0,
)
# check valid path returned - max_filesize hasn't broken resize command
self.assertExists(im_95_qual)
# Attempt a lower filesize with same quality
im_a = resize_func(
225,
self.IMG_225x225,
quality=95,
max_filesize=0.9 * os.stat(syspath(im_95_qual)).st_size,
)
self.assertExists(im_a)
# target size was achieved
self.assertLess(os.stat(syspath(im_a)).st_size,
os.stat(syspath(im_95_qual)).st_size)
# Attempt with lower initial quality
im_75_qual = resize_func(
225,
self.IMG_225x225,
quality=75,
max_filesize=0,
)
self.assertExists(im_75_qual)
im_b = resize_func(
225,
self.IMG_225x225,
quality=95,
max_filesize=0.9 * os.stat(syspath(im_75_qual)).st_size,
)
self.assertExists(im_b)
# Check high (initial) quality still gives a smaller filesize
self.assertLess(os.stat(syspath(im_b)).st_size,
os.stat(syspath(im_75_qual)).st_size)
@unittest.skipUnless(get_pil_version(), "PIL not available")
def test_pil_file_resize(self):
"""Test PIL resize function is lowering file size."""
self._test_img_resize(pil_resize)
@unittest.skipUnless(get_im_version(), "ImageMagick not available")
def test_im_file_resize(self):
"""Test IM resize function is lowering file size."""
self._test_img_resize(im_resize)
def suite():
"""Run this suite of tests."""
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == "__main__":
unittest.main(defaultTest="suite")
| 32.126126
| 79
| 0.659563
|
from __future__ import division, absolute_import, print_function
import unittest
import os
from test import _common
from test.helper import TestHelper
from beets.util import syspath
from beets.util.artresizer import (
pil_resize,
im_resize,
get_im_version,
get_pil_version,
)
class ArtResizerFileSizeTest(_common.TestCase, TestHelper):
    """Check that the resize backends honor the ``max_filesize`` argument."""

    # 225x225 fixture image and its on-disk size in bytes.
    IMG_225x225 = os.path.join(_common.RSRC, b"abbey.jpg")
    IMG_225x225_SIZE = os.stat(syspath(IMG_225x225)).st_size

    def setUp(self):
        """Create a temporary beets environment for each test."""
        self.setup_beets()

    def tearDown(self):
        """Remove the temporary beets environment."""
        self.teardown_beets()

    def _test_img_resize(self, resize_func):
        """Exercise ``resize_func`` and verify max_filesize lowers file size."""
        # Baseline: quality 95, no file-size cap (max_filesize=0).
        im_95_qual = resize_func(
            225,
            self.IMG_225x225,
            quality=95,
            max_filesize=0,
        )
        # Valid path returned - max_filesize hasn't broken the resize command.
        self.assertExists(im_95_qual)
        # Attempt a lower filesize with same quality
        im_a = resize_func(
            225,
            self.IMG_225x225,
            quality=95,
            max_filesize=0.9 * os.stat(syspath(im_95_qual)).st_size,
        )
        self.assertExists(im_a)
        # target size was achieved
        self.assertLess(os.stat(syspath(im_a)).st_size,
                        os.stat(syspath(im_95_qual)).st_size)
        # Attempt with lower initial quality
        im_75_qual = resize_func(
            225,
            self.IMG_225x225,
            quality=75,
            max_filesize=0,
        )
        self.assertExists(im_75_qual)
        im_b = resize_func(
            225,
            self.IMG_225x225,
            quality=95,
            max_filesize=0.9 * os.stat(syspath(im_75_qual)).st_size,
        )
        self.assertExists(im_b)
        # Check high (initial) quality still gives a smaller filesize
        self.assertLess(os.stat(syspath(im_b)).st_size,
                        os.stat(syspath(im_75_qual)).st_size)

    @unittest.skipUnless(get_pil_version(), "PIL not available")
    def test_pil_file_resize(self):
        """PIL resize function lowers file size."""
        self._test_img_resize(pil_resize)

    @unittest.skipUnless(get_im_version(), "ImageMagick not available")
    def test_im_file_resize(self):
        """ImageMagick resize function lowers file size."""
        self._test_img_resize(im_resize)
def suite():
    """Load this module's tests for the test runner."""
    return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == "__main__":
    # Run the suite when executed directly.
    unittest.main(defaultTest="suite")
| true
| true
|
f7090d957aaa1b8c2a23b217a0554f11e14ef4db
| 6,740
|
py
|
Python
|
src/pacsanini/db/dcm2model.py
|
aachick/pacsanini
|
b54e4f222eede3c31b04373253e4de0b2c91217b
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause-Clear",
"BSD-3-Clause"
] | 10
|
2021-07-05T16:59:03.000Z
|
2022-02-09T11:13:03.000Z
|
src/pacsanini/db/dcm2model.py
|
aachick/pacsanini
|
b54e4f222eede3c31b04373253e4de0b2c91217b
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause-Clear",
"BSD-3-Clause"
] | 51
|
2021-07-05T08:29:35.000Z
|
2021-11-30T08:30:10.000Z
|
src/pacsanini/db/dcm2model.py
|
aachick/pacsanini
|
b54e4f222eede3c31b04373253e4de0b2c91217b
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause-Clear",
"BSD-3-Clause"
] | 2
|
2021-07-06T06:35:37.000Z
|
2021-07-09T10:26:38.000Z
|
# Copyright (C) 2019-2020, Therapixel SA.
# All rights reserved.
# This file is subject to the terms and conditions described in the
# LICENSE file distributed in this package.
"""The dcm2model module provides methods that can be used to convert pydicom.Dataset
instances to sqlalchemy instances.
"""
from typing import Tuple, Union
from pydicom import Dataset, dcmread
from pacsanini.convert import agestr2years, dcm2dict, str2datetime
from pacsanini.db.models import Image, Patient, Series, Study, StudyFind
from pacsanini.parse import DicomTagGroup
def dcm2patient(dcm: Dataset, institution: str = None) -> Patient:
    """Build a ``Patient`` database model from a DICOM dataset.

    Parameters
    ----------
    dcm : Dataset
        The DICOM dataset whose patient-level tags are read.
    institution : str
        Optional institution name stored on the Patient. The default is None.

    Returns
    -------
    Patient
        The populated Patient model.
    """
    tag_specs = [
        {"tag_name": "PatientID", "tag_alias": "patient_id"},
        {"tag_name": "PatientName", "tag_alias": "patient_name", "callback": str},
        {
            "tag_name": "PatientBirthDate",
            "tag_alias": "patient_birth_date",
            "callback": str2datetime,
        },
    ]
    fields = DicomTagGroup(tags=tag_specs).parse_dicom(dcm)
    fields["institution"] = institution
    return Patient(**fields)
def dcm2study(dcm: Dataset) -> Study:
    """Build a ``Study`` database model from a DICOM dataset.

    Parameters
    ----------
    dcm : Dataset
        The DICOM dataset whose study-level tags are read.

    Returns
    -------
    Study
        The populated Study model.
    """
    tag_specs = [
        {"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
        {
            "tag_name": "StudyDate",
            "tag_alias": "study_date",
            "callback": str2datetime,
        },
        {
            # -1 marks a missing/unparseable PatientAge.
            "tag_name": "PatientAge",
            "tag_alias": "patient_age",
            "callback": agestr2years,
            "default": -1,
        },
        {"tag_name": "AccessionNumber", "tag_alias": "accession_number"},
    ]
    fields = DicomTagGroup(tags=tag_specs).parse_dicom(dcm)
    return Study(**fields)
def dcm2study_finding(dcm: Dataset) -> StudyFind:
    """Build a ``StudyFind`` database model from a DICOM dataset.

    Parameters
    ----------
    dcm : Dataset
        The DICOM dataset whose C-FIND-level tags are read.

    Returns
    -------
    StudyFind
        The populated StudyFind model.
    """
    tag_specs = [
        {"tag_name": "PatientName", "tag_alias": "patient_name", "callback": str},
        {"tag_name": "PatientID", "tag_alias": "patient_id"},
        {"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
        {
            "tag_name": "StudyDate",
            "tag_alias": "study_date",
            "callback": str2datetime,
        },
        {"tag_name": "AccessionNumber", "tag_alias": "accession_number"},
    ]
    fields = DicomTagGroup(tags=tag_specs).parse_dicom(dcm)
    return StudyFind(**fields)
def dcm2series(dcm: Dataset) -> Series:
    """Build a ``Series`` database model from a DICOM dataset.

    Parameters
    ----------
    dcm : Dataset
        The DICOM dataset whose series-level tags are read.

    Returns
    -------
    Series
        The populated Series model.
    """
    tag_specs = [
        {"tag_name": "SeriesInstanceUID", "tag_alias": "series_uid"},
        {"tag_name": "Modality", "tag_alias": "modality"},
    ]
    fields = DicomTagGroup(tags=tag_specs).parse_dicom(dcm)
    return Series(**fields)
def dcm2image(dcm: Dataset, institution: str = None, filepath: str = None) -> Image:
    """Build an ``Image`` database model from a DICOM dataset.

    Parameters
    ----------
    dcm : Dataset
        The DICOM dataset whose image-level tags are read.
    institution : str
        Optional institution name stored on the Image. The default is None.
    filepath : str
        Optional path of the DICOM file stored on the Image. The default is
        None.

    Returns
    -------
    Image
        The populated Image model.
    """
    tag_specs = [
        {"tag_name": "PatientID", "tag_alias": "patient_id"},
        {"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
        {
            "tag_name": "StudyDate",
            "tag_alias": "study_date",
            "callback": str2datetime,
        },
        {"tag_name": "SeriesInstanceUID", "tag_alias": "series_uid"},
        {"tag_name": "Modality", "tag_alias": "modality"},
        {"tag_name": "SOPClassUID", "tag_alias": "sop_class_uid"},
        {"tag_name": "SOPInstanceUID", "tag_alias": "image_uid"},
        {"tag_name": "AcquisitionTime", "tag_alias": "acquisition_time"},
        {"tag_name": "Manufacturer", "tag_alias": "manufacturer"},
        {
            "tag_name": "ManufacturerModelName",
            "tag_alias": "manufacturer_model_name",
        },
    ]
    fields = DicomTagGroup(tags=tag_specs).parse_dicom(dcm)
    # Keep the full (pixel-free) header alongside the parsed columns.
    fields["meta"] = dcm2dict(dcm, include_pixels=False)
    fields["institution"] = institution
    fields["filepath"] = filepath
    return Image(**fields)
def dcm2dbmodels(
    dcm: Union[str, Dataset], institution: str = None, filepath: str = None
) -> Tuple[Patient, Study, Series, Image]:
    """Convert a DICOM file into the database models used for insertion.

    Parameters
    ----------
    dcm : Union[str, Dataset]
        The DICOM data to convert to Patient, Study, Series, and Image
        instances. A path to a DICOM file is accepted as well; path-like
        objects (e.g. ``pathlib.Path``) are also supported.
    institution : str
        If set, add a specified institution name to the Patient and Image
        models. The default is None.
    filepath : str
        If set, add the DICOM's filepath to the database. The default is
        None. If ``dcm`` is a path, ``filepath`` is set to that path.

    Returns
    -------
    Tuple[Patient, Study, Series, Image]
        A 4-tuple with the DICOM's patient, study, series, and image models.
    """
    import os  # local import: only needed for the PathLike check

    # Generalization: accept pathlib.Path / os.PathLike in addition to str
    # (pydicom.dcmread supports path-like inputs); behavior for str unchanged.
    if isinstance(dcm, (str, os.PathLike)):
        filepath = str(dcm)
        # Pixel data is never used by the models, so skip reading it.
        dcm = dcmread(filepath, stop_before_pixels=True)
    pat = dcm2patient(dcm, institution=institution)
    study = dcm2study(dcm)
    series = dcm2series(dcm)
    image = dcm2image(dcm, institution=institution, filepath=filepath)
    return pat, study, series, image
| 30.636364
| 86
| 0.586053
|
from typing import Tuple, Union
from pydicom import Dataset, dcmread
from pacsanini.convert import agestr2years, dcm2dict, str2datetime
from pacsanini.db.models import Image, Patient, Series, Study, StudyFind
from pacsanini.parse import DicomTagGroup
def dcm2patient(dcm: Dataset, institution: str = None) -> Patient:
    """Build a Patient model from the dataset's patient-level DICOM tags."""
    tag_grp = DicomTagGroup(
        tags=[
            {"tag_name": "PatientID", "tag_alias": "patient_id"},
            {"tag_name": "PatientName", "tag_alias": "patient_name", "callback": str},
            {
                "tag_name": "PatientBirthDate",
                "tag_alias": "patient_birth_date",
                "callback": str2datetime,
            },
        ]
    )
    data = tag_grp.parse_dicom(dcm)
    data["institution"] = institution
    return Patient(**data)
def dcm2study(dcm: Dataset) -> Study:
    """Build a Study model from the dataset's study-level DICOM tags."""
    tag_grp = DicomTagGroup(
        tags=[
            {"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
            {
                "tag_name": "StudyDate",
                "tag_alias": "study_date",
                "callback": str2datetime,
            },
            {
                # -1 marks a missing/unparseable PatientAge.
                "tag_name": "PatientAge",
                "tag_alias": "patient_age",
                "callback": agestr2years,
                "default": -1,
            },
            {"tag_name": "AccessionNumber", "tag_alias": "accession_number"},
        ]
    )
    data = tag_grp.parse_dicom(dcm)
    return Study(**data)
def dcm2study_finding(dcm: Dataset) -> StudyFind:
    """Build a StudyFind model from the dataset's C-FIND-level DICOM tags."""
    tag_grp = DicomTagGroup(
        tags=[
            {"tag_name": "PatientName", "tag_alias": "patient_name", "callback": str},
            {"tag_name": "PatientID", "tag_alias": "patient_id"},
            {"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
            {
                "tag_name": "StudyDate",
                "tag_alias": "study_date",
                "callback": str2datetime,
            },
            {"tag_name": "AccessionNumber", "tag_alias": "accession_number"},
        ]
    )
    data = tag_grp.parse_dicom(dcm)
    return StudyFind(**data)
def dcm2series(dcm: Dataset) -> Series:
    """Build a Series model from the dataset's series-level DICOM tags."""
    tag_grp = DicomTagGroup(
        tags=[
            {"tag_name": "SeriesInstanceUID", "tag_alias": "series_uid"},
            {"tag_name": "Modality", "tag_alias": "modality"},
        ]
    )
    data = tag_grp.parse_dicom(dcm)
    return Series(**data)
def dcm2image(dcm: Dataset, institution: str = None, filepath: str = None) -> Image:
    """Build an Image model from the dataset's image-level DICOM tags."""
    tag_grp = DicomTagGroup(
        tags=[
            {"tag_name": "PatientID", "tag_alias": "patient_id"},
            {"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
            {
                "tag_name": "StudyDate",
                "tag_alias": "study_date",
                "callback": str2datetime,
            },
            {"tag_name": "SeriesInstanceUID", "tag_alias": "series_uid"},
            {"tag_name": "Modality", "tag_alias": "modality"},
            {"tag_name": "SOPClassUID", "tag_alias": "sop_class_uid"},
            {"tag_name": "SOPInstanceUID", "tag_alias": "image_uid"},
            {"tag_name": "AcquisitionTime", "tag_alias": "acquisition_time"},
            {"tag_name": "Manufacturer", "tag_alias": "manufacturer"},
            {
                "tag_name": "ManufacturerModelName",
                "tag_alias": "manufacturer_model_name",
            },
        ]
    )
    data = tag_grp.parse_dicom(dcm)
    # Full (pixel-free) header stored alongside the parsed columns.
    data["meta"] = dcm2dict(dcm, include_pixels=False)
    data["institution"] = institution
    data["filepath"] = filepath
    return Image(**data)
def dcm2dbmodels(
    dcm: Union[str, Dataset], institution: str = None, filepath: str = None
) -> Tuple[Patient, Study, Series, Image]:
    """Split a DICOM file or dataset into Patient, Study, Series, Image models."""
    if isinstance(dcm, str):
        # A path was given: remember it and read the file without pixel data.
        filepath = dcm
        dcm = dcmread(dcm, stop_before_pixels=True)
    pat = dcm2patient(dcm, institution=institution)
    study = dcm2study(dcm)
    series = dcm2series(dcm)
    image = dcm2image(dcm, institution=institution, filepath=filepath)
    return pat, study, series, image
| true
| true
|
f7090e45c95d43db13aff7af0aef357d0ac5fadb
| 5,502
|
py
|
Python
|
interact/agents/ddpg/td3.py
|
rystrauss/interact
|
4fd8a5ffd2b712beb81bc43587745b29c715d754
|
[
"MIT"
] | 1
|
2020-11-13T01:59:21.000Z
|
2020-11-13T01:59:21.000Z
|
interact/agents/ddpg/td3.py
|
rystrauss/interact
|
4fd8a5ffd2b712beb81bc43587745b29c715d754
|
[
"MIT"
] | 2
|
2021-04-04T22:27:04.000Z
|
2021-05-21T17:35:32.000Z
|
interact/agents/ddpg/td3.py
|
rystrauss/interact
|
4fd8a5ffd2b712beb81bc43587745b29c715d754
|
[
"MIT"
] | null | null | null |
from typing import Callable, Optional
import gin
import gym
from interact.agents.ddpg.ddpg import DDPGAgent
from interact.agents.utils import register
@gin.configurable(name_or_fn="td3", denylist=["env_fn"])
@register("td3")
class TD3Agent(DDPGAgent):
    """The Twin Delayed DDPG (TD3) algorithm.
    This algorithm is a minor modification of DDPG. This class is merely a wrapper
    around DDPG with the TD3 features enabled by default. Namely, TD3 uses twin
    critic networks, delayed policy updates, and target policy smoothing.
    Args:
        env_fn: A function that, when called, returns an instance of the agent's
            environment.
        network: Base network type to be used by the policy and Q-functions.
        actor_lr: Learning rate to use for updating the actor.
        critic_lr: Learning rate to use for updating the critics.
        tau: Parameter for the polyak averaging used to update the target networks.
        target_update_interval: Frequency with which the target Q-networks are updated.
        gamma: The discount factor.
        buffer_size: The maximum size of the replay buffer.
        train_freq: The frequency with which training updates are performed.
        target_update_interval: The frequency with which the target network is updated.
        learning_starts: The number of timesteps after which learning starts.
        random_steps: Actions will be sampled completely at random for this many
            timesteps at the beginning of training.
        batch_size: The size of batches sampled from the replay buffer over which
            updates are performed.
        num_workers: The number of parallel workers to use for experience collection.
        num_envs_per_worker: The number of synchronous environments to be executed in
            each worker.
        prioritized_replay: If True, a prioritized experience replay will be used.
        prioritized_replay_alpha: Alpha parameter for prioritized replay.
        prioritized_replay_beta: Initial beta parameter for prioritized replay.
        final_prioritized_replay_beta: The final value of the prioritized replay beta
            parameter.
        prioritized_replay_beta_steps: Number of steps over which the prioritized
            replay beta parameter will be annealed. If None, this will be set to the
            total number of training steps.
        prioritized_replay_epsilon: Epsilon to add to td-errors when updating
            priorities.
        initial_noise_scale: The initial scale of the Gaussian noise that is added to
            actions for exploration.
        final_noise_scale: The final scale of the Gaussian noise that is added to
            actions for exploration.
        noise_scale_steps: The number of timesteps over which the amount of exploration
            noise is annealed from `initial_noise_scale` to `final_noise_scale`. If
            None, the total duration of training is used.
        use_huber: If True, the Huber loss is used in favor of MSE for critic updates.
        use_twin_critic: If True, twin critic networks are used.
        policy_delay: The policy is updated once for every `policy_delay` critic
            updates.
        smooth_target_policy: If true, target policy smoothing is used in the critic
            updates.
        target_noise: The amount of target noise that is used for smoothing.
        target_noise_clip: The value at which target noise is clipped.
    """
    def __init__(
        self,
        env_fn: Callable[[], gym.Env],
        network: str = "mlp",
        critic_lr: float = 1e-3,
        actor_lr: float = 1e-3,
        learning_starts: int = 10000,
        random_steps: int = 10000,
        target_update_interval: int = 1,
        tau: float = 0.005,
        gamma: float = 0.95,
        buffer_size: int = 100000,
        train_freq: int = 1,
        batch_size: int = 100,
        num_workers: int = 1,
        num_envs_per_worker: int = 1,
        prioritized_replay: bool = False,
        prioritized_replay_alpha: float = 0.6,
        prioritized_replay_beta: float = 0.4,
        final_prioritized_replay_beta: float = 4.0,  # NOTE(review): 4.0 is unusual -- beta is typically annealed toward 1.0; confirm intent.
        prioritized_replay_beta_steps: Optional[int] = None,
        prioritized_replay_epsilon: float = 1e-6,
        initial_noise_scale: float = 0.1,
        final_noise_scale: float = 0.1,
        noise_scale_steps: Optional[int] = None,
        use_huber: bool = False,
        use_twin_critic: bool = True,  # TD3 feature: twin critics on by default
        policy_delay: int = 2,  # TD3 feature: delayed policy updates
        smooth_target_policy: bool = True,  # TD3 feature: target policy smoothing
        target_noise: float = 0.2,
        target_noise_clip: float = 0.5,
    ):
        # Arguments are forwarded POSITIONALLY: the order below must match
        # DDPGAgent.__init__'s parameter order exactly.
        super().__init__(
            env_fn,
            network,
            critic_lr,
            actor_lr,
            learning_starts,
            random_steps,
            target_update_interval,
            tau,
            gamma,
            buffer_size,
            train_freq,
            batch_size,
            num_workers,
            num_envs_per_worker,
            prioritized_replay,
            prioritized_replay_alpha,
            prioritized_replay_beta,
            final_prioritized_replay_beta,
            prioritized_replay_beta_steps,
            prioritized_replay_epsilon,
            initial_noise_scale,
            final_noise_scale,
            noise_scale_steps,
            use_huber,
            use_twin_critic,
            policy_delay,
            smooth_target_policy,
            target_noise,
            target_noise_clip,
        )
| 42.651163
| 87
| 0.65667
|
from typing import Callable, Optional
import gin
import gym
from interact.agents.ddpg.ddpg import DDPGAgent
from interact.agents.utils import register
@gin.configurable(name_or_fn="td3", denylist=["env_fn"])
@register("td3")
class TD3Agent(DDPGAgent):
    """Twin Delayed DDPG (TD3): DDPG with twin critics, delayed policy
    updates, and target policy smoothing enabled by default."""
    def __init__(
        self,
        env_fn: Callable[[], gym.Env],
        network: str = "mlp",
        critic_lr: float = 1e-3,
        actor_lr: float = 1e-3,
        learning_starts: int = 10000,
        random_steps: int = 10000,
        target_update_interval: int = 1,
        tau: float = 0.005,
        gamma: float = 0.95,
        buffer_size: int = 100000,
        train_freq: int = 1,
        batch_size: int = 100,
        num_workers: int = 1,
        num_envs_per_worker: int = 1,
        prioritized_replay: bool = False,
        prioritized_replay_alpha: float = 0.6,
        prioritized_replay_beta: float = 0.4,
        final_prioritized_replay_beta: float = 4.0,  # NOTE(review): unusual final beta (typically annealed toward 1.0) -- confirm intent.
        prioritized_replay_beta_steps: Optional[int] = None,
        prioritized_replay_epsilon: float = 1e-6,
        initial_noise_scale: float = 0.1,
        final_noise_scale: float = 0.1,
        noise_scale_steps: Optional[int] = None,
        use_huber: bool = False,
        use_twin_critic: bool = True,  # TD3 feature: twin critics
        policy_delay: int = 2,  # TD3 feature: delayed policy updates
        smooth_target_policy: bool = True,  # TD3 feature: target smoothing
        target_noise: float = 0.2,
        target_noise_clip: float = 0.5,
    ):
        # Positional forwarding: order must match DDPGAgent.__init__ exactly.
        super().__init__(
            env_fn,
            network,
            critic_lr,
            actor_lr,
            learning_starts,
            random_steps,
            target_update_interval,
            tau,
            gamma,
            buffer_size,
            train_freq,
            batch_size,
            num_workers,
            num_envs_per_worker,
            prioritized_replay,
            prioritized_replay_alpha,
            prioritized_replay_beta,
            final_prioritized_replay_beta,
            prioritized_replay_beta_steps,
            prioritized_replay_epsilon,
            initial_noise_scale,
            final_noise_scale,
            noise_scale_steps,
            use_huber,
            use_twin_critic,
            policy_delay,
            smooth_target_policy,
            target_noise,
            target_noise_clip,
        )
| true
| true
|
f7090e9c13d03314f33aa0f682190043f9d895fc
| 4,679
|
py
|
Python
|
Util/Latex_generator.py
|
LamannaLeonardo/OLAM
|
7a6611912ebb40d39a934dd454efec4cbb7913d3
|
[
"MIT"
] | null | null | null |
Util/Latex_generator.py
|
LamannaLeonardo/OLAM
|
7a6611912ebb40d39a934dd454efec4cbb7913d3
|
[
"MIT"
] | null | null | null |
Util/Latex_generator.py
|
LamannaLeonardo/OLAM
|
7a6611912ebb40d39a934dd454efec4cbb7913d3
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2022, Leonardo Lamanna
# All rights reserved.
# This source code is licensed under the MIT-style license found in the
# LICENSE file in the root directory of this source tree.
import pandas as pd
import os
pd.options.display.max_colwidth = 100
def generate_latex_table(data_file, labels, tab_name, caption, header):
    """Write selected "Summary" sheet columns of an Excel file as a LaTeX table.

    Parameters:
        data_file: path of the Excel workbook to read.
        labels: column labels to keep, in output order.
        tab_name: basename of the generated ``<tab_name>.tex`` file; also used
            for the LaTeX label ``tab:<tab_name>``.
        caption: table caption.
        header: LaTeX header cells substituted for the column labels.
    """
    # Read before opening the output so the .tex file is not held open (or
    # created empty) if reading the workbook fails.
    df = pd.read_excel(data_file, sheet_name="Summary")
    df_restricted = df[labels]
    # Explicit UTF-8 so output does not depend on the platform default
    # encoding; escape=False keeps LaTeX markup in `header`/`caption` intact.
    with open(tab_name + ".tex", "w", encoding="utf-8") as f:
        f.write(df_restricted.to_latex(index=False, escape=False,
                                       label="tab:{}".format(tab_name),
                                       caption=caption,
                                       header=header))
def generate_comparison_latex_table():
    """Emit the A/B comparison table (uncertain setting) as a LaTeX file."""
    labels = ["Domain", "Neg precision A", "Neg recall A", "Overall precision A", "Overall recall A",
              "Neg precision B", "Neg recall B", "Overall precision B", "Overall recall B"]
    header = ["Domain", "$P_{\\eff^{-}}$", "$R_{\\eff^{-}}$", "$P$", "$R$",
              "$P_{\\eff^{-}}$", "$R_{\\eff^{-}}$", "$P$", "$R$"]
    # Fixed caption typo: missing space after the colon.
    caption = "For each domain: statistics on final metrics of the last instance grouped by " \
              "negative effects."
    tab_name = "comparison_summary_uncertain"
    # os.path.join with a single argument was a no-op; use the name directly.
    file_path = "comparison_summary_uncertain.xlsx"
    generate_latex_table(file_path, labels, tab_name, caption, header)
def generate_comparison_latex_table_fama():
    """Emit the OLAM vs FAMA full-observability comparison table as LaTeX."""
    labels = ["Domain", "Tot time", "Overall precision", "Overall recall", "FAMA tot time",
              "FAMA precision", "FAMA recall", "Delta act"]
    # Raw strings: "\d", "\p", "\c" are invalid escape sequences in normal
    # string literals (DeprecationWarning today, an error in future Python);
    # the resulting values are byte-identical to the originals.
    header = ["Domain", "$t$", "$P$", "$R$", "$t$", "$P$", "$R$", r"$\delta_{A}$"]
    caption = "Comparison among OLAM and FAMA with full observability. FAMA is run with all plan traces " \
              r"provided in \protect\cite{aineto_AIJ2019}. MODEL WITH UNCERTAIN NEGATIVE EFFECTS AND STRIPS ASSUMPTION."
    tab_name = "comparison_fama"
    file_path = "comparison_fama.xlsx"
    generate_latex_table(file_path, labels, tab_name, caption, header)
def generate_summary_latex_table():
    """Emit the per-domain final-metrics summary table as a LaTeX file."""
    # (Removed a stale commented-out variant of `labels`.)
    labels = ["Domain", "Instances", "Precs precision", "Precs recall", "Pos precision", "Pos recall",
              "Neg precision", "Neg recall", "Average precision", "Average recall"]
    header = ["Domain", "$I$", "$P_{\\prec}$", "$R_{\\prec}$", "$P_{\\eff^{+}}$", "$R_{\\eff^{+}}$", "$P_{\\eff^{-}}$",
              "$R_{\\eff^{-}}$", "$P$", "$R$"]
    # Fixed caption typo: missing space after the colon.
    caption = "For each domain: statistics on final metrics of the last instance grouped by " \
              "preconditions, positive effects and negative ones."
    tab_name = "overall_summary_certain_nostripsass"
    folder = "../Analysis/IJCAI_Results/Results_certain_NOnegeff_assumption"
    file_path = os.path.join(folder, "overall_summary.xlsx")
    generate_latex_table(file_path, labels, tab_name, caption, header)
def generate_domain_objects_table():
    """Write a LaTeX table listing each problem's objects for every domain.

    Reads the "Objects" sheet of every per-domain workbook under
    ``../Analysis/Results_cert`` (skipping the "overall*" summaries) and
    emits ``all_problem_objects.tex``.
    """
    header = ["Domain", "Objects"]
    caption = "For each domain, problem objects of all problems in the generated set."
    tab_name = "all_problem_objects"
    results_dir = os.path.join("..", "Analysis", "Results_cert")
    domain_dataframes = [name for name in os.listdir(results_dir)
                         if not name.startswith("overall")]
    rows = []
    for domain_dataframe in domain_dataframes:
        domain = domain_dataframe.split("_")[0]
        df_domain = pd.read_excel(os.path.join(results_dir, domain_dataframe),
                                  sheet_name="Objects")
        # Object-type columns; skip the aggregate "total objs" column.
        domain_obj_types = [key.strip().lower() for key in list(df_domain)
                            if key.strip().lower() != "total objs"]
        for _, row in df_domain.iterrows():
            # Columns in the sheet are prefixed with a literal tab character.
            problem_objs = ["{} {}".format(k, row["\t" + k]) for k in domain_obj_types]
            rows.append({"Domain": domain, "Objects": ", ".join(problem_objs)})
    # Build the frame in one shot: DataFrame.append is deprecated and removed
    # in pandas 2.0; this also avoids shadowing the builtin `eval`.
    df = pd.DataFrame(rows, columns=["Domain", "Objects"])
    with open(tab_name + ".tex", "w", encoding="utf-8") as f:
        f.write(df.to_latex(index=False,
                            label="tab:{}".format(tab_name),
                            caption=caption,
                            header=header))
if __name__ == "__main__":
    # Only the summary table is generated by default.
    generate_summary_latex_table()
    #
    # generate_domain_objects_table()
| 40.686957
| 119
| 0.593075
|
import pandas as pd
import os
pd.options.display.max_colwidth = 100
def generate_latex_table(data_file, labels, tab_name, caption, header):
    """Write the "Summary" sheet columns `labels` of `data_file` to <tab_name>.tex."""
    with open(tab_name + ".tex", "w") as f:
        df = pd.read_excel(data_file, sheet_name="Summary")
        df_restricted = df[labels]
        f.write(df_restricted.to_latex(index=False, escape=False,
                                       label="tab:{}".format(tab_name),
                                       caption= caption,
                                       header = header))
def generate_comparison_latex_table():
    """Emit the A/B comparison table (uncertain setting) as a LaTeX file."""
    labels = ["Domain", "Neg precision A", "Neg recall A", "Overall precision A", "Overall recall A",
              "Neg precision B", "Neg recall B", "Overall precision B", "Overall recall B"]
    header = ["Domain", "$P_{\\eff^{-}}$", "$R_{\\eff^{-}}$", "$P$", "$R$",
              "$P_{\\eff^{-}}$", "$R_{\\eff^{-}}$", "$P$", "$R$"]
    caption = "For each domain:statistics on final metrics of the last instance grouped by " \
              "negative effects."
    tab_name = "comparison_summary_uncertain"
    file_path = os.path.join("comparison_summary_uncertain.xlsx")
    generate_latex_table(file_path, labels, tab_name, caption, header)
def generate_comparison_latex_table_fama():
    """Emit the OLAM vs FAMA full-observability comparison table as LaTeX."""
    labels = ["Domain", "Tot time", "Overall precision", "Overall recall", "FAMA tot time",
              "FAMA precision", "FAMA recall", "Delta act"]
    header = ["Domain", "$t$", "$P$", "$R$", "$t$", "$P$", "$R$", "$\delta_{A}$"]
    caption = "Comparison among OLAM and FAMA with full observability. FAMA is run with all plan traces " \
              "provided in \protect\cite{aineto_AIJ2019}. MODEL WITH UNCERTAIN NEGATIVE EFFECTS AND STRIPS ASSUMPTION."
    tab_name = "comparison_fama"
    file_path = os.path.join("comparison_fama.xlsx")
    generate_latex_table(file_path, labels, tab_name, caption, header)
def generate_summary_latex_table():
    """Emit the per-domain final-metrics summary table as a LaTeX file."""
    labels = ["Domain", "Instances", "Precs precision", "Precs recall","Pos precision", "Pos recall",
              "Neg precision", "Neg recall", "Average precision", "Average recall"]
    header = ["Domain", "$I$", "$P_{\\prec}$", "$R_{\\prec}$", "$P_{\\eff^{+}}$", "$R_{\\eff^{+}}$", "$P_{\\eff^{-}}$",
              "$R_{\\eff^{-}}$", "$P$", "$R$"]
    caption = "For each domain:statistics on final metrics of the last instance grouped by " \
              "preconditions, positive effects and negative ones."
    tab_name = "overall_summary_certain_nostripsass"
    folder = "../Analysis/IJCAI_Results/Results_certain_NOnegeff_assumption"
    file_path = os.path.join(folder, "overall_summary.xlsx")
    generate_latex_table(file_path, labels, tab_name, caption, header)
def generate_domain_objects_table():
    """Write a LaTeX table listing each problem's objects for every domain."""
    header = ["Domain", "Objects"]
    caption = "For each domain, problem objects of all problems in the generated set."
    tab_name = "all_problem_objects"
    df = pd.DataFrame({
        "Domain":[],
        "Objects":[]
    })
    # Per-domain workbooks; "overall*" summary files are skipped.
    domain_dataframes = [name for name in os.listdir(os.path.join("..", "Analysis", "Results_cert"))
                         if not name.startswith("overall")]
    for domain_dataframe in domain_dataframes:
        domain = domain_dataframe.split("_")[0]
        df_domain = pd.read_excel(os.path.join("..", "Analysis", "Results_cert", domain_dataframe),
                                  sheet_name="Objects")
        # Object-type columns; the aggregate "total objs" column is excluded.
        domain_obj_types = [key.strip().lower() for key in list(df_domain) if key.strip().lower() != "total objs"]
        for i, row in df_domain.iterrows():
            problem_objs = []
            for k in domain_obj_types:
                # Sheet columns are prefixed with a literal tab character.
                problem_objs.append("{} {}".format(k,row["\t" + k]))
            # NOTE(review): DataFrame.append is deprecated (removed in pandas
            # 2.0) and `eval` shadows the builtin -- candidates for cleanup.
            eval = {
                "Domain":domain,
                "Objects":", ".join(problem_objs)
            }
            df = df.append(eval, ignore_index=True)
    with open(tab_name + ".tex", "w") as f:
        f.write(df.to_latex(index=False,
                            label="tab:{}".format(tab_name),
                            caption= caption,
                            header = header))
if __name__ == "__main__":
    # Generate the summary table when run as a script.
    generate_summary_latex_table()
| true
| true
|
f70910191fa2fdbdb515d1ee6223d72a37845ca7
| 1,398
|
py
|
Python
|
tests/components_to_test/repeated_computed_layer.py
|
RichardoLuo/ColossalAI
|
797a9dc5a9e801d7499b8667c3ef039a38aa15ba
|
[
"Apache-2.0"
] | 1,630
|
2021-10-30T01:00:27.000Z
|
2022-03-31T23:02:41.000Z
|
tests/components_to_test/repeated_computed_layer.py
|
RichardoLuo/ColossalAI
|
797a9dc5a9e801d7499b8667c3ef039a38aa15ba
|
[
"Apache-2.0"
] | 166
|
2021-10-30T01:03:01.000Z
|
2022-03-31T14:19:07.000Z
|
tests/components_to_test/repeated_computed_layer.py
|
RichardoLuo/ColossalAI
|
797a9dc5a9e801d7499b8667c3ef039a38aa15ba
|
[
"Apache-2.0"
] | 253
|
2021-10-30T06:10:29.000Z
|
2022-03-31T13:30:06.000Z
|
#!/usr/bin/env python
import torch
import torch.nn as nn
from colossalai.nn import CheckpointModule
from .utils.dummy_data_generator import DummyDataGenerator
from .registry import non_distributed_component_funcs
class NetWithRepeatedlyComputedLayers(CheckpointModule):
    """Model whose layers run forward more than once per pass.

    ``fc1`` and ``fc2`` each appear twice in the layer sequence, so both are
    invoked twice during a single forward call.
    """

    def __init__(self, checkpoint=False) -> None:
        super().__init__(checkpoint=checkpoint)
        self.fc1 = nn.Linear(5, 5)
        self.fc2 = nn.Linear(5, 5)
        self.fc3 = nn.Linear(5, 2)
        # fc1/fc2 repeated on purpose; fc3 maps to the 2 output classes.
        self.layers = [self.fc1, self.fc2, self.fc1, self.fc2, self.fc3]

    def forward(self, x):
        out = x
        for module in self.layers:
            out = module(out)
        return out
class DummyDataLoader(DummyDataGenerator):
    """Yields random 16-sample batches of 5-feature data with binary labels."""

    def generate(self):
        inputs = torch.rand(16, 5)
        targets = torch.randint(low=0, high=2, size=(16,))
        return inputs, targets
@non_distributed_component_funcs.register(name='repeated_computed_layers')
def get_training_components():
    """Return (model_builder, trainloader, testloader, optimizer_cls, criterion)."""

    def model_builder(checkpoint=True):
        # Builds the repeated-layer test model, checkpointing enabled by default.
        return NetWithRepeatedlyComputedLayers(checkpoint)

    trainloader = DummyDataLoader()
    testloader = DummyDataLoader()
    loss_fn = torch.nn.CrossEntropyLoss()
    return model_builder, trainloader, testloader, torch.optim.Adam, loss_fn
| 29.125
| 83
| 0.703147
|
import torch
import torch.nn as nn
from colossalai.nn import CheckpointModule
from .utils.dummy_data_generator import DummyDataGenerator
from .registry import non_distributed_component_funcs
class NetWithRepeatedlyComputedLayers(CheckpointModule):
    """Model whose fc1 and fc2 layers each run forward twice per pass."""
    def __init__(self, checkpoint=False) -> None:
        super().__init__(checkpoint=checkpoint)
        self.fc1 = nn.Linear(5, 5)
        self.fc2 = nn.Linear(5, 5)
        self.fc3 = nn.Linear(5, 2)
        # fc1/fc2 repeated on purpose; fc3 maps to the 2 output classes.
        self.layers = [self.fc1, self.fc2, self.fc1, self.fc2, self.fc3]
    def forward(self, x):
        for layer in self.layers:
            x = layer(x)
        return x
class DummyDataLoader(DummyDataGenerator):
    """Yields random 16-sample batches of 5-feature data with binary labels."""
    def generate(self):
        data = torch.rand(16, 5)
        label = torch.randint(low=0, high=2, size=(16,))
        return data, label
@non_distributed_component_funcs.register(name='repeated_computed_layers')
def get_training_components():
    """Return (model_builder, trainloader, testloader, optimizer_cls, criterion)."""
    def model_builder(checkpoint=True):
        # Builds the repeated-layer test model, checkpointing enabled by default.
        return NetWithRepeatedlyComputedLayers(checkpoint)
    trainloader = DummyDataLoader()
    testloader = DummyDataLoader()
    criterion = torch.nn.CrossEntropyLoss()
    return model_builder, trainloader, testloader, torch.optim.Adam, criterion
| true
| true
|
f709111442b99e0d8ef6aa437399990e73061ef7
| 3,524
|
py
|
Python
|
stable_nalu/layer/hard_softmax_nac.py
|
wlm2019/Neural-Arithmetic-Units
|
f9de9d004bb2dc2ee28577cd1760d0a00c185836
|
[
"MIT"
] | 147
|
2019-10-07T11:01:54.000Z
|
2021-11-16T02:51:18.000Z
|
stable_nalu/layer/hard_softmax_nac.py
|
wlm2019/Neural-Arithmetic-Units
|
f9de9d004bb2dc2ee28577cd1760d0a00c185836
|
[
"MIT"
] | 1
|
2019-12-03T12:40:21.000Z
|
2019-12-03T12:40:21.000Z
|
stable_nalu/layer/hard_softmax_nac.py
|
wlm2019/Neural-Arithmetic-Units
|
f9de9d004bb2dc2ee28577cd1760d0a00c185836
|
[
"MIT"
] | 19
|
2019-12-21T15:58:44.000Z
|
2021-09-03T08:32:38.000Z
|
import math
import torch
from ..abstract import ExtendedTorchModule
from ..functional import sparsity_error
from ._abstract_recurrent_cell import AbstractRecurrentCell
class HardSoftmaxNACLayer(ExtendedTorchModule):
    """Implements the NAC (Neural Accumulator)

    Each weight is a hard sample from a 3-way categorical over {1, -1, 0},
    with soft (softmax) probabilities used for the gradients.

    Arguments:
        in_features: number of ingoing features
        out_features: number of outgoing features
    """
    def __init__(self, in_features, out_features, **kwargs):
        super().__init__('nac', **kwargs)
        self.in_features = in_features
        self.out_features = out_features
        # Define the target weights. Also, put 0 last such that p1 = p2 = 0
        # corresponds to p3 = 1 => w = 0.
        self.register_buffer('target_weights', torch.tensor([1, -1, 0], dtype=torch.float32))
        # Initialize a tensor, that will be the placeholder for the hard samples
        self.register_buffer('sample', torch.LongTensor(out_features, in_features))
        # We use only two trainable parameters per weight (the third logit is a
        # fixed buffer) to prevent redundancy; this also makes the layer more
        # comparable with NAC.
        self.W_hat = torch.nn.Parameter(torch.Tensor(out_features, in_features, 2))
        self.register_buffer('W_hat_k', torch.Tensor(out_features, in_features, 1))
        self.register_parameter('bias', None)
    def reset_parameters(self):
        """Initialize W_hat logits; W_hat_k (the third logit) is fixed to 0.

        NOTE(review): __init__ leaves W_hat uninitialized -- presumably a
        caller invokes reset_parameters(); confirm against the framework.
        """
        # Use a gain of sqrt(0.5). Lets assume that softmax'(0) ~ 1, because this
        # holds for sigmoid. Then:
        # Var[W] = 1 * Var[S_1] - 1 * Var[S_2] + 0 * Var[S_3] = 2 / (fan[in] + fan[out])
        # Var[W] = 2 * Var[S_i] = 2 / (fan[in] + fan[out])
        # Var[S_i] = 1/2 * 2 / (fan[in] + fan[out])
        # sqrt(Var[S_i]) = sqrt(1/2) * sqrt(2 / (fan[in] + fan[out]))
        # This is not exactly true, because S_1, S_2, and S_3 are not enterily uncorrelated.
        torch.nn.init.xavier_uniform_(self.W_hat, gain=math.sqrt(0.5))
        torch.nn.init.constant_(self.W_hat_k, 0)
    def forward(self, input, reuse=False):
        """Linear transform with hard-sampled weights.

        When ``reuse`` is True the previously drawn hard sample is reused
        instead of drawing a new one.
        """
        # Concat trainable and non-trainable weights
        W_hat_full = torch.cat((self.W_hat, self.W_hat_k), dim=-1) # size = [out, in, 3]
        # Compute W_soft
        pi = torch.nn.functional.softmax(W_hat_full, dim=-1)
        W_soft = pi @ self.target_weights
        # Compute W_hard: draw one category per weight into the `sample` buffer
        if not reuse:
            torch.multinomial(pi.view(-1, 3), 1, True, out=self.sample.view(-1))
        W_hard = self.target_weights[self.sample]
        # Use W_hard in the forward pass, but use W_soft for the gradients.
        # This implementation trick comes from torch.nn.functional.gumble_softmax(hard=True)
        W = W_hard - W_soft.detach() + W_soft
        # Compute the linear multiplication as usual
        self.writer.add_histogram('W', W)
        self.writer.add_tensor('W', W)
        self.writer.add_scalar('W/sparsity_error', sparsity_error(W), verbose_only=False)
        return torch.nn.functional.linear(input, W, self.bias)
    def extra_repr(self):
        """Summary string shown in the module's repr."""
        return 'in_features={}, out_features={}'.format(
            self.in_features, self.out_features
        )
class HardSoftmaxNACCell(AbstractRecurrentCell):
    """Recurrent-cell wrapper around :class:`HardSoftmaxNACLayer`.

    Arguments:
        input_size: number of ingoing features
        hidden_size: number of outgoing features
    """

    def __init__(self, input_size, hidden_size, **kwargs):
        super().__init__(HardSoftmaxNACLayer, input_size, hidden_size, **kwargs)
| 40.976744
| 93
| 0.65437
|
import math
import torch
from ..abstract import ExtendedTorchModule
from ..functional import sparsity_error
from ._abstract_recurrent_cell import AbstractRecurrentCell
class HardSoftmaxNACLayer(ExtendedTorchModule):
    """NAC layer whose weights are hard categorical samples from {1, -1, 0}.

    Each weight entry holds logits over the three target values; the forward
    pass draws a hard sample and uses the straight-through estimator so that
    gradients flow through the softmax expectation instead.
    """
    def __init__(self, in_features, out_features, **kwargs):
        super().__init__('nac', **kwargs)
        self.in_features = in_features
        self.out_features = out_features
        # The three candidate weight values; a buffer, not trainable.
        self.register_buffer('target_weights', torch.tensor([1, -1, 0], dtype=torch.float32))
        # Per-entry index of the last hard draw, filled in-place in forward().
        self.register_buffer('sample', torch.LongTensor(out_features, in_features))
        # Trainable logits for targets {1, -1}; the logit for 0 is kept as a
        # frozen buffer so only two of the three scores are learned.
        self.W_hat = torch.nn.Parameter(torch.Tensor(out_features, in_features, 2))
        self.register_buffer('W_hat_k', torch.Tensor(out_features, in_features, 1))
        self.register_parameter('bias', None)
    def reset_parameters(self):
        """Xavier-init the trainable logits and zero the frozen slack logit."""
        # Derivation of the sqrt(0.5) gain (assumes softmax'(0) ~ 1, as
        # holds for sigmoid). Then:
        # Var[W] = 1 * Var[S_1] - 1 * Var[S_2] + 0 * Var[S_3] = 2 / (fan[in] + fan[out])
        # Var[W] = 2 * Var[S_i] = 2 / (fan[in] + fan[out])
        # Var[S_i] = 1/2 * 2 / (fan[in] + fan[out])
        # sqrt(Var[S_i]) = sqrt(1/2) * sqrt(2 / (fan[in] + fan[out]))
        # This is not exactly true, because S_1, S_2, and S_3 are not entirely uncorrelated.
        torch.nn.init.xavier_uniform_(self.W_hat, gain=math.sqrt(0.5))
        torch.nn.init.constant_(self.W_hat_k, 0)
    def forward(self, input, reuse=False):
        """Linear transform with hard-sampled {1, -1, 0} weights (STE gradients)."""
        # Concat trainable and non-trainable weights
        W_hat_full = torch.cat((self.W_hat, self.W_hat_k), dim=-1)  # size = [out, in, 3]
        # Compute W_soft: the expected weight under the softmax distribution.
        pi = torch.nn.functional.softmax(W_hat_full, dim=-1)
        W_soft = pi @ self.target_weights
        # Compute W_hard: one draw per entry, written into the `sample` buffer.
        if not reuse:
            torch.multinomial(pi.view(-1, 3), 1, True, out=self.sample.view(-1))
        W_hard = self.target_weights[self.sample]
        # Use W_hard in the forward pass, but use W_soft for the gradients.
        # This implementation trick comes from torch.nn.functional.gumble_softmax(hard=True)
        W = W_hard - W_soft.detach() + W_soft
        # Log diagnostics, then compute the linear multiplication as usual.
        self.writer.add_histogram('W', W)
        self.writer.add_tensor('W', W)
        self.writer.add_scalar('W/sparsity_error', sparsity_error(W), verbose_only=False)
        return torch.nn.functional.linear(input, W, self.bias)
    def extra_repr(self):
        """Return the feature sizes for the module's printed representation."""
        return 'in_features={}, out_features={}'.format(
            self.in_features, self.out_features
        )
class HardSoftmaxNACCell(AbstractRecurrentCell):
    """Recurrent-cell adapter around :class:`HardSoftmaxNACLayer`.

    :param input_size: number of ingoing features
    :param hidden_size: number of outgoing features
    """
    def __init__(self, input_size, hidden_size, **kwargs):
        super().__init__(HardSoftmaxNACLayer, input_size, hidden_size, **kwargs)
| true
| true
|
f7091180d1e1b7bbe848c16021d65d8ff26b81ff
| 4,862
|
py
|
Python
|
venv/lib/python3.8/site-packages/vsts/task_agent/v4_1/models/task_agent.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/vsts/task_agent/v4_1/models/task_agent.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/vsts/task_agent/v4_1/models/task_agent.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .task_agent_reference import TaskAgentReference
class TaskAgent(TaskAgentReference):
    """TaskAgent.

    :param _links:
    :type _links: :class:`ReferenceLinks <task-agent.v4_1.models.ReferenceLinks>`
    :param enabled: Gets or sets a value indicating whether or not this agent should be enabled for job execution.
    :type enabled: bool
    :param id: Gets the identifier of the agent.
    :type id: int
    :param name: Gets the name of the agent.
    :type name: str
    :param oSDescription: Gets the OS of the agent.
    :type oSDescription: str
    :param status: Gets the current connectivity status of the agent.
    :type status: object
    :param version: Gets the version of the agent.
    :type version: str
    :param assigned_request: Gets the request which is currently assigned to this agent.
    :type assigned_request: :class:`TaskAgentJobRequest <task-agent.v4_1.models.TaskAgentJobRequest>`
    :param authorization: Gets or sets the authorization information for this agent.
    :type authorization: :class:`TaskAgentAuthorization <task-agent.v4_1.models.TaskAgentAuthorization>`
    :param created_on: Gets the date on which this agent was created.
    :type created_on: datetime
    :param last_completed_request: Gets the last request which was completed by this agent.
    :type last_completed_request: :class:`TaskAgentJobRequest <task-agent.v4_1.models.TaskAgentJobRequest>`
    :param max_parallelism: Gets or sets the maximum job parallelism allowed on this host.
    :type max_parallelism: int
    :param pending_update: Gets the pending update for this agent.
    :type pending_update: :class:`TaskAgentUpdate <task-agent.v4_1.models.TaskAgentUpdate>`
    :param properties:
    :type properties: :class:`object <task-agent.v4_1.models.object>`
    :param status_changed_on: Gets the date on which the last connectivity status change occurred.
    :type status_changed_on: datetime
    :param system_capabilities:
    :type system_capabilities: dict
    :param user_capabilities:
    :type user_capabilities: dict
    """

    # Generated serialization map consumed by msrest: maps python attribute
    # names to wire (JSON) keys and serialization types. Do not hand-edit —
    # this file is code-generated and changes would be lost on regeneration.
    _attribute_map = {
        '_links': {'key': '_links', 'type': 'ReferenceLinks'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
        'id': {'key': 'id', 'type': 'int'},
        'name': {'key': 'name', 'type': 'str'},
        'oSDescription': {'key': 'oSDescription', 'type': 'str'},
        'status': {'key': 'status', 'type': 'object'},
        'version': {'key': 'version', 'type': 'str'},
        'assigned_request': {'key': 'assignedRequest', 'type': 'TaskAgentJobRequest'},
        'authorization': {'key': 'authorization', 'type': 'TaskAgentAuthorization'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'last_completed_request': {'key': 'lastCompletedRequest', 'type': 'TaskAgentJobRequest'},
        'max_parallelism': {'key': 'maxParallelism', 'type': 'int'},
        'pending_update': {'key': 'pendingUpdate', 'type': 'TaskAgentUpdate'},
        'properties': {'key': 'properties', 'type': 'object'},
        'status_changed_on': {'key': 'statusChangedOn', 'type': 'iso-8601'},
        'system_capabilities': {'key': 'systemCapabilities', 'type': '{str}'},
        'user_capabilities': {'key': 'userCapabilities', 'type': '{str}'}
    }

    def __init__(self, _links=None, enabled=None, id=None, name=None, oSDescription=None, status=None, version=None, assigned_request=None, authorization=None, created_on=None, last_completed_request=None, max_parallelism=None, pending_update=None, properties=None, status_changed_on=None, system_capabilities=None, user_capabilities=None):
        # Reference-level fields are handled by the TaskAgentReference base;
        # only the agent-detail fields are stored directly on this class.
        super(TaskAgent, self).__init__(_links=_links, enabled=enabled, id=id, name=name, oSDescription=oSDescription, status=status, version=version)
        self.assigned_request = assigned_request
        self.authorization = authorization
        self.created_on = created_on
        self.last_completed_request = last_completed_request
        self.max_parallelism = max_parallelism
        self.pending_update = pending_update
        self.properties = properties
        self.status_changed_on = status_changed_on
        self.system_capabilities = system_capabilities
        self.user_capabilities = user_capabilities
| 58.578313
| 341
| 0.651172
|
from .task_agent_reference import TaskAgentReference
class TaskAgent(TaskAgentReference):
    """Generated msrest model for an Azure DevOps task agent.

    Extends TaskAgentReference with agent detail fields (assigned/last
    request, authorization, capabilities, update state). The
    ``_attribute_map`` drives msrest (de)serialization: python attribute
    name -> wire key and type.
    """
    _attribute_map = {
        '_links': {'key': '_links', 'type': 'ReferenceLinks'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
        'id': {'key': 'id', 'type': 'int'},
        'name': {'key': 'name', 'type': 'str'},
        'oSDescription': {'key': 'oSDescription', 'type': 'str'},
        'status': {'key': 'status', 'type': 'object'},
        'version': {'key': 'version', 'type': 'str'},
        'assigned_request': {'key': 'assignedRequest', 'type': 'TaskAgentJobRequest'},
        'authorization': {'key': 'authorization', 'type': 'TaskAgentAuthorization'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'last_completed_request': {'key': 'lastCompletedRequest', 'type': 'TaskAgentJobRequest'},
        'max_parallelism': {'key': 'maxParallelism', 'type': 'int'},
        'pending_update': {'key': 'pendingUpdate', 'type': 'TaskAgentUpdate'},
        'properties': {'key': 'properties', 'type': 'object'},
        'status_changed_on': {'key': 'statusChangedOn', 'type': 'iso-8601'},
        'system_capabilities': {'key': 'systemCapabilities', 'type': '{str}'},
        'user_capabilities': {'key': 'userCapabilities', 'type': '{str}'}
    }
    def __init__(self, _links=None, enabled=None, id=None, name=None, oSDescription=None, status=None, version=None, assigned_request=None, authorization=None, created_on=None, last_completed_request=None, max_parallelism=None, pending_update=None, properties=None, status_changed_on=None, system_capabilities=None, user_capabilities=None):
        # Base-class fields go through TaskAgentReference; the rest are plain attributes.
        super(TaskAgent, self).__init__(_links=_links, enabled=enabled, id=id, name=name, oSDescription=oSDescription, status=status, version=version)
        self.assigned_request = assigned_request
        self.authorization = authorization
        self.created_on = created_on
        self.last_completed_request = last_completed_request
        self.max_parallelism = max_parallelism
        self.pending_update = pending_update
        self.properties = properties
        self.status_changed_on = status_changed_on
        self.system_capabilities = system_capabilities
        self.user_capabilities = user_capabilities
| true
| true
|
f70911869b080ad9966af907970ad157263cbb09
| 154,166
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/resource/custom.py
|
wanlwanl/azure-cli
|
3d89040f4f6e64784f66ed3ea9290530bd5c57b6
|
[
"MIT"
] | 1
|
2020-08-10T23:50:16.000Z
|
2020-08-10T23:50:16.000Z
|
src/azure-cli/azure/cli/command_modules/resource/custom.py
|
wanlwanl/azure-cli
|
3d89040f4f6e64784f66ed3ea9290530bd5c57b6
|
[
"MIT"
] | 2
|
2020-09-12T04:31:23.000Z
|
2020-09-14T06:31:04.000Z
|
src/azure-cli/azure/cli/command_modules/resource/custom.py
|
hackathon-cli-recommendation/azure-cli
|
b9df3c9cfd400627912e5751bb6dcd429670b2c7
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=too-many-lines
# pylint: disable=line-too-long
from __future__ import print_function
from collections import OrderedDict
import codecs
import json
import os
import platform
import re
import ssl
import sys
import uuid
import base64
from six.moves.urllib.request import urlopen # pylint: disable=import-error
from six.moves.urllib.parse import urlparse # pylint: disable=import-error
from msrestazure.tools import is_valid_resource_id, parse_resource_id
from azure.mgmt.resource.resources.models import GenericResource, DeploymentMode
from azure.cli.core.parser import IncorrectUsageError
from azure.cli.core.util import get_file_json, read_file_content, shell_safe_json_parse, sdk_no_wait
from azure.cli.core.commands import LongRunningOperation
from azure.cli.core.commands.client_factory import get_mgmt_service_client
from azure.cli.core.profiles import ResourceType, get_sdk, get_api_version, AZURE_API_PROFILES
from azure.cli.command_modules.resource._client_factory import (
_resource_client_factory, _resource_policy_client_factory, _resource_lock_client_factory,
_resource_links_client_factory, _resource_deploymentscripts_client_factory, _authorization_management_client, _resource_managedapps_client_factory, _resource_templatespecs_client_factory)
from azure.cli.command_modules.resource._validators import _parse_lock_id
from knack.log import get_logger
from knack.prompting import prompt, prompt_pass, prompt_t_f, prompt_choice_list, prompt_int, NoTTYException
from knack.util import CLIError
from msrest.serialization import Serializer
from msrest.pipeline import SansIOHTTPPolicy
from ._validators import MSI_LOCAL_ID
from ._formatters import format_what_if_operation_result
logger = get_logger(__name__)
def _build_resource_id(**kwargs):
    """Assemble an ARM resource ID string from its component parts.

    Returns None when a required component (e.g. subscription or
    resource group) is missing from ``kwargs``.
    """
    from msrestazure.tools import resource_id as resource_id_from_dict
    try:
        built_id = resource_id_from_dict(**kwargs)
    except KeyError:
        # resource_id raises KeyError when a mandatory part is absent.
        return None
    return built_id
def _process_parameters(template_param_defs, parameter_lists):  # pylint: disable=too-many-statements
    """Merge deployment parameters from files, inline JSON, URIs and KEY=VALUE pairs.

    Each item in each list of ``parameter_lists`` is tried, in order, as:
    a local parameters file, an inline JSON object, a downloadable URI, and
    finally a ``KEY=VALUE`` pair typed against ``template_param_defs``.
    Later items override earlier ones. Raises CLIError when an item matches
    none of these forms or names an unknown template parameter.
    """
    def _try_parse_json_object(value):
        # Inline JSON; accepts either a bare parameters object or a full
        # parameters file ({"parameters": {...}}).
        try:
            parsed = _remove_comments_from_json(value, False)
            return parsed.get('parameters', parsed)
        except Exception:  # pylint: disable=broad-except
            return None
    def _try_load_file_object(file_path):
        # Local file; non-path strings (e.g. huge inline JSON) can make
        # isfile raise ValueError, which simply means "not a file".
        try:
            is_file = os.path.isfile(file_path)
        except ValueError:
            return None
        if is_file is True:
            try:
                content = read_file_content(file_path)
                if not content:
                    return None
                parsed = _remove_comments_from_json(content, False, file_path)
                return parsed.get('parameters', parsed)
            except Exception as ex:
                raise CLIError("Failed to parse {} with exception:\n {}".format(file_path, ex))
        return None
    def _try_load_uri(uri):
        # Remote parameters file; any fetch/parse error means "not a URI item".
        if "://" in uri:
            try:
                value = _urlretrieve(uri).decode('utf-8')
                parsed = _remove_comments_from_json(value, False)
                return parsed.get('parameters', parsed)
            except Exception:  # pylint: disable=broad-except
                pass
        return None
    def _try_parse_key_value_object(template_param_defs, parameters, value):
        # support situation where empty JSON "{}" is provided
        if value == '{}' and not parameters:
            return True
        try:
            key, value = value.split('=', 1)
        except ValueError:
            return False
        param = template_param_defs.get(key, None)
        if param is None:
            raise CLIError("unrecognized template parameter '{}'. Allowed parameters: {}"
                           .format(key, ', '.join(sorted(template_param_defs.keys()))))
        # Coerce the raw string to the type declared in the template.
        param_type = param.get('type', None)
        if param_type:
            param_type = param_type.lower()
        if param_type in ['object', 'array', 'secureobject']:
            parameters[key] = {'value': shell_safe_json_parse(value)}
        elif param_type in ['string', 'securestring']:
            parameters[key] = {'value': value}
        elif param_type == 'bool':
            parameters[key] = {'value': value.lower() == 'true'}
        elif param_type == 'int':
            parameters[key] = {'value': int(value)}
        else:
            logger.warning("Unrecognized type '%s' for parameter '%s'. Interpretting as string.", param_type, key)
            parameters[key] = {'value': value}
        return True
    parameters = {}
    for params in parameter_lists or []:
        for item in params:
            # Try the interpretations in precedence order: file, inline JSON, URI.
            param_obj = _try_load_file_object(item)
            if param_obj is None:
                param_obj = _try_parse_json_object(item)
            if param_obj is None:
                param_obj = _try_load_uri(item)
            if param_obj is not None:
                parameters.update(param_obj)
            elif not _try_parse_key_value_object(template_param_defs, parameters, item):
                raise CLIError('Unable to parse parameter: {}'.format(item))
    return parameters
# pylint: disable=redefined-outer-name
def _find_missing_parameters(parameters, template):
if template is None:
return {}
template_parameters = template.get('parameters', None)
if template_parameters is None:
return {}
missing = OrderedDict()
for parameter_name in template_parameters:
parameter = template_parameters[parameter_name]
if 'defaultValue' in parameter:
continue
if parameters is not None and parameters.get(parameter_name, None) is not None:
continue
missing[parameter_name] = parameter
return missing
def _prompt_for_parameters(missing_parameters, fail_on_no_tty=True):  # pylint: disable=too-many-statements
    """Interactively prompt for each missing template parameter.

    Prompts are typed per the parameter's declared type (choice list,
    secure string, int, bool, object/array as JSON, plain string). When no
    TTY is available each remaining value falls back to a type-appropriate
    placeholder (None/0/False/''), and NoTTYException is raised at the end
    unless ``fail_on_no_tty`` is False.
    """
    # Preserve template order when the caller passed an OrderedDict;
    # otherwise prompt alphabetically for determinism.
    prompt_list = missing_parameters.keys() if isinstance(missing_parameters, OrderedDict) \
        else sorted(missing_parameters)
    result = OrderedDict()
    no_tty = False
    for param_name in prompt_list:
        param = missing_parameters[param_name]
        param_type = param.get('type', 'string').lower()
        description = 'Missing description'
        metadata = param.get('metadata', None)
        if metadata is not None:
            description = metadata.get('description', description)
        allowed_values = param.get('allowedValues', None)
        prompt_str = "Please provide {} value for '{}' (? for help): ".format(param_type, param_name)
        # Loop so that invalid object/array JSON can be re-entered.
        while True:
            if allowed_values is not None:
                try:
                    ix = prompt_choice_list(prompt_str, allowed_values, help_string=description)
                    result[param_name] = allowed_values[ix]
                except NoTTYException:
                    result[param_name] = None
                    no_tty = True
                break
            elif param_type == 'securestring':
                try:
                    value = prompt_pass(prompt_str, help_string=description)
                except NoTTYException:
                    value = None
                    no_tty = True
                result[param_name] = value
                break
            elif param_type == 'int':
                try:
                    int_value = prompt_int(prompt_str, help_string=description)
                    result[param_name] = int_value
                except NoTTYException:
                    result[param_name] = 0
                    no_tty = True
                break
            elif param_type == 'bool':
                try:
                    value = prompt_t_f(prompt_str, help_string=description)
                    result[param_name] = value
                except NoTTYException:
                    result[param_name] = False
                    no_tty = True
                break
            elif param_type in ['object', 'array']:
                try:
                    value = prompt(prompt_str, help_string=description)
                except NoTTYException:
                    value = ''
                    no_tty = True
                if value == '':
                    # Empty input means an empty object/array of the right kind.
                    value = {} if param_type == 'object' else []
                else:
                    try:
                        value = shell_safe_json_parse(value)
                    except Exception as ex:  # pylint: disable=broad-except
                        # Bad JSON: report and re-prompt (stay in the while loop).
                        logger.error(ex)
                        continue
                result[param_name] = value
                break
            else:
                try:
                    result[param_name] = prompt(prompt_str, help_string=description)
                except NoTTYException:
                    result[param_name] = None
                    no_tty = True
                break
    if no_tty and fail_on_no_tty:
        raise NoTTYException
    return result
# pylint: disable=redefined-outer-name
def _get_missing_parameters(parameters, template, prompt_fn, no_prompt=False):
    """Fill in template parameters that were not supplied by the caller.

    Mutates and returns ``parameters``. When parameters are missing:
    with ``no_prompt`` set, only a warning is logged (preflight/validation
    may still fail later); otherwise ``prompt_fn`` is invoked, and if no
    TTY is available the NoTTYException is converted into a CLIError
    listing the missing names.
    """
    missing = _find_missing_parameters(parameters, template)
    if missing:
        if no_prompt is True:
            logger.warning("Missing input parameters: %s ", ', '.join(sorted(missing.keys())))
        else:
            try:
                prompt_parameters = prompt_fn(missing)
                # Wrap each prompted value in the ARM {"value": ...} envelope.
                for param_name in prompt_parameters:
                    parameters[param_name] = {
                        "value": prompt_parameters[param_name]
                    }
            except NoTTYException:
                raise CLIError("Missing input parameters: {}".format(', '.join(sorted(missing.keys()))))
    return parameters
def _ssl_context():
if sys.version_info < (3, 4):
return ssl.SSLContext(ssl.PROTOCOL_TLSv1)
return ssl.create_default_context()
def _urlretrieve(url):
    """Fetch ``url`` over HTTP(S) and return the raw response bytes."""
    req = urlopen(url, context=_ssl_context())
    return req.read()
# pylint: disable=redefined-outer-name
# pylint: disable=redefined-outer-name
def _remove_comments_from_json(template, preserve_order=True, file_path=None):
    """Parse JSON-with-comments (ARM template dialect) into a dict.

    Strips // and /* */ comments, minifies, neutralizes multi-line string
    literals (we only need the structure for parameter prompting, not the
    exact payload), then parses. ``file_path`` is used only to improve the
    error message on failure.
    """
    from jsmin import jsmin
    # When commenting at the bottom of all elements in a JSON object, jsmin has a bug that will wrap lines.
    # It will affect the subsequent multi-line processing logic, so deal with this situation in advance here.
    template = re.sub(r'(^[\t ]*//[\s\S]*?\n)|(^[\t ]*/\*{1,2}[\s\S]*?\*/)', '', template, flags=re.M)
    minified = jsmin(template)
    # Get rid of multi-line strings. Note, we are not sending it on the wire rather just extract parameters to prompt for values
    result = re.sub(r'"[^"]*?\n[^"]*?(?<!\\)"', '"#Azure Cli#"', minified, re.DOTALL)
    try:
        return shell_safe_json_parse(result, preserve_order)
    except CLIError:
        # Because the processing of removing comments and compression will lead to misplacement of error location,
        # so the error message should be wrapped.
        if file_path:
            raise CLIError("Failed to parse '{}', please check whether it is a valid JSON format".format(file_path))
        raise CLIError("Failed to parse the JSON data, please check whether it is a valid JSON format")
# pylint: disable=too-many-locals, too-many-statements, too-few-public-methods
# pylint: disable=too-many-locals, too-many-statements, too-few-public-methods
def _deploy_arm_template_core_unmodified(cmd, resource_group_name, template_file=None,
                                         template_uri=None, deployment_name=None, parameters=None,
                                         mode=None, rollback_on_error=None, validate_only=False, no_wait=False,
                                         aux_subscriptions=None, aux_tenants=None, no_prompt=False):
    """Validate and (optionally) execute a resource-group ARM deployment.

    For local template files the raw template text is sent unmodified on the
    wire (via the JsonCTemplate/JSONSerializer/JsonCTemplatePolicy machinery)
    so server-side what-if/validation sees the author's exact JSON; for
    template URIs a TemplateLink is sent instead. Always runs preflight
    validation first and raises CLIError on validation errors; returns the
    validation result when ``validate_only`` is set, otherwise starts the
    deployment (honoring ``no_wait``).
    """
    DeploymentProperties, TemplateLink, OnErrorDeployment = cmd.get_models('DeploymentProperties', 'TemplateLink',
                                                                           'OnErrorDeployment')
    template_link = None
    template_obj = None
    on_error_deployment = None
    template_content = None
    if template_uri:
        template_link = TemplateLink(uri=template_uri)
        template_obj = _remove_comments_from_json(_urlretrieve(template_uri).decode('utf-8'), file_path=template_uri)
    else:
        template_content = read_file_content(template_file)
        template_obj = _remove_comments_from_json(template_content, file_path=template_file)
    # Empty string means "roll back to last successful deployment"; any other
    # truthy value names a specific deployment to roll back to.
    if rollback_on_error == '':
        on_error_deployment = OnErrorDeployment(type='LastSuccessful')
    elif rollback_on_error:
        on_error_deployment = OnErrorDeployment(type='SpecificDeployment', deployment_name=rollback_on_error)
    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])
    parameters = _process_parameters(template_param_defs, parameters) or {}
    parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters, no_prompt)
    # Round-trip through JSON to normalize to plain serializable types.
    parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template_content, template_link=template_link,
                                      parameters=parameters, mode=mode, on_error_deployment=on_error_deployment)
    smc = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                  aux_subscriptions=aux_subscriptions, aux_tenants=aux_tenants)
    deployment_client = smc.deployments  # This solves the multi-api for you
    if not template_uri:
        # Swap in the serializer/pipeline that splices the raw template text
        # into the request body without re-serializing it.
        # pylint: disable=protected-access
        deployment_client._serialize = JSONSerializer(
            deployment_client._serialize.dependencies
        )
        # Plug this as default HTTP pipeline
        from msrest.pipeline import Pipeline
        from msrest.pipeline.requests import (
            RequestsCredentialsPolicy,
            RequestsPatchSession,
            PipelineRequestsHTTPSender
        )
        from msrest.universal_http.requests import RequestsHTTPSender
        smc.config.pipeline = Pipeline(
            policies=[
                JsonCTemplatePolicy(),
                smc.config.user_agent_policy,
                RequestsPatchSession(),
                smc.config.http_logger_policy,
                RequestsCredentialsPolicy(smc.config.credentials)
            ],
            sender=PipelineRequestsHTTPSender(RequestsHTTPSender(smc.config))
        )
    # Newer API versions return a poller from validate(); older ones are synchronous.
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        Deployment = cmd.get_models('Deployment')
        deployment = Deployment(properties=properties)
        validation_poller = deployment_client.validate(resource_group_name, deployment_name, deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = deployment_client.validate(resource_group_name, deployment_name, properties)
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, deployment_client.create_or_update, resource_group_name, deployment_name, deployment)
    return sdk_no_wait(no_wait, deployment_client.create_or_update, resource_group_name, deployment_name, properties)
class JsonCTemplate:
    """Marker wrapper for a raw ARM template payload.

    JSONSerializer stores one of these in the serialized body so that
    JsonCTemplatePolicy can later splice the original template text into
    the outgoing request verbatim (preserving comments/formatting).
    """
    def __init__(self, template_as_bytes):
        self.template_as_bytes = template_as_bytes
class JSONSerializer(Serializer):
    """msrest Serializer that defers template serialization for deployments.

    For deployment payload types, the template object is replaced with a
    JsonCTemplate wrapper so the raw text survives until
    JsonCTemplatePolicy injects it into the request body.
    """
    def body(self, data, data_type, **kwargs):
        if data_type in ('Deployment', 'ScopedDeployment', 'DeploymentWhatIf', 'ScopedDeploymentWhatIf'):
            # Be sure to pass a DeploymentProperties
            template = data.properties.template
            if template:
                data_as_dict = data.serialize()
                data_as_dict["properties"]["template"] = JsonCTemplate(template)
                return data_as_dict
        # Anything else (or a deployment without an inline template) uses
        # the stock msrest serialization path.
        return super(JSONSerializer, self).body(data, data_type, **kwargs)
class JsonCTemplatePolicy(SansIOHTTPPolicy):
    """HTTP pipeline policy that splices the raw template into the body.

    Finds the JsonCTemplate placeholder left by JSONSerializer, removes it
    (and any templateLink), serializes the rest of the body, then string-
    concatenates the original template text back in so it reaches the
    service byte-for-byte as authored.
    """
    def on_request(self, request, **kwargs):
        http_request = request.http_request
        logger.info(http_request.data)
        if (getattr(http_request, 'data', {}) or {}).get('properties', {}).get('template'):
            template = http_request.data["properties"]["template"]
            if not isinstance(template, JsonCTemplate):
                raise ValueError()
            del http_request.data["properties"]["template"]
            # templateLink and template cannot exist at the same time in deployment_dry_run mode
            if "templateLink" in http_request.data["properties"].keys():
                del http_request.data["properties"]["templateLink"]
            partial_request = json.dumps(http_request.data)
            # Drop the trailing '}}' of the serialized body and re-append it
            # after the raw template text. NOTE(review): despite the name,
            # template_as_bytes is concatenated with str here — presumably it
            # holds text; confirm against JSONSerializer's input.
            http_request.data = partial_request[:-2] + ", template:" + template.template_as_bytes + r"}}"
            http_request.data = http_request.data.encode('utf-8')
# pylint: disable=unused-argument
# pylint: disable=unused-argument
def deploy_arm_template_at_subscription_scope(cmd,
                                              template_file=None, template_uri=None, parameters=None,
                                              deployment_name=None, deployment_location=None,
                                              no_wait=False, handle_extended_json_format=None, no_prompt=False,
                                              confirm_with_what_if=None, what_if_result_format=None,
                                              what_if_exclude_change_types=None, template_spec=None):
    """Deploy an ARM template at subscription scope.

    When ``confirm_with_what_if`` is set, shows the what-if result first and
    asks for confirmation before deploying (returns None if declined).
    """
    if confirm_with_what_if:
        what_if_deploy_arm_template_at_subscription_scope(cmd,
                                                          template_file=template_file, template_uri=template_uri,
                                                          parameters=parameters, deployment_name=deployment_name,
                                                          deployment_location=deployment_location,
                                                          result_format=what_if_result_format,
                                                          exclude_change_types=what_if_exclude_change_types,
                                                          no_prompt=no_prompt, template_spec=template_spec)
        from knack.prompting import prompt_y_n
        if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
            return None
    return _deploy_arm_template_at_subscription_scope(cmd=cmd,
                                                      template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                      deployment_name=deployment_name, deployment_location=deployment_location,
                                                      validate_only=False, no_wait=no_wait,
                                                      no_prompt=no_prompt, template_spec=template_spec)
# pylint: disable=unused-argument
# pylint: disable=unused-argument
def validate_arm_template_at_subscription_scope(cmd,
                                                template_file=None, template_uri=None, parameters=None,
                                                deployment_name=None, deployment_location=None,
                                                no_wait=False, handle_extended_json_format=None,
                                                no_prompt=False, template_spec=None):
    """Run preflight validation of a subscription-scope ARM deployment without deploying."""
    return _deploy_arm_template_at_subscription_scope(
        cmd=cmd,
        template_file=template_file,
        template_uri=template_uri,
        parameters=parameters,
        deployment_name=deployment_name,
        deployment_location=deployment_location,
        validate_only=True,
        no_wait=no_wait,
        no_prompt=no_prompt,
        template_spec=template_spec)
def _deploy_arm_template_at_subscription_scope(cmd,
                                               template_file=None, template_uri=None, parameters=None,
                                               deployment_name=None, deployment_location=None, validate_only=False,
                                               no_wait=False, no_prompt=False, template_spec=None):
    """Validate and optionally run a subscription-scope deployment.

    Always runs preflight validation first; raises CLIError on validation
    errors. Returns the validation result when ``validate_only`` is set,
    otherwise starts the deployment (honoring ``no_wait``).
    """
    deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
                                                                      template_uri=template_uri, parameters=parameters,
                                                                      mode='Incremental',
                                                                      no_prompt=no_prompt,
                                                                      template_spec=template_spec)
    # Raw-template pipeline is only plugged for local templates.
    mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
    # Newer API versions return a poller from validate(); older ones are synchronous.
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        Deployment = cmd.get_models('Deployment')
        deployment = Deployment(properties=deployment_properties, location=deployment_location)
        validation_poller = mgmt_client.validate_at_subscription_scope(deployment_name, deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = mgmt_client.validate_at_subscription_scope(deployment_name, deployment_properties, deployment_location)
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_subscription_scope, deployment_name, deployment)
    return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_subscription_scope, deployment_name,
                       deployment_properties, deployment_location)
# pylint: disable=unused-argument
# pylint: disable=unused-argument
def deploy_arm_template_at_resource_group(cmd,
                                          resource_group_name=None,
                                          template_file=None, template_uri=None, parameters=None,
                                          deployment_name=None, mode=None, rollback_on_error=None,
                                          no_wait=False, handle_extended_json_format=None,
                                          aux_subscriptions=None, aux_tenants=None, no_prompt=False,
                                          confirm_with_what_if=None, what_if_result_format=None,
                                          what_if_exclude_change_types=None, template_spec=None):
    """Deploy an ARM template to a resource group.

    When ``confirm_with_what_if`` is set, shows the what-if result first and
    asks for confirmation before deploying (returns None if declined).
    """
    if confirm_with_what_if:
        what_if_deploy_arm_template_at_resource_group(cmd,
                                                      resource_group_name=resource_group_name,
                                                      template_file=template_file, template_uri=template_uri,
                                                      parameters=parameters, deployment_name=deployment_name, mode=mode,
                                                      aux_tenants=aux_tenants, result_format=what_if_result_format,
                                                      exclude_change_types=what_if_exclude_change_types,
                                                      no_prompt=no_prompt, template_spec=template_spec)
        from knack.prompting import prompt_y_n
        if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
            return None
    return _deploy_arm_template_at_resource_group(cmd=cmd,
                                                  resource_group_name=resource_group_name,
                                                  template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                  deployment_name=deployment_name, mode=mode, rollback_on_error=rollback_on_error,
                                                  validate_only=False, no_wait=no_wait,
                                                  aux_subscriptions=aux_subscriptions, aux_tenants=aux_tenants,
                                                  no_prompt=no_prompt, template_spec=template_spec)
# pylint: disable=unused-argument
# pylint: disable=unused-argument
def validate_arm_template_at_resource_group(cmd,
                                            resource_group_name=None,
                                            template_file=None, template_uri=None, parameters=None,
                                            deployment_name=None, mode=None, rollback_on_error=None,
                                            no_wait=False, handle_extended_json_format=None, no_prompt=False, template_spec=None):
    """Run preflight validation of a resource-group ARM deployment without deploying."""
    return _deploy_arm_template_at_resource_group(
        cmd,
        resource_group_name=resource_group_name,
        template_file=template_file,
        template_uri=template_uri,
        parameters=parameters,
        deployment_name=deployment_name,
        mode=mode,
        rollback_on_error=rollback_on_error,
        validate_only=True,
        no_wait=no_wait,
        no_prompt=no_prompt,
        template_spec=template_spec)
def _deploy_arm_template_at_resource_group(cmd,
                                           resource_group_name=None,
                                           template_file=None, template_uri=None, parameters=None,
                                           deployment_name=None, mode=None, rollback_on_error=None,
                                           validate_only=False, no_wait=False,
                                           aux_subscriptions=None, aux_tenants=None, no_prompt=False, template_spec=None):
    """Validate and optionally run a resource-group deployment.

    Always runs preflight validation first; raises CLIError on validation
    errors. Returns the validation result when ``validate_only`` is set,
    otherwise starts the deployment (honoring ``no_wait``).
    """
    deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
                                                                      template_uri=template_uri,
                                                                      parameters=parameters, mode=mode,
                                                                      rollback_on_error=rollback_on_error,
                                                                      no_prompt=no_prompt, template_spec=template_spec)
    # Raw-template pipeline is only plugged for local templates.
    mgmt_client = _get_deployment_management_client(cmd.cli_ctx, aux_subscriptions=aux_subscriptions,
                                                    aux_tenants=aux_tenants, plug_pipeline=(template_uri is None and template_spec is None))
    # Newer API versions return a poller from validate(); older ones are synchronous.
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        Deployment = cmd.get_models('Deployment')
        deployment = Deployment(properties=deployment_properties)
        validation_poller = mgmt_client.validate(resource_group_name, deployment_name, deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = mgmt_client.validate(resource_group_name, deployment_name, deployment_properties)
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, mgmt_client.create_or_update, resource_group_name, deployment_name, deployment)
    return sdk_no_wait(no_wait, mgmt_client.create_or_update, resource_group_name, deployment_name, deployment_properties)
# pylint: disable=unused-argument
# pylint: disable=unused-argument
def deploy_arm_template_at_management_group(cmd,
                                            management_group_id=None,
                                            template_file=None, template_uri=None, parameters=None,
                                            deployment_name=None, deployment_location=None,
                                            no_wait=False, handle_extended_json_format=None, no_prompt=False,
                                            confirm_with_what_if=None, what_if_result_format=None,
                                            what_if_exclude_change_types=None, template_spec=None):
    """Deploy an ARM template at management-group scope.

    When ``confirm_with_what_if`` is set, shows the what-if result first and
    asks for confirmation before deploying (returns None if declined).
    """
    if confirm_with_what_if:
        what_if_deploy_arm_template_at_management_group(cmd,
                                                        management_group_id=management_group_id,
                                                        template_file=template_file, template_uri=template_uri,
                                                        parameters=parameters, deployment_name=deployment_name,
                                                        deployment_location=deployment_location,
                                                        result_format=what_if_result_format,
                                                        exclude_change_types=what_if_exclude_change_types,
                                                        no_prompt=no_prompt, template_spec=template_spec)
        from knack.prompting import prompt_y_n
        if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
            return None
    return _deploy_arm_template_at_management_group(cmd=cmd,
                                                    management_group_id=management_group_id,
                                                    template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                    deployment_name=deployment_name, deployment_location=deployment_location,
                                                    validate_only=False, no_wait=no_wait,
                                                    no_prompt=no_prompt, template_spec=template_spec)
# pylint: disable=unused-argument
def validate_arm_template_at_management_group(cmd,
                                              management_group_id=None,
                                              template_file=None, template_uri=None, parameters=None,
                                              deployment_name=None, deployment_location=None,
                                              no_wait=False, handle_extended_json_format=None,
                                              no_prompt=False, template_spec=None):
    """Validate an ARM template at management group scope without starting the deployment."""
    # Delegates to the shared helper with validate_only=True so only preflight runs.
    return _deploy_arm_template_at_management_group(cmd=cmd,
                                                    management_group_id=management_group_id,
                                                    template_file=template_file,
                                                    template_uri=template_uri,
                                                    parameters=parameters,
                                                    deployment_name=deployment_name,
                                                    deployment_location=deployment_location,
                                                    validate_only=True,
                                                    no_wait=no_wait,
                                                    no_prompt=no_prompt,
                                                    template_spec=template_spec)
def _deploy_arm_template_at_management_group(cmd,
                                             management_group_id=None,
                                             template_file=None, template_uri=None, parameters=None,
                                             deployment_name=None, deployment_location=None, validate_only=False,
                                             no_wait=False, no_prompt=False, template_spec=None):
    """Validate and, unless validate_only, start an ARM deployment at management group scope.

    :raises CLIError: when server-side preflight validation reports an error.
    """
    deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
                                                                      template_uri=template_uri,
                                                                      parameters=parameters, mode='Incremental',
                                                                      no_prompt=no_prompt, template_spec=template_spec)
    # Custom pipeline is only plugged in when the template body is sent inline
    # (neither a template URI nor a template spec reference is used).
    mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        # Newer API versions wrap the properties in a ScopedDeployment model and
        # validation itself is a long-running operation.
        ScopedDeployment = cmd.get_models('ScopedDeployment')
        deployment = ScopedDeployment(properties=deployment_properties, location=deployment_location)
        validation_poller = mgmt_client.validate_at_management_group_scope(management_group_id, deployment_name, deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = mgmt_client.validate_at_management_group_scope(management_group_id, deployment_name,
                                                                           deployment_properties, deployment_location)
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_management_group_scope,
                           management_group_id, deployment_name, deployment)
    return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_management_group_scope,
                       management_group_id, deployment_name, deployment_properties, deployment_location)
# pylint: disable=unused-argument
def deploy_arm_template_at_tenant_scope(cmd,
                                        template_file=None, template_uri=None, parameters=None,
                                        deployment_name=None, deployment_location=None,
                                        no_wait=False, handle_extended_json_format=None, no_prompt=False,
                                        confirm_with_what_if=None, what_if_result_format=None,
                                        what_if_exclude_change_types=None, template_spec=None):
    """Deploy an ARM template at tenant scope, optionally previewing with What-If first."""
    # Arguments shared between the What-If preview and the real deployment call.
    shared_kwargs = dict(template_file=template_file, template_uri=template_uri,
                         parameters=parameters, deployment_name=deployment_name,
                         deployment_location=deployment_location,
                         no_prompt=no_prompt, template_spec=template_spec)
    if confirm_with_what_if:
        what_if_deploy_arm_template_at_tenant_scope(cmd,
                                                    result_format=what_if_result_format,
                                                    exclude_change_types=what_if_exclude_change_types,
                                                    **shared_kwargs)
        from knack.prompting import prompt_y_n
        if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
            return None
    return _deploy_arm_template_at_tenant_scope(cmd=cmd, validate_only=False, no_wait=no_wait,
                                                **shared_kwargs)
# pylint: disable=unused-argument
def validate_arm_template_at_tenant_scope(cmd,
                                          template_file=None, template_uri=None, parameters=None,
                                          deployment_name=None, deployment_location=None,
                                          no_wait=False, handle_extended_json_format=None, no_prompt=False, template_spec=None):
    """Validate an ARM template at tenant scope without starting the deployment."""
    # Delegates to the shared helper with validate_only=True so only preflight runs.
    return _deploy_arm_template_at_tenant_scope(cmd=cmd,
                                                template_file=template_file,
                                                template_uri=template_uri,
                                                parameters=parameters,
                                                deployment_name=deployment_name,
                                                deployment_location=deployment_location,
                                                validate_only=True,
                                                no_wait=no_wait,
                                                no_prompt=no_prompt,
                                                template_spec=template_spec)
def _deploy_arm_template_at_tenant_scope(cmd,
                                         template_file=None, template_uri=None, parameters=None,
                                         deployment_name=None, deployment_location=None, validate_only=False,
                                         no_wait=False, no_prompt=False, template_spec=None):
    """Validate and, unless validate_only, start an ARM deployment at tenant scope.

    :raises CLIError: when server-side preflight validation reports an error.
    """
    deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
                                                                      template_uri=template_uri,
                                                                      parameters=parameters, mode='Incremental',
                                                                      no_prompt=no_prompt, template_spec=template_spec,)
    # Custom pipeline is only plugged in when the template body is sent inline.
    mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        # Newer API versions wrap the properties in a ScopedDeployment model and
        # validation itself is a long-running operation.
        ScopedDeployment = cmd.get_models('ScopedDeployment')
        deployment = ScopedDeployment(properties=deployment_properties, location=deployment_location)
        validation_poller = mgmt_client.validate_at_tenant_scope(deployment_name=deployment_name, parameters=deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = mgmt_client.validate_at_tenant_scope(deployment_name=deployment_name,
                                                                 properties=deployment_properties,
                                                                 location=deployment_location)
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_tenant_scope, deployment_name, deployment)
    return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_tenant_scope, deployment_name,
                       deployment_properties, deployment_location)
def what_if_deploy_arm_template_at_resource_group(cmd, resource_group_name,
                                                  template_file=None, template_uri=None, parameters=None,
                                                  deployment_name=None, mode=DeploymentMode.incremental,
                                                  aux_tenants=None, result_format=None,
                                                  no_pretty_print=None, no_prompt=False,
                                                  exclude_change_types=None, template_spec=None):
    """Run a What-If operation for a resource-group-scoped deployment and render the result."""
    properties = _prepare_deployment_what_if_properties(cmd, template_file, template_uri,
                                                        parameters, mode, result_format, no_prompt, template_spec)
    # Custom pipeline is only plugged in when the template body is sent inline.
    client = _get_deployment_management_client(cmd.cli_ctx, aux_tenants=aux_tenants,
                                               plug_pipeline=(template_uri is None and template_spec is None))
    poller = client.what_if(resource_group_name, deployment_name, properties)
    return _what_if_deploy_arm_template_core(cmd.cli_ctx, poller, no_pretty_print, exclude_change_types)
def what_if_deploy_arm_template_at_subscription_scope(cmd,
                                                      template_file=None, template_uri=None, parameters=None,
                                                      deployment_name=None, deployment_location=None,
                                                      result_format=None, no_pretty_print=None, no_prompt=False,
                                                      exclude_change_types=None, template_spec=None):
    """Run a What-If operation for a subscription-scoped deployment and render the result."""
    properties = _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
                                                        DeploymentMode.incremental, result_format, no_prompt,
                                                        template_spec)
    client = _get_deployment_management_client(cmd.cli_ctx,
                                               plug_pipeline=(template_uri is None and template_spec is None))
    poller = client.what_if_at_subscription_scope(deployment_name, properties, deployment_location)
    return _what_if_deploy_arm_template_core(cmd.cli_ctx, poller, no_pretty_print, exclude_change_types)
def what_if_deploy_arm_template_at_management_group(cmd, management_group_id=None,
                                                    template_file=None, template_uri=None, parameters=None,
                                                    deployment_name=None, deployment_location=None,
                                                    result_format=None, no_pretty_print=None, no_prompt=False,
                                                    exclude_change_types=None, template_spec=None):
    """Run a What-If operation for a management-group-scoped deployment and render the result."""
    properties = _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
                                                        DeploymentMode.incremental, result_format, no_prompt,
                                                        template_spec)
    client = _get_deployment_management_client(cmd.cli_ctx,
                                               plug_pipeline=(template_uri is None and template_spec is None))
    poller = client.what_if_at_management_group_scope(management_group_id, deployment_name,
                                                      deployment_location, properties)
    return _what_if_deploy_arm_template_core(cmd.cli_ctx, poller, no_pretty_print, exclude_change_types)
def what_if_deploy_arm_template_at_tenant_scope(cmd,
                                                template_file=None, template_uri=None, parameters=None,
                                                deployment_name=None, deployment_location=None,
                                                result_format=None, no_pretty_print=None, no_prompt=False,
                                                exclude_change_types=None, template_spec=None):
    """Run a What-If operation for a tenant-scoped deployment and render the result."""
    properties = _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
                                                        DeploymentMode.incremental, result_format, no_prompt,
                                                        template_spec)
    client = _get_deployment_management_client(cmd.cli_ctx,
                                               plug_pipeline=(template_uri is None and template_spec is None))
    poller = client.what_if_at_tenant_scope(deployment_name, deployment_location, properties)
    return _what_if_deploy_arm_template_core(cmd.cli_ctx, poller, no_pretty_print, exclude_change_types)
def _what_if_deploy_arm_template_core(cli_ctx, what_if_poller, no_pretty_print, exclude_change_types):
    """Await a What-If poller, filter its changes, and either return the raw result or pretty-print it.

    :raises CLIError: when the What-If response carries a template error.
    """
    what_if_result = LongRunningOperation(cli_ctx)(what_if_poller)
    if what_if_result.error:
        # The status code is 200 even when there's an error, because
        # it is technically a successful What-If operation. The error
        # is on the ARM template but not the operation.
        err_message = _build_preflight_error_message(what_if_result.error)
        raise CLIError(err_message)
    if exclude_change_types:
        # Change-type filtering is case-insensitive.
        exclude_change_types = set(map(lambda x: x.lower(), exclude_change_types))
        what_if_result.changes = list(
            filter(lambda x: x.change_type.lower() not in exclude_change_types, what_if_result.changes)
        )
    if no_pretty_print:
        return what_if_result
    try:
        if cli_ctx.enable_color:
            # Disabling colorama since it will silently strip out the Xterm 256 color codes the What-If formatter
            # is using. Unfortunately, the colors that colorama supports are very limited, which doesn't meet our needs.
            from colorama import deinit
            deinit()
            # Enable virtual terminal mode for Windows console so it processes color codes.
            if platform.system() == "Windows":
                from ._win_vt import enable_vt_mode
                enable_vt_mode()
        print(format_what_if_operation_result(what_if_result, cli_ctx.enable_color))
    finally:
        # Re-enable colorama so the rest of the CLI keeps its usual color handling.
        if cli_ctx.enable_color:
            from colorama import init
            init()
    return None
def _build_preflight_error_message(preflight_error):
err_messages = [f'{preflight_error.code} - {preflight_error.message}']
for detail in preflight_error.details or []:
err_messages.append(_build_preflight_error_message(detail))
return '\n'.join(err_messages)
def _prepare_deployment_properties_unmodified(cmd, template_file=None, template_uri=None, parameters=None,
                                              mode=None, rollback_on_error=None, no_prompt=False, template_spec=None):
    """Build a DeploymentProperties object from a template URI, template spec, or local file.

    Template source precedence: template_uri, then template_spec, then template_file.
    Missing template parameters are prompted for interactively unless no_prompt is set.
    """
    cli_ctx = cmd.cli_ctx
    DeploymentProperties, TemplateLink, OnErrorDeployment = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                                                    'DeploymentProperties', 'TemplateLink',
                                                                    'OnErrorDeployment', mod='models')
    template_link = None
    template_obj = None
    on_error_deployment = None
    template_content = None
    if template_uri:
        template_link = TemplateLink(uri=template_uri)
        # Download the remote template only to inspect its parameter definitions locally.
        template_obj = _remove_comments_from_json(_urlretrieve(template_uri).decode('utf-8'), file_path=template_uri)
    elif template_spec:
        template_link = TemplateLink(id=template_spec, mode="Incremental")
        template_obj = show_resource(cmd=cmd, resource_ids=[template_spec]).properties['template']
    else:
        # Local file: the raw content is sent inline as the deployment template.
        template_content = read_file_content(template_file)
        template_obj = _remove_comments_from_json(template_content, file_path=template_file)
    if rollback_on_error == '':
        # An empty-string flag value means "roll back to the last successful deployment".
        on_error_deployment = OnErrorDeployment(type='LastSuccessful')
    elif rollback_on_error:
        on_error_deployment = OnErrorDeployment(type='SpecificDeployment', deployment_name=rollback_on_error)
    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])
    parameters = _process_parameters(template_param_defs, parameters) or {}
    parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters, no_prompt)
    # Round-trip through JSON to normalize the parameter structure to plain JSON types.
    parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template_content, template_link=template_link,
                                      parameters=parameters, mode=mode, on_error_deployment=on_error_deployment)
    return properties
def _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
                                           mode, result_format, no_prompt, template_spec):
    """Build DeploymentWhatIfProperties from the same inputs a normal deployment uses."""
    DeploymentWhatIfProperties, DeploymentWhatIfSettings = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                                                   'DeploymentWhatIfProperties', 'DeploymentWhatIfSettings',
                                                                   mod='models')
    # Reuse the regular deployment-properties builder, then copy its fields across.
    base = _prepare_deployment_properties_unmodified(cmd=cmd, template_file=template_file,
                                                     template_uri=template_uri, parameters=parameters,
                                                     mode=mode, no_prompt=no_prompt, template_spec=template_spec)
    settings = DeploymentWhatIfSettings(result_format=result_format)
    return DeploymentWhatIfProperties(template=base.template, template_link=base.template_link,
                                      parameters=base.parameters, mode=base.mode,
                                      what_if_settings=settings)
def _get_deployment_management_client(cli_ctx, aux_subscriptions=None, aux_tenants=None, plug_pipeline=True):
    """Return the deployments operation group of the resource management client.

    When plug_pipeline is True, the client's serializer and HTTP pipeline are replaced
    with JSON-C-aware versions (presumably so inline template text is preserved as-is
    on the wire — see JsonCTemplatePolicy; TODO confirm).
    """
    smc = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES, aux_subscriptions=aux_subscriptions,
                                  aux_tenants=aux_tenants)
    deployment_client = smc.deployments  # This solves the multi-api for you
    if plug_pipeline:
        # pylint: disable=protected-access
        deployment_client._serialize = JSONSerializer(
            deployment_client._serialize.dependencies
        )
        # Plug this as default HTTP pipeline
        from msrest.pipeline import Pipeline
        from msrest.pipeline.requests import (
            RequestsCredentialsPolicy,
            RequestsPatchSession,
            PipelineRequestsHTTPSender
        )
        from msrest.universal_http.requests import RequestsHTTPSender
        smc.config.pipeline = Pipeline(
            policies=[
                JsonCTemplatePolicy(),
                smc.config.user_agent_policy,
                RequestsPatchSession(),
                smc.config.http_logger_policy,
                RequestsCredentialsPolicy(smc.config.credentials)
            ],
            sender=PipelineRequestsHTTPSender(RequestsHTTPSender(smc.config))
        )
    return deployment_client
def _list_resources_odata_filter_builder(resource_group_name=None, resource_provider_namespace=None,
resource_type=None, name=None, tag=None, location=None):
"""Build up OData filter string from parameters """
if tag is not None:
if resource_group_name:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--resource-group\''
'(If the default value for resource group is set, please use \'az configure --defaults group=""\' command to clear it first)')
if resource_provider_namespace:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--namespace\'')
if resource_type:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--resource-type\'')
if name:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--name\'')
if location:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--location\''
'(If the default value for location is set, please use \'az configure --defaults location=""\' command to clear it first)')
filters = []
if resource_group_name:
filters.append("resourceGroup eq '{}'".format(resource_group_name))
if name:
filters.append("name eq '{}'".format(name))
if location:
filters.append("location eq '{}'".format(location))
if resource_type:
if resource_provider_namespace:
f = "'{}/{}'".format(resource_provider_namespace, resource_type)
else:
if not re.match('[^/]+/[^/]+', resource_type):
raise CLIError(
'Malformed resource-type: '
'--resource-type=<namespace>/<resource-type> expected.')
# assume resource_type is <namespace>/<type>. The worst is to get a server error
f = "'{}'".format(resource_type)
filters.append("resourceType eq " + f)
else:
if resource_provider_namespace:
raise CLIError('--namespace also requires --resource-type')
if tag:
tag_name = list(tag.keys())[0] if isinstance(tag, dict) else tag
tag_value = tag[tag_name] if isinstance(tag, dict) else ''
if tag_name:
if tag_name[-1] == '*':
filters.append("startswith(tagname, '%s')" % tag_name[0:-1])
else:
filters.append("tagname eq '%s'" % tag_name)
if tag_value != '':
filters.append("tagvalue eq '%s'" % tag_value)
return ' and '.join(filters)
def _get_auth_provider_latest_api_version(cli_ctx):
    """Resolve the newest API version available for Microsoft.Authorization providerOperations."""
    client = _resource_client_factory(cli_ctx)
    return _ResourceUtils.resolve_api_version(client, 'Microsoft.Authorization', None, 'providerOperations')
def _update_provider(cli_ctx, namespace, registering, wait):
    """Register or unregister a resource provider namespace.

    :param bool registering: True to register, False to unregister.
    :param bool wait: when True, poll every 10 seconds until the provider reaches the
        target state; otherwise print a hint on how to monitor progress.
    """
    import time
    target_state = 'Registered' if registering else 'Unregistered'
    rcf = _resource_client_factory(cli_ctx)
    if registering:
        r = rcf.providers.register(namespace)
    else:
        r = rcf.providers.unregister(namespace)
    if r.registration_state == target_state:
        # Already in the desired state; nothing to wait for.
        return
    if wait:
        while True:
            time.sleep(10)
            rp_info = rcf.providers.get(namespace)
            if rp_info.registration_state == target_state:
                break
    else:
        action = 'Registering' if registering else 'Unregistering'
        msg_template = '%s is still on-going. You can monitor using \'az provider show -n %s\''
        logger.warning(msg_template, action, namespace)
def _build_policy_scope(subscription_id, resource_group_name, scope):
subscription_scope = '/subscriptions/' + subscription_id
if scope:
if resource_group_name:
err = "Resource group '{}' is redundant because 'scope' is supplied"
raise CLIError(err.format(resource_group_name))
elif resource_group_name:
scope = subscription_scope + '/resourceGroups/' + resource_group_name
else:
scope = subscription_scope
return scope
def _resolve_policy_id(cmd, policy, policy_set_definition, client):
    """Return the full resource id for a policy (or policy set) definition, resolving names to ids."""
    policy_id = policy or policy_set_definition
    if is_valid_resource_id(policy_id):
        # Already a full resource id; nothing to resolve.
        return policy_id
    if policy:
        return _get_custom_or_builtin_policy(cmd, client, policy).id
    return _get_custom_or_builtin_policy(cmd, client, policy_set_definition, None, None, True).id
def _parse_management_group_reference(name):
    """Split a management-group-scoped policy id into (management_group, definition_name).

    Returns (None, name) when the input is not a management group scope.
    """
    if not _is_management_group_scope(name):
        return None, name
    segments = name.split('/')
    if len(segments) >= 9:
        return segments[4], segments[8]
    return None, name
def _parse_management_group_id(scope):
    """Extract the management group name from a management-group-scoped id, or return None."""
    if not _is_management_group_scope(scope):
        return None
    segments = scope.split('/')
    return segments[4] if len(segments) >= 5 else None
def _get_custom_or_builtin_policy(cmd, client, name, subscription=None, management_group=None, for_policy_set=False):
    """Fetch a policy (or policy set) definition, falling back to built-in definitions on 404.

    :param name: definition name, possibly a management-group-scoped id.
    :param for_policy_set: operate on policy set definitions instead of policy definitions.
    :raises IncorrectUsageError: when the name is neither a valid definition name nor id.
    """
    from msrest.exceptions import HttpOperationError
    from msrestazure.azure_exceptions import CloudError
    policy_operations = client.policy_set_definitions if for_policy_set else client.policy_definitions
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if subscription:
            subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
            client.config.subscription_id = subscription_id
    try:
        if cmd.supported_api_version(min_api='2018-03-01'):
            if not management_group:
                # The name itself may embed a management group reference.
                management_group, name = _parse_management_group_reference(name)
            if management_group:
                return policy_operations.get_at_management_group(name, management_group)
        return policy_operations.get(name)
    except (CloudError, HttpOperationError) as ex:
        status_code = ex.status_code if isinstance(ex, CloudError) else ex.response.status_code
        if status_code == 404:
            # Not a custom definition; try the built-in catalog.
            try:
                return policy_operations.get_built_in(name)
            except CloudError as ex2:
                # When the `--policy` parameter is neither a valid policy definition name nor conforms to the policy definition id format,
                # an exception of "AuthorizationFailed" will be reported to mislead customers.
                # So we need to modify the exception information thrown here.
                if ex2.status_code == 403 and ex2.error and ex2.error.error == 'AuthorizationFailed':
                    raise IncorrectUsageError('\'--policy\' should be a valid name or id of the policy definition')
                raise ex2
        raise
def _load_file_string_or_uri(file_or_string_or_uri, name, required=True):
if file_or_string_or_uri is None:
if required:
raise CLIError('--{} is required'.format(name))
return None
url = urlparse(file_or_string_or_uri)
if url.scheme == 'http' or url.scheme == 'https' or url.scheme == 'file':
response = urlopen(file_or_string_or_uri)
reader = codecs.getreader('utf-8')
result = json.load(reader(response))
response.close()
return result
if os.path.exists(file_or_string_or_uri):
return get_file_json(file_or_string_or_uri)
return shell_safe_json_parse(file_or_string_or_uri)
def _call_subscription_get(cmd, lock_client, *args):
if cmd.supported_api_version(max_api='2015-01-01'):
return lock_client.management_locks.get(*args)
return lock_client.management_locks.get_at_subscription_level(*args)
def _extract_lock_params(resource_group_name, resource_provider_namespace,
resource_type, resource_name):
if resource_group_name is None:
return (None, None, None, None)
if resource_name is None:
return (resource_group_name, None, None, None)
parts = resource_type.split('/', 2)
if resource_provider_namespace is None and len(parts) == 2:
resource_provider_namespace = parts[0]
resource_type = parts[1]
return (resource_group_name, resource_name, resource_provider_namespace, resource_type)
def _update_lock_parameters(parameters, level, notes):
if level is not None:
parameters.level = level
if notes is not None:
parameters.notes = notes
def _validate_resource_inputs(resource_group_name, resource_provider_namespace,
resource_type, resource_name):
if resource_group_name is None:
raise CLIError('--resource-group/-g is required.')
if resource_type is None:
raise CLIError('--resource-type is required')
if resource_name is None:
raise CLIError('--name/-n is required')
if resource_provider_namespace is None:
raise CLIError('--namespace is required')
# region Custom Commands
def list_resource_groups(cmd, tag=None):  # pylint: disable=no-self-use
    """ List resource groups, optionally filtered by a tag.
    :param str tag:tag to filter by in 'key[=value]' format
    """
    client = _resource_client_factory(cmd.cli_ctx)
    filter_text = None
    if tag:
        key = list(tag.keys())[0]
        filter_text = "tagname eq '{}' and tagvalue eq '{}'".format(key, tag[key])
    return list(client.resource_groups.list(filter=filter_text))
def create_resource_group(cmd, rg_name, location, tags=None, managed_by=None):
    """ Create a new resource group.
    :param str resource_group_name:the desired resource group name
    :param str location:the resource group location
    :param str tags:tags in 'a=b c' format
    """
    client = _resource_client_factory(cmd.cli_ctx)
    ResourceGroup = cmd.get_models('ResourceGroup')
    group = ResourceGroup(location=location, tags=tags)
    # managed_by is only understood by API versions 2016-09-01 and later.
    if cmd.supported_api_version(min_api='2016-09-01'):
        group.managed_by = managed_by
    return client.resource_groups.create_or_update(rg_name, group)
def update_resource_group(instance, tags=None):
    """Generic-update handler: replace the group's tags when a new value is supplied."""
    if tags is None:
        return instance
    instance.tags = tags
    return instance
def export_group_as_template(
        cmd, resource_group_name, include_comments=False, include_parameter_default_value=False, resource_ids=None, skip_resource_name_params=False, skip_all_params=False):
    """Captures a resource group as a template.
    :param str resource_group_name: the name of the resource group.
    :param resource_ids: space-separated resource ids to filter the export by. To export all resources, do not specify this argument or supply "*".
    :param bool include_comments: export template with comments.
    :param bool include_parameter_default_value: export template parameter with default value.
    :param bool skip_resource_name_params: export template and skip resource name parameterization.
    :param bool skip_all_params: export template parameter and skip all parameterization.
    """
    rcf = _resource_client_factory(cmd.cli_ctx)
    # Translate boolean flags into the comma-separated option string the API expects.
    export_options = []
    if include_comments:
        export_options.append('IncludeComments')
    if include_parameter_default_value:
        export_options.append('IncludeParameterDefaultValue')
    if skip_resource_name_params:
        export_options.append('SkipResourceNameParameterization')
    if skip_all_params:
        export_options.append('SkipAllParameterization')
    resources = []
    if resource_ids is None or resource_ids[0] == "*":
        resources = ["*"]
    else:
        for i in resource_ids:
            if is_valid_resource_id(i):
                resources.append(i)
            else:
                raise CLIError('az resource: error: argument --resource-ids: invalid ResourceId value: \'%s\'' % i)
    options = ','.join(export_options) if export_options else None
    # Exporting a resource group as a template is async since API version 2019-08-01.
    if cmd.supported_api_version(min_api='2019-08-01'):
        result_poller = rcf.resource_groups.export_template(resource_group_name, resources, options=options)
        result = LongRunningOperation(cmd.cli_ctx)(result_poller)
    else:
        result = rcf.resource_groups.export_template(resource_group_name, resources, options=options)
    # pylint: disable=no-member
    # On error, server still returns 200, with details in the error attribute
    if result.error:
        error = result.error
        try:
            logger.warning(error.message)
        except AttributeError:
            logger.warning(str(error))
        for detail in getattr(error, 'details', None) or []:
            logger.error(detail.message)
    return result.template
def create_application(cmd, resource_group_name,
                       application_name, managedby_resource_group_id,
                       kind, managedapp_definition_id=None, location=None,
                       plan_name=None, plan_publisher=None, plan_product=None,
                       plan_version=None, tags=None, parameters=None):
    """ Create a new managed application.
    :param str resource_group_name:the desired resource group name
    :param str application_name:the managed application name
    :param str managedby_resource_group_id:the id of the resource group the application is managed by
    :param str kind:the managed application kind. can be marketplace or servicecatalog
    :param str managedapp_definition_id:the application definition id (required for ServiceCatalog kind)
    :param str plan_name:the managed application package plan name
    :param str plan_publisher:the managed application package plan publisher
    :param str plan_product:the managed application package plan product
    :param str plan_version:the managed application package plan version
    :param str tags:tags in 'a=b c' format
    :param parameters:application parameters as a JSON string or a path to a JSON file
    """
    from azure.mgmt.resource.managedapplications.models import Application, Plan
    racf = _resource_managedapps_client_factory(cmd.cli_ctx)
    rcf = _resource_client_factory(cmd.cli_ctx)
    if not location:
        # Default to the resource group's location when none is given.
        location = rcf.resource_groups.get(resource_group_name).location
    application = Application(
        location=location,
        managed_resource_group_id=managedby_resource_group_id,
        kind=kind,
        tags=tags
    )
    if kind.lower() == 'servicecatalog':
        if managedapp_definition_id:
            application.application_definition_id = managedapp_definition_id
        else:
            raise CLIError('--managedapp-definition-id is required if kind is ServiceCatalog')
    elif kind.lower() == 'marketplace':
        if (plan_name is None and plan_product is None and
                plan_publisher is None and plan_version is None):
            raise CLIError('--plan-name, --plan-product, --plan-publisher and \
--plan-version are all required if kind is MarketPlace')
        application.plan = Plan(name=plan_name, publisher=plan_publisher, product=plan_product, version=plan_version)
    applicationParameters = None
    if parameters:
        # Accept either a path to a JSON file or inline JSON text.
        if os.path.exists(parameters):
            applicationParameters = get_file_json(parameters)
        else:
            applicationParameters = shell_safe_json_parse(parameters)
    application.parameters = applicationParameters
    return racf.applications.create_or_update(resource_group_name, application_name, application)
def show_application(cmd, resource_group_name=None, application_name=None):
    """ Gets a managed application.
    :param str resource_group_name:the resource group name
    :param str application_name:the managed application name
    """
    client = _resource_managedapps_client_factory(cmd.cli_ctx)
    return client.applications.get(resource_group_name, application_name)
def show_applicationdefinition(cmd, resource_group_name=None, application_definition_name=None):
    """ Gets a managed application definition.
    :param str resource_group_name:the resource group name
    :param str application_definition_name:the managed application definition name
    """
    client = _resource_managedapps_client_factory(cmd.cli_ctx)
    return client.application_definitions.get(resource_group_name, application_definition_name)
def create_applicationdefinition(cmd, resource_group_name,
                                 application_definition_name,
                                 lock_level, authorizations,
                                 description, display_name,
                                 package_file_uri=None, create_ui_definition=None,
                                 main_template=None, location=None, tags=None):
    """ Create a new managed application definition.
    :param str resource_group_name:the desired resource group name
    :param str application_definition_name:the managed application definition name
    :param str lock_level:the lock level applied to managed resources
    :param authorizations:list of 'principalId:roleDefinitionId' authorization pairs
    :param str description:the managed application definition description
    :param str display_name:the managed application definition display name
    :param str package_file_uri:the managed application definition package file uri
    :param str create_ui_definition:the managed application definition create ui definition
    :param str main_template:the managed application definition main template
    :param str tags:tags in 'a=b c' format
    """
    from azure.mgmt.resource.managedapplications.models import ApplicationDefinition, ApplicationProviderAuthorization
    # Either a package URI, or an inline pair of UI definition + main template — never both.
    if not package_file_uri and not create_ui_definition and not main_template:
        raise CLIError('usage error: --package-file-uri <url> | --create-ui-definition --main-template')
    if package_file_uri:
        if create_ui_definition or main_template:
            raise CLIError('usage error: must not specify --create-ui-definition --main-template')
    if not package_file_uri:
        if not create_ui_definition or not main_template:
            raise CLIError('usage error: must specify --create-ui-definition --main-template')
    racf = _resource_managedapps_client_factory(cmd.cli_ctx)
    rcf = _resource_client_factory(cmd.cli_ctx)
    if not location:
        # Default to the resource group's location when none is given.
        location = rcf.resource_groups.get(resource_group_name).location
    authorizations = authorizations or []
    applicationAuthList = []
    for name_value in authorizations:
        # split at the first ':', neither principalId nor roldeDefinitionId should have a ':'
        principalId, roleDefinitionId = name_value.split(':', 1)
        applicationAuth = ApplicationProviderAuthorization(
            principal_id=principalId,
            role_definition_id=roleDefinitionId)
        applicationAuthList.append(applicationAuth)
    applicationDef = ApplicationDefinition(lock_level=lock_level,
                                           authorizations=applicationAuthList,
                                           package_file_uri=package_file_uri)
    applicationDef.display_name = display_name
    applicationDef.description = description
    applicationDef.location = location
    applicationDef.package_file_uri = package_file_uri
    applicationDef.create_ui_definition = create_ui_definition
    applicationDef.main_template = main_template
    applicationDef.tags = tags
    return racf.application_definitions.create_or_update(resource_group_name,
                                                         application_definition_name, applicationDef)
def list_applications(cmd, resource_group_name=None):
    """List managed applications in one resource group, or across the whole subscription."""
    apps = _resource_managedapps_client_factory(cmd.cli_ctx).applications
    source = (apps.list_by_resource_group(resource_group_name)
              if resource_group_name else apps.list_by_subscription())
    return list(source)
def list_deployments_at_subscription_scope(cmd, filter_string=None):
    """List deployments at subscription scope, optionally restricted by an OData filter."""
    return _resource_client_factory(cmd.cli_ctx).deployments.list_at_subscription_scope(filter=filter_string)
def list_deployments_at_resource_group(cmd, resource_group_name, filter_string=None):
    """List deployments in a resource group, optionally restricted by an OData filter."""
    return _resource_client_factory(cmd.cli_ctx).deployments.list_by_resource_group(resource_group_name,
                                                                                    filter=filter_string)
def list_deployments_at_management_group(cmd, management_group_id, filter_string=None):
    """List deployments at management-group scope, optionally restricted by an OData filter."""
    return _resource_client_factory(cmd.cli_ctx).deployments.list_at_management_group_scope(management_group_id,
                                                                                            filter=filter_string)
def list_deployments_at_tenant_scope(cmd, filter_string=None):
    """List deployments at tenant scope, optionally restricted by an OData filter."""
    return _resource_client_factory(cmd.cli_ctx).deployments.list_at_tenant_scope(filter=filter_string)
def get_deployment_at_subscription_scope(cmd, deployment_name):
    """Show a deployment at subscription scope."""
    return _resource_client_factory(cmd.cli_ctx).deployments.get_at_subscription_scope(deployment_name)
def get_deployment_at_resource_group(cmd, resource_group_name, deployment_name):
    """Show a deployment in a resource group."""
    return _resource_client_factory(cmd.cli_ctx).deployments.get(resource_group_name, deployment_name)
def get_deployment_at_management_group(cmd, management_group_id, deployment_name):
    """Show a deployment at management-group scope."""
    return _resource_client_factory(cmd.cli_ctx).deployments.get_at_management_group_scope(management_group_id,
                                                                                           deployment_name)
def get_deployment_at_tenant_scope(cmd, deployment_name):
    """Show a deployment at tenant scope."""
    return _resource_client_factory(cmd.cli_ctx).deployments.get_at_tenant_scope(deployment_name)
def delete_deployment_at_subscription_scope(cmd, deployment_name):
    """Delete a deployment at subscription scope."""
    return _resource_client_factory(cmd.cli_ctx).deployments.delete_at_subscription_scope(deployment_name)
def delete_deployment_at_resource_group(cmd, resource_group_name, deployment_name):
    """Delete a deployment from a resource group."""
    return _resource_client_factory(cmd.cli_ctx).deployments.delete(resource_group_name, deployment_name)
def delete_deployment_at_management_group(cmd, management_group_id, deployment_name):
    """Delete a deployment at management-group scope."""
    return _resource_client_factory(cmd.cli_ctx).deployments.delete_at_management_group_scope(management_group_id,
                                                                                              deployment_name)
def delete_deployment_at_tenant_scope(cmd, deployment_name):
    """Delete a deployment at tenant scope."""
    return _resource_client_factory(cmd.cli_ctx).deployments.delete_at_tenant_scope(deployment_name)
def cancel_deployment_at_subscription_scope(cmd, deployment_name):
    """Cancel a running deployment at subscription scope."""
    return _resource_client_factory(cmd.cli_ctx).deployments.cancel_at_subscription_scope(deployment_name)
def cancel_deployment_at_resource_group(cmd, resource_group_name, deployment_name):
    """Cancel a running deployment in a resource group."""
    return _resource_client_factory(cmd.cli_ctx).deployments.cancel(resource_group_name, deployment_name)
def cancel_deployment_at_management_group(cmd, management_group_id, deployment_name):
    """Cancel a running deployment at management-group scope."""
    return _resource_client_factory(cmd.cli_ctx).deployments.cancel_at_management_group_scope(management_group_id,
                                                                                              deployment_name)
def cancel_deployment_at_tenant_scope(cmd, deployment_name):
    """Cancel a running deployment at tenant scope."""
    return _resource_client_factory(cmd.cli_ctx).deployments.cancel_at_tenant_scope(deployment_name)
# pylint: disable=unused-argument
def deploy_arm_template(cmd, resource_group_name,
                        template_file=None, template_uri=None, deployment_name=None,
                        parameters=None, mode=None, rollback_on_error=None, no_wait=False,
                        handle_extended_json_format=None, aux_subscriptions=None, aux_tenants=None,
                        no_prompt=False):
    """Deploy an ARM template to a resource group.

    ``handle_extended_json_format`` is accepted but not forwarded to the core
    implementation.
    """
    return _deploy_arm_template_core_unmodified(
        cmd, resource_group_name=resource_group_name, template_file=template_file,
        template_uri=template_uri, deployment_name=deployment_name, parameters=parameters,
        mode=mode, rollback_on_error=rollback_on_error, no_wait=no_wait,
        aux_subscriptions=aux_subscriptions, aux_tenants=aux_tenants, no_prompt=no_prompt)
# pylint: disable=unused-argument
def validate_arm_template(cmd, resource_group_name, template_file=None, template_uri=None,
                          parameters=None, mode=None, rollback_on_error=None, handle_extended_json_format=None,
                          no_prompt=False):
    """Validate an ARM template against a resource group without deploying it."""
    # 'deployment_dry_run' is the placeholder deployment name used for validation runs.
    return _deploy_arm_template_core_unmodified(
        cmd, resource_group_name, template_file, template_uri, 'deployment_dry_run',
        parameters, mode, rollback_on_error, validate_only=True, no_prompt=no_prompt)
def export_template_at_subscription_scope(cmd, deployment_name):
    """Print the template of a subscription-scope deployment as indented JSON on stdout."""
    exported = _resource_client_factory(cmd.cli_ctx).deployments.export_template_at_subscription_scope(deployment_name)
    print(json.dumps(exported.template, indent=2))  # pylint: disable=no-member
def export_template_at_resource_group(cmd, resource_group_name, deployment_name):
    """Print the template of a resource-group deployment as indented JSON on stdout."""
    exported = _resource_client_factory(cmd.cli_ctx).deployments.export_template(resource_group_name, deployment_name)
    print(json.dumps(exported.template, indent=2))  # pylint: disable=no-member
def export_template_at_management_group(cmd, management_group_id, deployment_name):
    """Print the template of a management-group deployment as indented JSON on stdout."""
    exported = _resource_client_factory(cmd.cli_ctx).deployments.export_template_at_management_group_scope(
        management_group_id, deployment_name)
    print(json.dumps(exported.template, indent=2))  # pylint: disable=no-member
def export_template_at_tenant_scope(cmd, deployment_name):
    """Print the template of a tenant-scope deployment as indented JSON on stdout."""
    exported = _resource_client_factory(cmd.cli_ctx).deployments.export_template_at_tenant_scope(deployment_name)
    print(json.dumps(exported.template, indent=2))  # pylint: disable=no-member
def export_deployment_as_template(cmd, resource_group_name, deployment_name):
    """Print the template used for a resource-group deployment as indented JSON on stdout."""
    exported = _resource_client_factory(cmd.cli_ctx).deployments.export_template(resource_group_name, deployment_name)
    print(json.dumps(exported.template, indent=2))  # pylint: disable=no-member
def create_resource(cmd, properties,
                    resource_group_name=None, resource_provider_namespace=None,
                    parent_resource_path=None, resource_type=None, resource_name=None,
                    resource_id=None, api_version=None, location=None, is_full_object=False,
                    latest_include_preview=False):
    """Create a resource from a JSON properties payload (or a full resource object)."""
    util = _ResourceUtils(cmd.cli_ctx, resource_group_name, resource_provider_namespace,
                          parent_resource_path, resource_type, resource_name,
                          resource_id, api_version, latest_include_preview=latest_include_preview)
    return util.create_resource(properties, location, is_full_object)
def _get_parsed_resource_ids(resource_ids):
"""
Returns a generator of parsed resource ids. Raise when there is invalid resource id.
"""
if not resource_ids:
return None
for rid in resource_ids:
if not is_valid_resource_id(rid):
raise CLIError('az resource: error: argument --ids: invalid ResourceId value: \'%s\'' % rid)
return ({'resource_id': rid} for rid in resource_ids)
def _get_rsrc_util_from_parsed_id(cli_ctx, parsed_id, api_version, latest_include_preview=False):
    """Build a _ResourceUtils helper from a parsed-resource-id dict."""
    # dict.get already defaults to None, so the explicit default is omitted.
    return _ResourceUtils(cli_ctx,
                          parsed_id.get('resource_group'),
                          parsed_id.get('resource_namespace'),
                          parsed_id.get('resource_parent'),
                          parsed_id.get('resource_type'),
                          parsed_id.get('resource_name'),
                          parsed_id.get('resource_id'),
                          api_version,
                          latest_include_preview=latest_include_preview)
def _create_parsed_id(cli_ctx, resource_group_name=None, resource_provider_namespace=None, parent_resource_path=None,
                      resource_type=None, resource_name=None):
    """Assemble a parsed-id dict from individual arguments, resolving the current subscription."""
    from azure.cli.core.commands.client_factory import get_subscription_id
    return {
        'resource_group': resource_group_name,
        'resource_namespace': resource_provider_namespace,
        'resource_parent': parent_resource_path,
        'resource_type': resource_type,
        'resource_name': resource_name,
        'subscription': get_subscription_id(cli_ctx),
    }
def _single_or_collection(obj, default=None):
if not obj:
return default
if isinstance(obj, list) and len(obj) == 1:
return obj[0]
return obj
# pylint: disable=unused-argument
def show_resource(cmd, resource_ids=None, resource_group_name=None,
                  resource_provider_namespace=None, parent_resource_path=None, resource_type=None,
                  resource_name=None, api_version=None, include_response_body=False, latest_include_preview=False):
    """Show one resource per parsed id; a single result is returned unwrapped."""
    ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
                                                                       resource_group_name,
                                                                       resource_provider_namespace,
                                                                       parent_resource_path,
                                                                       resource_type,
                                                                       resource_name)]
    found = [_get_rsrc_util_from_parsed_id(cmd.cli_ctx, parsed, api_version,
                                           latest_include_preview).get_resource(include_response_body)
             for parsed in ids]
    return _single_or_collection(found)
# pylint: disable=unused-argument
def delete_resource(cmd, resource_ids=None, resource_group_name=None,
                    resource_provider_namespace=None, parent_resource_path=None, resource_type=None,
                    resource_name=None, api_version=None, latest_include_preview=False):
    """
    Deletes the given resource(s).
    This function allows deletion of ids with dependencies on one another.
    This is done with multiple passes through the given ids.
    """
    # Either explicit --ids, or a single id assembled from the name/type/group arguments.
    parsed_ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
                                                                              resource_group_name,
                                                                              resource_provider_namespace,
                                                                              parent_resource_path,
                                                                              resource_type,
                                                                              resource_name)]
    to_be_deleted = [(_get_rsrc_util_from_parsed_id(cmd.cli_ctx, id_dict, api_version, latest_include_preview), id_dict)
                     for id_dict in parsed_ids]
    results = []
    from msrestazure.azure_exceptions import CloudError
    # Repeatedly attempt the remaining deletions: an id whose delete fails this
    # pass (e.g. because a dependent resource still exists) is retried next pass.
    while to_be_deleted:
        logger.debug("Start new loop to delete resources.")
        operations = []
        failed_to_delete = []
        for rsrc_utils, id_dict in to_be_deleted:
            try:
                operations.append(rsrc_utils.delete())
                resource = _build_resource_id(**id_dict) or resource_name
                logger.debug("deleting %s", resource)
            except CloudError as e:
                # request to delete failed, add parsed id dict back to queue
                id_dict['exception'] = str(e)
                failed_to_delete.append((rsrc_utils, id_dict))
        to_be_deleted = failed_to_delete
        # stop deleting if none deletable
        if not operations:
            break
        # all operations return result before next pass
        for operation in operations:
            results.append(operation.result())
    if to_be_deleted:
        # Some ids never became deletable; report them all in one error.
        error_msg_builder = ['Some resources failed to be deleted (run with `--verbose` for more information):']
        for _, id_dict in to_be_deleted:
            logger.info(id_dict['exception'])
            resource_id = _build_resource_id(**id_dict) or id_dict['resource_id']
            error_msg_builder.append(resource_id)
        raise CLIError(os.linesep.join(error_msg_builder))
    return _single_or_collection(results)
# pylint: disable=unused-argument
def update_resource(cmd, parameters, resource_ids=None,
                    resource_group_name=None, resource_provider_namespace=None,
                    parent_resource_path=None, resource_type=None, resource_name=None, api_version=None,
                    latest_include_preview=False):
    """Update each addressed resource with *parameters*; a single result is returned unwrapped."""
    ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
                                                                       resource_group_name,
                                                                       resource_provider_namespace,
                                                                       parent_resource_path,
                                                                       resource_type,
                                                                       resource_name)]
    updated = [_get_rsrc_util_from_parsed_id(cmd.cli_ctx, parsed, api_version,
                                             latest_include_preview).update(parameters)
               for parsed in ids]
    return _single_or_collection(updated)
# pylint: disable=unused-argument
def tag_resource(cmd, tags, resource_ids=None, resource_group_name=None, resource_provider_namespace=None,
                 parent_resource_path=None, resource_type=None, resource_name=None, api_version=None,
                 is_incremental=None, latest_include_preview=False):
    """ Updates the tags on an existing resource. To clear tags, specify the --tag option
    without anything else. """
    ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
                                                                       resource_group_name,
                                                                       resource_provider_namespace,
                                                                       parent_resource_path,
                                                                       resource_type,
                                                                       resource_name)]
    tagged = [_get_rsrc_util_from_parsed_id(cmd.cli_ctx, parsed, api_version,
                                            latest_include_preview).tag(tags, is_incremental)
              for parsed in ids]
    return _single_or_collection(tagged)
# pylint: disable=unused-argument
def invoke_resource_action(cmd, action, request_body=None, resource_ids=None,
                           resource_group_name=None, resource_provider_namespace=None,
                           parent_resource_path=None, resource_type=None, resource_name=None,
                           api_version=None, latest_include_preview=False):
    """ Invokes the provided action on an existing resource."""
    ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
                                                                       resource_group_name,
                                                                       resource_provider_namespace,
                                                                       parent_resource_path,
                                                                       resource_type,
                                                                       resource_name)]
    outcomes = [_get_rsrc_util_from_parsed_id(cmd.cli_ctx, parsed, api_version,
                                              latest_include_preview).invoke_action(action, request_body)
                for parsed in ids]
    return _single_or_collection(outcomes)
def get_deployment_operations(client, resource_group_name, deployment_name, operation_ids):
    """Get a deployment's operations by id.

    :param client: deployment-operations client exposing ``get``.
    :param operation_ids: iterable of operation ids to fetch.
    :return: list of operation objects, in the order of *operation_ids*.
    """
    # Idiomatic comprehension instead of a manual append loop (PERF401).
    return [client.get(resource_group_name, deployment_name, op_id) for op_id in operation_ids]
def get_deployment_operations_at_subscription_scope(client, deployment_name, operation_ids):
    """Get operations of a subscription-scope deployment, in *operation_ids* order."""
    # Idiomatic comprehension instead of a manual append loop (PERF401).
    return [client.get_at_subscription_scope(deployment_name, op_id) for op_id in operation_ids]
def get_deployment_operations_at_resource_group(client, resource_group_name, deployment_name, operation_ids):
    """Get operations of a resource-group deployment, in *operation_ids* order."""
    # Idiomatic comprehension instead of a manual append loop (PERF401).
    return [client.get(resource_group_name, deployment_name, op_id) for op_id in operation_ids]
def get_deployment_operations_at_management_group(client, management_group_id, deployment_name, operation_ids):
    """Get operations of a management-group deployment, in *operation_ids* order."""
    # Idiomatic comprehension instead of a manual append loop (PERF401).
    return [client.get_at_management_group_scope(management_group_id, deployment_name, op_id)
            for op_id in operation_ids]
def get_deployment_operations_at_tenant_scope(client, deployment_name, operation_ids):
    """Get operations of a tenant-scope deployment, in *operation_ids* order."""
    # Idiomatic comprehension instead of a manual append loop (PERF401).
    return [client.get_at_tenant_scope(deployment_name, op_id) for op_id in operation_ids]
def list_deployment_scripts(cmd, resource_group_name=None):
    """List deployment scripts in a resource group, or across the subscription."""
    client = _resource_deploymentscripts_client_factory(cmd.cli_ctx)
    if resource_group_name is None:
        return client.deployment_scripts.list_by_subscription()
    return client.deployment_scripts.list_by_resource_group(resource_group_name)
def get_deployment_script(cmd, resource_group_name, name):
    """Show a deployment script."""
    return _resource_deploymentscripts_client_factory(cmd.cli_ctx).deployment_scripts.get(resource_group_name, name)
def get_deployment_script_logs(cmd, resource_group_name, name):
    """Show a deployment script's execution logs."""
    return _resource_deploymentscripts_client_factory(cmd.cli_ctx).deployment_scripts.get_logs(resource_group_name,
                                                                                               name)
def delete_deployment_script(cmd, resource_group_name, name):
    """Delete a deployment script."""
    _resource_deploymentscripts_client_factory(cmd.cli_ctx).deployment_scripts.delete(resource_group_name, name)
def get_template_spec(cmd, resource_group_name=None, name=None, version=None, template_spec=None):
    """Show a template spec, or a specific version of it when one is resolvable."""
    if template_spec:
        # A full resource id overrides the name/group/version arguments.
        parts = parse_resource_id(template_spec)
        resource_group_name = parts.get('resource_group')
        name = parts.get('name')
        version = parts.get('resource_name')
        if version == name:
            # The id addressed the parent spec rather than a version.
            version = None
    rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if version:
        return rcf.template_spec_versions.get(resource_group_name, name, version)
    return rcf.template_specs.get(resource_group_name, name)
def create_template_spec(cmd, resource_group_name, name, template_file=None, location=None, display_name=None,
                         description=None, version=None, version_description=None):
    """Create a template spec; with *version*, also create/update that version (creating the
    parent spec first if it does not exist)."""
    artifacts = None
    input_template = None
    # Default the location to the resource group's location.
    if location is None:
        rcf = _resource_client_factory(cmd.cli_ctx)
        location = rcf.resource_groups.get(resource_group_name).location
    rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if version:
        if template_file:
            # Pack the root template together with its linked artifacts.
            from azure.cli.command_modules.resource._packing_engine import (pack)
            packed_template = pack(cmd, template_file)
            input_template = getattr(packed_template, 'RootTemplate')
            artifacts = getattr(packed_template, 'Artifacts')
        try: # Check if parent template spec already exists.
            rcf.template_specs.get(resource_group_name=resource_group_name, template_spec_name=name)
        except Exception: # pylint: disable=broad-except
            # Parent spec missing - create it before adding the version.
            TemplateSpec = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpec', mod='models')
            template_spec_parent = TemplateSpec(location=location, description=description, display_name=display_name, tags=None)
            rcf.template_specs.create_or_update(resource_group_name, name, template_spec_parent)
        TemplateSpecVersion = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpecVersion', mod='models')
        template_spec_child = TemplateSpecVersion(location=location, artifacts=artifacts, description=version_description, template=input_template, tags=None)
        return rcf.template_spec_versions.create_or_update(resource_group_name, name, version, template_spec_child)
    TemplateSpec = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpec', mod='models')
    template_spec_parent = TemplateSpec(location=location, description=description, display_name=display_name, tags=None)
    return rcf.template_specs.create_or_update(resource_group_name, name, template_spec_parent)
def update_template_spec(cmd, resource_group_name=None, name=None, template_spec=None, template_file=None, display_name=None,
                         description=None, version=None, version_description=None):
    """Update a template spec (or one of its versions), preserving any field not supplied."""
    rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if template_spec:
        # A full resource id overrides resource_group_name/name/version.
        id_parts = parse_resource_id(template_spec)
        resource_group_name = id_parts.get('resource_group')
        name = id_parts.get('name')
        version = id_parts.get('resource_name')
        if version == name:
            # The id addressed the parent spec rather than a version.
            version = None
    existing_template = None
    artifacts = None
    if template_file:
        # Pack the root template together with its linked artifacts.
        from azure.cli.command_modules.resource._packing_engine import (pack)
        packed_template = pack(cmd, template_file)
        input_template = getattr(packed_template, 'RootTemplate')
        artifacts = getattr(packed_template, 'Artifacts')
    if version:
        # Version update: carry over location/tags; description and template
        # fall back to the existing values when not supplied.
        existing_template = rcf.template_spec_versions.get(resource_group_name=resource_group_name, template_spec_name=name, template_spec_version=version)
        location = getattr(existing_template, 'location')
        version_tags = getattr(existing_template, 'tags')
        if version_description is None:
            version_description = getattr(existing_template, 'description')
        if template_file is None:
            input_template = getattr(existing_template, 'template')
        TemplateSpecVersion = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpecVersion', mod='models')
        updated_template_spec = TemplateSpecVersion(location=location, artifacts=artifacts, description=version_description, template=input_template, tags=version_tags)
        return rcf.template_spec_versions.create_or_update(resource_group_name, name, version, updated_template_spec)
    # Parent-spec update: keep location/tags; display name and description fall
    # back to the existing values when not supplied.
    existing_template = rcf.template_specs.get(resource_group_name=resource_group_name, template_spec_name=name)
    location = getattr(existing_template, 'location')
    tags = getattr(existing_template, 'tags')
    if display_name is None:
        display_name = getattr(existing_template, 'display_name')
    if description is None:
        description = getattr(existing_template, 'description')
    TemplateSpec = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpec', mod='models')
    root_template = TemplateSpec(location=location, description=description, display_name=display_name, tags=tags)
    return rcf.template_specs.create_or_update(resource_group_name, name, root_template)
def export_template_spec(cmd, output_folder, resource_group_name=None, name=None, version=None, template_spec=None):
    """Export a template spec (or a specific version) into *output_folder* as <name>.JSON."""
    if template_spec:
        # A full resource id overrides the name/group/version arguments.
        parts = parse_resource_id(template_spec)
        resource_group_name = parts.get('resource_group')
        name = parts.get('name')
        version = parts.get('resource_name')
        if version == name:
            version = None
    rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if version:
        exported_template = rcf.template_spec_versions.get(resource_group_name, name, version)
    else:
        exported_template = rcf.template_specs.get(resource_group_name, name)
    from azure.cli.command_modules.resource._packing_engine import (unpack)
    return unpack(cmd, exported_template, output_folder, (str(name) + '.JSON'))
def delete_template_spec(cmd, resource_group_name=None, name=None, version=None, template_spec=None):
    """Delete a template spec, or just one of its versions when a version is resolvable."""
    if template_spec:
        # A full resource id overrides the name/group/version arguments.
        parts = parse_resource_id(template_spec)
        resource_group_name = parts.get('resource_group')
        name = parts.get('name')
        version = parts.get('resource_name')
        if version == name:
            version = None
    rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if version:
        return rcf.template_specs.delete(resource_group_name=resource_group_name, template_spec_name=name, template_spec_version=version)
    return rcf.template_specs.delete(resource_group_name=resource_group_name, template_spec_name=name)
def list_template_specs(cmd, resource_group_name=None, name=None):
    """List template specs in a subscription or group; with *name*, list that spec's versions."""
    rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if resource_group_name is None:
        return rcf.template_specs.list_by_subscription()
    if name is None:
        return rcf.template_specs.list_by_resource_group(resource_group_name)
    return rcf.template_spec_versions.list(resource_group_name=resource_group_name, template_spec_name=name)
def list_deployment_operations_at_subscription_scope(cmd, deployment_name):
    """List all operations of a subscription-scope deployment."""
    return _resource_client_factory(cmd.cli_ctx).deployment_operations.list_at_subscription_scope(deployment_name)
def list_deployment_operations_at_resource_group(cmd, resource_group_name, deployment_name):
    """List all operations of a resource-group deployment."""
    return _resource_client_factory(cmd.cli_ctx).deployment_operations.list(resource_group_name, deployment_name)
def list_deployment_operations_at_management_group(cmd, management_group_id, deployment_name):
    """List all operations of a management-group deployment."""
    return _resource_client_factory(cmd.cli_ctx).deployment_operations.list_at_management_group_scope(
        management_group_id, deployment_name)
def list_deployment_operations_at_tenant_scope(cmd, deployment_name):
    """List all operations of a tenant-scope deployment."""
    return _resource_client_factory(cmd.cli_ctx).deployment_operations.list_at_tenant_scope(deployment_name)
def get_deployment_operation_at_subscription_scope(cmd, deployment_name, op_id):
    """Show one operation of a subscription-scope deployment."""
    return _resource_client_factory(cmd.cli_ctx).deployment_operations.get_at_subscription_scope(deployment_name,
                                                                                                 op_id)
def get_deployment_operation_at_resource_group(cmd, resource_group_name, deployment_name, op_id):
    """Show one operation of a resource-group deployment."""
    return _resource_client_factory(cmd.cli_ctx).deployment_operations.get(resource_group_name, deployment_name,
                                                                           op_id)
def get_deployment_operation_at_management_group(cmd, management_group_id, deployment_name, op_id):
    """Show one operation of a management-group deployment."""
    return _resource_client_factory(cmd.cli_ctx).deployment_operations.get_at_management_group_scope(
        management_group_id, deployment_name, op_id)
def get_deployment_operation_at_tenant_scope(cmd, deployment_name, op_id):
    """Show one operation of a tenant-scope deployment."""
    return _resource_client_factory(cmd.cli_ctx).deployment_operations.get_at_tenant_scope(deployment_name, op_id)
def list_resources(cmd, resource_group_name=None,
                   resource_provider_namespace=None, resource_type=None, name=None, tag=None,
                   location=None):
    """List resources, optionally filtered by group, provider namespace, type, name, tag or location."""
    rcf = _resource_client_factory(cmd.cli_ctx)
    if resource_group_name is not None:
        # Fail early with a proper error when the resource group does not exist.
        rcf.resource_groups.get(resource_group_name)
    odata_filter = _list_resources_odata_filter_builder(resource_group_name,
                                                        resource_provider_namespace,
                                                        resource_type, name, tag, location)
    return list(rcf.resources.list(filter=odata_filter,
                                   expand="createdTime,changedTime,provisioningState"))
def register_provider(cmd, resource_provider_namespace, wait=False):
    """Register the subscription with the given resource provider namespace."""
    _update_provider(cmd.cli_ctx, resource_provider_namespace, registering=True, wait=wait)
def unregister_provider(cmd, resource_provider_namespace, wait=False):
    """Unregister the subscription from the given resource provider namespace."""
    _update_provider(cmd.cli_ctx, resource_provider_namespace, registering=False, wait=wait)
def list_provider_operations(cmd):
    """List operations metadata for all resource providers."""
    return _authorization_management_client(cmd.cli_ctx).provider_operations_metadata.list()
def show_provider_operations(cmd, resource_provider_namespace):
    """Show operations metadata for a single resource provider."""
    version = getattr(get_api_version(cmd.cli_ctx, ResourceType.MGMT_AUTHORIZATION), 'provider_operations_metadata')
    auth_client = _authorization_management_client(cmd.cli_ctx)
    # The 2015-07-01 SDK signature takes the api-version as an explicit second
    # argument; later versions take only the namespace.
    if version == '2015-07-01':
        return auth_client.provider_operations_metadata.get(resource_provider_namespace, version)
    return auth_client.provider_operations_metadata.get(resource_provider_namespace)
def move_resource(cmd, ids, destination_group, destination_subscription_id=None):
    """Move resources to another resource group (possibly in a different subscription).

    :param ids: the space-separated resource ids to be moved
    :param destination_group: the destination resource group name
    :param destination_subscription_id: the destination subscription identifier
    """
    # Every id must parse; collect the parsed forms for the same-scope checks below.
    parsed = []
    for rid in ids:
        if not is_valid_resource_id(rid):
            raise CLIError('Invalid id "{}", as it has no group or subscription field'.format(rid))
        parsed.append(parse_resource_id(rid))
    if len({p['subscription'] for p in parsed}) > 1:
        raise CLIError('All resources should be under the same subscription')
    if len({p['resource_group'] for p in parsed}) > 1:
        raise CLIError('All resources should be under the same group')
    rcf = _resource_client_factory(cmd.cli_ctx)
    target = _build_resource_id(subscription=(destination_subscription_id or rcf.config.subscription_id),
                                resource_group=destination_group)
    return rcf.resources.move_resources(parsed[0]['resource_group'], ids, target)
def list_features(client, resource_provider_namespace=None):
    """List preview features, scoped to one resource provider when given."""
    if not resource_provider_namespace:
        return client.list_all()
    return client.list(resource_provider_namespace=resource_provider_namespace)
def register_feature(client, resource_provider_namespace, feature_name):
    """Register a preview feature; the provider must be re-registered for it to take effect."""
    logger.warning("Once the feature '%s' is registered, invoking 'az provider register -n %s' is required "
                   "to get the change propagated", feature_name, resource_provider_namespace)
    return client.register(resource_provider_namespace, feature_name)
def unregister_feature(client, resource_provider_namespace, feature_name):
    """Unregister a preview feature; the provider must be re-registered for it to take effect."""
    logger.warning("Once the feature '%s' is unregistered, invoking 'az provider register -n %s' is required "
                   "to get the change propagated", feature_name, resource_provider_namespace)
    return client.unregister(resource_provider_namespace, feature_name)
# pylint: disable=inconsistent-return-statements,too-many-locals
def create_policy_assignment(cmd, policy=None, policy_set_definition=None,
                             name=None, display_name=None, params=None,
                             resource_group_name=None, scope=None, sku=None,
                             not_scopes=None, location=None, assign_identity=None,
                             identity_scope=None, identity_role='Contributor', enforcement_mode='Default'):
    """Creates a policy assignment
    :param not_scopes: Space-separated scopes where the policy assignment does not apply.
    """
    # Exactly one of --policy / --policy-set-definition must be supplied.
    if bool(policy) == bool(policy_set_definition):
        raise CLIError('usage error: --policy NAME_OR_ID | '
                       '--policy-set-definition NAME_OR_ID')
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    scope = _build_policy_scope(policy_client.config.subscription_id,
                                resource_group_name, scope)
    policy_id = _resolve_policy_id(cmd, policy, policy_set_definition, policy_client)
    # params may be a JSON string, a file path or a URI; False: not required -
    # presumably the 'required' flag of _load_file_string_or_uri (confirm there).
    params = _load_file_string_or_uri(params, 'params', False)
    PolicyAssignment = cmd.get_models('PolicyAssignment')
    assignment = PolicyAssignment(display_name=display_name, policy_definition_id=policy_id, scope=scope, enforcement_mode=enforcement_mode)
    assignment.parameters = params if params else None
    # notScopes and sku fields only exist on 2017-06-01-preview and later.
    if cmd.supported_api_version(min_api='2017-06-01-preview'):
        if not_scopes:
            kwargs_list = []
            for id_arg in not_scopes.split(' '):
                if parse_resource_id(id_arg):
                    kwargs_list.append(id_arg)
                else:
                    logger.error('az policy assignment create error: argument --not-scopes: \
                        invalid notscopes value: \'%s\'', id_arg)
                    return
            assignment.not_scopes = kwargs_list
        PolicySku = cmd.get_models('PolicySku')
        policySku = PolicySku(name='A0', tier='Free')
        if sku:
            policySku = policySku if sku.lower() == 'free' else PolicySku(name='A1', tier='Standard')
        assignment.sku = policySku
    # Location and managed identity support arrived with 2018-05-01.
    if cmd.supported_api_version(min_api='2018-05-01'):
        if location:
            assignment.location = location
        identity = None
        if assign_identity is not None:
            identity = _build_identities_info(cmd, assign_identity)
        assignment.identity = identity
    if name is None:
        # Auto-generate a URL-safe name from a random UUID ('==' padding stripped).
        name = (base64.urlsafe_b64encode(uuid.uuid4().bytes).decode())[:-2]
    createdAssignment = policy_client.policy_assignments.create(scope, name, assignment)
    # Create the identity's role assignment if requested
    if assign_identity is not None and identity_scope:
        from azure.cli.core.commands.arm import assign_identity as _assign_identity_helper
        _assign_identity_helper(cmd.cli_ctx, lambda: createdAssignment, lambda resource: createdAssignment, identity_role, identity_scope)
    return createdAssignment
def _build_identities_info(cmd, identities):
identities = identities or []
ResourceIdentityType = cmd.get_models('ResourceIdentityType')
identity_type = ResourceIdentityType.none
if not identities or MSI_LOCAL_ID in identities:
identity_type = ResourceIdentityType.system_assigned
ResourceIdentity = cmd.get_models('Identity')
return ResourceIdentity(type=identity_type)
def delete_policy_assignment(cmd, name, resource_group_name=None, scope=None):
    """Delete a policy assignment at the derived scope."""
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    target_scope = _build_policy_scope(policy_client.config.subscription_id,
                                       resource_group_name, scope)
    policy_client.policy_assignments.delete(target_scope, name)
def show_policy_assignment(cmd, name, resource_group_name=None, scope=None):
    """Show a policy assignment at the derived scope."""
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    target_scope = _build_policy_scope(policy_client.config.subscription_id,
                                       resource_group_name, scope)
    return policy_client.policy_assignments.get(target_scope, name)
def list_policy_assignment(cmd, disable_scope_strict_match=None, resource_group_name=None, scope=None):
    """List policy assignments at the derived scope.

    The scope may resolve to a management group, an individual resource, a
    resource group, or the subscription; unless disable_scope_strict_match is
    set, results are filtered to assignments whose scope matches exactly.
    """
    from azure.cli.core.commands.client_factory import get_subscription_id
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    _scope = _build_policy_scope(get_subscription_id(cmd.cli_ctx),
                                 resource_group_name, scope)
    id_parts = parse_resource_id(_scope)
    subscription = id_parts.get('subscription')
    resource_group = id_parts.get('resource_group')
    # Prefer the child type/name when the scope points at a nested resource.
    resource_type = id_parts.get('child_type_1') or id_parts.get('type')
    resource_name = id_parts.get('child_name_1') or id_parts.get('name')
    management_group = _parse_management_group_id(scope)
    if management_group:
        result = policy_client.policy_assignments.list_for_management_group(management_group_id=management_group, filter='atScope()')
    elif all([resource_type, resource_group, subscription]):
        # Scope addresses an individual (possibly nested) resource.
        namespace = id_parts.get('namespace')
        parent_resource_path = '' if not id_parts.get('child_name_1') else (id_parts['type'] + '/' + id_parts['name'])
        result = policy_client.policy_assignments.list_for_resource(
            resource_group, namespace,
            parent_resource_path, resource_type, resource_name)
    elif resource_group:
        result = policy_client.policy_assignments.list_for_resource_group(resource_group)
    elif subscription:
        result = policy_client.policy_assignments.list()
    elif scope:
        raise CLIError('usage error `--scope`: must be a fully qualified ARM ID.')
    else:
        raise CLIError('usage error: --scope ARM_ID | --resource-group NAME')
    if not disable_scope_strict_match:
        # Keep only assignments whose scope matches the requested scope exactly.
        result = [i for i in result if _scope.lower().strip('/') == i.scope.lower().strip('/')]
    return result
def set_identity(cmd, name, scope=None, resource_group_name=None, identity_role='Contributor', identity_scope=None):
    """Add a system-assigned identity to a policy assignment and optionally grant it a role."""
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    scope = _build_policy_scope(policy_client.config.subscription_id, resource_group_name, scope)

    def _getter():
        return policy_client.policy_assignments.get(scope, name)

    def _setter(assignment):
        assignment.identity = _build_identities_info(cmd, [MSI_LOCAL_ID])
        return policy_client.policy_assignments.create(scope, name, assignment)

    from azure.cli.core.commands.arm import assign_identity as _assign_identity_helper
    return _assign_identity_helper(cmd.cli_ctx, _getter, _setter, identity_role, identity_scope).identity
def show_identity(cmd, name, scope=None, resource_group_name=None):
    """Show the managed identity of a policy assignment."""
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    target_scope = _build_policy_scope(policy_client.config.subscription_id, resource_group_name, scope)
    return policy_client.policy_assignments.get(target_scope, name).identity
def remove_identity(cmd, name, scope=None, resource_group_name=None):
    """Strip the managed identity from a policy assignment; return the cleared identity."""
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    scope = _build_policy_scope(policy_client.config.subscription_id, resource_group_name, scope)
    assignment = policy_client.policy_assignments.get(scope, name)
    ResourceIdentityType = cmd.get_models('ResourceIdentityType')
    ResourceIdentity = cmd.get_models('Identity')
    assignment.identity = ResourceIdentity(type=ResourceIdentityType.none)
    return policy_client.policy_assignments.create(scope, name, assignment).identity
def enforce_mutually_exclusive(subscription, management_group):
    """Raise IncorrectUsageError when both subscription and management_group are truthy."""
    both_supplied = bool(subscription) and bool(management_group)
    if both_supplied:
        raise IncorrectUsageError('cannot provide both --subscription and --management-group')
def create_policy_definition(cmd, name, rules=None, params=None, display_name=None, description=None, mode=None,
                             metadata=None, subscription=None, management_group=None):
    """Create a policy definition at subscription or management-group scope.

    :param rules: policy rule as in-line JSON, a file path, or a URI (required by the loader).
    :param params: parameter declarations as in-line JSON, file path, or URI (optional).
    :param mode: policy mode; only sent when the API supports 2016-12-01+.
    :param metadata: free-form metadata; only sent on 2017-06-01-preview+.
    :param subscription: target subscription id or name; mutually exclusive with
        management_group on 2018-03-01+.
    :param management_group: target management-group name.
    """
    rules = _load_file_string_or_uri(rules, 'rules')
    params = _load_file_string_or_uri(params, 'params', False)
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    PolicyDefinition = cmd.get_models('PolicyDefinition')
    parameters = PolicyDefinition(policy_rule=rules, parameters=params, description=description,
                                  display_name=display_name)
    if cmd.supported_api_version(min_api='2016-12-01'):
        parameters.mode = mode
    if cmd.supported_api_version(min_api='2017-06-01-preview'):
        parameters.metadata = metadata
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return policy_client.policy_definitions.create_or_update_at_management_group(name, parameters, management_group)
        if subscription:
            subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
            # Retarget the shared client at the resolved subscription before the call below.
            policy_client.config.subscription_id = subscription_id
    return policy_client.policy_definitions.create_or_update(name, parameters)
def create_policy_setdefinition(cmd, name, definitions, params=None, display_name=None, description=None,
                                subscription=None, management_group=None, definition_groups=None, metadata=None):
    """Create a policy set definition (initiative) at subscription or management-group scope.

    :param definitions: policy definition references as in-line JSON, file path, or URI.
    :param params: parameter declarations (optional).
    :param definition_groups: grouping metadata for the contained definitions (optional).
    :param metadata: free-form metadata; only sent on 2017-06-01-preview+.
    :param subscription: target subscription; mutually exclusive with management_group
        on 2018-03-01+.
    :param management_group: target management-group name.
    """
    definitions = _load_file_string_or_uri(definitions, 'definitions')
    params = _load_file_string_or_uri(params, 'params', False)
    definition_groups = _load_file_string_or_uri(definition_groups, 'definition_groups', False)
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    PolicySetDefinition = cmd.get_models('PolicySetDefinition')
    parameters = PolicySetDefinition(policy_definitions=definitions, parameters=params, description=description,
                                     display_name=display_name, policy_definition_groups=definition_groups)
    if cmd.supported_api_version(min_api='2017-06-01-preview'):
        parameters.metadata = metadata
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return policy_client.policy_set_definitions.create_or_update_at_management_group(name, parameters, management_group)
        if subscription:
            subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
            # Retarget the shared client at the resolved subscription before the call below.
            policy_client.config.subscription_id = subscription_id
    return policy_client.policy_set_definitions.create_or_update(name, parameters)
def get_policy_definition(cmd, policy_definition_name, subscription=None, management_group=None):
    """Fetch a custom or built-in policy definition by name."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    return _get_custom_or_builtin_policy(
        cmd, client, policy_definition_name, subscription, management_group)
def get_policy_setdefinition(cmd, policy_set_definition_name, subscription=None, management_group=None):
    """Fetch a custom or built-in policy set definition (initiative) by name."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    return _get_custom_or_builtin_policy(
        cmd, client, policy_set_definition_name, subscription, management_group, True)
def list_policy_definition(cmd, subscription=None, management_group=None):
    """List policy definitions at subscription or management-group scope."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_definitions.list_by_management_group(management_group)
        if subscription:
            # Retarget the shared client before listing.
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_definitions.list()
def list_policy_setdefinition(cmd, subscription=None, management_group=None):
    """List policy set definitions (initiatives) at the requested scope."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_set_definitions.list_by_management_group(management_group)
        if subscription:
            # Retarget the shared client before listing.
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_set_definitions.list()
def delete_policy_definition(cmd, policy_definition_name, subscription=None, management_group=None):
    """Delete a policy definition at the requested scope."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_definitions.delete_at_management_group(policy_definition_name, management_group)
        if subscription:
            # Retarget the shared client before deleting.
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_definitions.delete(policy_definition_name)
def delete_policy_setdefinition(cmd, policy_set_definition_name, subscription=None, management_group=None):
    """Delete a policy set definition (initiative) at the requested scope."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_set_definitions.delete_at_management_group(policy_set_definition_name, management_group)
        if subscription:
            # Retarget the shared client before deleting.
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_set_definitions.delete(policy_set_definition_name)
def update_policy_definition(cmd, policy_definition_name, rules=None, params=None,
                             display_name=None, description=None, metadata=None, mode=None,
                             subscription=None, management_group=None):
    """Update a policy definition, keeping any field left as None unchanged.

    The existing definition is fetched first so unspecified fields carry over.
    Note mode is always overwritten (even with None) on API 2016-12-01+.
    """
    rules = _load_file_string_or_uri(rules, 'rules', False)
    params = _load_file_string_or_uri(params, 'params', False)
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    definition = _get_custom_or_builtin_policy(cmd, policy_client, policy_definition_name, subscription, management_group)
    # pylint: disable=line-too-long,no-member
    PolicyDefinition = cmd.get_models('PolicyDefinition')
    # Merge: caller-supplied values win; otherwise keep the current definition's values.
    parameters = PolicyDefinition(
        policy_rule=rules if rules is not None else definition.policy_rule,
        parameters=params if params is not None else definition.parameters,
        display_name=display_name if display_name is not None else definition.display_name,
        description=description if description is not None else definition.description,
        metadata=metadata if metadata is not None else definition.metadata)
    if cmd.supported_api_version(min_api='2016-12-01'):
        parameters.mode = mode
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return policy_client.policy_definitions.create_or_update_at_management_group(policy_definition_name, parameters, management_group)
        if subscription:
            subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
            # Retarget the shared client at the resolved subscription before the call below.
            policy_client.config.subscription_id = subscription_id
    return policy_client.policy_definitions.create_or_update(policy_definition_name, parameters)
def update_policy_setdefinition(cmd, policy_set_definition_name, definitions=None, params=None,
                                display_name=None, description=None,
                                subscription=None, management_group=None, definition_groups=None, metadata=None):
    """Update a policy set definition (initiative), keeping None fields unchanged.

    The existing set definition is fetched first so unspecified fields carry over.
    """
    definitions = _load_file_string_or_uri(definitions, 'definitions', False)
    params = _load_file_string_or_uri(params, 'params', False)
    definition_groups = _load_file_string_or_uri(definition_groups, 'definition_groups', False)
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    definition = _get_custom_or_builtin_policy(cmd, policy_client, policy_set_definition_name, subscription, management_group, True)
    # pylint: disable=line-too-long,no-member
    PolicySetDefinition = cmd.get_models('PolicySetDefinition')
    # Merge: caller-supplied values win; otherwise keep the current definition's values.
    parameters = PolicySetDefinition(
        policy_definitions=definitions if definitions is not None else definition.policy_definitions,
        description=description if description is not None else definition.description,
        display_name=display_name if display_name is not None else definition.display_name,
        parameters=params if params is not None else definition.parameters,
        policy_definition_groups=definition_groups if definition_groups is not None else definition.policy_definition_groups,
        metadata=metadata if metadata is not None else definition.metadata)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return policy_client.policy_set_definitions.create_or_update_at_management_group(policy_set_definition_name, parameters, management_group)
        if subscription:
            subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
            # Retarget the shared client at the resolved subscription before the call below.
            policy_client.config.subscription_id = subscription_id
    return policy_client.policy_set_definitions.create_or_update(policy_set_definition_name, parameters)
def _register_rp(cli_ctx, subscription_id=None):
    """Register the Microsoft.Management resource provider and block until registered.

    NOTE(review): polls every 10s with no upper bound, so this hangs forever if
    registration never completes — confirm that is the intended behavior.
    """
    import time
    provider_namespace = "Microsoft.Management"
    rcf = get_mgmt_service_client(
        cli_ctx,
        ResourceType.MGMT_RESOURCE_RESOURCES,
        subscription_id)
    rcf.providers.register(provider_namespace)
    registered = False
    while not registered:
        time.sleep(10)
        registered = rcf.providers.get(provider_namespace).registration_state == 'Registered'
def _get_subscription_id_from_subscription(cli_ctx, subscription):  # pylint: disable=inconsistent-return-statements
    """Resolve a subscription name or id to its subscription id, from the cached profile."""
    from azure.cli.core._profile import Profile
    cached = Profile(cli_ctx=cli_ctx).load_cached_subscriptions()
    match = next((sub['id'] for sub in cached if subscription in (sub['id'], sub['name'])), None)
    if match is None:
        raise CLIError("Subscription not found in the current context.")
    return match
def _get_parent_id_from_parent(parent):
if parent is None or _is_management_group_scope(parent):
return parent
return "/providers/Microsoft.Management/managementGroups/" + parent
def _is_management_group_scope(scope):
return scope is not None and scope.lower().startswith("/providers/microsoft.management/managementgroups")
def cli_managementgroups_group_list(cmd, client):
    """Return every management group visible to the caller (ensures the RP is registered)."""
    _register_rp(cmd.cli_ctx)
    groups = client.list()
    return groups
def cli_managementgroups_group_show(
        cmd,
        client,
        group_name,
        expand=False,
        recurse=False):
    """Get one management group, optionally expanding (and recursing into) its children."""
    _register_rp(cmd.cli_ctx)
    if not expand:
        return client.get(group_name)
    return client.get(group_name, "children", recurse)
def cli_managementgroups_group_create(
        cmd,
        client,
        group_name,
        display_name=None,
        parent=None):
    """Create (or update) a management group, optionally under a parent group."""
    from azure.mgmt.managementgroups.models import (
        CreateManagementGroupRequest, CreateManagementGroupDetails, CreateParentGroupInfo)
    _register_rp(cmd.cli_ctx)
    parent_info = CreateParentGroupInfo(id=_get_parent_id_from_parent(parent))
    details = CreateManagementGroupDetails(parent=parent_info)
    request = CreateManagementGroupRequest(
        name=group_name,
        display_name=display_name,
        details=details)
    return client.create_or_update(group_name, request)
def cli_managementgroups_group_update_custom_func(
        instance,
        display_name=None,
        parent_id=None):
    """Apply display-name/parent changes onto the patch-request instance and return it."""
    instance.parent_id = _get_parent_id_from_parent(parent_id)
    instance.display_name = display_name
    return instance
def cli_managementgroups_group_update_get():
    """Return an empty PatchManagementGroupRequest for the generic-update flow."""
    from azure.mgmt.managementgroups.models import PatchManagementGroupRequest
    return PatchManagementGroupRequest(display_name=None, parent_id=None)
def cli_managementgroups_group_update_set(
        cmd, client, group_name, parameters=None):
    """Persist the patched management-group request built by the update flow."""
    result = client.update(group_name, parameters)
    return result
def cli_managementgroups_group_delete(cmd, client, group_name):
    """Delete a management group (ensures the RP is registered first)."""
    _register_rp(cmd.cli_ctx)
    result = client.delete(group_name)
    return result
def cli_managementgroups_subscription_add(
        cmd, client, group_name, subscription):
    """Associate a subscription (given by id or name) with a management group."""
    sub_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.create(group_name, sub_id)
def cli_managementgroups_subscription_remove(
        cmd, client, group_name, subscription):
    """Remove a subscription (given by id or name) from a management group."""
    sub_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.delete(group_name, sub_id)
# region Locks
def _validate_lock_params_match_lock(
        lock_client, name, resource_group, resource_provider_namespace, parent_resource_path,
        resource_type, resource_name):
    """
    Locks are scoped to subscription, resource group or resource.
    However, the az list command returns all locks for the current scopes
    and all lower scopes (e.g. resource group level also includes resource locks).
    This can lead to a confusing user experience where the user specifies a lock
    name and assumes that it will work, even if they haven't given the right
    scope. This function attempts to validate the parameters and help the
    user find the right scope, by first finding the lock, and then inferring
    what its parameters should be.

    Raises CLIError when the supplied scope parameters disagree with the
    single lock found under that name; silently returns otherwise.
    """
    locks = lock_client.management_locks.list_at_subscription_level()
    found_count = 0  # locks at different levels can have the same name
    lock_resource_id = None
    for lock in locks:
        if lock.name == name:
            found_count = found_count + 1
            lock_resource_id = lock.id
    if found_count == 1:
        # If we only found one lock, let's validate that the parameters are correct,
        # if we found more than one, we'll assume the user knows what they're doing
        # TODO: Add validation for that case too?
        resource = parse_resource_id(lock_resource_id)
        _resource_group = resource.get('resource_group', None)
        _resource_namespace = resource.get('namespace', None)
        # Subscription-level lock: nothing further to validate.
        if _resource_group is None:
            return
        if resource_group != _resource_group:
            raise CLIError(
                'Unexpected --resource-group for lock {}, expected {}'.format(
                    name, _resource_group))
        # A group-level lock has no provider namespace (or the Authorization RP).
        if _resource_namespace is None or _resource_namespace == 'Microsoft.Authorization':
            return
        if resource_provider_namespace != _resource_namespace:
            raise CLIError(
                'Unexpected --namespace for lock {}, expected {}'.format(name, _resource_namespace))
        # Work out the resource type/name and (when nested) the expected parent path
        # from however deep the lock's resource id goes.
        if resource.get('child_type_2', None) is None:
            _resource_type = resource.get('type', None)
            _resource_name = resource.get('name', None)
        else:
            if resource.get('child_type_3', None) is None:
                # One level of nesting: parent is type/name.
                _resource_type = resource.get('child_type_1', None)
                _resource_name = resource.get('child_name_1', None)
                parent = (resource['type'] + '/' + resource['name'])
            else:
                # Two levels of nesting: parent is type/name/child_type_1/child_name_1.
                _resource_type = resource.get('child_type_2', None)
                _resource_name = resource.get('child_name_2', None)
                parent = (resource['type'] + '/' + resource['name'] + '/' +
                          resource['child_type_1'] + '/' + resource['child_name_1'])
            if parent != parent_resource_path:
                raise CLIError(
                    'Unexpected --parent for lock {}, expected {}'.format(
                        name, parent))
        if resource_type != _resource_type:
            raise CLIError('Unexpected --resource-type for lock {}, expected {}'.format(
                name, _resource_type))
        if resource_name != _resource_name:
            raise CLIError('Unexpected --resource-name for lock {}, expected {}'.format(
                name, _resource_name))
def list_locks(cmd, resource_group=None,
               resource_provider_namespace=None, parent_resource_path=None, resource_type=None,
               resource_name=None, filter_string=None):
    """
    List locks at subscription, resource-group or resource scope.

    :param resource_provider_namespace: Name of a resource provider.
    :param parent_resource_path: Path to a parent resource.
    :param resource_type: The type for the resource with the lock.
    :param resource_name: Name of a resource that has a lock.
    :param filter_string: A query filter to use to restrict the results.
    """
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    (resource_group, resource_name,
     resource_provider_namespace, resource_type) = _extract_lock_params(
         resource_group, resource_provider_namespace, resource_type, resource_name)
    locks = lock_client.management_locks
    if resource_group is None:
        return locks.list_at_subscription_level(filter=filter_string)
    if resource_name is None:
        return locks.list_at_resource_group_level(
            resource_group, filter=filter_string)
    return locks.list_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, filter=filter_string)
# pylint: disable=inconsistent-return-statements
def get_lock(cmd, lock_name=None, resource_group=None, resource_provider_namespace=None,
             parent_resource_path=None, resource_type=None, resource_name=None, ids=None):
    """
    Show a lock at subscription, resource-group or resource scope.

    :param lock_name: The name of the lock.
    :type lock_name: str
    :param ids: one or more lock resource ids; when given, all other scope
        parameters are derived from each id.
    """
    if ids:
        kwargs_list = []
        for id_arg in ids:
            try:
                kwargs_list.append(_parse_lock_id(id_arg))
            except AttributeError:
                logger.error('az lock show: error: argument --ids: invalid ResourceId value: \'%s\'', id_arg)
                return
        results = [get_lock(cmd, **kwargs) for kwargs in kwargs_list]
        return results[0] if len(results) == 1 else results
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    lock_resource = _extract_lock_params(resource_group, resource_provider_namespace,
                                         resource_type, resource_name)
    resource_group = lock_resource[0]
    resource_name = lock_resource[1]
    resource_provider_namespace = lock_resource[2]
    resource_type = lock_resource[3]
    # Fail early with a helpful message if the scope args don't match the lock.
    _validate_lock_params_match_lock(lock_client, lock_name, resource_group,
                                     resource_provider_namespace, parent_resource_path,
                                     resource_type, resource_name)
    if resource_group is None:
        return _call_subscription_get(cmd, lock_client, lock_name)
    if resource_name is None:
        return lock_client.management_locks.get_at_resource_group_level(resource_group, lock_name)
    if cmd.supported_api_version(max_api='2015-01-01'):
        # Old API versions have no get-at-resource-level; filter the list instead.
        # Fix: 'cmd' must be the first argument to list_locks — previously it was
        # omitted, shifting every positional argument by one and crashing inside
        # list_locks (resource_group was used as 'cmd').
        lock_list = list_locks(cmd, resource_group, resource_provider_namespace, parent_resource_path,
                               resource_type, resource_name)
        return next((lock for lock in lock_list if lock.name == lock_name), None)
    return lock_client.management_locks.get_at_resource_level(
        resource_group, resource_provider_namespace,
        parent_resource_path or '', resource_type, resource_name, lock_name)
# pylint: disable=inconsistent-return-statements
def delete_lock(cmd, lock_name=None, resource_group=None, resource_provider_namespace=None,
                parent_resource_path=None, resource_type=None, resource_name=None, ids=None):
    """
    Delete a lock at subscription, resource-group or resource scope.

    :param lock_name: The name of the lock.
    :param resource_provider_namespace: Name of a resource provider.
    :param parent_resource_path: Path to a parent resource.
    :param resource_type: The type for the resource with the lock.
    :param resource_name: Name of a resource that has a lock.
    :param ids: one or more lock resource ids; when given, the scope
        parameters are derived from each id.
    """
    if ids:
        parsed_kwargs = []
        for id_arg in ids:
            try:
                parsed_kwargs.append(_parse_lock_id(id_arg))
            except AttributeError:
                logger.error('az lock delete: error: argument --ids: invalid ResourceId value: \'%s\'', id_arg)
                return
        results = [delete_lock(cmd, **kwargs) for kwargs in parsed_kwargs]
        return results[0] if len(results) == 1 else results
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    (resource_group, resource_name,
     resource_provider_namespace, resource_type) = _extract_lock_params(
         resource_group, resource_provider_namespace, resource_type, resource_name)
    _validate_lock_params_match_lock(lock_client, lock_name, resource_group,
                                     resource_provider_namespace, parent_resource_path,
                                     resource_type, resource_name)
    locks = lock_client.management_locks
    if resource_group is None:
        return locks.delete_at_subscription_level(lock_name)
    if resource_name is None:
        return locks.delete_at_resource_group_level(
            resource_group, lock_name)
    return locks.delete_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, lock_name)
def create_lock(cmd, lock_name, level,
                resource_group=None, resource_provider_namespace=None, notes=None,
                parent_resource_path=None, resource_type=None, resource_name=None):
    """
    Create a lock at subscription, resource-group or resource scope.

    :param lock_name: The name of the lock.
    :param level: the lock type (e.g. CanNotDelete, ReadOnly).
    :param resource_provider_namespace: Name of a resource provider.
    :param parent_resource_path: Path to a parent resource.
    :param resource_type: The type for the resource with the lock.
    :param resource_name: Name of a resource that has a lock.
    :param notes: Notes about this lock.
    """
    ManagementLockObject = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_LOCKS, 'ManagementLockObject', mod='models')
    lock_object = ManagementLockObject(level=level, notes=notes, name=lock_name)
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    (resource_group, resource_name,
     resource_provider_namespace, resource_type) = _extract_lock_params(
         resource_group, resource_provider_namespace, resource_type, resource_name)
    locks = lock_client.management_locks
    if resource_group is None:
        return locks.create_or_update_at_subscription_level(lock_name, lock_object)
    if resource_name is None:
        return locks.create_or_update_at_resource_group_level(
            resource_group, lock_name, lock_object)
    return locks.create_or_update_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, lock_name, lock_object)
# pylint: disable=inconsistent-return-statements
def update_lock(cmd, lock_name=None, resource_group=None, resource_provider_namespace=None, notes=None,
                parent_resource_path=None, resource_type=None, resource_name=None, level=None, ids=None):
    """
    Allows updates to the lock-type(level) and the notes of the lock.

    :param lock_name: name of the lock to update.
    :param ids: one or more lock resource ids; when given, the scope parameters
        are derived from each id and level/notes are applied to every lock.
    """
    if ids:
        kwargs_list = []
        for id_arg in ids:
            try:
                kwargs_list.append(_parse_lock_id(id_arg))
            except AttributeError:
                logger.error('az lock update: error: argument --ids: invalid ResourceId value: \'%s\'', id_arg)
                return
        results = [update_lock(cmd, level=level, notes=notes, **kwargs) for kwargs in kwargs_list]
        return results[0] if len(results) == 1 else results
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    lock_resource = _extract_lock_params(resource_group, resource_provider_namespace,
                                         resource_type, resource_name)
    resource_group = lock_resource[0]
    resource_name = lock_resource[1]
    resource_provider_namespace = lock_resource[2]
    resource_type = lock_resource[3]
    # Fail early with a helpful message if the scope args don't match the lock.
    _validate_lock_params_match_lock(lock_client, lock_name, resource_group, resource_provider_namespace,
                                     parent_resource_path, resource_type, resource_name)
    if resource_group is None:
        params = _call_subscription_get(cmd, lock_client, lock_name)
        _update_lock_parameters(params, level, notes)
        return lock_client.management_locks.create_or_update_at_subscription_level(lock_name, params)
    if resource_name is None:
        params = lock_client.management_locks.get_at_resource_group_level(resource_group, lock_name)
        _update_lock_parameters(params, level, notes)
        return lock_client.management_locks.create_or_update_at_resource_group_level(
            resource_group, lock_name, params)
    if cmd.supported_api_version(max_api='2015-01-01'):
        # Old API versions have no get-at-resource-level; filter the list instead.
        # Fix: 'cmd' must be the first argument to list_locks — previously it was
        # omitted, shifting every positional argument by one and crashing inside
        # list_locks (resource_group was used as 'cmd').
        lock_list = list_locks(cmd, resource_group, resource_provider_namespace, parent_resource_path,
                               resource_type, resource_name)
        return next((lock for lock in lock_list if lock.name == lock_name), None)
    params = lock_client.management_locks.get_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, lock_name)
    _update_lock_parameters(params, level, notes)
    return lock_client.management_locks.create_or_update_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, lock_name, params)
# endregion
# region ResourceLinks
def create_resource_link(cmd, link_id, target_id, notes=None):
    """Create a resource link from *link_id* to *target_id*."""
    client = _resource_links_client_factory(cmd.cli_ctx).resource_links
    ResourceLinkProperties = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_LINKS,
                                     'ResourceLinkProperties', mod='models')
    link_properties = ResourceLinkProperties(target_id=target_id, notes=notes)
    client.create_or_update(link_id, link_properties)
def update_resource_link(cmd, link_id, target_id=None, notes=None):
    """Update a resource link, keeping unspecified fields unchanged."""
    client = _resource_links_client_factory(cmd.cli_ctx).resource_links
    existing = client.get(link_id)
    ResourceLinkProperties = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_LINKS,
                                     'ResourceLinkProperties', mod='models')
    # pylint: disable=no-member
    new_target = existing.properties.target_id if target_id is None else target_id
    new_notes = existing.properties.notes if notes is None else notes
    client.create_or_update(link_id, ResourceLinkProperties(target_id=new_target, notes=new_notes))
def list_resource_links(cmd, scope=None, filter_string=None):
    """List resource links at a source scope, or subscription-wide when scope is None."""
    client = _resource_links_client_factory(cmd.cli_ctx).resource_links
    if scope is None:
        return client.list_at_subscription(filter=filter_string)
    return client.list_at_source_scope(scope, filter=filter_string)
# endregion
# region tags
def get_tag_at_scope(cmd, resource_id=None):
    """Get tags at a resource scope, or list subscription tag names when no scope is given."""
    rcf = _resource_client_factory(cmd.cli_ctx)
    if resource_id is None:
        return rcf.tags.list()
    return rcf.tags.get_at_scope(scope=resource_id)
def create_or_update_tag_at_scope(cmd, resource_id=None, tags=None, tag_name=None):
    """Replace all tags on a resource, or create a subscription-level tag name."""
    rcf = _resource_client_factory(cmd.cli_ctx)
    if resource_id is None:
        return rcf.tags.create_or_update(tag_name=tag_name)
    if not tags:
        raise IncorrectUsageError("Tags could not be empty.")
    Tags = cmd.get_models('Tags')
    return rcf.tags.create_or_update_at_scope(scope=resource_id, properties=Tags(tags=tags))
def delete_tag_at_scope(cmd, resource_id=None, tag_name=None):
    """Delete all tags at a resource scope, or a subscription-level tag name."""
    rcf = _resource_client_factory(cmd.cli_ctx)
    if resource_id is None:
        return rcf.tags.delete(tag_name=tag_name)
    return rcf.tags.delete_at_scope(scope=resource_id)
def update_tag_at_scope(cmd, resource_id, tags, operation):
    """Apply a merge/replace/delete tag *operation* at a resource scope."""
    rcf = _resource_client_factory(cmd.cli_ctx)
    if not tags:
        raise IncorrectUsageError("Tags could not be empty.")
    Tags = cmd.get_models('Tags')
    return rcf.tags.update_at_scope(scope=resource_id, properties=Tags(tags=tags), operation=operation)
# endregion
class _ResourceUtils: # pylint: disable=too-many-instance-attributes
    def __init__(self, cli_ctx,
                 resource_group_name=None, resource_provider_namespace=None,
                 parent_resource_path=None, resource_type=None, resource_name=None,
                 resource_id=None, api_version=None, rcf=None, latest_include_preview=False):
        """Resolve the client and API version for a resource addressed either by a
        full ARM id or by its individual parts.

        :param resource_id: full ARM id; when given, the individual scope parts
            are not validated and the api version is resolved from the id.
        :param api_version: explicit api version; resolved automatically when None.
        :param rcf: optional pre-built resource client factory (one is created when None).
        :param latest_include_preview: allow preview api versions when resolving.
        """
        # if the resource_type is in format 'namespace/type' split it.
        # (we don't have to do this, but commands like 'vm show' returns such values)
        if resource_type and not resource_provider_namespace and not parent_resource_path:
            parts = resource_type.split('/')
            if len(parts) > 1:
                resource_provider_namespace = parts[0]
                resource_type = parts[1]
        self.rcf = rcf or _resource_client_factory(cli_ctx)
        if api_version is None:
            if resource_id:
                api_version = _ResourceUtils._resolve_api_version_by_id(self.rcf, resource_id,
                                                                        latest_include_preview=latest_include_preview)
            else:
                # Without a full id, the individual scope parts are required.
                _validate_resource_inputs(resource_group_name, resource_provider_namespace,
                                          resource_type, resource_name)
                api_version = _ResourceUtils.resolve_api_version(self.rcf,
                                                                 resource_provider_namespace,
                                                                 parent_resource_path,
                                                                 resource_type,
                                                                 latest_include_preview=latest_include_preview)
        self.resource_group_name = resource_group_name
        self.resource_provider_namespace = resource_provider_namespace
        self.parent_resource_path = parent_resource_path if parent_resource_path else ''
        self.resource_type = resource_type
        self.resource_name = resource_name
        self.resource_id = resource_id
        self.api_version = api_version
def create_resource(self, properties, location, is_full_object):
try:
res = json.loads(properties)
except json.decoder.JSONDecodeError as ex:
raise CLIError('Error parsing JSON.\n{}\n{}'.format(properties, ex))
if not is_full_object:
if not location:
if self.resource_id:
rg_name = parse_resource_id(self.resource_id)['resource_group']
else:
rg_name = self.resource_group_name
location = self.rcf.resource_groups.get(rg_name).location
res = GenericResource(location=location, properties=res)
elif res.get('location', None) is None:
raise IncorrectUsageError("location of the resource is required")
if self.resource_id:
resource = self.rcf.resources.create_or_update_by_id(self.resource_id,
self.api_version,
res)
else:
resource = self.rcf.resources.create_or_update(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version,
res)
return resource
def get_resource(self, include_response_body=False):
if self.resource_id:
resource = self.rcf.resources.get_by_id(self.resource_id, self.api_version, raw=include_response_body)
else:
resource = self.rcf.resources.get(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version,
raw=include_response_body)
if include_response_body:
temp = resource.output
setattr(temp, 'response_body', json.loads(resource.response.content.decode()))
resource = temp
return resource
def delete(self):
if self.resource_id:
return self.rcf.resources.delete_by_id(self.resource_id, self.api_version)
return self.rcf.resources.delete(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version)
def update(self, parameters):
if self.resource_id:
return self.rcf.resources.create_or_update_by_id(self.resource_id,
self.api_version,
parameters)
return self.rcf.resources.create_or_update(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version,
parameters)
    def tag(self, tags, is_incremental=False):
        """Set the tags on the resource.

        :param tags: dict of tags to apply.
        :param is_incremental: when True, merge *tags* into the existing tags
            instead of replacing them; requires a non-empty *tags* dict.
        """
        resource = self.get_resource()
        if is_incremental is True:
            if not tags:
                raise CLIError("When modifying tag incrementally, the parameters of tag must have specific values.")
            if resource.tags:
                resource.tags.update(tags)
                tags = resource.tags
        # please add the service type that needs to be requested with PATCH type here
        # for example: the properties of RecoveryServices/vaults must be filled, and a PUT request that passes back
        # to properties will fail due to the lack of properties, so the PATCH type should be used
        need_patch_service = ['Microsoft.RecoveryServices/vaults', 'Microsoft.Resources/resourceGroups',
                              'Microsoft.ContainerRegistry/registries/webhooks',
                              'Microsoft.ContainerInstance/containerGroups']
        if resource is not None and resource.type in need_patch_service:
            # PATCH: send only the tags, leaving the rest of the resource untouched.
            parameters = GenericResource(tags=tags)
            if self.resource_id:
                return self.rcf.resources.update_by_id(self.resource_id, self.api_version, parameters)
            return self.rcf.resources.update(self.resource_group_name,
                                             self.resource_provider_namespace,
                                             self.parent_resource_path,
                                             self.resource_type,
                                             self.resource_name,
                                             self.api_version,
                                             parameters)
        # PUT: echo the full resource body back with the new tags.
        # pylint: disable=no-member
        parameters = GenericResource(
            location=resource.location,
            tags=tags,
            plan=resource.plan,
            properties=resource.properties,
            kind=resource.kind,
            managed_by=resource.managed_by,
            sku=resource.sku,
            identity=resource.identity)
        if self.resource_id:
            return self.rcf.resources.create_or_update_by_id(self.resource_id, self.api_version,
                                                             parameters)
        return self.rcf.resources.create_or_update(self.resource_group_name,
                                                   self.resource_provider_namespace,
                                                   self.parent_resource_path,
                                                   self.resource_type,
                                                   self.resource_name,
                                                   self.api_version,
                                                   parameters)
def invoke_action(self, action, request_body):
    """Invoke a custom POST action on the resource and poll it to completion.

    Builds the action URL (from ``resource_id`` when available, otherwise from
    the individual id components), sends the POST with *request_body* parsed
    as JSON, and returns an ``AzureOperationPoller`` whose result is the raw
    response text.
    """
    from msrestazure.azure_operation import AzureOperationPoller
    query_parameters = {}
    serialize = self.rcf.resources._serialize  # pylint: disable=protected-access
    client = self.rcf.resources._client  # pylint: disable=protected-access
    url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/' \
          '{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/{action}'
    if self.resource_id:
        # Full resource id available: just append the action segment.
        url = client.format_url(
            '{resource_id}/{action}',
            resource_id=self.resource_id,
            action=serialize.url("action", action, 'str'))
    else:
        # Assemble the URL from the individual id components.
        url = client.format_url(
            url,
            resourceGroupName=serialize.url(
                "resource_group_name", self.resource_group_name, 'str',
                max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            resourceProviderNamespace=serialize.url(
                "resource_provider_namespace", self.resource_provider_namespace, 'str'),
            parentResourcePath=serialize.url(
                "parent_resource_path", self.parent_resource_path, 'str', skip_quote=True),
            resourceType=serialize.url("resource_type", self.resource_type, 'str', skip_quote=True),
            resourceName=serialize.url("resource_name", self.resource_name, 'str'),
            subscriptionId=serialize.url(
                "self.config.subscription_id", self.rcf.resources.config.subscription_id, 'str'),
            action=serialize.url("action", action, 'str'))
    # Construct parameters
    query_parameters['api-version'] = serialize.query("api_version", self.api_version, 'str')
    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.rcf.resources.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid4())
    if self.rcf.resources.config.accept_language is not None:
        header_parameters['accept-language'] = serialize.header(
            "self.config.accept_language", self.rcf.resources.config.accept_language, 'str')

    # Construct and send request
    def long_running_send():
        # Initial POST that kicks off the (possibly long-running) action.
        request = client.post(url, query_parameters)
        return client.send(
            request, header_parameters, json.loads(request_body) if request_body else None)

    def get_long_running_status(status_link, headers=None):
        # Follow the status link returned by the service while polling.
        request = client.get(status_link)
        if headers:
            request.headers.update(headers)
        return client.send(request, header_parameters)

    def get_long_running_output(response):
        from msrestazure.azure_exceptions import CloudError
        if response.status_code not in [200, 202, 204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        # Return the raw body; callers decide how to parse it.
        return response.text

    return AzureOperationPoller(long_running_send, get_long_running_output, get_long_running_status,
                                self.rcf.resources.config.long_running_operation_timeout)
@staticmethod
def resolve_api_version(rcf, resource_provider_namespace, parent_resource_path, resource_type,
latest_include_preview=False):
provider = rcf.providers.get(resource_provider_namespace)
# If available, we will use parent resource's api-version
resource_type_str = (parent_resource_path.split('/')[0] if parent_resource_path else resource_type)
rt = [t for t in provider.resource_types
if t.resource_type.lower() == resource_type_str.lower()]
if not rt:
raise IncorrectUsageError('Resource type {} not found.'.format(resource_type_str))
if len(rt) == 1 and rt[0].api_versions:
# If latest_include_preview is true,
# the last api-version will be taken regardless of whether it is preview version or not
if latest_include_preview:
return rt[0].api_versions[0]
# Take the latest stable version first.
# if there is no stable version, the latest preview version will be taken.
npv = [v for v in rt[0].api_versions if 'preview' not in v.lower()]
return npv[0] if npv else rt[0].api_versions[0]
raise IncorrectUsageError(
'API version is required and could not be resolved for resource {}'
.format(resource_type))
@staticmethod
def _resolve_api_version_by_id(rcf, resource_id, latest_include_preview=False):
    """Resolve an api-version for a resource identified only by its full ARM id.

    A bare resource-group id short-circuits to the profile's resource
    management api-version; otherwise the id is split into namespace /
    parent path / type and delegated to :meth:`resolve_api_version`.
    """
    parts = parse_resource_id(resource_id)
    # A plain resource-group id parses to exactly subscription + resource_group.
    if len(parts) == 2 and parts['subscription'] is not None and parts['resource_group'] is not None:
        return AZURE_API_PROFILES['latest'][ResourceType.MGMT_RESOURCE_RESOURCES]
    if 'namespace' not in parts:
        raise CLIError('The type of value entered by --ids parameter is not supported.')
    # A child resource with its own provider namespace overrides the root namespace.
    namespace = parts.get('child_namespace_1', parts['namespace'])
    if parts.get('child_type_2'):
        # Two levels of nesting: the parent path covers root + first child.
        parent = (parts['type'] + '/' + parts['name'] + '/' +
                  parts['child_type_1'] + '/' + parts['child_name_1'])
        resource_type = parts['child_type_2']
    elif parts.get('child_type_1'):
        # if the child resource has a provider namespace it is independent of the
        # parent, so set the parent to empty
        if parts.get('child_namespace_1') is not None:
            parent = ''
        else:
            parent = parts['type'] + '/' + parts['name']
        resource_type = parts['child_type_1']
    else:
        # Top-level resource: no parent path.
        parent = None
        resource_type = parts['type']
    return _ResourceUtils.resolve_api_version(rcf, namespace, parent, resource_type,
                                              latest_include_preview=latest_include_preview)
| 50.184245
| 191
| 0.663564
|
from __future__ import print_function
from collections import OrderedDict
import codecs
import json
import os
import platform
import re
import ssl
import sys
import uuid
import base64
from six.moves.urllib.request import urlopen
from six.moves.urllib.parse import urlparse
from msrestazure.tools import is_valid_resource_id, parse_resource_id
from azure.mgmt.resource.resources.models import GenericResource, DeploymentMode
from azure.cli.core.parser import IncorrectUsageError
from azure.cli.core.util import get_file_json, read_file_content, shell_safe_json_parse, sdk_no_wait
from azure.cli.core.commands import LongRunningOperation
from azure.cli.core.commands.client_factory import get_mgmt_service_client
from azure.cli.core.profiles import ResourceType, get_sdk, get_api_version, AZURE_API_PROFILES
from azure.cli.command_modules.resource._client_factory import (
_resource_client_factory, _resource_policy_client_factory, _resource_lock_client_factory,
_resource_links_client_factory, _resource_deploymentscripts_client_factory, _authorization_management_client, _resource_managedapps_client_factory, _resource_templatespecs_client_factory)
from azure.cli.command_modules.resource._validators import _parse_lock_id
from knack.log import get_logger
from knack.prompting import prompt, prompt_pass, prompt_t_f, prompt_choice_list, prompt_int, NoTTYException
from knack.util import CLIError
from msrest.serialization import Serializer
from msrest.pipeline import SansIOHTTPPolicy
from ._validators import MSI_LOCAL_ID
from ._formatters import format_what_if_operation_result
logger = get_logger(__name__)
def _build_resource_id(**kwargs):
    """Assemble an ARM resource id from its component parts.

    Returns None when a required component is missing instead of raising.
    """
    from msrestazure.tools import resource_id as resource_id_from_dict
    try:
        built = resource_id_from_dict(**kwargs)
    except KeyError:
        # msrestazure raises KeyError when a required id component is absent.
        return None
    return built
def _process_parameters(template_param_defs, parameter_lists):
    """Merge deployment parameters from files, inline JSON, URIs and KEY=VALUE pairs.

    Each item of each list in *parameter_lists* is tried, in order, as a local
    parameters file, then as inline JSON, then as a URI, and finally as a
    KEY=VALUE pair typed against *template_param_defs*. Later items override
    earlier ones. Returns a dict of parameter name -> {'value': ...}.
    """

    def _try_parse_json_object(value):
        # Accept either a full parameters-file body or a bare parameters object.
        try:
            parsed = _remove_comments_from_json(value, False)
            return parsed.get('parameters', parsed)
        except Exception: return None

    def _try_load_file_object(file_path):
        try:
            is_file = os.path.isfile(file_path)
        except ValueError:
            # Strings that are not even valid paths raise ValueError here.
            return None
        if is_file is True:
            try:
                content = read_file_content(file_path)
                if not content:
                    return None
                parsed = _remove_comments_from_json(content, False, file_path)
                return parsed.get('parameters', parsed)
            except Exception as ex:
                raise CLIError("Failed to parse {} with exception:\n {}".format(file_path, ex))
        return None

    def _try_load_uri(uri):
        # Only attempt a download when the string actually looks like a URI.
        if "://" in uri:
            try:
                value = _urlretrieve(uri).decode('utf-8')
                parsed = _remove_comments_from_json(value, False)
                return parsed.get('parameters', parsed)
            except Exception: pass
        return None

    def _try_parse_key_value_object(template_param_defs, parameters, value):
        # '{}' on an empty accumulator means "explicitly no parameters".
        if value == '{}' and not parameters:
            return True
        try:
            key, value = value.split('=', 1)
        except ValueError:
            return False
        param = template_param_defs.get(key, None)
        if param is None:
            raise CLIError("unrecognized template parameter '{}'. Allowed parameters: {}"
                           .format(key, ', '.join(sorted(template_param_defs.keys()))))
        param_type = param.get('type', None)
        if param_type:
            param_type = param_type.lower()
        # Coerce the raw string according to the declared parameter type.
        if param_type in ['object', 'array', 'secureobject']:
            parameters[key] = {'value': shell_safe_json_parse(value)}
        elif param_type in ['string', 'securestring']:
            parameters[key] = {'value': value}
        elif param_type == 'bool':
            parameters[key] = {'value': value.lower() == 'true'}
        elif param_type == 'int':
            parameters[key] = {'value': int(value)}
        else:
            logger.warning("Unrecognized type '%s' for parameter '%s'. Interpretting as string.", param_type, key)
            parameters[key] = {'value': value}
        return True

    parameters = {}
    for params in parameter_lists or []:
        for item in params:
            # Resolution order: file, then inline JSON, then URI, then KEY=VALUE.
            param_obj = _try_load_file_object(item)
            if param_obj is None:
                param_obj = _try_parse_json_object(item)
            if param_obj is None:
                param_obj = _try_load_uri(item)
            if param_obj is not None:
                parameters.update(param_obj)
            elif not _try_parse_key_value_object(template_param_defs, parameters, item):
                raise CLIError('Unable to parse parameter: {}'.format(item))
    return parameters
def _find_missing_parameters(parameters, template):
if template is None:
return {}
template_parameters = template.get('parameters', None)
if template_parameters is None:
return {}
missing = OrderedDict()
for parameter_name in template_parameters:
parameter = template_parameters[parameter_name]
if 'defaultValue' in parameter:
continue
if parameters is not None and parameters.get(parameter_name, None) is not None:
continue
missing[parameter_name] = parameter
return missing
def _prompt_for_parameters(missing_parameters, fail_on_no_tty=True):
    """Interactively prompt for each missing template parameter.

    The prompt style follows the declared parameter type: a choice list when
    ``allowedValues`` is present, hidden input for securestring, typed prompts
    for int/bool, JSON input for object/array, plain text otherwise. When no
    TTY is available a type-appropriate placeholder is recorded and, if
    *fail_on_no_tty* is True, NoTTYException is raised after the walk.
    """
    # Preserve template order when given an OrderedDict; otherwise sort for determinism.
    prompt_list = missing_parameters.keys() if isinstance(missing_parameters, OrderedDict) \
        else sorted(missing_parameters)
    result = OrderedDict()
    no_tty = False
    for param_name in prompt_list:
        param = missing_parameters[param_name]
        param_type = param.get('type', 'string').lower()
        description = 'Missing description'
        metadata = param.get('metadata', None)
        if metadata is not None:
            description = metadata.get('description', description)
        allowed_values = param.get('allowedValues', None)
        prompt_str = "Please provide {} value for '{}' (? for help): ".format(param_type, param_name)
        # Loop until valid input is collected (only the object/array branch re-prompts).
        while True:
            if allowed_values is not None:
                try:
                    ix = prompt_choice_list(prompt_str, allowed_values, help_string=description)
                    result[param_name] = allowed_values[ix]
                except NoTTYException:
                    result[param_name] = None
                    no_tty = True
                break
            elif param_type == 'securestring':
                try:
                    value = prompt_pass(prompt_str, help_string=description)
                except NoTTYException:
                    value = None
                    no_tty = True
                result[param_name] = value
                break
            elif param_type == 'int':
                try:
                    int_value = prompt_int(prompt_str, help_string=description)
                    result[param_name] = int_value
                except NoTTYException:
                    result[param_name] = 0
                    no_tty = True
                break
            elif param_type == 'bool':
                try:
                    value = prompt_t_f(prompt_str, help_string=description)
                    result[param_name] = value
                except NoTTYException:
                    result[param_name] = False
                    no_tty = True
                break
            elif param_type in ['object', 'array']:
                try:
                    value = prompt(prompt_str, help_string=description)
                except NoTTYException:
                    value = ''
                    no_tty = True
                if value == '':
                    # Empty input maps to the type's empty container.
                    value = {} if param_type == 'object' else []
                else:
                    try:
                        value = shell_safe_json_parse(value)
                    except Exception as ex:
                        # Invalid JSON: report and re-prompt.
                        logger.error(ex)
                        continue
                result[param_name] = value
                break
            else:
                # Default: treat as a plain string.
                try:
                    result[param_name] = prompt(prompt_str, help_string=description)
                except NoTTYException:
                    result[param_name] = None
                    no_tty = True
                break
    if no_tty and fail_on_no_tty:
        raise NoTTYException
    return result
def _get_missing_parameters(parameters, template, prompt_fn, no_prompt=False):
    """Fill in template parameters the user did not supply.

    Missing parameters are collected via _find_missing_parameters and, unless
    *no_prompt* is set, obtained from *prompt_fn* and merged into *parameters*
    as {'value': ...} entries. Raises CLIError when prompting is impossible
    (no TTY). Returns the (possibly mutated) *parameters* dict.
    """
    missing = _find_missing_parameters(parameters, template)
    if not missing:
        return parameters
    if no_prompt is True:
        # Caller opted out of interaction; just surface what is absent.
        logger.warning("Missing input parameters: %s ", ', '.join(sorted(missing.keys())))
        return parameters
    try:
        supplied = prompt_fn(missing)
        for name in supplied:
            parameters[name] = {
                "value": supplied[name]
            }
    except NoTTYException:
        raise CLIError("Missing input parameters: {}".format(', '.join(sorted(missing.keys()))))
    return parameters
def _ssl_context():
if sys.version_info < (3, 4):
return ssl.SSLContext(ssl.PROTOCOL_TLSv1)
return ssl.create_default_context()
def _urlretrieve(url):
    """Download *url* over HTTPS (using this module's SSL context) and return the body as bytes."""
    response = urlopen(url, context=_ssl_context())
    return response.read()
def _remove_comments_from_json(template, preserve_order=True, file_path=None):
    """Parse JSON-with-comments (JSONC) template text into a Python object.

    Strips line (//) and block (/* */) comments, minifies with jsmin, replaces
    string literals that still span a raw newline (which would break JSON
    parsing) with a placeholder, then parses the result.

    :param template: raw template text, possibly containing comments.
    :param preserve_order: parse objects into OrderedDict when True.
    :param file_path: original file path, used only to improve error messages.
    :raises CLIError: when the cleaned text still fails to parse as JSON.
    """
    from jsmin import jsmin

    # Strip comments that start a line before minifying, since jsmin alone
    # does not handle every form the templates use.
    template = re.sub(r'(^[\t ]*//[\s\S]*?\n)|(^[\t ]*/\*{1,2}[\s\S]*?\*/)', '', template, flags=re.M)
    minified = jsmin(template)
    # Neutralize string literals containing a raw newline so json parsing succeeds.
    # BUG FIX: re.DOTALL was previously passed as the positional `count` argument
    # of re.sub (capping substitutions at 16, since re.DOTALL == 16); it must be
    # passed via the `flags` keyword.
    result = re.sub(r'"[^"]*?\n[^"]*?(?<!\\)"', '"#Azure Cli#"', minified, flags=re.DOTALL)
    try:
        return shell_safe_json_parse(result, preserve_order)
    except CLIError:
        if file_path:
            raise CLIError("Failed to parse '{}', please check whether it is a valid JSON format".format(file_path))
        raise CLIError("Failed to parse the JSON data, please check whether it is a valid JSON format")
def _deploy_arm_template_core_unmodified(cmd, resource_group_name, template_file=None,
                                         template_uri=None, deployment_name=None, parameters=None,
                                         mode=None, rollback_on_error=None, validate_only=False, no_wait=False,
                                         aux_subscriptions=None, aux_tenants=None, no_prompt=False):
    """Validate and (unless *validate_only*) submit a resource-group deployment.

    Loads the template from *template_file* or *template_uri*, resolves
    parameters (prompting for missing ones unless *no_prompt*), validates the
    deployment server-side, then submits it. Local template files are sent
    verbatim through a custom serializer/pipeline so their original text
    (including comments) reaches the service unmodified.
    """
    DeploymentProperties, TemplateLink, OnErrorDeployment = cmd.get_models('DeploymentProperties', 'TemplateLink',
                                                                           'OnErrorDeployment')
    template_link = None
    template_obj = None
    on_error_deployment = None
    template_content = None
    if template_uri:
        # Remote template: the service fetches it via the link; we only parse a
        # local copy to learn the declared parameters.
        template_link = TemplateLink(uri=template_uri)
        template_obj = _remove_comments_from_json(_urlretrieve(template_uri).decode('utf-8'), file_path=template_uri)
    else:
        template_content = read_file_content(template_file)
        template_obj = _remove_comments_from_json(template_content, file_path=template_file)
    if rollback_on_error == '':
        # Flag given without a value: roll back to the last successful deployment.
        on_error_deployment = OnErrorDeployment(type='LastSuccessful')
    elif rollback_on_error:
        on_error_deployment = OnErrorDeployment(type='SpecificDeployment', deployment_name=rollback_on_error)
    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])
    parameters = _process_parameters(template_param_defs, parameters) or {}
    parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters, no_prompt)
    # Round-trip through JSON to normalize OrderedDicts and other non-plain types.
    parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template_content, template_link=template_link,
                                      parameters=parameters, mode=mode, on_error_deployment=on_error_deployment)
    smc = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                  aux_subscriptions=aux_subscriptions, aux_tenants=aux_tenants)
    deployment_client = smc.deployments
    if not template_uri:
        # Local template: swap in the serializer/pipeline pair that transmits
        # the original template text verbatim instead of re-serializing it.
        deployment_client._serialize = JSONSerializer(
            deployment_client._serialize.dependencies
        )
        from msrest.pipeline import Pipeline
        from msrest.pipeline.requests import (
            RequestsCredentialsPolicy,
            RequestsPatchSession,
            PipelineRequestsHTTPSender
        )
        from msrest.universal_http.requests import RequestsHTTPSender
        smc.config.pipeline = Pipeline(
            policies=[
                JsonCTemplatePolicy(),
                smc.config.user_agent_policy,
                RequestsPatchSession(),
                smc.config.http_logger_policy,
                RequestsCredentialsPolicy(smc.config.credentials)
            ],
            sender=PipelineRequestsHTTPSender(RequestsHTTPSender(smc.config))
        )
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        # Newer API: validation takes a Deployment wrapper and returns a poller.
        Deployment = cmd.get_models('Deployment')
        deployment = Deployment(properties=properties)
        validation_poller = deployment_client.validate(resource_group_name, deployment_name, deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = deployment_client.validate(resource_group_name, deployment_name, properties)
    # Surface server-side validation errors before attempting the deployment.
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, deployment_client.create_or_update, resource_group_name, deployment_name, deployment)
    return sdk_no_wait(no_wait, deployment_client.create_or_update, resource_group_name, deployment_name, properties)
class JsonCTemplate:
    """Marker wrapper for raw template text that must be sent to the service verbatim."""

    def __init__(self, template_as_bytes):
        # Stored untouched; JsonCTemplatePolicy splices it into the request body.
        self.template_as_bytes = template_as_bytes
class JSONSerializer(Serializer):
    """Serializer that shields the raw template from msrest re-serialization.

    For deployment-shaped payloads carrying a template, the serialized dict's
    'template' entry is replaced with a JsonCTemplate marker so that
    JsonCTemplatePolicy can later inline the original text verbatim.
    """

    def body(self, data, data_type, **kwargs):
        deployment_types = ('Deployment', 'ScopedDeployment', 'DeploymentWhatIf', 'ScopedDeploymentWhatIf')
        if data_type not in deployment_types:
            return super(JSONSerializer, self).body(data, data_type, **kwargs)
        template = data.properties.template
        if not template:
            # No local template attached (e.g. templateLink deployments): default path.
            return super(JSONSerializer, self).body(data, data_type, **kwargs)
        serialized = data.serialize()
        # Swap the dict-ified template for the marker; the pipeline policy finishes the job.
        serialized["properties"]["template"] = JsonCTemplate(template)
        return serialized
class JsonCTemplatePolicy(SansIOHTTPPolicy):
    """Pipeline policy that splices verbatim template text into the request body.

    JSONSerializer leaves a JsonCTemplate marker under properties.template;
    this policy removes it (and any templateLink), serializes the remainder,
    then appends the original template text by hand so it is not re-escaped
    by json.dumps.
    """

    def on_request(self, request, **kwargs):
        http_request = request.http_request
        logger.info(http_request.data)
        # Only act on dict bodies that actually carry a template.
        if (getattr(http_request, 'data', {}) or {}).get('properties', {}).get('template'):
            template = http_request.data["properties"]["template"]
            if not isinstance(template, JsonCTemplate):
                raise ValueError()

            del http_request.data["properties"]["template"]
            # templateLink and template are mutually exclusive in the request body.
            if "templateLink" in http_request.data["properties"].keys():
                del http_request.data["properties"]["templateLink"]
            partial_request = json.dumps(http_request.data)

            # Drop the trailing '}}' and re-append it around the raw template text,
            # keeping the original template untouched inside the final JSON body.
            http_request.data = partial_request[:-2] + ", template:" + template.template_as_bytes + r"}}"
            http_request.data = http_request.data.encode('utf-8')
def deploy_arm_template_at_subscription_scope(cmd,
                                              template_file=None, template_uri=None, parameters=None,
                                              deployment_name=None, deployment_location=None,
                                              no_wait=False, handle_extended_json_format=None, no_prompt=False,
                                              confirm_with_what_if=None, what_if_result_format=None,
                                              what_if_exclude_change_types=None, template_spec=None):
    """Deploy an ARM template at subscription scope.

    When *confirm_with_what_if* is set, a what-if preview is rendered first
    and the user must confirm before the deployment is submitted.
    """
    if confirm_with_what_if:
        what_if_deploy_arm_template_at_subscription_scope(cmd,
                                                          template_file=template_file, template_uri=template_uri,
                                                          parameters=parameters, deployment_name=deployment_name,
                                                          deployment_location=deployment_location,
                                                          result_format=what_if_result_format,
                                                          exclude_change_types=what_if_exclude_change_types,
                                                          no_prompt=no_prompt, template_spec=template_spec)
        from knack.prompting import prompt_y_n
        if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
            # User declined after seeing the preview.
            return None
    return _deploy_arm_template_at_subscription_scope(cmd=cmd,
                                                      template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                      deployment_name=deployment_name, deployment_location=deployment_location,
                                                      validate_only=False, no_wait=no_wait,
                                                      no_prompt=no_prompt, template_spec=template_spec)
def validate_arm_template_at_subscription_scope(cmd,
                                                template_file=None, template_uri=None, parameters=None,
                                                deployment_name=None, deployment_location=None,
                                                no_wait=False, handle_extended_json_format=None,
                                                no_prompt=False, template_spec=None):
    """Validate (without deploying) an ARM template at subscription scope."""
    return _deploy_arm_template_at_subscription_scope(cmd=cmd,
                                                      template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                      deployment_name=deployment_name, deployment_location=deployment_location,
                                                      validate_only=True, no_wait=no_wait,
                                                      no_prompt=no_prompt, template_spec=template_spec,)
def _deploy_arm_template_at_subscription_scope(cmd,
                                               template_file=None, template_uri=None, parameters=None,
                                               deployment_name=None, deployment_location=None, validate_only=False,
                                               no_wait=False, no_prompt=False, template_spec=None):
    """Shared validate/deploy implementation for subscription-scope deployments."""
    deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
                                                                      template_uri=template_uri, parameters=parameters,
                                                                      mode='Incremental',
                                                                      no_prompt=no_prompt,
                                                                      template_spec=template_spec)
    # The template-inlining pipeline is only plugged in for local templates.
    mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        # Newer API: validation takes a Deployment wrapper and returns a poller.
        Deployment = cmd.get_models('Deployment')
        deployment = Deployment(properties=deployment_properties, location=deployment_location)
        validation_poller = mgmt_client.validate_at_subscription_scope(deployment_name, deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = mgmt_client.validate_at_subscription_scope(deployment_name, deployment_properties, deployment_location)
    # Surface server-side validation errors before attempting the deployment.
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_subscription_scope, deployment_name, deployment)
    return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_subscription_scope, deployment_name,
                       deployment_properties, deployment_location)
def deploy_arm_template_at_resource_group(cmd,
                                          resource_group_name=None,
                                          template_file=None, template_uri=None, parameters=None,
                                          deployment_name=None, mode=None, rollback_on_error=None,
                                          no_wait=False, handle_extended_json_format=None,
                                          aux_subscriptions=None, aux_tenants=None, no_prompt=False,
                                          confirm_with_what_if=None, what_if_result_format=None,
                                          what_if_exclude_change_types=None, template_spec=None):
    """Deploy an ARM template to a resource group.

    When *confirm_with_what_if* is set, a what-if preview is rendered first
    and the user must confirm before the deployment is submitted.
    """
    if confirm_with_what_if:
        what_if_deploy_arm_template_at_resource_group(cmd,
                                                      resource_group_name=resource_group_name,
                                                      template_file=template_file, template_uri=template_uri,
                                                      parameters=parameters, deployment_name=deployment_name, mode=mode,
                                                      aux_tenants=aux_tenants, result_format=what_if_result_format,
                                                      exclude_change_types=what_if_exclude_change_types,
                                                      no_prompt=no_prompt, template_spec=template_spec)
        from knack.prompting import prompt_y_n
        if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
            # User declined after seeing the preview.
            return None
    return _deploy_arm_template_at_resource_group(cmd=cmd,
                                                  resource_group_name=resource_group_name,
                                                  template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                  deployment_name=deployment_name, mode=mode, rollback_on_error=rollback_on_error,
                                                  validate_only=False, no_wait=no_wait,
                                                  aux_subscriptions=aux_subscriptions, aux_tenants=aux_tenants,
                                                  no_prompt=no_prompt, template_spec=template_spec)
def validate_arm_template_at_resource_group(cmd,
                                            resource_group_name=None,
                                            template_file=None, template_uri=None, parameters=None,
                                            deployment_name=None, mode=None, rollback_on_error=None,
                                            no_wait=False, handle_extended_json_format=None, no_prompt=False, template_spec=None):
    """Validate (without deploying) an ARM template against a resource group."""
    return _deploy_arm_template_at_resource_group(cmd,
                                                  resource_group_name=resource_group_name,
                                                  template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                  deployment_name=deployment_name, mode=mode, rollback_on_error=rollback_on_error,
                                                  validate_only=True, no_wait=no_wait,
                                                  no_prompt=no_prompt, template_spec=template_spec)
def _deploy_arm_template_at_resource_group(cmd,
                                           resource_group_name=None,
                                           template_file=None, template_uri=None, parameters=None,
                                           deployment_name=None, mode=None, rollback_on_error=None,
                                           validate_only=False, no_wait=False,
                                           aux_subscriptions=None, aux_tenants=None, no_prompt=False, template_spec=None):
    """Shared validate/deploy implementation for resource-group deployments."""
    deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
                                                                      template_uri=template_uri,
                                                                      parameters=parameters, mode=mode,
                                                                      rollback_on_error=rollback_on_error,
                                                                      no_prompt=no_prompt, template_spec=template_spec)
    # The template-inlining pipeline is only plugged in for local templates.
    mgmt_client = _get_deployment_management_client(cmd.cli_ctx, aux_subscriptions=aux_subscriptions,
                                                    aux_tenants=aux_tenants, plug_pipeline=(template_uri is None and template_spec is None))
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        # Newer API: validation takes a Deployment wrapper and returns a poller.
        Deployment = cmd.get_models('Deployment')
        deployment = Deployment(properties=deployment_properties)
        validation_poller = mgmt_client.validate(resource_group_name, deployment_name, deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = mgmt_client.validate(resource_group_name, deployment_name, deployment_properties)
    # Surface server-side validation errors before attempting the deployment.
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, mgmt_client.create_or_update, resource_group_name, deployment_name, deployment)
    return sdk_no_wait(no_wait, mgmt_client.create_or_update, resource_group_name, deployment_name, deployment_properties)
def deploy_arm_template_at_management_group(cmd,
                                            management_group_id=None,
                                            template_file=None, template_uri=None, parameters=None,
                                            deployment_name=None, deployment_location=None,
                                            no_wait=False, handle_extended_json_format=None, no_prompt=False,
                                            confirm_with_what_if=None, what_if_result_format=None,
                                            what_if_exclude_change_types=None, template_spec=None):
    """Deploy an ARM template at management-group scope.

    When *confirm_with_what_if* is set, a what-if preview is rendered first
    and the user must confirm before the deployment is submitted.
    """
    if confirm_with_what_if:
        what_if_deploy_arm_template_at_management_group(cmd,
                                                        management_group_id=management_group_id,
                                                        template_file=template_file, template_uri=template_uri,
                                                        parameters=parameters, deployment_name=deployment_name,
                                                        deployment_location=deployment_location,
                                                        result_format=what_if_result_format,
                                                        exclude_change_types=what_if_exclude_change_types,
                                                        no_prompt=no_prompt, template_spec=template_spec)
        from knack.prompting import prompt_y_n
        if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
            # User declined after seeing the preview.
            return None
    return _deploy_arm_template_at_management_group(cmd=cmd,
                                                    management_group_id=management_group_id,
                                                    template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                    deployment_name=deployment_name, deployment_location=deployment_location,
                                                    validate_only=False, no_wait=no_wait,
                                                    no_prompt=no_prompt, template_spec=template_spec)
def validate_arm_template_at_management_group(cmd,
                                              management_group_id=None,
                                              template_file=None, template_uri=None, parameters=None,
                                              deployment_name=None, deployment_location=None,
                                              no_wait=False, handle_extended_json_format=None,
                                              no_prompt=False, template_spec=None):
    """Validate (without deploying) an ARM template at management-group scope."""
    return _deploy_arm_template_at_management_group(cmd=cmd,
                                                    management_group_id=management_group_id,
                                                    template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                    deployment_name=deployment_name, deployment_location=deployment_location,
                                                    validate_only=True, no_wait=no_wait,
                                                    no_prompt=no_prompt, template_spec=template_spec)
def _deploy_arm_template_at_management_group(cmd,
                                             management_group_id=None,
                                             template_file=None, template_uri=None, parameters=None,
                                             deployment_name=None, deployment_location=None, validate_only=False,
                                             no_wait=False, no_prompt=False, template_spec=None):
    """Shared validate/deploy implementation for management-group deployments."""
    deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
                                                                      template_uri=template_uri,
                                                                      parameters=parameters, mode='Incremental',
                                                                      no_prompt=no_prompt, template_spec=template_spec)
    # The template-inlining pipeline is only plugged in for local templates.
    mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        # Newer API: validation takes a ScopedDeployment wrapper and returns a poller.
        ScopedDeployment = cmd.get_models('ScopedDeployment')
        deployment = ScopedDeployment(properties=deployment_properties, location=deployment_location)
        validation_poller = mgmt_client.validate_at_management_group_scope(management_group_id, deployment_name, deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = mgmt_client.validate_at_management_group_scope(management_group_id, deployment_name,
                                                                           deployment_properties, deployment_location)
    # Surface server-side validation errors before attempting the deployment.
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_management_group_scope,
                           management_group_id, deployment_name, deployment)
    return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_management_group_scope,
                       management_group_id, deployment_name, deployment_properties, deployment_location)
def deploy_arm_template_at_tenant_scope(cmd,
                                        template_file=None, template_uri=None, parameters=None,
                                        deployment_name=None, deployment_location=None,
                                        no_wait=False, handle_extended_json_format=None, no_prompt=False,
                                        confirm_with_what_if=None, what_if_result_format=None,
                                        what_if_exclude_change_types=None, template_spec=None):
    """Deploy an ARM template at tenant scope.

    When *confirm_with_what_if* is set, a what-if preview is rendered first
    and the user must confirm before the deployment is submitted.
    """
    if confirm_with_what_if:
        what_if_deploy_arm_template_at_tenant_scope(cmd,
                                                    template_file=template_file, template_uri=template_uri,
                                                    parameters=parameters, deployment_name=deployment_name,
                                                    deployment_location=deployment_location,
                                                    result_format=what_if_result_format,
                                                    exclude_change_types=what_if_exclude_change_types,
                                                    no_prompt=no_prompt, template_spec=template_spec)
        from knack.prompting import prompt_y_n
        if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
            # User declined after seeing the preview.
            return None
    return _deploy_arm_template_at_tenant_scope(cmd=cmd,
                                                template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                deployment_name=deployment_name, deployment_location=deployment_location,
                                                validate_only=False, no_wait=no_wait,
                                                no_prompt=no_prompt, template_spec=template_spec)
def validate_arm_template_at_tenant_scope(cmd,
                                          template_file=None, template_uri=None, parameters=None,
                                          deployment_name=None, deployment_location=None,
                                          no_wait=False, handle_extended_json_format=None, no_prompt=False, template_spec=None):
    """Validate (without deploying) an ARM template at tenant scope."""
    return _deploy_arm_template_at_tenant_scope(cmd=cmd,
                                                template_file=template_file, template_uri=template_uri, parameters=parameters,
                                                deployment_name=deployment_name, deployment_location=deployment_location,
                                                validate_only=True, no_wait=no_wait,
                                                no_prompt=no_prompt, template_spec=template_spec)
def _deploy_arm_template_at_tenant_scope(cmd,
                                         template_file=None, template_uri=None, parameters=None,
                                         deployment_name=None, deployment_location=None, validate_only=False,
                                         no_wait=False, no_prompt=False, template_spec=None):
    """Shared validate/deploy implementation for tenant-scope deployments."""
    deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
                                                                      template_uri=template_uri,
                                                                      parameters=parameters, mode='Incremental',
                                                                      no_prompt=no_prompt, template_spec=template_spec,)
    # The template-inlining pipeline is only plugged in for local templates.
    mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        # Newer API: validation takes a ScopedDeployment wrapper and returns a poller.
        ScopedDeployment = cmd.get_models('ScopedDeployment')
        deployment = ScopedDeployment(properties=deployment_properties, location=deployment_location)
        validation_poller = mgmt_client.validate_at_tenant_scope(deployment_name=deployment_name, parameters=deployment)
        validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
    else:
        validation_result = mgmt_client.validate_at_tenant_scope(deployment_name=deployment_name,
                                                                 properties=deployment_properties,
                                                                 location=deployment_location)
    # Surface server-side validation errors before attempting the deployment.
    if validation_result and validation_result.error:
        err_message = _build_preflight_error_message(validation_result.error)
        raise CLIError(err_message)
    if validate_only:
        return validation_result
    if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
        return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_tenant_scope, deployment_name, deployment)
    return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_tenant_scope, deployment_name,
                       deployment_properties, deployment_location)
def what_if_deploy_arm_template_at_resource_group(cmd, resource_group_name,
                                                  template_file=None, template_uri=None, parameters=None,
                                                  deployment_name=None, mode=DeploymentMode.incremental,
                                                  aux_tenants=None, result_format=None,
                                                  no_pretty_print=None, no_prompt=False,
                                                  exclude_change_types=None, template_spec=None):
    """Run a deployment what-if at resource-group scope and render the result."""
    what_if_properties = _prepare_deployment_what_if_properties(cmd, template_file, template_uri,
                                                                parameters, mode, result_format, no_prompt, template_spec)
    # The template-inlining pipeline is only plugged in for local templates.
    mgmt_client = _get_deployment_management_client(cmd.cli_ctx, aux_tenants=aux_tenants,
                                                    plug_pipeline=(template_uri is None and template_spec is None))
    what_if_poller = mgmt_client.what_if(resource_group_name, deployment_name, what_if_properties)
    return _what_if_deploy_arm_template_core(cmd.cli_ctx, what_if_poller, no_pretty_print, exclude_change_types)
def what_if_deploy_arm_template_at_subscription_scope(cmd,
                                                      template_file=None, template_uri=None, parameters=None,
                                                      deployment_name=None, deployment_location=None,
                                                      result_format=None, no_pretty_print=None, no_prompt=False,
                                                      exclude_change_types=None, template_spec=None):
    """Run a What-If deployment at subscription scope and render the result."""
    props = _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
                                                   DeploymentMode.incremental, result_format, no_prompt, template_spec)
    # The custom JSON-C pipeline is only required when the template body is sent inline.
    client = _get_deployment_management_client(cmd.cli_ctx,
                                               plug_pipeline=(template_uri is None and template_spec is None))
    poller = client.what_if_at_subscription_scope(deployment_name, props, deployment_location)
    return _what_if_deploy_arm_template_core(cmd.cli_ctx, poller, no_pretty_print, exclude_change_types)
def what_if_deploy_arm_template_at_management_group(cmd, management_group_id=None,
                                                    template_file=None, template_uri=None, parameters=None,
                                                    deployment_name=None, deployment_location=None,
                                                    result_format=None, no_pretty_print=None, no_prompt=False,
                                                    exclude_change_types=None, template_spec=None):
    """Run a What-If deployment at management-group scope and render the result."""
    props = _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
                                                   DeploymentMode.incremental, result_format, no_prompt,
                                                   template_spec=template_spec)
    # The custom JSON-C pipeline is only required when the template body is sent inline.
    client = _get_deployment_management_client(cmd.cli_ctx,
                                               plug_pipeline=(template_uri is None and template_spec is None))
    poller = client.what_if_at_management_group_scope(management_group_id, deployment_name,
                                                      deployment_location, props)
    return _what_if_deploy_arm_template_core(cmd.cli_ctx, poller, no_pretty_print, exclude_change_types)
def what_if_deploy_arm_template_at_tenant_scope(cmd,
                                                template_file=None, template_uri=None, parameters=None,
                                                deployment_name=None, deployment_location=None,
                                                result_format=None, no_pretty_print=None, no_prompt=False,
                                                exclude_change_types=None, template_spec=None):
    """Run a What-If deployment at tenant scope and render the result."""
    props = _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
                                                   DeploymentMode.incremental, result_format, no_prompt, template_spec)
    # The custom JSON-C pipeline is only required when the template body is sent inline.
    client = _get_deployment_management_client(cmd.cli_ctx,
                                               plug_pipeline=(template_uri is None and template_spec is None))
    poller = client.what_if_at_tenant_scope(deployment_name, deployment_location, props)
    return _what_if_deploy_arm_template_core(cmd.cli_ctx, poller, no_pretty_print, exclude_change_types)
def _what_if_deploy_arm_template_core(cli_ctx, what_if_poller, no_pretty_print, exclude_change_types):
    """Wait for a What-If operation, filter its changes, and print or return the result.

    :param cli_ctx: CLI context (polling and color configuration).
    :param what_if_poller: poller returned by one of the SDK what_if calls.
    :param no_pretty_print: when truthy, return the raw result instead of printing it.
    :param exclude_change_types: optional iterable of change-type names
        (compared case-insensitively) to drop before rendering.
    :raises CLIError: when the What-If response carries a preflight error.
    :return: the What-If result object when no_pretty_print is set, otherwise None.
    """
    what_if_result = LongRunningOperation(cli_ctx)(what_if_poller)
    if what_if_result.error:
        # The What-If operation itself completed; the error describes a problem
        # with the ARM template, so surface it as a CLI failure.
        err_message = _build_preflight_error_message(what_if_result.error)
        raise CLIError(err_message)
    if exclude_change_types:
        exclude_change_types = set(map(lambda x: x.lower(), exclude_change_types))
        what_if_result.changes = list(
            filter(lambda x: x.change_type.lower() not in exclude_change_types, what_if_result.changes)
        )
    if no_pretty_print:
        return what_if_result
    try:
        if cli_ctx.enable_color:
            # Disabling colorama since it would silently strip out the Xterm 256 color
            # codes the What-If formatter uses. Unfortunately, the colors colorama
            # supports are very limited, which doesn't meet our needs.
            from colorama import deinit
            deinit()
            if platform.system() == "Windows":
                # With colorama disabled, VT processing must be enabled on the
                # Windows console so the ANSI color codes still render.
                from ._win_vt import enable_vt_mode
                enable_vt_mode()
        print(format_what_if_operation_result(what_if_result, cli_ctx.enable_color))
    finally:
        if cli_ctx.enable_color:
            # Restore colorama for the rest of the CLI session.
            from colorama import init
            init()
    return None
def _build_preflight_error_message(preflight_error):
err_messages = [f'{preflight_error.code} - {preflight_error.message}']
for detail in preflight_error.details or []:
err_messages.append(_build_preflight_error_message(detail))
return '\n'.join(err_messages)
def _prepare_deployment_properties_unmodified(cmd, template_file=None, template_uri=None, parameters=None,
                                              mode=None, rollback_on_error=None, no_prompt=False, template_spec=None):
    """Build a DeploymentProperties object from one template source.

    Exactly one source is used, checked in order: template_uri, template_spec,
    then template_file. Parameters are resolved against the template's parameter
    definitions, prompting for any missing ones unless no_prompt is set.
    """
    cli_ctx = cmd.cli_ctx
    DeploymentProperties, TemplateLink, OnErrorDeployment = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                                                   'DeploymentProperties', 'TemplateLink',
                                                                   'OnErrorDeployment', mod='models')
    template_link = None
    template_obj = None
    on_error_deployment = None
    template_content = None
    if template_uri:
        template_link = TemplateLink(uri=template_uri)
        # Download the remote template only to read its parameter definitions locally.
        template_obj = _remove_comments_from_json(_urlretrieve(template_uri).decode('utf-8'), file_path=template_uri)
    elif template_spec:
        template_link = TemplateLink(id=template_spec, mode="Incremental")
        # Fetch the template body stored inside the template spec resource.
        template_obj = show_resource(cmd=cmd, resource_ids=[template_spec]).properties['template']
    else:
        # Local file: the raw content is sent inline via the `template` property.
        template_content = read_file_content(template_file)
        template_obj = _remove_comments_from_json(template_content, file_path=template_file)
    if rollback_on_error == '':
        # Flag given without a value: roll back to the last successful deployment.
        on_error_deployment = OnErrorDeployment(type='LastSuccessful')
    elif rollback_on_error:
        on_error_deployment = OnErrorDeployment(type='SpecificDeployment', deployment_name=rollback_on_error)
    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])
    parameters = _process_parameters(template_param_defs, parameters) or {}
    parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters, no_prompt)
    # Round-trip through JSON to normalize parameter values to plain JSON types.
    parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template_content, template_link=template_link,
                                      parameters=parameters, mode=mode, on_error_deployment=on_error_deployment)
    return properties
def _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
                                           mode, result_format, no_prompt, template_spec):
    """Build DeploymentWhatIfProperties from an ordinary deployment properties object."""
    DeploymentWhatIfProperties, DeploymentWhatIfSettings = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                                                   'DeploymentWhatIfProperties', 'DeploymentWhatIfSettings',
                                                                   mod='models')
    base = _prepare_deployment_properties_unmodified(cmd=cmd, template_file=template_file,
                                                     template_uri=template_uri, parameters=parameters,
                                                     mode=mode, no_prompt=no_prompt, template_spec=template_spec)
    settings = DeploymentWhatIfSettings(result_format=result_format)
    return DeploymentWhatIfProperties(template=base.template, template_link=base.template_link,
                                      parameters=base.parameters, mode=base.mode,
                                      what_if_settings=settings)
def _get_deployment_management_client(cli_ctx, aux_subscriptions=None, aux_tenants=None, plug_pipeline=True):
    """Get the Deployments operations client, optionally rewiring its pipeline.

    When plug_pipeline is True (used when the template body is sent inline
    rather than via a link), the client's serializer and request pipeline are
    replaced so the raw template text can be spliced into the request
    (JsonCTemplatePolicy + JSONSerializer, both defined elsewhere in this module).
    """
    smc = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES, aux_subscriptions=aux_subscriptions,
                                  aux_tenants=aux_tenants)
    deployment_client = smc.deployments
    if plug_pipeline:
        # Swap in the custom serializer that preserves the template payload as-is.
        deployment_client._serialize = JSONSerializer(
            deployment_client._serialize.dependencies
        )
        from msrest.pipeline import Pipeline
        from msrest.pipeline.requests import (
            RequestsCredentialsPolicy,
            RequestsPatchSession,
            PipelineRequestsHTTPSender
        )
        from msrest.universal_http.requests import RequestsHTTPSender
        # Rebuild the pipeline with JsonCTemplatePolicy first so it can rewrite
        # the outgoing request body before the other policies run.
        smc.config.pipeline = Pipeline(
            policies=[
                JsonCTemplatePolicy(),
                smc.config.user_agent_policy,
                RequestsPatchSession(),
                smc.config.http_logger_policy,
                RequestsCredentialsPolicy(smc.config.credentials)
            ],
            sender=PipelineRequestsHTTPSender(RequestsHTTPSender(smc.config))
        )
    return deployment_client
def _list_resources_odata_filter_builder(resource_group_name=None, resource_provider_namespace=None,
resource_type=None, name=None, tag=None, location=None):
if tag is not None:
if resource_group_name:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--resource-group\''
'(If the default value for resource group is set, please use \'az configure --defaults group=""\' command to clear it first)')
if resource_provider_namespace:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--namespace\'')
if resource_type:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--resource-type\'')
if name:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--name\'')
if location:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--location\''
'(If the default value for location is set, please use \'az configure --defaults location=""\' command to clear it first)')
filters = []
if resource_group_name:
filters.append("resourceGroup eq '{}'".format(resource_group_name))
if name:
filters.append("name eq '{}'".format(name))
if location:
filters.append("location eq '{}'".format(location))
if resource_type:
if resource_provider_namespace:
f = "'{}/{}'".format(resource_provider_namespace, resource_type)
else:
if not re.match('[^/]+/[^/]+', resource_type):
raise CLIError(
'Malformed resource-type: '
'--resource-type=<namespace>/<resource-type> expected.')
f = "'{}'".format(resource_type)
filters.append("resourceType eq " + f)
else:
if resource_provider_namespace:
raise CLIError('--namespace also requires --resource-type')
if tag:
tag_name = list(tag.keys())[0] if isinstance(tag, dict) else tag
tag_value = tag[tag_name] if isinstance(tag, dict) else ''
if tag_name:
if tag_name[-1] == '*':
filters.append("startswith(tagname, '%s')" % tag_name[0:-1])
else:
filters.append("tagname eq '%s'" % tag_name)
if tag_value != '':
filters.append("tagvalue eq '%s'" % tag_value)
return ' and '.join(filters)
def _get_auth_provider_latest_api_version(cli_ctx):
    """Resolve the newest API version for Microsoft.Authorization providerOperations."""
    client = _resource_client_factory(cli_ctx)
    return _ResourceUtils.resolve_api_version(client, 'Microsoft.Authorization', None, 'providerOperations')
def _update_provider(cli_ctx, namespace, registering, wait):
    """(Un)register a resource provider, optionally polling until the state settles."""
    import time
    desired_state = 'Registered' if registering else 'Unregistered'
    client = _resource_client_factory(cli_ctx)
    if registering:
        result = client.providers.register(namespace)
    else:
        result = client.providers.unregister(namespace)
    if result.registration_state == desired_state:
        return
    if not wait:
        # Fire-and-forget: tell the user how to follow the operation.
        action = 'Registering' if registering else 'Unregistering'
        msg_template = '%s is still on-going. You can monitor using \'az provider show -n %s\''
        logger.warning(msg_template, action, namespace)
        return
    while True:
        time.sleep(10)
        if client.providers.get(namespace).registration_state == desired_state:
            break
def _build_policy_scope(subscription_id, resource_group_name, scope):
subscription_scope = '/subscriptions/' + subscription_id
if scope:
if resource_group_name:
err = "Resource group '{}' is redundant because 'scope' is supplied"
raise CLIError(err.format(resource_group_name))
elif resource_group_name:
scope = subscription_scope + '/resourceGroups/' + resource_group_name
else:
scope = subscription_scope
return scope
def _resolve_policy_id(cmd, policy, policy_set_definition, client):
    """Resolve a policy (set) definition name into its full resource id."""
    candidate = policy or policy_set_definition
    if is_valid_resource_id(candidate):
        return candidate
    if policy:
        return _get_custom_or_builtin_policy(cmd, client, policy).id
    return _get_custom_or_builtin_policy(cmd, client, policy_set_definition, None, None, True).id
def _parse_management_group_reference(name):
    """Split a management-group scoped id into (management_group, definition_name)."""
    if not _is_management_group_scope(name):
        return None, name
    segments = name.split('/')
    # Segment 4 is the management group, segment 8 the definition name.
    if len(segments) >= 9:
        return segments[4], segments[8]
    return None, name
def _parse_management_group_id(scope):
    """Extract the management group name from a management-group scope id, or None."""
    if _is_management_group_scope(scope):
        segments = scope.split('/')
        if len(segments) >= 5:
            return segments[4]
    return None
def _get_custom_or_builtin_policy(cmd, client, name, subscription=None, management_group=None, for_policy_set=False):
    """Fetch a policy (or policy set) definition by name, falling back to built-ins.

    Lookup order: management-group scoped definition (when a management group is
    given or encoded in the name), then the custom definition, then the built-in
    definition when the custom lookup returns 404.

    :raises IncorrectUsageError: when the built-in lookup fails with a 403
        AuthorizationFailed, which indicates the name matched no definition.
    """
    from msrest.exceptions import HttpOperationError
    from msrestazure.azure_exceptions import CloudError
    policy_operations = client.policy_set_definitions if for_policy_set else client.policy_definitions
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if subscription:
            # Retarget the client at the requested subscription.
            subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
            client.config.subscription_id = subscription_id
    try:
        if cmd.supported_api_version(min_api='2018-03-01'):
            if not management_group:
                # The name itself may be a management-group scoped resource id.
                management_group, name = _parse_management_group_reference(name)
            if management_group:
                return policy_operations.get_at_management_group(name, management_group)
        return policy_operations.get(name)
    except (CloudError, HttpOperationError) as ex:
        # CloudError exposes status_code directly; HttpOperationError nests it.
        status_code = ex.status_code if isinstance(ex, CloudError) else ex.response.status_code
        if status_code == 404:
            # Not a custom definition -- try the built-in catalog.
            try:
                return policy_operations.get_built_in(name)
            except CloudError as ex2:
                if ex2.status_code == 403 and ex2.error and ex2.error.error == 'AuthorizationFailed':
                    raise IncorrectUsageError('\'--policy\' should be a valid name or id of the policy definition')
                raise ex2
        raise
def _load_file_string_or_uri(file_or_string_or_uri, name, required=True):
if file_or_string_or_uri is None:
if required:
raise CLIError('--{} is required'.format(name))
return None
url = urlparse(file_or_string_or_uri)
if url.scheme == 'http' or url.scheme == 'https' or url.scheme == 'file':
response = urlopen(file_or_string_or_uri)
reader = codecs.getreader('utf-8')
result = json.load(reader(response))
response.close()
return result
if os.path.exists(file_or_string_or_uri):
return get_file_json(file_or_string_or_uri)
return shell_safe_json_parse(file_or_string_or_uri)
def _call_subscription_get(cmd, lock_client, *args):
if cmd.supported_api_version(max_api='2015-01-01'):
return lock_client.management_locks.get(*args)
return lock_client.management_locks.get_at_subscription_level(*args)
def _extract_lock_params(resource_group_name, resource_provider_namespace,
resource_type, resource_name):
if resource_group_name is None:
return (None, None, None, None)
if resource_name is None:
return (resource_group_name, None, None, None)
parts = resource_type.split('/', 2)
if resource_provider_namespace is None and len(parts) == 2:
resource_provider_namespace = parts[0]
resource_type = parts[1]
return (resource_group_name, resource_name, resource_provider_namespace, resource_type)
def _update_lock_parameters(parameters, level, notes):
if level is not None:
parameters.level = level
if notes is not None:
parameters.notes = notes
def _validate_resource_inputs(resource_group_name, resource_provider_namespace,
resource_type, resource_name):
if resource_group_name is None:
raise CLIError('--resource-group/-g is required.')
if resource_type is None:
raise CLIError('--resource-type is required')
if resource_name is None:
raise CLIError('--name/-n is required')
if resource_provider_namespace is None:
raise CLIError('--namespace is required')
def list_resource_groups(cmd, tag=None):
    """List resource groups, optionally filtering on a single tag.

    Bug fix: the first body statement was fused onto the ``def`` line
    (``def list_resource_groups(...): rcf = ...``) while the rest of the body
    remained an indented block, which raises an IndentationError at import time.

    :param cmd: CLI command context providing cli_ctx.
    :param tag: optional dict; its first key/value pair becomes tagname/tagvalue
        filters in the OData query.
    :return: list of resource groups matching the filter.
    """
    rcf = _resource_client_factory(cmd.cli_ctx)
    filters = []
    if tag:
        # Only one tag filter is supported by the service; use the first key.
        key = list(tag.keys())[0]
        filters.append("tagname eq '{}'".format(key))
        filters.append("tagvalue eq '{}'".format(tag[key]))
    filter_text = ' and '.join(filters) if filters else None
    groups = rcf.resource_groups.list(filter=filter_text)
    return list(groups)
def create_resource_group(cmd, rg_name, location, tags=None, managed_by=None):
    """Create (or update) a resource group."""
    client = _resource_client_factory(cmd.cli_ctx)
    ResourceGroup = cmd.get_models('ResourceGroup')
    group = ResourceGroup(
        location=location,
        tags=tags
    )
    if cmd.supported_api_version(min_api='2016-09-01'):
        # managed_by only exists on newer API versions.
        group.managed_by = managed_by
    return client.resource_groups.create_or_update(rg_name, group)
def update_resource_group(instance, tags=None):
    """Generic-update setter: replace the group's tags when a new value is supplied."""
    if tags is not None:
        instance.tags = tags
    return instance
def export_group_as_template(
        cmd, resource_group_name, include_comments=False, include_parameter_default_value=False, resource_ids=None, skip_resource_name_params=False, skip_all_params=False):
    """Export a resource group (or selected resources in it) as an ARM template.

    :param resource_ids: list of resource ids to export; None or ["*"] exports
        the whole group.
    :param include_comments / include_parameter_default_value /
        skip_resource_name_params / skip_all_params: mapped to the service's
        export options flags.
    :raises CLIError: when any supplied resource id is malformed.
    :return: the exported template object; export errors are logged as warnings.
    """
    rcf = _resource_client_factory(cmd.cli_ctx)
    export_options = []
    if include_comments:
        export_options.append('IncludeComments')
    if include_parameter_default_value:
        export_options.append('IncludeParameterDefaultValue')
    if skip_resource_name_params:
        export_options.append('SkipResourceNameParameterization')
    if skip_all_params:
        export_options.append('SkipAllParameterization')
    resources = []
    if resource_ids is None or resource_ids[0] == "*":
        # "*" asks the service to export every resource in the group.
        resources = ["*"]
    else:
        for i in resource_ids:
            if is_valid_resource_id(i):
                resources.append(i)
            else:
                raise CLIError('az resource: error: argument --resource-ids: invalid ResourceId value: \'%s\'' % i)
    options = ','.join(export_options) if export_options else None
    if cmd.supported_api_version(min_api='2019-08-01'):
        # Newer API versions run export as a long-running operation.
        result_poller = rcf.resource_groups.export_template(resource_group_name, resources, options=options)
        result = LongRunningOperation(cmd.cli_ctx)(result_poller)
    else:
        result = rcf.resource_groups.export_template(resource_group_name, resources, options=options)
    if result.error:
        # Partial exports still return a template; surface errors as warnings.
        error = result.error
        try:
            logger.warning(error.message)
        except AttributeError:
            logger.warning(str(error))
        for detail in getattr(error, 'details', None) or []:
            logger.error(detail.message)
    return result.template
def create_application(cmd, resource_group_name,
                       application_name, managedby_resource_group_id,
                       kind, managedapp_definition_id=None, location=None,
                       plan_name=None, plan_publisher=None, plan_product=None,
                       plan_version=None, tags=None, parameters=None):
    """Create a managed application.

    For kind 'ServiceCatalog' a definition id is required; for 'MarketPlace'
    all four plan_* values are required. `parameters` may be a file path or an
    inline JSON string.
    :raises CLIError: when kind-specific required arguments are missing.
    """
    from azure.mgmt.resource.managedapplications.models import Application, Plan
    racf = _resource_managedapps_client_factory(cmd.cli_ctx)
    rcf = _resource_client_factory(cmd.cli_ctx)
    if not location:
        # Default to the resource group's location.
        location = rcf.resource_groups.get(resource_group_name).location
    application = Application(
        location=location,
        managed_resource_group_id=managedby_resource_group_id,
        kind=kind,
        tags=tags
    )
    if kind.lower() == 'servicecatalog':
        if managedapp_definition_id:
            application.application_definition_id = managedapp_definition_id
        else:
            raise CLIError('--managedapp-definition-id is required if kind is ServiceCatalog')
    elif kind.lower() == 'marketplace':
        if (plan_name is None and plan_product is None and
                plan_publisher is None and plan_version is None):
            raise CLIError('--plan-name, --plan-product, --plan-publisher and \
--plan-version are all required if kind is MarketPlace')
        application.plan = Plan(name=plan_name, publisher=plan_publisher, product=plan_product, version=plan_version)
    applicationParameters = None
    if parameters:
        # Accept either a file path or an inline JSON string.
        if os.path.exists(parameters):
            applicationParameters = get_file_json(parameters)
        else:
            applicationParameters = shell_safe_json_parse(parameters)
    application.parameters = applicationParameters
    return racf.applications.create_or_update(resource_group_name, application_name, application)
def show_application(cmd, resource_group_name=None, application_name=None):
    """Show a managed application."""
    client = _resource_managedapps_client_factory(cmd.cli_ctx)
    return client.applications.get(resource_group_name, application_name)
def show_applicationdefinition(cmd, resource_group_name=None, application_definition_name=None):
    """Show a managed application definition."""
    client = _resource_managedapps_client_factory(cmd.cli_ctx)
    return client.application_definitions.get(resource_group_name, application_definition_name)
def create_applicationdefinition(cmd, resource_group_name,
                                 application_definition_name,
                                 lock_level, authorizations,
                                 description, display_name,
                                 package_file_uri=None, create_ui_definition=None,
                                 main_template=None, location=None, tags=None):
    """Create a managed application definition.

    Either package_file_uri OR (create_ui_definition AND main_template) must be
    supplied, never both. Each authorization is a 'principalId:roleDefinitionId'
    pair.
    :raises CLIError: when the template-source arguments are inconsistent.
    """
    from azure.mgmt.resource.managedapplications.models import ApplicationDefinition, ApplicationProviderAuthorization
    if not package_file_uri and not create_ui_definition and not main_template:
        raise CLIError('usage error: --package-file-uri <url> | --create-ui-definition --main-template')
    if package_file_uri:
        if create_ui_definition or main_template:
            raise CLIError('usage error: must not specify --create-ui-definition --main-template')
    if not package_file_uri:
        if not create_ui_definition or not main_template:
            raise CLIError('usage error: must specify --create-ui-definition --main-template')
    racf = _resource_managedapps_client_factory(cmd.cli_ctx)
    rcf = _resource_client_factory(cmd.cli_ctx)
    if not location:
        # Default to the resource group's location.
        location = rcf.resource_groups.get(resource_group_name).location
    authorizations = authorizations or []
    applicationAuthList = []
    for name_value in authorizations:
        # Split only on the first ':' -- role definition ids may contain more.
        principalId, roleDefinitionId = name_value.split(':', 1)
        applicationAuth = ApplicationProviderAuthorization(
            principal_id=principalId,
            role_definition_id=roleDefinitionId)
        applicationAuthList.append(applicationAuth)
    applicationDef = ApplicationDefinition(lock_level=lock_level,
                                           authorizations=applicationAuthList,
                                           package_file_uri=package_file_uri)
    applicationDef.display_name = display_name
    applicationDef.description = description
    applicationDef.location = location
    applicationDef.package_file_uri = package_file_uri
    applicationDef.create_ui_definition = create_ui_definition
    applicationDef.main_template = main_template
    applicationDef.tags = tags
    return racf.application_definitions.create_or_update(resource_group_name,
                                                         application_definition_name, applicationDef)
def list_applications(cmd, resource_group_name=None):
    """List managed applications in a resource group, or across the subscription."""
    client = _resource_managedapps_client_factory(cmd.cli_ctx)
    if resource_group_name:
        return list(client.applications.list_by_resource_group(resource_group_name))
    return list(client.applications.list_by_subscription())
def list_deployments_at_subscription_scope(cmd, filter_string=None):
    """List subscription-scope deployments, optionally filtered."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.list_at_subscription_scope(filter=filter_string)
def list_deployments_at_resource_group(cmd, resource_group_name, filter_string=None):
    """List deployments in a resource group, optionally filtered."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.list_by_resource_group(resource_group_name, filter=filter_string)
def list_deployments_at_management_group(cmd, management_group_id, filter_string=None):
    """List management-group-scope deployments, optionally filtered."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.list_at_management_group_scope(management_group_id, filter=filter_string)
def list_deployments_at_tenant_scope(cmd, filter_string=None):
    """List tenant-scope deployments, optionally filtered."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.list_at_tenant_scope(filter=filter_string)
def get_deployment_at_subscription_scope(cmd, deployment_name):
    """Show a subscription-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.get_at_subscription_scope(deployment_name)
def get_deployment_at_resource_group(cmd, resource_group_name, deployment_name):
    """Show a resource-group deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.get(resource_group_name, deployment_name)
def get_deployment_at_management_group(cmd, management_group_id, deployment_name):
    """Show a management-group-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.get_at_management_group_scope(management_group_id, deployment_name)
def get_deployment_at_tenant_scope(cmd, deployment_name):
    """Show a tenant-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.get_at_tenant_scope(deployment_name)
def delete_deployment_at_subscription_scope(cmd, deployment_name):
    """Delete a subscription-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.delete_at_subscription_scope(deployment_name)
def delete_deployment_at_resource_group(cmd, resource_group_name, deployment_name):
    """Delete a resource-group deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.delete(resource_group_name, deployment_name)
def delete_deployment_at_management_group(cmd, management_group_id, deployment_name):
    """Delete a management-group-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.delete_at_management_group_scope(management_group_id, deployment_name)
def delete_deployment_at_tenant_scope(cmd, deployment_name):
    """Delete a tenant-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.delete_at_tenant_scope(deployment_name)
def cancel_deployment_at_subscription_scope(cmd, deployment_name):
    """Cancel a running subscription-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.cancel_at_subscription_scope(deployment_name)
def cancel_deployment_at_resource_group(cmd, resource_group_name, deployment_name):
    """Cancel a running resource-group deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.cancel(resource_group_name, deployment_name)
def cancel_deployment_at_management_group(cmd, management_group_id, deployment_name):
    """Cancel a running management-group-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.cancel_at_management_group_scope(management_group_id, deployment_name)
def cancel_deployment_at_tenant_scope(cmd, deployment_name):
    """Cancel a running tenant-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployments.cancel_at_tenant_scope(deployment_name)
def deploy_arm_template(cmd, resource_group_name,
                        template_file=None, template_uri=None, deployment_name=None,
                        parameters=None, mode=None, rollback_on_error=None, no_wait=False,
                        handle_extended_json_format=None, aux_subscriptions=None, aux_tenants=None,
                        no_prompt=False):
    """Deploy an ARM template to a resource group (delegates to the core helper).

    NOTE(review): handle_extended_json_format is accepted but not forwarded --
    presumably kept for backward compatibility.
    """
    return _deploy_arm_template_core_unmodified(
        cmd, resource_group_name=resource_group_name,
        template_file=template_file, template_uri=template_uri,
        deployment_name=deployment_name, parameters=parameters, mode=mode,
        rollback_on_error=rollback_on_error, no_wait=no_wait,
        aux_subscriptions=aux_subscriptions, aux_tenants=aux_tenants,
        no_prompt=no_prompt)
def validate_arm_template(cmd, resource_group_name, template_file=None, template_uri=None,
                          parameters=None, mode=None, rollback_on_error=None, handle_extended_json_format=None,
                          no_prompt=False):
    """Validate an ARM template against a resource group without deploying it."""
    # 'deployment_dry_run' is the placeholder deployment name used for validation.
    return _deploy_arm_template_core_unmodified(
        cmd, resource_group_name, template_file, template_uri,
        'deployment_dry_run', parameters, mode, rollback_on_error,
        validate_only=True, no_prompt=no_prompt)
def export_template_at_subscription_scope(cmd, deployment_name):
    """Print the template of a subscription-scope deployment as JSON."""
    client = _resource_client_factory(cmd.cli_ctx)
    exported = client.deployments.export_template_at_subscription_scope(deployment_name)
    print(json.dumps(exported.template, indent=2))
def export_template_at_resource_group(cmd, resource_group_name, deployment_name):
    """Print the template of a resource-group deployment as JSON."""
    client = _resource_client_factory(cmd.cli_ctx)
    exported = client.deployments.export_template(resource_group_name, deployment_name)
    print(json.dumps(exported.template, indent=2))
def export_template_at_management_group(cmd, management_group_id, deployment_name):
    """Print the template of a management-group-scope deployment as JSON."""
    client = _resource_client_factory(cmd.cli_ctx)
    exported = client.deployments.export_template_at_management_group_scope(management_group_id, deployment_name)
    print(json.dumps(exported.template, indent=2))
def export_template_at_tenant_scope(cmd, deployment_name):
    """Print the template of a tenant-scope deployment as JSON."""
    client = _resource_client_factory(cmd.cli_ctx)
    exported = client.deployments.export_template_at_tenant_scope(deployment_name)
    print(json.dumps(exported.template, indent=2))
def export_deployment_as_template(cmd, resource_group_name, deployment_name):
    """Print the template of a resource-group deployment as JSON (legacy command)."""
    client = _resource_client_factory(cmd.cli_ctx)
    exported = client.deployments.export_template(resource_group_name, deployment_name)
    print(json.dumps(exported.template, indent=2))
def create_resource(cmd, properties,
                    resource_group_name=None, resource_provider_namespace=None,
                    parent_resource_path=None, resource_type=None, resource_name=None,
                    resource_id=None, api_version=None, location=None, is_full_object=False,
                    latest_include_preview=False):
    """Create a generic resource from a JSON properties payload."""
    util = _ResourceUtils(cmd.cli_ctx, resource_group_name, resource_provider_namespace,
                          parent_resource_path, resource_type, resource_name,
                          resource_id, api_version, latest_include_preview=latest_include_preview)
    return util.create_resource(properties, location, is_full_object)
def _get_parsed_resource_ids(resource_ids):
if not resource_ids:
return None
for rid in resource_ids:
if not is_valid_resource_id(rid):
raise CLIError('az resource: error: argument --ids: invalid ResourceId value: \'%s\'' % rid)
return ({'resource_id': rid} for rid in resource_ids)
def _get_rsrc_util_from_parsed_id(cli_ctx, parsed_id, api_version, latest_include_preview=False):
    """Build a _ResourceUtils helper from a parsed resource-id dict."""
    return _ResourceUtils(
        cli_ctx,
        parsed_id.get('resource_group'),
        parsed_id.get('resource_namespace'),
        parsed_id.get('resource_parent'),
        parsed_id.get('resource_type'),
        parsed_id.get('resource_name'),
        parsed_id.get('resource_id'),
        api_version,
        latest_include_preview=latest_include_preview)
def _create_parsed_id(cli_ctx, resource_group_name=None, resource_provider_namespace=None, parent_resource_path=None,
                      resource_type=None, resource_name=None):
    """Assemble a parsed-id dict from individual parts plus the current subscription."""
    from azure.cli.core.commands.client_factory import get_subscription_id
    return {
        'resource_group': resource_group_name,
        'resource_namespace': resource_provider_namespace,
        'resource_parent': parent_resource_path,
        'resource_type': resource_type,
        'resource_name': resource_name,
        'subscription': get_subscription_id(cli_ctx)
    }
def _single_or_collection(obj, default=None):
if not obj:
return default
if isinstance(obj, list) and len(obj) == 1:
return obj[0]
return obj
def show_resource(cmd, resource_ids=None, resource_group_name=None,
                  resource_provider_namespace=None, parent_resource_path=None, resource_type=None,
                  resource_name=None, api_version=None, include_response_body=False, latest_include_preview=False):
    """Get one resource per id, or a single resource assembled from its parts."""
    parsed_ids = _get_parsed_resource_ids(resource_ids) or [
        _create_parsed_id(cmd.cli_ctx, resource_group_name, resource_provider_namespace,
                          parent_resource_path, resource_type, resource_name)]
    results = [
        _get_rsrc_util_from_parsed_id(cmd.cli_ctx, parsed, api_version,
                                      latest_include_preview).get_resource(include_response_body)
        for parsed in parsed_ids]
    return _single_or_collection(results)
def delete_resource(cmd, resource_ids=None, resource_group_name=None,
                    resource_provider_namespace=None, parent_resource_path=None, resource_type=None,
                    resource_name=None, api_version=None, latest_include_preview=False):
    """Delete one or more resources, retrying failures across passes.

    Deletions that fail with a CloudError are retried on the next pass (this
    tolerates inter-resource dependency ordering); the loop stops when a whole
    pass makes no progress.
    :raises CLIError: listing the resources that could not be deleted.
    :return: the delete operation result(s), collapsed to a single value when
        only one resource was targeted.
    """
    parsed_ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
                                                                              resource_group_name,
                                                                              resource_provider_namespace,
                                                                              parent_resource_path,
                                                                              resource_type,
                                                                              resource_name)]
    to_be_deleted = [(_get_rsrc_util_from_parsed_id(cmd.cli_ctx, id_dict, api_version, latest_include_preview), id_dict)
                     for id_dict in parsed_ids]
    results = []
    from msrestazure.azure_exceptions import CloudError
    while to_be_deleted:
        logger.debug("Start new loop to delete resources.")
        operations = []
        failed_to_delete = []
        for rsrc_utils, id_dict in to_be_deleted:
            try:
                operations.append(rsrc_utils.delete())
                resource = _build_resource_id(**id_dict) or resource_name
                logger.debug("deleting %s", resource)
            except CloudError as e:
                # Keep the failure for the next pass; it may succeed once its
                # dependents are gone.
                id_dict['exception'] = str(e)
                failed_to_delete.append((rsrc_utils, id_dict))
        to_be_deleted = failed_to_delete
        if not operations:
            # No deletion made progress this pass; remaining failures are final.
            break
        for operation in operations:
            # Wait for each started deletion to finish before the next pass.
            results.append(operation.result())
    if to_be_deleted:
        error_msg_builder = ['Some resources failed to be deleted (run with `--verbose` for more information):']
        for _, id_dict in to_be_deleted:
            logger.info(id_dict['exception'])
            resource_id = _build_resource_id(**id_dict) or id_dict['resource_id']
            error_msg_builder.append(resource_id)
        raise CLIError(os.linesep.join(error_msg_builder))
    return _single_or_collection(results)
def update_resource(cmd, parameters, resource_ids=None,
                    resource_group_name=None, resource_provider_namespace=None,
                    parent_resource_path=None, resource_type=None, resource_name=None, api_version=None,
                    latest_include_preview=False):
    """Update one resource per id, or a single resource assembled from its parts."""
    parsed_ids = _get_parsed_resource_ids(resource_ids) or [
        _create_parsed_id(cmd.cli_ctx, resource_group_name, resource_provider_namespace,
                          parent_resource_path, resource_type, resource_name)]
    results = [
        _get_rsrc_util_from_parsed_id(cmd.cli_ctx, parsed, api_version,
                                      latest_include_preview).update(parameters)
        for parsed in parsed_ids]
    return _single_or_collection(results)
def tag_resource(cmd, tags, resource_ids=None, resource_group_name=None, resource_provider_namespace=None,
                 parent_resource_path=None, resource_type=None, resource_name=None, api_version=None,
                 is_incremental=None, latest_include_preview=False):
    """Tag one resource per id, or a single resource assembled from its parts."""
    parsed_ids = _get_parsed_resource_ids(resource_ids) or [
        _create_parsed_id(cmd.cli_ctx, resource_group_name, resource_provider_namespace,
                          parent_resource_path, resource_type, resource_name)]
    results = [
        _get_rsrc_util_from_parsed_id(cmd.cli_ctx, parsed, api_version,
                                      latest_include_preview).tag(tags, is_incremental)
        for parsed in parsed_ids]
    return _single_or_collection(results)
def invoke_resource_action(cmd, action, request_body=None, resource_ids=None,
                           resource_group_name=None, resource_provider_namespace=None,
                           parent_resource_path=None, resource_type=None, resource_name=None,
                           api_version=None, latest_include_preview=False):
    """Invoke an arbitrary POST action on one resource per id (or a single resource)."""
    parsed_ids = _get_parsed_resource_ids(resource_ids) or [
        _create_parsed_id(cmd.cli_ctx, resource_group_name, resource_provider_namespace,
                          parent_resource_path, resource_type, resource_name)]
    results = [
        _get_rsrc_util_from_parsed_id(cmd.cli_ctx, parsed, api_version,
                                      latest_include_preview).invoke_action(action, request_body)
        for parsed in parsed_ids]
    return _single_or_collection(results)
def get_deployment_operations(client, resource_group_name, deployment_name, operation_ids):
    """Fetch the listed operations of a resource-group deployment, in order."""
    return [client.get(resource_group_name, deployment_name, op_id)
            for op_id in operation_ids]
def get_deployment_operations_at_subscription_scope(client, deployment_name, operation_ids):
    """Fetch the listed operations of a subscription-scope deployment, in order."""
    return [client.get_at_subscription_scope(deployment_name, op_id)
            for op_id in operation_ids]
def get_deployment_operations_at_resource_group(client, resource_group_name, deployment_name, operation_ids):
    """Fetch the listed operations of a resource-group-scope deployment, in order."""
    return [client.get(resource_group_name, deployment_name, op_id)
            for op_id in operation_ids]
def get_deployment_operations_at_management_group(client, management_group_id, deployment_name, operation_ids):
    """Fetch the listed operations of a management-group-scope deployment, in order."""
    return [client.get_at_management_group_scope(management_group_id, deployment_name, op_id)
            for op_id in operation_ids]
def get_deployment_operations_at_tenant_scope(client, deployment_name, operation_ids):
    """Fetch the listed operations of a tenant-scope deployment, in order."""
    return [client.get_at_tenant_scope(deployment_name, op_id)
            for op_id in operation_ids]
def list_deployment_scripts(cmd, resource_group_name=None):
    """List deployment scripts in a resource group, or subscription-wide when no group is given."""
    client = _resource_deploymentscripts_client_factory(cmd.cli_ctx)
    if resource_group_name is None:
        return client.deployment_scripts.list_by_subscription()
    return client.deployment_scripts.list_by_resource_group(resource_group_name)
def get_deployment_script(cmd, resource_group_name, name):
    """Show a single deployment script."""
    client = _resource_deploymentscripts_client_factory(cmd.cli_ctx)
    return client.deployment_scripts.get(resource_group_name, name)
def get_deployment_script_logs(cmd, resource_group_name, name):
    """Return the execution logs of a deployment script."""
    client = _resource_deploymentscripts_client_factory(cmd.cli_ctx)
    return client.deployment_scripts.get_logs(resource_group_name, name)
def delete_deployment_script(cmd, resource_group_name, name):
    """Delete a deployment script. Returns nothing."""
    client = _resource_deploymentscripts_client_factory(cmd.cli_ctx)
    client.deployment_scripts.delete(resource_group_name, name)
def get_template_spec(cmd, resource_group_name=None, name=None, version=None, template_spec=None):
    """Show a template spec, or a specific version of one.

    When *template_spec* (a full resource ID) is supplied, it overrides the
    individual name/group/version arguments.
    """
    if template_spec:
        parts = parse_resource_id(template_spec)
        resource_group_name = parts.get('resource_group')
        name = parts.get('name')
        version = parts.get('resource_name')
        # When the ID addresses the root spec (no version segment), the
        # trailing resource name equals the spec name.
        if version == name:
            version = None
    client = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if version:
        return client.template_spec_versions.get(resource_group_name, name, version)
    return client.template_specs.get(resource_group_name, name)
def create_template_spec(cmd, resource_group_name, name, template_file=None, location=None, display_name=None,
                         description=None, version=None, version_description=None):
    """Create a template spec, or a version of one when *version* is given.

    When creating a version, the parent template spec is created first if it
    does not already exist. *location* defaults to the resource group's
    location when omitted.
    """
    artifacts = None
    input_template = None
    if location is None:
        rcf = _resource_client_factory(cmd.cli_ctx)
        location = rcf.resource_groups.get(resource_group_name).location
    rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if version:
        if template_file:
            from azure.cli.command_modules.resource._packing_engine import (pack)
            packed_template = pack(cmd, template_file)
            input_template = getattr(packed_template, 'RootTemplate')
            artifacts = getattr(packed_template, 'Artifacts')
        try:
            # Probe for the parent template spec; a failed GET means it does
            # not exist yet and must be created before the version.
            rcf.template_specs.get(resource_group_name=resource_group_name, template_spec_name=name)
        except Exception:  # pylint: disable=broad-except
            # BUG FIX: the parent-spec creation belongs inside this except
            # block so it only runs when the parent is missing; previously it
            # executed unconditionally.
            TemplateSpec = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpec', mod='models')
            template_spec_parent = TemplateSpec(location=location, description=description, display_name=display_name, tags=None)
            rcf.template_specs.create_or_update(resource_group_name, name, template_spec_parent)
        TemplateSpecVersion = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpecVersion', mod='models')
        template_spec_child = TemplateSpecVersion(location=location, artifacts=artifacts, description=version_description, template=input_template, tags=None)
        return rcf.template_spec_versions.create_or_update(resource_group_name, name, version, template_spec_child)
    # No version requested: create/update just the root template spec.
    TemplateSpec = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpec', mod='models')
    template_spec_parent = TemplateSpec(location=location, description=description, display_name=display_name, tags=None)
    return rcf.template_specs.create_or_update(resource_group_name, name, template_spec_parent)
def update_template_spec(cmd, resource_group_name=None, name=None, template_spec=None, template_file=None, display_name=None,
                         description=None, version=None, version_description=None):
    """Update a template spec or one of its versions.

    Fields not supplied by the caller are carried over from the existing
    resource. *template_spec* (a full resource ID) overrides the individual
    name/group/version arguments.
    """
    client = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if template_spec:
        parts = parse_resource_id(template_spec)
        resource_group_name = parts.get('resource_group')
        name = parts.get('name')
        version = parts.get('resource_name')
        if version == name:  # ID addressed the root spec, not a version
            version = None
    existing_template = None
    artifacts = None
    if template_file:
        from azure.cli.command_modules.resource._packing_engine import (pack)
        packed_template = pack(cmd, template_file)
        input_template = getattr(packed_template, 'RootTemplate')
        artifacts = getattr(packed_template, 'Artifacts')
    if version:
        existing_template = client.template_spec_versions.get(
            resource_group_name=resource_group_name, template_spec_name=name, template_spec_version=version)
        location = getattr(existing_template, 'location')
        version_tags = getattr(existing_template, 'tags')
        if version_description is None:
            version_description = getattr(existing_template, 'description')
        if template_file is None:
            # Keep the currently stored template when no new file was given.
            input_template = getattr(existing_template, 'template')
        TemplateSpecVersion = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpecVersion', mod='models')
        updated_template_spec = TemplateSpecVersion(location=location, artifacts=artifacts,
                                                    description=version_description,
                                                    template=input_template, tags=version_tags)
        return client.template_spec_versions.create_or_update(resource_group_name, name, version, updated_template_spec)
    existing_template = client.template_specs.get(resource_group_name=resource_group_name, template_spec_name=name)
    location = getattr(existing_template, 'location')
    tags = getattr(existing_template, 'tags')
    if display_name is None:
        display_name = getattr(existing_template, 'display_name')
    if description is None:
        description = getattr(existing_template, 'description')
    TemplateSpec = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpec', mod='models')
    root_template = TemplateSpec(location=location, description=description, display_name=display_name, tags=tags)
    return client.template_specs.create_or_update(resource_group_name, name, root_template)
def export_template_spec(cmd, output_folder, resource_group_name=None, name=None, version=None, template_spec=None):
    """Export a template spec (or a specific version) as JSON files under *output_folder*."""
    client = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if template_spec:
        parts = parse_resource_id(template_spec)
        resource_group_name = parts.get('resource_group')
        name = parts.get('name')
        version = parts.get('resource_name')
        if version == name:  # ID addressed the root spec, not a version
            version = None
    if version:
        exported_template = client.template_spec_versions.get(resource_group_name, name, version)
    else:
        exported_template = client.template_specs.get(resource_group_name, name)
    from azure.cli.command_modules.resource._packing_engine import (unpack)
    return unpack(cmd, exported_template, output_folder, (str(name) + '.JSON'))
def delete_template_spec(cmd, resource_group_name=None, name=None, version=None, template_spec=None):
    """Delete a template spec, or just one of its versions when *version* is given.

    *template_spec* (a full resource ID) overrides the individual
    name/group/version arguments.
    """
    rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if template_spec:
        id_parts = parse_resource_id(template_spec)
        resource_group_name = id_parts.get('resource_group')
        name = id_parts.get('name')
        version = id_parts.get('resource_name')
        if version == name:  # ID addressed the root spec, not a version
            version = None
    if version:
        # BUG FIX: deleting a single version must go through the
        # template_spec_versions operations group; template_specs.delete does
        # not accept a template_spec_version argument (and would delete the
        # whole spec if it did not raise).
        return rcf.template_spec_versions.delete(resource_group_name=resource_group_name,
                                                 template_spec_name=name,
                                                 template_spec_version=version)
    return rcf.template_specs.delete(resource_group_name=resource_group_name, template_spec_name=name)
def list_template_specs(cmd, resource_group_name=None, name=None):
    """List template specs, or the versions of one spec.

    With no group: all specs in the subscription. With a group only: specs in
    that group. With group and name: the versions of that spec.
    """
    client = _resource_templatespecs_client_factory(cmd.cli_ctx)
    if resource_group_name is None:
        return client.template_specs.list_by_subscription()
    if name is None:
        return client.template_specs.list_by_resource_group(resource_group_name)
    return client.template_spec_versions.list(resource_group_name=resource_group_name, template_spec_name=name)
def list_deployment_operations_at_subscription_scope(cmd, deployment_name):
    """List all operations of a subscription-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployment_operations.list_at_subscription_scope(deployment_name)
def list_deployment_operations_at_resource_group(cmd, resource_group_name, deployment_name):
    """List all operations of a resource-group-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployment_operations.list(resource_group_name, deployment_name)
def list_deployment_operations_at_management_group(cmd, management_group_id, deployment_name):
    """List all operations of a management-group-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployment_operations.list_at_management_group_scope(management_group_id, deployment_name)
def list_deployment_operations_at_tenant_scope(cmd, deployment_name):
    """List all operations of a tenant-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployment_operations.list_at_tenant_scope(deployment_name)
def get_deployment_operation_at_subscription_scope(cmd, deployment_name, op_id):
    """Show one operation of a subscription-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployment_operations.get_at_subscription_scope(deployment_name, op_id)
def get_deployment_operation_at_resource_group(cmd, resource_group_name, deployment_name, op_id):
    """Show one operation of a resource-group-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployment_operations.get(resource_group_name, deployment_name, op_id)
def get_deployment_operation_at_management_group(cmd, management_group_id, deployment_name, op_id):
    """Show one operation of a management-group-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployment_operations.get_at_management_group_scope(management_group_id, deployment_name, op_id)
def get_deployment_operation_at_tenant_scope(cmd, deployment_name, op_id):
    """Show one operation of a tenant-scope deployment."""
    client = _resource_client_factory(cmd.cli_ctx)
    return client.deployment_operations.get_at_tenant_scope(deployment_name, op_id)
def list_resources(cmd, resource_group_name=None,
                   resource_provider_namespace=None, resource_type=None, name=None, tag=None,
                   location=None):
    """List resources, optionally filtered by group, provider, type, name, tag or location."""
    client = _resource_client_factory(cmd.cli_ctx)
    if resource_group_name is not None:
        # Fail fast with a clear error if the resource group does not exist.
        client.resource_groups.get(resource_group_name)
    odata_filter = _list_resources_odata_filter_builder(resource_group_name,
                                                        resource_provider_namespace,
                                                        resource_type, name, tag, location)
    expand = "createdTime,changedTime,provisioningState"
    return list(client.resources.list(filter=odata_filter, expand=expand))
def register_provider(cmd, resource_provider_namespace, wait=False):
    """Register a resource provider; with *wait*, block until registration completes."""
    _update_provider(cmd.cli_ctx, resource_provider_namespace, registering=True, wait=wait)
def unregister_provider(cmd, resource_provider_namespace, wait=False):
    """Unregister a resource provider; with *wait*, block until unregistration completes."""
    _update_provider(cmd.cli_ctx, resource_provider_namespace, registering=False, wait=wait)
def list_provider_operations(cmd):
    """List operations metadata for all resource providers."""
    return _authorization_management_client(cmd.cli_ctx).provider_operations_metadata.list()
def show_provider_operations(cmd, resource_provider_namespace):
    """Show operations metadata for one resource provider."""
    version = getattr(get_api_version(cmd.cli_ctx, ResourceType.MGMT_AUTHORIZATION), 'provider_operations_metadata')
    auth_client = _authorization_management_client(cmd.cli_ctx)
    # The 2015-07-01 SDK surface requires the api version to be passed explicitly.
    if version == '2015-07-01':
        return auth_client.provider_operations_metadata.get(resource_provider_namespace, version)
    return auth_client.provider_operations_metadata.get(resource_provider_namespace)
def move_resource(cmd, ids, destination_group, destination_subscription_id=None):
    """Move the given resources into another resource group (optionally another subscription).

    All *ids* must be valid resource IDs and must share one subscription and
    one resource group; otherwise a CLIError is raised.
    """
    parsed = []
    for i in ids:
        if not is_valid_resource_id(i):
            raise CLIError('Invalid id "{}", as it has no group or subscription field'.format(i))
        parsed.append(parse_resource_id(i))
    if len({r['subscription'] for r in parsed}) > 1:
        raise CLIError('All resources should be under the same subscription')
    if len({r['resource_group'] for r in parsed}) > 1:
        raise CLIError('All resources should be under the same group')
    client = _resource_client_factory(cmd.cli_ctx)
    target = _build_resource_id(subscription=(destination_subscription_id or client.config.subscription_id),
                                resource_group=destination_group)
    return client.resources.move_resources(parsed[0]['resource_group'], ids, target)
def list_features(client, resource_provider_namespace=None):
    """List preview features, optionally scoped to one resource provider."""
    if not resource_provider_namespace:
        return client.list_all()
    return client.list(resource_provider_namespace=resource_provider_namespace)
def register_feature(client, resource_provider_namespace, feature_name):
    """Register a preview feature, warning that the provider must be re-registered afterwards."""
    logger.warning("Once the feature '%s' is registered, invoking 'az provider register -n %s' is required "
                   "to get the change propagated", feature_name, resource_provider_namespace)
    return client.register(resource_provider_namespace, feature_name)
def unregister_feature(client, resource_provider_namespace, feature_name):
    """Unregister a preview feature, warning that the provider must be re-registered afterwards."""
    logger.warning("Once the feature '%s' is unregistered, invoking 'az provider register -n %s' is required "
                   "to get the change propagated", feature_name, resource_provider_namespace)
    return client.unregister(resource_provider_namespace, feature_name)
def create_policy_assignment(cmd, policy=None, policy_set_definition=None,
                             name=None, display_name=None, params=None,
                             resource_group_name=None, scope=None, sku=None,
                             not_scopes=None, location=None, assign_identity=None,
                             identity_scope=None, identity_role='Contributor', enforcement_mode='Default'):
    """Create a policy assignment at the resolved scope.

    Exactly one of *policy* / *policy_set_definition* must be given. Optional
    features (not-scopes, sku, location, managed identity) are applied only
    when the service API version supports them. When no *name* is supplied a
    random URL-safe one is generated.
    """
    if bool(policy) == bool(policy_set_definition):
        raise CLIError('usage error: --policy NAME_OR_ID | '
                       '--policy-set-definition NAME_OR_ID')
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    scope = _build_policy_scope(policy_client.config.subscription_id,
                                resource_group_name, scope)
    policy_id = _resolve_policy_id(cmd, policy, policy_set_definition, policy_client)
    params = _load_file_string_or_uri(params, 'params', False)
    PolicyAssignment = cmd.get_models('PolicyAssignment')
    assignment = PolicyAssignment(display_name=display_name, policy_definition_id=policy_id,
                                  scope=scope, enforcement_mode=enforcement_mode)
    assignment.parameters = params if params else None
    if cmd.supported_api_version(min_api='2017-06-01-preview'):
        if not_scopes:
            valid_not_scopes = []
            for id_arg in not_scopes.split(' '):
                if not parse_resource_id(id_arg):
                    logger.error('az policy assignment create error: argument --not-scopes: \
invalid notscopes value: \'%s\'', id_arg)
                    return
                valid_not_scopes.append(id_arg)
            assignment.not_scopes = valid_not_scopes
        PolicySku = cmd.get_models('PolicySku')
        policySku = PolicySku(name='A0', tier='Free')
        if sku:
            policySku = policySku if sku.lower() == 'free' else PolicySku(name='A1', tier='Standard')
        assignment.sku = policySku
    if cmd.supported_api_version(min_api='2018-05-01'):
        if location:
            assignment.location = location
        identity = None
        if assign_identity is not None:
            identity = _build_identities_info(cmd, assign_identity)
        assignment.identity = identity
    if name is None:
        # Random, URL-safe assignment name (base64 of a UUID, padding trimmed).
        name = (base64.urlsafe_b64encode(uuid.uuid4().bytes).decode())[:-2]
    createdAssignment = policy_client.policy_assignments.create(scope, name, assignment)
    if assign_identity is not None and identity_scope:
        from azure.cli.core.commands.arm import assign_identity as _assign_identity_helper
        _assign_identity_helper(cmd.cli_ctx, lambda: createdAssignment, lambda resource: createdAssignment,
                                identity_role, identity_scope)
    return createdAssignment
def _build_identities_info(cmd, identities):
    """Build the Identity model for an assignment.

    Defaults to system-assigned when no identities are given or when the
    local MSI sentinel is present; otherwise the type stays 'none'.
    """
    identities = identities or []
    ResourceIdentityType = cmd.get_models('ResourceIdentityType')
    if not identities or MSI_LOCAL_ID in identities:
        identity_type = ResourceIdentityType.system_assigned
    else:
        identity_type = ResourceIdentityType.none
    ResourceIdentity = cmd.get_models('Identity')
    return ResourceIdentity(type=identity_type)
def delete_policy_assignment(cmd, name, resource_group_name=None, scope=None):
    """Delete a policy assignment at the resolved scope. Returns nothing."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    resolved_scope = _build_policy_scope(client.config.subscription_id,
                                         resource_group_name, scope)
    client.policy_assignments.delete(resolved_scope, name)
def show_policy_assignment(cmd, name, resource_group_name=None, scope=None):
    """Show a policy assignment at the resolved scope."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    resolved_scope = _build_policy_scope(client.config.subscription_id,
                                         resource_group_name, scope)
    return client.policy_assignments.get(resolved_scope, name)
def list_policy_assignment(cmd, disable_scope_strict_match=None, resource_group_name=None, scope=None):
    """List policy assignments at the resolved scope.

    The scope may be a management group, an individual resource, a resource
    group, or the subscription. Unless *disable_scope_strict_match* is set,
    results are filtered down to assignments whose scope matches exactly.
    """
    from azure.cli.core.commands.client_factory import get_subscription_id
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    _scope = _build_policy_scope(get_subscription_id(cmd.cli_ctx),
                                 resource_group_name, scope)
    id_parts = parse_resource_id(_scope)
    subscription = id_parts.get('subscription')
    resource_group = id_parts.get('resource_group')
    resource_type = id_parts.get('child_type_1') or id_parts.get('type')
    resource_name = id_parts.get('child_name_1') or id_parts.get('name')
    management_group = _parse_management_group_id(scope)
    if management_group:
        result = policy_client.policy_assignments.list_for_management_group(
            management_group_id=management_group, filter='atScope()')
    elif all([resource_type, resource_group, subscription]):
        # Scope addresses an individual resource (possibly a child resource).
        namespace = id_parts.get('namespace')
        parent_resource_path = '' if not id_parts.get('child_name_1') else (id_parts['type'] + '/' + id_parts['name'])
        result = policy_client.policy_assignments.list_for_resource(
            resource_group, namespace,
            parent_resource_path, resource_type, resource_name)
    elif resource_group:
        result = policy_client.policy_assignments.list_for_resource_group(resource_group)
    elif subscription:
        result = policy_client.policy_assignments.list()
    elif scope:
        raise CLIError('usage error `--scope`: must be a fully qualified ARM ID.')
    else:
        raise CLIError('usage error: --scope ARM_ID | --resource-group NAME')
    if not disable_scope_strict_match:
        result = [i for i in result if _scope.lower().strip('/') == i.scope.lower().strip('/')]
    return result
def set_identity(cmd, name, scope=None, resource_group_name=None, identity_role='Contributor', identity_scope=None):
    """Attach a system-assigned identity to a policy assignment and optionally grant it a role."""
    policy_client = _resource_policy_client_factory(cmd.cli_ctx)
    scope = _build_policy_scope(policy_client.config.subscription_id, resource_group_name, scope)

    def getter():
        return policy_client.policy_assignments.get(scope, name)

    def setter(policyAssignment):
        policyAssignment.identity = _build_identities_info(cmd, [MSI_LOCAL_ID])
        return policy_client.policy_assignments.create(scope, name, policyAssignment)

    from azure.cli.core.commands.arm import assign_identity as _assign_identity_helper
    return _assign_identity_helper(cmd.cli_ctx, getter, setter, identity_role, identity_scope).identity
def show_identity(cmd, name, scope=None, resource_group_name=None):
    """Show the managed identity of a policy assignment."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    resolved_scope = _build_policy_scope(client.config.subscription_id, resource_group_name, scope)
    return client.policy_assignments.get(resolved_scope, name).identity
def remove_identity(cmd, name, scope=None, resource_group_name=None):
    """Clear the managed identity of a policy assignment (sets type to 'none')."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    resolved_scope = _build_policy_scope(client.config.subscription_id, resource_group_name, scope)
    assignment = client.policy_assignments.get(resolved_scope, name)
    ResourceIdentityType = cmd.get_models('ResourceIdentityType')
    ResourceIdentity = cmd.get_models('Identity')
    assignment.identity = ResourceIdentity(type=ResourceIdentityType.none)
    assignment = client.policy_assignments.create(resolved_scope, name, assignment)
    return assignment.identity
def enforce_mutually_exclusive(subscription, management_group):
    """Raise IncorrectUsageError when both scope selectors are supplied."""
    if subscription and management_group:
        raise IncorrectUsageError('cannot provide both --subscription and --management-group')
def create_policy_definition(cmd, name, rules=None, params=None, display_name=None, description=None, mode=None,
                             metadata=None, subscription=None, management_group=None):
    """Create a policy definition at subscription or management-group scope.

    *rules* and *params* accept inline JSON, a file path, or a URI. Mode and
    metadata are applied only when the service API version supports them.
    """
    rules = _load_file_string_or_uri(rules, 'rules')
    params = _load_file_string_or_uri(params, 'params', False)
    client = _resource_policy_client_factory(cmd.cli_ctx)
    PolicyDefinition = cmd.get_models('PolicyDefinition')
    parameters = PolicyDefinition(policy_rule=rules, parameters=params, description=description,
                                  display_name=display_name)
    if cmd.supported_api_version(min_api='2016-12-01'):
        parameters.mode = mode
    if cmd.supported_api_version(min_api='2017-06-01-preview'):
        parameters.metadata = metadata
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_definitions.create_or_update_at_management_group(name, parameters, management_group)
        if subscription:
            # Retarget the client at the explicitly requested subscription.
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_definitions.create_or_update(name, parameters)
def create_policy_setdefinition(cmd, name, definitions, params=None, display_name=None, description=None,
                                subscription=None, management_group=None, definition_groups=None, metadata=None):
    """Create a policy set definition (initiative) at subscription or management-group scope.

    *definitions*, *params* and *definition_groups* accept inline JSON, a
    file path, or a URI.
    """
    definitions = _load_file_string_or_uri(definitions, 'definitions')
    params = _load_file_string_or_uri(params, 'params', False)
    definition_groups = _load_file_string_or_uri(definition_groups, 'definition_groups', False)
    client = _resource_policy_client_factory(cmd.cli_ctx)
    PolicySetDefinition = cmd.get_models('PolicySetDefinition')
    parameters = PolicySetDefinition(policy_definitions=definitions, parameters=params, description=description,
                                     display_name=display_name, policy_definition_groups=definition_groups)
    if cmd.supported_api_version(min_api='2017-06-01-preview'):
        parameters.metadata = metadata
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_set_definitions.create_or_update_at_management_group(name, parameters, management_group)
        if subscription:
            # Retarget the client at the explicitly requested subscription.
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_set_definitions.create_or_update(name, parameters)
def get_policy_definition(cmd, policy_definition_name, subscription=None, management_group=None):
    """Show a policy definition, resolving built-in definitions as well as custom ones."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    return _get_custom_or_builtin_policy(cmd, client, policy_definition_name, subscription, management_group)
def get_policy_setdefinition(cmd, policy_set_definition_name, subscription=None, management_group=None):
    """Show a policy set definition, resolving built-in definitions as well as custom ones."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    return _get_custom_or_builtin_policy(cmd, client, policy_set_definition_name, subscription, management_group, True)
def list_policy_definition(cmd, subscription=None, management_group=None):
    """List policy definitions at subscription or management-group scope."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_definitions.list_by_management_group(management_group)
        if subscription:
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_definitions.list()
def list_policy_setdefinition(cmd, subscription=None, management_group=None):
    """List policy set definitions at subscription or management-group scope."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_set_definitions.list_by_management_group(management_group)
        if subscription:
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_set_definitions.list()
def delete_policy_definition(cmd, policy_definition_name, subscription=None, management_group=None):
    """Delete a policy definition at subscription or management-group scope."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_definitions.delete_at_management_group(policy_definition_name, management_group)
        if subscription:
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_definitions.delete(policy_definition_name)
def delete_policy_setdefinition(cmd, policy_set_definition_name, subscription=None, management_group=None):
    """Delete a policy set definition at subscription or management-group scope."""
    client = _resource_policy_client_factory(cmd.cli_ctx)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_set_definitions.delete_at_management_group(policy_set_definition_name, management_group)
        if subscription:
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_set_definitions.delete(policy_set_definition_name)
def update_policy_definition(cmd, policy_definition_name, rules=None, params=None,
                             display_name=None, description=None, metadata=None, mode=None,
                             subscription=None, management_group=None):
    """Update a policy definition; fields not supplied keep their current values."""
    rules = _load_file_string_or_uri(rules, 'rules', False)
    params = _load_file_string_or_uri(params, 'params', False)
    client = _resource_policy_client_factory(cmd.cli_ctx)
    definition = _get_custom_or_builtin_policy(cmd, client, policy_definition_name, subscription, management_group)
    # pylint: disable=line-too-long,no-member
    PolicyDefinition = cmd.get_models('PolicyDefinition')
    parameters = PolicyDefinition(
        policy_rule=rules if rules is not None else definition.policy_rule,
        parameters=params if params is not None else definition.parameters,
        display_name=display_name if display_name is not None else definition.display_name,
        description=description if description is not None else definition.description,
        metadata=metadata if metadata is not None else definition.metadata)
    if cmd.supported_api_version(min_api='2016-12-01'):
        parameters.mode = mode
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_definitions.create_or_update_at_management_group(policy_definition_name, parameters, management_group)
        if subscription:
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_definitions.create_or_update(policy_definition_name, parameters)
def update_policy_setdefinition(cmd, policy_set_definition_name, definitions=None, params=None,
                                display_name=None, description=None,
                                subscription=None, management_group=None, definition_groups=None, metadata=None):
    """Update a policy set definition; fields not supplied keep their current values."""
    definitions = _load_file_string_or_uri(definitions, 'definitions', False)
    params = _load_file_string_or_uri(params, 'params', False)
    definition_groups = _load_file_string_or_uri(definition_groups, 'definition_groups', False)
    client = _resource_policy_client_factory(cmd.cli_ctx)
    definition = _get_custom_or_builtin_policy(cmd, client, policy_set_definition_name, subscription, management_group, True)
    # pylint: disable=line-too-long,no-member
    PolicySetDefinition = cmd.get_models('PolicySetDefinition')
    parameters = PolicySetDefinition(
        policy_definitions=definitions if definitions is not None else definition.policy_definitions,
        description=description if description is not None else definition.description,
        display_name=display_name if display_name is not None else definition.display_name,
        parameters=params if params is not None else definition.parameters,
        policy_definition_groups=definition_groups if definition_groups is not None else definition.policy_definition_groups,
        metadata=metadata if metadata is not None else definition.metadata)
    if cmd.supported_api_version(min_api='2018-03-01'):
        enforce_mutually_exclusive(subscription, management_group)
        if management_group:
            return client.policy_set_definitions.create_or_update_at_management_group(policy_set_definition_name, parameters, management_group)
        if subscription:
            client.config.subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.policy_set_definitions.create_or_update(policy_set_definition_name, parameters)
def _register_rp(cli_ctx, subscription_id=None):
    """Register the Microsoft.Management provider and block until ARM reports it registered.

    Polls every 10 seconds; there is deliberately no timeout, matching the
    CLI's existing behavior for management-group commands.
    """
    import time
    rp = "Microsoft.Management"
    rcf = get_mgmt_service_client(
        cli_ctx,
        ResourceType.MGMT_RESOURCE_RESOURCES,
        subscription_id)
    rcf.providers.register(rp)
    while True:
        time.sleep(10)
        if rcf.providers.get(rp).registration_state == 'Registered':
            break
def _get_subscription_id_from_subscription(cli_ctx, subscription):  # pylint: disable=inconsistent-return-statements
    """Resolve a subscription name or ID to its ID using the cached profile.

    Raises CLIError when no cached subscription matches.
    """
    from azure.cli.core._profile import Profile
    profile = Profile(cli_ctx=cli_ctx)
    for sub in profile.load_cached_subscriptions():
        if subscription in (sub['id'], sub['name']):
            return sub['id']
    raise CLIError("Subscription not found in the current context.")
def _get_parent_id_from_parent(parent):
if parent is None or _is_management_group_scope(parent):
return parent
return "/providers/Microsoft.Management/managementGroups/" + parent
def _is_management_group_scope(scope):
return scope is not None and scope.lower().startswith("/providers/microsoft.management/managementgroups")
def cli_managementgroups_group_list(cmd, client):
    """List all management groups (registering the RP first)."""
    _register_rp(cmd.cli_ctx)
    return client.list()
def cli_managementgroups_group_show(
        cmd,
        client,
        group_name,
        expand=False,
        recurse=False):
    """Show a management group; with *expand* include children (recursively when *recurse*)."""
    _register_rp(cmd.cli_ctx)
    if not expand:
        return client.get(group_name)
    return client.get(group_name, "children", recurse)
def cli_managementgroups_group_create(
        cmd,
        client,
        group_name,
        display_name=None,
        parent=None):
    """Create a management group, optionally under the given parent group."""
    _register_rp(cmd.cli_ctx)
    from azure.mgmt.managementgroups.models import (
        CreateManagementGroupRequest, CreateManagementGroupDetails, CreateParentGroupInfo)
    parent_info = CreateParentGroupInfo(id=_get_parent_id_from_parent(parent))
    details = CreateManagementGroupDetails(parent=parent_info)
    request = CreateManagementGroupRequest(
        name=group_name,
        display_name=display_name,
        details=details)
    return client.create_or_update(group_name, request)
def cli_managementgroups_group_update_custom_func(
        instance,
        display_name=None,
        parent_id=None):
    """Apply display-name / parent updates onto the patch-request instance."""
    instance.parent_id = _get_parent_id_from_parent(parent_id)
    instance.display_name = display_name
    return instance
def cli_managementgroups_group_update_get():
    """Return an empty patch-request object for the generic-update machinery."""
    from azure.mgmt.managementgroups.models import PatchManagementGroupRequest
    return PatchManagementGroupRequest(display_name=None, parent_id=None)
def cli_managementgroups_group_update_set(
        cmd, client, group_name, parameters=None):
    """Submit the patch request built by the generic-update machinery."""
    return client.update(group_name, parameters)
def cli_managementgroups_group_delete(cmd, client, group_name):
    """Delete a management group (registering the RP first)."""
    _register_rp(cmd.cli_ctx)
    return client.delete(group_name)
def cli_managementgroups_subscription_add(
        cmd, client, group_name, subscription):
    """Add a subscription (by name or ID) to a management group."""
    sub_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.create(group_name, sub_id)
def cli_managementgroups_subscription_remove(
        cmd, client, group_name, subscription):
    """Remove a subscription (by name or ID) from a management group."""
    sub_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
    return client.delete(group_name, sub_id)
# region Locks
def _validate_lock_params_match_lock(
        lock_client, name, resource_group, resource_provider_namespace, parent_resource_path,
        resource_type, resource_name):
    """Best-effort sanity check that the user-supplied scope parameters match
    the lock named *name* that actually exists in the subscription.

    Raises CLIError on a mismatch. The check only runs when exactly one lock
    with that name is found; zero or multiple matches are left for the
    service call itself to resolve.
    """
    locks = lock_client.management_locks.list_at_subscription_level()
    found_count = 0  # locks at different levels can have the same name
    lock_resource_id = None
    for lock in locks:
        if lock.name == name:
            found_count = found_count + 1
            lock_resource_id = lock.id
    if found_count == 1:
        # If we only found one lock, let's validate that the parameters are correct,
        resource = parse_resource_id(lock_resource_id)
        _resource_group = resource.get('resource_group', None)
        _resource_namespace = resource.get('namespace', None)
        # A subscription-level lock has no resource group — nothing to compare.
        if _resource_group is None:
            return
        if resource_group != _resource_group:
            raise CLIError(
                'Unexpected --resource-group for lock {}, expected {}'.format(
                    name, _resource_group))
        # Microsoft.Authorization is the lock provider itself, i.e. a
        # group-level lock — no resource-scope parameters to validate.
        if _resource_namespace is None or _resource_namespace == 'Microsoft.Authorization':
            return
        if resource_provider_namespace != _resource_namespace:
            raise CLIError(
                'Unexpected --namespace for lock {}, expected {}'.format(name, _resource_namespace))
        # Reconstruct the (parent, type, name) triple from the parsed id.
        # Depth is detected via the child_type_N keys parse_resource_id emits.
        if resource.get('child_type_2', None) is None:
            # Top-level resource: the lock is child_type_1/child_name_1.
            _resource_type = resource.get('type', None)
            _resource_name = resource.get('name', None)
        else:
            if resource.get('child_type_3', None) is None:
                # One level of nesting: parent is type/name.
                _resource_type = resource.get('child_type_1', None)
                _resource_name = resource.get('child_name_1', None)
                parent = (resource['type'] + '/' + resource['name'])
            else:
                # Two levels of nesting: parent path spans two segments.
                _resource_type = resource.get('child_type_2', None)
                _resource_name = resource.get('child_name_2', None)
                parent = (resource['type'] + '/' + resource['name'] + '/' +
                          resource['child_type_1'] + '/' + resource['child_name_1'])
            if parent != parent_resource_path:
                raise CLIError(
                    'Unexpected --parent for lock {}, expected {}'.format(
                        name, parent))
        if resource_type != _resource_type:
            raise CLIError('Unexpected --resource-type for lock {}, expected {}'.format(
                name, _resource_type))
        if resource_name != _resource_name:
            raise CLIError('Unexpected --resource-name for lock {}, expected {}'.format(
                name, _resource_name))
def list_locks(cmd, resource_group=None,
               resource_provider_namespace=None, parent_resource_path=None, resource_type=None,
               resource_name=None, filter_string=None):
    """List locks at the subscription, resource-group, or resource scope,
    choosing the narrowest scope the supplied parameters describe."""
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    # _extract_lock_params normalizes/derives the scope tuple.
    (resource_group, resource_name,
     resource_provider_namespace, resource_type) = _extract_lock_params(
         resource_group, resource_provider_namespace, resource_type, resource_name)
    if resource_group is None:
        # No group given -> whole subscription.
        return lock_client.management_locks.list_at_subscription_level(filter=filter_string)
    if resource_name is None:
        # Group but no resource -> resource-group scope.
        return lock_client.management_locks.list_at_resource_group_level(
            resource_group, filter=filter_string)
    return lock_client.management_locks.list_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, filter=filter_string)
def get_lock(cmd, lock_name=None, resource_group=None, resource_provider_namespace=None,
             parent_resource_path=None, resource_type=None, resource_name=None, ids=None):
    """Show a lock, addressed either by --ids or by name + scope parameters.

    :param ids: list of lock resource ids; when given, each is parsed and this
        function recurses per id (other scope parameters are ignored).
    :raises CLIError: when supplied scope parameters do not match the lock.
    """
    if ids:
        kwargs_list = []
        for id_arg in ids:
            try:
                kwargs_list.append(_parse_lock_id(id_arg))
            except AttributeError:
                logger.error('az lock show: error: argument --ids: invalid ResourceId value: \'%s\'', id_arg)
                return
        results = [get_lock(cmd, **kwargs) for kwargs in kwargs_list]
        # Single id -> single object, multiple ids -> list.
        return results[0] if len(results) == 1 else results
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    lock_resource = _extract_lock_params(resource_group, resource_provider_namespace,
                                         resource_type, resource_name)
    resource_group = lock_resource[0]
    resource_name = lock_resource[1]
    resource_provider_namespace = lock_resource[2]
    resource_type = lock_resource[3]
    _validate_lock_params_match_lock(lock_client, lock_name, resource_group,
                                     resource_provider_namespace, parent_resource_path,
                                     resource_type, resource_name)
    if resource_group is None:
        return _call_subscription_get(cmd, lock_client, lock_name)
    if resource_name is None:
        return lock_client.management_locks.get_at_resource_group_level(resource_group, lock_name)
    if cmd.supported_api_version(max_api='2015-01-01'):
        # Old API versions have no get_at_resource_level; filter the list instead.
        # BUG FIX: list_locks takes `cmd` as its first parameter — the original
        # call dropped it, shifting every argument and crashing on cmd.cli_ctx.
        lock_list = list_locks(cmd, resource_group, resource_provider_namespace,
                               parent_resource_path, resource_type, resource_name)
        return next((lock for lock in lock_list if lock.name == lock_name), None)
    return lock_client.management_locks.get_at_resource_level(
        resource_group, resource_provider_namespace,
        parent_resource_path or '', resource_type, resource_name, lock_name)
def delete_lock(cmd, lock_name=None, resource_group=None, resource_provider_namespace=None,
                parent_resource_path=None, resource_type=None, resource_name=None, ids=None):
    """Delete a lock, addressed either by --ids or by name + scope parameters."""
    if ids:
        kwargs_list = []
        for id_arg in ids:
            try:
                kwargs_list.append(_parse_lock_id(id_arg))
            except AttributeError:
                logger.error('az lock delete: error: argument --ids: invalid ResourceId value: \'%s\'', id_arg)
                return
        results = [delete_lock(cmd, **kwargs) for kwargs in kwargs_list]
        return results[0] if len(results) == 1 else results
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    # Normalize/derive the scope tuple before validating against the lock.
    (resource_group, resource_name,
     resource_provider_namespace, resource_type) = _extract_lock_params(
         resource_group, resource_provider_namespace, resource_type, resource_name)
    _validate_lock_params_match_lock(lock_client, lock_name, resource_group,
                                     resource_provider_namespace, parent_resource_path,
                                     resource_type, resource_name)
    if resource_group is None:
        return lock_client.management_locks.delete_at_subscription_level(lock_name)
    if resource_name is None:
        return lock_client.management_locks.delete_at_resource_group_level(
            resource_group, lock_name)
    return lock_client.management_locks.delete_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, lock_name)
def create_lock(cmd, lock_name, level,
                resource_group=None, resource_provider_namespace=None, notes=None,
                parent_resource_path=None, resource_type=None, resource_name=None):
    """Create a lock at subscription, resource-group, or resource scope.

    :param level: lock level, e.g. CanNotDelete or ReadOnly.
    """
    ManagementLockObject = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_LOCKS, 'ManagementLockObject', mod='models')
    lock_object = ManagementLockObject(level=level, notes=notes, name=lock_name)
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    (resource_group, resource_name,
     resource_provider_namespace, resource_type) = _extract_lock_params(
         resource_group, resource_provider_namespace, resource_type, resource_name)
    if resource_group is None:
        return lock_client.management_locks.create_or_update_at_subscription_level(lock_name, lock_object)
    if resource_name is None:
        return lock_client.management_locks.create_or_update_at_resource_group_level(
            resource_group, lock_name, lock_object)
    return lock_client.management_locks.create_or_update_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, lock_name, lock_object)
def update_lock(cmd, lock_name=None, resource_group=None, resource_provider_namespace=None, notes=None,
                parent_resource_path=None, resource_type=None, resource_name=None, level=None, ids=None):
    """Update a lock's level/notes, addressed by --ids or by name + scope.

    Fetches the current lock, applies the new values, then writes it back.
    """
    if ids:
        kwargs_list = []
        for id_arg in ids:
            try:
                kwargs_list.append(_parse_lock_id(id_arg))
            except AttributeError:
                logger.error('az lock update: error: argument --ids: invalid ResourceId value: \'%s\'', id_arg)
                return
        results = [update_lock(cmd, level=level, notes=notes, **kwargs) for kwargs in kwargs_list]
        return results[0] if len(results) == 1 else results
    lock_client = _resource_lock_client_factory(cmd.cli_ctx)
    lock_resource = _extract_lock_params(resource_group, resource_provider_namespace,
                                         resource_type, resource_name)
    resource_group = lock_resource[0]
    resource_name = lock_resource[1]
    resource_provider_namespace = lock_resource[2]
    resource_type = lock_resource[3]
    _validate_lock_params_match_lock(lock_client, lock_name, resource_group, resource_provider_namespace,
                                     parent_resource_path, resource_type, resource_name)
    if resource_group is None:
        params = _call_subscription_get(cmd, lock_client, lock_name)
        _update_lock_parameters(params, level, notes)
        return lock_client.management_locks.create_or_update_at_subscription_level(lock_name, params)
    if resource_name is None:
        params = lock_client.management_locks.get_at_resource_group_level(resource_group, lock_name)
        _update_lock_parameters(params, level, notes)
        return lock_client.management_locks.create_or_update_at_resource_group_level(
            resource_group, lock_name, params)
    if cmd.supported_api_version(max_api='2015-01-01'):
        # BUG FIX: list_locks takes `cmd` as its first parameter — the original
        # call dropped it, shifting every argument and crashing on cmd.cli_ctx.
        # NOTE(review): this legacy branch returns the matching lock without
        # applying level/notes — presumably intentional for old API versions;
        # confirm against the service behavior.
        lock_list = list_locks(cmd, resource_group, resource_provider_namespace,
                               parent_resource_path, resource_type, resource_name)
        return next((lock for lock in lock_list if lock.name == lock_name), None)
    params = lock_client.management_locks.get_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, lock_name)
    _update_lock_parameters(params, level, notes)
    return lock_client.management_locks.create_or_update_at_resource_level(
        resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
        resource_name, lock_name, params)
def create_resource_link(cmd, link_id, target_id, notes=None):
    """Create (or overwrite) a resource link pointing link_id -> target_id."""
    links_client = _resource_links_client_factory(cmd.cli_ctx).resource_links
    ResourceLinkProperties = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_LINKS,
                                    'ResourceLinkProperties', mod='models')
    link_props = ResourceLinkProperties(target_id=target_id, notes=notes)
    links_client.create_or_update(link_id, link_props)
def update_resource_link(cmd, link_id, target_id=None, notes=None):
    """Update a resource link, preserving existing target/notes when a new
    value is not supplied."""
    links_client = _resource_links_client_factory(cmd.cli_ctx).resource_links
    params = links_client.get(link_id)
    ResourceLinkProperties = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_LINKS,
                                    'ResourceLinkProperties', mod='models')
    properties = ResourceLinkProperties(
        target_id=target_id if target_id is not None else params.properties.target_id,
        notes=notes if notes is not None else params.properties.notes)
    # BUG FIX: this call was jammed onto the end of the statement above,
    # which is a syntax error; it must be its own statement.
    links_client.create_or_update(link_id, properties)
def list_resource_links(cmd, scope=None, filter_string=None):
    """List resource links at the given scope, or subscription-wide."""
    links_client = _resource_links_client_factory(cmd.cli_ctx).resource_links
    if scope is None:
        return links_client.list_at_subscription(filter=filter_string)
    return links_client.list_at_source_scope(scope, filter=filter_string)
def get_tag_at_scope(cmd, resource_id=None):
    """Get tags at a scope, or list subscription tag names when no scope given."""
    rcf = _resource_client_factory(cmd.cli_ctx)
    if resource_id is None:
        return rcf.tags.list()
    return rcf.tags.get_at_scope(scope=resource_id)
def create_or_update_tag_at_scope(cmd, resource_id=None, tags=None, tag_name=None):
    """Replace tags at a scope, or create a subscription-level tag name."""
    rcf = _resource_client_factory(cmd.cli_ctx)
    if resource_id is None:
        # No scope -> subscription-level tag-name creation.
        return rcf.tags.create_or_update(tag_name=tag_name)
    if not tags:
        raise IncorrectUsageError("Tags could not be empty.")
    Tags = cmd.get_models('Tags')
    return rcf.tags.create_or_update_at_scope(scope=resource_id, properties=Tags(tags=tags))
def delete_tag_at_scope(cmd, resource_id=None, tag_name=None):
    """Delete all tags at a scope, or a subscription-level tag name."""
    rcf = _resource_client_factory(cmd.cli_ctx)
    if resource_id is None:
        return rcf.tags.delete(tag_name=tag_name)
    return rcf.tags.delete_at_scope(scope=resource_id)
def update_tag_at_scope(cmd, resource_id, tags, operation):
    """Incrementally update tags at a scope.

    :param operation: merge/replace/delete semantics, passed to the Tags API.
    """
    rcf = _resource_client_factory(cmd.cli_ctx)
    if not tags:
        raise IncorrectUsageError("Tags could not be empty.")
    Tags = cmd.get_models('Tags')
    return rcf.tags.update_at_scope(scope=resource_id, properties=Tags(tags=tags),
                                    operation=operation)
class _ResourceUtils:
    """Dispatch helper for generic resource operations.

    Accepts either a full resource id or the individual scope components
    (group / namespace / parent path / type / name), resolves an api-version
    when one is not supplied, and exposes CRUD + tag + invoke-action wrappers
    that pick the right by-id or by-components SDK call.
    """

    # BUG FIX: the original jammed ``def __init__`` onto the ``class`` line
    # (``class _ResourceUtils: def __init__(...)``), which is a syntax error.
    def __init__(self, cli_ctx,
                 resource_group_name=None, resource_provider_namespace=None,
                 parent_resource_path=None, resource_type=None, resource_name=None,
                 resource_id=None, api_version=None, rcf=None, latest_include_preview=False):
        # Allow 'namespace/type' in --resource-type when namespace wasn't given.
        if resource_type and not resource_provider_namespace and not parent_resource_path:
            parts = resource_type.split('/')
            if len(parts) > 1:
                resource_provider_namespace = parts[0]
                resource_type = parts[1]
        self.rcf = rcf or _resource_client_factory(cli_ctx)
        if api_version is None:
            if resource_id:
                api_version = _ResourceUtils._resolve_api_version_by_id(
                    self.rcf, resource_id, latest_include_preview=latest_include_preview)
            else:
                _validate_resource_inputs(resource_group_name, resource_provider_namespace,
                                          resource_type, resource_name)
                api_version = _ResourceUtils.resolve_api_version(
                    self.rcf,
                    resource_provider_namespace,
                    parent_resource_path,
                    resource_type,
                    latest_include_preview=latest_include_preview)
        self.resource_group_name = resource_group_name
        self.resource_provider_namespace = resource_provider_namespace
        self.parent_resource_path = parent_resource_path if parent_resource_path else ''
        self.resource_type = resource_type
        self.resource_name = resource_name
        self.resource_id = resource_id
        self.api_version = api_version

    def create_resource(self, properties, location, is_full_object):
        """Create or update the resource from a JSON *properties* string.

        When ``is_full_object`` is false, *properties* is wrapped in a
        GenericResource and the location defaults to the resource group's.
        """
        try:
            res = json.loads(properties)
        except json.decoder.JSONDecodeError as ex:
            raise CLIError('Error parsing JSON.\n{}\n{}'.format(properties, ex))
        if not is_full_object:
            if not location:
                if self.resource_id:
                    rg_name = parse_resource_id(self.resource_id)['resource_group']
                else:
                    rg_name = self.resource_group_name
                location = self.rcf.resource_groups.get(rg_name).location
            res = GenericResource(location=location, properties=res)
        elif res.get('location', None) is None:
            raise IncorrectUsageError("location of the resource is required")
        if self.resource_id:
            resource = self.rcf.resources.create_or_update_by_id(self.resource_id,
                                                                 self.api_version,
                                                                 res)
        else:
            resource = self.rcf.resources.create_or_update(self.resource_group_name,
                                                           self.resource_provider_namespace,
                                                           self.parent_resource_path,
                                                           self.resource_type,
                                                           self.resource_name,
                                                           self.api_version,
                                                           res)
        return resource

    def get_resource(self, include_response_body=False):
        """GET the resource; optionally attach the raw response body."""
        if self.resource_id:
            resource = self.rcf.resources.get_by_id(self.resource_id, self.api_version,
                                                    raw=include_response_body)
        else:
            resource = self.rcf.resources.get(self.resource_group_name,
                                              self.resource_provider_namespace,
                                              self.parent_resource_path,
                                              self.resource_type,
                                              self.resource_name,
                                              self.api_version,
                                              raw=include_response_body)
        if include_response_body:
            # raw=True returns a wrapper; expose the parsed body on the model.
            temp = resource.output
            setattr(temp, 'response_body', json.loads(resource.response.content.decode()))
            resource = temp
        return resource

    def delete(self):
        """Delete the resource by id or by scope components."""
        if self.resource_id:
            return self.rcf.resources.delete_by_id(self.resource_id, self.api_version)
        return self.rcf.resources.delete(self.resource_group_name,
                                         self.resource_provider_namespace,
                                         self.parent_resource_path,
                                         self.resource_type,
                                         self.resource_name,
                                         self.api_version)

    def update(self, parameters):
        """PUT the full *parameters* back (create_or_update semantics)."""
        if self.resource_id:
            return self.rcf.resources.create_or_update_by_id(self.resource_id,
                                                             self.api_version,
                                                             parameters)
        return self.rcf.resources.create_or_update(self.resource_group_name,
                                                   self.resource_provider_namespace,
                                                   self.parent_resource_path,
                                                   self.resource_type,
                                                   self.resource_name,
                                                   self.api_version,
                                                   parameters)

    def tag(self, tags, is_incremental=False):
        """Set (or incrementally merge) tags on the resource."""
        resource = self.get_resource()

        if is_incremental is True:
            if not tags:
                raise CLIError("When modifying tag incrementally, the parameters of tag must have specific values.")
            if resource.tags:
                resource.tags.update(tags)
                tags = resource.tags

        # please add the service type that needs to be requested with PATCH type here
        # for example: the properties of RecoveryServices/vaults must be filled, and a PUT request that passes back
        # to properties will fail due to the lack of properties, so the PATCH type should be used
        need_patch_service = ['Microsoft.RecoveryServices/vaults', 'Microsoft.Resources/resourceGroups',
                              'Microsoft.ContainerRegistry/registries/webhooks',
                              'Microsoft.ContainerInstance/containerGroups']

        if resource is not None and resource.type in need_patch_service:
            parameters = GenericResource(tags=tags)
            if self.resource_id:
                return self.rcf.resources.update_by_id(self.resource_id, self.api_version, parameters)
            return self.rcf.resources.update(self.resource_group_name,
                                             self.resource_provider_namespace,
                                             self.parent_resource_path,
                                             self.resource_type,
                                             self.resource_name,
                                             self.api_version,
                                             parameters)

        # PUT path: echo the whole resource back with the new tags.
        # pylint: disable=no-member
        parameters = GenericResource(
            location=resource.location,
            tags=tags,
            plan=resource.plan,
            properties=resource.properties,
            kind=resource.kind,
            managed_by=resource.managed_by,
            sku=resource.sku,
            identity=resource.identity)

        if self.resource_id:
            return self.rcf.resources.create_or_update_by_id(self.resource_id, self.api_version,
                                                             parameters)
        return self.rcf.resources.create_or_update(self.resource_group_name,
                                                   self.resource_provider_namespace,
                                                   self.parent_resource_path,
                                                   self.resource_type,
                                                   self.resource_name,
                                                   self.api_version,
                                                   parameters)

    def invoke_action(self, action, request_body):
        """POST an arbitrary *action* on the resource and poll to completion.

        Returns an AzureOperationPoller whose result is the response text.
        """
        from msrestazure.azure_operation import AzureOperationPoller

        query_parameters = {}
        serialize = self.rcf.resources._serialize  # pylint: disable=protected-access
        client = self.rcf.resources._client  # pylint: disable=protected-access

        url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/' \
            '{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/{action}'

        if self.resource_id:
            url = client.format_url(
                '{resource_id}/{action}',
                resource_id=self.resource_id,
                action=serialize.url("action", action, 'str'))
        else:
            url = client.format_url(
                url,
                resourceGroupName=serialize.url(
                    "resource_group_name", self.resource_group_name, 'str',
                    max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                resourceProviderNamespace=serialize.url(
                    "resource_provider_namespace", self.resource_provider_namespace, 'str'),
                parentResourcePath=serialize.url(
                    "parent_resource_path", self.parent_resource_path, 'str', skip_quote=True),
                resourceType=serialize.url("resource_type", self.resource_type, 'str', skip_quote=True),
                resourceName=serialize.url("resource_name", self.resource_name, 'str'),
                subscriptionId=serialize.url(
                    "self.config.subscription_id", self.rcf.resources.config.subscription_id, 'str'),
                action=serialize.url("action", action, 'str'))

        # Construct parameters
        query_parameters['api-version'] = serialize.query("api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.rcf.resources.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid4())
        if self.rcf.resources.config.accept_language is not None:
            header_parameters['accept-language'] = serialize.header(
                "self.config.accept_language", self.rcf.resources.config.accept_language, 'str')

        # Construct and send request
        def long_running_send():
            request = client.post(url, query_parameters)
            return client.send(
                request, header_parameters, json.loads(request_body) if request_body else None)

        def get_long_running_status(status_link, headers=None):
            request = client.get(status_link)
            if headers:
                request.headers.update(headers)
            return client.send(request, header_parameters)

        def get_long_running_output(response):
            from msrestazure.azure_exceptions import CloudError
            if response.status_code not in [200, 202, 204]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response.text

        return AzureOperationPoller(long_running_send, get_long_running_output, get_long_running_status,
                                    self.rcf.resources.config.long_running_operation_timeout)

    @staticmethod
    def resolve_api_version(rcf, resource_provider_namespace, parent_resource_path, resource_type,
                            latest_include_preview=False):
        """Pick an api-version for the type from the provider's metadata.

        Prefers the newest non-preview version unless latest_include_preview.
        :raises IncorrectUsageError: unknown type or no api-version available.
        """
        provider = rcf.providers.get(resource_provider_namespace)

        # If available, we will use parent resource's api-version
        resource_type_str = (parent_resource_path.split('/')[0] if parent_resource_path else resource_type)

        rt = [t for t in provider.resource_types
              if t.resource_type.lower() == resource_type_str.lower()]
        if not rt:
            raise IncorrectUsageError('Resource type {} not found.'.format(resource_type_str))
        if len(rt) == 1 and rt[0].api_versions:
            # api_versions is newest-first per the provider metadata contract.
            if latest_include_preview:
                return rt[0].api_versions[0]
            npv = [v for v in rt[0].api_versions if 'preview' not in v.lower()]
            return npv[0] if npv else rt[0].api_versions[0]
        raise IncorrectUsageError(
            'API version is required and could not be resolved for resource {}'
            .format(resource_type))

    @staticmethod
    def _resolve_api_version_by_id(rcf, resource_id, latest_include_preview=False):
        """Derive namespace/parent/type from *resource_id* and resolve its api-version."""
        parts = parse_resource_id(resource_id)

        if len(parts) == 2 and parts['subscription'] is not None and parts['resource_group'] is not None:
            # Bare resource-group id: use the profile's resource api-version.
            return AZURE_API_PROFILES['latest'][ResourceType.MGMT_RESOURCE_RESOURCES]

        if 'namespace' not in parts:
            raise CLIError('The type of value entered by --ids parameter is not supported.')

        namespace = parts.get('child_namespace_1', parts['namespace'])
        if parts.get('child_type_2'):
            parent = (parts['type'] + '/' + parts['name'] + '/' +
                      parts['child_type_1'] + '/' + parts['child_name_1'])
            resource_type = parts['child_type_2']
        elif parts.get('child_type_1'):
            # if the child resource has a provider namespace it is independent of the
            # parent, so set the parent to empty
            if parts.get('child_namespace_1') is not None:
                parent = ''
            else:
                parent = parts['type'] + '/' + parts['name']
            resource_type = parts['child_type_1']
        else:
            parent = None
            resource_type = parts['type']

        return _ResourceUtils.resolve_api_version(rcf, namespace, parent, resource_type,
                                                  latest_include_preview=latest_include_preview)
| true
| true
|
f709121ec8d4532010013541f330e6a67735c286
| 251
|
py
|
Python
|
nginx_router/backend/synth_app/views.py
|
BennettDixon/book_query_app
|
b1afd6967c432520540c0427948808ff7b5d8556
|
[
"MIT"
] | 2
|
2019-08-22T00:49:16.000Z
|
2022-01-21T21:27:53.000Z
|
nginx_router/backend/synth_app/views.py
|
BennettDixon/book_query_app
|
b1afd6967c432520540c0427948808ff7b5d8556
|
[
"MIT"
] | 7
|
2020-09-06T23:47:51.000Z
|
2022-02-26T16:47:58.000Z
|
nginx_router/backend/synth_app/views.py
|
BennettDixon/book_query_app
|
b1afd6967c432520540c0427948808ff7b5d8556
|
[
"MIT"
] | null | null | null |
from django.http import HttpResponse
from django.shortcuts import render
# Create your views here.
def index(request):
    """Health-check endpoint: report that the synth service is running."""
    payload = '{"response": "Synth is running!"}'
    return HttpResponse(payload)
def test(request):
    """Simple plain-text test endpoint."""
    message = 'ANOTHER RESPONSE YO'
    return HttpResponse(message)
| 19.307692
| 60
| 0.741036
|
from django.http import HttpResponse
from django.shortcuts import render
def index(request):
    """Health-check endpoint: report that the synth service is running."""
    body = '{"response": "Synth is running!"}'
    return HttpResponse(body)
def test(request):
    """Simple plain-text test endpoint."""
    text = 'ANOTHER RESPONSE YO'
    return HttpResponse(text)
| true
| true
|
f7091272a083f663a8bf3500eec9312864fe379c
| 9,761
|
py
|
Python
|
pypy/module/_cffi_backend/test/test_re_python.py
|
ruby-compiler-survey/pypy
|
c76ed8d0979e13497786cf99eb427ef8f94ea816
|
[
"Apache-2.0",
"OpenSSL"
] | 1
|
2021-07-19T17:42:42.000Z
|
2021-07-19T17:42:42.000Z
|
pypy/module/_cffi_backend/test/test_re_python.py
|
CAS-Atlantic/pypy
|
0988788dd911ff0d5b1cfcf0657412810168d37e
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
pypy/module/_cffi_backend/test/test_re_python.py
|
CAS-Atlantic/pypy
|
0988788dd911ff0d5b1cfcf0657412810168d37e
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
import py
import sys, shutil, os
from rpython.tool.udir import udir
from pypy.interpreter.gateway import interp2app
from pypy.module._cffi_backend.newtype import _clean_cache
# True when running on Windows; idiomatic direct boolean assignment
# instead of the four-line if/else.
WIN32 = sys.platform == 'win32'
class AppTestRecompilerPython:
    """App-level tests for cffi's "out-of-line ABI mode" (emit_python_code /
    dlopen) as exposed by pypy's _cffi_backend module.

    setup_class compiles a tiny C extension with the *host* cffi (which must
    be >= 1.0.0), emits out-of-line Python stubs for it, and the test methods
    then run at app level inside the pypy object space.
    """
    spaceconfig = dict(usemodules=['_cffi_backend'])

    def setup_class(cls):
        """Build the C test library and the re_python_pysrc / re_py_subsrc stubs."""
        try:
            from cffi import FFI  # <== the system one, which
            from cffi import recompiler  # needs to be at least cffi 1.0.0
            from cffi import ffiplatform
        except ImportError:
            py.test.skip("system cffi module not found or older than 1.0.0")
        space = cls.space
        SRC = """
        #define FOOBAR (-42)
        static const int FOOBAZ = -43;
        #define BIGPOS 420000000000L
        #define BIGNEG -420000000000L
        int add42(int x) { return x + 42; }
        int globalvar42 = 1234;
        const int globalconst42 = 4321;
        const char *const globalconsthello = "hello";
        struct foo_s;
        typedef struct bar_s { int x; signed char a[]; } bar_t;
        enum foo_e { AA, BB, CC };
        void init_test_re_python(void) { } /* windows hack */
        void PyInit__test_re_python(void) { } /* windows hack */
        """
        tmpdir = udir.join('test_re_python')
        tmpdir.ensure(dir=1)
        c_file = tmpdir.join('_test_re_python.c')
        c_file.write(SRC)
        ext = ffiplatform.get_extension(str(c_file), '_test_re_python',
            export_symbols=['add42', 'globalvar42',
                            'globalconst42', 'globalconsthello'])
        outputfilename = ffiplatform.compile(str(tmpdir), ext)
        cls.w_extmod = space.wrap(outputfilename)
        # Also keep a copy under a non-ASCII file name to exercise unicode dlopen.
        if WIN32:
            unicode_name = u'load\u03betest.dll'
        else:
            unicode_name = u'load_caf\xe9' + os.path.splitext(outputfilename)[1]
        try:
            unicode_name.encode(sys.getfilesystemencoding())
        except UnicodeEncodeError:
            unicode_name = None  # skip test_dlopen_unicode
        if unicode_name is not None:
            # NOTE(review): unicode() is Python 2 only — this module targets
            # pypy2's test harness.
            outputfileUname = os.path.join(unicode(udir), unicode_name)
            shutil.copyfile(outputfilename, outputfileUname)
            cls.w_extmodU = space.wrap(outputfileUname)
        #mod.tmpdir = tmpdir
        #
        ffi = FFI()
        ffi.cdef("""
        #define FOOBAR -42
        static const int FOOBAZ = -43;
        #define BIGPOS 420000000000L
        #define BIGNEG -420000000000L
        int add42(int);
        int globalvar42;
        const int globalconst42;
        const char *const globalconsthello = "hello";
        int no_such_function(int);
        int no_such_globalvar;
        struct foo_s;
        typedef struct bar_s { int x; signed char a[]; } bar_t;
        enum foo_e { AA, BB, CC };
        typedef struct selfref { struct selfref *next; } *selfref_ptr_t;
        void *dlopen(const char *filename, int flags);
        int dlclose(void *handle);
        """)
        ffi.set_source('re_python_pysrc', None)
        ffi.emit_python_code(str(tmpdir.join('re_python_pysrc.py')))
        #
        # A second ffi that includes the first, to exercise ffi.include().
        sub_ffi = FFI()
        sub_ffi.cdef("static const int k2 = 121212;")
        sub_ffi.include(ffi)
        assert 'macro FOOBAR' in ffi._parser._declarations
        assert 'macro FOOBAZ' in ffi._parser._declarations
        sub_ffi.set_source('re_py_subsrc', None)
        sub_ffi.emit_python_code(str(tmpdir.join('re_py_subsrc.py')))
        #
        # App-level helper so the tests can import the generated stubs.
        cls.w_fix_path = space.appexec([space.wrap(str(tmpdir))], """(path):
            def fix_path(ignored=None):
                import _cffi_backend     # force it to be initialized
                import sys
                if path not in sys.path:
                    sys.path.insert(0, path)
            return fix_path
        """)
        cls.w_dl_libpath = space.w_None
        if sys.platform != 'win32':
            import ctypes.util
            cls.w_dl_libpath = space.wrap(ctypes.util.find_library('dl'))

    def teardown_method(self, meth):
        """Drop the generated stub modules and clear the ctype cache between tests."""
        self.space.appexec([], """():
            import sys
            for name in ['re_py_subsrc', 're_python_pysrc']:
                if name in sys.modules:
                    del sys.modules[name]
        """)
        _clean_cache(self.space)

    def test_constant_1(self):
        self.fix_path()
        from re_python_pysrc import ffi
        assert ffi.integer_const('FOOBAR') == -42
        assert ffi.integer_const('FOOBAZ') == -43

    def test_large_constant(self):
        # Constants wider than 32 bits must round-trip.
        self.fix_path()
        from re_python_pysrc import ffi
        assert ffi.integer_const('BIGPOS') == 420000000000
        assert ffi.integer_const('BIGNEG') == -420000000000

    def test_function(self):
        import _cffi_backend
        self.fix_path()
        from re_python_pysrc import ffi
        lib = ffi.dlopen(self.extmod)
        assert lib.add42(-10) == 32
        assert type(lib.add42) is _cffi_backend.FFI.CData

    def test_dlopen_unicode(self):
        # Uses the extra copy made under a non-ASCII file name in setup_class.
        if not getattr(self, 'extmodU', None):
            skip("no unicode file name")
        import _cffi_backend
        self.fix_path()
        from re_python_pysrc import ffi
        lib = ffi.dlopen(self.extmodU)
        assert lib.add42(-10) == 32

    def test_dlclose(self):
        import _cffi_backend
        self.fix_path()
        from re_python_pysrc import ffi
        lib = ffi.dlopen(self.extmod)
        ffi.dlclose(lib)
        # Accessing a symbol after dlclose must raise ffi.error.
        e = raises(ffi.error, getattr, lib, 'add42')
        assert str(e.value) == (
            "library '%s' has been closed" % (self.extmod,))
        ffi.dlclose(lib)   # does not raise

    def test_constant_via_lib(self):
        self.fix_path()
        from re_python_pysrc import ffi
        lib = ffi.dlopen(self.extmod)
        assert lib.FOOBAR == -42
        assert lib.FOOBAZ == -43

    def test_opaque_struct(self):
        self.fix_path()
        from re_python_pysrc import ffi
        ffi.cast("struct foo_s *", 0)
        # Opaque struct: casting is fine, allocating is not.
        raises(TypeError, ffi.new, "struct foo_s *")

    def test_nonopaque_struct(self):
        self.fix_path()
        from re_python_pysrc import ffi
        for p in [ffi.new("struct bar_s *", [5, b"foobar"]),
                  ffi.new("bar_t *", [5, b"foobar"])]:
            assert p.x == 5
            assert p.a[0] == ord('f')
            assert p.a[5] == ord('r')

    def test_enum(self):
        self.fix_path()
        from re_python_pysrc import ffi
        assert ffi.integer_const("BB") == 1
        e = ffi.cast("enum foo_e", 2)
        assert ffi.string(e) == "CC"

    def test_include_1(self):
        # re_py_subsrc includes re_python_pysrc; everything must be visible.
        self.fix_path()
        from re_py_subsrc import ffi
        assert ffi.integer_const('FOOBAR') == -42
        assert ffi.integer_const('FOOBAZ') == -43
        assert ffi.integer_const('k2') == 121212
        lib = ffi.dlopen(self.extmod)     # <- a random unrelated library would be fine
        assert lib.FOOBAR == -42
        assert lib.FOOBAZ == -43
        assert lib.k2 == 121212
        #
        p = ffi.new("bar_t *", [5, b"foobar"])
        assert p.a[4] == ord('a')

    def test_global_var(self):
        self.fix_path()
        from re_python_pysrc import ffi
        lib = ffi.dlopen(self.extmod)
        assert lib.globalvar42 == 1234
        p = ffi.addressof(lib, 'globalvar42')
        lib.globalvar42 += 5
        assert p[0] == 1239
        p[0] -= 1
        assert lib.globalvar42 == 1238

    def test_global_const_int(self):
        self.fix_path()
        from re_python_pysrc import ffi
        lib = ffi.dlopen(self.extmod)
        assert lib.globalconst42 == 4321
        # const ints are inlined — no writable address.
        raises(AttributeError, ffi.addressof, lib, 'globalconst42')

    def test_global_const_nonint(self):
        self.fix_path()
        from re_python_pysrc import ffi
        lib = ffi.dlopen(self.extmod)
        assert ffi.string(lib.globalconsthello, 8) == "hello"
        raises(AttributeError, ffi.addressof, lib, 'globalconsthello')

    def test_rtld_constants(self):
        self.fix_path()
        from re_python_pysrc import ffi
        ffi.RTLD_NOW    # check that we have the attributes
        ffi.RTLD_LAZY
        ffi.RTLD_GLOBAL

    def test_no_such_function_or_global_var(self):
        self.fix_path()
        from re_python_pysrc import ffi
        lib = ffi.dlopen(self.extmod)
        e = raises(ffi.error, getattr, lib, 'no_such_function')
        assert str(e.value).startswith(
            "symbol 'no_such_function' not found in library '")
        e = raises(ffi.error, getattr, lib, 'no_such_globalvar')
        assert str(e.value).startswith(
            "symbol 'no_such_globalvar' not found in library '")

    def test_check_version(self):
        # Out-of-line modules carry a version tag; mismatches must be rejected.
        import _cffi_backend
        e = raises(ImportError, _cffi_backend.FFI,
                   "foobar", _version=0x2594)
        assert str(e.value).startswith(
            "cffi out-of-line Python module 'foobar' has unknown version")

    def test_selfref(self):
        # based on cffi issue #429
        self.fix_path()
        from re_python_pysrc import ffi
        ffi.new("selfref_ptr_t")

    def test_dlopen_handle(self):
        # ffi.dlopen() can also accept a raw void* handle from dlopen(3).
        import _cffi_backend, sys
        self.fix_path()
        from re_python_pysrc import ffi
        if self.dl_libpath is None:
            py.test.skip("uses 'dl' explicitly")
        lib1 = ffi.dlopen(self.dl_libpath)
        handle = lib1.dlopen(self.extmod.encode(sys.getfilesystemencoding()),
                             _cffi_backend.RTLD_LAZY)
        assert ffi.typeof(handle) == ffi.typeof("void *")
        assert handle
        lib = ffi.dlopen(handle)
        assert lib.add42(-10) == 32
        assert type(lib.add42) is _cffi_backend.FFI.CData
        err = lib1.dlclose(handle)
        assert err == 0
| 35.624088
| 87
| 0.595431
|
import py
import sys, shutil, os
from rpython.tool.udir import udir
from pypy.interpreter.gateway import interp2app
from pypy.module._cffi_backend.newtype import _clean_cache
# True when running on Windows; idiomatic direct boolean assignment
# instead of the four-line if/else.
WIN32 = sys.platform == 'win32'
class AppTestRecompilerPython:
spaceconfig = dict(usemodules=['_cffi_backend'])
def setup_class(cls):
try:
from cffi import FFI from cffi import recompiler from cffi import ffiplatform
except ImportError:
py.test.skip("system cffi module not found or older than 1.0.0")
space = cls.space
SRC = """
#define FOOBAR (-42)
static const int FOOBAZ = -43;
#define BIGPOS 420000000000L
#define BIGNEG -420000000000L
int add42(int x) { return x + 42; }
int globalvar42 = 1234;
const int globalconst42 = 4321;
const char *const globalconsthello = "hello";
struct foo_s;
typedef struct bar_s { int x; signed char a[]; } bar_t;
enum foo_e { AA, BB, CC };
void init_test_re_python(void) { } /* windows hack */
void PyInit__test_re_python(void) { } /* windows hack */
"""
tmpdir = udir.join('test_re_python')
tmpdir.ensure(dir=1)
c_file = tmpdir.join('_test_re_python.c')
c_file.write(SRC)
ext = ffiplatform.get_extension(str(c_file), '_test_re_python',
export_symbols=['add42', 'globalvar42',
'globalconst42', 'globalconsthello'])
outputfilename = ffiplatform.compile(str(tmpdir), ext)
cls.w_extmod = space.wrap(outputfilename)
if WIN32:
unicode_name = u'load\u03betest.dll'
else:
unicode_name = u'load_caf\xe9' + os.path.splitext(outputfilename)[1]
try:
unicode_name.encode(sys.getfilesystemencoding())
except UnicodeEncodeError:
unicode_name = None if unicode_name is not None:
outputfileUname = os.path.join(unicode(udir), unicode_name)
shutil.copyfile(outputfilename, outputfileUname)
cls.w_extmodU = space.wrap(outputfileUname)
ffi = FFI()
ffi.cdef("""
#define FOOBAR -42
static const int FOOBAZ = -43;
#define BIGPOS 420000000000L
#define BIGNEG -420000000000L
int add42(int);
int globalvar42;
const int globalconst42;
const char *const globalconsthello = "hello";
int no_such_function(int);
int no_such_globalvar;
struct foo_s;
typedef struct bar_s { int x; signed char a[]; } bar_t;
enum foo_e { AA, BB, CC };
typedef struct selfref { struct selfref *next; } *selfref_ptr_t;
void *dlopen(const char *filename, int flags);
int dlclose(void *handle);
""")
ffi.set_source('re_python_pysrc', None)
ffi.emit_python_code(str(tmpdir.join('re_python_pysrc.py')))
sub_ffi = FFI()
sub_ffi.cdef("static const int k2 = 121212;")
sub_ffi.include(ffi)
assert 'macro FOOBAR' in ffi._parser._declarations
assert 'macro FOOBAZ' in ffi._parser._declarations
sub_ffi.set_source('re_py_subsrc', None)
sub_ffi.emit_python_code(str(tmpdir.join('re_py_subsrc.py')))
cls.w_fix_path = space.appexec([space.wrap(str(tmpdir))], """(path):
def fix_path(ignored=None):
import _cffi_backend # force it to be initialized
import sys
if path not in sys.path:
sys.path.insert(0, path)
return fix_path
""")
cls.w_dl_libpath = space.w_None
if sys.platform != 'win32':
import ctypes.util
cls.w_dl_libpath = space.wrap(ctypes.util.find_library('dl'))
def teardown_method(self, meth):
self.space.appexec([], """():
import sys
for name in ['re_py_subsrc', 're_python_pysrc']:
if name in sys.modules:
del sys.modules[name]
""")
_clean_cache(self.space)
def test_constant_1(self):
self.fix_path()
from re_python_pysrc import ffi
assert ffi.integer_const('FOOBAR') == -42
assert ffi.integer_const('FOOBAZ') == -43
def test_large_constant(self):
self.fix_path()
from re_python_pysrc import ffi
assert ffi.integer_const('BIGPOS') == 420000000000
assert ffi.integer_const('BIGNEG') == -420000000000
def test_function(self):
import _cffi_backend
self.fix_path()
from re_python_pysrc import ffi
lib = ffi.dlopen(self.extmod)
assert lib.add42(-10) == 32
assert type(lib.add42) is _cffi_backend.FFI.CData
def test_dlopen_unicode(self):
if not getattr(self, 'extmodU', None):
skip("no unicode file name")
import _cffi_backend
self.fix_path()
from re_python_pysrc import ffi
lib = ffi.dlopen(self.extmodU)
assert lib.add42(-10) == 32
def test_dlclose(self):
import _cffi_backend
self.fix_path()
from re_python_pysrc import ffi
lib = ffi.dlopen(self.extmod)
ffi.dlclose(lib)
e = raises(ffi.error, getattr, lib, 'add42')
assert str(e.value) == (
"library '%s' has been closed" % (self.extmod,))
ffi.dlclose(lib)
def test_constant_via_lib(self):
self.fix_path()
from re_python_pysrc import ffi
lib = ffi.dlopen(self.extmod)
assert lib.FOOBAR == -42
assert lib.FOOBAZ == -43
def test_opaque_struct(self):
self.fix_path()
from re_python_pysrc import ffi
ffi.cast("struct foo_s *", 0)
raises(TypeError, ffi.new, "struct foo_s *")
def test_nonopaque_struct(self):
self.fix_path()
from re_python_pysrc import ffi
for p in [ffi.new("struct bar_s *", [5, b"foobar"]),
ffi.new("bar_t *", [5, b"foobar"])]:
assert p.x == 5
assert p.a[0] == ord('f')
assert p.a[5] == ord('r')
def test_enum(self):
self.fix_path()
from re_python_pysrc import ffi
assert ffi.integer_const("BB") == 1
e = ffi.cast("enum foo_e", 2)
assert ffi.string(e) == "CC"
def test_include_1(self):
self.fix_path()
from re_py_subsrc import ffi
assert ffi.integer_const('FOOBAR') == -42
assert ffi.integer_const('FOOBAZ') == -43
assert ffi.integer_const('k2') == 121212
lib = ffi.dlopen(self.extmod) assert lib.FOOBAR == -42
assert lib.FOOBAZ == -43
assert lib.k2 == 121212
p = ffi.new("bar_t *", [5, b"foobar"])
assert p.a[4] == ord('a')
def test_global_var(self):
self.fix_path()
from re_python_pysrc import ffi
lib = ffi.dlopen(self.extmod)
assert lib.globalvar42 == 1234
p = ffi.addressof(lib, 'globalvar42')
lib.globalvar42 += 5
assert p[0] == 1239
p[0] -= 1
assert lib.globalvar42 == 1238
def test_global_const_int(self):
self.fix_path()
from re_python_pysrc import ffi
lib = ffi.dlopen(self.extmod)
assert lib.globalconst42 == 4321
raises(AttributeError, ffi.addressof, lib, 'globalconst42')
def test_global_const_nonint(self):
self.fix_path()
from re_python_pysrc import ffi
lib = ffi.dlopen(self.extmod)
assert ffi.string(lib.globalconsthello, 8) == "hello"
raises(AttributeError, ffi.addressof, lib, 'globalconsthello')
def test_rtld_constants(self):
self.fix_path()
from re_python_pysrc import ffi
ffi.RTLD_NOW ffi.RTLD_LAZY
ffi.RTLD_GLOBAL
def test_no_such_function_or_global_var(self):
self.fix_path()
from re_python_pysrc import ffi
lib = ffi.dlopen(self.extmod)
e = raises(ffi.error, getattr, lib, 'no_such_function')
assert str(e.value).startswith(
"symbol 'no_such_function' not found in library '")
e = raises(ffi.error, getattr, lib, 'no_such_globalvar')
assert str(e.value).startswith(
"symbol 'no_such_globalvar' not found in library '")
def test_check_version(self):
import _cffi_backend
e = raises(ImportError, _cffi_backend.FFI,
"foobar", _version=0x2594)
assert str(e.value).startswith(
"cffi out-of-line Python module 'foobar' has unknown version")
def test_selfref(self):
self.fix_path()
from re_python_pysrc import ffi
ffi.new("selfref_ptr_t")
def test_dlopen_handle(self):
import _cffi_backend, sys
self.fix_path()
from re_python_pysrc import ffi
if self.dl_libpath is None:
py.test.skip("uses 'dl' explicitly")
lib1 = ffi.dlopen(self.dl_libpath)
handle = lib1.dlopen(self.extmod.encode(sys.getfilesystemencoding()),
_cffi_backend.RTLD_LAZY)
assert ffi.typeof(handle) == ffi.typeof("void *")
assert handle
lib = ffi.dlopen(handle)
assert lib.add42(-10) == 32
assert type(lib.add42) is _cffi_backend.FFI.CData
err = lib1.dlclose(handle)
assert err == 0
| true
| true
|
f70913690bbeee6a5cd7e42289093722d0f892d5
| 3,080
|
py
|
Python
|
app/app/settings.py
|
gonzales-juan/recipe-app-api
|
6cf46737cd3b63e845ad3e5ade3c6e91ab156542
|
[
"MIT"
] | null | null | null |
app/app/settings.py
|
gonzales-juan/recipe-app-api
|
6cf46737cd3b63e845ad3e5ade3c6e91ab156542
|
[
"MIT"
] | null | null | null |
app/app/settings.py
|
gonzales-juan/recipe-app-api
|
6cf46737cd3b63e845ad3e5ade3c6e91ab156542
|
[
"MIT"
] | null | null | null |
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 2.1.15.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'h26q@cw!pa#7*jjx$sda&0*c0&u&alf4^a)hwoh4j+6)j5y*&_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| 25.454545
| 91
| 0.694156
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'h26q@cw!pa#7*jjx$sda&0*c0&u&alf4^a)hwoh4j+6)j5y*&_'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| true
| true
|
f70913d332a19eeea3c8a155688731e9ab4da5b0
| 1,468
|
py
|
Python
|
tests/test_models/test_engine/test_file_storage.py
|
arvicrin/AirBnB_clone
|
b851c48fabb85e942f57f9afdeca043e104bb6d1
|
[
"MIT"
] | null | null | null |
tests/test_models/test_engine/test_file_storage.py
|
arvicrin/AirBnB_clone
|
b851c48fabb85e942f57f9afdeca043e104bb6d1
|
[
"MIT"
] | null | null | null |
tests/test_models/test_engine/test_file_storage.py
|
arvicrin/AirBnB_clone
|
b851c48fabb85e942f57f9afdeca043e104bb6d1
|
[
"MIT"
] | 1
|
2020-02-27T18:42:46.000Z
|
2020-02-27T18:42:46.000Z
|
#!/usr/bin/python3
"""
Defines a class TestFileStorage.
"""
from models.engine.file_storage import FileStorage
import unittest
import models
import os
class TestFileStorage(unittest.TestCase):
"""Represent a TestFileStorage."""
def setUp(self):
"""SetUp method"""
self.file_storage = FileStorage()
def TearDown(self):
"""TearDown method."""
del self.file_storage
def test_docstring(self):
"""Test docstring for the module and the class"""
self.assertIsNotNone(
models.engine.file_storage.__doc__,
"No docstring in the module"
)
self.assertIsNotNone(FileStorage.__doc__, "No docstring in the class")
def test_permissions_file(self):
"""Test File file_storage.py permissions"""
test_file = os.access("models/engine/file_storage.py", os.R_OK)
self.assertTrue(test_file, "Read permissions")
test_file = os.access("models/engine/file_storage.py", os.W_OK)
self.assertTrue(test_file, "Write Permissions")
test_file = os.access("models/engine/file_storage.py", os.X_OK)
self.assertTrue(test_file, "Execute permissions")
def test_type_object(self):
"""Test type object of FileStorage"""
self.assertEqual(
str(type(self.file_storage)),
"<class 'models.engine.file_storage.FileStorage'>")
self.assertIsInstance(self.file_storage, FileStorage)
| 27.698113
| 78
| 0.658719
|
from models.engine.file_storage import FileStorage
import unittest
import models
import os
class TestFileStorage(unittest.TestCase):
def setUp(self):
self.file_storage = FileStorage()
def TearDown(self):
del self.file_storage
def test_docstring(self):
self.assertIsNotNone(
models.engine.file_storage.__doc__,
"No docstring in the module"
)
self.assertIsNotNone(FileStorage.__doc__, "No docstring in the class")
def test_permissions_file(self):
test_file = os.access("models/engine/file_storage.py", os.R_OK)
self.assertTrue(test_file, "Read permissions")
test_file = os.access("models/engine/file_storage.py", os.W_OK)
self.assertTrue(test_file, "Write Permissions")
test_file = os.access("models/engine/file_storage.py", os.X_OK)
self.assertTrue(test_file, "Execute permissions")
def test_type_object(self):
self.assertEqual(
str(type(self.file_storage)),
"<class 'models.engine.file_storage.FileStorage'>")
self.assertIsInstance(self.file_storage, FileStorage)
| true
| true
|
f7091472584e11aa0109b97212c5ff1f162ae32a
| 15,488
|
py
|
Python
|
Nested_Adversarial_Networks/NAN_rework/modeleag.py
|
ZhaoJ9014/Multi-Human-Parsing-MHP-
|
a24eae67e9b4e730c75bcd8aec3e2ed06cb4b046
|
[
"MIT"
] | 481
|
2019-01-28T07:37:42.000Z
|
2022-03-30T02:23:56.000Z
|
Nested_Adversarial_Networks/NAN_rework/modeleag.py
|
ZhaoJ9014/Multi-Human-Parsing-MHP-
|
a24eae67e9b4e730c75bcd8aec3e2ed06cb4b046
|
[
"MIT"
] | 36
|
2019-02-06T15:14:27.000Z
|
2022-02-08T18:04:17.000Z
|
Nested_Adversarial_Networks/NAN_rework/modeleag.py
|
ZhaoJ9014/Multi-Human-Parsing-MHP-
|
a24eae67e9b4e730c75bcd8aec3e2ed06cb4b046
|
[
"MIT"
] | 70
|
2019-01-29T05:42:06.000Z
|
2022-03-26T04:59:16.000Z
|
# Rework of model.py
# https://github.com/ddddwee1/sul
# This wrap-up is targeted for better touching low-level implementations
import layers2 as L
import tensorflow as tf
config = tf.ConfigProto()
config.gpu_options.allow_growth=True
tf.enable_eager_execution(config=config)
import numpy as np
import os
import random
import time
PARAM_RELU = 0
PARAM_LRELU = 1
PARAM_ELU = 2
PARAM_TANH = 3
PARAM_MFM = 4
PARAM_MFM_FC = 5
PARAM_SIGMOID = 6
######## util functions ###########
def accuracy(pred,y,name='acc', one_hot=True):
with tf.variable_scope(name):
if one_hot:
correct = tf.equal(tf.cast(tf.argmax(pred,-1),tf.int64),tf.cast(tf.argmax(y,-1),tf.int64))
else:
correct = tf.equal(tf.cast(tf.argmax(pred,-1),tf.int64),tf.cast(y,tf.int64))
acc = tf.reduce_mean(tf.cast(correct,tf.float32))
return acc
##########################
# ETA class. I want to see the ETA. It's too boring to wait here.
class ETA():
def __init__(self,max_value):
self.start_time = time.time()
self.max_value = max_value
self.current = 0
def start(self):
self.start_time = time.time()
self.current = 0
def sec2hms(self,sec):
hm = sec//60
s = sec%60
h = hm//60
m = hm%60
return h,m,s
def get_ETA(self,current,is_string=True):
self.current = current
time_div = time.time() - self.start_time
time_remain = time_div * float(self.max_value - self.current) / float(self.current + 1)
h,m,s = self.sec2hms(int(time_remain))
if is_string:
return '%d:%d:%d'%(h,m,s)
else:
return h,m,s
########### universal model class ##########
class Model(tf.contrib.checkpoint.Checkpointable):
def __init__(self,*args,**kwargs):
self.initialized = False
self.variables = []
self.initialize(*args,**kwargs)
def initialize(self,*args,**kwargs):
pass
def _gather_variables(self):
self.variables = []
atrs = dir(self)
for i in atrs:
if i[0] == '_':
continue
obj = getattr(self, i)
self.variables += self._gather_variables_recursive(obj)
def _gather_variables_recursive(self, obj):
result = []
if isinstance(obj, list) or isinstance(obj, tuple):
for sub_obj in obj:
result += self._gather_variables_recursive(sub_obj)
elif isinstance(obj, Model) or isinstance(obj, L.Layer):
result += obj.variables
return result
def get_variables(self, layers=None):
if layers is None:
return self.variables
else:
res = []
for l in layers:
res += l.variables
return res
def set_bn_training(self, is_training):
atrs = dir(self)
# print(atrs)
for i in atrs:
if i[0] == '_':
continue
obj = getattr(self, i)
self._set_bn_training_recursive(obj, is_training)
def _set_bn_training_recursive(self, obj, is_training):
if isinstance(obj, list):
for sub_obj in obj:
self._set_bn_training_recursive(sub_obj, is_training)
if isinstance(obj, Model) and obj!=self:
obj.set_bn_training(is_training)
if isinstance(obj, L.batch_norm):
obj.is_training = is_training
def set_bn_epsilon(self, epsilon):
atrs = dir(self)
# print(atrs)
for i in atrs:
if i[0] == '_':
continue
obj = getattr(self, i)
self._set_bn_epsilon_recursive(obj, epsilon)
def _set_bn_epsilon_recursive(self, obj, epsilon):
if isinstance(obj, list):
for sub_obj in obj:
self._set_bn_training_recursive(sub_obj, epsilon)
if isinstance(obj, Model) and obj!=self:
obj.set_bn_training(epsilon)
if isinstance(obj, L.batch_norm):
obj.epsilon = epsilon
def __call__(self, x, *args, **kwargs):
x = tf.convert_to_tensor(x, preferred_dtype=tf.float32)
res = self.forward(x, *args, **kwargs)
if not self.initialized:
self._gather_variables()
self.initialized = True
return res
########### universal layer classes ##########
class ConvLayer(Model):
def initialize(self, size, outchn, dilation_rate=1, stride=1,pad='SAME',activation=-1,batch_norm=False, usebias=True,kernel_data=None,bias_data=None,weight_norm=False):
self.conv = L.conv2D(size,outchn,stride=stride,pad=pad,usebias=usebias,kernel_data=kernel_data,bias_data=bias_data,dilation_rate=dilation_rate,weight_norm=weight_norm)
self.batch_norm = batch_norm
self.activation_ = activation
if batch_norm:
self.bn = L.batch_norm()
if activation!=-1:
self.activation = L.activation(activation)
def forward(self,x):
x = self.conv(x)
if self.batch_norm:
x = self.bn(x)
if self.activation_!=-1:
x = self.activation(x)
return x
class ConvLayer1D(Model):
def initialize(self, size, outchn, dilation_rate=1, stride=1,pad='SAME',activation=-1,batch_norm=False, usebias=True,kernel_data=None,bias_data=None,weight_norm=False):
self.conv = L.conv1D(size,outchn,stride=stride,pad=pad,usebias=usebias,kernel_data=kernel_data,bias_data=bias_data,dilation_rate=dilation_rate,weight_norm=weight_norm)
self.batch_norm = batch_norm
self.activation_ = activation
if batch_norm:
self.bn = L.batch_norm()
if activation!=-1:
self.activation = L.activation(activation)
def forward(self,x):
x = self.conv(x)
if self.batch_norm:
x = self.bn(x)
if self.activation_!=-1:
x = self.activation(x)
return x
class ConvLayer3D(Model):
def initialize(self, size, outchn, dilation_rate=1, stride=1,pad='SAME',activation=-1,batch_norm=False, usebias=True,kernel_data=None,bias_data=None,weight_norm=False):
self.conv = L.conv3D(size,outchn,stride=stride,pad=pad,usebias=usebias,kernel_data=kernel_data,bias_data=bias_data,dilation_rate=dilation_rate,weight_norm=weight_norm)
self.batch_norm = batch_norm
self.activation_ = activation
if batch_norm:
self.bn = L.batch_norm()
if activation!=-1:
self.activation = L.activation(activation)
def forward(self,x):
x = self.conv(x)
if self.batch_norm:
x = self.bn(x)
if self.activation_!=-1:
x = self.activation(x)
return x
class DeconvLayer(Model):
def initialize(self, size, outchn, activation=-1, stride=1, usebias=True, pad='SAME', batch_norm=False):
self.deconv = L.deconv2D(size,outchn,stride=stride,usebias=usebias,pad=pad, name=None)
self.batch_norm = batch_norm
self.activation_ = activation
if batch_norm:
self.bn = L.batch_norm()
if activation!=-1:
self.activation = L.activation(activation)
def forward(self,x):
x = self.deconv(x)
if self.batch_norm:
x = self.bn(x)
if self.activation_!=-1:
x = self.activation(x)
return x
class DeconvLayer3D(Model):
def initialize(self, size, outchn, activation=-1, stride=1, usebias=True, pad='SAME', batch_norm=False):
self.deconv = L.deconv3D(size,outchn,stride=stride,usebias=usebias,pad=pad, name=None)
self.batch_norm = batch_norm
self.activation_ = activation
if batch_norm:
self.bn = L.batch_norm()
if activation!=-1:
self.activation = L.activation(activation)
def forward(self,x):
x = self.deconv(x)
if self.batch_norm:
x = self.bn(x)
if self.activation_!=-1:
x = self.activation(x)
return x
class Dense(Model):
def initialize(self, outsize, usebias=True, batch_norm=False, activation=-1):
self.fclayer = L.fcLayer(outsize,usebias=usebias)
self.batch_norm = batch_norm
self.activation_ = activation
if batch_norm:
self.bn = L.batch_norm()
if activation!=-1:
self.activation = L.activation(activation)
def forward(self,x):
x = self.fclayer(x)
if self.batch_norm:
x = self.bn(x)
if self.activation_!=-1:
x = self.activation(x)
return x
class GraphConvLayer(Model):
def initialize(self, outsize, adj_mtx=None, adj_fn=None, usebias=True, activation=-1, batch_norm=False):
self.GCL = L.graphConvLayer(outsize, adj_mtx=adj_mtx, adj_fn=adj_fn, usebias=usebias)
self.batch_norm = batch_norm
self.activation_ = activation
if batch_norm:
self.bn = L.batch_norm()
if activation!=-1:
self.activation = L.activation(activation)
def forward(self, x):
x = self.GCL(x)
if self.batch_norm:
x = self.bn(x)
if self.activation_!=-1:
x = self.activation(x)
return x
flatten = L.flatten()
maxPool = L.maxpoolLayer
avgPool = L.avgpoolLayer
########### higher wrapped block ##########
class ResBlock(Model):
def initialize(self, outchn, stride=1, ratio=4, activation=PARAM_RELU):
self.outchn = outchn
# self.stride = stride
self.activ = L.activation(activation)
self.bn = L.batch_norm()
self.l1 = ConvLayer(1, outchn//ratio, activation=PARAM_RELU, batch_norm=True)
self.l2 = ConvLayer(3, outchn//ratio, activation=PARAM_RELU, batch_norm=True, stride=stride)
self.l3 = ConvLayer(1, outchn)
self.shortcut_conv = ConvLayer(1, outchn, activation=PARAM_RELU, stride=stride)
self.shortcut_pool = L.maxpoolLayer(stride)
def forward(self, x):
inshape = x.get_shape().as_list()[-1]
if inshape==self.outchn:
short = self.shortcut_pool(x)
else:
short = self.shortcut_conv(x)
branch = self.bn(x)
branch = self.activ(branch)
branch = self.l1(branch)
branch = self.l2(branch)
branch = self.l3(branch)
return branch + short
class Sequential(Model):
def initialize(self, modules):
self.modules = modules
def forward(self, x):
for m in self.modules:
x = m(x)
return x
########### saver ##########
class Saver():
def __init__(self, model, optim=None):
self.mod = model
self.obj = tf.contrib.checkpoint.Checkpointable()
self.obj.m = self.mod
self.optim = optim
if optim is None:
self.ckpt = tf.train.Checkpoint(model=self.obj, optimizer_step=tf.train.get_or_create_global_step())
else:
self.ckpt = tf.train.Checkpoint(optimizer=optim, model=self.obj, optimizer_step=tf.train.get_or_create_global_step())
def save(self, path):
print('Saving model to path:',path)
head, tail = os.path.split(path)
if not os.path.exists(head):
os.makedirs(head)
self.ckpt.save(path)
print('Model saved to path:',path)
def restore(self, path, ptype='folder'):
print('Load from:', path)
try:
if ptype=='folder':
last_ckpt = tf.train.latest_checkpoint(path)
print('Checkpoint:', last_ckpt)
if last_ckpt is None:
print('No model found in checkpoint.')
print('Model will auto-initialize after first iteration.')
self.ckpt.restore(last_ckpt)
else:
self.ckpt.restore(path)
print('Finish loading.')
except Exception as e:
print('Model restore failed, Exception:',e)
print('Model will auto-initialize after first iteration.')
######### Gradient accumulator #########
class GradAccumulator():
def __init__(self):
self.steps = 0
self.grads = []
def accumulate(self, grads):
if len(grads) == 0:
self.grads = grads
else:
for old_g, new_g in zip(self.grads, grads):
old_g.assign_add(new_g)
self.steps += 1
def get_gradient(self):
res = [i/self.steps for i in self.grads]
self.grads = []
self.steps = 0
return res
def get_step(self):
return self.steps
######### Data Reader Template (serial) ##########
class DataReaderSerial():
def __init__(self, one_hot=None):
self.data_pos = 0
self.val_pos = 0
self.data = []
self.val = []
self.one_hot = False
if one_hot is not None:
self.one_hot = True
self.eye = np.eye(one_hot)
self.load_data()
def get_next_batch(self,BSIZE):
if self.data_pos + BSIZE > len(self.data):
random.shuffle(self.data)
self.data_pos = 0
batch = self.data[self.data_pos : self.data_pos+BSIZE]
x = [i[0] for i in batch]
y = [i[1] for i in batch]
if self.one_hot:
y = self.eye[np.array(y)]
self.data_pos += BSIZE
return x,y
def get_val_next_batch(self, BSIZE):
if self.val_pos + BSIZE >= len(self.val):
batch = self.val[self.val_pos:]
random.shuffle(self.val)
self.val_pos = 0
is_end = True
else:
batch = self.data[self.data_pos : self.data_pos+BSIZE]
is_end = False
x = [i[0] for i in batch]
y = [i[1] for i in batch]
if self.one_hot:
y = self.eye[np.array(y)]
self.val_pos += BSIZE
return x,y, is_end
def get_train_iter(self, BSIZE):
return len(self.data)//BSIZE
def get_val_iter(self, BSIZE):
return len(self.val)//BSIZE + 1
class ListReader():
def __init__(self, one_hot=None):
self.data_pos = 0
self.val_pos = 0
self.data = []
self.val = []
self.one_hot = False
if one_hot is not None:
self.one_hot = True
self.eye = np.eye(one_hot)
self.load_data()
def get_next_batch(self,BSIZE):
if self.data_pos + BSIZE > len(self.data):
random.shuffle(self.data)
self.data_pos = 0
batch = self.data[self.data_pos : self.data_pos+BSIZE]
x = [i[0] for i in batch]
y = [i[1] for i in batch]
if self.one_hot:
y = self.eye[np.array(y)]
self.data_pos += BSIZE
x = [self.process_img(i) for i in x]
return x,y
def get_val_next_batch(self, BSIZE):
if self.val_pos + BSIZE >= len(self.val):
batch = self.val[self.val_pos:]
random.shuffle(self.val)
self.val_pos = 0
is_end = True
else:
batch = self.data[self.data_pos : self.data_pos+BSIZE]
is_end = False
x = [i[0] for i in batch]
y = [i[1] for i in batch]
if self.one_hot:
y = self.eye[np.array(y)]
self.val_pos += BSIZE
x = [self.process_img(i) for i in x]
return x,y, is_end
def get_train_iter(self, BSIZE):
return len(self.data)//BSIZE
def get_val_iter(self, BSIZE):
return len(self.val)//BSIZE + 1
######### Data Reader Template (parallel) ##########
# multi-process to read data
class DataReader():
def __init__(self, data, fn, batch_size, shuffle=False, random_sample=False, processes=2, post_fn=None):
from multiprocessing import Pool
self.pool = Pool(processes)
print('Starting parallel data loader...')
self.process_fn = fn
self.data = data
self.batch_size = batch_size
self.position = batch_size
self.post_fn = post_fn
self.random_sample = random_sample
self.shuffle = shuffle
if shuffle:
random.shuffle(self.data)
self._start_p(self.data[:batch_size])
def _start_p(self, data):
self.ps = []
for i in data:
self.ps.append(self.pool.apply_async(self.process_fn, [i]))
def get_next_batch(self):
# print('call')
# fetch data
res = [i.get() for i in self.ps]
# start new pre-fetch
if self.random_sample:
batch = random.sample(self.data, self.batch_size)
else:
if self.position + self.batch_size > len(self.data):
self.position = 0
if self.shuffle:
random.shuffle(self.data)
batch = self.data[self.position:self.position+self.batch_size]
self.position += self.batch_size
self._start_p(batch)
# post_process the data
if self.post_fn is not None:
res = self.post_fn(res)
return res
######### short-cut functions #########
gradient_reverse = L.gradient_reverse
def pad(x, pad):
if isinstance(pad, list):
x = tf.pad(x, [[0,0],[pad[0],pad[1]], [pad[2],pad[3]], [0,0]])
else:
x = tf.pad(x, [[0,0],[pad,pad],[pad,pad],[0,0]])
return x
def pad3D(x, pad):
if isinstance(pad, list):
x = tf.pad(x, [[0,0],[pad[0],pad[1]], [pad[2],pad[3]], [pad[4], pad[5]], [0,0]])
else:
x = tf.pad(x, [[0,0],[pad,pad],[pad,pad],[pad,pad],[0,0]])
return x
def image_transform(x, H, out_shape=None, interpolation='NEAREST'):
# Will produce error if not specify 'output_shape' in eager mode
shape = x.get_shape().as_list()
if out_shape is None:
if len(shape)==4:
out_shape = shape[1:3]
else:
out_shape = shape[:2]
return tf.contrib.image.transform(x, H, interpolation=interpolation, output_shape=out_shape)
def zip_grad(grads, vars):
assert len(grads)==len(vars)
grads_1 = []
vars_1 = []
for i in range(len(grads)):
if not grads[i] is None:
grads_1.append(grads[i])
vars_1.append(vars[i])
assert len(grads_1)!=0
return zip(grads_1, vars_1)
| 28.057971
| 169
| 0.688985
|
import layers2 as L
import tensorflow as tf
config = tf.ConfigProto()
config.gpu_options.allow_growth=True
tf.enable_eager_execution(config=config)
import numpy as np
import os
import random
import time
PARAM_RELU = 0
PARAM_LRELU = 1
PARAM_ELU = 2
PARAM_TANH = 3
PARAM_MFM = 4
PARAM_MFM_FC = 5
PARAM_SIGMOID = 6
def accuracy(pred,y,name='acc', one_hot=True):
with tf.variable_scope(name):
if one_hot:
correct = tf.equal(tf.cast(tf.argmax(pred,-1),tf.int64),tf.cast(tf.argmax(y,-1),tf.int64))
else:
correct = tf.equal(tf.cast(tf.argmax(pred,-1),tf.int64),tf.cast(y,tf.int64))
acc = tf.reduce_mean(tf.cast(correct,tf.float32))
return acc
class ETA():
def __init__(self,max_value):
self.start_time = time.time()
self.max_value = max_value
self.current = 0
def start(self):
self.start_time = time.time()
self.current = 0
def sec2hms(self,sec):
hm = sec//60
s = sec%60
h = hm//60
m = hm%60
return h,m,s
def get_ETA(self,current,is_string=True):
self.current = current
time_div = time.time() - self.start_time
time_remain = time_div * float(self.max_value - self.current) / float(self.current + 1)
h,m,s = self.sec2hms(int(time_remain))
if is_string:
return '%d:%d:%d'%(h,m,s)
else:
return h,m,s
########### universal model class ##########
class Model(tf.contrib.checkpoint.Checkpointable):
    """Eager-mode model base class.

    Subclasses override initialize() (construct sub-layers) and forward()
    (run the computation). After the first __call__, variables are gathered
    by reflection over all public attributes, so sub-layers must be stored
    under names that do not start with '_'.
    """
    def __init__(self, *args, **kwargs):
        self.initialized = False
        self.variables = []
        self.initialize(*args, **kwargs)
    def initialize(self, *args, **kwargs):
        # Subclass hook: build sub-layers here.
        pass
    def _gather_variables(self):
        # Reflectively collect variables from every public attribute.
        self.variables = []
        atrs = dir(self)
        for i in atrs:
            if i[0] == '_':
                continue
            obj = getattr(self, i)
            self.variables += self._gather_variables_recursive(obj)
    def _gather_variables_recursive(self, obj):
        result = []
        if isinstance(obj, (list, tuple)):
            for sub_obj in obj:
                result += self._gather_variables_recursive(sub_obj)
        elif isinstance(obj, Model) or isinstance(obj, L.Layer):
            result += obj.variables
        return result
    def get_variables(self, layers=None):
        """Return all gathered variables, or only those of the given layers."""
        if layers is None:
            return self.variables
        else:
            res = []
            for l in layers:
                res += l.variables
            return res
    def set_bn_training(self, is_training):
        """Recursively set the training flag on every batch_norm layer."""
        atrs = dir(self)
        for i in atrs:
            if i[0] == '_':
                continue
            obj = getattr(self, i)
            self._set_bn_training_recursive(obj, is_training)
    def _set_bn_training_recursive(self, obj, is_training):
        # CONSISTENCY FIX: tuples are traversed like lists, matching
        # _gather_variables_recursive.
        if isinstance(obj, (list, tuple)):
            for sub_obj in obj:
                self._set_bn_training_recursive(sub_obj, is_training)
        if isinstance(obj, Model) and obj != self:
            obj.set_bn_training(is_training)
        if isinstance(obj, L.batch_norm):
            obj.is_training = is_training
    def set_bn_epsilon(self, epsilon):
        """Recursively set epsilon on every batch_norm layer."""
        atrs = dir(self)
        for i in atrs:
            if i[0] == '_':
                continue
            obj = getattr(self, i)
            self._set_bn_epsilon_recursive(obj, epsilon)
    def _set_bn_epsilon_recursive(self, obj, epsilon):
        # BUGFIX: this helper previously recursed through the *training*
        # helpers (_set_bn_training_recursive / set_bn_training), so the
        # epsilon value was written into is_training instead of epsilon.
        if isinstance(obj, (list, tuple)):
            for sub_obj in obj:
                self._set_bn_epsilon_recursive(sub_obj, epsilon)
        if isinstance(obj, Model) and obj != self:
            obj.set_bn_epsilon(epsilon)
        if isinstance(obj, L.batch_norm):
            obj.epsilon = epsilon
    def __call__(self, x, *args, **kwargs):
        x = tf.convert_to_tensor(x, preferred_dtype=tf.float32)
        res = self.forward(x, *args, **kwargs)
        # Gather variables lazily after the first forward pass, once all
        # sub-layers have actually created their weights.
        if not self.initialized:
            self._gather_variables()
            self.initialized = True
        return res
########### universal layer classes ##########
class ConvLayer(Model):
    """2-D convolution block: conv -> optional batch-norm -> optional activation."""
    def initialize(self, size, outchn, dilation_rate=1, stride=1,pad='SAME',activation=-1,batch_norm=False, usebias=True,kernel_data=None,bias_data=None,weight_norm=False):
        # Attribute names matter: Model._gather_variables scans public attrs.
        self.batch_norm = batch_norm
        self.activation_ = activation
        self.conv = L.conv2D(size, outchn, stride=stride, pad=pad, usebias=usebias, kernel_data=kernel_data, bias_data=bias_data, dilation_rate=dilation_rate, weight_norm=weight_norm)
        if batch_norm:
            self.bn = L.batch_norm()
        if activation != -1:
            self.activation = L.activation(activation)
    def forward(self, x):
        out = self.conv(x)
        if self.batch_norm:
            out = self.bn(out)
        return self.activation(out) if self.activation_ != -1 else out
class ConvLayer1D(Model):
    """1-D convolution block: conv -> optional batch-norm -> optional activation."""
    def initialize(self, size, outchn, dilation_rate=1, stride=1,pad='SAME',activation=-1,batch_norm=False, usebias=True,kernel_data=None,bias_data=None,weight_norm=False):
        # Attribute names matter: Model._gather_variables scans public attrs.
        self.batch_norm = batch_norm
        self.activation_ = activation
        self.conv = L.conv1D(size, outchn, stride=stride, pad=pad, usebias=usebias, kernel_data=kernel_data, bias_data=bias_data, dilation_rate=dilation_rate, weight_norm=weight_norm)
        if batch_norm:
            self.bn = L.batch_norm()
        if activation != -1:
            self.activation = L.activation(activation)
    def forward(self, x):
        out = self.conv(x)
        if self.batch_norm:
            out = self.bn(out)
        return self.activation(out) if self.activation_ != -1 else out
class ConvLayer3D(Model):
    """3-D convolution block: conv -> optional batch-norm -> optional activation."""
    def initialize(self, size, outchn, dilation_rate=1, stride=1,pad='SAME',activation=-1,batch_norm=False, usebias=True,kernel_data=None,bias_data=None,weight_norm=False):
        # Attribute names matter: Model._gather_variables scans public attrs.
        self.batch_norm = batch_norm
        self.activation_ = activation
        self.conv = L.conv3D(size, outchn, stride=stride, pad=pad, usebias=usebias, kernel_data=kernel_data, bias_data=bias_data, dilation_rate=dilation_rate, weight_norm=weight_norm)
        if batch_norm:
            self.bn = L.batch_norm()
        if activation != -1:
            self.activation = L.activation(activation)
    def forward(self, x):
        out = self.conv(x)
        if self.batch_norm:
            out = self.bn(out)
        return self.activation(out) if self.activation_ != -1 else out
class DeconvLayer(Model):
    """2-D transposed-convolution block: deconv -> optional batch-norm -> optional activation."""
    def initialize(self, size, outchn, activation=-1, stride=1, usebias=True, pad='SAME', batch_norm=False):
        # Attribute names matter: Model._gather_variables scans public attrs.
        self.batch_norm = batch_norm
        self.activation_ = activation
        self.deconv = L.deconv2D(size, outchn, stride=stride, usebias=usebias, pad=pad, name=None)
        if batch_norm:
            self.bn = L.batch_norm()
        if activation != -1:
            self.activation = L.activation(activation)
    def forward(self, x):
        out = self.deconv(x)
        if self.batch_norm:
            out = self.bn(out)
        return self.activation(out) if self.activation_ != -1 else out
class DeconvLayer3D(Model):
    """3-D transposed-convolution block: deconv -> optional batch-norm -> optional activation."""
    def initialize(self, size, outchn, activation=-1, stride=1, usebias=True, pad='SAME', batch_norm=False):
        # Attribute names matter: Model._gather_variables scans public attrs.
        self.batch_norm = batch_norm
        self.activation_ = activation
        self.deconv = L.deconv3D(size, outchn, stride=stride, usebias=usebias, pad=pad, name=None)
        if batch_norm:
            self.bn = L.batch_norm()
        if activation != -1:
            self.activation = L.activation(activation)
    def forward(self, x):
        out = self.deconv(x)
        if self.batch_norm:
            out = self.bn(out)
        return self.activation(out) if self.activation_ != -1 else out
class Dense(Model):
    """Fully-connected block: linear -> optional batch-norm -> optional activation."""
    def initialize(self, outsize, usebias=True, batch_norm=False, activation=-1):
        # Attribute names matter: Model._gather_variables scans public attrs.
        self.batch_norm = batch_norm
        self.activation_ = activation
        self.fclayer = L.fcLayer(outsize, usebias=usebias)
        if batch_norm:
            self.bn = L.batch_norm()
        if activation != -1:
            self.activation = L.activation(activation)
    def forward(self, x):
        out = self.fclayer(x)
        if self.batch_norm:
            out = self.bn(out)
        return self.activation(out) if self.activation_ != -1 else out
class GraphConvLayer(Model):
    """Graph-convolution block: GCN layer -> optional batch-norm -> optional activation."""
    def initialize(self, outsize, adj_mtx=None, adj_fn=None, usebias=True, activation=-1, batch_norm=False):
        # Attribute names matter: Model._gather_variables scans public attrs.
        self.batch_norm = batch_norm
        self.activation_ = activation
        self.GCL = L.graphConvLayer(outsize, adj_mtx=adj_mtx, adj_fn=adj_fn, usebias=usebias)
        if batch_norm:
            self.bn = L.batch_norm()
        if activation != -1:
            self.activation = L.activation(activation)
    def forward(self, x):
        out = self.GCL(x)
        if self.batch_norm:
            out = self.bn(out)
        return self.activation(out) if self.activation_ != -1 else out
flatten = L.flatten()
maxPool = L.maxpoolLayer
avgPool = L.avgpoolLayer
########### higher wrapped block ##########
class ResBlock(Model):
    """Pre-activation bottleneck residual block (BN -> act -> 1x1 -> 3x3 -> 1x1).

    The shortcut is a max-pool when channel counts already match, otherwise
    a strided 1x1 convolution projects the input to `outchn` channels.
    """
    def initialize(self, outchn, stride=1, ratio=4, activation=PARAM_RELU):
        self.outchn = outchn
        bottleneck = outchn // ratio
        self.activ = L.activation(activation)
        self.bn = L.batch_norm()
        self.l1 = ConvLayer(1, bottleneck, activation=PARAM_RELU, batch_norm=True)
        self.l2 = ConvLayer(3, bottleneck, activation=PARAM_RELU, batch_norm=True, stride=stride)
        self.l3 = ConvLayer(1, outchn)
        self.shortcut_conv = ConvLayer(1, outchn, activation=PARAM_RELU, stride=stride)
        self.shortcut_pool = L.maxpoolLayer(stride)
    def forward(self, x):
        in_channels = x.get_shape().as_list()[-1]
        # Identity-style shortcut only when the channel count is unchanged.
        short = self.shortcut_pool(x) if in_channels == self.outchn else self.shortcut_conv(x)
        branch = self.l3(self.l2(self.l1(self.activ(self.bn(x)))))
        return branch + short
class Sequential(Model):
    """Chains a list of callables, feeding each one's output to the next."""
    def initialize(self, modules):
        self.modules = modules
    def forward(self, x):
        out = x
        for layer in self.modules:
            out = layer(out)
        return out
########### saver ##########
class Saver():
    """Thin wrapper around tf.train.Checkpoint for saving/restoring a Model.

    The model is attached to an intermediate Checkpointable object so the
    checkpoint's variable names are stable regardless of the model class.
    """
    def __init__(self, model, optim=None):
        # model: a Model (Checkpointable) instance; optim: optional optimizer
        # whose slots should be checkpointed alongside the weights.
        self.mod = model
        self.obj = tf.contrib.checkpoint.Checkpointable()
        self.obj.m = self.mod
        self.optim = optim
        if optim is None:
            self.ckpt = tf.train.Checkpoint(model=self.obj, optimizer_step=tf.train.get_or_create_global_step())
        else:
            self.ckpt = tf.train.Checkpoint(optimizer=optim, model=self.obj, optimizer_step=tf.train.get_or_create_global_step())
    def save(self, path):
        """Save a checkpoint to `path`, creating parent directories as needed."""
        print('Saving model to path:',path)
        head, tail = os.path.split(path)
        if not os.path.exists(head):
            os.makedirs(head)
        self.ckpt.save(path)
        print('Model saved to path:',path)
    def restore(self, path, ptype='folder'):
        """Restore from `path`.

        ptype='folder' resolves the latest checkpoint inside a directory;
        any other value treats `path` as a checkpoint prefix directly.
        Failures are reported but swallowed so training can start fresh.
        """
        print('Load from:', path)
        try:
            if ptype=='folder':
                last_ckpt = tf.train.latest_checkpoint(path)
                print('Checkpoint:', last_ckpt)
                if last_ckpt is None:
                    print('No model found in checkpoint.')
                    print('Model will auto-initialize after first iteration.')
                self.ckpt.restore(last_ckpt)
            else:
                self.ckpt.restore(path)
            print('Finish loading.')
        except Exception as e:
            # Deliberate best-effort: a missing/corrupt checkpoint must not
            # abort training; variables initialize on the first forward pass.
            print('Model restore failed, Exception:',e)
            print('Model will auto-initialize after first iteration.')
######### Gradient accumulator #########
class GradAccumulator():
    """Accumulates gradients over several steps and returns their mean.

    The first accumulate() stores the gradient tensors; subsequent calls
    add into them in place via assign_add. get_gradient() divides by the
    number of accumulated steps and resets the accumulator.
    """
    def __init__(self):
        self.steps = 0
        self.grads = []
    def accumulate(self, grads):
        """Add one step's gradients into the running accumulator."""
        # BUGFIX: the condition used to test len(grads) (the incoming list)
        # instead of len(self.grads), so the first batch of gradients was
        # never stored and every later zip() iterated over an empty list.
        if len(self.grads) == 0:
            self.grads = grads
        else:
            for old_g, new_g in zip(self.grads, grads):
                old_g.assign_add(new_g)
        self.steps += 1
    def get_gradient(self):
        """Return the per-step mean gradients and reset the accumulator."""
        res = [i/self.steps for i in self.grads]
        self.grads = []
        self.steps = 0
        return res
    def get_step(self):
        """Number of steps accumulated since the last get_gradient()."""
        return self.steps
######### Data Reader Template (serial) ##########
class DataReaderSerial():
    """Serial (single-process) data reader template.

    Subclasses must implement load_data() and fill self.data / self.val
    with (sample, label) pairs. With one_hot=N, integer labels are
    expanded to N-dimensional one-hot rows.
    """
    def __init__(self, one_hot=None):
        self.data_pos = 0
        self.val_pos = 0
        self.data = []
        self.val = []
        self.one_hot = False
        if one_hot is not None:
            self.one_hot = True
            self.eye = np.eye(one_hot)
        self.load_data()  # subclass hook: must populate self.data / self.val
    def get_next_batch(self, BSIZE):
        """Return (x, y) for the next training batch; reshuffles each epoch."""
        if self.data_pos + BSIZE > len(self.data):
            random.shuffle(self.data)
            self.data_pos = 0
        batch = self.data[self.data_pos : self.data_pos+BSIZE]
        x = [i[0] for i in batch]
        y = [i[1] for i in batch]
        if self.one_hot:
            y = self.eye[np.array(y)]
        self.data_pos += BSIZE
        return x, y
    def get_val_next_batch(self, BSIZE):
        """Return (x, y, is_end) for the next validation batch."""
        if self.val_pos + BSIZE >= len(self.val):
            batch = self.val[self.val_pos:]
            random.shuffle(self.val)
            self.val_pos = 0
            is_end = True
        else:
            # BUGFIX: the mid-epoch branch used to slice self.data at
            # self.data_pos, i.e. validation silently ran on training data.
            batch = self.val[self.val_pos : self.val_pos+BSIZE]
            # BUGFIX: advance only mid-epoch; the old code also advanced
            # after the reset, skipping the first batch of the next pass.
            self.val_pos += BSIZE
            is_end = False
        x = [i[0] for i in batch]
        y = [i[1] for i in batch]
        if self.one_hot:
            y = self.eye[np.array(y)]
        return x, y, is_end
    def get_train_iter(self, BSIZE):
        """Number of full training batches per epoch."""
        return len(self.data)//BSIZE
    def get_val_iter(self, BSIZE):
        """Number of validation iterations (last batch may be short)."""
        return len(self.val)//BSIZE + 1
class ListReader():
    """Data reader over a list of file references.

    Like DataReaderSerial, but each sample x is passed through the
    subclass-provided process_img() before being returned (e.g. to load
    an image from a path). Subclasses implement load_data() and
    process_img().
    """
    def __init__(self, one_hot=None):
        self.data_pos = 0
        self.val_pos = 0
        self.data = []
        self.val = []
        self.one_hot = False
        if one_hot is not None:
            self.one_hot = True
            self.eye = np.eye(one_hot)
        self.load_data()  # subclass hook: must populate self.data / self.val
    def get_next_batch(self, BSIZE):
        """Return (x, y) for the next training batch; reshuffles each epoch."""
        if self.data_pos + BSIZE > len(self.data):
            random.shuffle(self.data)
            self.data_pos = 0
        batch = self.data[self.data_pos : self.data_pos+BSIZE]
        x = [i[0] for i in batch]
        y = [i[1] for i in batch]
        if self.one_hot:
            y = self.eye[np.array(y)]
        self.data_pos += BSIZE
        x = [self.process_img(i) for i in x]
        return x, y
    def get_val_next_batch(self, BSIZE):
        """Return (x, y, is_end) for the next validation batch."""
        if self.val_pos + BSIZE >= len(self.val):
            batch = self.val[self.val_pos:]
            random.shuffle(self.val)
            self.val_pos = 0
            is_end = True
        else:
            # BUGFIX: the mid-epoch branch used to slice self.data at
            # self.data_pos, i.e. validation silently ran on training data.
            batch = self.val[self.val_pos : self.val_pos+BSIZE]
            # BUGFIX: advance only mid-epoch; the old code also advanced
            # after the reset, skipping the first batch of the next pass.
            self.val_pos += BSIZE
            is_end = False
        x = [i[0] for i in batch]
        y = [i[1] for i in batch]
        if self.one_hot:
            y = self.eye[np.array(y)]
        x = [self.process_img(i) for i in x]
        return x, y, is_end
    def get_train_iter(self, BSIZE):
        """Number of full training batches per epoch."""
        return len(self.data)//BSIZE
    def get_val_iter(self, BSIZE):
        """Number of validation iterations (last batch may be short)."""
        return len(self.val)//BSIZE + 1
######### Data Reader Template (parallel) ##########
# multi-process to read data
class DataReader():
    # Parallel data reader: a multiprocessing.Pool prefetches the next
    # batch while the caller consumes the current one.
    def __init__(self, data, fn, batch_size, shuffle=False, random_sample=False, processes=2, post_fn=None):
        # data: list of samples; fn: per-sample worker function (must be
        # picklable); post_fn: optional batch-level post-processing.
        from multiprocessing import Pool
        self.pool = Pool(processes)
        print('Starting parallel data loader...')
        self.process_fn = fn
        self.data = data
        self.batch_size = batch_size
        self.position = batch_size
        self.post_fn = post_fn
        self.random_sample = random_sample
        self.shuffle = shuffle
        if shuffle:
            random.shuffle(self.data)
        # Kick off the first prefetch immediately so the first
        # get_next_batch() call only has to wait for results.
        self._start_p(self.data[:batch_size])
    def _start_p(self, data):
        # Submit one async task per sample; results are collected later.
        self.ps = []
        for i in data:
            self.ps.append(self.pool.apply_async(self.process_fn, [i]))
    def get_next_batch(self):
        # Block until the prefetched batch is ready.
        res = [i.get() for i in self.ps]
        # Immediately start prefetching the next batch before returning.
        if self.random_sample:
            batch = random.sample(self.data, self.batch_size)
        else:
            # Sequential mode: wrap around (and optionally reshuffle) at epoch end.
            if self.position + self.batch_size > len(self.data):
                self.position = 0
                if self.shuffle:
                    random.shuffle(self.data)
            batch = self.data[self.position:self.position+self.batch_size]
            self.position += self.batch_size
        self._start_p(batch)
        # Optional batch-level post-processing (e.g. stacking into arrays).
        if self.post_fn is not None:
            res = self.post_fn(res)
        return res
######### short-cut functions #########
gradient_reverse = L.gradient_reverse
def pad(x, pad):
    """Zero-pad the spatial dimensions of a rank-4 NHWC tensor.

    pad may be a scalar (same padding on all four sides) or a sequence
    [top, bottom, left, right]. Batch and channel axes are never padded.
    """
    # Generalized: tuples are accepted as well as lists.
    if isinstance(pad, (list, tuple)):
        x = tf.pad(x, [[0,0],[pad[0],pad[1]], [pad[2],pad[3]], [0,0]])
    else:
        x = tf.pad(x, [[0,0],[pad,pad],[pad,pad],[0,0]])
    return x
def pad3D(x, pad):
    """Zero-pad the spatial dimensions of a rank-5 NDHWC tensor.

    pad may be a scalar (same padding on all six sides) or a sequence
    [front, back, top, bottom, left, right]. Batch and channel axes are
    never padded.
    """
    # Generalized: tuples are accepted as well as lists.
    if isinstance(pad, (list, tuple)):
        x = tf.pad(x, [[0,0],[pad[0],pad[1]], [pad[2],pad[3]], [pad[4], pad[5]], [0,0]])
    else:
        x = tf.pad(x, [[0,0],[pad,pad],[pad,pad],[pad,pad],[0,0]])
    return x
def image_transform(x, H, out_shape=None, interpolation='NEAREST'):
    """Apply projective transform(s) H to an image tensor.

    In eager mode tf.contrib.image.transform raises unless output_shape is
    given, so when out_shape is None it is derived from x's static shape
    (NHWC for rank-4 input, HW-first otherwise).
    """
    if out_shape is None:
        dims = x.get_shape().as_list()
        out_shape = dims[1:3] if len(dims) == 4 else dims[:2]
    return tf.contrib.image.transform(x, H, interpolation=interpolation, output_shape=out_shape)
def zip_grad(grads, vars):
    """Pair gradients with their variables, dropping None gradients.

    Asserts the two lists have equal length and that at least one
    gradient survives filtering; returns a zip of (grad, var) pairs.
    """
    assert len(grads) == len(vars)
    kept = [idx for idx, g in enumerate(grads) if g is not None]
    assert len(kept) != 0
    return zip([grads[i] for i in kept], [vars[i] for i in kept])
| true
| true
|
f70914ece766c06da0a91283d76e7f41f01c5ac6
| 7,440
|
py
|
Python
|
tests/platform_tests/link_flap/test_cont_link_flap.py
|
Megathrone/sonic-mgmt
|
e319c0ad94c4773aa342e3777c67455d7e5b9bad
|
[
"Apache-2.0"
] | 1
|
2021-09-15T17:04:21.000Z
|
2021-09-15T17:04:21.000Z
|
tests/platform_tests/link_flap/test_cont_link_flap.py
|
Megathrone/sonic-mgmt
|
e319c0ad94c4773aa342e3777c67455d7e5b9bad
|
[
"Apache-2.0"
] | 3
|
2021-10-06T19:48:49.000Z
|
2021-11-18T17:11:19.000Z
|
tests/platform_tests/link_flap/test_cont_link_flap.py
|
Megathrone/sonic-mgmt
|
e319c0ad94c4773aa342e3777c67455d7e5b9bad
|
[
"Apache-2.0"
] | null | null | null |
"""
Tests the continuous link flap in SONiC.
Parameters:
--orch_cpu_threshold <port> (int): Which port you want the test to send traffic
to. Default is 3.
"""
import logging
import time
import pytest
from tests.common.helpers.assertions import pytest_assert, pytest_require
from tests.common import port_toggle
from tests.platform_tests.link_flap.link_flap_utils import build_test_candidates, toggle_one_link, check_orch_cpu_utilization, check_bgp_routes, check_portchannel_status
from tests.common.utilities import wait_until
from tests.common.devices.eos import EosHost
from tests.common.devices.sonic import SonicHost
pytestmark = [
pytest.mark.disable_loganalyzer,
pytest.mark.topology('any')
]
class TestContLinkFlap(object):
    """
    TestContLinkFlap class for continuous link flap
    """

    def test_cont_link_flap(self, request, duthosts, nbrhosts, enum_rand_one_per_hwsku_frontend_hostname, fanouthosts, bring_up_dut_interfaces, tbinfo):
        """
        Validates that continuous link flap works as expected
        Test steps:
            1.) Flap all interfaces one by one in 1-3 iteration
                to cause BGP Flaps.
            2.) Flap all interfaces on peer (FanOutLeaf) one by one 1-3 iteration
                to cause BGP Flaps.
            3.) Watch for memory (show system-memory), orchagent CPU Utilization
                and Redis_memory.
        Pass Criteria: All routes must be re-learned with < 5% increase in Redis and
            ORCH agent CPU consumption below threshold after 3 mins after stopping flaps.
        """
        duthost = duthosts[enum_rand_one_per_hwsku_frontend_hostname]
        orch_cpu_threshold = request.config.getoption("--orch_cpu_threshold")
        # Record memory status at start
        memory_output = duthost.shell("show system-memory")["stdout"]
        logging.info("Memory Status at start: %s", memory_output)
        # Record Redis Memory at start (value in MiB)
        start_time_redis_memory = duthost.shell("redis-cli info memory | grep used_memory_human | sed -e 's/.*:\(.*\)M/\\1/'")["stdout"]
        logging.info("Redis Memory: %s M", start_time_redis_memory)
        # Record ipv4/ipv6 route counts at start so re-learning can be verified
        sumv4, sumv6 = duthost.get_ip_route_summary()
        totalsv4 = sumv4.get('Totals', {})
        totalsv6 = sumv6.get('Totals', {})
        start_time_ipv4_route_counts = totalsv4.get('routes', 0)
        start_time_ipv6_route_counts = totalsv6.get('routes', 0)
        logging.info("IPv4 routes: start {}, summary {}".format(start_time_ipv4_route_counts, sumv4))
        logging.info("IPv6 routes: start {}, summary {}".format(start_time_ipv6_route_counts, sumv6))
        # Make Sure Orch CPU < orch_cpu_threshold before starting test.
        logging.info("Make Sure orchagent CPU utilization is less that %d before link flap", orch_cpu_threshold)
        pytest_assert(wait_until(100, 2, 0, check_orch_cpu_utilization, duthost, orch_cpu_threshold),
                      "Orch CPU utilization {} > orch cpu threshold {} before link flap"
                      .format(duthost.shell("show processes cpu | grep orchagent | awk '{print $9}'")["stdout"], orch_cpu_threshold))
        # Flap all interfaces one by one on DUT
        for iteration in range(3):
            logging.info("%d Iteration flap all interfaces one by one on DUT", iteration + 1)
            port_toggle(duthost, tbinfo, watch=True)
        # Flap all interfaces one by one on Peer Device
        for iteration in range(3):
            logging.info("%d Iteration flap all interfaces one by one on Peer Device", iteration + 1)
            candidates = build_test_candidates(duthost, fanouthosts, 'all_ports')
            pytest_require(candidates, "Didn't find any port that is admin up and present in the connection graph")
            for dut_port, fanout, fanout_port in candidates:
                toggle_one_link(duthost, dut_port, fanout, fanout_port, watch=True)
        # All portchannels must come back up after the flapping stops
        config_facts = duthost.get_running_config_facts()
        for portchannel in config_facts['PORTCHANNEL'].keys():
            pytest_assert(check_portchannel_status(duthost, portchannel, "up", verbose=True),
                          "Fail: dut interface {}: link operational down".format(portchannel))
        # Make Sure all ipv4/ipv6 routes are relearned with jitter of ~5
        if not wait_until(120, 2, 0, check_bgp_routes, duthost, start_time_ipv4_route_counts, start_time_ipv6_route_counts):
            endv4, endv6 = duthost.get_ip_route_summary()
            failmsg = []
            failmsg.append(
                "IP routes are not equal after link flap: before ipv4 {} ipv6 {}, after ipv4 {} ipv6 {}".format(sumv4,
                                                                                                                sumv6,
                                                                                                                endv4,
                                                                                                                endv6))
            # Collect BGP summaries from every neighbor to aid debugging
            nei_meta = config_facts.get('DEVICE_NEIGHBOR_METADATA', {})
            for k in nei_meta.keys():
                nbrhost = nbrhosts[k]['host']
                if isinstance(nbrhost, EosHost):
                    res = nbrhost.eos_command(commands=['show ip bgp sum'])
                elif isinstance(nbrhost, SonicHost):
                    res = nbrhost.command('vtysh -c "show ip bgp sum"')
                else:
                    # BUGFIX: res used to be set to "" here, and the
                    # subsequent res['stdout'] raised TypeError, masking
                    # the real failure message. Skip unknown host types.
                    continue
                failmsg.append(res['stdout'])
            pytest.fail(str(failmsg))
        # Record memory status at end
        memory_output = duthost.shell("show system-memory")["stdout"]
        logging.info("Memory Status at end: %s", memory_output)
        # Record orchagent CPU utilization at end
        orch_cpu = duthost.shell("show processes cpu | grep orchagent | awk '{print $9}'")["stdout"]
        logging.info("Orchagent CPU Util at end: %s", orch_cpu)
        # Record Redis Memory at end
        end_time_redis_memory = duthost.shell("redis-cli info memory | grep used_memory_human | sed -e 's/.*:\(.*\)M/\\1/'")["stdout"]
        logging.info("Redis Memory at start: %s M", start_time_redis_memory)
        logging.info("Redis Memory at end: %s M", end_time_redis_memory)
        # Calculate diff in Redis memory
        incr_redis_memory = float(end_time_redis_memory) - float(start_time_redis_memory)
        logging.info("Redis absolute difference: %d", incr_redis_memory)
        # Check redis memory only if it is increased else default to pass
        if incr_redis_memory > 0.0:
            percent_incr_redis_memory = (incr_redis_memory / float(start_time_redis_memory)) * 100
            logging.info("Redis Memory percentage Increase: %d", percent_incr_redis_memory)
            pytest_assert(percent_incr_redis_memory < 5, "Redis Memory Increase more than expected: {}".format(percent_incr_redis_memory))
        # Orchagent CPU should consume < orch_cpu_threshold at last.
        logging.info("watch orchagent CPU utilization when it goes below %d", orch_cpu_threshold)
        # BUGFIX: assertion message said "before link flap" on this
        # post-flap check, which made failures look like setup failures.
        pytest_assert(wait_until(45, 2, 0, check_orch_cpu_utilization, duthost, orch_cpu_threshold),
                      "Orch CPU utilization {} > orch cpu threshold {} after link flap"
                      .format(duthost.shell("show processes cpu | grep orchagent | awk '{print $9}'")["stdout"], orch_cpu_threshold))
| 52.765957
| 169
| 0.638844
|
import logging
import time
import pytest
from tests.common.helpers.assertions import pytest_assert, pytest_require
from tests.common import port_toggle
from tests.platform_tests.link_flap.link_flap_utils import build_test_candidates, toggle_one_link, check_orch_cpu_utilization, check_bgp_routes, check_portchannel_status
from tests.common.utilities import wait_until
from tests.common.devices.eos import EosHost
from tests.common.devices.sonic import SonicHost
pytestmark = [
pytest.mark.disable_loganalyzer,
pytest.mark.topology('any')
]
class TestContLinkFlap(object):
def test_cont_link_flap(self, request, duthosts, nbrhosts, enum_rand_one_per_hwsku_frontend_hostname, fanouthosts, bring_up_dut_interfaces, tbinfo):
duthost = duthosts[enum_rand_one_per_hwsku_frontend_hostname]
orch_cpu_threshold = request.config.getoption("--orch_cpu_threshold")
memory_output = duthost.shell("show system-memory")["stdout"]
logging.info("Memory Status at start: %s", memory_output)
start_time_redis_memory = duthost.shell("redis-cli info memory | grep used_memory_human | sed -e 's/.*:\(.*\)M/\\1/'")["stdout"]
logging.info("Redis Memory: %s M", start_time_redis_memory)
sumv4, sumv6 = duthost.get_ip_route_summary()
totalsv4 = sumv4.get('Totals', {})
totalsv6 = sumv6.get('Totals', {})
start_time_ipv4_route_counts = totalsv4.get('routes', 0)
start_time_ipv6_route_counts = totalsv6.get('routes', 0)
logging.info("IPv4 routes: start {}, summary {}".format(start_time_ipv4_route_counts, sumv4))
logging.info("IPv6 routes: start {}, summary {}".format(start_time_ipv6_route_counts, sumv6))
logging.info("Make Sure orchagent CPU utilization is less that %d before link flap", orch_cpu_threshold)
pytest_assert(wait_until(100, 2, 0, check_orch_cpu_utilization, duthost, orch_cpu_threshold),
"Orch CPU utilization {} > orch cpu threshold {} before link flap"
.format(duthost.shell("show processes cpu | grep orchagent | awk '{print $9}'")["stdout"], orch_cpu_threshold))
for iteration in range(3):
logging.info("%d Iteration flap all interfaces one by one on DUT", iteration + 1)
port_toggle(duthost, tbinfo, watch=True)
for iteration in range(3):
logging.info("%d Iteration flap all interfaces one by one on Peer Device", iteration + 1)
candidates = build_test_candidates(duthost, fanouthosts, 'all_ports')
pytest_require(candidates, "Didn't find any port that is admin up and present in the connection graph")
for dut_port, fanout, fanout_port in candidates:
toggle_one_link(duthost, dut_port, fanout, fanout_port, watch=True)
config_facts = duthost.get_running_config_facts()
for portchannel in config_facts['PORTCHANNEL'].keys():
pytest_assert(check_portchannel_status(duthost, portchannel, "up", verbose=True),
"Fail: dut interface {}: link operational down".format(portchannel))
# Make Sure all ipv4/ipv6 routes are relearned with jitter of ~5
if not wait_until(120, 2, 0, check_bgp_routes, duthost, start_time_ipv4_route_counts, start_time_ipv6_route_counts):
endv4, endv6 = duthost.get_ip_route_summary()
failmsg = []
failmsg.append(
"IP routes are not equal after link flap: before ipv4 {} ipv6 {}, after ipv4 {} ipv6 {}".format(sumv4,
sumv6,
endv4,
endv6))
nei_meta = config_facts.get('DEVICE_NEIGHBOR_METADATA', {})
for k in nei_meta.keys():
nbrhost = nbrhosts[k]['host']
if isinstance(nbrhost, EosHost):
res = nbrhost.eos_command(commands=['show ip bgp sum'])
elif isinstance(nbrhost, SonicHost):
res = nbrhost.command('vtysh -c "show ip bgp sum"')
else:
res = ""
failmsg.append(res['stdout'])
pytest.fail(str(failmsg))
# Record memory status at end
memory_output = duthost.shell("show system-memory")["stdout"]
logging.info("Memory Status at end: %s", memory_output)
# Record orchagent CPU utilization at end
orch_cpu = duthost.shell("show processes cpu | grep orchagent | awk '{print $9}'")["stdout"]
logging.info("Orchagent CPU Util at end: %s", orch_cpu)
# Record Redis Memory at end
end_time_redis_memory = duthost.shell("redis-cli info memory | grep used_memory_human | sed -e 's/.*:\(.*\)M/\\1/'")["stdout"]
logging.info("Redis Memory at start: %s M", start_time_redis_memory)
logging.info("Redis Memory at end: %s M", end_time_redis_memory)
# Calculate diff in Redis memory
incr_redis_memory = float(end_time_redis_memory) - float(start_time_redis_memory)
logging.info("Redis absolute difference: %d", incr_redis_memory)
# Check redis memory only if it is increased else default to pass
if incr_redis_memory > 0.0:
percent_incr_redis_memory = (incr_redis_memory / float(start_time_redis_memory)) * 100
logging.info("Redis Memory percentage Increase: %d", percent_incr_redis_memory)
pytest_assert(percent_incr_redis_memory < 5, "Redis Memory Increase more than expected: {}".format(percent_incr_redis_memory))
# Orchagent CPU should consume < orch_cpu_threshold at last.
logging.info("watch orchagent CPU utilization when it goes below %d", orch_cpu_threshold)
pytest_assert(wait_until(45, 2, 0, check_orch_cpu_utilization, duthost, orch_cpu_threshold),
"Orch CPU utilization {} > orch cpu threshold {} before link flap"
.format(duthost.shell("show processes cpu | grep orchagent | awk '{print $9}'")["stdout"], orch_cpu_threshold))
| true
| true
|
f70915190ac3245bfa14a9456d71f24c01cebe8c
| 5,407
|
py
|
Python
|
news_crawler/spiders/sputnik.py
|
andreeaiana/german-news
|
39e879aca46dfb73b0a631de7c053daff451f63e
|
[
"MIT"
] | 1
|
2021-12-07T16:27:02.000Z
|
2021-12-07T16:27:02.000Z
|
news_crawler/spiders/sputnik.py
|
andreeaiana/german-news
|
39e879aca46dfb73b0a631de7c053daff451f63e
|
[
"MIT"
] | null | null | null |
news_crawler/spiders/sputnik.py
|
andreeaiana/german-news
|
39e879aca46dfb73b0a631de7c053daff451f63e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
from news_crawler.spiders import BaseSpider
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
from datetime import datetime
sys.path.insert(0, os.path.join(os.getcwd(), "..",))
from news_crawler.items import NewsCrawlerItem
from news_crawler.utils import remove_empty_paragraphs
class Sputniknews(BaseSpider):
    """Spider for Sputniknews"""
    name = 'sputniknews'
    rotate_user_agent = True
    allowed_domains = ['snanews.de']
    start_urls = ['https://snanews.de']

    # Exclude pages without relevant articles
    rules = (
            Rule(
                LinkExtractor(
                    allow=(r'snanews\.de\/\d+\/\w.*\.html$'),
                    deny=(r'snanews\.de\/category\_multimedia\/',
                        r'snanews\.de\/location\_oesterreich\/',
                        r'snanews\.de\/\?modal\=feedback',
                        r'snanews\.de\/docs\/impressum\.html',
                        r'snanews\.de\/docs\/cookie\.html',
                        r'snanews\.de\/docs\/nutzungsrichtlinien\.html',
                        r'snanews\.de\/docs\/ueber\_uns\.html',
                        r'snanews\.de\/docs\/privacy\_policy\.html'
                        )
                    ),
                callback='parse_item',
                follow=True
                ),
            )

    def parse_item(self, response):
        """
        Checks article validity. If valid, it parses it.
        """
        # Check date validity: drop articles without a publication date or
        # outside the configured time window.
        creation_date = response.xpath('//div[@itemprop="datePublished"]/text()').get()
        if not creation_date:
            return
        creation_date = datetime.fromisoformat(creation_date.split('T')[0])
        if self.is_out_of_date(creation_date):
            return

        # Extract the article's paragraphs (body text and quote blocks)
        paragraphs = [node.xpath('string()').get().strip() for node in response.xpath('//div[@class="article__text"] | //div[@class="article__quote-text"]')]
        paragraphs = remove_empty_paragraphs(paragraphs)
        text = ' '.join([para for para in paragraphs])

        # Check article's length validity
        if not self.has_min_length(text):
            return

        # Check keywords validity
        if not self.has_valid_keywords(text):
            return

        # Parse the valid article
        item = NewsCrawlerItem()
        item['news_outlet'] = 'sputniknews'
        item['provenance'] = response.url
        item['query_keywords'] = self.get_query_keywords()

        # Get creation, modification, and crawling dates
        item['creation_date'] = creation_date.strftime('%d.%m.%Y')
        last_modified = response.xpath('//div[@itemprop="dateModified"]/text()').get()
        # ROBUSTNESS FIX: articles without a modification timestamp used to
        # crash with AttributeError on .split(); fall back to creation date.
        if last_modified:
            item['last_modified'] = datetime.fromisoformat(last_modified.split('T')[0]).strftime('%d.%m.%Y')
        else:
            item['last_modified'] = creation_date.strftime('%d.%m.%Y')
        item['crawl_date'] = datetime.now().strftime('%d.%m.%Y')

        # Get authors
        authors = response.xpath('//div[@itemprop="creator"]/div[@itemprop="name"]/text()').getall()
        item['author_person'] = authors if authors else list()
        item['author_organization'] = list()

        # Extract keywords, if available
        news_keywords = response.xpath('//meta[@name="keywords"]/@content').get()
        item['news_keywords'] = news_keywords.split(', ') if news_keywords else list()

        # Get title, description, and body of article
        title = response.xpath('//meta[@property="og:title"]/@content').get()
        description = response.xpath('//meta[@property="og:description"]/@content').get()

        # Body as dictionary: key = headline (if available, otherwise empty string), values = list of corresponding paragraphs
        body = dict()
        if response.xpath('//h3[@class="article__h2"] | //h2[@class="article__h2"]'):
            # Extract headlines
            headlines = [h2.xpath('string()').get().strip() for h2 in response.xpath('//h3[@class="article__h2"] | //h2[@class="article__h2"]')]
            # Extract the paragraphs and headlines together, in page order
            text = [node.xpath('string()').get().strip() for node in response.xpath('//div[@class="article__text"] | //div[@class="article__quote-text"] | //h3[@class="article__h2"] | //h2[@class="article__h2"]')]
            # Extract paragraphs between the abstract and the first headline
            body[''] = remove_empty_paragraphs(text[:text.index(headlines[0])])
            # Extract paragraphs corresponding to each headline, except the last one
            for i in range(len(headlines)-1):
                body[headlines[i]] = remove_empty_paragraphs(text[text.index(headlines[i])+1:text.index(headlines[i+1])])
            # Extract the paragraphs belonging to the last headline
            body[headlines[-1]] = remove_empty_paragraphs(text[text.index(headlines[-1])+1:])
        else:
            # The article has no headlines, just paragraphs
            body[''] = paragraphs

        item['content'] = {'title': title, 'description': description, 'body':body}

        # No same-outlet recommendations are extracted for this outlet
        item['recommendations'] = list()
        item['response_body'] = response.body
        yield item
| 43.256
| 214
| 0.585352
|
import os
import sys
from news_crawler.spiders import BaseSpider
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
from datetime import datetime
sys.path.insert(0, os.path.join(os.getcwd(), "..",))
from news_crawler.items import NewsCrawlerItem
from news_crawler.utils import remove_empty_paragraphs
class Sputniknews(BaseSpider):
    """Crawler for the German-language Sputnik outlet (snanews.de)."""
    name = 'sputniknews'
    rotate_user_agent = True
    allowed_domains = ['snanews.de']
    start_urls = ['https://snanews.de']
    rules = (
        Rule(
            # Follow article pages only; skip multimedia/location hubs,
            # the feedback modal, and static documentation pages.
            LinkExtractor(
                allow=(r'snanews\.de\/\d+\/\w.*\.html$'),
                deny=(r'snanews\.de\/category\_multimedia\/',
                      r'snanews\.de\/location\_oesterreich\/',
                      r'snanews\.de\/\?modal\=feedback',
                      r'snanews\.de\/docs\/impressum\.html',
                      r'snanews\.de\/docs\/cookie\.html',
                      r'snanews\.de\/docs\/nutzungsrichtlinien\.html',
                      r'snanews\.de\/docs\/ueber\_uns\.html',
                      r'snanews\.de\/docs\/privacy\_policy\.html'
                      )
                ),
            callback='parse_item',
            follow=True
            ),
        )

    def parse_item(self, response):
        """Parse an article page into a NewsCrawlerItem, or drop it.

        Articles are dropped when they have no publication date, are out of
        the configured date range, are too short, or lack query keywords.
        """
        # Drop pages without a publication date or outside the crawl window.
        creation_date = response.xpath('//div[@itemprop="datePublished"]/text()').get()
        if not creation_date:
            return
        creation_date = datetime.fromisoformat(creation_date.split('T')[0])
        if self.is_out_of_date(creation_date):
            return
        # Extract the article's paragraphs (body text and quotes).
        paragraphs = [node.xpath('string()').get().strip() for node in response.xpath('//div[@class="article__text"] | //div[@class="article__quote-text"]')]
        paragraphs = remove_empty_paragraphs(paragraphs)
        text = ' '.join([para for para in paragraphs])
        # Check the article's length and keyword validity.
        if not self.has_min_length(text):
            return
        if not self.has_valid_keywords(text):
            return
        # Parse the valid article into the item's metadata fields.
        item = NewsCrawlerItem()
        item['news_outlet'] = 'sputniknews'
        item['provenance'] = response.url
        item['query_keywords'] = self.get_query_keywords()
        item['creation_date'] = creation_date.strftime('%d.%m.%Y')
        last_modified = response.xpath('//div[@itemprop="dateModified"]/text()').get()
        item['last_modified'] = datetime.fromisoformat(last_modified.split('T')[0]).strftime('%d.%m.%Y')
        item['crawl_date'] = datetime.now().strftime('%d.%m.%Y')
        authors = response.xpath('//div[@itemprop="creator"]/div[@itemprop="name"]/text()').getall()
        item['author_person'] = authors if authors else list()
        item['author_organization'] = list()
        # Extract the article's keywords from the page metadata, if available.
        news_keywords = response.xpath('//meta[@name="keywords"]/@content').get()
        item['news_keywords'] = news_keywords.split(', ') if news_keywords else list()
        title = response.xpath('//meta[@property="og:title"]/@content').get()
        description = response.xpath('//meta[@property="og:description"]/@content').get()
        # The article's body is a dict mapping each headline to its paragraphs;
        # the key '' holds paragraphs that precede the first headline.
        body = dict()
        if response.xpath('//h3[@class="article__h2"] | //h2[@class="article__h2"]'):
            headlines = [h2.xpath('string()').get().strip() for h2 in response.xpath('//h3[@class="article__h2"] | //h2[@class="article__h2"]')]
            # Paragraphs and headlines together, in document order.
            # NOTE(review): the text.index() lookups below assume no paragraph
            # text equals a headline and headlines are unique -- confirm.
            text = [node.xpath('string()').get().strip() for node in response.xpath('//div[@class="article__text"] | //div[@class="article__quote-text"] | //h3[@class="article__h2"] | //h2[@class="article__h2"]')]
            body[''] = remove_empty_paragraphs(text[:text.index(headlines[0])])
            for i in range(len(headlines)-1):
                body[headlines[i]] = remove_empty_paragraphs(text[text.index(headlines[i])+1:text.index(headlines[i+1])])
            body[headlines[-1]] = remove_empty_paragraphs(text[text.index(headlines[-1])+1:])
        else:
            # The article has no headlines, just paragraphs.
            body[''] = paragraphs
        item['content'] = {'title': title, 'description': description, 'body':body}
        item['recommendations'] = list()
        item['response_body'] = response.body
        yield item
| true
| true
|
f709151bb9972f3f3a0ec505604bd40b159eeb29
| 435
|
py
|
Python
|
z2.py
|
12W300/Five
|
e5090153b207df71046df40d4054507d96d87207
|
[
"MIT"
] | null | null | null |
z2.py
|
12W300/Five
|
e5090153b207df71046df40d4054507d96d87207
|
[
"MIT"
] | null | null | null |
z2.py
|
12W300/Five
|
e5090153b207df71046df40d4054507d96d87207
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Read whitespace-separated non-negative integers and print their harmonic mean."""


def midlgeom(a):
    """Return the harmonic mean of *a*: len(a) / sum(1/x for x in a).

    Returns None for an empty sequence.  A zero element raises
    ZeroDivisionError (the harmonic mean is undefined at zero).
    """
    if not a:
        return None
    return len(a) / sum(1 / x for x in a)


if __name__ == "__main__":
    raw = input('Введите последовательность чисел через пробел: ')
    # isdigit() keeps only non-negative integer tokens; note that "0" passes
    # and will make midlgeom raise ZeroDivisionError.
    mas = [int(i) for i in raw.split(' ') if i.isdigit()]
    print(midlgeom(mas))
| 22.894737
| 63
| 0.485057
|
if __name__ == "__main__":
def midlgeom(a):
if len(a) != 0:
res = 0
for i in range(len(a)):
res += 1/a[i]
return len(a) / res
else:
return None
raw = input('Введите последовательность чисел через пробел: ')
mas = [int(i) for i in raw.split(' ') if i.isdigit()]
print(midlgeom(mas))
| true
| true
|
f7091608c502de23441376a9faa4ef4af8177aea
| 1,816
|
py
|
Python
|
kluctl/utils/yaml_utils.py
|
codablock/kluctl
|
a7069bf22bfe78c5529fe403c3b3c877f026d3c3
|
[
"Apache-2.0"
] | 26
|
2021-08-18T11:18:46.000Z
|
2022-03-16T09:28:43.000Z
|
kluctl/utils/yaml_utils.py
|
codablock/kluctl
|
a7069bf22bfe78c5529fe403c3b3c877f026d3c3
|
[
"Apache-2.0"
] | 4
|
2021-09-07T09:55:29.000Z
|
2022-03-03T09:05:01.000Z
|
kluctl/utils/yaml_utils.py
|
codablock/kluctl
|
a7069bf22bfe78c5529fe403c3b3c877f026d3c3
|
[
"Apache-2.0"
] | 4
|
2021-09-04T11:52:33.000Z
|
2022-03-16T09:18:20.000Z
|
import sys
import yaml
try:
from yaml import CSafeLoader as SafeLoader, CSafeDumper as SafeDumper
except ImportError:
print("Failed to load fast LibYAML bindings. You should install them to speed up kluctl.", file=sys.stderr)
from yaml import SafeLoader as SafeLoader, SafeDumper as SafeDumper
def construct_value(load, node):
    """Constructor for YAML's default-value tag ``tag:yaml.org,2002:value``.

    Written as a generator constructor (note the ``yield``), following
    PyYAML's two-phase construction protocol; the scalar is produced as a
    plain str.
    """
    if not isinstance(node, yaml.ScalarNode):
        raise yaml.constructor.ConstructorError(
            "while constructing a value",
            node.start_mark,
            "expected a scalar, but found %s" % node.id, node.start_mark
        )
    yield str(node.value)

# Resolve bare '=' scalars as plain strings instead of the special value tag.
# See https://github.com/yaml/pyyaml/issues/89
SafeLoader.add_constructor(u'tag:yaml.org,2002:value', construct_value)
def multiline_str_representer(dumper, data):
    """Represent strings that span several lines in YAML block style (``|``)."""
    style = '|' if len(data.splitlines()) > 1 else None
    return dumper.represent_scalar('tag:yaml.org,2002:str', data, style=style)
class MultilineStrDumper(SafeDumper):
    """SafeDumper variant that renders multi-line strings in block style."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Register the multi-line str representer for this dumper.
        self.add_representer(str, multiline_str_representer)
def yaml_load(s):
    """Parse a single YAML document from a string or stream."""
    return yaml.load(s, Loader=SafeLoader)
def yaml_load_all(s):
    """Parse a multi-document YAML string/stream into a list of documents."""
    return list(yaml.load_all(s, Loader=SafeLoader))
def yaml_load_file(path, all=False):
    """Load YAML from *path*: one document by default, every document if *all*."""
    with open(path) as stream:
        loader = yaml_load_all if all else yaml_load
        return loader(stream)
def yaml_dump(y, stream=None):
    """Serialize *y* to YAML, preserving key order; returns a str if *stream* is None."""
    return yaml.dump(y, stream=stream, Dumper=MultilineStrDumper, sort_keys=False)
def yaml_dump_all(y, stream=None):
    """Serialize an iterable of documents to YAML, preserving key order."""
    return yaml.dump_all(y, stream=stream, Dumper=MultilineStrDumper, sort_keys=False)
def yaml_save_file(y, path):
    """Write *y* as YAML to the file at *path*, overwriting it."""
    with open(path, mode='w') as f:
        yaml_dump(y, f)
| 30.779661
| 111
| 0.696035
|
import sys
import yaml
try:
from yaml import CSafeLoader as SafeLoader, CSafeDumper as SafeDumper
except ImportError:
print("Failed to load fast LibYAML bindings. You should install them to speed up kluctl.", file=sys.stderr)
from yaml import SafeLoader as SafeLoader, SafeDumper as SafeDumper
def construct_value(load, node):
if not isinstance(node, yaml.ScalarNode):
raise yaml.constructor.ConstructorError(
"while constructing a value",
node.start_mark,
"expected a scalar, but found %s" % node.id, node.start_mark
)
yield str(node.value)
SafeLoader.add_constructor(u'tag:yaml.org,2002:value', construct_value)
def multiline_str_representer(dumper, data):
if len(data.splitlines()) > 1: return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
return dumper.represent_scalar('tag:yaml.org,2002:str', data)
class MultilineStrDumper(SafeDumper):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_representer(str, multiline_str_representer)
def yaml_load(s):
return yaml.load(s, Loader=SafeLoader)
def yaml_load_all(s):
return list(yaml.load_all(s, Loader=SafeLoader))
def yaml_load_file(path, all=False):
with open(path) as f:
if all:
y = yaml_load_all(f)
else:
y = yaml_load(f)
return y
def yaml_dump(y, stream=None):
return yaml.dump(y, stream=stream, Dumper=MultilineStrDumper, sort_keys=False)
def yaml_dump_all(y, stream=None):
return yaml.dump_all(y, stream=stream, Dumper=MultilineStrDumper, sort_keys=False)
def yaml_save_file(y, path):
with open(path, mode='w') as f:
yaml_dump(y, f)
| true
| true
|
f709168738885702dd8ab877116a8ff2a483280b
| 3,456
|
py
|
Python
|
examples/tutorials/plot.py
|
carocamargo/pygmt
|
6139c1735cff7f7d615d243145c21b1efef3f2c6
|
[
"BSD-3-Clause"
] | null | null | null |
examples/tutorials/plot.py
|
carocamargo/pygmt
|
6139c1735cff7f7d615d243145c21b1efef3f2c6
|
[
"BSD-3-Clause"
] | null | null | null |
examples/tutorials/plot.py
|
carocamargo/pygmt
|
6139c1735cff7f7d615d243145c21b1efef3f2c6
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Plotting data points
--------------------
GMT shines when it comes to plotting data on a map. We can use some sample data that is
packaged with GMT to try this out. PyGMT provides access to these datasets through the
:mod:`pygmt.datasets` package. If you don't have the data files already, they are
automatically downloaded and saved to a cache directory the first time you use them
(usually ``~/.gmt/cache``).
"""
import pygmt
########################################################################################
# For example, let's load the sample dataset of tsunami generating earthquakes around
# Japan (:func:`pygmt.datasets.load_japan_quakes`). The data is loaded as a
# :class:`pandas.DataFrame`.
data = pygmt.datasets.load_japan_quakes()
# Set the region for the plot to be slightly larger than the data bounds.
region = [
data.longitude.min() - 1,
data.longitude.max() + 1,
data.latitude.min() - 1,
data.latitude.max() + 1,
]
print(region)
print(data.head())
########################################################################################
# We'll use :meth:`pygmt.Figure.plot` method to plot circles on the locations of the
# hypocenters of the earthquakes.
fig = pygmt.Figure()
fig.basemap(region=region, projection="M8i", frame=True)
fig.coast(land="black", water="skyblue")
fig.plot(x=data.longitude, y=data.latitude, style="c0.3c", color="white", pen="black")
fig.show()
########################################################################################
# We used the style ``c0.3c`` which means "circles of 0.3 centimeter size". The ``pen``
# attribute controls the outline of the symbols and the ``color`` controls the fill.
#
# We can map the size of the circles to the earthquake magnitude by passing an array to
# the ``sizes`` argument. Because the magnitude is on a logarithmic scale, it helps to
# show the differences by scaling the values using a power law.
fig = pygmt.Figure()
fig.basemap(region=region, projection="M8i", frame=True)
fig.coast(land="black", water="skyblue")
fig.plot(
x=data.longitude,
y=data.latitude,
sizes=0.02 * (2 ** data.magnitude),
style="cc",
color="white",
pen="black",
)
fig.show()
########################################################################################
# Notice that we didn't include the size in the ``style`` argument this time, just the
# symbol ``c`` (circles) and the unit ``c`` (centimeter). So in this case, the sizes
# will be interpreted as being in centimeters.
#
# We can also map the colors of the markers to the depths by passing an array to the
# ``color`` argument and providing a colormap name (``cmap``). We can even use the new
# matplotlib colormap "viridis". Here, we first create a continuous colormap
# ranging from the minimum depth to the maximum depth of the earthquakes
# using :func:`pygmt.makecpt`, then set ``cmap=True`` in :func:`pygmt.Figure.plot`
# to use the colormap. At the end of the plot, we also plot a colorbar showing
# the colormap used in the plot.
#
fig = pygmt.Figure()
fig.basemap(region=region, projection="M8i", frame=True)
fig.coast(land="black", water="skyblue")
pygmt.makecpt(cmap="viridis", series=[data.depth_km.min(), data.depth_km.max()])
fig.plot(
x=data.longitude,
y=data.latitude,
sizes=0.02 * 2 ** data.magnitude,
color=data.depth_km,
cmap=True,
style="cc",
pen="black",
)
fig.colorbar(frame='af+l"Depth (km)"')
fig.show()
| 36.765957
| 88
| 0.634838
|
import pygmt
# Japan (:func:`pygmt.datasets.load_japan_quakes`). The data is loaded as a
# :class:`pandas.DataFrame`.
data = pygmt.datasets.load_japan_quakes()
# Set the region for the plot to be slightly larger than the data bounds.
region = [
data.longitude.min() - 1,
data.longitude.max() + 1,
data.latitude.min() - 1,
data.latitude.max() + 1,
]
print(region)
print(data.head())
########################################################################################
# We'll use :meth:`pygmt.Figure.plot` method to plot circles on the locations of the
fig = pygmt.Figure()
fig.basemap(region=region, projection="M8i", frame=True)
fig.coast(land="black", water="skyblue")
fig.plot(x=data.longitude, y=data.latitude, style="c0.3c", color="white", pen="black")
fig.show()
fig = pygmt.Figure()
fig.basemap(region=region, projection="M8i", frame=True)
fig.coast(land="black", water="skyblue")
fig.plot(
x=data.longitude,
y=data.latitude,
sizes=0.02 * (2 ** data.magnitude),
style="cc",
color="white",
pen="black",
)
fig.show()
# symbol ``c`` (circles) and the unit ``c`` (centimeter). So in this case, the sizes
# will be interpreted as being in centimeters.
#
# We can also map the colors of the markers to the depths by passing an array to the
# ``color`` argument and providing a colormap name (``cmap``). We can even use the new
# matplotlib colormap "viridis". Here, we first create a continuous colormap
# ranging from the minimum depth to the maximum depth of the earthquakes
# using :func:`pygmt.makecpt`, then set ``cmap=True`` in :func:`pygmt.Figure.plot`
# to use the colormap. At the end of the plot, we also plot a colorbar showing
# the colormap used in the plot.
#
fig = pygmt.Figure()
fig.basemap(region=region, projection="M8i", frame=True)
fig.coast(land="black", water="skyblue")
pygmt.makecpt(cmap="viridis", series=[data.depth_km.min(), data.depth_km.max()])
fig.plot(
x=data.longitude,
y=data.latitude,
sizes=0.02 * 2 ** data.magnitude,
color=data.depth_km,
cmap=True,
style="cc",
pen="black",
)
fig.colorbar(frame='af+l"Depth (km)"')
fig.show()
| true
| true
|
f70919c541c332631ac6a45b18d36f9f9c301bf5
| 4,662
|
py
|
Python
|
epidose/back_end/ha_server.py
|
osnas/epidose
|
061d4aff6386d571b1940d2f18359eef99dc2ea5
|
[
"Apache-2.0"
] | 40
|
2020-05-08T17:22:15.000Z
|
2020-06-18T13:21:25.000Z
|
epidose/back_end/ha_server.py
|
osnas/epidose
|
061d4aff6386d571b1940d2f18359eef99dc2ea5
|
[
"Apache-2.0"
] | 50
|
2020-06-27T08:34:13.000Z
|
2021-04-20T10:18:25.000Z
|
epidose/back_end/ha_server.py
|
dspinellis/reference_implementation
|
416cc0305141746d9fe39e3e8698cb152bb3124f
|
[
"Apache-2.0"
] | 10
|
2020-07-05T19:55:24.000Z
|
2021-02-04T14:55:46.000Z
|
#!/usr/bin/env python3
""" Health authority back end REST and static content server """
__copyright__ = """
Copyright 2020 Diomidis Spinellis
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__license__ = "Apache 2.0"
import argparse
from dp3t.protocols.server_database import ServerDatabase
from epidose.common.daemon import Daemon
from flask import Flask, abort, jsonify, request, send_from_directory
import logging
from os.path import basename, dirname
API_VERSION = "1"
app = Flask("ha-server")
db = None
FILTER_LOCATION = "/var/lib/epidose/filter.bin"
DATABASE_LOCATION = "/var/lib/epidose/server-database.db"
UPDATE_LOCATION = "/var/lib/epidose/update.sh"
def shutdown_server():
    """Stop the running development server.

    Relies on the werkzeug.server.shutdown hook, which exists only when
    running under the Werkzeug development server (not e.g. gunicorn).
    """
    func = request.environ.get("werkzeug.server.shutdown")
    if func is None:
        raise RuntimeError("Not running with the Werkzeug Server")
    func()
@app.before_request
def before_request():
    """Open the (lazily created) server database before handling a request."""
    global db
    if not db:
        db = ServerDatabase(DATABASE_LOCATION)
    db.connect(reuse_if_open=True)
@app.after_request
def after_request(response):
    """Close the database after each request (kept open while testing)."""
    global db
    if not app.config["TESTING"]:
        db.close()
    return response
@app.route("/filter", methods=["GET"])
def filter():
"""Send the Cuckoo filter as a static file.
In a production deployment this should be handled by the front-end server,
such as nginx.
"""
return send_from_directory(dirname(FILTER_LOCATION), basename(FILTER_LOCATION))
@app.route("/update", methods=["GET"])
def update():
"""Send the update shell script as a static file."""
return send_from_directory(dirname(UPDATE_LOCATION), basename(UPDATE_LOCATION))
@app.route("/shutdown")
def shutdown():
if app.debug:
shutdown_server()
return "Server shutting down..."
else:
abort(405)
@app.route("/version", methods=["GET"])
def version():
return jsonify({"version": API_VERSION})
@app.route("/add_contagious", methods=["POST"])
def add_contagious():
content = request.json
with db.atomic():
logger.debug(f"Add new data with authorization {content['authorization']}")
# TODO: Check authorization
for rec in content["data"]:
epoch = rec["epoch"]
seed = bytes.fromhex(rec["seed"])
db.add_epoch_seed(epoch, seed)
logger.debug(f"Add {epoch} {seed.hex()}")
# TODO: Delete authorization
return "OK"
def initialize(args):
    """Initialize the server's daemon context, logger, and database."""
    global daemon
    daemon = Daemon("ha_server", args)
    # Setup logging
    global logger
    logger = daemon.get_logger()
    # Connect to the database
    global db
    db = ServerDatabase(args.database)
def main():
    """Parse command-line options, initialize globals, and run the Flask app."""
    parser = argparse.ArgumentParser(
        description="Health authority back end REST and static content server "
    )
    parser.add_argument(
        "-d", "--debug", help="Run in debug mode logging to stderr", action="store_true"
    )
    global DATABASE_LOCATION
    parser.add_argument(
        "-D",
        "--database",
        help="Specify the database location",
        default=DATABASE_LOCATION,
    )
    global FILTER_LOCATION
    parser.add_argument(
        "-f",
        "--filter",
        help="Specify the location of the Cuckoo filter",
        default=FILTER_LOCATION,
    )
    parser.add_argument(
        "-s",
        "--server-name",
        help="Specify the server name (0.0.0.0 for externally visible)",
        default="127.0.0.1",
    )
    parser.add_argument("-p", "--port", help="Set TCP port to listen", type=int)
    parser.add_argument(
        "-v", "--verbose", help="Set verbose logging", action="store_true"
    )
    args = parser.parse_args()
    initialize(args)
    # Propagate the CLI overrides to the module-level paths used by the views.
    FILTER_LOCATION = args.filter
    DATABASE_LOCATION = args.database
    # Daemonize with gunicorn or other means, because the daemonize
    # module has trouble dealing with the lock files when the app
    # reloads itself.
    app.run(debug=args.debug, host=args.server_name, port=args.port)
if __name__ == "__main__":
main()
else:
global logger
logger = logging.getLogger("gunicorn.error")
| 27.104651
| 88
| 0.669884
|
__copyright__ = """
Copyright 2020 Diomidis Spinellis
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__license__ = "Apache 2.0"
import argparse
from dp3t.protocols.server_database import ServerDatabase
from epidose.common.daemon import Daemon
from flask import Flask, abort, jsonify, request, send_from_directory
import logging
from os.path import basename, dirname
API_VERSION = "1"
app = Flask("ha-server")
db = None
FILTER_LOCATION = "/var/lib/epidose/filter.bin"
DATABASE_LOCATION = "/var/lib/epidose/server-database.db"
UPDATE_LOCATION = "/var/lib/epidose/update.sh"
def shutdown_server():
func = request.environ.get("werkzeug.server.shutdown")
if func is None:
raise RuntimeError("Not running with the Werkzeug Server")
func()
@app.before_request
def before_request():
global db
if not db:
db = ServerDatabase(DATABASE_LOCATION)
db.connect(reuse_if_open=True)
@app.after_request
def after_request(response):
global db
if not app.config["TESTING"]:
db.close()
return response
@app.route("/filter", methods=["GET"])
def filter():
return send_from_directory(dirname(FILTER_LOCATION), basename(FILTER_LOCATION))
@app.route("/update", methods=["GET"])
def update():
return send_from_directory(dirname(UPDATE_LOCATION), basename(UPDATE_LOCATION))
@app.route("/shutdown")
def shutdown():
if app.debug:
shutdown_server()
return "Server shutting down..."
else:
abort(405)
@app.route("/version", methods=["GET"])
def version():
return jsonify({"version": API_VERSION})
@app.route("/add_contagious", methods=["POST"])
def add_contagious():
content = request.json
with db.atomic():
logger.debug(f"Add new data with authorization {content['authorization']}")
for rec in content["data"]:
epoch = rec["epoch"]
seed = bytes.fromhex(rec["seed"])
db.add_epoch_seed(epoch, seed)
logger.debug(f"Add {epoch} {seed.hex()}")
return "OK"
def initialize(args):
global daemon
daemon = Daemon("ha_server", args)
global logger
logger = daemon.get_logger()
global db
db = ServerDatabase(args.database)
def main():
parser = argparse.ArgumentParser(
description="Health authority back end REST and static content server "
)
parser.add_argument(
"-d", "--debug", help="Run in debug mode logging to stderr", action="store_true"
)
global DATABASE_LOCATION
parser.add_argument(
"-D",
"--database",
help="Specify the database location",
default=DATABASE_LOCATION,
)
global FILTER_LOCATION
parser.add_argument(
"-f",
"--filter",
help="Specify the location of the Cuckoo filter",
default=FILTER_LOCATION,
)
parser.add_argument(
"-s",
"--server-name",
help="Specify the server name (0.0.0.0 for externally visible)",
default="127.0.0.1",
)
parser.add_argument("-p", "--port", help="Set TCP port to listen", type=int)
parser.add_argument(
"-v", "--verbose", help="Set verbose logging", action="store_true"
)
args = parser.parse_args()
initialize(args)
FILTER_LOCATION = args.filter
DATABASE_LOCATION = args.database
app.run(debug=args.debug, host=args.server_name, port=args.port)
if __name__ == "__main__":
main()
else:
global logger
logger = logging.getLogger("gunicorn.error")
| true
| true
|
f7091a63c75da3469e8ce32bc0cd159841c0a7fd
| 16,728
|
py
|
Python
|
data/utils.py
|
cuis15/xorder
|
6dde5a18552ffa07f29100038464a38c49495527
|
[
"MIT"
] | null | null | null |
data/utils.py
|
cuis15/xorder
|
6dde5a18552ffa07f29100038464a38c49495527
|
[
"MIT"
] | null | null | null |
data/utils.py
|
cuis15/xorder
|
6dde5a18552ffa07f29100038464a38c49495527
|
[
"MIT"
] | null | null | null |
import numpy as np
from sklearn.metrics import roc_auc_score
from numba import jit
def array2str(tmp_array, sep = " "):
    """Join the numbers of *tmp_array* into a string, 3 decimals each."""
    return sep.join("{:.3f}".format(value) for value in tmp_array)
def generate_sorted_groups(pred, y, a):
    """Split (pred, y) by group attribute *a* and sort each group by
    descending score.

    Returns (a_scores, b_scores, a_labels, b_labels) where group a is a == 0
    and group b is a == 1, each ordered from highest to lowest score.
    """
    def _split_and_sort(group_value):
        # Select the group's rows, flatten the scores, and order them
        # from highest to lowest (argsort of the negated scores).
        idx = np.where(a == group_value)
        scores = pred[idx].reshape(-1)
        order = np.argsort(-scores)
        labels = y[idx]
        return scores[order], labels[order]

    a_score_sort, a_label_sort = _split_and_sort(0)
    b_score_sort, b_label_sort = _split_and_sort(1)
    return a_score_sort, b_score_sort, a_label_sort, b_label_sort
def cal_fairness_metric_by_groups(a_score, b_score, a_label, b_label, metric = "xauc"):
    """Fairness gap for two pre-split groups.

    metric: "xauc" uses xAUC_fast, anything else uses pairwise_fast.
    Returns (|m_ab - m_ba|, m_ab, m_ba).
    """
    if metric == "xauc":
        metric_ab, metric_ba, _ = xAUC_fast(a_score, b_score, a_label, b_label)
    else:
        metric_ab, metric_ba = pairwise_fast(a_score, b_score, a_label, b_label)
    return abs(metric_ab - metric_ba),metric_ab,metric_ba
def cal_fairness_metric(pred, y, a, metric = "xauc"):
    """Fairness gap between group a == 0 and group a == 1.

    Splits predictions and labels by the group attribute *a*, then compares
    the two cross-group metrics. Returns (|m_ab - m_ba|, m_ab, m_ba).
    """
    a_idx, b_idx = np.where(a == 0), np.where(a == 1)
    a_score, b_score = pred[a_idx].reshape(-1), pred[b_idx].reshape(-1)
    a_label, b_label = y[a_idx].reshape(-1), y[b_idx].reshape(-1)
    if metric == "xauc":
        metric_ab, metric_ba, _ = xAUC_fast(a_score, b_score, a_label, b_label)
    else:
        metric_ab, metric_ba = pairwise_fast(a_score, b_score, a_label, b_label)
    return abs(metric_ab - metric_ba),metric_ab,metric_ba
def AUC(score, label):
    """AUC as the fraction of correctly ordered positive/negative pairs.

    Uses a strict comparison: tied scores contribute nothing (this matches
    the original double-loop definition, not roc_auc_score's 0.5-per-tie).
    Replaces the previous O(n^2) double loop with sort + binary search,
    O(n log n).

    Returns (auc, n_correct_pairs).  Raises ZeroDivisionError when either
    class is absent (as before).
    """
    score = np.asarray(score).reshape(-1)
    label = np.asarray(label).reshape(-1)
    pos = np.sort(score[label == 1])
    neg = np.sort(score[label == 0])
    # For each positive score, count negatives strictly below it.
    sum_ = int(np.searchsorted(neg, pos, side="left").sum())
    return sum_ / (len(pos) * len(neg)), sum_
def xAUC(a_score, b_score, a_label, b_label):
    """Cross-group AUC terms with strict (>) comparison.

    xauc_ab: probability that a positive from group a outscores a negative
    from group b; xauc_ba: the reverse.  Replaces the previous
    O(numa * numb) double loop with sort + binary search, O(n log n);
    tie handling (ties count as incorrect) is unchanged.

    Returns (xauc_ab, xauc_ba, total_correct_cross_pairs).
    """
    a_score = np.asarray(a_score).reshape(-1)
    b_score = np.asarray(b_score).reshape(-1)
    a_label = np.asarray(a_label).reshape(-1)
    b_label = np.asarray(b_label).reshape(-1)
    a1, a0 = np.sort(a_score[a_label == 1]), np.sort(a_score[a_label == 0])
    b1, b0 = np.sort(b_score[b_label == 1]), np.sort(b_score[b_label == 0])
    # Count, for each positive, the other group's negatives strictly below it.
    sum_ab = int(np.searchsorted(b0, a1, side="left").sum())
    sum_ba = int(np.searchsorted(a0, b1, side="left").sum())
    return sum_ab / (len(a1) * len(b0)), sum_ba / (len(b1) * len(a0)), sum_ab + sum_ba
def xAUC_fast(a_score, b_score, a_label, b_label):
    """Vectorized cross-group AUC via sklearn's roc_auc_score.

    xauc_ab scores group-a positives against group-b negatives; xauc_ba the
    reverse.  NOTE(review): roc_auc_score credits ties with 0.5, so the
    third return value (a pair count reconstructed from the AUCs) can differ
    from the strict count produced by xAUC() when scores tie.
    """
    a_num1 = np.sum(a_label)
    a_num0 = len(a_label) - a_num1
    b_num1 = np.sum(b_label)
    b_num0 = len(b_label) - b_num1
    a_score1,a_score0 = a_score[a_label == 1],a_score[a_label == 0]
    b_score1,b_score0 = b_score[b_label == 1],b_score[b_label == 0]
    # Group-a positives (label 1) vs group-b negatives (label 0).
    ab_label = np.concatenate((np.ones(int(a_num1)),np.zeros(int(b_num0))))
    ab_score = np.concatenate((a_score1,b_score0))
    xauc_ab = roc_auc_score(ab_label,ab_score)
    # Group-b positives vs group-a negatives.
    ba_label = np.concatenate((np.ones(int(b_num1)),np.zeros(int(a_num0))))
    ba_score = np.concatenate((b_score1,a_score0))
    xauc_ba = roc_auc_score(ba_label,ba_score)
    return xauc_ab, xauc_ba, xauc_ab * a_num1 * b_num0 + xauc_ba * b_num1 * a_num0
def post_score(train_score, train_score_post, test_score):
    """Map test scores into the post-processed score space.

    `train_score` must be sorted in decreasing order and `train_score_post`
    holds the corresponding post-processed scores.  Each test score is binned
    between adjacent training scores and replaced by a value interpolated
    between the bin's post-processed endpoints.

    NOTE(review): the resumable cursor `tep_id` implies `test_score` is also
    expected in decreasing order; a test score belonging to an earlier bin
    than the cursor is silently dropped from the output -- confirm callers.
    """
    tep_id = 0
    # bins[j] collects test scores falling between train_score[j-1] and
    # train_score[j]; bins[0] is above all, bins[-1] at/below the minimum.
    bins = [[] for i in range(len(train_score)+1)]
    for i in range(len(test_score)):
        s = test_score[i]
        if s>train_score[0]:
            bins[0].append(s)
        elif s<=train_score[-1]:
            bins[-1].append(s)
        else:
            for j in range(tep_id,len(train_score)):
                if train_score[j-1]>=s and train_score[j]<s:
                    bins[j].append(s)
                    tep_id = j
                    break
    # Replace each binned score by a position-weighted mix of the bin's
    # post-processed endpoints (endpoints 1.0 above the top, 0.0 below the
    # bottom of the training range).
    changed_b_score = []
    for bin_ in range(len(bins)):
        for item in range(len(bins[bin_])):
            num = (len(bins[bin_]))
            if bin_==0:
                changed_b_score.append((item)*train_score_post[bin_]/num+(num-item)/num)
            elif bin_==len(train_score_post):
                changed_b_score.append((num -item)*train_score_post[bin_-1]/num)
            else:
                changed_b_score.append((item)*train_score_post[bin_]/num + (num-item)*train_score_post[bin_-1]/num)
    return np.array(changed_b_score)
@jit(nopython=True)
def maxAUC(a_label, b_label):
    """DP merge of two decreasingly-sorted groups maximizing ordered pairs.

    Labels carry one extra leading element (index 0 is never read; callers
    such as post_b_score pass arrays of length group_size + 1), so M and N
    are the group sizes.  cost[i, j] is the best count of correctly ordered
    positive/negative pairs after placing the top i items of group a and the
    top j of group b; path[i, j] stores [predecessor cell, this cell] for
    backtracking.  Returns (cost[M, N], path).
    """
    M = len(a_label)-1
    N = len(b_label)-1
    a_1 = np.sum(a_label)
    b_1 = np.sum(b_label)
    path = np.zeros((M+1, N+1,2,2))
    cost = np.zeros((M+1, N+1))
    # First column: group a alone; each positive precedes all of b's negatives.
    for i in range(1,M+1):
        if a_label[i]==1:
            cost[i,0] = N-b_1 + cost[i-1, 0]
        else:
            cost[i,0] = cost[i-1,0]
        path[i,0,:,:] = np.array([[i-1, 0], [ i, 0]])
    # First row: group b alone, symmetrically.
    for i in range(1,N+1):
        if b_label[i]==1:
            cost[0, i] = cost[0,i-1]+ M - a_1
        else:
            cost[0, i] = cost[0,i-1]
        path[0,i,:,:] = np.array([[0, i-1],[0, i]])
    # Fill anti-diagonals i = (items placed); choose whether the next item
    # comes from group a (tep_b pairs gained) or group b (tep_a pairs gained).
    for i in range(2, M+1+N+1):
        for j in range(max(1, i-N), min(i, M+1)): # j[1, i-1]
            if i-j+1>N or a_label[j]==0:
                tep_b = 0
            else:
                tep_b = N - (i-j) - np.sum(b_label[i-j+1:])
            if j+1>M or b_label[i-j]==0:
                tep_a = 0
            else:
                tep_a = M - j -np.sum(a_label[j+1:])
            if cost[j-1, i-j] + tep_b > cost[j, i-j-1] + tep_a:
                cost[j, i-j] = cost[j-1, i-j] + tep_b
                path[j, i-j,:,:] = np.array([[j-1, i-j], [j, i-j]])
            else:
                cost[j, i-j] = cost[j, i-j-1] + tep_a
                path[j, i-j,:,:] = np.array([[j, i-j-1], [j, i-j]])
    return cost[M,N], path
@jit(nopython=True)
def xAUC_post(a_label, b_label, lamb):
    """Fairness-penalized variant of maxAUC for the xAUC metric.

    Alongside the pair-count table `cost`, each DP cell tracks a signed
    xAUC-disparity term `cost_unfair` (positive contributions from a-positive
    / b-negative pairs, negative from the reverse); transitions maximize
    cost - |cost_unfair| with the disparity weighted by `lamb`.  Labels are
    padded as in maxAUC.  Returns (cost, path, cost_unfair), all full tables.
    """
    M = len(a_label)-1
    N = len(b_label)-1
    a_1 = np.sum(a_label)
    b_1 = np.sum(b_label)
    # Cross-pair counts used as disparity denominators.
    a_1_b_0 = a_1*(N-b_1)
    b_1_a_0 = b_1*(M - a_1)
    path = np.zeros((M+1, N+1,2,2))
    cost_unfair = np.zeros((M+1, N+1))
    cost = np.zeros((M+1, N+1))
    for i in range(1,M+1):
        if a_label[i]==1:
            cost_unfair[i, 0] = (N-b_1)/a_1_b_0*lamb + cost_unfair[i-1,0]
            cost[i,0] = N-b_1 + cost[i-1, 0]
        else:
            cost_unfair[i, 0] = cost_unfair[i-1,0]
            cost[i,0] = cost[i-1,0]
        path[i,0,:,:] = np.array([[i-1, 0], [ i, 0]])
    for i in range(1,N+1):
        if b_label[i]==1:
            cost_unfair[0,i] = -(M-a_1)/b_1_a_0*lamb + cost_unfair[0, i-1]
            cost[0, i] = cost[0,i-1] + M - a_1
        else:
            cost[0, i] = cost[0,i-1]
            cost_unfair[0, i] = cost_unfair[0,i-1]
        path[0,i,:,:] = np.array([[0, i-1],[0, i]])
    for i in range(2, M+1+N+1):
        for j in range(max(1, i-N), min(i, M+1)): # j[1, i-1]
            # Pair gain and disparity delta if the next item comes from a.
            if i-j+1>N or a_label[j]==0:
                tep_b = 0
                tep_unfair_b = 0
            else:
                tep_b = N - (i-j) - np.sum(b_label[i-j+1:])
                tep_unfair_b = tep_b/a_1_b_0*lamb
            # ... and if it comes from b.
            if j+1>M or b_label[i-j]==0:
                tep_a = 0
                tep_unfair_a = 0
            else:
                tep_a = M - j -np.sum(a_label[j+1:])
                tep_unfair_a = -tep_a/b_1_a_0*lamb
            if cost[j-1, i-j] + tep_b - abs(tep_unfair_b + cost_unfair[j-1, i-j]) > cost[j, i-j-1] + tep_a - abs(tep_unfair_a + cost_unfair[j, i-j-1]):
                cost_unfair[j, i-j] = tep_unfair_b + cost_unfair[j-1, i-j]
                cost[j, i-j] = cost[j-1, i-j] + tep_b
                path[j, i-j,:,:] = np.array([[j-1, i-j], [j, i-j]])
            else:
                cost_unfair[j, i-j] = tep_unfair_a + cost_unfair[j, i-j-1]
                cost[j, i-j] = cost[j, i-j-1] + tep_a
                path[j, i-j,:,:] = np.array([[j, i-j-1], [j, i-j]])
    return cost, path, cost_unfair
@jit(nopython=True)
def xAUC_post_(a_label, b_label, lamb):
    """Variant of xAUC_post without the `i-j+1>N` / `j+1>M` edge guards.

    Behavior is identical: at those edges the guarded terms evaluate to 0
    anyway (the trailing label slices are empty), so the guards in xAUC_post
    are redundant short-circuits.  See xAUC_post for the DP's documentation.
    """
    M = len(a_label)-1
    N = len(b_label)-1
    a_1 = np.sum(a_label)
    b_1 = np.sum(b_label)
    a_1_b_0 = a_1*(N-b_1)
    b_1_a_0 = b_1*(M - a_1)
    path = np.zeros((M+1, N+1,2,2))
    cost_unfair = np.zeros((M+1, N+1))
    cost = np.zeros((M+1, N+1))
    for i in range(1,M+1):
        if a_label[i]==1:
            cost_unfair[i, 0] = (N-b_1)/a_1_b_0 * lamb + cost_unfair[i-1,0]
            cost[i,0] = N-b_1 + cost[i-1, 0]
        else:
            cost_unfair[i, 0] = cost_unfair[i-1,0]
            cost[i,0] = cost[i-1,0]
        path[i,0,:,:] = np.array([[i-1, 0], [ i, 0]])
    for i in range(1,N+1):
        if b_label[i]==1:
            cost_unfair[0,i] = -(M - a_1) / b_1_a_0 * lamb + cost_unfair[0, i-1]
            cost[0, i] = cost[0,i-1] + M - a_1
        else:
            cost[0, i] = cost[0,i-1]
            cost_unfair[0, i] = cost_unfair[0,i-1]
        path[0,i,:,:] = np.array([[0, i-1],[0, i]])
    for i in range(2, M+1+N+1):
        for j in range(max(1, i-N), min(i, M+1)): # j[1, i-1]
            if a_label[j]==0:
                tep_b = 0
                tep_unfair_b = 0
            else:
                tep_b = N - (i-j) - np.sum(b_label[i-j+1:])
                tep_unfair_b = tep_b/a_1_b_0*lamb
            if b_label[i-j]==0:
                tep_a = 0
                tep_unfair_a = 0
            else:
                tep_a = M - j -np.sum(a_label[j+1:])
                tep_unfair_a = -tep_a/b_1_a_0*lamb
            if cost[j-1, i-j] + tep_b - abs(tep_unfair_b + cost_unfair[j-1, i-j]) > cost[j, i-j-1] + tep_a - abs(tep_unfair_a + cost_unfair[j, i-j-1]):
                cost_unfair[j, i-j] = tep_unfair_b + cost_unfair[j-1, i-j]
                cost[j, i-j] = cost[j-1, i-j] + tep_b
                path[j, i-j,:,:] = np.array([[j-1, i-j], [j, i-j]])
            else:
                cost_unfair[j, i-j] = tep_unfair_a + cost_unfair[j, i-j-1]
                cost[j, i-j] = cost[j, i-j-1] + tep_a
                path[j, i-j,:,:] = np.array([[j, i-j-1], [j, i-j]])
    return cost, path, cost_unfair
@jit(nopython=True)
def pairwise_post(a_label, b_label, lamb):
    """Pairwise-fairness analogue of xAUC_post.

    The disparity term for each move is weighted by zeros_mat -- the number
    of negatives (from both groups) not yet placed at that DP cell --
    matching the pairwise metric's denominators a_1_0 / b_1_0.  Labels are
    padded as in maxAUC.  Returns (cost, path, cost_unfair).
    """
    ###a, b has been sorted decreasing sort.
    M = len(a_label)-1
    N = len(b_label)-1
    a_1 = np.sum(a_label)
    b_1 = np.sum(b_label)
    # Pairwise denominators: positives of one group times all negatives.
    a_1_0 = a_1*((N-b_1)+(M - a_1))
    b_1_0 = b_1*((M - a_1)+(N-b_1))
    path = np.zeros((M+1, N+1,2,2))
    cost_unfair = np.zeros((M+1, N+1))
    cost = np.zeros((M+1, N+1))
    # zeros_mat[i, j]: count of remaining (unplaced) negatives after taking
    # the top i of group a and top j of group b.
    zeros_mat = np.zeros((M+1, N+1))
    zeros_mat[0,0] = ((N-b_1)+(M - a_1))
    for i in range(1,N+1):
        if b_label[i]==1:
            zeros_mat[0,i] = zeros_mat[0,i-1]
        else:
            zeros_mat[0,i] = zeros_mat[0,i-1]-1
    for i in range(1,M+1):
        if a_label[i]==0:
            zeros_mat[i,0] = zeros_mat[i-1,0]-1
        else:
            zeros_mat[i,0] = zeros_mat[i-1,0]
        for j in range(1,N+1):
            if b_label[j]==0:
                zeros_mat[i,j] = zeros_mat[i,j-1]-1
            else:
                zeros_mat[i,j] = zeros_mat[i,j-1]
    # Boundary rows: one group alone (see maxAUC for the pair counting).
    for i in range(1,M+1):
        if a_label[i]==1:
            cost_unfair[i, 0] = zeros_mat[i,0]/a_1_0*lamb + cost_unfair[i-1,0]
            cost[i,0] = N-b_1 + cost[i-1, 0]
        else:
            cost_unfair[i, 0] = cost_unfair[i-1,0]
            cost[i,0] = cost[i-1,0]
        path[i,0,:,:] = np.array([[i-1, 0], [ i, 0]])
    for i in range(1,N+1):
        if b_label[i]==1:
            cost_unfair[0,i] = -zeros_mat[0,i]/b_1_0*lamb + cost_unfair[0, i-1]
            cost[0, i] = cost[0,i-1] + M - a_1
        else:
            cost[0, i] = cost[0,i-1]
            cost_unfair[0, i] = cost_unfair[0, i-1]
        path[0,i,:,:] = np.array([[0, i-1],[0, i]])
    # Main DP over anti-diagonals, maximizing cost - |cost_unfair|.
    for i in range(2, M+1+N+1):
        for j in range(max(1, i-N), min(i, M+1)): # j[1, i-1]
            if a_label[j]==0:
                tep_b = 0
                tep_unfair_b = 0
            else:
                tep_b = N - (i-j) - np.sum(b_label[i-j+1:])
                tep_unfair_b = zeros_mat[j,i-j]/a_1_0*lamb
            if b_label[i-j]==0:
                tep_a = 0
                tep_unfair_a = 0
            else:
                tep_a = M - j -np.sum(a_label[j+1:])
                tep_unfair_a = -zeros_mat[j,i-j]/b_1_0*lamb
            if cost[j-1, i-j] + tep_b - abs(tep_unfair_b + cost_unfair[j-1, i-j]) > cost[j, i-j-1] + tep_a - abs(tep_unfair_a + cost_unfair[j, i-j-1]):
                cost_unfair[j, i-j] = tep_unfair_b + cost_unfair[j-1, i-j]
                cost[j, i-j] = cost[j-1, i-j] + tep_b
                path[j, i-j,:,:] = np.array([[j-1, i-j], [j, i-j]])
            else:
                cost_unfair[j, i-j] = tep_unfair_a + cost_unfair[j, i-j-1]
                cost[j, i-j] = cost[j, i-j-1] + tep_a
                path[j, i-j,:,:] = np.array([[j, i-j-1], [j, i-j]])
    return cost, path, cost_unfair
def post_b_score(a_score, b_score, a_label, b_label, lamb = 0, _type="xauc"): ## score has to be decreasing.
    """Re-score group b against group a via a DP-optimal merge ordering.

    Builds the cost/path tables with the post-processing DP matching
    ``_type`` (xAUC_post, maxAUC or pairwise_post), traces the optimal
    merge path back, and linearly interpolates new b scores between the
    neighbouring a scores.

    Returns (new_b_scores, final_unfairness); unfairness is 0 for "AUC".
    NOTE(review): both score arrays are assumed sorted in decreasing order.
    """
    M = len(a_score)
    N = len(b_score)
    # Select the DP that matches the requested fairness metric.
    if _type == "xauc":
        cost, path_ , cost_unfair = xAUC_post(a_label, b_label, lamb = lamb)
    elif _type=="AUC":
        cost, path_ = maxAUC(a_label, b_label)
    elif _type=="prf":
        cost, path_ , cost_unfair = pairwise_post(a_label, b_label, lamb = lamb)
    else:
        print("Unknown type")
        exit()
    @jit(nopython=True)
    def pathTrace(path):
        # Walk the stored predecessor links back from (M, N) to (0, 0).
        trace = []
        tep = path[M,N,:,:]
        trace.append(tep[-1,:])
        trace.append(tep[0,:])
        for i in range(M+N-1):
            tep = path[int(tep[0][0]), int(tep[0][1]), :,:]
            trace.append(tep[0,:])
        trace.reverse()
        return trace
    path = pathTrace(path_)
    # gap_a[i] collects the b items placed between a_score[i-1] and a_score[i].
    gap_a = [[] for i in range(M+1)]
    for i in range(1,len(path)):
        if int(path[i][0])==int(path[i-1][0]):
            gap_a[int(path[i][0])].append(int(path[i][1]))
    changed_b_score = []
    for bin_ in range(len(gap_a)):
        for item in range(len(gap_a[bin_])):
            num = (len(gap_a[bin_])+1)
            if bin_==0:
                # Before the first a score: interpolate towards 1.
                changed_b_score.append((item+1)*a_score[bin_]/num+(num-item-1)/num)
            elif bin_==len(a_score):
                # After the last a score: interpolate towards 0.
                changed_b_score.append((num -item-1)*a_score[bin_-1]/num)
            else:
                # Between two consecutive a scores: linear interpolation.
                changed_b_score.append((item+1)*a_score[bin_]/num + (num-item-1)*a_score[bin_-1]/num)
    if _type=="AUC":
        return np.array(changed_b_score), 0
    else:
        return np.array(changed_b_score), cost_unfair[-1, -1]
def pairwise(a_score, b_score, a_label, b_label):
    """Pairwise ranking accuracy per group (cross-group + within-group pairs).

    Counts correctly ordered (positive, negative) pairs across groups with an
    O(len(a)*len(b)) double loop, adds the within-group AUC contribution, and
    normalises by the total number of pairs involving each group's positives.
    Returns (accuracy for a-positives, accuracy for b-positives).
    """
    sum_ab = 0
    sum_ba = 0
    numa = len(a_label)
    numb = len(b_label)
    a_num1 = np.sum(a_label)
    a_num0 = len(a_label) - a_num1
    b_num1 = np.sum(b_label)
    b_num0 = len(b_label) - b_num1
    # Within-group AUCs supply the same-group ordered-pair counts below.
    i_AUCa = roc_auc_score(a_label, a_score)
    i_AUCb = roc_auc_score(b_label, b_score)
    for i in range(numa):
        for j in range(numb):
            if a_label[i] ==1 and b_label[j] ==0:
                if a_score[i]>b_score[j]:
                    sum_ab+=1
            elif a_label[i]==0 and b_label[j]==1:
                if b_score[j]>a_score[i]:
                    sum_ba+=1
    return (sum_ab+i_AUCa*a_num0*a_num1)/(a_num1*(b_num0+a_num0)), (sum_ba+i_AUCb*b_num0*b_num1)/(b_num1*(a_num0+b_num0))
def pairwise_fast(a_score, b_score, a_label, b_label):
    """Vectorised version of pairwise() built on roc_auc_score.

    Each group's positives are ranked against the pooled negatives of both
    groups, so a single AUC call covers cross- and within-group pairs.
    Returns (pair_ab, pair_ba).
    """
    a_num1 = np.sum(a_label)
    a_num0 = len(a_label) - a_num1
    b_num1 = np.sum(b_label)
    b_num0 = len(b_label) - b_num1
    a_score1,a_score0 = a_score[a_label == 1],a_score[a_label == 0]
    b_score1,b_score0 = b_score[b_label == 1],b_score[b_label == 0]
    ab_label = np.concatenate((np.ones(int(a_num1)),np.zeros(int(b_num0+a_num0))))
    ab_score = np.concatenate((a_score1,a_score0,b_score0))
    pair_ab = roc_auc_score(ab_label,ab_score) #[a=1, 0]
    ba_label = np.concatenate((np.ones(int(b_num1)),np.zeros(int(a_num0+b_num0))))
    ba_score = np.concatenate((b_score1,b_score0, a_score0))
    pair_ba = roc_auc_score(ba_label,ba_score) #[b=1, 0]
    return pair_ab, pair_ba
def zeros_mat(a, b):
    """Return the (M+1, N+1) table of remaining-negative counts.

    Entry [i, j] holds how many label-0 examples remain unplaced after
    consuming the first i items of ``a`` and the first j items of ``b``
    (both plain lists of 0/1 labels).
    """
    # Prepend a sentinel so indices line up with the 1-based DP convention.
    a_label = [0] + a
    b_label = [0] + b
    M = len(a_label) - 1
    N = len(b_label) - 1
    grid = np.zeros((M + 1, N + 1))
    # Start with the total number of negatives in both groups.
    grid[0, 0] = (N - np.sum(b)) + (M - np.sum(a))
    # First row: each consumed b negative lowers the remaining count by one.
    for j in range(1, N + 1):
        grid[0, j] = grid[0, j - 1] if b_label[j] == 1 else grid[0, j - 1] - 1
    # Remaining rows: consume one a item, then sweep across the b items.
    for i in range(1, M + 1):
        grid[i, 0] = grid[i - 1, 0] - 1 if a_label[i] == 0 else grid[i - 1, 0]
        for j in range(1, N + 1):
            grid[i, j] = grid[i, j - 1] - 1 if b_label[j] == 0 else grid[i, j - 1]
    return grid
| 32.735812
| 151
| 0.519727
|
import numpy as np
from sklearn.metrics import roc_auc_score
from numba import jit
def array2str(tmp_array, sep = " "):
    """Join the numbers in ``tmp_array`` as 3-decimal strings separated by ``sep``."""
    return sep.join("{:.3f}".format(value) for value in tmp_array)
def generate_sorted_groups(pred, y, a):
    """Split predictions by group attribute and sort each group by score.

    Group a is where ``a == 0``; group b is where ``a == 1``. Each group's
    scores are sorted in decreasing order, with labels permuted to match.
    Returns (a_scores, b_scores, a_labels, b_labels), all sorted.
    """
    group_a = np.where(a == 0)
    group_b = np.where(a == 1)
    # Group b: descending score order, labels carried along.
    scores_b = pred[group_b].reshape(-1)
    order_b = np.argsort(-scores_b)
    labels_b = y[group_b]
    # Group a: same treatment.
    scores_a = pred[group_a].reshape(-1)
    order_a = np.argsort(-scores_a)
    labels_a = y[group_a]
    return scores_a[order_a], scores_b[order_b], labels_a[order_a], labels_b[order_b]
def cal_fairness_metric_by_groups(a_score, b_score, a_label, b_label, metric = "xauc"):
    """Fairness gap between groups given already-split score/label arrays.

    metric: "xauc" delegates to xAUC_fast; anything else to pairwise_fast.
    Returns (|metric_ab - metric_ba|, metric_ab, metric_ba).
    """
    if metric == "xauc":
        metric_ab, metric_ba, _ = xAUC_fast(a_score, b_score, a_label, b_label)
    else:
        metric_ab, metric_ba = pairwise_fast(a_score, b_score, a_label, b_label)
    return abs(metric_ab - metric_ba),metric_ab,metric_ba
def cal_fairness_metric(pred, y, a, metric = "xauc"):
    """Fairness gap computed from pooled predictions and group attribute.

    Splits pred/y into group a (a == 0) and group b (a == 1), then delegates
    to xAUC_fast ("xauc") or pairwise_fast (otherwise).
    Returns (|metric_ab - metric_ba|, metric_ab, metric_ba).
    """
    a_idx, b_idx = np.where(a == 0), np.where(a == 1)
    a_score, b_score = pred[a_idx].reshape(-1), pred[b_idx].reshape(-1)
    a_label, b_label = y[a_idx].reshape(-1), y[b_idx].reshape(-1)
    if metric == "xauc":
        metric_ab, metric_ba, _ = xAUC_fast(a_score, b_score, a_label, b_label)
    else:
        metric_ab, metric_ba = pairwise_fast(a_score, b_score, a_label, b_label)
    return abs(metric_ab - metric_ba),metric_ab,metric_ba
def AUC(score, label):
    """Brute-force AUC: fraction of (positive, negative) pairs ranked correctly.

    Returns (auc_value, correctly_ordered_pair_count). O(n^2); assumes the
    labels contain both classes (otherwise the denominator is zero).
    """
    total = len(label)
    ordered = 0
    for pos in range(total):
        for neg in range(total):
            if label[pos] == 1 and label[neg] == 0 and score[pos] > score[neg]:
                ordered += 1
    positives = np.sum(label)
    return ordered / (positives * (total - positives)), ordered
def xAUC(a_score, b_score, a_label, b_label):
    """Brute-force cross-group AUCs.

    cross_ab counts a-positives ranked above b-negatives; cross_ba counts
    b-positives ranked above a-negatives. Assumes score and label arrays in
    each group have equal length. Returns (xauc_ab, xauc_ba, raw pair sum).
    """
    a_pos = np.sum(a_label)
    a_neg = len(a_label) - a_pos
    b_pos = np.sum(b_label)
    b_neg = len(b_label) - b_pos
    cross_ab = 0
    cross_ba = 0
    for sa, la in zip(a_score, a_label):
        for sb, lb in zip(b_score, b_label):
            if la == 1 and lb == 0 and sa > sb:
                cross_ab += 1
            elif la == 0 and lb == 1 and sb > sa:
                cross_ba += 1
    return cross_ab / (a_pos * b_neg), cross_ba / (b_pos * a_neg), cross_ab + cross_ba
def xAUC_fast(a_score, b_score, a_label, b_label):
    """Cross-group AUCs via roc_auc_score instead of the O(n^2) loops.

    xauc_ab ranks a's positives against b's negatives; xauc_ba ranks b's
    positives against a's negatives. The third value rescales both back to
    raw ordered-pair counts (matches xAUC's summed count).
    """
    a_num1 = np.sum(a_label)
    a_num0 = len(a_label) - a_num1
    b_num1 = np.sum(b_label)
    b_num0 = len(b_label) - b_num1
    a_score1,a_score0 = a_score[a_label == 1],a_score[a_label == 0]
    b_score1,b_score0 = b_score[b_label == 1],b_score[b_label == 0]
    ab_label = np.concatenate((np.ones(int(a_num1)),np.zeros(int(b_num0))))
    ab_score = np.concatenate((a_score1,b_score0))
    xauc_ab = roc_auc_score(ab_label,ab_score)
    ba_label = np.concatenate((np.ones(int(b_num1)),np.zeros(int(a_num0))))
    ba_score = np.concatenate((b_score1,a_score0))
    xauc_ba = roc_auc_score(ba_label,ba_score)
    return xauc_ab, xauc_ba, xauc_ab * a_num1 * b_num0 + xauc_ba * b_num1 * a_num0
def post_score(train_score, train_score_post, test_score):
    """Map test scores onto the post-processed train-score scale.

    Each test score is bucketed between neighbouring train scores, then
    linearly interpolated between the corresponding post-processed values.
    NOTE(review): assumes train_score is sorted in decreasing order, and the
    forward-only ``tep_id`` scan assumes test_score is processed in
    decreasing order as well -- confirm with callers.
    """
    tep_id = 0
    bins = [[] for i in range(len(train_score)+1)]
    for i in range(len(test_score)):
        s = test_score[i]
        if s>train_score[0]:
            bins[0].append(s)
        elif s<=train_score[-1]:
            bins[-1].append(s)
        else:
            # Resume scanning from the last matched bucket position.
            for j in range(tep_id,len(train_score)):
                if train_score[j-1]>=s and train_score[j]<s:
                    bins[j].append(s)
                    tep_id = j
                    break
    changed_b_score = []
    for bin_ in range(len(bins)):
        for item in range(len(bins[bin_])):
            num = (len(bins[bin_]))
            if bin_==0:
                # Above all train scores: interpolate towards 1.
                changed_b_score.append((item)*train_score_post[bin_]/num+(num-item)/num)
            elif bin_==len(train_score_post):
                # Below all train scores: interpolate towards 0.
                changed_b_score.append((num -item)*train_score_post[bin_-1]/num)
            else:
                # Between two train scores: linear interpolation.
                changed_b_score.append((item)*train_score_post[bin_]/num + (num-item)*train_score_post[bin_-1]/num)
    return np.array(changed_b_score)
@jit(nopython=True)
def maxAUC(a_label, b_label):
    """DP over merge orderings of the two label sequences, maximising
    correctly ordered (positive, negative) pairs.

    Both label arrays carry a leading sentinel (index 0 unused), so the
    effective sizes are M and N. cost[j, k] is the best pair count after
    merging j items of a with k items of b; path stores predecessor links
    for backtracking. Returns (cost[M, N], path).
    """
    M = len(a_label)-1
    N = len(b_label)-1
    a_1 = np.sum(a_label)
    b_1 = np.sum(b_label)
    path = np.zeros((M+1, N+1,2,2))
    cost = np.zeros((M+1, N+1))
    # First column: only a items placed so far.
    for i in range(1,M+1):
        if a_label[i]==1:
            cost[i,0] = N-b_1 + cost[i-1, 0]
        else:
            cost[i,0] = cost[i-1,0]
        path[i,0,:,:] = np.array([[i-1, 0], [ i, 0]])
    # First row: only b items placed so far.
    for i in range(1,N+1):
        if b_label[i]==1:
            cost[0, i] = cost[0,i-1]+ M - a_1
        else:
            cost[0, i] = cost[0,i-1]
        path[0,i,:,:] = np.array([[0, i-1],[0, i]])
    # Fill anti-diagonals: i = j + k, where j counts consumed a items.
    for i in range(2, M+1+N+1):
        for j in range(max(1, i-N), min(i, M+1)):
            # Gain from appending the j-th a item (pairs with remaining b zeros).
            if i-j+1>N or a_label[j]==0:
                tep_b = 0
            else:
                tep_b = N - (i-j) - np.sum(b_label[i-j+1:])
            # Gain from appending the (i-j)-th b item.
            if j+1>M or b_label[i-j]==0:
                tep_a = 0
            else:
                tep_a = M - j -np.sum(a_label[j+1:])
            if cost[j-1, i-j] + tep_b > cost[j, i-j-1] + tep_a:
                cost[j, i-j] = cost[j-1, i-j] + tep_b
                path[j, i-j,:,:] = np.array([[j-1, i-j], [j, i-j]])
            else:
                cost[j, i-j] = cost[j, i-j-1] + tep_a
                path[j, i-j,:,:] = np.array([[j, i-j-1], [j, i-j]])
    return cost[M,N], path
@jit(nopython=True)
def xAUC_post(a_label, b_label, lamb):
    """maxAUC-style DP with a lamb-weighted xAUC unfairness penalty.

    cost tracks ordered-pair gains; cost_unfair accumulates the signed
    xAUC imbalance (a-positive contributions positive, b-positive negative,
    each normalised by its pair count). The transition keeps whichever
    option maximises gain minus |accumulated unfairness|.
    Returns the full (cost, path, cost_unfair) tables.
    """
    M = len(a_label)-1
    N = len(b_label)-1
    a_1 = np.sum(a_label)
    b_1 = np.sum(b_label)
    # Normalisers: (a=1, b=0) and (b=1, a=0) cross-group pair counts.
    a_1_b_0 = a_1*(N-b_1)
    b_1_a_0 = b_1*(M - a_1)
    path = np.zeros((M+1, N+1,2,2))
    cost_unfair = np.zeros((M+1, N+1))
    cost = np.zeros((M+1, N+1))
    # First column: only a items placed.
    for i in range(1,M+1):
        if a_label[i]==1:
            cost_unfair[i, 0] = (N-b_1)/a_1_b_0*lamb + cost_unfair[i-1,0]
            cost[i,0] = N-b_1 + cost[i-1, 0]
        else:
            cost_unfair[i, 0] = cost_unfair[i-1,0]
            cost[i,0] = cost[i-1,0]
        path[i,0,:,:] = np.array([[i-1, 0], [ i, 0]])
    # First row: only b items placed.
    for i in range(1,N+1):
        if b_label[i]==1:
            cost_unfair[0,i] = -(M-a_1)/b_1_a_0*lamb + cost_unfair[0, i-1]
            cost[0, i] = cost[0,i-1] + M - a_1
        else:
            cost[0, i] = cost[0,i-1]
            cost_unfair[0, i] = cost_unfair[0,i-1]
        path[0,i,:,:] = np.array([[0, i-1],[0, i]])
    # Anti-diagonal fill: i = j + k, j counts consumed a items.
    for i in range(2, M+1+N+1):
        for j in range(max(1, i-N), min(i, M+1)):
            if i-j+1>N or a_label[j]==0:
                tep_b = 0
                tep_unfair_b = 0
            else:
                tep_b = N - (i-j) - np.sum(b_label[i-j+1:])
                tep_unfair_b = tep_b/a_1_b_0*lamb
            if j+1>M or b_label[i-j]==0:
                tep_a = 0
                tep_unfair_a = 0
            else:
                tep_a = M - j -np.sum(a_label[j+1:])
                tep_unfair_a = -tep_a/b_1_a_0*lamb
            if cost[j-1, i-j] + tep_b - abs(tep_unfair_b + cost_unfair[j-1, i-j]) > cost[j, i-j-1] + tep_a - abs(tep_unfair_a + cost_unfair[j, i-j-1]):
                cost_unfair[j, i-j] = tep_unfair_b + cost_unfair[j-1, i-j]
                cost[j, i-j] = cost[j-1, i-j] + tep_b
                path[j, i-j,:,:] = np.array([[j-1, i-j], [j, i-j]])
            else:
                cost_unfair[j, i-j] = tep_unfair_a + cost_unfair[j, i-j-1]
                cost[j, i-j] = cost[j, i-j-1] + tep_a
                path[j, i-j,:,:] = np.array([[j, i-j-1], [j, i-j]])
    return cost, path, cost_unfair
@jit(nopython=True)
def xAUC_post_(a_label, b_label, lamb):
    """Variant of xAUC_post.

    NOTE(review): unlike xAUC_post, the ``i-j+1>N`` / ``j+1>M`` boundary
    guards are absent in the transition below -- confirm this difference is
    intentional before relying on this version.
    """
    M = len(a_label)-1
    N = len(b_label)-1
    a_1 = np.sum(a_label)
    b_1 = np.sum(b_label)
    # Normalisers: (a=1, b=0) and (b=1, a=0) cross-group pair counts.
    a_1_b_0 = a_1*(N-b_1)
    b_1_a_0 = b_1*(M - a_1)
    path = np.zeros((M+1, N+1,2,2))
    cost_unfair = np.zeros((M+1, N+1))
    cost = np.zeros((M+1, N+1))
    # First column: only a items placed.
    for i in range(1,M+1):
        if a_label[i]==1:
            cost_unfair[i, 0] = (N-b_1)/a_1_b_0 * lamb + cost_unfair[i-1,0]
            cost[i,0] = N-b_1 + cost[i-1, 0]
        else:
            cost_unfair[i, 0] = cost_unfair[i-1,0]
            cost[i,0] = cost[i-1,0]
        path[i,0,:,:] = np.array([[i-1, 0], [ i, 0]])
    # First row: only b items placed.
    for i in range(1,N+1):
        if b_label[i]==1:
            cost_unfair[0,i] = -(M - a_1) / b_1_a_0 * lamb + cost_unfair[0, i-1]
            cost[0, i] = cost[0,i-1] + M - a_1
        else:
            cost[0, i] = cost[0,i-1]
            cost_unfair[0, i] = cost_unfair[0,i-1]
        path[0,i,:,:] = np.array([[0, i-1],[0, i]])
    # Anti-diagonal fill (no boundary guards -- see NOTE above).
    for i in range(2, M+1+N+1):
        for j in range(max(1, i-N), min(i, M+1)):
            if a_label[j]==0:
                tep_b = 0
                tep_unfair_b = 0
            else:
                tep_b = N - (i-j) - np.sum(b_label[i-j+1:])
                tep_unfair_b = tep_b/a_1_b_0*lamb
            if b_label[i-j]==0:
                tep_a = 0
                tep_unfair_a = 0
            else:
                tep_a = M - j -np.sum(a_label[j+1:])
                tep_unfair_a = -tep_a/b_1_a_0*lamb
            if cost[j-1, i-j] + tep_b - abs(tep_unfair_b + cost_unfair[j-1, i-j]) > cost[j, i-j-1] + tep_a - abs(tep_unfair_a + cost_unfair[j, i-j-1]):
                cost_unfair[j, i-j] = tep_unfair_b + cost_unfair[j-1, i-j]
                cost[j, i-j] = cost[j-1, i-j] + tep_b
                path[j, i-j,:,:] = np.array([[j-1, i-j], [j, i-j]])
            else:
                cost_unfair[j, i-j] = tep_unfair_a + cost_unfair[j, i-j-1]
                cost[j, i-j] = cost[j, i-j-1] + tep_a
                path[j, i-j,:,:] = np.array([[j, i-j-1], [j, i-j]])
    return cost, path, cost_unfair
@jit(nopython=True)
def pairwise_post(a_label, b_label, lamb):
    """DP post-processing for the pairwise fairness metric.

    Same skeleton as xAUC_post, but the unfairness increments are drawn
    from an inline remaining-negatives table (zeros_mat) normalised by the
    per-group total pair counts a_1_0 / b_1_0.
    Returns the full (cost, path, cost_unfair) tables.
    """
    M = len(a_label)-1
    N = len(b_label)-1
    a_1 = np.sum(a_label)
    b_1 = np.sum(b_label)
    # Normalisers: each group's positives times all negatives (both groups).
    a_1_0 = a_1*((N-b_1)+(M - a_1))
    b_1_0 = b_1*((M - a_1)+(N-b_1))
    path = np.zeros((M+1, N+1,2,2))
    cost_unfair = np.zeros((M+1, N+1))
    cost = np.zeros((M+1, N+1))
    # zeros_mat[i, j]: negatives remaining after i a-items and j b-items.
    zeros_mat = np.zeros((M+1, N+1))
    zeros_mat[0,0] = ((N-b_1)+(M - a_1))
    for i in range(1,N+1):
        if b_label[i]==1:
            zeros_mat[0,i] = zeros_mat[0,i-1]
        else:
            zeros_mat[0,i] = zeros_mat[0,i-1]-1
    for i in range(1,M+1):
        if a_label[i]==0:
            zeros_mat[i,0] = zeros_mat[i-1,0]-1
        else:
            zeros_mat[i,0] = zeros_mat[i-1,0]
        for j in range(1,N+1):
            if b_label[j]==0:
                zeros_mat[i,j] = zeros_mat[i,j-1]-1
            else:
                zeros_mat[i,j] = zeros_mat[i,j-1]
    # First column: only a items placed.
    for i in range(1,M+1):
        if a_label[i]==1:
            cost_unfair[i, 0] = zeros_mat[i,0]/a_1_0*lamb + cost_unfair[i-1,0]
            cost[i,0] = N-b_1 + cost[i-1, 0]
        else:
            cost_unfair[i, 0] = cost_unfair[i-1,0]
            cost[i,0] = cost[i-1,0]
        path[i,0,:,:] = np.array([[i-1, 0], [ i, 0]])
    # First row: only b items placed.
    for i in range(1,N+1):
        if b_label[i]==1:
            cost_unfair[0,i] = -zeros_mat[0,i]/b_1_0*lamb + cost_unfair[0, i-1]
            cost[0, i] = cost[0,i-1] + M - a_1
        else:
            cost[0, i] = cost[0,i-1]
            cost_unfair[0, i] = cost_unfair[0, i-1]
        path[0,i,:,:] = np.array([[0, i-1],[0, i]])
    # Anti-diagonal fill: i = j + k, j counts consumed a items.
    for i in range(2, M+1+N+1):
        for j in range(max(1, i-N), min(i, M+1)):
            if a_label[j]==0:
                tep_b = 0
                tep_unfair_b = 0
            else:
                tep_b = N - (i-j) - np.sum(b_label[i-j+1:])
                tep_unfair_b = zeros_mat[j,i-j]/a_1_0*lamb
            if b_label[i-j]==0:
                tep_a = 0
                tep_unfair_a = 0
            else:
                tep_a = M - j -np.sum(a_label[j+1:])
                tep_unfair_a = -zeros_mat[j,i-j]/b_1_0*lamb
            if cost[j-1, i-j] + tep_b - abs(tep_unfair_b + cost_unfair[j-1, i-j]) > cost[j, i-j-1] + tep_a - abs(tep_unfair_a + cost_unfair[j, i-j-1]):
                cost_unfair[j, i-j] = tep_unfair_b + cost_unfair[j-1, i-j]
                cost[j, i-j] = cost[j-1, i-j] + tep_b
                path[j, i-j,:,:] = np.array([[j-1, i-j], [j, i-j]])
            else:
                cost_unfair[j, i-j] = tep_unfair_a + cost_unfair[j, i-j-1]
                cost[j, i-j] = cost[j, i-j-1] + tep_a
                path[j, i-j,:,:] = np.array([[j, i-j-1], [j, i-j]])
    return cost, path, cost_unfair
def post_b_score(a_score, b_score, a_label, b_label, lamb = 0, _type="xauc"):
    """Re-score group b against group a via a DP-optimal merge ordering.

    Fix: the first statement was fused onto the ``def`` line
    (``...: M = len(a_score)``) while the rest of the body stayed indented
    below it, which is a SyntaxError; the body now starts on its own line.

    Builds the cost/path tables with the DP matching ``_type`` (xAUC_post,
    maxAUC or pairwise_post), traces the optimal path back, and linearly
    interpolates new b scores between the neighbouring a scores.
    Returns (new_b_scores, final_unfairness); unfairness is 0 for "AUC".
    NOTE(review): both score arrays are assumed sorted in decreasing order.
    """
    M = len(a_score)
    N = len(b_score)
    # Select the DP that matches the requested fairness metric.
    if _type == "xauc":
        cost, path_ , cost_unfair = xAUC_post(a_label, b_label, lamb = lamb)
    elif _type=="AUC":
        cost, path_ = maxAUC(a_label, b_label)
    elif _type=="prf":
        cost, path_ , cost_unfair = pairwise_post(a_label, b_label, lamb = lamb)
    else:
        print("Unknown type")
        exit()
    @jit(nopython=True)
    def pathTrace(path):
        # Walk the stored predecessor links back from (M, N) to (0, 0).
        trace = []
        tep = path[M,N,:,:]
        trace.append(tep[-1,:])
        trace.append(tep[0,:])
        for i in range(M+N-1):
            tep = path[int(tep[0][0]), int(tep[0][1]), :,:]
            trace.append(tep[0,:])
        trace.reverse()
        return trace
    path = pathTrace(path_)
    # gap_a[i] collects the b items placed between a_score[i-1] and a_score[i].
    gap_a = [[] for i in range(M+1)]
    for i in range(1,len(path)):
        if int(path[i][0])==int(path[i-1][0]):
            gap_a[int(path[i][0])].append(int(path[i][1]))
    changed_b_score = []
    for bin_ in range(len(gap_a)):
        for item in range(len(gap_a[bin_])):
            num = (len(gap_a[bin_])+1)
            if bin_==0:
                # Before the first a score: interpolate towards 1.
                changed_b_score.append((item+1)*a_score[bin_]/num+(num-item-1)/num)
            elif bin_==len(a_score):
                # After the last a score: interpolate towards 0.
                changed_b_score.append((num -item-1)*a_score[bin_-1]/num)
            else:
                # Between two consecutive a scores: linear interpolation.
                changed_b_score.append((item+1)*a_score[bin_]/num + (num-item-1)*a_score[bin_-1]/num)
    if _type=="AUC":
        return np.array(changed_b_score), 0
    else:
        return np.array(changed_b_score), cost_unfair[-1, -1]
def pairwise(a_score, b_score, a_label, b_label):
    """Pairwise ranking accuracy per group (cross-group + within-group pairs).

    Counts correctly ordered (positive, negative) pairs across groups, adds
    the within-group AUC contribution, and normalises by the number of pairs
    involving each group's positives. O(len(a)*len(b)) double loop.
    Returns (accuracy for a-positives, accuracy for b-positives).
    """
    sum_ab = 0
    sum_ba = 0
    numa = len(a_label)
    numb = len(b_label)
    a_num1 = np.sum(a_label)
    a_num0 = len(a_label) - a_num1
    b_num1 = np.sum(b_label)
    b_num0 = len(b_label) - b_num1
    # Within-group AUCs supply the same-group ordered-pair counts below.
    i_AUCa = roc_auc_score(a_label, a_score)
    i_AUCb = roc_auc_score(b_label, b_score)
    for i in range(numa):
        for j in range(numb):
            if a_label[i] ==1 and b_label[j] ==0:
                if a_score[i]>b_score[j]:
                    sum_ab+=1
            elif a_label[i]==0 and b_label[j]==1:
                if b_score[j]>a_score[i]:
                    sum_ba+=1
    return (sum_ab+i_AUCa*a_num0*a_num1)/(a_num1*(b_num0+a_num0)), (sum_ba+i_AUCb*b_num0*b_num1)/(b_num1*(a_num0+b_num0))
def pairwise_fast(a_score, b_score, a_label, b_label):
    """Vectorised version of pairwise() built on roc_auc_score.

    Each group's positives are ranked against the pooled negatives of both
    groups, so one AUC call covers cross- and within-group pairs.
    Returns (pair_ab, pair_ba).
    """
    a_num1 = np.sum(a_label)
    a_num0 = len(a_label) - a_num1
    b_num1 = np.sum(b_label)
    b_num0 = len(b_label) - b_num1
    a_score1,a_score0 = a_score[a_label == 1],a_score[a_label == 0]
    b_score1,b_score0 = b_score[b_label == 1],b_score[b_label == 0]
    # a-positives vs all negatives from both groups.
    ab_label = np.concatenate((np.ones(int(a_num1)),np.zeros(int(b_num0+a_num0))))
    ab_score = np.concatenate((a_score1,a_score0,b_score0))
    pair_ab = roc_auc_score(ab_label,ab_score)
    # b-positives vs all negatives from both groups.
    ba_label = np.concatenate((np.ones(int(b_num1)),np.zeros(int(a_num0+b_num0))))
    ba_score = np.concatenate((b_score1,b_score0, a_score0))
    pair_ba = roc_auc_score(ba_label,ba_score)
    return pair_ab, pair_ba
def zeros_mat(a, b):
    """Return the (M+1, N+1) table of remaining-negative counts.

    Entry [i, j] holds how many label-0 examples remain unplaced after
    consuming the first i items of ``a`` and the first j items of ``b``
    (both plain lists of 0/1 labels -- list concatenation is used below).
    """
    # Prepend a sentinel so indices line up with the 1-based DP convention.
    a_label = [0] + a
    b_label = [0] + b
    M = len(a_label)-1
    N = len(b_label)-1
    a_1 = np.sum(a)
    b_1 = np.sum(b)
    # Note: this local shadows the function name.
    zeros_mat = np.zeros((M+1, N+1))
    zeros_mat[0,0] = ((N-b_1)+(M - a_1))
    # First row: each consumed b negative lowers the count by one.
    for i in range(1,N+1):
        if b_label[i]==1:
            zeros_mat[0,i] = zeros_mat[0,i-1]
        else:
            zeros_mat[0,i] = zeros_mat[0,i-1]-1
    # Remaining rows: consume one a item, then sweep across the b items.
    for i in range(1,M+1):
        if a_label[i]==0:
            zeros_mat[i,0] = zeros_mat[i-1,0]-1
        else:
            zeros_mat[i,0] = zeros_mat[i-1,0]
        for j in range(1,N+1):
            if b_label[j]==0:
                zeros_mat[i,j] = zeros_mat[i,j-1]-1
            else:
                zeros_mat[i,j] = zeros_mat[i,j-1]
    return zeros_mat
| true
| true
|
f7091b492b1c1ff7f7cbbe859004f6d63c441970
| 1,459
|
py
|
Python
|
test/test_info_contact.py
|
spirit-87/python_training
|
f2e2389ba4e96139d666365abecf16a2db89cd6e
|
[
"Apache-2.0"
] | null | null | null |
test/test_info_contact.py
|
spirit-87/python_training
|
f2e2389ba4e96139d666365abecf16a2db89cd6e
|
[
"Apache-2.0"
] | null | null | null |
test/test_info_contact.py
|
spirit-87/python_training
|
f2e2389ba4e96139d666365abecf16a2db89cd6e
|
[
"Apache-2.0"
] | null | null | null |
from model.contact import Contact
from random import randrange
def test_contacts_on_homepage(app, db):
    """Check the home-page contact list matches the database contact list.

    Both lists are sorted by Contact.id_or_max, then compared field by field
    (names, address, merged phone and e-mail strings).
    """
    contacts_from_homepage = sorted(app.contact.get_contact_list(), key = Contact.id_or_max)
    contacts_from_db = sorted(db.get_contact_list(), key = Contact.id_or_max)
    assert len(contacts_from_homepage) == len(contacts_from_db)
    for i in range(len(contacts_from_homepage)):
        assert contacts_from_homepage[i].firstname == contacts_from_db[i].firstname
        assert contacts_from_homepage[i].lastname == contacts_from_db[i].lastname
        assert contacts_from_homepage[i].address == contacts_from_db[i].address
        assert contacts_from_homepage[i].all_phones_from_home_page == contacts_from_db[i].all_phones_from_home_page
        assert contacts_from_homepage[i].all_emails_from_home_page == contacts_from_db[i].all_emails_from_home_page
# def test_phones_on_contact_view_page(app):
# contact_from_viewpage = app.contact.get_contact_info_from_view_page(0) #контакт из viewpage контакта
# contact_from_editpage = app.contact.get_contact_info_from_edit_page(0) #контакт из формы редактирования
# assert contact_from_viewpage.phone_home == contact_from_editpage.phone_home
# assert contact_from_viewpage.phone_mobile == contact_from_editpage.phone_mobile
# assert contact_from_viewpage.phone_work == contact_from_editpage.phone_work
# assert contact_from_viewpage.phone2 == contact_from_editpage.phone2
| 56.115385
| 115
| 0.800548
|
from model.contact import Contact
from random import randrange
def test_contacts_on_homepage(app, db):
    """Check the home-page contact list matches the database contact list.

    Both lists are sorted by Contact.id_or_max, then compared field by field
    (names, address, merged phone and e-mail strings).
    """
    contacts_from_homepage = sorted(app.contact.get_contact_list(), key = Contact.id_or_max)
    contacts_from_db = sorted(db.get_contact_list(), key = Contact.id_or_max)
    assert len(contacts_from_homepage) == len(contacts_from_db)
    for i in range(len(contacts_from_homepage)):
        assert contacts_from_homepage[i].firstname == contacts_from_db[i].firstname
        assert contacts_from_homepage[i].lastname == contacts_from_db[i].lastname
        assert contacts_from_homepage[i].address == contacts_from_db[i].address
        assert contacts_from_homepage[i].all_phones_from_home_page == contacts_from_db[i].all_phones_from_home_page
        assert contacts_from_homepage[i].all_emails_from_home_page == contacts_from_db[i].all_emails_from_home_page
| true
| true
|
f7091b6372c395203c4bc05e0c18ff979fa41275
| 456
|
py
|
Python
|
remove_errors.py
|
martinetmayank/telegram-chats
|
ad79f3357d657415f57c83f219fc3ad7d57081eb
|
[
"Apache-2.0"
] | null | null | null |
remove_errors.py
|
martinetmayank/telegram-chats
|
ad79f3357d657415f57c83f219fc3ad7d57081eb
|
[
"Apache-2.0"
] | 1
|
2021-04-30T21:26:01.000Z
|
2021-04-30T21:26:01.000Z
|
remove_errors.py
|
martinetmayank/telegram-chats
|
ad79f3357d657415f57c83f219fc3ad7d57081eb
|
[
"Apache-2.0"
] | null | null | null |
def correct_ini_file(config_file):
    """Ensure ``config_file`` starts with the '[TELEGRAM]' section header.

    Reads the whole file, then rewrites it, prepending the header line when
    it is missing. Existing content is preserved unchanged.

    Fixes vs. original: no IndexError on an empty file (it now simply gets
    the header), plain ``readlines()`` instead of the confusing
    ``readlines(0)`` hint, and the redundant ``seek(0)`` is dropped.
    """
    with open(config_file, mode='r') as raw_open:
        lines = raw_open.readlines()
    with open(config_file, mode='w') as rewrite_config:
        # An empty file also needs the header prepended.
        if not lines or lines[0] != '[TELEGRAM]\n':
            rewrite_config.write('[TELEGRAM]\n')
        rewrite_config.writelines(lines)
| 32.571429
| 56
| 0.60307
|
def correct_ini_file(config_file):
    """Rewrite config_file so it begins with the '[TELEGRAM]' section header.

    Reads every line, then rewrites the file, prepending the header when the
    first line is not already the header line.
    NOTE(review): raises IndexError on an empty file (temp_api_details[0]).
    """
    with open(config_file, mode='r') as raw_open:
        raw_open.seek(0)  # redundant: a freshly opened file is at offset 0
        temp_api_details = raw_open.readlines(0)  # hint 0 reads all lines
    with open(config_file, mode='w') as rewrite_config:
        if temp_api_details[0] != '[TELEGRAM]\n':
            rewrite_config.write('[TELEGRAM]\n')
        for i in temp_api_details:
            rewrite_config.write(i)
| true
| true
|
f7091b68ebbfbd69780202759a09375d54581042
| 2,147
|
py
|
Python
|
tests/test_encoding.py
|
samv/unique
|
d5d8deb109d0b14ce072118432baf0bebc11826b
|
[
"MIT"
] | 1
|
2015-04-02T20:27:25.000Z
|
2015-04-02T20:27:25.000Z
|
tests/test_encoding.py
|
samv/unique
|
d5d8deb109d0b14ce072118432baf0bebc11826b
|
[
"MIT"
] | null | null | null |
tests/test_encoding.py
|
samv/unique
|
d5d8deb109d0b14ce072118432baf0bebc11826b
|
[
"MIT"
] | null | null | null |
import json
import unittest2
from normalize import from_json
from normalize import JsonProperty
from normalize import JsonRecord
from normalize import Property
from normalize import Record
from normalize import to_json
from unique.encoding import JSONRecordIO
from testclasses import MultiLevelKeyValue
from testclasses import SimpleKeyValue
def jdump(obj):
    """Serialize ``obj`` as pretty-printed JSON with sorted keys (4-space indent)."""
    return json.dumps(obj, sort_keys=True, indent=4, separators=(",", ": "))
class CustomMarshalled(JsonRecord):
    """JsonRecord with custom marshalling: injects a fixed 'oid' field when
    encoding and strips it again when decoding."""
    key = Property(json_name="id")
    value = Property()
    def json_data(self, **args):
        # Add a constant object id to the serialised form.
        jd = super(CustomMarshalled, self).json_data(**args)
        jd['oid'] = "1234567"
        return jd
    @classmethod
    def json_to_initkwargs(cls, json_data, kwargs):
        # Drop 'oid' so it never reaches the record constructor.
        return super(CustomMarshalled, cls).json_to_initkwargs(
            dict((k, v) for k, v in json_data.items() if k != 'oid'),
            kwargs,
        )
class SanityTest(unittest2.TestCase):
    """Round-trip encode/decode sanity checks for JSONRecordIO."""
    def test_simple_key(self):
        # Flat record: encode to a JSON string, then decode back to equality.
        sk = SimpleKeyValue(key="Bob", value="bill")
        encoded = JSONRecordIO.encode_str(sk)
        self.assertEqual(
            encoded, '{\n    "key": "Bob",\n    "value": "bill"\n}',
        )
        decoded = JSONRecordIO.decode_str(SimpleKeyValue, encoded)[0]
        self.assertEqual(sk, decoded)
    def test_multi_level_key(self):
        # Nested record with a collection attribute.
        mlkv = MultiLevelKeyValue(
            key="Casper",
            items=[{"key": "toast", "value": "Charlie_Brown"},
                   {"key": "ham", "value": "Lucy"},
                   {"key": "spam", "value": "Franklin"}],
            custom_val="Minotaur",
        )
        # IO using regular normalize
        default_json = jdump(to_json(mlkv))
        default_decoded = from_json(MultiLevelKeyValue, json.loads(default_json))
        self.assertEqual(mlkv, default_decoded)
        encoded = JSONRecordIO.encode_str(mlkv)
        decoded = JSONRecordIO.decode_str(MultiLevelKeyValue, encoded)[0]
        # FIXME: visitor should either respect all JsonRecord hints or none.
        decoded.custom_val = 'Minotaur'
        self.assertEqual(mlkv, decoded)
| 28.25
| 81
| 0.63251
|
import json
import unittest2
from normalize import from_json
from normalize import JsonProperty
from normalize import JsonRecord
from normalize import Property
from normalize import Record
from normalize import to_json
from unique.encoding import JSONRecordIO
from testclasses import MultiLevelKeyValue
from testclasses import SimpleKeyValue
def jdump(obj):
    """Serialize ``obj`` as pretty-printed JSON with sorted keys (4-space indent)."""
    return json.dumps(
        obj,
        indent=4,
        separators=(',', ': '),
        sort_keys=True,
    )
class CustomMarshalled(JsonRecord):
    """JsonRecord with custom marshalling: injects a fixed 'oid' field when
    encoding and strips it again when decoding."""
    key = Property(json_name="id")
    value = Property()
    def json_data(self, **args):
        # Add a constant object id to the serialised form.
        jd = super(CustomMarshalled, self).json_data(**args)
        jd['oid'] = "1234567"
        return jd
    @classmethod
    def json_to_initkwargs(cls, json_data, kwargs):
        # Drop 'oid' so it never reaches the record constructor.
        return super(CustomMarshalled, cls).json_to_initkwargs(
            dict((k, v) for k, v in json_data.items() if k != 'oid'),
            kwargs,
        )
class SanityTest(unittest2.TestCase):
    """Round-trip encode/decode sanity checks for JSONRecordIO."""
    def test_simple_key(self):
        # Flat record: encode to a JSON string, then decode back to equality.
        sk = SimpleKeyValue(key="Bob", value="bill")
        encoded = JSONRecordIO.encode_str(sk)
        self.assertEqual(
            encoded, '{\n    "key": "Bob",\n    "value": "bill"\n}',
        )
        decoded = JSONRecordIO.decode_str(SimpleKeyValue, encoded)[0]
        self.assertEqual(sk, decoded)
    def test_multi_level_key(self):
        # Nested record with a collection attribute.
        mlkv = MultiLevelKeyValue(
            key="Casper",
            items=[{"key": "toast", "value": "Charlie_Brown"},
                   {"key": "ham", "value": "Lucy"},
                   {"key": "spam", "value": "Franklin"}],
            custom_val="Minotaur",
        )
        # Round-trip through plain normalize JSON helpers first.
        default_json = jdump(to_json(mlkv))
        default_decoded = from_json(MultiLevelKeyValue, json.loads(default_json))
        self.assertEqual(mlkv, default_decoded)
        encoded = JSONRecordIO.encode_str(mlkv)
        decoded = JSONRecordIO.decode_str(MultiLevelKeyValue, encoded)[0]
        # Restore custom_val before comparing (visitor does not round-trip it).
        decoded.custom_val = 'Minotaur'
        self.assertEqual(mlkv, decoded)
| true
| true
|
f7091ba4774ace99be57da4a09c120ccf6dd67e9
| 22,801
|
py
|
Python
|
examples/sc2autosave.py
|
HADB/sc2reader
|
6bb984dbe85f46a6684680dd0e56c09d7188214b
|
[
"MIT"
] | 117
|
2016-09-11T16:42:05.000Z
|
2022-03-27T22:07:34.000Z
|
examples/sc2autosave.py
|
Kaszanas/sc2reader
|
86bd9b70c3aef8319ce7c8c06cac8a4bdfe3fd23
|
[
"MIT"
] | 120
|
2016-01-10T17:41:45.000Z
|
2022-03-28T04:46:16.000Z
|
examples/sc2autosave.py
|
Kaszanas/sc2reader
|
86bd9b70c3aef8319ce7c8c06cac8a4bdfe3fd23
|
[
"MIT"
] | 58
|
2016-02-03T18:06:26.000Z
|
2021-09-07T03:08:50.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""sc2autosave is a utility for reorganizing and renaming Starcraft II files.
Overview
==============
sc2autosave provides a simple mechanism for renaming replay files as they are
copied or moved from a source directory to a destination directory. In between
runs the state is stored in the sc2autosave.dat file saved to the destination
folder. In this way, multiple destination folders with different organizations
and formats can be maintained independently.
General Operation
-------------------
When first run for a given destination directory, sc2autosave scans for all
files since the epoch. Each subsequent run scans only for files new files
since the previous scan time. This behavior can be modified on a run by run
basis by with the --since DATETIME option. By default the source directory
is scanned recursively. The --depth DEPTH option can limit and/or eliminate
this is recursion.
Files identified as new are then copied to the destination directory. The
--move option can override this behavior. The default behavior is a good idea
because it ensures that there is a backup copy and allows for several different
file structures to be constructed with different sc2autosave configurations for
easy replay navigation. You might keep your replay files redundantly stored
sorted by format, by map, and by matchup for easy lookup later on.
While normally run as a batch process, the --period SECONDS option can be used
to run sc2autosave as a background process, scanning the directory for changes
every SECONDS seconds. This is useful for creating background processes on
operating system start up.
Renaming Replays
--------------------
The --rename option allows you to specify a renaming format string. The string
is constructed the pythonic (3.0) way with {:field} indicating the substitution
of a field. The forward slash (/) is a special character here which terminates
a folder name and allows for organization into subdirectories. All other string
characters form the template into which the fields are inserted.
Fields related to dates and times (:date, :datetime, :length fields) can be
formatted through their respective directives (--date, --datetime, --length)
according to python date formatting conventions. Additionally, the player
display format can be refined with the --player-format FORMAT directive which
is interpreted similarly to the --rename FORMAT directive detailed above.
Once content has been defined to your tastes you may wish to get specific about
the ordering of the teams and players on those teams in the replay name. The
--team-order-by and --player-order-by directives can be used for this purpose.
A common preference is to favor specific players (like yourself and friends)
and their teams in the ordering by placing them first in the listing. The
--favor PLAYER1 [PLAYER2] directive supports this preference.
Filtering Replays
---------------------
Once a replay has been scanned and parsed you have an opportunity to filter it
for inclusion in the destination directory. This is useful when constructing
various different types of replay packs for distribution and review. Replays
are small and Battle.net has a terrible filesystem based replay locator; why
not make your life easier with a little duplication.
--filter-players PLAYER [PLAYER ...]
--filter-matchup MATCHUP [MATCHUP ...]
--filter-map NAME [NAME ...]
--filter-length LOW HIGH
--filter-date START END
Example Configurations
------------------------
This first basic configuration sets up a background process to copy new replays
without renaming to a 'Saved' subdirectory every 10 seconds. The depth 0 option
keeps the script from looking into the 'Saved' subdirectory.
sc2autosave \
--source ~/My\ Documents/Starcraft\ II/Accounts/.../Multiplayer \
--dest ~/My\ Documents/Starcraft\ II/Accounts/.../Multiplayer/Saved \
--period 10 \
--depth 0
This next configuration runs in batch mode using the default renaming format.
sc2autosave \
--source ~/My\ Documents/Starcraft\ II/Accounts/.../Mutliplayer \
--dest ~/My\ Documents/Starcraft\ II/Accounts/.../Multiplater/Saved \
--rename
(ZvP) Lost Temple: ShadesofGray(Z) vs Trisfall(P).SC2Replay
(ZZvPP) Shattered Temple: ShadesofGray(Z), Remedy(Z) vs ProfProbe(P), Trisfall(P).SC2Replay
Here is a heavily customized format that organizes replays into subdirectories
by replay format and favors ShadesofGray in the player and team orderings.
sc2autosave \
--source ~/My\ Documents/Starcraft\ II/Accounts/.../Multiplayer \
--dest ~/My\ Documents/Starcraft\ II/Accounts/.../Multiplayer/Saved \
--rename "{:format}/{:matchup} on {:map}: {:teams}" \
--player-format "{:name}({:play_race})" \
--team-order-by number \
--player-order-by name \
--favored ShadesofGray
1v1/ZvP on Lost Temple: ShadesofGray(Z) vs Trisfall(P).SC2Replay
2v2/ZZvPP on Shattered Temple: ShadesofGray(Z), Remedy(Z) vs ProfProbe(P), Trisfall(P).SC2Replay
Next is another customized format which organizes replays by matchup. It uses
strict player and team ordering by number with no exceptions and formats game
length to show both minutes and seconds.
sc2autosave \
--source ~/My\ Documents/Starcraft\ II/Accounts/.../Mutliplayer \
--dest ~/My\ Documents/Starcraft\ II/Accounts/.../Multiplater/Saved \
--rename "{:matchup}/({:length}) {:map}: {:teams}" \
--player-format "{:name}({:play_race})" \
--team-order-by number \
--player-order-by number \
--length "%M:%S"
PvZ/(20:14) Lost Temple: Trisfall(P) vs ShadesofGray(Z).SC2Replay
ZZvPP/(35:40) Shattered Temple: Remedy(Z), ShadesofGray(Z) vs Trisfall(P), ProfProbe(P).SC2Replay
Complete Reference Guide
---------------------------
--source SOURCE_FOLDER
The source folder to scan for replays. Uses recursive scan by default.
--dest DESTINATION_FOLDER
The destination folder to place replays into.
--depth DEPTH
Allows recursion to be limited and/or disabled (with DEPTH=0).
--period SECONDS
Puts sc2autosave into continuous mode, scanning the directory for new
files every SECONDS seconds.
--rename FORMAT
:map - Inserts the map name.
:date - Inserts a string formatted datetime object using --date-format.
:length - Inserts a string formatted time object using --length-format.
:teams - Inserts a comma separated player list. Teams are separated
with a ' vs ' string. Format the player with --player-format.
:format - Inserts the map format (1v1, 2v2, 3v3, etc)
:matchup - Inserts the matchup (ZvZ, PTvTZ, etc). The matchup is
in team order with races ordered alphabetically; not by player!
This makes matchups more consistent and useful for sorting.
--length-format FORMAT
--player-format FORMAT
--date-format FORMAT
--team-order-by FIELD
--player-order-by FIELD
--favored NAME [NAME,...]
POST-Parse filtering vs preparse filtering?
POST-Parse, how to do it?!?!?!?!
"""
import argparse
import cPickle
import os
import shutil
import sys
import time
import sc2reader
# Py2/Py3 compatibility: expose raw_input under both interpreters.
try:
    raw_input  # Python 2
except NameError:
    raw_input = input  # Python 3
def run(args):
    """Top-level driver: scan the source folder and copy/move new replays.

    In BATCH mode a single pass is made over the source tree; in CYCLE mode
    the scan repeats every ``args.period`` seconds until interrupted.
    """
    # Reset wipes the destination clean so we can start over.
    if args.reset:
        reset(args)

    # Set up validates the destination and source directories.
    # It also loads the previous state or creates one as necessary.
    state = setup(args)

    # We break out of this loop in batch mode and on KeyboardInterrupt
    while True:
        # The file scan uses the arguments and the state to filter down to
        # only new (since the last sync time) files.
        for path in scan(args, state):
            try:
                # Read the file and expose useful aspects for renaming/filtering
                replay = sc2reader.load_replay(path, load_level=2)
            except KeyboardInterrupt:
                raise
            except:
                # Failure to parse: quarantine the file in a 'parse_error'
                # subfolder so it can be inspected (or re-parsed) later.
                file_name = os.path.basename(path)
                directory = make_directory(args, ("parse_error",))
                new_path = os.path.join(directory, file_name)
                source_path = path[len(args.source) :]
                # BUGFIX: added the missing trailing newline so subsequent
                # log lines do not run together on one line.
                args.log.write("Error parsing replay: {0}\n".format(source_path))
                if not args.dryrun:
                    args.action.run(path, new_path)
                # Skip to the next replay
                continue

            aspects = generate_aspects(args, replay)

            # Use the filter args to select files based on replay attributes
            if filter_out_replay(args, replay):
                continue

            # Apply the aspects to the rename formatting.
            # '/' is a special character for creation of subdirectories.
            # TODO: Handle duplicate replay names, its possible..
            path_parts = args.rename.format(**aspects).split("/")
            filename = path_parts.pop() + ".SC2Replay"

            # Construct the directory and file paths; create needed directories
            directory = make_directory(args, path_parts)
            new_path = os.path.join(directory, filename)

            # Find the source relative to the source directory for reporting
            dest_path = new_path[len(args.dest) :]
            source_path = path[len(args.source) :]

            # Log the action and run it if we are live
            msg = "{0}:\n\tSource: {1}\n\tDest: {2}\n"
            args.log.write(msg.format(args.action.type, source_path, dest_path))
            if not args.dryrun:
                args.action.run(path, new_path)

        # After every batch completes, save the state and flush the log
        # TODO: modify the state to include a list of remaining files
        args.log.flush()
        save_state(state, args)

        # We only run once in batch mode!
        if args.mode == "BATCH":
            break

        # Since new replays come in fairly infrequently, reduce system load
        # by sleeping for an acceptable response time before the next scan.
        time.sleep(args.period)

    # BUGFIX: trailing newline for consistency with every other log write.
    args.log.write("Batch Completed\n")
def filter_out_replay(args, replay):
    """Return True when this replay should be skipped by the organizer.

    With --filter-rule ALLOW the listed players form a whitelist (skip
    replays containing none of them); with DENY they form a blacklist
    (skip replays containing any of them).
    """
    names_in_replay = {player.name for player in replay.players}
    has_filtered_player = bool(set(args.filter_player) & names_in_replay)
    if args.filter_rule == "ALLOW":
        return not has_filtered_player
    return has_filtered_player
# We need to create these compare functions at runtime because the ordering
# hinges on the --favored PLAYER options passed in from the command line.
def create_compare_funcs(args):
    """Build ``(team_compare, player_compare)`` cmp-style comparators.

    Ordering is driven by the --favored names: favored players (and teams
    containing them) sort first; ties fall back to pid / team number.

    Fixes relative to the previous revision:
      * --favored names are matched case-insensitively everywhere. The old
        code lowercased names for the membership test but looked them up in
        the original, possibly mixed-case, list — raising ValueError.
      * Between two teams that both contain favored players, the team with
        the single MOST favored player (lowest --favored index) now wins,
        as the comments always promised; previously the LEAST favored
        member of each team was compared.
    """
    # Normalize once so membership tests and index lookups agree.
    favored_order = [name.lower() for name in args.favored]
    favored_set = set(favored_order)

    def player_compare(player1, player2):
        # Normalize the player names and generate our key metrics
        player1_name = player1.name.lower()
        player2_name = player2.name.lower()
        player1_favored = player1_name in favored_set
        player2_favored = player2_name in favored_set

        # The favored player always comes first in the ordering
        if player1_favored and not player2_favored:
            return -1
        elif player2_favored and not player1_favored:
            return 1
        # The most favored person will always be listed first
        elif player1_favored and player2_favored:
            return favored_order.index(player1_name) - favored_order.index(player2_name)
        # If neither is favored, we'll order by number for now
        # TODO: Allow command line specification of other orderings (maybe?)
        else:
            return player1.pid - player2.pid

    def team_compare(team1, team2):
        # Normalize the team name lists and generate our key metrics
        team1_names = set(p.name.lower() for p in team1.players)
        team2_names = set(p.name.lower() for p in team2.players)
        team1_favored = team1_names & favored_set
        team2_favored = team2_names & favored_set

        # The team with the favored players will always be listed first
        if team1_favored and not team2_favored:
            return -1
        elif team2_favored and not team1_favored:
            return 1
        # The team with the most favored person comes first; "most favored"
        # means the LOWEST index in the --favored list.
        elif team1_favored and team2_favored:
            team1_best = min(favored_order.index(n) for n in team1_favored)
            team2_best = min(favored_order.index(n) for n in team2_favored)
            return team1_best - team2_best
        # If neither is favored, we'll order by number for now
        # TODO: Allow command line specification of other orderings (maybe?)
        else:
            return team1.number - team2.number

    return team_compare, player_compare
def generate_aspects(args, replay):
    """Assemble the dict of format values used by the --rename template.

    Teams and players are sorted with the comparators built by
    create_compare_funcs so favored players lead their team listings.
    """
    # functools.cmp_to_key keeps the cmp-style comparators working on both
    # Python 2.7 and Python 3 — sorted() lost its positional cmp argument
    # in 3.x, which the original two-argument sorted() calls relied on.
    from functools import cmp_to_key

    teams = sorted(replay.teams, key=cmp_to_key(args.team_compare))
    matchups, team_strings = list(), list()
    for team in teams:
        team.players = sorted(team.players, key=cmp_to_key(args.player_compare))
        # Race letters are sorted alphabetically (not by player) so a given
        # matchup always renders the same way, e.g. 'PvZ' never 'ZvP'.
        composition = sorted(p.play_race[0].upper() for p in team.players)
        matchups.append("".join(composition))
        string = ", ".join(p.format(args.player_format) for p in team.players)
        team_strings.append(string)

    return sc2reader.utils.AttributeDict(
        result=teams[0].result,
        length=replay.length,
        map=replay.map,
        type=replay.type,
        date=replay.date.strftime(args.date_format),
        matchup="v".join(matchups),
        teams=" vs ".join(team_strings),
    )
def make_directory(args, path_parts):
    """Create (unless dry-running) each nested folder in *path_parts* under
    args.dest and return the full path of the innermost one.

    Aborts the script if a path component exists but is not a directory.
    """
    current = args.dest
    for segment in path_parts:
        current = os.path.join(current, segment)
        if os.path.isdir(current):
            continue  # already present, nothing to do
        if os.path.exists(current):
            # A file occupies the name we need for a folder.
            exit("Cannot create subfolder. Path is occupied: {0}", current)
        args.log.write("Creating subfolder: {0}\n".format(current))
        if not args.dryrun:
            os.mkdir(current)
    return current
def scan(args, state):
    """Return the replay files under args.source created since the last sync.

    Directory walking (depth limit, dir/file exclusions, symlinks) is
    delegated to sc2reader; only files newer than state.last_sync pass.
    """
    args.log.write("SCANNING: {0}\n".format(args.source))
    candidates = sc2reader.utils.get_files(
        path=args.source,
        regex=args.exclude_files,
        allow=False,
        exclude=args.exclude_dirs,
        depth=args.depth,
        followlinks=args.follow_links,
    )
    return [name for name in candidates if os.path.getctime(name) > state.last_sync]
def exit(msg, *args, **kwargs):
    """Format *msg* with the given arguments and abort via SystemExit."""
    formatted = msg.format(*args, **kwargs)
    sys.exit("{0}\n\nScript Aborted.".format(formatted))
def reset(args):
    """Interactively wipe args.dest so the organizer can start from scratch.

    Aborts if the destination is missing or not a directory, and requires
    an explicit 'y' confirmation before deleting anything.
    """
    if not os.path.exists(args.dest):
        exit("Cannot reset, destination does not exist: {0}", args.dest)
    if not os.path.isdir(args.dest):
        exit("Cannot reset, destination must be directory: {0}", args.dest)

    print(
        "About to reset directory: {0}\nAll files and subdirectories will be removed.".format(
            args.dest
        )
    )
    if raw_input("Proceed anyway? (y/n) ").lower() != "y":
        sys.exit("Script Aborted")

    args.log.write("Removing old directory: {0}\n".format(args.dest))
    if not args.dryrun:
        print(args.dest)
        shutil.rmtree(args.dest)
def setup(args):
    """Validate source/dest, build runtime helpers, and load saved state.

    Returns an AttributeDict with at least ``last_sync`` (epoch seconds of
    the previous successful scan; 0 on first run or after --reset).
    """
    args.team_compare, args.player_compare = create_compare_funcs(args)
    # Wrap the chosen file operation so run() can treat COPY/MOVE uniformly.
    args.action = sc2reader.utils.AttributeDict(
        type=args.action, run=shutil.copy if args.action == "COPY" else shutil.move
    )

    if not os.path.exists(args.source):
        msg = "Source does not exist: {0}.\n\nScript Aborted."
        sys.exit(msg.format(args.source))
    elif not os.path.isdir(args.source):
        msg = "Source is not a directory: {0}.\n\nScript Aborted."
        sys.exit(msg.format(args.source))

    if not os.path.exists(args.dest):
        if not args.dryrun:
            os.mkdir(args.dest)
        else:
            args.log.write("Creating destination: {0}\n".format(args.dest))
    elif not os.path.isdir(args.dest):
        sys.exit("Destination must be a directory.\n\nScript Aborted")

    data_file = os.path.join(args.dest, "sc2autosave.dat")
    args.log.write("Loading state from file: {0}\n".format(data_file))
    if os.path.isfile(data_file) and not args.reset:
        # BUGFIX: pickle data is binary — text mode corrupts it on Windows
        # and fails outright on Python 3, so open with "rb".
        with open(data_file, "rb") as file:
            return cPickle.load(file)
    else:
        return sc2reader.utils.AttributeDict(last_sync=0)
def save_state(state, args):
    """Persist the sync state (last scan timestamp) into the destination."""
    state.last_sync = time.time()
    data_file = os.path.join(args.dest, "sc2autosave.dat")
    if not args.dryrun:
        # BUGFIX: pickle streams are binary — "w" corrupts them on Windows
        # and is rejected by Python 3, so write with "wb".
        with open(data_file, "wb") as file:
            cPickle.dump(state, file)
    else:
        args.log.write("Writing state to file: {0}\n".format(data_file))
def main():
    """Build the command-line interface and hand the parsed options to run()."""
    parser = argparse.ArgumentParser(
        description="Automatically copy new replays to directory",
        fromfile_prefix_chars="@",
        formatter_class=sc2reader.scripts.utils.Formatter.new(max_help_position=35),
        epilog="And that's all folks",
    )

    # Positional arguments: where to read replays from and where to put them.
    required = parser.add_argument_group("Required Arguments")
    required.add_argument("source", type=str, help="The source directory to poll")
    required.add_argument("dest", type=str, help="The destination directory to copy to")

    # Overall behavior: one-shot vs continuous, copy vs move, logging, dry runs.
    general = parser.add_argument_group("General Options")
    general.add_argument(
        "--mode",
        dest="mode",
        type=str,
        choices=["BATCH", "CYCLE"],
        default="BATCH",
        help="The operating mode for the organizer",
    )
    general.add_argument(
        "--action",
        dest="action",
        choices=["COPY", "MOVE"],
        default="COPY",
        type=str,
        help="Have the organizer move your files instead of copying",
    )
    general.add_argument(
        "--period",
        dest="period",
        type=int,
        default=0,
        help="The period of time to wait between scans.",
    )
    general.add_argument(
        "--log",
        dest="log",
        metavar="LOGFILE",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="Destination file for log information",
    )
    general.add_argument(
        "--dryrun",
        dest="dryrun",
        action="store_true",
        help="Don't do anything. Only simulate the output",
    )
    general.add_argument(
        "--reset",
        dest="reset",
        action="store_true",
        default=False,
        help="Wipe the destination directory clean and start over.",
    )

    # File-scanning controls: recursion depth, exclusions, symlink handling.
    fileargs = parser.add_argument_group("File Options")
    fileargs.add_argument(
        "--depth",
        dest="depth",
        type=int,
        default=-1,
        help="Maximum recussion depth. -1 (default) is unlimited.",
    )
    fileargs.add_argument(
        "--exclude-dirs",
        dest="exclude_dirs",
        type=str,
        metavar="NAME",
        nargs="+",
        default=[],
        help="A list of directory names to exclude during recursion",
    )
    fileargs.add_argument(
        "--exclude-files",
        dest="exclude_files",
        type=str,
        metavar="REGEX",
        default="",
        help="An expression to match excluded files",
    )
    fileargs.add_argument(
        "--follow-links",
        dest="follow_links",
        action="store_true",
        default=False,
        help="Enable following of symbolic links while scanning",
    )

    # Renaming controls: the template and its sub-formats.
    renaming = parser.add_argument_group("Renaming Options")
    renaming.add_argument(
        "--rename",
        dest="rename",
        type=str,
        metavar="FORMAT",
        nargs="?",
        default="{length} {type} on {map}",
        help="""\
The renaming format string. can have the following values:
* {length} - The length of the replay ([H:]MM:SS)
* {type} - The type of the replay (1v1,2v2,4v4,etc)
* {map} - The map that was played on.
* {match} - Race matchup in team order, alphabetically by race.
* {date} - The date the replay was played on
* {teams} - The player line up
""",
    )
    renaming.add_argument(
        "--length-format",
        dest="length_format",
        type=str,
        metavar="FORMAT",
        default="%M.%S",
        help="The length format string. See the python time module for details",
    )
    renaming.add_argument(
        "--player-format",
        dest="player_format",
        type=str,
        metavar="FORMAT",
        default="{name} ({play_race})",
        help="The player format string used to render the :teams content item.",
    )
    renaming.add_argument(
        "--date-format",
        dest="date_format",
        type=str,
        metavar="FORMAT",
        default="%m-%d-%Y",
        help="The date format string used to render the :date content item.",
    )
    # The ordering options below are intentionally disabled (kept as a bare
    # string expression, i.e. a no-op) pending implementation.
    """
    renaming.add_argument('--team-order-by',
        dest='team_order', type=str, metavar='FIELD', default='NUMBER',
        help='The field by which teams are ordered.')
    renaming.add_argument('--player-order-by',
        dest='player_order', type=str, metavar='FIELD', default='NAME',
        help='The field by which players are ordered on teams.')
    """
    renaming.add_argument(
        "--favored",
        dest="favored",
        type=str,
        default=[],
        metavar="NAME",
        nargs="+",
        help="A list of the players to favor in ordering teams and players",
    )

    # Filtering controls: whitelist/blacklist replays by participating player.
    filterargs = parser.add_argument_group("Filtering Options")
    filterargs.add_argument(
        "--filter-rule",
        dest="filter_rule",
        choices=["ALLOW", "DENY"],
        help="The filters can either be used as a white list or a black list",
    )
    filterargs.add_argument(
        "--filter-player",
        metavar="NAME",
        dest="filter_player",
        nargs="+",
        type=str,
        default=[],
        help="A list of players to filter on",
    )

    try:
        run(parser.parse_args())
    except KeyboardInterrupt:
        print("\n\nScript Interrupted. Process Aborting")
# Standard script entry point: only run when executed directly, not imported.
if __name__ == "__main__":
    main()
| 37.378689
| 101
| 0.630718
|
import argparse
import cPickle
import os
import shutil
import sys
import time
import sc2reader
try:
raw_input except NameError:
raw_input = input
def run(args):
if args.reset:
reset(args)
state = setup(args)
while True:
for path in scan(args, state):
try:
replay = sc2reader.load_replay(path, load_level=2)
except KeyboardInterrupt:
raise
except:
file_name = os.path.basename(path)
directory = make_directory(args, ("parse_error",))
new_path = os.path.join(directory, file_name)
source_path = path[len(args.source) :]
args.log.write("Error parsing replay: {0}".format(source_path))
if not args.dryrun:
args.action.run(path, new_path)
continue
aspects = generate_aspects(args, replay)
if filter_out_replay(args, replay):
continue
path_parts = args.rename.format(**aspects).split("/")
filename = path_parts.pop() + ".SC2Replay"
directory = make_directory(args, path_parts)
new_path = os.path.join(directory, filename)
dest_path = new_path[len(args.dest) :]
source_path = path[len(args.source) :]
msg = "{0}:\n\tSource: {1}\n\tDest: {2}\n"
args.log.write(msg.format(args.action.type, source_path, dest_path))
if not args.dryrun:
args.action.run(path, new_path)
args.log.flush()
save_state(state, args)
if args.mode == "BATCH":
break
time.sleep(args.period)
args.log.write("Batch Completed")
def filter_out_replay(args, replay):
player_names = set([player.name for player in replay.players])
filter_out_player = not set(args.filter_player) & player_names
if args.filter_rule == "ALLOW":
return filter_out_player
else:
return not filter_out_player
def create_compare_funcs(args):
favored_set = set(name.lower() for name in args.favored)
def player_compare(player1, player2):
player1_name = player1.name.lower()
player2_name = player2.name.lower()
player1_favored = player1_name in favored_set
player2_favored = player2_name in favored_set
if player1_favored and not player2_favored:
return -1
elif player2_favored and not player1_favored:
return 1
elif player1_favored and player2_favored:
player1_index = args.favored.index(player1_name)
player2_index = args.favored.index(player2_name)
return player1_index - player2_index
# TODO: Allow command line specification of other orderings (maybe?)
else:
return player1.pid - player2.pid
def team_compare(team1, team2):
# Normalize the team name lists and generate our key metrics
team1_names = set(p.name.lower() for p in team1.players)
team2_names = set(p.name.lower() for p in team2.players)
team1_favored = team1_names & favored_set
team2_favored = team2_names & favored_set
# The team with the favored players will always be listed first
if team1_favored and not team2_favored:
return -1
elif team2_favored and not team1_favored:
return 1
# The team with the most favored person will always come first
elif team1_favored and team2_favored:
team1_best = sorted(args.favored.index(n) for n in team1_favored)
team2_best = sorted(args.favored.index(n) for n in team2_favored)
return team1_best[-1] - team2_best[-1]
# If neither is favored, we'll order by number for now
else:
return team1.number - team2.number
return team_compare, player_compare
def generate_aspects(args, replay):
teams = sorted(replay.teams, args.team_compare)
matchups, team_strings = list(), list()
for team in teams:
team.players = sorted(team.players, args.player_compare)
composition = sorted(p.play_race[0].upper() for p in team.players)
matchups.append("".join(composition))
string = ", ".join(p.format(args.player_format) for p in team.players)
team_strings.append(string)
return sc2reader.utils.AttributeDict(
result=teams[0].result,
length=replay.length,
map=replay.map,
type=replay.type,
date=replay.date.strftime(args.date_format),
matchup="v".join(matchups),
teams=" vs ".join(team_strings),
)
def make_directory(args, path_parts):
directory = args.dest
for part in path_parts:
directory = os.path.join(directory, part)
if not os.path.exists(directory):
args.log.write("Creating subfolder: {0}\n".format(directory))
if not args.dryrun:
os.mkdir(directory)
elif not os.path.isdir(directory):
exit("Cannot create subfolder. Path is occupied: {0}", directory)
return directory
def scan(args, state):
args.log.write("SCANNING: {0}\n".format(args.source))
files = sc2reader.utils.get_files(
path=args.source,
regex=args.exclude_files,
allow=False,
exclude=args.exclude_dirs,
depth=args.depth,
followlinks=args.follow_links,
)
return filter(lambda f: os.path.getctime(f) > state.last_sync, files)
def exit(msg, *args, **kwargs):
sys.exit(msg.format(*args, **kwargs) + "\n\nScript Aborted.")
def reset(args):
if not os.path.exists(args.dest):
exit("Cannot reset, destination does not exist: {0}", args.dest)
elif not os.path.isdir(args.dest):
exit("Cannot reset, destination must be directory: {0}", args.dest)
print(
"About to reset directory: {0}\nAll files and subdirectories will be removed.".format(
args.dest
)
)
choice = raw_input("Proceed anyway? (y/n) ")
if choice.lower() == "y":
args.log.write("Removing old directory: {0}\n".format(args.dest))
if not args.dryrun:
print(args.dest)
shutil.rmtree(args.dest)
else:
sys.exit("Script Aborted")
def setup(args):
args.team_compare, args.player_compare = create_compare_funcs(args)
args.action = sc2reader.utils.AttributeDict(
type=args.action, run=shutil.copy if args.action == "COPY" else shutil.move
)
if not os.path.exists(args.source):
msg = "Source does not exist: {0}.\n\nScript Aborted."
sys.exit(msg.format(args.source))
elif not os.path.isdir(args.source):
msg = "Source is not a directory: {0}.\n\nScript Aborted."
sys.exit(msg.format(args.source))
if not os.path.exists(args.dest):
if not args.dryrun:
os.mkdir(args.dest)
else:
args.log.write("Creating destination: {0}\n".format(args.dest))
elif not os.path.isdir(args.dest):
sys.exit("Destination must be a directory.\n\nScript Aborted")
data_file = os.path.join(args.dest, "sc2autosave.dat")
args.log.write("Loading state from file: {0}\n".format(data_file))
if os.path.isfile(data_file) and not args.reset:
with open(data_file) as file:
return cPickle.load(file)
else:
return sc2reader.utils.AttributeDict(last_sync=0)
def save_state(state, args):
state.last_sync = time.time()
data_file = os.path.join(args.dest, "sc2autosave.dat")
if not args.dryrun:
with open(data_file, "w") as file:
cPickle.dump(state, file)
else:
args.log.write("Writing state to file: {0}\n".format(data_file))
def main():
parser = argparse.ArgumentParser(
description="Automatically copy new replays to directory",
fromfile_prefix_chars="@",
formatter_class=sc2reader.scripts.utils.Formatter.new(max_help_position=35),
epilog="And that's all folks",
)
required = parser.add_argument_group("Required Arguments")
required.add_argument("source", type=str, help="The source directory to poll")
required.add_argument("dest", type=str, help="The destination directory to copy to")
general = parser.add_argument_group("General Options")
general.add_argument(
"--mode",
dest="mode",
type=str,
choices=["BATCH", "CYCLE"],
default="BATCH",
help="The operating mode for the organizer",
)
general.add_argument(
"--action",
dest="action",
choices=["COPY", "MOVE"],
default="COPY",
type=str,
help="Have the organizer move your files instead of copying",
)
general.add_argument(
"--period",
dest="period",
type=int,
default=0,
help="The period of time to wait between scans.",
)
general.add_argument(
"--log",
dest="log",
metavar="LOGFILE",
type=argparse.FileType("w"),
default=sys.stdout,
help="Destination file for log information",
)
general.add_argument(
"--dryrun",
dest="dryrun",
action="store_true",
help="Don't do anything. Only simulate the output",
)
general.add_argument(
"--reset",
dest="reset",
action="store_true",
default=False,
help="Wipe the destination directory clean and start over.",
)
fileargs = parser.add_argument_group("File Options")
fileargs.add_argument(
"--depth",
dest="depth",
type=int,
default=-1,
help="Maximum recussion depth. -1 (default) is unlimited.",
)
fileargs.add_argument(
"--exclude-dirs",
dest="exclude_dirs",
type=str,
metavar="NAME",
nargs="+",
default=[],
help="A list of directory names to exclude during recursion",
)
fileargs.add_argument(
"--exclude-files",
dest="exclude_files",
type=str,
metavar="REGEX",
default="",
help="An expression to match excluded files",
)
fileargs.add_argument(
"--follow-links",
dest="follow_links",
action="store_true",
default=False,
help="Enable following of symbolic links while scanning",
)
renaming = parser.add_argument_group("Renaming Options")
renaming.add_argument(
"--rename",
dest="rename",
type=str,
metavar="FORMAT",
nargs="?",
default="{length} {type} on {map}",
help="""\
The renaming format string. can have the following values:
* {length} - The length of the replay ([H:]MM:SS)
* {type} - The type of the replay (1v1,2v2,4v4,etc)
* {map} - The map that was played on.
* {match} - Race matchup in team order, alphabetically by race.
* {date} - The date the replay was played on
* {teams} - The player line up
""",
)
renaming.add_argument(
"--length-format",
dest="length_format",
type=str,
metavar="FORMAT",
default="%M.%S",
help="The length format string. See the python time module for details",
)
renaming.add_argument(
"--player-format",
dest="player_format",
type=str,
metavar="FORMAT",
default="{name} ({play_race})",
help="The player format string used to render the :teams content item.",
)
renaming.add_argument(
"--date-format",
dest="date_format",
type=str,
metavar="FORMAT",
default="%m-%d-%Y",
help="The date format string used to render the :date content item.",
)
renaming.add_argument(
"--favored",
dest="favored",
type=str,
default=[],
metavar="NAME",
nargs="+",
help="A list of the players to favor in ordering teams and players",
)
filterargs = parser.add_argument_group("Filtering Options")
filterargs.add_argument(
"--filter-rule",
dest="filter_rule",
choices=["ALLOW", "DENY"],
help="The filters can either be used as a white list or a black list",
)
filterargs.add_argument(
"--filter-player",
metavar="NAME",
dest="filter_player",
nargs="+",
type=str,
default=[],
help="A list of players to filter on",
)
try:
run(parser.parse_args())
except KeyboardInterrupt:
print("\n\nScript Interrupted. Process Aborting")
if __name__ == "__main__":
main()
| true
| true
|
f7091cc867b32d1268be2ef4dea0d3b3be89c573
| 20,500
|
py
|
Python
|
nevergrad/optimization/recastlib.py
|
mathuvu/nevergrad
|
8e116190a8a29c238e655d728fc4816f7b4e0415
|
[
"MIT"
] | null | null | null |
nevergrad/optimization/recastlib.py
|
mathuvu/nevergrad
|
8e116190a8a29c238e655d728fc4816f7b4e0415
|
[
"MIT"
] | null | null | null |
nevergrad/optimization/recastlib.py
|
mathuvu/nevergrad
|
8e116190a8a29c238e655d728fc4816f7b4e0415
|
[
"MIT"
] | null | null | null |
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import functools
import math
import warnings
import weakref
import numpy as np
from scipy import optimize as scipyoptimize
import nevergrad.common.typing as tp
from nevergrad.parametrization import parameter as p
from nevergrad.common import errors
from . import base
from .base import IntOrParameter
from . import recaster
class _NonObjectMinimizeBase(recaster.SequentialRecastOptimizer):
    """Recast optimizer driving scipy / CMA / NLOPT minimizers.

    The wrapped library owns its own optimization loop; the recasting
    machinery feeds it an objective function evaluated through nevergrad's
    ask/tell interface on a worker thread.
    """

    def __init__(
        self,
        parametrization: IntOrParameter,
        budget: tp.Optional[int] = None,
        num_workers: int = 1,
        *,
        method: str = "Nelder-Mead",
        random_restart: bool = False,
    ) -> None:
        super().__init__(parametrization, budget=budget, num_workers=num_workers)
        self.multirun = 1  # work in progress
        # Maps between the standardized space and [0, 1]^d when the
        # parametrization is fully bounded (used by CMA and NLOPT only).
        self._normalizer: tp.Any = None
        self.initial_guess: tp.Optional[tp.ArrayLike] = None
        # configuration
        assert (
            method
            in [
                "CmaFmin2",
                "Nelder-Mead",
                "COBYLA",
                "SLSQP",
                "Powell",
            ]
            or "NLOPT" in method
        ), f"Unknown method '{method}'"
        self.method = method
        self.random_restart = random_restart
        # The following line rescales to [0, 1] if fully bounded.
        if method == "CmaFmin2" or "NLOPT" in method:
            normalizer = p.helpers.Normalizer(self.parametrization)
            if normalizer.fully_bounded:
                self._normalizer = normalizer

    def _internal_tell_not_asked(self, candidate: p.Parameter, loss: tp.Loss) -> None:
        """Called whenever calling "tell" on a candidate that was not "asked".
        Defaults to the standard tell pipeline.
        """  # We do not do anything; this just updates the current best.

    def get_optimization_function(self) -> tp.Callable[[tp.Callable[[tp.ArrayLike], float]], tp.ArrayLike]:
        """Return the callable run on the worker thread.

        weakref.proxy avoids a reference cycle: the thread's function must
        not keep the optimizer instance itself alive.
        """
        return functools.partial(self._optimization_function, weakref.proxy(self))

    @staticmethod
    def _optimization_function(
        weakself: tp.Any, objective_function: tp.Callable[[tp.ArrayLike], float]
    ) -> tp.ArrayLike:
        """Run the underlying minimizer until the budget is exhausted and
        return the best point found (in nevergrad's standardized space)."""
        # pylint:disable=unused-argument
        budget = np.inf if weakself.budget is None else weakself.budget
        best_res = np.inf
        best_x: np.ndarray = weakself.current_bests["average"].x  # np.zeros(self.dimension)
        if weakself.initial_guess is not None:
            best_x = np.array(weakself.initial_guess, copy=True)  # copy, just to make sure it is not modified
        remaining: float = budget - weakself._num_ask
        while remaining > 0:  # try to restart if budget is not elapsed
            options: tp.Dict[str, tp.Any] = {} if weakself.budget is None else {"maxiter": remaining}
            # options: tp.Dict[str, tp.Any] = {} if self.budget is None else {"maxiter": remaining}
            if weakself.method[:5] == "NLOPT":
                # This is NLOPT, used as in the PCSE simulator notebook.
                # ( https://github.com/ajwdewit/pcse_notebooks ).
                import nlopt

                def nlopt_objective_function(*args):
                    # nlopt passes (x, grad); only the point is used here.
                    data = np.asarray([arg for arg in args])[0]
                    assert len(data) == weakself.dimension, (
                        str(data) + " does not have length " + str(weakself.dimension)
                    )
                    if weakself._normalizer is not None:
                        data = weakself._normalizer.backward(np.asarray(data, dtype=np.float32))
                    return objective_function(data)

                # Sbplx (based on Subplex) is used by default.
                nlopt_param = (
                    getattr(nlopt, weakself.method[6:]) if len(weakself.method) > 5 else nlopt.LN_SBPLX
                )
                opt = nlopt.opt(nlopt_param, weakself.dimension)
                # Assign the objective function calculator
                opt.set_min_objective(nlopt_objective_function)
                # Set the bounds.
                opt.set_lower_bounds(np.zeros(weakself.dimension))
                opt.set_upper_bounds(np.ones(weakself.dimension))
                # opt.set_initial_step([0.05, 0.05])
                # NOTE(review): budget may be np.inf here when no budget was
                # given — confirm nlopt accepts it / callers always set one.
                opt.set_maxeval(budget)

                # Start the optimization with the first guess
                firstguess = 0.5 * np.ones(weakself.dimension)
                best_x = opt.optimize(firstguess)
                # print("\noptimum at TDWI: %s, SPAN: %s" % (x[0], x[1]))
                # print("minimum value = ", opt.last_optimum_value())
                # print("result code = ", opt.last_optimize_result())
                # print("With %i function calls" % objfunc_calculator.n_calls)
                if weakself._normalizer is not None:
                    best_x = weakself._normalizer.backward(np.asarray(best_x, dtype=np.float32))

            elif weakself.method == "CmaFmin2":
                import cma  # import inline in order to avoid matplotlib initialization warning

                def cma_objective_function(data):
                    # Hopefully the line below does nothing if unbounded and rescales from [0, 1] if bounded.
                    if weakself._normalizer is not None:
                        data = weakself._normalizer.backward(np.asarray(data, dtype=np.float32))
                    return objective_function(data)

                # cma.fmin2(objective_function, [0.0] * self.dimension, [1.0] * self.dimension, remaining)
                x0 = 0.5 * np.ones(weakself.dimension)
                num_calls = 0
                while budget - num_calls > 0:
                    options = {"maxfevals": budget - num_calls, "verbose": -9}
                    if weakself._normalizer is not None:
                        # Tell CMA to work in [0, 1].
                        options["bounds"] = [0.0, 1.0]
                    res = cma.fmin(
                        cma_objective_function,
                        x0=x0,
                        sigma0=0.2,
                        options=options,
                        restarts=9,
                    )
                    # Random restart point for the next fmin round.
                    x0 = 0.5 + np.random.uniform() * np.random.uniform(
                        low=-0.5, high=0.5, size=weakself.dimension
                    )
                    if res[1] < best_res:
                        best_res = res[1]
                        best_x = res[0]
                        if weakself._normalizer is not None:
                            best_x = weakself._normalizer.backward(np.asarray(best_x, dtype=np.float32))
                    num_calls += res[2]

            else:
                # scipy.optimize methods (Nelder-Mead, COBYLA, SLSQP, Powell).
                res = scipyoptimize.minimize(
                    objective_function,
                    best_x
                    if not weakself.random_restart
                    else weakself._rng.normal(0.0, 1.0, weakself.dimension),
                    method=weakself.method,
                    options=options,
                    tol=0,
                )
                if res.fun < best_res:
                    best_res = res.fun
                    best_x = res.x
            remaining = budget - weakself._num_ask
        return best_x
class NonObjectOptimizer(base.ConfiguredOptimizer):
    """Wrapper over Scipy optimizer implementations, in standard ask and tell format.
    This is actually an import from scipy-optimize, including Sequential Quadratic Programming,

    Parameters
    ----------
    method: str
        Name of the method to use among:

        - Nelder-Mead
        - COBYLA
        - SQP (or SLSQP): very powerful e.g. in continuous noisy optimization. It is based on
          approximating the objective function by quadratic models.
        - Powell
        - NLOPT* (https://nlopt.readthedocs.io/en/latest/; by default, uses Sbplx, based on Subplex);
            can be NLOPT,
                NLOPT_LN_SBPLX,
                NLOPT_LN_PRAXIS,
                NLOPT_GN_DIRECT,
                NLOPT_GN_DIRECT_L,
                NLOPT_GN_CRS2_LM,
                NLOPT_GN_AGS,
                NLOPT_GN_ISRES,
                NLOPT_GN_ESCH,
                NLOPT_LN_COBYLA,
                NLOPT_LN_BOBYQA,
                NLOPT_LN_NEWUOA_BOUND,
                NLOPT_LN_NELDERMEAD.
    random_restart: bool
        whether to restart at a random point if the optimizer converged but the budget is not entirely
        spent yet (otherwise, restarts from best point)

    Note
    ----
    These optimizers do not support asking several candidates in a row
    """

    # Flags read by nevergrad's registry/benchmark machinery.
    recast = True
    no_parallelization = True

    # pylint: disable=unused-argument
    def __init__(self, *, method: str = "Nelder-Mead", random_restart: bool = False) -> None:
        # locals() forwards the keyword arguments above to
        # _NonObjectMinimizeBase; keep parameter names in sync with it.
        super().__init__(_NonObjectMinimizeBase, locals())
# Pre-configured scipy/CMA/NLOPT optimizers registered under their public
# names (the R* variants enable random restarts).
NelderMead = NonObjectOptimizer(method="Nelder-Mead").set_name("NelderMead", register=True)
CmaFmin2 = NonObjectOptimizer(method="CmaFmin2").set_name("CmaFmin2", register=True)
NLOPT = NonObjectOptimizer(method="NLOPT").set_name("NLOPT", register=True)
Powell = NonObjectOptimizer(method="Powell").set_name("Powell", register=True)
RPowell = NonObjectOptimizer(method="Powell", random_restart=True).set_name("RPowell", register=True)
Cobyla = NonObjectOptimizer(method="COBYLA").set_name("Cobyla", register=True)
RCobyla = NonObjectOptimizer(method="COBYLA", random_restart=True).set_name("RCobyla", register=True)
SQP = NonObjectOptimizer(method="SLSQP").set_name("SQP", register=True)
SLSQP = SQP  # Just so that people who are familiar with SLSQP naming are not lost.
RSQP = NonObjectOptimizer(method="SLSQP", random_restart=True).set_name("RSQP", register=True)
RSLSQP = RSQP  # Just so that people who are familiar with SLSQP naming are not lost.
class _PymooMinimizeBase(recaster.SequentialRecastOptimizer):
    """Recast optimizer that delegates the optimization loop to pymoo."""

    def __init__(
        self,
        parametrization: IntOrParameter,
        budget: tp.Optional[int] = None,
        num_workers: int = 1,
        *,
        algorithm: str,
    ) -> None:
        super().__init__(parametrization, budget=budget, num_workers=num_workers)
        # configuration
        self.algorithm = algorithm
        # presumably disables nevergrad's own hypervolume bookkeeping since
        # pymoo manages the multi-objective archive — TODO confirm upstream.
        self._no_hypervolume = True
        # Seed for the pymoo run; -1 means "not drawn yet" (drawn lazily from
        # the optimizer RNG so runs are reproducible).
        self._initial_seed = -1

    def get_optimization_function(self) -> tp.Callable[[tp.Callable[..., tp.Any]], tp.Optional[tp.ArrayLike]]:
        """Return the callable run on the worker thread (seed drawn on first use)."""
        if self._initial_seed == -1:
            self._initial_seed = self._rng.randint(2**30)
        # weakref.proxy avoids a reference cycle through the worker thread.
        return functools.partial(self._optimization_function, weakref.proxy(self))

    # pylint:disable=useless-return
    @staticmethod
    def _optimization_function(
        weakself: tp.Any, objective_function: tp.Callable[[tp.ArrayLike], float]
    ) -> tp.Optional[tp.ArrayLike]:
        """Run pymoo's minimize; results flow back through ask/tell, so None
        is returned rather than a best point."""
        # pylint:disable=unused-argument, import-outside-toplevel
        from pymoo import optimize as pymoooptimize

        from pymoo.factory import get_algorithm as get_pymoo_algorithm

        # from pymoo.factory import get_reference_directions

        # reference direction code for when we want to use the other MOO optimizers in Pymoo
        # if self.algorithm in [
        #     "rnsga2",
        #     "nsga3",
        #     "unsga3",
        #     "rnsga3",
        #     "moead",
        #     "ctaea",
        # ]:  # algorithms that require reference points or reference directions
        # the appropriate n_partitions must be looked into
        #     ref_dirs = get_reference_directions("das-dennis", self.num_objectives, n_partitions=12)
        #     algorithm = get_pymoo_algorithm(self.algorithm, ref_dirs)
        # else:
        algorithm = get_pymoo_algorithm(weakself.algorithm)
        problem = _create_pymoo_problem(weakself, objective_function)
        pymoooptimize.minimize(problem, algorithm, seed=weakself._initial_seed)
        return None

    def _internal_ask_candidate(self) -> p.Parameter:
        """
        Special version to make sure that num_objectives has been set before
        the proper _internal_ask_candidate, in our parent class, is called.
        """
        if self.num_objectives == 0:
            # dummy ask i.e. not activating pymoo until num_objectives is set
            warnings.warn(
                "with this optimizer, it is more efficient to set num_objectives before the optimization begins",
                errors.NevergradRuntimeWarning,
            )
            # We need to get a datapoint that is a random point in parameter space,
            # and waste an evaluation on it.
            return self.parametrization.spawn_child()
        return super()._internal_ask_candidate()

    def _internal_tell_candidate(self, candidate: p.Parameter, loss: float) -> None:
        """
        Special version to make sure that we the extra initial evaluation which
        we may have done in order to get num_objectives, is discarded.
        Note that this discarding means that the extra point will not make it into
        replay_archive_tell. Correspondingly, because num_objectives will make it into
        the pickle, __setstate__ will never need a dummy ask.
        """
        if self._messaging_thread is None:
            return  # dummy tell i.e. not activating pymoo until num_objectives is set
        super()._internal_tell_candidate(candidate, loss)

    def _post_loss(self, candidate: p.Parameter, loss: float) -> tp.Loss:
        # pylint: disable=unused-argument
        """
        Multi-Objective override for this function.
        """
        return candidate.losses
class Pymoo(base.ConfiguredOptimizer):
    """Wrapper over Pymoo optimizer implementations, in standard ask and tell format.

    This is actually an import from Pymoo Optimize.

    Parameters
    ----------
    algorithm: str
        Use "algorithm-name" with following names to access algorithm classes:
        Single-Objective
        -"de"
        -'ga'
        -"brkga"
        -"nelder-mead"
        -"pattern-search"
        -"cmaes"
        Multi-Objective
        -"nsga2"
        Multi-Objective requiring reference directions, points or lines
        -"rnsga2"
        -"nsga3"
        -"unsga3"
        -"rnsga3"
        -"moead"
        -"ctaea"

    Note
    ----
    These optimizers do not support asking several candidates in a row
    """

    # the underlying run happens through the recaster machinery (worker thread)
    recast = True
    # only one candidate may be asked at a time (see Note in the docstring)
    no_parallelization = True

    # pylint: disable=unused-argument
    def __init__(self, *, algorithm: str) -> None:
        super().__init__(_PymooMinimizeBase, locals())
class _PymooBatchMinimizeBase(recaster.BatchRecastOptimizer):
    """Batch variant of the pymoo bridge: whole populations are asked at once."""

    # pylint: disable=abstract-method

    def __init__(
        self,
        parametrization: IntOrParameter,
        budget: tp.Optional[int] = None,
        num_workers: int = 1,
        *,
        algorithm: str,
    ) -> None:
        super().__init__(parametrization, budget=budget, num_workers=num_workers)
        # configuration
        self.algorithm = algorithm  # pymoo algorithm name, resolved lazily by get_pymoo_algorithm
        self._no_hypervolume = True
        # -1 is a sentinel for "seed not drawn yet"; the seed is drawn lazily
        # from self._rng so that nevergrad's seeding controls pymoo's RNG.
        self._initial_seed = -1

    def get_optimization_function(self) -> tp.Callable[[tp.Callable[..., tp.Any]], tp.Optional[tp.ArrayLike]]:
        """Return the callable that the messaging thread will execute.

        A ``weakref.proxy`` is bound instead of ``self`` to avoid a reference
        cycle between the optimizer and the thread's closure.
        """
        if self._initial_seed == -1:
            self._initial_seed = self._rng.randint(2**30)
        return functools.partial(self._optimization_function, weakref.proxy(self))

    # pylint:disable=useless-return
    @staticmethod
    def _optimization_function(
        weakself: tp.Any, objective_function: tp.Callable[[tp.ArrayLike], float]
    ) -> tp.Optional[tp.ArrayLike]:
        """Thread entry point: run pymoo's ``minimize`` on the recast objective."""
        # pylint:disable=unused-argument, import-outside-toplevel
        from pymoo import optimize as pymoooptimize
        from pymoo.factory import get_algorithm as get_pymoo_algorithm

        # from pymoo.factory import get_reference_directions
        # reference direction code for when we want to use the other MOO optimizers in Pymoo
        # if self.algorithm in [
        #     "rnsga2",
        #     "nsga3",
        #     "unsga3",
        #     "rnsga3",
        #     "moead",
        #     "ctaea",
        # ]:  # algorithms that require reference points or reference directions
        # the appropriate n_partitions must be looked into
        #     ref_dirs = get_reference_directions("das-dennis", self.num_objectives, n_partitions=12)
        #     algorithm = get_pymoo_algorithm(self.algorithm, ref_dirs)
        # else:
        algorithm = get_pymoo_algorithm(weakself.algorithm)
        # False: build a non-elementwise problem so pymoo evaluates batches
        problem = _create_pymoo_problem(weakself, objective_function, False)
        pymoooptimize.minimize(problem, algorithm, seed=weakself._initial_seed)
        # the recaster ignores the return value; None is returned on purpose
        return None

    def _internal_ask_candidate(self) -> p.Parameter:
        """Reads messages from the thread in which the underlying optimization function is running
        New messages are sent as "ask".
        """
        # get a datapoint that is a random point in parameter space
        if self.num_objectives == 0:  # dummy ask i.e. not activating pymoo until num_objectives is set
            warnings.warn(
                "with this optimizer, it is more efficient to set num_objectives before the optimization begins",
                errors.NevergradRuntimeWarning,
            )
            return self.parametrization.spawn_child()
        return super()._internal_ask_candidate()

    def _internal_tell_candidate(self, candidate: p.Parameter, loss: float) -> None:
        """Returns value for a point which was "asked"
        (none asked point cannot be "tell")
        """
        if self._messaging_thread is None:
            return  # dummy tell i.e. not activating pymoo until num_objectives is set
        super()._internal_tell_candidate(candidate, loss)

    def _post_loss(self, candidate: p.Parameter, loss: float) -> tp.Loss:
        # pylint: disable=unused-argument
        """
        Multi-Objective override for this function: the scalar ``loss`` is
        ignored and the candidate's full loss vector is forwarded instead.
        """
        return candidate.losses
class PymooBatch(base.ConfiguredOptimizer):
    """Wrapper over Pymoo optimizer implementations, in standard ask and tell format.

    This is actually an import from Pymoo Optimize.

    Parameters
    ----------
    algorithm: str
        Use "algorithm-name" with following names to access algorithm classes:
        Single-Objective
        -"de"
        -'ga'
        -"brkga"
        -"nelder-mead"
        -"pattern-search"
        -"cmaes"
        Multi-Objective
        -"nsga2"
        Multi-Objective requiring reference directions, points or lines
        -"rnsga2"
        -"nsga3"
        -"unsga3"
        -"rnsga3"
        -"moead"
        -"ctaea"

    Note
    ----
    These optimizers do not support asking several candidates in a row
    """

    # the underlying run happens through the recaster machinery (worker thread)
    recast = True

    # pylint: disable=unused-argument
    def __init__(self, *, algorithm: str) -> None:
        super().__init__(_PymooBatchMinimizeBase, locals())
def _create_pymoo_problem(
    optimizer: base.Optimizer,
    objective_function: tp.Callable[[tp.ArrayLike], float],
    elementwise: bool = True,
):
    """Wrap a nevergrad objective into a pymoo ``Problem`` instance.

    Uses the pymoo >= 0.5.0 problem classes when available, falling back to
    the pre-0.5.0 ``Problem`` class with its ``elementwise_evaluation`` flag.
    """
    # pylint:disable=import-outside-toplevel
    extra_kwargs = {}
    try:
        from pymoo.core.problem import ElementwiseProblem, Problem  # type: ignore

        problem_cls = ElementwiseProblem if elementwise else Problem
    except ImportError:
        # pymoo < 0.5.0 exposes a single Problem class plus a flag
        from pymoo.model.problem import Problem as problem_cls  # type: ignore

        extra_kwargs = {"elementwise_evaluation": elementwise}

    class _PymooProblem(problem_cls):  # type: ignore
        def __init__(self, optimizer, objective_function):
            self.objective_function = objective_function
            super().__init__(
                n_var=optimizer.dimension,
                n_obj=optimizer.num_objectives,
                n_constr=0,  # constraints handled already by nevergrad
                xl=-math.pi * 0.5,
                xu=math.pi * 0.5,
                **extra_kwargs,
            )

        def _evaluate(self, X, out, *args, **kwargs):
            # pylint:disable=unused-argument
            # pymoo supplies bounded values in [-pi/2, pi/2]; tan maps them
            # back to the unbounded reals that nevergrad expects.
            out["F"] = self.objective_function(np.tan(X))

    return _PymooProblem(optimizer, objective_function)
# NSGA-II instances; only the sequential variant is registered by name.
PymooNSGA2 = Pymoo(algorithm="nsga2").set_name("PymooNSGA2", register=True)
PymooBatchNSGA2 = PymooBatch(algorithm="nsga2").set_name("PymooBatchNSGA2", register=False)
| 40.354331
| 117
| 0.615073
|
import functools
import math
import warnings
import weakref
import numpy as np
from scipy import optimize as scipyoptimize
import nevergrad.common.typing as tp
from nevergrad.parametrization import parameter as p
from nevergrad.common import errors
from . import base
from .base import IntOrParameter
from . import recaster
class _NonObjectMinimizeBase(recaster.SequentialRecastOptimizer):
def __init__(
self,
parametrization: IntOrParameter,
budget: tp.Optional[int] = None,
num_workers: int = 1,
*,
method: str = "Nelder-Mead",
random_restart: bool = False,
) -> None:
super().__init__(parametrization, budget=budget, num_workers=num_workers)
self.multirun = 1 self._normalizer: tp.Any = None
self.initial_guess: tp.Optional[tp.ArrayLike] = None
assert (
method
in [
"CmaFmin2",
"Nelder-Mead",
"COBYLA",
"SLSQP",
"Powell",
]
or "NLOPT" in method
), f"Unknown method '{method}'"
self.method = method
self.random_restart = random_restart
if method == "CmaFmin2" or "NLOPT" in method:
normalizer = p.helpers.Normalizer(self.parametrization)
if normalizer.fully_bounded:
self._normalizer = normalizer
def _internal_tell_not_asked(self, candidate: p.Parameter, loss: tp.Loss) -> None:
def get_optimization_function(self) -> tp.Callable[[tp.Callable[[tp.ArrayLike], float]], tp.ArrayLike]:
return functools.partial(self._optimization_function, weakref.proxy(self))
@staticmethod
def _optimization_function(
weakself: tp.Any, objective_function: tp.Callable[[tp.ArrayLike], float]
) -> tp.ArrayLike:
budget = np.inf if weakself.budget is None else weakself.budget
best_res = np.inf
best_x: np.ndarray = weakself.current_bests["average"].x if weakself.initial_guess is not None:
best_x = np.array(weakself.initial_guess, copy=True) remaining: float = budget - weakself._num_ask
while remaining > 0: options: tp.Dict[str, tp.Any] = {} if weakself.budget is None else {"maxiter": remaining}
if weakself.method[:5] == "NLOPT":
import nlopt
def nlopt_objective_function(*args):
data = np.asarray([arg for arg in args])[0]
assert len(data) == weakself.dimension, (
str(data) + " does not have length " + str(weakself.dimension)
)
if weakself._normalizer is not None:
data = weakself._normalizer.backward(np.asarray(data, dtype=np.float32))
return objective_function(data)
nlopt_param = (
getattr(nlopt, weakself.method[6:]) if len(weakself.method) > 5 else nlopt.LN_SBPLX
)
opt = nlopt.opt(nlopt_param, weakself.dimension)
opt.set_min_objective(nlopt_objective_function)
opt.set_lower_bounds(np.zeros(weakself.dimension))
opt.set_upper_bounds(np.ones(weakself.dimension))
opt.set_maxeval(budget)
firstguess = 0.5 * np.ones(weakself.dimension)
best_x = opt.optimize(firstguess)
if weakself._normalizer is not None:
best_x = weakself._normalizer.backward(np.asarray(best_x, dtype=np.float32))
elif weakself.method == "CmaFmin2":
import cma
def cma_objective_function(data):
if weakself._normalizer is not None:
data = weakself._normalizer.backward(np.asarray(data, dtype=np.float32))
return objective_function(data)
x0 = 0.5 * np.ones(weakself.dimension)
num_calls = 0
while budget - num_calls > 0:
options = {"maxfevals": budget - num_calls, "verbose": -9}
if weakself._normalizer is not None:
options["bounds"] = [0.0, 1.0]
res = cma.fmin(
cma_objective_function,
x0=x0,
sigma0=0.2,
options=options,
restarts=9,
)
x0 = 0.5 + np.random.uniform() * np.random.uniform(
low=-0.5, high=0.5, size=weakself.dimension
)
if res[1] < best_res:
best_res = res[1]
best_x = res[0]
if weakself._normalizer is not None:
best_x = weakself._normalizer.backward(np.asarray(best_x, dtype=np.float32))
num_calls += res[2]
else:
res = scipyoptimize.minimize(
objective_function,
best_x
if not weakself.random_restart
else weakself._rng.normal(0.0, 1.0, weakself.dimension),
method=weakself.method,
options=options,
tol=0,
)
if res.fun < best_res:
best_res = res.fun
best_x = res.x
remaining = budget - weakself._num_ask
return best_x
class NonObjectOptimizer(base.ConfiguredOptimizer):
recast = True
no_parallelization = True
def __init__(self, *, method: str = "Nelder-Mead", random_restart: bool = False) -> None:
super().__init__(_NonObjectMinimizeBase, locals())
NelderMead = NonObjectOptimizer(method="Nelder-Mead").set_name("NelderMead", register=True)
CmaFmin2 = NonObjectOptimizer(method="CmaFmin2").set_name("CmaFmin2", register=True)
NLOPT = NonObjectOptimizer(method="NLOPT").set_name("NLOPT", register=True)
Powell = NonObjectOptimizer(method="Powell").set_name("Powell", register=True)
RPowell = NonObjectOptimizer(method="Powell", random_restart=True).set_name("RPowell", register=True)
Cobyla = NonObjectOptimizer(method="COBYLA").set_name("Cobyla", register=True)
RCobyla = NonObjectOptimizer(method="COBYLA", random_restart=True).set_name("RCobyla", register=True)
SQP = NonObjectOptimizer(method="SLSQP").set_name("SQP", register=True)
SLSQP = SQP RSQP = NonObjectOptimizer(method="SLSQP", random_restart=True).set_name("RSQP", register=True)
RSLSQP = RSQP
class _PymooMinimizeBase(recaster.SequentialRecastOptimizer):
def __init__(
self,
parametrization: IntOrParameter,
budget: tp.Optional[int] = None,
num_workers: int = 1,
*,
algorithm: str,
) -> None:
super().__init__(parametrization, budget=budget, num_workers=num_workers)
self.algorithm = algorithm
self._no_hypervolume = True
self._initial_seed = -1
def get_optimization_function(self) -> tp.Callable[[tp.Callable[..., tp.Any]], tp.Optional[tp.ArrayLike]]:
if self._initial_seed == -1:
self._initial_seed = self._rng.randint(2**30)
return functools.partial(self._optimization_function, weakref.proxy(self))
@staticmethod
def _optimization_function(
weakself: tp.Any, objective_function: tp.Callable[[tp.ArrayLike], float]
) -> tp.Optional[tp.ArrayLike]:
from pymoo import optimize as pymoooptimize
from pymoo.factory import get_algorithm as get_pymoo_algorithm
algorithm = get_pymoo_algorithm(weakself.algorithm)
problem = _create_pymoo_problem(weakself, objective_function)
pymoooptimize.minimize(problem, algorithm, seed=weakself._initial_seed)
return None
def _internal_ask_candidate(self) -> p.Parameter:
if self.num_objectives == 0:
warnings.warn(
"with this optimizer, it is more efficient to set num_objectives before the optimization begins",
errors.NevergradRuntimeWarning,
)
return self.parametrization.spawn_child()
return super()._internal_ask_candidate()
def _internal_tell_candidate(self, candidate: p.Parameter, loss: float) -> None:
if self._messaging_thread is None:
return super()._internal_tell_candidate(candidate, loss)
def _post_loss(self, candidate: p.Parameter, loss: float) -> tp.Loss:
return candidate.losses
class Pymoo(base.ConfiguredOptimizer):
recast = True
no_parallelization = True
def __init__(self, *, algorithm: str) -> None:
super().__init__(_PymooMinimizeBase, locals())
class _PymooBatchMinimizeBase(recaster.BatchRecastOptimizer):
def __init__(
self,
parametrization: IntOrParameter,
budget: tp.Optional[int] = None,
num_workers: int = 1,
*,
algorithm: str,
) -> None:
super().__init__(parametrization, budget=budget, num_workers=num_workers)
self.algorithm = algorithm
self._no_hypervolume = True
self._initial_seed = -1
def get_optimization_function(self) -> tp.Callable[[tp.Callable[..., tp.Any]], tp.Optional[tp.ArrayLike]]:
if self._initial_seed == -1:
self._initial_seed = self._rng.randint(2**30)
return functools.partial(self._optimization_function, weakref.proxy(self))
@staticmethod
def _optimization_function(
weakself: tp.Any, objective_function: tp.Callable[[tp.ArrayLike], float]
) -> tp.Optional[tp.ArrayLike]:
from pymoo import optimize as pymoooptimize
from pymoo.factory import get_algorithm as get_pymoo_algorithm
algorithm = get_pymoo_algorithm(weakself.algorithm)
problem = _create_pymoo_problem(weakself, objective_function, False)
pymoooptimize.minimize(problem, algorithm, seed=weakself._initial_seed)
return None
def _internal_ask_candidate(self) -> p.Parameter:
if self.num_objectives == 0: warnings.warn(
"with this optimizer, it is more efficient to set num_objectives before the optimization begins",
errors.NevergradRuntimeWarning,
)
return self.parametrization.spawn_child()
return super()._internal_ask_candidate()
def _internal_tell_candidate(self, candidate: p.Parameter, loss: float) -> None:
if self._messaging_thread is None:
return super()._internal_tell_candidate(candidate, loss)
def _post_loss(self, candidate: p.Parameter, loss: float) -> tp.Loss:
return candidate.losses
class PymooBatch(base.ConfiguredOptimizer):
recast = True
def __init__(self, *, algorithm: str) -> None:
super().__init__(_PymooBatchMinimizeBase, locals())
def _create_pymoo_problem(
optimizer: base.Optimizer,
objective_function: tp.Callable[[tp.ArrayLike], float],
elementwise: bool = True,
):
kwargs = {}
try:
from pymoo.core.problem import ElementwiseProblem, Problem
Base = ElementwiseProblem if elementwise else Problem
except ImportError:
from pymoo.model.problem import Problem as Base
kwargs = {"elementwise_evaluation": elementwise}
class _PymooProblem(Base): def __init__(self, optimizer, objective_function):
self.objective_function = objective_function
super().__init__(
n_var=optimizer.dimension,
n_obj=optimizer.num_objectives,
n_constr=0, xl=-math.pi * 0.5,
xu=math.pi * 0.5,
**kwargs,
)
def _evaluate(self, X, out, *args, **kwargs):
out["F"] = self.objective_function(np.tan(X))
return _PymooProblem(optimizer, objective_function)
PymooNSGA2 = Pymoo(algorithm="nsga2").set_name("PymooNSGA2", register=True)
PymooBatchNSGA2 = PymooBatch(algorithm="nsga2").set_name("PymooBatchNSGA2", register=False)
| true
| true
|
f7091dc148b356a0b6931bd347d661f76f85ade9
| 627
|
py
|
Python
|
Day16/program.py
|
CAG2Mark/Advent-Of-Code-Solutions
|
b744025b8c53dc7ea810a13dc818568520110b86
|
[
"MIT"
] | null | null | null |
Day16/program.py
|
CAG2Mark/Advent-Of-Code-Solutions
|
b744025b8c53dc7ea810a13dc818568520110b86
|
[
"MIT"
] | null | null | null |
Day16/program.py
|
CAG2Mark/Advent-Of-Code-Solutions
|
b744025b8c53dc7ea810a13dc818568520110b86
|
[
"MIT"
] | null | null | null |
# valid ranges
# Appears to be Advent of Code day 16, part 1 ("ticket" puzzle) — TODO confirm.
# Section 1 of the input: rule lines like "field: a-b or c-d".
# Field names are not needed; every "a-b" range is stored as [low, high].
rules = []
while True:
    try:
        ln = input()
        if not ln.strip():  # blank line ends the rules section
            break
        rule = [x.split("-") for x in ln.split(": ")[1].split(" or ")]
        for r in rule:
            rules.append([int(x) for x in r])
    except EOFError:
        break
# Skip the next section (presumably "your ticket") up to its blank line,
while True:
    if not input().strip(): break
input()  # then skip one header line (presumably "nearby tickets:")
# Sum every value that matches no range at all (the "error rate").
inval_sum = 0
while True:
    try:
        ln = input()
        vals = ln.split(',')
        for v in vals:
            if not any(r[0] <= int(v) <= r[1] for r in rules):
                inval_sum += int(v)
    except EOFError:
        break
print(inval_sum)
| 19
| 70
| 0.464115
|
rules = []
while True:
try:
ln = input()
if not ln.strip():
break
rule = [x.split("-") for x in ln.split(": ")[1].split(" or ")]
for r in rule:
rules.append([int(x) for x in r])
except EOFError:
break
while True:
if not input().strip(): break
input()
inval_sum = 0
while True:
try:
ln = input()
vals = ln.split(',')
for v in vals:
if not any(r[0] <= int(v) <= r[1] for r in rules):
inval_sum += int(v)
except EOFError:
break
print(inval_sum)
| true
| true
|
f7091e05767123ebd494b47f0357c05cee301864
| 2,616
|
py
|
Python
|
reports/report/visualizations/linplot.py
|
pplonski/automlbenchmark
|
f49ddfa2583643173296ed8ab45a8c14c62a6987
|
[
"MIT"
] | 4
|
2021-04-26T12:03:59.000Z
|
2021-11-07T20:06:00.000Z
|
reports/report/visualizations/linplot.py
|
pplonski/automlbenchmark
|
f49ddfa2583643173296ed8ab45a8c14c62a6987
|
[
"MIT"
] | null | null | null |
reports/report/visualizations/linplot.py
|
pplonski/automlbenchmark
|
f49ddfa2583643173296ed8ab45a8c14c62a6987
|
[
"MIT"
] | null | null | null |
import matplotlib as mp
import pandas as pd
import seaborn as sb
import report.config as config
from ..util import create_file, sort_dataframe
from .util import savefig, set_scales, set_labels, task_labels
def draw_parallel_coord(df, class_column,
                        x_labels=True, yscale='linear',
                        title=None, xlabel=None, ylabel=None,
                        legend_loc='best', legend_title=None, colormap=None):
    """Draw a parallel-coordinates plot of ``df`` colored by ``class_column``.

    ``colormap`` falls back to ``config.colormap`` when not given.
    Returns the created matplotlib figure.
    """
    colormap = config.colormap if colormap is None else colormap
    with sb.axes_style('ticks', rc={'grid.linestyle': 'dotted'}), sb.plotting_context('paper'):
        # print(sb.axes_style())
        parallel_fig = mp.pyplot.figure(dpi=120, figsize=(10, df.shape[0]))
        # select the first colors from the colormap to ensure we use the same colors as in the stripplot later
        colors = mp.cm.get_cmap(colormap).colors[:len(df[class_column].unique())]
        axes = pd.plotting.parallel_coordinates(df,
                                                class_column=class_column,
                                                color=colors,
                                                axvlines=False,
                                                )
        set_scales(axes, yscale=yscale)
        # rebuild the legend so it honors the requested location and title
        handles, labels = axes.get_legend_handles_labels()
        axes.legend(handles, labels, loc=legend_loc, title=legend_title)
        set_labels(axes, title=title, xlabel=xlabel, ylabel=ylabel, x_labels=x_labels,
                   x_tick_params=dict(labelrotation=90))
        return parallel_fig
def draw_score_parallel_coord(col, results, type_filter='all', metadata=None,
                              x_sort_by='name', ylabel=None, filename=None,
                              **kwargs):
    """Parallel-coordinates plot of mean ``col`` per framework across tasks.

    ``type_filter`` restricts the columns to one task type; ``metadata``,
    when given, sorts task columns by the ``x_sort_by`` attribute. The
    figure is saved under "graphics" when ``filename`` is provided.
    Returns the figure.
    """
    res_group = results.groupby(['type', 'task', 'framework'])
    # mean score per framework with one column per (type, task)
    df = res_group[col].mean().unstack(['type', 'task'])
    df = df if type_filter == 'all' \
        else df.iloc[:, df.columns.get_loc(type_filter)]
    if metadata:
        # cols is a (type, task) pair; sort by the task's metadata attribute
        sort_by = lambda cols: getattr(metadata[cols[1]], x_sort_by)
        df = sort_dataframe(df, by=sort_by, axis=1)
    df.reset_index(inplace=True)
    fig = draw_parallel_coord(df,
                              'framework',
                              x_labels=task_labels(df.columns.drop('framework')),
                              # xlabel="Task",
                              ylabel=ylabel or "Score",
                              legend_title="Framework",
                              **kwargs)
    if filename:
        savefig(fig, create_file("graphics", config.results_group, filename))
    return fig
| 48.444444
| 110
| 0.569572
|
import matplotlib as mp
import pandas as pd
import seaborn as sb
import report.config as config
from ..util import create_file, sort_dataframe
from .util import savefig, set_scales, set_labels, task_labels
def draw_parallel_coord(df, class_column,
x_labels=True, yscale='linear',
title=None, xlabel=None, ylabel=None,
legend_loc='best', legend_title=None, colormap=None):
colormap = config.colormap if colormap is None else colormap
with sb.axes_style('ticks', rc={'grid.linestyle': 'dotted'}), sb.plotting_context('paper'):
parallel_fig = mp.pyplot.figure(dpi=120, figsize=(10, df.shape[0]))
colors = mp.cm.get_cmap(colormap).colors[:len(df[class_column].unique())]
axes = pd.plotting.parallel_coordinates(df,
class_column=class_column,
color=colors,
axvlines=False,
)
set_scales(axes, yscale=yscale)
handles, labels = axes.get_legend_handles_labels()
axes.legend(handles, labels, loc=legend_loc, title=legend_title)
set_labels(axes, title=title, xlabel=xlabel, ylabel=ylabel, x_labels=x_labels,
x_tick_params=dict(labelrotation=90))
return parallel_fig
def draw_score_parallel_coord(col, results, type_filter='all', metadata=None,
x_sort_by='name', ylabel=None, filename=None,
**kwargs):
res_group = results.groupby(['type', 'task', 'framework'])
df = res_group[col].mean().unstack(['type', 'task'])
df = df if type_filter == 'all' \
else df.iloc[:, df.columns.get_loc(type_filter)]
if metadata:
sort_by = lambda cols: getattr(metadata[cols[1]], x_sort_by)
df = sort_dataframe(df, by=sort_by, axis=1)
df.reset_index(inplace=True)
fig = draw_parallel_coord(df,
'framework',
x_labels=task_labels(df.columns.drop('framework')),
ylabel=ylabel or "Score",
legend_title="Framework",
**kwargs)
if filename:
savefig(fig, create_file("graphics", config.results_group, filename))
return fig
| true
| true
|
f7091f618378824f233afd61b0beecfbadd5f8c8
| 2,552
|
py
|
Python
|
lib/greet.py
|
yndajas/Twitch-YndaBot
|
41b3600f5336a073f42c1cc296609dbe88c8e510
|
[
"MIT"
] | null | null | null |
lib/greet.py
|
yndajas/Twitch-YndaBot
|
41b3600f5336a073f42c1cc296609dbe88c8e510
|
[
"MIT"
] | null | null | null |
lib/greet.py
|
yndajas/Twitch-YndaBot
|
41b3600f5336a073f42c1cc296609dbe88c8e510
|
[
"MIT"
] | null | null | null |
async def greet(ctx):
    """Echo recognised greetings back at the message author.

    Scans the (lower-cased) message for any known greeting and, provided no
    user is tagged, replies with the matched greetings joined in natural
    language style: "A", "A and b", or "A, b and c". The first match keeps
    its listed capitalisation; later matches are lower-cased.
    """
    greetings = [
        "Ahn nyong ha se yo",
        "Ahn-nyong-ha-se-yo",
        "Ahoj",
        "An-nyŏng-ha-se-yo",
        "As-salamu alaykum",
        "Assalamo aleikum",
        "Assalamualaikum",
        "Avuxeni",
        "Bonġu",
        "Bonjour",
        "Bună ziua",
        "Ciao",
        "Cześć",
        "Dia dhuit",
        "Dobar dan",
        "Dobra većer",
        "Dobro jutro",
        "God dag",
        "Góðan dag",
        "Grüß gott",
        "Guten tag",
        "Hafa adai",
        "Hallå",
        "Hallo",
        "Hello",
        "Hoi",
        "Hola",
        "How ya doing",
        "How you doing",
        "Howdy",
        "Hujambo",
        "Hyvää päivää",
        "Ia orna",
        "Jo napot",
        "Konnichiwa",
        "Marhaba",
        "Merhaba",
        "Moïen",
        "Namaskar",
        "Namaste",
        "Namastē",
        "Nde-ewo",
        "Nǐ hǎo",
        "Niltze",
        "Now then",
        "Olá",
        "Salam",
        "Salve",
        "Sawasdee",
        "Sawubona",
        "Selamat siang",
        "Shalom",
        "Shwmae",
        "Sveiki",
        "Wassup",
        "What's up",
        "Xin chào",
        "Yasou",
        "Zdraveite",
        "Zdravo",
        "Zdravstvuyte",
        "안녕하세요",
        "こんにちは",
        "你好",
    ]
    message = ctx.content.lower()
    # Stay quiet whenever the message tags someone.
    if "@" in message:
        return
    # Collect every greeting present in the message, preserving list order.
    matched = [g for g in greetings if g.lower() in message]
    if not matched:
        return
    # First match keeps its capitalisation; the rest are lower-cased.
    phrases = [matched[0]] + [g.lower() for g in matched[1:]]
    if len(phrases) == 1:
        reply = phrases[0]
    elif len(phrases) == 2:
        reply = " and ".join(phrases)
    else:
        reply = f"{', '.join(phrases[:-1])} and {phrases[-1]}"
    await ctx.channel.send(f"{reply}, @{ctx.author.name}!")
| 25.019608
| 83
| 0.460031
|
async def greet(ctx):
greetings = [
"Ahn nyong ha se yo",
"Ahn-nyong-ha-se-yo",
"Ahoj",
"An-nyŏng-ha-se-yo",
"As-salamu alaykum",
"Assalamo aleikum",
"Assalamualaikum",
"Avuxeni",
"Bonġu",
"Bonjour",
"Bună ziua",
"Ciao",
"Cześć",
"Dia dhuit",
"Dobar dan",
"Dobra većer",
"Dobro jutro",
"God dag",
"Góðan dag",
"Grüß gott",
"Guten tag",
"Hafa adai",
"Hallå",
"Hallo",
"Hello",
"Hoi",
"Hola",
"How ya doing",
"How you doing",
"Howdy",
"Hujambo",
"Hyvää päivää",
"Ia orna",
"Jo napot",
"Konnichiwa",
"Marhaba",
"Merhaba",
"Moïen",
"Namaskar",
"Namaste",
"Namastē",
"Nde-ewo",
"Nǐ hǎo",
"Niltze",
"Now then",
"Olá",
"Salam",
"Salve",
"Sawasdee",
"Sawubona",
"Selamat siang",
"Shalom",
"Shwmae",
"Sveiki",
"Wassup",
"What's up",
"Xin chào",
"Yasou",
"Zdraveite",
"Zdravo",
"Zdravstvuyte",
"안녕하세요",
"こんにちは",
"你好",
]
message = ctx.content.lower()
# if no one is tagged in the message
if "@" not in message:
message_greetings = []
# check if any of the greetings are in the message
for greeting in greetings:
if greeting.lower() in message:
message_greetings.append(greeting)
# if any are, format them into a greeting back to the user
if len(message_greetings) > 0:
greetings_string = message_greetings[0]
if len(message_greetings) > 1:
first_greeting = message_greetings[0]
other_greetings = []
for greeting in message_greetings[1 : len(message_greetings)]:
other_greetings.append(greeting.lower())
all_greetings = [first_greeting] + other_greetings
if len(message_greetings) > 2:
greetings_string = (
f"{', '.join(all_greetings[0:-1])} and {all_greetings[-1]}"
)
else:
greetings_string = " and ".join(all_greetings)
# respond to user
await ctx.channel.send(f"{greetings_string}, @{ctx.author.name}!")
| true
| true
|
f7091f945438d05214721e9df9b4991008b776a6
| 968
|
py
|
Python
|
services/core-api/app/api/now_applications/models/activity_summary/cut_lines_polarization_survey.py
|
bcgov/mds
|
6c427a66a5edb4196222607291adef8fd6677038
|
[
"Apache-2.0"
] | 25
|
2018-07-09T19:04:37.000Z
|
2022-03-15T17:27:10.000Z
|
services/core-api/app/api/now_applications/models/activity_summary/cut_lines_polarization_survey.py
|
areyeslo/mds
|
e8c38e593e09b78e2a57009c0d003d6c4bfa32e6
|
[
"Apache-2.0"
] | 983
|
2018-04-25T20:08:07.000Z
|
2022-03-31T21:45:20.000Z
|
services/core-api/app/api/now_applications/models/activity_summary/cut_lines_polarization_survey.py
|
areyeslo/mds
|
e8c38e593e09b78e2a57009c0d003d6c4bfa32e6
|
[
"Apache-2.0"
] | 58
|
2018-05-15T22:35:50.000Z
|
2021-11-29T19:40:52.000Z
|
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.schema import FetchedValue
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.hybrid import hybrid_property
from app.api.utils.models_mixins import Base
from app.extensions import db
from app.api.now_applications.models.activity_summary.activity_summary_base import ActivitySummaryBase
class CutLinesPolarizationSurvey(ActivitySummaryBase):
    """Activity summary for cut lines / polarization survey activities.

    Shares the base activity-summary table via polymorphic identity — there
    is no dedicated table for this activity type.
    """

    __mapper_args__ = {
        'polymorphic_identity': 'cut_lines_polarization_survey', ## type code
    }
    ## NO TABLE FOR THIS TYPE

    # detail rows linked through the shared activity/detail cross-reference table
    details = db.relationship(
        'CutLinesPolarizationSurveyDetail',
        secondary='activity_summary_detail_xref',
        load_on_pending=True)

    @hybrid_property
    def calculated_total_disturbance(self):
        # delegate to the base-class calculation over this activity's details
        return self.calculate_total_disturbance_area(self.details)

    def __repr__(self):
        return '<CutLinesPolarizationSurvey %r>' % self.activity_summary_id
| 33.37931
| 102
| 0.785124
|
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.schema import FetchedValue
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.hybrid import hybrid_property
from app.api.utils.models_mixins import Base
from app.extensions import db
from app.api.now_applications.models.activity_summary.activity_summary_base import ActivitySummaryBase
class CutLinesPolarizationSurvey(ActivitySummaryBase):
__mapper_args__ = {
'polymorphic_identity': 'cut_lines_polarization_survey', }
details = db.relationship(
'CutLinesPolarizationSurveyDetail',
secondary='activity_summary_detail_xref',
load_on_pending=True)
@hybrid_property
def calculated_total_disturbance(self):
return self.calculate_total_disturbance_area(self.details)
def __repr__(self):
return '<CutLinesPolarizationSurvey %r>' % self.activity_summary_id
| true
| true
|
f709200bc42e18277171017bd58e82fdd5518401
| 144
|
py
|
Python
|
contacts/apps.py
|
phildini/logtacts
|
2cfc1d93a6ec7535b57a42b46b7d8c5c09a4729b
|
[
"MIT"
] | 56
|
2016-02-22T16:12:53.000Z
|
2021-01-12T20:59:02.000Z
|
contacts/apps.py
|
phildini/logtacts
|
2cfc1d93a6ec7535b57a42b46b7d8c5c09a4729b
|
[
"MIT"
] | 107
|
2016-01-04T00:49:37.000Z
|
2021-11-18T18:27:24.000Z
|
contacts/apps.py
|
phildini/logtacts
|
2cfc1d93a6ec7535b57a42b46b7d8c5c09a4729b
|
[
"MIT"
] | 23
|
2016-01-04T00:54:09.000Z
|
2021-07-09T15:23:15.000Z
|
from django.apps import AppConfig
class ContactConfig(AppConfig):
    """Django application configuration for the ``contacts`` app."""

    name = 'contacts'

    def ready(self):
        # Imported for its side effects once the app registry is ready
        # (signal registration is the usual purpose — module not visible here).
        import contacts.signals
| 16
| 33
| 0.701389
|
from django.apps import AppConfig
class ContactConfig(AppConfig):
name = 'contacts'
def ready(self):
import contacts.signals
| true
| true
|
f7092088390ccc9209ff2691ca6f9d0dba0c03d2
| 2,644
|
py
|
Python
|
scrapers/add_db_entry.py
|
ivanek/covid_19
|
e7d7652c65cbdf9a2b12ddacaa7f2415d11a5b87
|
[
"CC-BY-4.0"
] | 1
|
2020-03-30T12:48:04.000Z
|
2020-03-30T12:48:04.000Z
|
scrapers/add_db_entry.py
|
prematzerosoft/covid_19
|
a642d7ce12830d4bace93dd14b850973cfeee6b0
|
[
"CC-BY-4.0"
] | null | null | null |
scrapers/add_db_entry.py
|
prematzerosoft/covid_19
|
a642d7ce12830d4bace93dd14b850973cfeee6b0
|
[
"CC-BY-4.0"
] | null | null | null |
#!/usr/bin/env python3
# Imports scraper output into the local SQLite database: each stdin line of
# the form "<key> <date>T<time> <confirmed> <deceased|-> OK" becomes one row
# in the `data` table of data.sqlite.
import re
import sys
import sqlite3
import traceback
import os

# directory containing this script, so data.sqlite is found regardless of CWD
__location__ = os.path.realpath(
    os.path.join(
        os.getcwd(),
        os.path.dirname(__file__)
    )
)

# number of stdin lines that failed to parse; forces a non-zero exit at the end
input_failures = 0

try:
    DATABASE_NAME = os.path.join(__location__, 'data.sqlite')
    conn = sqlite3.connect(DATABASE_NAME)
    i = 0  # NOTE(review): unused
    for line in sys.stdin:
        l = line.strip()
        # groups: 1=key (unused; area comes from env), 2=datetime, 3=confirmed, 4=deceased or "-"
        match = re.search('^(\w+)\s+([\w\-\:]+)\s+(\w+)\s+((\w+|-))\s+OK', l)
        if not match:
            input_failures += 1
            print(f'Error: Not matched input line: {l}')
            continue
        # split "date T time" — the time component is optional
        date_part = match.group(2).split('T')
        data = {
            'date': date_part[0],
            'time': '',
            'area': os.environ['SCRAPER_KEY'],
            'tested': None,
            'confirmed': int(match.group(3)),
            'hospitalized': None,
            'icu': None,
            'vent': None,
            'released': None,
            'deceased': match.group(4),
            'source': os.environ['SCRAPER_SOURCE']
        }
        if len(date_part) == 2:
            data['time'] = date_part[1]
        # "-" marks an unknown deceased count
        if (data['deceased'] == '-'):
            data['deceased'] = None
        else:
            data['deceased'] = int(data['deceased'])
        c = conn.cursor()
        try:
            print(data)
            c.execute(
                '''
                INSERT INTO data (
                    date,
                    time,
                    abbreviation_canton_and_fl,
                    ncumul_tested,
                    ncumul_conf,
                    ncumul_hosp,
                    ncumul_ICU,
                    ncumul_vent,
                    ncumul_released,
                    ncumul_deceased,
                    source
                )
                VALUES
                (?,?,?,?,?,?,?,?,?,?,?)
                ''',
                [
                    data['date'],
                    data['time'],
                    data['area'],
                    data['tested'],
                    data['confirmed'],
                    data['hospitalized'],
                    data['icu'],
                    data['vent'],
                    data['released'],
                    data['deceased'],
                    data['source'],
                ]
            )
        except sqlite3.IntegrityError:
            # presumably a uniqueness constraint on date/area — TODO confirm schema
            print("Error: Data for this date has already been added")
        finally:
            conn.commit()
except Exception as e:
    print("Error: %s" % e)
    print(traceback.format_exc())
    sys.exit(1)
finally:
    conn.close()

if input_failures:
    sys.exit(1)
| 25.921569
| 77
| 0.421331
|
# Read scraper result lines from stdin and insert them into data.sqlite.
# Line format: "<area> <date[Ttime]> <confirmed> <deceased|-> OK".
# Requires SCRAPER_KEY and SCRAPER_SOURCE in the environment.
import re
import sys
import sqlite3
import traceback
import os
# Directory containing this script; the SQLite file lives next to it.
__location__ = os.path.realpath(
    os.path.join(
        os.getcwd(),
        os.path.dirname(__file__)
    )
)
input_failures = 0
try:
    DATABASE_NAME = os.path.join(__location__, 'data.sqlite')
    # NOTE(review): if connect() raises, `conn` is never bound and the
    # `conn.close()` in the outer finally raises NameError, masking the
    # original error.
    conn = sqlite3.connect(DATABASE_NAME)
    # NOTE(review): `i` is never used.
    i = 0
    for line in sys.stdin:
        l = line.strip()
        # Groups: 1 = area (unused), 2 = ISO timestamp, 3 = confirmed,
        # 4 = deceased count or '-' for "not published".
        match = re.search('^(\w+)\s+([\w\-\:]+)\s+(\w+)\s+((\w+|-))\s+OK', l)
        if not match:
            input_failures += 1
            print(f'Error: Not matched input line: {l}')
            continue
        # Timestamp may be date-only or date + 'T' + time.
        date_part = match.group(2).split('T')
        data = {
            'date': date_part[0],
            'time': '',
            'area': os.environ['SCRAPER_KEY'],
            'tested': None,
            'confirmed': int(match.group(3)),
            'hospitalized': None,
            'icu': None,
            'vent': None,
            'released': None,
            'deceased': match.group(4),
            'source': os.environ['SCRAPER_SOURCE']
        }
        if len(date_part) == 2:
            data['time'] = date_part[1]
        # '-' means no deceased figure was published for this line.
        if (data['deceased'] == '-'):
            data['deceased'] = None
        else:
            data['deceased'] = int(data['deceased'])
        c = conn.cursor()
        try:
            print(data)
            c.execute(
                '''
                INSERT INTO data (
                    date,
                    time,
                    abbreviation_canton_and_fl,
                    ncumul_tested,
                    ncumul_conf,
                    ncumul_hosp,
                    ncumul_ICU,
                    ncumul_vent,
                    ncumul_released,
                    ncumul_deceased,
                    source
                )
                VALUES
                (?,?,?,?,?,?,?,?,?,?,?)
                ''',
                [
                    data['date'],
                    data['time'],
                    data['area'],
                    data['tested'],
                    data['confirmed'],
                    data['hospitalized'],
                    data['icu'],
                    data['vent'],
                    data['released'],
                    data['deceased'],
                    data['source'],
                ]
            )
        except sqlite3.IntegrityError:
            # Duplicate rows violate a UNIQUE constraint; report and keep
            # processing the remaining input lines.
            print("Error: Data for this date has already been added")
        finally:
            conn.commit()
except Exception as e:
    print("Error: %s" % e)
    print(traceback.format_exc())
    sys.exit(1)
finally:
    conn.close()
if input_failures:
    sys.exit(1)
| true
| true
|
f70920a45d8b352e57cdd5c4ba4ed7a956b3f421
| 4,150
|
py
|
Python
|
pyesgf/util.py
|
ggarcias/esgf-pyclient-cmip6
|
9e7975d2e676ed2c4001edb4e25c9c20cc16b7af
|
[
"BSD-3-Clause"
] | 17
|
2016-09-07T02:55:30.000Z
|
2022-03-10T15:34:53.000Z
|
pyesgf/util.py
|
ggarcias/esgf-pyclient-cmip6
|
9e7975d2e676ed2c4001edb4e25c9c20cc16b7af
|
[
"BSD-3-Clause"
] | 61
|
2015-05-27T08:10:46.000Z
|
2022-03-17T12:36:45.000Z
|
pyesgf/util.py
|
ggarcias/esgf-pyclient-cmip6
|
9e7975d2e676ed2c4001edb4e25c9c20cc16b7af
|
[
"BSD-3-Clause"
] | 22
|
2015-10-27T11:21:05.000Z
|
2022-01-12T08:26:16.000Z
|
"""
Utility functions using the pyesgf package.
"""
import sys
from urllib.parse import quote_plus
def ats_url(base_url):
    """Return the SAML AttributeService endpoint under *base_url*.

    Trailing slashes on *base_url* are dropped before joining so the
    result always contains a single separator.
    """
    root = base_url.rstrip('/')
    return root + '/' + 'esgf-idp/saml/soap/secure/attributeService.htm'
def get_manifest(drs_id, version, connection):
    """Retrieve the filenames, sizes and checksums of a dataset.

    Returns a dict mapping each filename to a dict with keys
    ``checksum_type``, ``checksum`` and ``size``.

    :param drs_id: DRS identifier string, without the version suffix.
    :param version: the version as a string or int.
    :param connection: an ESGF search connection; it should either use
        distrib=True or point at a suitable search interface.
    :raises ValueError: if the search (without replicas) matches more
        than one dataset for the given drs_id and version.
    """
    version_str = str(version) if isinstance(version, int) else version
    search_ctx = connection.new_context(drs_id=drs_id, version=version_str)
    hits = search_ctx.search()
    if len(hits) > 1:
        raise ValueError("Search for dataset %s.v%s returns multiple hits" %
                         (drs_id, version_str))
    files_ctx = hits[0].file_context()
    return dict(
        (entry.filename, {'checksum_type': entry.checksum_type,
                          'checksum': entry.checksum,
                          'size': entry.size})
        for entry in files_ctx.search())
def urlencode(query):
    """
    Encode a sequence of two-element tuples or dictionary into a URL query
    string.

    This version is adapted from the standard library to understand operators
    in the pyesgf.search.constraints module: a value may optionally be an
    ``(operator_tag, value)`` tuple (e.g. not-equal, rendered as ``k!=v``).

    If the query arg is a sequence of two-element tuples, the order of the
    parameters in the output will match the order of parameters in the
    input.
    """
    if hasattr(query, "items"):
        # mapping objects
        query = list(query.items())
    else:
        # it's a bother at times that strings and string-like objects are
        # sequences...
        try:
            # non-sequence items should not work with len()
            # non-empty strings will fail this
            if len(query) and not isinstance(query[0], tuple):
                raise TypeError
            # zero-length sequences of all types will get here and succeed,
            # but that's a minor nit - since the original implementation
            # allowed empty dicts that type of behavior probably should be
            # preserved for consistency
        except TypeError:
            # Re-raise with the original traceback attached.  (Previously
            # the traceback object was passed as a second constructor
            # argument — a botched port of the Python 2 three-argument
            # raise — which embedded the traceback in the exception args.)
            tb = sys.exc_info()[2]
            raise TypeError("not a valid non-string sequence "
                            "or mapping object").with_traceback(tb)

    def append(k, v, tag, lst):
        # Render one key/value pair according to its operator tag.
        from .search.consts import OPERATOR_NEQ
        if tag == OPERATOR_NEQ:
            lst.append('%s!=%s' % (k, v))
        elif tag is None:
            lst.append('%s=%s' % (k, v))
        else:
            raise ValueError('Unknown operator tag %s' % tag)

    def strip_tag(v):
        # Split an (operator_tag, value) tuple; plain values get tag None.
        if isinstance(v, tuple):
            tag, v = v
        else:
            tag = None
        return tag, v

    lst = []
    for k, v in query:
        tag, v = strip_tag(v)
        k = quote_plus(str(k))
        if isinstance(v, str):
            if hasattr(v, 'encode'):
                # Force ASCII; unmappable characters become '?'.
                # (On Python 3 str always has encode, so this branch is
                # always taken; the else is kept for safety.)
                v = quote_plus(v.encode("ASCII", "replace"))
            else:
                v = quote_plus(v)
            append(k, v, tag, lst)
        else:
            try:
                # is this a sufficient test for sequence-ness?
                len(v)
            except TypeError:
                # not a sequence: stringify the single value
                v = quote_plus(str(v))
                append(k, v, tag, lst)
            else:
                # loop over the sequence: one k=elt pair per element
                for elt in v:
                    append(k, quote_plus(str(elt)), tag, lst)
    return '&'.join(lst)
| 30.291971
| 78
| 0.576867
|
import sys
from urllib.parse import quote_plus
def ats_url(base_url):
    """Return the URL of the ESGF SAML AttributeService under *base_url*."""
    # Strip trailing '/' so the join below yields a single separator.
    base_url = base_url.rstrip('/')
    return '/'.join([base_url,
                     'esgf-idp/saml/soap/secure/attributeService.htm'])
def get_manifest(drs_id, version, connection):
    """Return ``{filename: {'checksum_type', 'checksum', 'size'}}`` for a
    dataset.

    :param drs_id: DRS identifier string, without the version suffix.
    :param version: the version as a string or int.
    :param connection: an ESGF search connection used for the lookup.
    :raises ValueError: if more than one dataset matches drs_id/version.
    """
    if isinstance(version, int):
        version = str(version)
    context = connection.new_context(drs_id=drs_id, version=version)
    results = context.search()
    if len(results) > 1:
        raise ValueError("Search for dataset %s.v%s returns multiple hits" %
                         (drs_id, version))
    file_context = results[0].file_context()
    manifest = {}
    for file in file_context.search():
        manifest[file.filename] = {
            'checksum_type': file.checksum_type,
            'checksum': file.checksum,
            'size': file.size,
        }
    return manifest
def urlencode(query):
    """Encode a mapping or a sequence of two-element tuples into a URL
    query string.

    Adapted from the standard library; a value may optionally be an
    ``(operator_tag, value)`` tuple (e.g. not-equal, rendered as ``k!=v``).
    Tuple-sequence input preserves parameter order in the output.
    """
    if hasattr(query, "items"):
        # mapping objects: flatten to a list of (key, value) pairs
        query = list(query.items())
    else:
        # strings and string-like objects are also
        # sequences...
        try:
            # non-sequence items should not work with len()
            # non-empty strings will fail this
            if len(query) and not isinstance(query[0], tuple):
                raise TypeError
            # zero-length sequences of all types will get here and succeed,
            # but that's a minor nit - since the original implementation
            # allowed empty dicts, preserve that behavior for consistency
        except TypeError:
            # NOTE(review): the traceback object ends up inside the
            # exception args; this looks like a botched port of the
            # Python 2 three-argument raise — probably meant
            # ``raise TypeError(...).with_traceback(tb)``.
            ty, va, tb = sys.exc_info()
            raise TypeError("not a valid non-string sequence "
                            "or mapping object", tb)
    def append(k, v, tag, lst):
        # Render one key/value pair according to its operator tag.
        from .search.consts import OPERATOR_NEQ
        if tag == OPERATOR_NEQ:
            lst.append('%s!=%s' % (k, v))
        elif tag is None:
            lst.append('%s=%s' % (k, v))
        else:
            raise ValueError('Unknown operator tag %s' % tag)
    def strip_tag(v):
        # Split an (operator_tag, value) tuple; plain values get tag None.
        if isinstance(v, tuple):
            tag, v = v
        else:
            tag = None
        return tag, v
    lst = []
    for k, v in query:
        tag, v = strip_tag(v)
        k = quote_plus(str(k))
        if isinstance(v, str):
            if hasattr(v, 'encode'):
                # Force ASCII; unmappable characters become '?'.
                v = quote_plus(v.encode("ASCII", "replace"))
            else:
                v = quote_plus(v)
            append(k, v, tag, lst)
        else:
            try:
                # sequence-ness test: non-sequences fail len()
                len(v)
            except TypeError:
                # not a sequence: stringify the single value
                v = quote_plus(str(v))
                append(k, v, tag, lst)
            else:
                # a sequence: emit one k=elt pair per element
                for elt in v:
                    append(k, quote_plus(str(elt)), tag, lst)
    return '&'.join(lst)
| true
| true
|
f70920aa2ec17f63790605e4dc9745d131bf1ad5
| 27,868
|
py
|
Python
|
apps/addons/forms.py
|
Joergen/olympia
|
eb84203469adbb6584e50d7bb6f9de7f20980dac
|
[
"BSD-3-Clause"
] | null | null | null |
apps/addons/forms.py
|
Joergen/olympia
|
eb84203469adbb6584e50d7bb6f9de7f20980dac
|
[
"BSD-3-Clause"
] | null | null | null |
apps/addons/forms.py
|
Joergen/olympia
|
eb84203469adbb6584e50d7bb6f9de7f20980dac
|
[
"BSD-3-Clause"
] | null | null | null |
from datetime import datetime
from decimal import Decimal
import os
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.forms.formsets import formset_factory
import commonware.log
import happyforms
from quieter_formset.formset import BaseFormSet
from tower import ugettext as _, ugettext_lazy as _lazy, ungettext as ngettext
from access import acl
import amo
import captcha.fields
from amo.fields import ColorField
from amo.urlresolvers import reverse
from amo.utils import slug_validator, slugify, sorted_groupby, remove_icons
from addons.models import (Addon, AddonCategory, BlacklistedSlug, Category,
Persona)
from addons.tasks import save_theme, save_theme_reupload
from addons.utils import reverse_name_lookup
from addons.widgets import IconWidgetRenderer, CategoriesSelectMultiple
from devhub import tasks as devhub_tasks
from tags.models import Tag
from translations import LOCALES
from translations.fields import TransField, TransTextarea
from translations.forms import TranslationFormMixin
from translations.models import Translation
from translations.utils import transfield_changed
from translations.widgets import TranslationTextInput
from users.models import UserEmailField
from versions.models import Version
log = commonware.log.getLogger('z.addons')
def clean_name(name, instance=None):
    """Validate that *name* is not already used by a different add-on.

    Raises ValidationError when the reverse name lookup finds an id that
    does not belong to *instance* (or when no instance was supplied).
    """
    if not instance:
        log.debug('clean_name called without an instance: %s' % name)
    existing_id = reverse_name_lookup(name)
    taken_by_other = existing_id and (not instance or
                                      existing_id != instance.id)
    if taken_by_other:
        raise forms.ValidationError(_('This name is already in use. Please '
                                      'choose another.'))
    return name
def clean_slug(slug, instance):
    """Validate *slug*: well-formed, not taken, and not blacklisted.

    An unchanged slug skips the uniqueness and blacklist checks.
    """
    slug_validator(slug, lower=False)
    if slug == instance.slug:
        return slug
    if Addon.objects.filter(slug=slug).exists():
        raise forms.ValidationError(
            _('This slug is already in use. Please choose another.'))
    if BlacklistedSlug.blocked(slug):
        raise forms.ValidationError(
            _('The slug cannot be "%s". Please choose another.' % slug))
    return slug
def clean_tags(request, tags):
    """Normalize and validate a comma-separated *tags* string.

    Returns the set of slugified tag texts.  Raises ValidationError for
    blacklisted tags, restricted tags (unless the request user has the
    Addons:Edit permission), too many tags, or tags outside the allowed
    length range.
    """
    target = [slugify(t, spaces=True, lower=True) for t in tags.split(',')]
    # Drop empty entries produced by stray commas.
    target = set(filter(None, target))
    min_len = amo.MIN_TAG_LENGTH
    max_len = Tag._meta.get_field('tag_text').max_length
    max_tags = amo.MAX_TAGS
    total = len(target)
    blacklisted = (Tag.objects.values_list('tag_text', flat=True)
                   .filter(tag_text__in=target, blacklisted=True))
    if blacklisted:
        # L10n: {0} is a single tag or a comma-separated list of tags.
        msg = ngettext('Invalid tag: {0}', 'Invalid tags: {0}',
                       len(blacklisted)).format(', '.join(blacklisted))
        raise forms.ValidationError(msg)
    restricted = (Tag.objects.values_list('tag_text', flat=True)
                  .filter(tag_text__in=target, restricted=True))
    if not acl.action_allowed(request, 'Addons', 'Edit'):
        if restricted:
            # L10n: {0} is a single tag or a comma-separated list of tags.
            msg = ngettext('"{0}" is a reserved tag and cannot be used.',
                           '"{0}" are reserved tags and cannot be used.',
                           len(restricted)).format('", "'.join(restricted))
            raise forms.ValidationError(msg)
    else:
        # Admin's restricted tags don't count towards the limit.
        total = len(target - set(restricted))
    if total > max_tags:
        num = total - max_tags
        msg = ngettext('You have {0} too many tags.',
                       'You have {0} too many tags.', num).format(num)
        raise forms.ValidationError(msg)
    if any(t for t in target if len(t) > max_len):
        raise forms.ValidationError(
            _('All tags must be %s characters or less after invalid characters'
              ' are removed.' % max_len))
    if any(t for t in target if len(t) < min_len):
        msg = ngettext("All tags must be at least {0} character.",
                       "All tags must be at least {0} characters.",
                       min_len).format(min_len)
        raise forms.ValidationError(msg)
    return target
class AddonFormBase(TranslationFormMixin, happyforms.ModelForm):
    """Base for add-on edit forms; pops `request` from the kwargs."""

    def __init__(self, *args, **kw):
        self.request = kw.pop('request')
        super(AddonFormBase, self).__init__(*args, **kw)

    class Meta:
        # NOTE(review): this says `models` (plural), not `model`, so this
        # Meta is effectively ignored by ModelForm; subclasses declare
        # their own Meta with `model = Addon`.  Fixing it here would bind
        # the base class (and Meta-less subclasses) to the model and
        # change field generation — verify before changing.
        models = Addon
        fields = ('name', 'slug', 'summary', 'tags')

    def clean_slug(self):
        # Shared slug validation (uniqueness + blacklist).
        return clean_slug(self.cleaned_data['slug'], self.instance)

    def clean_tags(self):
        # Shared tag validation (blacklist, restrictions, limits).
        return clean_tags(self.request, self.cleaned_data['tags'])

    def get_tags(self, addon):
        """Return tag texts visible to the current user (admins see all)."""
        if acl.action_allowed(self.request, 'Addons', 'Edit'):
            return list(addon.tags.values_list('tag_text', flat=True))
        else:
            return list(addon.tags.filter(restricted=False)
                        .values_list('tag_text', flat=True))
class AddonFormBasic(AddonFormBase):
    """Form for the basic-details section: name, slug, summary, tags."""
    name = TransField(max_length=50)
    slug = forms.CharField(max_length=30)
    summary = TransField(widget=TransTextarea(attrs={'rows': 4}),
                         max_length=250)
    tags = forms.CharField(required=False)

    class Meta:
        model = Addon
        fields = ('name', 'slug', 'summary', 'tags')

    def __init__(self, *args, **kw):
        super(AddonFormBasic, self).__init__(*args, **kw)
        self.fields['tags'].initial = ', '.join(self.get_tags(self.instance))

        def validate_name(name):
            return clean_name(name, self.instance)

        # Work on a copy: validator lists persist between form instances,
        # so appending in place would accumulate validators.
        self.fields['name'].validators = (
            list(self.fields['name'].validators) + [validate_name])

    def save(self, addon, commit=False):
        """Persist the form and reconcile the addon's tag set."""
        new_tags = self.cleaned_data['tags']
        old_tags = [slugify(t, spaces=True) for t in self.get_tags(addon)]
        for tag_text in set(new_tags) - set(old_tags):
            Tag(tag_text=tag_text).save_tag(addon)
        for tag_text in set(old_tags) - set(new_tags):
            Tag(tag_text=tag_text).remove_tag(addon)
        # `commit` is deliberately ignored: pass False so the ManyToMany
        # fields can be handled above, then persist explicitly.
        addonform = super(AddonFormBasic, self).save(commit=False)
        addonform.save()
        return addonform
class AppFormBasic(AddonFormBasic):
    """Form to override name length for apps."""
    # Apps allow longer names than add-ons (128 vs 50 characters).
    name = TransField(max_length=128)
class CategoryForm(forms.Form):
    """Per-application category selection for an add-on."""
    # Hidden field identifying which application this form's categories
    # belong to (empty for app-agnostic categories).
    application = forms.TypedChoiceField(amo.APPS_CHOICES, coerce=int,
                                         widget=forms.HiddenInput,
                                         required=False)
    categories = forms.ModelMultipleChoiceField(
        queryset=Category.objects.all(), widget=CategoriesSelectMultiple)

    def save(self, addon):
        """Sync the addon's categories for this application to the form."""
        application = self.cleaned_data.get('application')
        categories_new = self.cleaned_data['categories']
        # Current categories for this application, or the app-agnostic
        # ones when no application is set.
        categories_old = [cats for app, cats in addon.app_categories if
                          (app and application and app.id == application)
                          or (not app and not application)]
        if categories_old:
            categories_old = categories_old[0]
        # Add new categories.
        for c in set(categories_new) - set(categories_old):
            AddonCategory(addon=addon, category=c).save()
        # Remove old categories.
        for c in set(categories_old) - set(categories_new):
            AddonCategory.objects.filter(addon=addon, category=c).delete()

    def clean_categories(self):
        # Enforce the featured lock, the count limit, and the misc rule.
        categories = self.cleaned_data['categories']
        total = categories.count()
        max_cat = amo.MAX_CATEGORIES
        # `disabled` is set by BaseCategoryFormSet when the add-on is
        # featured for this application.
        if getattr(self, 'disabled', False) and total:
            raise forms.ValidationError(
                _('Categories cannot be changed while your add-on is featured '
                  'for this application.'))
        if total > max_cat:
            # L10n: {0} is the number of categories.
            raise forms.ValidationError(ngettext(
                'You can have only {0} category.',
                'You can have only {0} categories.',
                max_cat).format(max_cat))
        has_misc = filter(lambda x: x.misc, categories)
        if has_misc and total > 1:
            raise forms.ValidationError(
                _('The miscellaneous category cannot be combined with '
                  'additional categories.'))
        return categories
class BaseCategoryFormSet(BaseFormSet):
    """Formset with one CategoryForm per application the add-on supports."""

    def __init__(self, *args, **kw):
        self.addon = kw.pop('addon')
        self.request = kw.pop('request', None)
        super(BaseCategoryFormSet, self).__init__(*args, **kw)
        self.initial = []
        apps = sorted(self.addon.compatible_apps.keys(),
                      key=lambda x: x.id)
        # Drop any apps that don't have appropriate categories.
        qs = Category.objects.filter(type=self.addon.type)
        app_cats = dict((k, list(v)) for k, v in
                        sorted_groupby(qs, 'application'))
        # Iterate over a copy since we remove from `apps` while looping.
        for app in list(apps):
            if app and not app_cats.get(app.id):
                apps.remove(app)
        if not app_cats:
            apps = []
        # Seed one initial dict per app with its current category ids.
        for app in apps:
            cats = dict(self.addon.app_categories).get(app, [])
            self.initial.append({'categories': [c.id for c in cats]})
        # Wire each form to its application and its category choices.
        for app, form in zip(apps, self.forms):
            key = app.id if app else None
            form.request = self.request
            form.initial['application'] = key
            form.app = app
            cats = sorted(app_cats[key], key=lambda x: x.name)
            form.fields['categories'].choices = [(c.id, c.name) for c in cats]

            # If this add-on is featured for this application, category
            # changes are forbidden.
            if not acl.action_allowed(self.request, 'Addons', 'Edit'):
                form.disabled = (app and self.addon.is_featured(app))

    def save(self):
        # Persist every per-application form against the same add-on.
        for f in self.forms:
            f.save(self.addon)
# One CategoryForm per compatible application; BaseCategoryFormSet seeds
# the initial data, so no extra blank forms are needed.
CategoryFormSet = formset_factory(form=CategoryForm,
                                  formset=BaseCategoryFormSet, extra=0)
def icons():
    """
    Generate the list of selectable default add-on icons as
    (pseudo-mime-type, description) tuples.
    """
    choices = [('image/jpeg', 'jpeg'), ('image/png', 'png'), ('', 'default')]
    _dirs, files = storage.listdir(settings.ADDON_ICONS_DEFAULT_PATH)
    for fname in files:
        # Only the 32px variants define an icon family; the generic
        # 'default' entry is already seeded above.
        if '32' not in fname or 'default' in fname:
            continue
        family = fname.split('-')[0]
        choices.append(('icon/%s' % family, family))
    return choices
class AddonFormMedia(AddonFormBase):
    """Form for editing an add-on's icon."""
    icon_type = forms.CharField(widget=forms.RadioSelect(
        renderer=IconWidgetRenderer, choices=[]), required=False)
    # Hash of a previously-uploaded temp icon file, if any.
    icon_upload_hash = forms.CharField(required=False)

    class Meta:
        model = Addon
        fields = ('icon_upload_hash', 'icon_type')

    def __init__(self, *args, **kwargs):
        super(AddonFormMedia, self).__init__(*args, **kwargs)

        # Add icons here so we only read the directory when
        # AddonFormMedia is actually being used.
        self.fields['icon_type'].widget.choices = icons()

    def save(self, addon, commit=True):
        """Persist the form; queue an icon resize if one was uploaded."""
        if self.cleaned_data['icon_upload_hash']:
            upload_hash = self.cleaned_data['icon_upload_hash']
            upload_path = os.path.join(settings.TMP_PATH, 'icon', upload_hash)
            dirname = addon.get_icon_dir()
            destination = os.path.join(dirname, '%s' % addon.id)
            # Clear previously-resized icons before queuing the resize.
            remove_icons(destination)
            devhub_tasks.resize_icon.delay(upload_path, destination,
                                           amo.ADDON_ICON_SIZES,
                                           set_modified_on=[addon])
        return super(AddonFormMedia, self).save(commit)
class AddonFormDetails(AddonFormBase):
    """Form for the details section: description, default locale, homepage."""
    default_locale = forms.TypedChoiceField(choices=LOCALES)

    class Meta:
        model = Addon
        fields = ('description', 'default_locale', 'homepage')

    def clean(self):
        # Make sure we have the required translations in the new locale.
        required = 'name', 'summary', 'description'
        data = self.cleaned_data
        if not self.errors and 'default_locale' in self.changed_data:
            fields = dict((k, getattr(self.instance, k + '_id'))
                          for k in required)
            locale = self.cleaned_data['default_locale']
            ids = filter(None, fields.values())
            qs = (Translation.objects.filter(locale=locale, id__in=ids,
                                             localized_string__isnull=False)
                  .values_list('id', flat=True))
            # Fields whose translation id is absent in the new locale.
            missing = [k for k, v in fields.items() if v not in qs]
            # They might be setting description right now.
            if 'description' in missing and locale in data['description']:
                missing.remove('description')
            if missing:
                raise forms.ValidationError(
                    _('Before changing your default locale you must have a '
                      'name, summary, and description in that locale. '
                      'You are missing %s.') % ', '.join(map(repr, missing)))
        return data
class AddonFormSupport(AddonFormBase):
    """Form for support contact information (email and URL)."""
    support_url = TransField.adapt(forms.URLField)(required=False)
    support_email = TransField.adapt(forms.EmailField)(required=False)

    class Meta:
        model = Addon
        fields = ('support_email', 'support_url')

    def __init__(self, *args, **kw):
        super(AddonFormSupport, self).__init__(*args, **kw)

    def save(self, addon, commit=True):
        # *addon* is unused; kept for signature parity with sibling forms.
        return super(AddonFormSupport, self).save(commit)
class AddonFormTechnical(AddonFormBase):
    """Form for the technical-details section of an add-on."""
    developer_comments = TransField(widget=TransTextarea, required=False)

    class Meta:
        model = Addon
        fields = ('developer_comments', 'view_source', 'site_specific',
                  'external_software', 'auto_repackage', 'public_stats',
                  'whiteboard')
class AddonForm(happyforms.ModelForm):
    """Add-on edit form using plain-text (single-locale) translation
    widgets; logs edit actions on save."""
    name = forms.CharField(widget=TranslationTextInput,)
    homepage = forms.CharField(widget=TranslationTextInput, required=False)
    eula = forms.CharField(widget=TranslationTextInput,)
    description = forms.CharField(widget=TranslationTextInput,)
    developer_comments = forms.CharField(widget=TranslationTextInput,)
    privacy_policy = forms.CharField(widget=TranslationTextInput,)
    the_future = forms.CharField(widget=TranslationTextInput,)
    the_reason = forms.CharField(widget=TranslationTextInput,)
    support_email = forms.CharField(widget=TranslationTextInput,)

    class Meta:
        model = Addon
        fields = ('name', 'homepage', 'default_locale', 'support_email',
                  'support_url', 'description', 'summary',
                  'developer_comments', 'eula', 'privacy_policy', 'the_reason',
                  'the_future', 'view_source', 'prerelease', 'site_specific',)
        exclude = ('status', )

    def clean_name(self):
        return clean_name(self.cleaned_data['name'])

    def save(self):
        desc = self.data.get('description')
        # `unicode` comparison: this codebase runs on Python 2.
        if desc and desc != unicode(self.instance.description):
            amo.log(amo.LOG.EDIT_DESCRIPTIONS, self.instance)
        if self.changed_data:
            amo.log(amo.LOG.EDIT_PROPERTIES, self.instance)
        super(AddonForm, self).save()
class AbuseForm(happyforms.Form):
    """Abuse-report form; the CAPTCHA is only shown to anonymous users."""
    recaptcha = captcha.fields.ReCaptchaField(label='')
    text = forms.CharField(required=True,
                           label='',
                           widget=forms.Textarea())

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request')
        super(AbuseForm, self).__init__(*args, **kwargs)
        # Authenticated users — or deployments without a reCAPTCHA key —
        # skip the CAPTCHA entirely.
        no_captcha_needed = (not self.request.user.is_anonymous() or
                             not settings.RECAPTCHA_PRIVATE_KEY)
        if no_captcha_needed:
            del self.fields['recaptcha']
class ThemeFormBase(AddonFormBase):
    """Shared setup for theme submission/edit forms: category choices and
    upload-widget attributes for the header/footer images."""

    def __init__(self, *args, **kwargs):
        super(ThemeFormBase, self).__init__(*args, **kwargs)
        visible = Category.objects.filter(type=amo.ADDON_PERSONA,
                                          weight__gte=0)
        ordered = sorted(visible, key=lambda cat: cat.name)
        self.fields['category'].choices = [(cat.id, cat.name)
                                           for cat in ordered]
        for part in ('header', 'footer'):
            upload_url = reverse('devhub.personas.upload_persona',
                                 args=['persona_%s' % part])
            self.fields[part].widget.attrs = {
                'data-upload-url': upload_url,
                'data-allowed-types': 'image/jpeg|image/png',
            }

    def clean_name(self):
        return clean_name(self.cleaned_data['name'])

    def clean_slug(self):
        return clean_slug(self.cleaned_data['slug'], self.instance)
class ThemeForm(ThemeFormBase):
    """Submission form for a new theme (Persona)."""
    name = forms.CharField(max_length=50)
    slug = forms.CharField(max_length=30)
    category = forms.ModelChoiceField(queryset=Category.objects.all(),
                                      widget=forms.widgets.RadioSelect)
    description = forms.CharField(widget=forms.Textarea(attrs={'rows': 4}),
                                  max_length=500, required=False)
    tags = forms.CharField(required=False)

    license = forms.TypedChoiceField(
        choices=amo.PERSONA_LICENSES_CHOICES,
        coerce=int, empty_value=None, widget=forms.HiddenInput,
        error_messages={'required': _lazy(u'A license must be selected.')})
    header = forms.FileField(required=False)
    header_hash = forms.CharField(widget=forms.HiddenInput)
    footer = forms.FileField(required=False)
    footer_hash = forms.CharField(widget=forms.HiddenInput, required=False)
    # Native color picker doesn't allow real time tracking of user input
    # and empty values, thus force the JavaScript color picker for now.
    # See bugs 1005206 and 1003575.
    accentcolor = ColorField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'color-picker'}),
    )
    textcolor = ColorField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'color-picker'}),
    )
    agreed = forms.BooleanField()
    # This lets us POST the data URIs of the unsaved previews so we can still
    # show them if there were form errors. It's really clever.
    unsaved_data = forms.CharField(required=False, widget=forms.HiddenInput)

    class Meta:
        model = Addon
        fields = ('name', 'slug', 'description', 'tags')

    def save(self, commit=False):
        """Create the pending Addon + Persona pair and queue image saves.

        `commit` is accepted for interface parity but the objects are
        always persisted here.
        """
        data = self.cleaned_data
        addon = Addon.objects.create(
            slug=data.get('slug'),
            status=amo.STATUS_PENDING, type=amo.ADDON_PERSONA)
        addon.name = {'en-US': data['name']}
        if data.get('description'):
            addon.description = data['description']
        addon._current_version = Version.objects.create(addon=addon,
                                                        version='0')
        addon.save()

        # Create Persona instance.
        p = Persona()
        p.persona_id = 0
        p.addon = addon
        p.header = 'header.png'
        if data['footer_hash']:
            p.footer = 'footer.png'
        if data['accentcolor']:
            p.accentcolor = data['accentcolor'].lstrip('#')
        if data['textcolor']:
            p.textcolor = data['textcolor'].lstrip('#')
        p.license = data['license']
        p.submit = datetime.now()
        user = self.request.amo_user
        p.author = user.username
        p.display_username = user.name
        p.save()

        # Save header, footer, and preview images.
        save_theme.delay(data['header_hash'], data['footer_hash'], addon)

        # Save user info.
        addon.addonuser_set.create(user=user, role=amo.AUTHOR_ROLE_OWNER)

        # Save tags.
        for t in data['tags']:
            Tag(tag_text=t).save_tag(addon)

        # Save categories.
        AddonCategory(addon=addon, category=data['category']).save()

        return addon
class EditThemeForm(AddonFormBase):
    """Edit form for an existing theme, including image re-upload."""
    name = TransField(max_length=50, label=_lazy('Give Your Theme a Name.'))
    slug = forms.CharField(max_length=30)
    category = forms.ModelChoiceField(queryset=Category.objects.all(),
                                      widget=forms.widgets.RadioSelect)
    description = TransField(
        widget=TransTextarea(attrs={'rows': 4}),
        max_length=500, required=False, label=_lazy('Describe your Theme.'))
    tags = forms.CharField(required=False)
    accentcolor = ColorField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'color-picker'}),
    )
    textcolor = ColorField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'color-picker'}),
    )
    license = forms.TypedChoiceField(
        choices=amo.PERSONA_LICENSES_CHOICES, coerce=int, empty_value=None,
        widget=forms.HiddenInput,
        error_messages={'required': _lazy(u'A license must be selected.')})

    # Theme re-upload.
    header = forms.FileField(required=False)
    header_hash = forms.CharField(widget=forms.HiddenInput, required=False)
    footer = forms.FileField(required=False)
    footer_hash = forms.CharField(widget=forms.HiddenInput, required=False)

    class Meta:
        model = Addon
        fields = ('name', 'slug', 'description', 'tags')

    def __init__(self, *args, **kw):
        self.request = kw.pop('request')
        # Skip AddonFormBase.__init__ (it would pop 'request' again);
        # call the ModelForm chain directly.
        super(AddonFormBase, self).__init__(*args, **kw)
        addon = Addon.objects.no_cache().get(id=self.instance.id)
        persona = addon.persona

        # Do not simply append validators, as validators will persist between
        # instances.
        self.fields['name'].validators = list(self.fields['name'].validators)
        self.fields['name'].validators.append(lambda x: clean_name(x, addon))

        # Allow theme artists to localize Name and Description.
        for trans in Translation.objects.filter(id=self.initial['name']):
            self.initial['name_' + trans.locale.lower()] = trans
        for trans in Translation.objects.filter(
                id=self.initial['description']):
            self.initial['description_' + trans.locale.lower()] = trans

        self.old_tags = self.get_tags(addon)
        self.initial['tags'] = ', '.join(self.old_tags)
        if persona.accentcolor:
            self.initial['accentcolor'] = '#' + persona.accentcolor
        if persona.textcolor:
            self.initial['textcolor'] = '#' + persona.textcolor
        self.initial['license'] = persona.license

        cats = sorted(Category.objects.filter(type=amo.ADDON_PERSONA,
                                              weight__gte=0),
                      key=lambda x: x.name)
        self.fields['category'].choices = [(c.id, c.name) for c in cats]
        try:
            self.initial['category'] = addon.categories.values_list(
                'id', flat=True)[0]
        except IndexError:
            # Theme has no category yet; leave the field unset.
            pass

        for field in ('header', 'footer'):
            self.fields[field].widget.attrs = {
                'data-upload-url': reverse('devhub.personas.reupload_persona',
                                           args=[addon.slug,
                                                 'persona_%s' % field]),
                'data-allowed-types': 'image/jpeg|image/png'
            }

    def save(self):
        """Persist changed Persona and Addon data; queue image re-upload."""
        addon = self.instance
        persona = addon.persona
        data = self.cleaned_data

        # Update Persona-specific data.
        persona_data = {
            'license': int(data['license']),
            'accentcolor': data['accentcolor'].lstrip('#'),
            'textcolor': data['textcolor'].lstrip('#'),
            'author': self.request.amo_user.username,
            'display_username': self.request.amo_user.name
        }
        changed = False
        for k, v in persona_data.iteritems():
            if v != getattr(persona, k):
                changed = True
                setattr(persona, k, v)
        if changed:
            persona.save()

        if self.changed_data:
            amo.log(amo.LOG.EDIT_PROPERTIES, addon)
        self.instance.modified = datetime.now()

        # Update Addon-specific data.
        changed = (
            set(self.old_tags) != data['tags'] or  # Check if tags changed.
            self.initial['slug'] != data['slug'] or  # Check if slug changed.
            transfield_changed('description', self.initial, data) or
            transfield_changed('name', self.initial, data))
        if changed:
            # Only save if addon data changed.
            super(EditThemeForm, self).save()

        # Update tags.
        tags_new = data['tags']
        tags_old = [slugify(t, spaces=True) for t in self.old_tags]
        # Add new tags.
        for t in set(tags_new) - set(tags_old):
            Tag(tag_text=t).save_tag(addon)
        # Remove old tags.
        for t in set(tags_old) - set(tags_new):
            Tag(tag_text=t).remove_tag(addon)

        # Update category.
        if data['category'].id != self.initial['category']:
            addon_cat = addon.addoncategory_set.all()[0]
            addon_cat.category = data['category']
            addon_cat.save()

        # Theme reupload.
        if not addon.is_pending():
            if data['header_hash'] or data['footer_hash']:
                save_theme_reupload.delay(
                    data['header_hash'], data['footer_hash'], addon)

        return data
class EditThemeOwnerForm(happyforms.Form):
    """Form to transfer ownership of a theme to another user (by email)."""
    owner = UserEmailField()

    def __init__(self, *args, **kw):
        self.instance = kw.pop('instance')
        super(EditThemeOwnerForm, self).__init__(*args, **kw)
        addon = self.instance
        self.fields['owner'].widget.attrs['placeholder'] = _(
            "Enter a new author's email address")
        try:
            self.instance_addonuser = addon.addonuser_set.all()[0]
            self.initial['owner'] = self.instance_addonuser.user.email
        except IndexError:
            # If there was never an author before, then don't require one now.
            self.instance_addonuser = None
            self.fields['owner'].required = False

    def save(self):
        """Apply the ownership change to Persona and AddonUser records."""
        data = self.cleaned_data

        if data.get('owner'):
            changed = (not self.instance_addonuser or
                       self.instance_addonuser != data['owner'])
            if changed:
                # Update Persona-specific data.
                persona = self.instance.persona
                persona.author = data['owner'].username
                persona.display_username = data['owner'].name
                persona.save()

            if not self.instance_addonuser:
                # If there previously never another owner, create one.
                self.instance.addonuser_set.create(user=data['owner'],
                                                   role=amo.AUTHOR_ROLE_OWNER)
            elif self.instance_addonuser != data['owner']:
                # If the owner has changed, update the `AddonUser` object.
                self.instance_addonuser.user = data['owner']
                self.instance_addonuser.role = amo.AUTHOR_ROLE_OWNER
                self.instance_addonuser.save()

            self.instance.modified = datetime.now()
            self.instance.save()

        return data
class ContributionForm(happyforms.Form):
    """Form for a monetary contribution; minimum amount is one cent."""
    amount = forms.DecimalField(required=True, min_value=Decimal('0.01'))
| 38.123119
| 79
| 0.611885
|
from datetime import datetime
from decimal import Decimal
import os
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.forms.formsets import formset_factory
import commonware.log
import happyforms
from quieter_formset.formset import BaseFormSet
from tower import ugettext as _, ugettext_lazy as _lazy, ungettext as ngettext
from access import acl
import amo
import captcha.fields
from amo.fields import ColorField
from amo.urlresolvers import reverse
from amo.utils import slug_validator, slugify, sorted_groupby, remove_icons
from addons.models import (Addon, AddonCategory, BlacklistedSlug, Category,
Persona)
from addons.tasks import save_theme, save_theme_reupload
from addons.utils import reverse_name_lookup
from addons.widgets import IconWidgetRenderer, CategoriesSelectMultiple
from devhub import tasks as devhub_tasks
from tags.models import Tag
from translations import LOCALES
from translations.fields import TransField, TransTextarea
from translations.forms import TranslationFormMixin
from translations.models import Translation
from translations.utils import transfield_changed
from translations.widgets import TranslationTextInput
from users.models import UserEmailField
from versions.models import Version
log = commonware.log.getLogger('z.addons')
def clean_name(name, instance=None):
    """Validate that *name* is not already used by a different add-on."""
    if not instance:
        log.debug('clean_name called without an instance: %s' % name)
    id = reverse_name_lookup(name)
    # Reject when the looked-up id belongs to another add-on (or when no
    # instance was supplied at all).
    if id and (not instance or id != instance.id):
        raise forms.ValidationError(_('This name is already in use. Please '
                                      'choose another.'))
    return name
def clean_slug(slug, instance):
    """Validate *slug*: well-formed, not taken, and not blacklisted."""
    slug_validator(slug, lower=False)
    # Only check uniqueness/blacklist when the slug actually changed.
    if slug != instance.slug:
        if Addon.objects.filter(slug=slug).exists():
            raise forms.ValidationError(
                _('This slug is already in use. Please choose another.'))
        if BlacklistedSlug.blocked(slug):
            raise forms.ValidationError(
                _('The slug cannot be "%s". Please choose another.' % slug))
    return slug
def clean_tags(request, tags):
    """Validate a comma-separated tag string and return the cleaned tags.

    Checks, in order: blacklisted tags, restricted (reserved) tags for
    non-admin users, maximum tag count, and per-tag length bounds.
    Returns a set of slugified tag strings.
    """
    target = [slugify(t, spaces=True, lower=True) for t in tags.split(',')]
    # Drop empty entries (e.g. from trailing commas) and dedupe.
    target = set(filter(None, target))
    min_len = amo.MIN_TAG_LENGTH
    max_len = Tag._meta.get_field('tag_text').max_length
    max_tags = amo.MAX_TAGS
    total = len(target)
    blacklisted = (Tag.objects.values_list('tag_text', flat=True)
                   .filter(tag_text__in=target, blacklisted=True))
    if blacklisted:
        # L10n: {0} is a single tag or a comma-separated list of tags.
        msg = ngettext('Invalid tag: {0}', 'Invalid tags: {0}',
                       len(blacklisted)).format(', '.join(blacklisted))
        raise forms.ValidationError(msg)
    restricted = (Tag.objects.values_list('tag_text', flat=True)
                  .filter(tag_text__in=target, restricted=True))
    if not acl.action_allowed(request, 'Addons', 'Edit'):
        # Non-admins may not use restricted tags at all.
        if restricted:
            # L10n: {0} is a single tag or a comma-separated list of tags.
            msg = ngettext('"{0}" is a reserved tag and cannot be used.',
                           '"{0}" are reserved tags and cannot be used.',
                           len(restricted)).format('", "'.join(restricted))
            raise forms.ValidationError(msg)
    else:
        # Admin's restricted tags don't count towards the limit.
        total = len(target - set(restricted))
    if total > max_tags:
        num = total - max_tags
        msg = ngettext('You have {0} too many tags.',
                       'You have {0} too many tags.', num).format(num)
        raise forms.ValidationError(msg)
    if any(t for t in target if len(t) > max_len):
        raise forms.ValidationError(
            _('All tags must be %s characters or less after invalid characters'
              ' are removed.' % max_len))
    if any(t for t in target if len(t) < min_len):
        msg = ngettext("All tags must be at least {0} character.",
                       "All tags must be at least {0} characters.",
                       min_len).format(min_len)
        raise forms.ValidationError(msg)
    return target
class AddonFormBase(TranslationFormMixin, happyforms.ModelForm):
    """Common base for add-on edit forms.

    Pops ``request`` off the kwargs so subclasses can use it for
    permission-aware validation (e.g. restricted tags).
    """

    def __init__(self, *args, **kw):
        self.request = kw.pop('request')
        super(AddonFormBase, self).__init__(*args, **kw)

    class Meta:
        # BUG FIX: this was ``models = Addon`` (typo).  Django's ModelForm
        # machinery reads ``Meta.model``, so the attribute was silently
        # ignored and the base form had no model class at all.
        model = Addon
        fields = ('name', 'slug', 'summary', 'tags')

    def clean_slug(self):
        return clean_slug(self.cleaned_data['slug'], self.instance)

    def clean_tags(self):
        return clean_tags(self.request, self.cleaned_data['tags'])

    def get_tags(self, addon):
        """Return the addon's tag strings visible to the current user:
        admins see restricted tags, everyone else does not."""
        if acl.action_allowed(self.request, 'Addons', 'Edit'):
            return list(addon.tags.values_list('tag_text', flat=True))
        else:
            return list(addon.tags.filter(restricted=False)
                        .values_list('tag_text', flat=True))
class AddonFormBasic(AddonFormBase):
    """Edit form for an add-on's name, slug, summary and tags."""
    name = TransField(max_length=50)
    slug = forms.CharField(max_length=30)
    summary = TransField(widget=TransTextarea(attrs={'rows': 4}),
                         max_length=250)
    tags = forms.CharField(required=False)

    class Meta:
        model = Addon
        fields = ('name', 'slug', 'summary', 'tags')

    def __init__(self, *args, **kw):
        super(AddonFormBasic, self).__init__(*args, **kw)
        # Pre-fill the tags field with the tags the current user may see.
        self.fields['tags'].initial = ', '.join(self.get_tags(self.instance))

        # Do not simply append validators, as validators will persist between
        # instances.
        def validate_name(name):
            return clean_name(name, self.instance)

        name_validators = list(self.fields['name'].validators)
        name_validators.append(validate_name)
        self.fields['name'].validators = name_validators

    def save(self, addon, commit=False):
        """Persist the form, diffing tags against the visible old set."""
        tags_new = self.cleaned_data['tags']
        tags_old = [slugify(t, spaces=True) for t in self.get_tags(addon)]
        # Add new tags.
        for t in set(tags_new) - set(tags_old):
            Tag(tag_text=t).save_tag(addon)
        # Remove old tags.
        for t in set(tags_old) - set(tags_new):
            Tag(tag_text=t).remove_tag(addon)
        # We ignore `commit`, since we need it to be `False` so we can save
        # the ManyToMany fields on our own.
        addonform = super(AddonFormBasic, self).save(commit=False)
        addonform.save()
        return addonform
class AppFormBasic(AddonFormBasic):
    """Basic edit form for apps: identical to add-ons but allows a
    longer name (128 chars instead of 50)."""
    name = TransField(max_length=128)
class CategoryForm(forms.Form):
    """Per-application category picker for an add-on.

    ``application`` is a hidden field identifying which app's categories
    this form edits; ``BaseCategoryFormSet`` instantiates one of these per
    compatible app.
    """
    application = forms.TypedChoiceField(amo.APPS_CHOICES, coerce=int,
                                         widget=forms.HiddenInput,
                                         required=False)
    categories = forms.ModelMultipleChoiceField(
        queryset=Category.objects.all(), widget=CategoriesSelectMultiple)

    def save(self, addon):
        """Diff the selected categories against the addon's current ones
        for this application and add/remove AddonCategory rows."""
        application = self.cleaned_data.get('application')
        categories_new = self.cleaned_data['categories']
        # Pick the category list matching this form's application (or the
        # app-less entry when no application is set).
        categories_old = [cats for app, cats in addon.app_categories if
                          (app and application and app.id == application)
                          or (not app and not application)]
        if categories_old:
            categories_old = categories_old[0]
        # Add new categories.
        for c in set(categories_new) - set(categories_old):
            AddonCategory(addon=addon, category=c).save()
        # Remove old categories.
        for c in set(categories_old) - set(categories_new):
            AddonCategory.objects.filter(addon=addon, category=c).delete()

    def clean_categories(self):
        """Enforce the category count limit, the featured-addon freeze and
        the 'miscellaneous is exclusive' rule."""
        categories = self.cleaned_data['categories']
        total = categories.count()
        max_cat = amo.MAX_CATEGORIES
        # ``disabled`` is set by BaseCategoryFormSet when the add-on is
        # featured for this app and the user is not an admin.
        if getattr(self, 'disabled', False) and total:
            raise forms.ValidationError(
                _('Categories cannot be changed while your add-on is featured '
                  'for this application.'))
        if total > max_cat:
            # L10n: {0} is the number of categories.
            raise forms.ValidationError(ngettext(
                'You can have only {0} category.',
                'You can have only {0} categories.',
                max_cat).format(max_cat))
        has_misc = filter(lambda x: x.misc, categories)
        if has_misc and total > 1:
            raise forms.ValidationError(
                _('The miscellaneous category cannot be combined with '
                  'additional categories.'))
        return categories
class BaseCategoryFormSet(BaseFormSet):
    """Formset that builds one CategoryForm per compatible application,
    seeding each form's choices and initial selection from the add-on."""

    def __init__(self, *args, **kw):
        self.addon = kw.pop('addon')
        self.request = kw.pop('request', None)
        super(BaseCategoryFormSet, self).__init__(*args, **kw)
        self.initial = []
        apps = sorted(self.addon.compatible_apps.keys(),
                      key=lambda x: x.id)
        # Drop any apps that don't have appropriate categories.
        qs = Category.objects.filter(type=self.addon.type)
        app_cats = dict((k, list(v)) for k, v in
                        sorted_groupby(qs, 'application'))
        # Iterate a copy since we mutate ``apps`` while looping.
        for app in list(apps):
            if app and not app_cats.get(app.id):
                apps.remove(app)
        if not app_cats:
            apps = []
        for app in apps:
            cats = dict(self.addon.app_categories).get(app, [])
            self.initial.append({'categories': [c.id for c in cats]})
        # Wire each form to its app: choices, request, and (for non-admins)
        # the featured-addon lock that CategoryForm.clean_categories checks.
        for app, form in zip(apps, self.forms):
            key = app.id if app else None
            form.request = self.request
            form.initial['application'] = key
            form.app = app
            cats = sorted(app_cats[key], key=lambda x: x.name)
            form.fields['categories'].choices = [(c.id, c.name) for c in cats]
            if not acl.action_allowed(self.request, 'Addons', 'Edit'):
                form.disabled = (app and self.addon.is_featured(app))

    def save(self):
        # Delegate to each per-app form.
        for f in self.forms:
            f.save(self.addon)
# One CategoryForm per compatible application; extra=0 because
# BaseCategoryFormSet constructs all initial data itself.
CategoryFormSet = formset_factory(form=CategoryForm,
                                  formset=BaseCategoryFormSet, extra=0)
def icons():
    """Build the icon-picker choices: the two upload content types, the
    default icon, plus every built-in icon found in the default icon
    directory (keyed off its 32px variant)."""
    choices = [('image/jpeg', 'jpeg'), ('image/png', 'png'), ('', 'default')]
    _dirs, files = storage.listdir(settings.ADDON_ICONS_DEFAULT_PATH)
    for fname in files:
        # One entry per icon set: use the 32px file as the canonical one
        # and skip the generic default icon.
        if '32' in fname and 'default' not in fname:
            icon_name = fname.split('-')[0]
            choices.append(('icon/%s' % icon_name, icon_name))
    return choices
class AddonFormMedia(AddonFormBase):
    """Edit form for an add-on's icon (built-in choice or uploaded file)."""
    icon_type = forms.CharField(widget=forms.RadioSelect(
        renderer=IconWidgetRenderer, choices=[]), required=False)
    # Hash referencing a previously uploaded temp icon file.
    icon_upload_hash = forms.CharField(required=False)

    class Meta:
        model = Addon
        fields = ('icon_upload_hash', 'icon_type')

    def __init__(self, *args, **kwargs):
        super(AddonFormMedia, self).__init__(*args, **kwargs)

        # Add icons here so we only read the directory when
        # AddonFormMedia is actually being used.
        self.fields['icon_type'].widget.choices = icons()

    def save(self, addon, commit=True):
        """Persist the form; if a new icon was uploaded, replace the old
        icon files and kick off async resizing."""
        if self.cleaned_data['icon_upload_hash']:
            upload_hash = self.cleaned_data['icon_upload_hash']
            upload_path = os.path.join(settings.TMP_PATH, 'icon', upload_hash)

            dirname = addon.get_icon_dir()
            destination = os.path.join(dirname, '%s' % addon.id)

            # Remove the existing icon variants before resizing the new one.
            remove_icons(destination)
            devhub_tasks.resize_icon.delay(upload_path, destination,
                                           amo.ADDON_ICON_SIZES,
                                           set_modified_on=[addon])

        return super(AddonFormMedia, self).save(commit)
class AddonFormDetails(AddonFormBase):
    """Edit form for description, default locale and homepage."""
    default_locale = forms.TypedChoiceField(choices=LOCALES)

    class Meta:
        model = Addon
        fields = ('description', 'default_locale', 'homepage')

    def clean(self):
        """When the default locale changes, require that name, summary and
        description already have translations in the new locale."""
        required = 'name', 'summary', 'description'
        data = self.cleaned_data
        if not self.errors and 'default_locale' in self.changed_data:
            # Translation ids of the required fields on the instance.
            fields = dict((k, getattr(self.instance, k + '_id'))
                          for k in required)
            locale = self.cleaned_data['default_locale']
            ids = filter(None, fields.values())
            qs = (Translation.objects.filter(locale=locale, id__in=ids,
                                             localized_string__isnull=False)
                  .values_list('id', flat=True))
            missing = [k for k, v in fields.items() if v not in qs]
            # A description entered in this very submission for the target
            # locale also counts as present.
            if 'description' in missing and locale in data['description']:
                missing.remove('description')
            if missing:
                raise forms.ValidationError(
                    _('Before changing your default locale you must have a '
                      'name, summary, and description in that locale. '
                      'You are missing %s.') % ', '.join(map(repr, missing)))
        return data
class AddonFormSupport(AddonFormBase):
    """Edit form for an add-on's support email and support URL."""
    support_url = TransField.adapt(forms.URLField)(required=False)
    support_email = TransField.adapt(forms.EmailField)(required=False)

    class Meta:
        model = Addon
        fields = ('support_email', 'support_url')

    # The redundant ``__init__`` override (it only called super()) has been
    # removed; construction behavior is unchanged.

    def save(self, addon, commit=True):
        # ``addon`` is accepted for interface symmetry with the sibling
        # add-on forms; the ModelForm's bound instance is what gets saved.
        return super(AddonFormSupport, self).save(commit)
class AddonFormTechnical(AddonFormBase):
    """Edit form for developer-facing technical flags and comments."""
    developer_comments = TransField(widget=TransTextarea, required=False)

    class Meta:
        model = Addon
        fields = ('developer_comments', 'view_source', 'site_specific',
                  'external_software', 'auto_repackage', 'public_stats',
                  'whiteboard')
class AddonForm(happyforms.ModelForm):
    """Broad edit form covering most translatable add-on fields."""
    name = forms.CharField(widget=TranslationTextInput,)
    homepage = forms.CharField(widget=TranslationTextInput, required=False)
    eula = forms.CharField(widget=TranslationTextInput,)
    description = forms.CharField(widget=TranslationTextInput,)
    developer_comments = forms.CharField(widget=TranslationTextInput,)
    privacy_policy = forms.CharField(widget=TranslationTextInput,)
    the_future = forms.CharField(widget=TranslationTextInput,)
    the_reason = forms.CharField(widget=TranslationTextInput,)
    support_email = forms.CharField(widget=TranslationTextInput,)

    class Meta:
        model = Addon
        fields = ('name', 'homepage', 'default_locale', 'support_email',
                  'support_url', 'description', 'summary',
                  'developer_comments', 'eula', 'privacy_policy', 'the_reason',
                  'the_future', 'view_source', 'prerelease', 'site_specific',)

        exclude = ('status', )

    def clean_name(self):
        # BUG FIX: pass the bound instance so an *unchanged* name is not
        # rejected as a duplicate of itself when editing an existing add-on.
        return clean_name(self.cleaned_data['name'], self.instance)

    def save(self):
        desc = self.data.get('description')
        # Log a dedicated entry when the description text actually changed.
        if desc and desc != unicode(self.instance.description):
            amo.log(amo.LOG.EDIT_DESCRIPTIONS, self.instance)
        if self.changed_data:
            amo.log(amo.LOG.EDIT_PROPERTIES, self.instance)
        # BUG FIX: return the saved instance instead of dropping it.
        return super(AddonForm, self).save()
class AbuseForm(happyforms.Form):
    """Abuse-report form; shows a captcha only for anonymous users."""
    recaptcha = captcha.fields.ReCaptchaField(label='')
    text = forms.CharField(required=True,
                           label='',
                           widget=forms.Textarea())

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request')
        super(AbuseForm, self).__init__(*args, **kwargs)

        # Drop the captcha for logged-in users or when no reCAPTCHA key is
        # configured (e.g. local development).
        if (not self.request.user.is_anonymous() or
                not settings.RECAPTCHA_PRIVATE_KEY):
            del self.fields['recaptcha']
class ThemeFormBase(AddonFormBase):
    """Shared setup for theme (Persona) forms: category choices and the
    upload widgets for the header/footer images."""

    def __init__(self, *args, **kwargs):
        super(ThemeFormBase, self).__init__(*args, **kwargs)
        cats = Category.objects.filter(type=amo.ADDON_PERSONA, weight__gte=0)
        cats = sorted(cats, key=lambda x: x.name)
        self.fields['category'].choices = [(c.id, c.name) for c in cats]

        # Point the file widgets at the async persona-upload endpoint and
        # restrict accepted image types client-side.
        for field in ('header', 'footer'):
            self.fields[field].widget.attrs = {
                'data-upload-url': reverse('devhub.personas.upload_persona',
                                           args=['persona_%s' % field]),
                'data-allowed-types': 'image/jpeg|image/png'
            }

    def clean_name(self):
        # NOTE(review): no instance is passed here, so this enforces global
        # name uniqueness — appropriate for creation forms like ThemeForm.
        return clean_name(self.cleaned_data['name'])

    def clean_slug(self):
        return clean_slug(self.cleaned_data['slug'], self.instance)
class ThemeForm(ThemeFormBase):
    """Submission form that creates a new theme (Persona) add-on."""
    name = forms.CharField(max_length=50)
    slug = forms.CharField(max_length=30)
    category = forms.ModelChoiceField(queryset=Category.objects.all(),
                                      widget=forms.widgets.RadioSelect)
    description = forms.CharField(widget=forms.Textarea(attrs={'rows': 4}),
                                  max_length=500, required=False)
    tags = forms.CharField(required=False)

    license = forms.TypedChoiceField(
        choices=amo.PERSONA_LICENSES_CHOICES,
        coerce=int, empty_value=None, widget=forms.HiddenInput,
        error_messages={'required': _lazy(u'A license must be selected.')})
    header = forms.FileField(required=False)
    header_hash = forms.CharField(widget=forms.HiddenInput)
    footer = forms.FileField(required=False)
    footer_hash = forms.CharField(widget=forms.HiddenInput, required=False)
    # Native color inputs cannot handle all the formats we accept
    # and empty values, thus force the JavaScript color picker for now.
    # See bugs 1005206 and 1003575.
    accentcolor = ColorField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'color-picker'}),
    )
    textcolor = ColorField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'color-picker'}),
    )
    agreed = forms.BooleanField()
    # This lets us POST the data URIs of the unsaved previews so we can still
    # show them if there were form errors. It's really clever.
    unsaved_data = forms.CharField(required=False, widget=forms.HiddenInput)

    class Meta:
        model = Addon
        fields = ('name', 'slug', 'description', 'tags')

    def save(self, commit=False):
        """Create the Addon, its Persona row, initial version, author and
        category, then queue the async theme-image processing task."""
        data = self.cleaned_data
        addon = Addon.objects.create(
            slug=data.get('slug'),
            status=amo.STATUS_PENDING, type=amo.ADDON_PERSONA)
        addon.name = {'en-US': data['name']}
        if data.get('description'):
            addon.description = data['description']
        # Themes start with a placeholder version '0'.
        addon._current_version = Version.objects.create(addon=addon,
                                                        version='0')
        addon.save()

        # Create Persona instance.
        p = Persona()
        p.persona_id = 0
        p.addon = addon
        p.header = 'header.png'
        if data['footer_hash']:
            p.footer = 'footer.png'
        if data['accentcolor']:
            # Colors are stored without the leading '#'.
            p.accentcolor = data['accentcolor'].lstrip('#')
        if data['textcolor']:
            p.textcolor = data['textcolor'].lstrip('#')
        p.license = data['license']
        p.submit = datetime.now()
        user = self.request.amo_user
        p.author = user.username
        p.display_username = user.name
        p.save()

        # Save header, footer, and preview images.
        save_theme.delay(data['header_hash'], data['footer_hash'], addon)

        # Set the current submitter as the theme owner.
        addon.addonuser_set.create(user=user, role=amo.AUTHOR_ROLE_OWNER)

        # Save tags.
        for t in data['tags']:
            Tag(tag_text=t).save_tag(addon)

        # Save categories.
        AddonCategory(addon=addon, category=data['category']).save()

        return addon
class EditThemeForm(AddonFormBase):
    """Edit form for an existing theme (Persona)."""
    name = TransField(max_length=50, label=_lazy('Give Your Theme a Name.'))
    slug = forms.CharField(max_length=30)
    category = forms.ModelChoiceField(queryset=Category.objects.all(),
                                      widget=forms.widgets.RadioSelect)
    description = TransField(
        widget=TransTextarea(attrs={'rows': 4}),
        max_length=500, required=False, label=_lazy('Describe your Theme.'))
    tags = forms.CharField(required=False)
    accentcolor = ColorField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'color-picker'}),
    )
    textcolor = ColorField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'color-picker'}),
    )
    license = forms.TypedChoiceField(
        choices=amo.PERSONA_LICENSES_CHOICES, coerce=int, empty_value=None,
        widget=forms.HiddenInput,
        error_messages={'required': _lazy(u'A license must be selected.')})

    # Theme re-upload.
    header = forms.FileField(required=False)
    header_hash = forms.CharField(widget=forms.HiddenInput, required=False)
    footer = forms.FileField(required=False)
    footer_hash = forms.CharField(widget=forms.HiddenInput, required=False)

    class Meta:
        model = Addon
        fields = ('name', 'slug', 'description', 'tags')

    def __init__(self, *args, **kw):
        self.request = kw.pop('request')
        # NOTE: deliberately skips AddonFormBase.__init__ (which would pop
        # 'request' again) by calling its parent instead.
        super(AddonFormBase, self).__init__(*args, **kw)

        addon = Addon.objects.no_cache().get(id=self.instance.id)
        persona = addon.persona

        # Do not simply append validators, as validators will persist between
        # instances.
        self.fields['name'].validators = list(self.fields['name'].validators)
        self.fields['name'].validators.append(lambda x: clean_name(x, addon))

        # Allow localized name and description to be shown/edited: seed one
        # initial value per existing translation locale.
        for trans in Translation.objects.filter(id=self.initial['name']):
            self.initial['name_' + trans.locale.lower()] = trans
        for trans in Translation.objects.filter(
                id=self.initial['description']):
            self.initial['description_' + trans.locale.lower()] = trans

        self.old_tags = self.get_tags(addon)
        self.initial['tags'] = ', '.join(self.old_tags)
        # Colors are stored without '#'; the widget wants it back.
        if persona.accentcolor:
            self.initial['accentcolor'] = '#' + persona.accentcolor
        if persona.textcolor:
            self.initial['textcolor'] = '#' + persona.textcolor
        self.initial['license'] = persona.license

        cats = sorted(Category.objects.filter(type=amo.ADDON_PERSONA,
                                              weight__gte=0),
                      key=lambda x: x.name)
        self.fields['category'].choices = [(c.id, c.name) for c in cats]
        try:
            self.initial['category'] = addon.categories.values_list(
                'id', flat=True)[0]
        except IndexError:
            # Theme has no category yet; leave the field blank.
            pass

        for field in ('header', 'footer'):
            self.fields[field].widget.attrs = {
                'data-upload-url': reverse('devhub.personas.reupload_persona',
                                           args=[addon.slug,
                                                 'persona_%s' % field]),
                'data-allowed-types': 'image/jpeg|image/png'
            }

    def save(self):
        """Persist persona fields, addon fields, tags, category, and queue
        image re-processing if new header/footer files were uploaded."""
        addon = self.instance
        persona = addon.persona
        data = self.cleaned_data

        # Update Persona-specific data.
        persona_data = {
            'license': int(data['license']),
            'accentcolor': data['accentcolor'].lstrip('#'),
            'textcolor': data['textcolor'].lstrip('#'),
            'author': self.request.amo_user.username,
            'display_username': self.request.amo_user.name
        }
        changed = False
        for k, v in persona_data.iteritems():
            if v != getattr(persona, k):
                changed = True
                setattr(persona, k, v)
        if changed:
            persona.save()

        if self.changed_data:
            amo.log(amo.LOG.EDIT_PROPERTIES, addon)
        self.instance.modified = datetime.now()

        # Update Addon-specific data (only hit the DB when something that
        # lives on the Addon row actually changed).
        changed = (
            set(self.old_tags) != data['tags'] or self.initial['slug'] != data['slug'] or transfield_changed('description', self.initial, data) or
            transfield_changed('name', self.initial, data))
        if changed:
            # Only save if addon data changed.
            super(EditThemeForm, self).save()

        # Update tags.
        tags_new = data['tags']
        tags_old = [slugify(t, spaces=True) for t in self.old_tags]
        # Add new tags.
        for t in set(tags_new) - set(tags_old):
            Tag(tag_text=t).save_tag(addon)
        # Remove old tags.
        for t in set(tags_old) - set(tags_new):
            Tag(tag_text=t).remove_tag(addon)

        # Update category.
        if data['category'].id != self.initial['category']:
            addon_cat = addon.addoncategory_set.all()[0]
            addon_cat.category = data['category']
            addon_cat.save()

        # Theme reupload (only while not pending review).
        if not addon.is_pending():
            if data['header_hash'] or data['footer_hash']:
                save_theme_reupload.delay(
                    data['header_hash'], data['footer_hash'], addon)

        return data
class EditThemeOwnerForm(happyforms.Form):
    """Form for transferring ownership of a theme to another user."""
    owner = UserEmailField()

    def __init__(self, *args, **kw):
        self.instance = kw.pop('instance')
        super(EditThemeOwnerForm, self).__init__(*args, **kw)

        addon = self.instance

        self.fields['owner'].widget.attrs['placeholder'] = _(
            "Enter a new author's email address")
        try:
            # Themes have at most one author on this form.
            self.instance_addonuser = addon.addonuser_set.all()[0]
            self.initial['owner'] = self.instance_addonuser.user.email
        except IndexError:
            # If there was never an author before, then don't require one now.
            self.instance_addonuser = None
            self.fields['owner'].required = False

    def save(self):
        data = self.cleaned_data

        if data.get('owner'):
            # NOTE(review): this compares an AddonUser row to a user object
            # (``data['owner']``), which looks like it is always unequal —
            # presumably ``self.instance_addonuser.user`` was intended.
            # Verify before changing; left as-is to preserve behavior.
            changed = (not self.instance_addonuser or
                       self.instance_addonuser != data['owner'])
            if changed:
                # Update Persona-specific data.
                persona = self.instance.persona
                persona.author = data['owner'].username
                persona.display_username = data['owner'].name
                persona.save()

            if not self.instance_addonuser:
                # If there previously never another owner, create one.
                self.instance.addonuser_set.create(user=data['owner'],
                                                   role=amo.AUTHOR_ROLE_OWNER)
            elif self.instance_addonuser != data['owner']:
                # Update the owner row in place rather than creating a new
                # one, keeping a single author record.
                self.instance_addonuser.user = data['owner']
                self.instance_addonuser.role = amo.AUTHOR_ROLE_OWNER
                self.instance_addonuser.save()

            self.instance.modified = datetime.now()
            self.instance.save()

        return data
class ContributionForm(happyforms.Form):
    """Monetary contribution form; amounts below one cent are rejected."""
    amount = forms.DecimalField(
        required=True,
        min_value=Decimal('0.01'),
    )
| true
| true
|
f70921c6af89f557c4ad7ff0343c8dc6ea00a385
| 1,445
|
py
|
Python
|
pdata/dirstructure.py
|
semeniuta/pdata
|
5eb6ece8e2fb1856bc87ed76290240cd901f7654
|
[
"BSD-3-Clause"
] | null | null | null |
pdata/dirstructure.py
|
semeniuta/pdata
|
5eb6ece8e2fb1856bc87ed76290240cd901f7654
|
[
"BSD-3-Clause"
] | null | null | null |
pdata/dirstructure.py
|
semeniuta/pdata
|
5eb6ece8e2fb1856bc87ed76290240cd901f7654
|
[
"BSD-3-Clause"
] | null | null | null |
import os
from glob import glob
import pandas as pd
def get_list_of_full_child_dirs(d):
    """
    For a directory d (full path),
    return a list of its subdirectories
    in a full path form.
    """
    full_paths = (os.path.join(d, entry) for entry in os.listdir(d))
    return [p for p in full_paths if os.path.isdir(p)]
def split_full_path(full_path, base_dir):
    """
    Given a full path, return:
    - relative_dir: the part of the path that does not
      include the base directory and the basename
    - basename

    BUG FIX: the original implementation used string ``split`` on the raw
    path and the basename, which breaks when the basename also occurs
    earlier in the path (e.g. '/base/abc/abc') or when ``base_dir`` carries
    a trailing slash.  ``os.path.relpath``/``dirname`` handle both cases.
    """
    fname = os.path.basename(full_path)
    relative_dir = os.path.dirname(os.path.relpath(full_path, base_dir))
    return relative_dir, fname
def gather_files(base_dir, file_mask):
    """
    Walk the directory base_dir using os.walk
    and gather files that match file_mask (e.g. '*.jpg').
    Return the result as a Pandas dataframe with columns
    'relative_dir' and 'basename'.
    """
    rows = []
    for dir_name, _subdirs, files in os.walk(base_dir):
        if not files:
            continue
        matches = glob(os.path.join(dir_name, file_mask))
        rows.extend(split_full_path(match, base_dir) for match in matches)
    return pd.DataFrame(rows, columns=['relative_dir', 'basename'])
| 24.491525
| 78
| 0.657439
|
import os
from glob import glob
import pandas as pd
def get_list_of_full_child_dirs(d):
    """Return the immediate subdirectories of directory *d* as full paths."""
    children = (os.path.join(d, child) for child in os.listdir(d))
    dirs = filter(os.path.isdir, children)
    return list(dirs)
def split_full_path(full_path, base_dir):
    """Split *full_path* into (relative_dir, basename) with respect to
    *base_dir*.

    NOTE(review): string-based splitting is fragile — it misbehaves when
    the basename also occurs earlier in the path or when base_dir has a
    trailing slash; consider os.path.relpath/dirname.
    """
    fname = os.path.basename(full_path)
    relative_path = full_path.split(base_dir)[-1]
    relative_dir = relative_path.split(fname)[0]
    relative_dir = relative_dir[1:-1]  # clip leading/trailing slashes
    return relative_dir, fname
def gather_files(base_dir, file_mask):
    """Walk *base_dir* and collect files matching glob *file_mask*
    (e.g. '*.jpg'), returning a DataFrame with columns
    'relative_dir' and 'basename'."""
    res_tuples = []
    for dir_name, subdirs, files in os.walk(base_dir):
        dir_has_files = len(files) > 0
        if dir_has_files:
            # Glob within this directory only; os.walk handles recursion.
            full_mask = os.path.join(dir_name, file_mask)
            mask_matches = glob(full_mask)
            res_tuples += [split_full_path(f, base_dir) for f in mask_matches]
    return pd.DataFrame(res_tuples, columns=['relative_dir', 'basename'])
| true
| true
|
f709225799582acc8b4fb03957fc54ab2aaada80
| 631
|
py
|
Python
|
problems/31/problem_31.py
|
r1cc4rdo/daily_coding_problem
|
6ac85309fad2f64231ac7ab94aa4158e18bdec40
|
[
"Unlicense"
] | 158
|
2018-01-25T06:33:30.000Z
|
2022-03-14T23:18:05.000Z
|
problems/31/problem_31.py
|
r1cc4rdo/daily_coding_problem
|
6ac85309fad2f64231ac7ab94aa4158e18bdec40
|
[
"Unlicense"
] | 9
|
2018-07-04T00:31:57.000Z
|
2020-05-16T21:02:30.000Z
|
problems/31/problem_31.py
|
r1cc4rdo/daily_coding_problem
|
6ac85309fad2f64231ac7ab94aa4158e18bdec40
|
[
"Unlicense"
] | 50
|
2018-06-22T16:48:44.000Z
|
2022-01-11T16:45:48.000Z
|
def coding_problem_31(s, t, debt=0):
    """
    Given two strings, compute the edit distance between them.
    The edit distance between two strings refers to the minimum number of character insertions, deletions, and
    substitutions required to change one string to the other.
    Example:
    >>> coding_problem_31("kitten", "sitting")  # k>>s, e>>i, +g
    3
    >>> coding_problem_31("kitten", "cat")  # k>>c, i>>a, -ten
    5
    >>> coding_problem_31("black", "white")
    5
    >>> coding_problem_31("top", "dog")
    2
    """
    # Wagner-Fischer dynamic programming with a single rolling row, so
    # memory is O(len(t)).  ``debt`` is kept for interface compatibility
    # (it was a cost accumulator for a recursive formulation) and is simply
    # added to the final distance; the default 0 leaves results unchanged.
    previous = list(range(len(t) + 1))
    for i, sc in enumerate(s, start=1):
        current = [i] + [0] * len(t)
        for j, tc in enumerate(t, start=1):
            current[j] = min(previous[j] + 1,               # delete from s
                             current[j - 1] + 1,            # insert into s
                             previous[j - 1] + (sc != tc))  # substitute
        previous = current
    return previous[-1] + debt
if __name__ == '__main__':
    # Run the embedded doctests when this module is executed directly.
    import doctest
    doctest.testmod(verbose=True)
| 26.291667
| 110
| 0.635499
|
def coding_problem_31(s, t, debt=0):
    """Return the Levenshtein edit distance between strings ``s`` and ``t``
    (insertions, deletions and substitutions each cost 1).

    The body was an unimplemented ``pass``; this is a Wagner-Fischer DP
    using one rolling row (O(len(t)) memory).  ``debt`` is kept for
    interface compatibility and added to the final distance (default 0).
    """
    previous = list(range(len(t) + 1))
    for i, sc in enumerate(s, start=1):
        current = [i] + [0] * len(t)
        for j, tc in enumerate(t, start=1):
            current[j] = min(previous[j] + 1,               # delete from s
                             current[j - 1] + 1,            # insert into s
                             previous[j - 1] + (sc != tc))  # substitute
        previous = current
    return previous[-1] + debt
if __name__ == '__main__':
    # Run the module's doctests when executed directly.
    import doctest
    doctest.testmod(verbose=True)
| true
| true
|
f709226fc4762f35761640ef37183013e4082969
| 1,221
|
py
|
Python
|
pytorch_ess/mean_elliptical_slice.py
|
wjmaddox/pytorch_ess
|
8e189666ce7381cf760666464384c634abbc4be2
|
[
"Apache-2.0"
] | 1
|
2022-02-19T12:37:06.000Z
|
2022-02-19T12:37:06.000Z
|
pytorch_ess/mean_elliptical_slice.py
|
wjmaddox/pytorch_ess
|
8e189666ce7381cf760666464384c634abbc4be2
|
[
"Apache-2.0"
] | null | null | null |
pytorch_ess/mean_elliptical_slice.py
|
wjmaddox/pytorch_ess
|
8e189666ce7381cf760666464384c634abbc4be2
|
[
"Apache-2.0"
] | null | null | null |
import torch
from .elliptical_slice import EllipticalSliceSampler
class MeanEllipticalSliceSampler(EllipticalSliceSampler):
    """Elliptical slice sampler for a Gaussian prior with non-zero mean.

    Works by de-meaning the problem (subtracting the prior mean from the
    initial state, the prior samples and inside the likelihood), running the
    standard zero-mean sampler, and adding the mean back to the output.
    """

    def __init__(self, f_init, dist, lnpdf, nsamples, pdf_params=()):
        """
        Implementation of elliptical slice sampling (Murray, Adams, & Mckay, 2010).

        f_init: initial value of `f`
        dist: multivariate normal prior to sample from
        lnpdf: likelihood function
        nsamples: number of samples
        pdf_params: extra positional arguments passed to lnpdf
        """
        mean_vector = dist.mean

        # Shift the likelihood so it sees the original (un-demeaned) input;
        # pdf_params are already bound into this closure, hence the parent
        # is called with pdf_params=().
        demeaned_lnpdf = lambda g: lnpdf(g + mean_vector, *pdf_params)

        demeaned_init = f_init - mean_vector

        samples = dist.sample(sample_shape = torch.Size((nsamples,))).transpose(-1, -2)
        demeaned_samples = samples - mean_vector.unsqueeze(1)

        super(MeanEllipticalSliceSampler, self).__init__(demeaned_init, demeaned_samples, demeaned_lnpdf, nsamples, pdf_params=())

        self.mean_vector = mean_vector

    def run(self):
        """Run the sampler and return (samples, log-likelihoods) with the
        prior mean restored on the samples."""
        self.f_sampled, self.ell = super().run()

        #add means back into f_sampled
        self.f_sampled = self.f_sampled + self.mean_vector.unsqueeze(1)

        return self.f_sampled, self.ell
| 34.885714
| 130
| 0.684685
|
import torch
from .elliptical_slice import EllipticalSliceSampler
class MeanEllipticalSliceSampler(EllipticalSliceSampler):
    """Elliptical slice sampler for a Gaussian prior with non-zero mean:
    de-means the problem, delegates to the zero-mean parent sampler, and
    restores the mean on the returned samples."""

    def __init__(self, f_init, dist, lnpdf, nsamples, pdf_params=()):
        # f_init: initial state; dist: multivariate normal prior;
        # lnpdf: likelihood; nsamples: number of samples to draw.
        mean_vector = dist.mean
        # Likelihood closure re-adds the mean, so pdf_params are bound here
        # and the parent is called with pdf_params=().
        demeaned_lnpdf = lambda g: lnpdf(g + mean_vector, *pdf_params)
        demeaned_init = f_init - mean_vector
        samples = dist.sample(sample_shape = torch.Size((nsamples,))).transpose(-1, -2)
        demeaned_samples = samples - mean_vector.unsqueeze(1)
        super(MeanEllipticalSliceSampler, self).__init__(demeaned_init, demeaned_samples, demeaned_lnpdf, nsamples, pdf_params=())
        self.mean_vector = mean_vector

    def run(self):
        # Run the zero-mean sampler, then add the prior mean back in.
        self.f_sampled, self.ell = super().run()
        self.f_sampled = self.f_sampled + self.mean_vector.unsqueeze(1)
        return self.f_sampled, self.ell
| true
| true
|
f70922c43840ce448deb9296e93b5401d187395f
| 7,592
|
py
|
Python
|
src/spring-cloud/azext_spring_cloud/vendored_sdks/appplatform/v2022_05_01_preview/aio/operations/_service_operations.py
|
Sneezry/azure-cli-extensions
|
bd186fe31c8fbd8c8b945fb749349e7f243be532
|
[
"MIT"
] | null | null | null |
src/spring-cloud/azext_spring_cloud/vendored_sdks/appplatform/v2022_05_01_preview/aio/operations/_service_operations.py
|
Sneezry/azure-cli-extensions
|
bd186fe31c8fbd8c8b945fb749349e7f243be532
|
[
"MIT"
] | null | null | null |
src/spring-cloud/azext_spring_cloud/vendored_sdks/appplatform/v2022_05_01_preview/aio/operations/_service_operations.py
|
Sneezry/azure-cli-extensions
|
bd186fe31c8fbd8c8b945fb749349e7f243be532
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ServiceOperations:
    """ServiceOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.appplatform.v2022_05_01_preview.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def _registries_delete_initial(
        self,
        resource_group_name: str,
        service_name: str,
        **kwargs: Any
    ) -> None:
        # Sends the initial DELETE request of the long-running operation;
        # ``begin_registries_delete`` wraps this call with LRO polling.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2022-05-01-preview"
        accept = "application/json"

        # Construct URL
        url = self._registries_delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202/204 are all acceptable for an async delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _registries_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/default'}  # type: ignore

    async def begin_registries_delete(
        self,
        resource_group_name: str,
        service_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Disable the default Service Registry.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only fire the initial request when not resuming from a saved token.
        if cont_token is None:
            raw_result = await self._registries_delete_initial(
                resource_group_name=resource_group_name,
                service_name=service_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete returns no body; only the optional ``cls`` hook applies.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_registries_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/default'}  # type: ignore
| 49.620915
| 214
| 0.683351
|
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ServiceOperations:
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _registries_delete_initial(
self,
resource_group_name: str,
service_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2022-05-01-preview"
accept = "application/json"
url = self._registries_delete_initial.metadata['url'] path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_registries_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/default'}
async def begin_registries_delete(
self,
resource_group_name: str,
service_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) if cont_token is None:
raw_result = await self._registries_delete_initial(
resource_group_name=resource_group_name,
service_name=service_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_registries_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/serviceRegistries/default'}
| true
| true
|
f709233cf0c713f895035d7036aa596c63d20754
| 1,139
|
py
|
Python
|
dog_recognition/models/vgg.py
|
helloholmes/dog_detection_gluoncv
|
eff91aa43633adcc1339a8d1f31ed667fdae846f
|
[
"Apache-2.0"
] | 2
|
2018-08-11T13:55:41.000Z
|
2020-04-26T08:06:29.000Z
|
models/vgg.py
|
helloholmes/dog_detection_gluoncv
|
eff91aa43633adcc1339a8d1f31ed667fdae846f
|
[
"Apache-2.0"
] | null | null | null |
models/vgg.py
|
helloholmes/dog_detection_gluoncv
|
eff91aa43633adcc1339a8d1f31ed667fdae846f
|
[
"Apache-2.0"
] | null | null | null |
# coding:utf-8
'''
python 3.5
mxnet 1.3.0
gluoncv 0.3.0
visdom 0.1.7
gluonbook 0.6.9
auther: helloholmes
'''
import mxnet as mx
import numpy as np
import os
import time
import pickle
from mxnet import gluon
from mxnet import init
from mxnet import nd
from mxnet import autograd
from mxnet.gluon import nn
class VGG16(nn.HybridBlock):
    """ImageNet-pretrained VGG-16 feature extractor with a fresh Dense head.

    Intended for transfer learning (default 120 output classes).
    """
    # input size (b, 3, 224, 224)
    def __init__(self, num_classes=120, **kwargs):
        """Build the network; num_classes sizes the final Dense layer."""
        super(VGG16, self).__init__(**kwargs)
        # Download/reuse the pretrained VGG-16 and keep only its feature stack.
        model = gluon.model_zoo.vision.get_model('vgg16', pretrained=True)
        with self.name_scope():
            self.features = model.features
            self.output = nn.Dense(num_classes)
    def initialize(self, ctx=None):
        # NOTE(review): this shadows HybridBlock.initialize and ignores the
        # `ctx` argument, so parameters initialize on the default context only
        # — confirm that is intended.
        for param in self.collect_params().values():
            # `_data` is a private gluon attribute; non-None means the param
            # already holds (pretrained) values and must not be re-initialized.
            if param._data is not None:
                continue
            else:
                param.initialize()
    def hybrid_forward(self, F, x):
        # Feature extraction followed by the classification head.
        x = self.features(x)
        x = self.output(x)
        return x
if __name__ == '__main__':
    # Smoke test: push one random 224x224 RGB "image" through the network.
    # With the default num_classes=120, the printed shape should be (1, 120).
    m = VGG16()
    m.initialize()
    data = mx.nd.random.uniform(shape=(1, 3, 224, 224))
    out = m(data)
    print(out.shape)
| 24.234043
| 74
| 0.626866
|
import mxnet as mx
import numpy as np
import os
import time
import pickle
from mxnet import gluon
from mxnet import init
from mxnet import nd
from mxnet import autograd
from mxnet.gluon import nn
class VGG16(nn.HybridBlock):
def __init__(self, num_classes=120, **kwargs):
super(VGG16, self).__init__(**kwargs)
model = gluon.model_zoo.vision.get_model('vgg16', pretrained=True)
with self.name_scope():
self.features = model.features
self.output = nn.Dense(num_classes)
def initialize(self, ctx=None):
for param in self.collect_params().values():
if param._data is not None:
continue
else:
param.initialize()
def hybrid_forward(self, F, x):
x = self.features(x)
x = self.output(x)
return x
if __name__ == '__main__':
m = VGG16()
m.initialize()
data = mx.nd.random.uniform(shape=(1, 3, 224, 224))
out = m(data)
print(out.shape)
| true
| true
|
f709237a5a4a3e19ba965c764a9817e628ec93be
| 1,797
|
py
|
Python
|
python/lax_sod_data.py
|
kjetil-lye/lax_sod_shock_tube_machine_learning
|
a0e8600eba89737a03bdea3d82756a2a0ccf0259
|
[
"MIT"
] | null | null | null |
python/lax_sod_data.py
|
kjetil-lye/lax_sod_shock_tube_machine_learning
|
a0e8600eba89737a03bdea3d82756a2a0ccf0259
|
[
"MIT"
] | null | null | null |
python/lax_sod_data.py
|
kjetil-lye/lax_sod_shock_tube_machine_learning
|
a0e8600eba89737a03bdea3d82756a2a0ccf0259
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
def get_lax_sod_network():
    """Return the layer widths of the network used for the Lax-Sod study."""
    # Nine hidden-layer widths followed by the single scalar output neuron.
    layer_widths = [12, 12, 10, 12, 10, 12, 10, 10, 12, 1]
    return layer_widths
def get_lax_sod_data_inner():
    """Load the Sobol (QMC) sample points and averaged functional values.

    The repository root comes from the ``LAX_SOD_REPO_PATH`` environment
    variable (default ``../lax_sod_tube``). Returns the parameter matrix and
    a dict mapping functional name (q1..q3, EK1..EK3) to its value column.
    """
    repo_path = os.environ.get("LAX_SOD_REPO_PATH", "../lax_sod_tube")
    sobol_points = np.loadtxt(os.path.join(repo_path, "parameters/parameters_sobol_X.txt"))
    functionals = np.loadtxt(os.path.join(repo_path, "functionals/average_functionals_sobol_2048.txt"))
    names = [f'q{i+1}' for i in range(3)] + [f'EK{i+1}' for i in range(3)]
    # One column of the functionals matrix per named quantity, in order.
    per_functional = {name: functionals[:, column] for column, name in enumerate(names)}
    return sobol_points, per_functional
def get_lax_sod_data():
    """Return QMC points and values followed by MC points and values."""
    # QMC data is loaded first, matching the original evaluation order.
    qmc = get_lax_sod_data_inner()
    mc = get_lax_sod_data_mc_inner()
    return qmc[0], qmc[1], mc[0], mc[1]
def get_lax_sod_data_mc_inner():
    """Load the Monte Carlo sample points and averaged functional values.

    Mirrors :func:`get_lax_sod_data_inner` but reads the MC parameter and
    functional files from the same repository root.
    """
    repo_path = os.environ.get("LAX_SOD_REPO_PATH", "../lax_sod_tube")
    mc_points = np.loadtxt(os.path.join(repo_path, "parameters/parameters_mc_X.txt"))
    functionals = np.loadtxt(os.path.join(repo_path, "functionals/average_functionals_mc_2048.txt"))
    names = [f'q{i+1}' for i in range(3)] + [f'EK{i+1}' for i in range(3)]
    # One column of the functionals matrix per named quantity, in order.
    per_functional = {name: functionals[:, column] for column, name in enumerate(names)}
    return mc_points, per_functional
def get_lax_sod_data_mc():
    """Return MC points and values followed by QMC points and values."""
    # MC data is loaded first, matching the original evaluation order.
    mc = get_lax_sod_data_mc_inner()
    qmc = get_lax_sod_data_inner()
    return mc[0], mc[1], qmc[0], qmc[1]
def make_folders():
    """Create the output directories used by the plotting/reporting scripts.

    Uses ``os.makedirs(..., exist_ok=True)`` instead of the original
    check-then-create pair (``os.path.exists`` + ``os.mkdir``), which could
    raise ``FileExistsError`` when two runs raced between the check and the
    create. Idempotent: safe to call repeatedly.
    """
    for folder in ('img', 'img_tikz', 'tables', 'results'):
        os.makedirs(folder, exist_ok=True)
| 23.337662
| 98
| 0.674457
|
import os
import numpy as np
def get_lax_sod_network():
return [12, 12, 10, 12, 10, 12, 10, 10, 12,1]
def get_lax_sod_data_inner():
data_path = os.environ.get("LAX_SOD_REPO_PATH", "../lax_sod_tube")
qmc_points = np.loadtxt(os.path.join(data_path, "parameters/parameters_sobol_X.txt"))
forces = np.loadtxt(os.path.join(data_path, "functionals/average_functionals_sobol_2048.txt"))
data_per_func = {}
force_names = [*[f'q{k+1}' for k in range(3)],
*[f'EK{k+1}' for k in range(3)]]
for n, force_name in enumerate(force_names):
data_per_func[force_name] = forces[:, n]
return qmc_points, data_per_func
def get_lax_sod_data():
qmc_points, qmc_values = get_lax_sod_data_inner()
mc_params, mc_values = get_lax_sod_data_mc_inner()
return qmc_points, qmc_values, mc_params, mc_values
def get_lax_sod_data_mc_inner():
data_path = os.environ.get("LAX_SOD_REPO_PATH", "../lax_sod_tube")
mc_points = np.loadtxt(os.path.join(data_path, "parameters/parameters_mc_X.txt"))
forces = np.loadtxt(os.path.join(data_path, "functionals/average_functionals_mc_2048.txt"))
data_per_func = {}
force_names = [*[f'q{k+1}' for k in range(3)],
*[f'EK{k+1}' for k in range(3)]]
for n, force_name in enumerate(force_names):
data_per_func[force_name] = forces[:, n]
return mc_points, data_per_func
def get_lax_sod_data_mc():
mc_params, mc_values = get_lax_sod_data_mc_inner()
qmc_params, qmc_values = get_lax_sod_data_inner()
return mc_params, mc_values, qmc_params, qmc_values
def make_folders():
folders = ['img', 'img_tikz', 'tables', 'results']
for folder in folders:
if not os.path.exists(folder):
os.mkdir(folder)
| true
| true
|
f70923a1041886df98850f9ba4df0b8e849b83fe
| 1,795
|
py
|
Python
|
src/compiler/setuppaths.py
|
fnoeding/exoself
|
11dfceea12a9f6f8ed0018fd60e6de5f73b9fa35
|
[
"BSD-3-Clause"
] | 4
|
2015-12-18T10:36:38.000Z
|
2021-03-19T04:54:03.000Z
|
src/compiler/setuppaths.py
|
fnoeding/exoself
|
11dfceea12a9f6f8ed0018fd60e6de5f73b9fa35
|
[
"BSD-3-Clause"
] | null | null | null |
src/compiler/setuppaths.py
|
fnoeding/exoself
|
11dfceea12a9f6f8ed0018fd60e6de5f73b9fa35
|
[
"BSD-3-Clause"
] | null | null | null |
#
# The BSD License
#
# Copyright (c) 2008, Florian Noeding
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
# Neither the name of the of the author nor the names of its contributors may be
# used to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# just import this file to get all the paths setup up
import sys
import os

# Extend sys.path relative to the script directory (sys.path[0]): first the
# bundled third-party libraries, then the generated grammar package.
_script_dir = sys.path[0]
for _parts in (('..', '..', '3rdparty', 'pylibs'), ('grammar',)):
    sys.path.append(os.path.normpath(os.path.join(_script_dir, *_parts)))
| 46.025641
| 94
| 0.773816
|
import sys
import os
sys.path.append(os.path.normpath(os.path.join(sys.path[0], '..', '..', '3rdparty', 'pylibs')))
sys.path.append(os.path.normpath(os.path.join(sys.path[0], 'grammar')))
| true
| true
|
f70923eab3da1bedc87560d855a3d722ac2685a4
| 9,918
|
py
|
Python
|
kiauto/misc.py
|
jaessy77/KiAuto
|
517af0808f38bcf57b8ab584e130d2aad3834376
|
[
"Apache-2.0"
] | null | null | null |
kiauto/misc.py
|
jaessy77/KiAuto
|
517af0808f38bcf57b8ab584e130d2aad3834376
|
[
"Apache-2.0"
] | null | null | null |
kiauto/misc.py
|
jaessy77/KiAuto
|
517af0808f38bcf57b8ab584e130d2aad3834376
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2020-2021 Salvador E. Tropea
# Copyright (c) 2020-2021 Instituto Nacional de Tecnologïa Industrial
# License: Apache 2.0
# Project: KiAuto (formerly kicad-automation-scripts)
import os
import re
import json
import configparser
from contextlib import contextmanager
from sys import exit, path
# Default W,H for recording
REC_W = 1366
REC_H = 960
# Return error codes
# Positive values are ERC/DRC errors
NO_SCHEMATIC = 1
WRONG_ARGUMENTS = 2 # This is what argsparse uses
# NOTE(review): EESCHEMA_CFG_PRESENT (11) duplicates NO_PCBNEW_MODULE (11)
# below, so callers cannot tell these two exit codes apart — confirm intended.
EESCHEMA_CFG_PRESENT = 11
KICAD_CFG_PRESENT = 3
NO_PCB = 4
PCBNEW_CFG_PRESENT = 5
WRONG_LAYER_NAME = 6
WRONG_PCB_NAME = 7
WRONG_SCH_NAME = 8
PCBNEW_ERROR = 9
EESCHEMA_ERROR = 10
NO_PCBNEW_MODULE = 11
USER_HOTKEYS_PRESENT = 12
CORRUPTED_PCB = 13
# Seconds to wait for the pcbnew/eeschema window to be present
WAIT_START = 60
# Name for testing versions
NIGHTLY = 'nightly'
# Scale factor for the timeouts
TIME_OUT_MULT = 1.0
# KiCad version encoded as major*1e6 + minor*1e3 + patch (see Config.__init__)
KICAD_VERSION_5_99 = 5099000
# Stock KiCad data directories (stable and nightly packages)
KICAD_SHARE = '/usr/share/kicad/'
KICAD_NIGHTLY_SHARE = '/usr/share/kicad-nightly/'
@contextmanager
def hide_stderr():
    """Temporarily redirect OS-level stderr (fd 2) to the null device.

    Used to hide noise printed by KiCad bugs. The redirection happens at the
    file-descriptor level, so output written by native code is suppressed too.

    Fixes two defects in the original: stderr was never restored when the
    body raised (no try/finally), and the duplicated descriptor was never
    closed, leaking one fd per use.
    """
    newstderr = os.dup(2)                      # keep a handle on the real stderr
    devnull = os.open(os.devnull, os.O_WRONLY)
    os.dup2(devnull, 2)                        # fd 2 now points at the null device
    os.close(devnull)                          # fd 2 keeps the target open
    try:
        yield
    finally:
        # Restore stderr even if the body raised, and close the duplicate so
        # repeated use does not leak a file descriptor per call.
        os.dup2(newstderr, 2)
        os.close(newstderr)
class Config(object):
    """Runtime configuration for the KiAuto helpers.

    Detects the installed KiCad version via the ``pcbnew`` Python module,
    resolves the per-user KiCad configuration paths (KiCad 5 vs 5.99+ JSON
    layout, stable vs nightly install) and records session/recording options
    taken from the parsed command-line ``args``.
    """
    def __init__(self, logger, input_file=None, args=None):
        """Collect configuration.

        logger: logger used for debug/warning output during detection.
        input_file: schematic/PCB being processed; when given, the current
            stat() of the sibling project files is captured so later code can
            detect modifications.
        args: parsed command-line options; when None, defaults are used.
        Exits with NO_PCBNEW_MODULE if the pcbnew module cannot be imported.
        """
        self.export_format = 'pdf'
        if input_file:
            self.input_file = input_file
            self.input_no_ext = os.path.splitext(input_file)[0]
            #
            # As soon as we init pcbnew the following files are modified:
            #
            if os.path.isfile(self.input_no_ext+'.pro'):
                self.start_pro_stat = os.stat(self.input_no_ext+'.pro')
            else:
                self.start_pro_stat = None
            if os.path.isfile(self.input_no_ext+'.kicad_pro'):
                self.start_kicad_pro_stat = os.stat(self.input_no_ext+'.kicad_pro')
            else:
                self.start_kicad_pro_stat = None
            if os.path.isfile(self.input_no_ext+'.kicad_prl'):
                self.start_kicad_prl_stat = os.stat(self.input_no_ext+'.kicad_prl')
            else:
                self.start_kicad_prl_stat = None
        if args:
            # Session debug
            self.use_wm = args.use_wm # Use a Window Manager, dialogs behaves in a different way
            self.start_x11vnc = args.start_x11vnc
            self.rec_width = args.rec_width
            self.rec_height = args.rec_height
            self.record = args.record
            self.video_dir = args.output_dir
            self.wait_for_key = args.wait_key
            self.time_out_scale = args.time_out_scale
            # Others
            if hasattr(args, 'file_format'):
                self.export_format = args.file_format.lower()
        else:
            # Session debug
            self.use_wm = False
            self.start_x11vnc = False
            self.rec_width = REC_W
            self.rec_height = REC_H
            self.record = False
            self.video_dir = None
            self.wait_for_key = False
            self.time_out_scale = 1.0
        self.colordepth = 24
        self.video_name = None
        # NOTE(review): this unconditionally resets video_dir, clobbering the
        # args.output_dir value assigned above — confirm callers re-set it.
        self.video_dir = self.output_dir = ''
        # Executable and dirs
        self.eeschema = 'eeschema'
        self.pcbnew = 'pcbnew'
        self.kicad_conf_dir = 'kicad'
        ng_ver = os.environ.get('KIAUS_USE_NIGHTLY')
        if ng_ver:
            # Nightly builds use suffixed executables and their own config dir.
            self.eeschema += '-'+NIGHTLY
            self.pcbnew += '-'+NIGHTLY
            self.kicad_conf_dir += os.path.join(NIGHTLY, ng_ver)
            # Path to the Python module
            path.insert(0, '/usr/lib/kicad-nightly/lib/python3/dist-packages')
        # Detect KiCad version
        try:
            import pcbnew
        except ImportError:
            logger.error("Failed to import pcbnew Python module."
                         " Is KiCad installed?"
                         " Do you need to add it to PYTHONPATH?")
            exit(NO_PCBNEW_MODULE)
        kicad_version = pcbnew.GetBuildVersion()
        m = re.match(r'(\d+)\.(\d+)\.(\d+)', kicad_version)
        self.kicad_version_major = int(m.group(1))
        self.kicad_version_minor = int(m.group(2))
        self.kicad_version_patch = int(m.group(3))
        # Single comparable integer: major*1e6 + minor*1e3 + patch.
        self.kicad_version = self.kicad_version_major*1000000+self.kicad_version_minor*1000+self.kicad_version_patch
        logger.debug('Detected KiCad v{}.{}.{} ({} {})'.format(self.kicad_version_major, self.kicad_version_minor,
                     self.kicad_version_patch, kicad_version, self.kicad_version))
        # Config file names
        if self.kicad_version >= KICAD_VERSION_5_99:
            self.kicad_conf_path = pcbnew.GetSettingsManager().GetUserSettingsPath()
            if ng_ver:
                self.kicad_conf_path = self.kicad_conf_path.replace('/kicad/', '/kicadnightly/')
        else:
            # Bug in KiCad (#6989), prints to stderr:
            # `../src/common/stdpbase.cpp(62): assert "traits" failed in Get(test_dir): create wxApp before calling this`
            # Found in KiCad 5.1.8, 5.1.9
            # So we temporarily supress stderr
            with hide_stderr():
                self.kicad_conf_path = pcbnew.GetKicadConfigPath()
        logger.debug('Config path {}'.format(self.kicad_conf_path))
        # First we solve kicad_common because it can redirect to another config dir
        self.conf_kicad = os.path.join(self.kicad_conf_path, 'kicad_common')
        self.conf_kicad_bkp = None
        if self.kicad_version >= KICAD_VERSION_5_99:
            self.conf_kicad += '.json'
            self.conf_kicad_json = True
        else:
            self.conf_kicad_json = False
        # Read the environment redefinitions used by KiCad
        if os.path.isfile(self.conf_kicad):
            self.load_kicad_environment(logger)
            if 'KICAD_CONFIG_HOME' in self.env and self.kicad_version < KICAD_VERSION_5_99:
                # The user is redirecting the configuration
                # KiCad 5 unintentionally allows it, is a bug, and won't be fixed:
                # https://forum.kicad.info/t/kicad-config-home-inconsistencies-and-detail/26875
                self.kicad_conf_path = self.env['KICAD_CONFIG_HOME']
                logger.debug('Redirecting KiCad config path to: '+self.kicad_conf_path)
        else:
            logger.warning('Missing KiCad main config file '+self.conf_kicad)
        # - eeschema config
        self.conf_eeschema = os.path.join(self.kicad_conf_path, 'eeschema')
        self.conf_eeschema_bkp = None
        # - pcbnew config
        self.conf_pcbnew = os.path.join(self.kicad_conf_path, 'pcbnew')
        self.conf_pcbnew_bkp = None
        # Config files that migrated to JSON
        # Note that they remain in the old format until saved
        if self.kicad_version >= KICAD_VERSION_5_99:
            self.conf_eeschema += '.json'
            self.conf_pcbnew += '.json'
            self.conf_eeschema_json = True
            self.conf_pcbnew_json = True
            self.pro_ext = 'kicad_pro'
            self.prl_ext = 'kicad_prl'
        else:
            self.conf_eeschema_json = False
            self.conf_pcbnew_json = False
            self.pro_ext = 'pro'
            self.prl_ext = None
        # - hotkeys
        self.conf_hotkeys = os.path.join(self.kicad_conf_path, 'user.hotkeys')
        self.conf_hotkeys_bkp = None
        # - sym-lib-table
        self.user_sym_lib_table = os.path.join(self.kicad_conf_path, 'sym-lib-table')
        self.user_fp_lib_table = os.path.join(self.kicad_conf_path, 'fp-lib-table')
        self.sys_sym_lib_table = [KICAD_SHARE+'template/sym-lib-table']
        self.sys_fp_lib_table = [KICAD_SHARE+'template/fp-lib-table']
        if ng_ver:
            # 20200912: sym-lib-table is missing
            self.sys_sym_lib_table.insert(0, KICAD_NIGHTLY_SHARE+'template/sym-lib-table')
            self.sys_fp_lib_table.insert(0, KICAD_NIGHTLY_SHARE+'template/fp-lib-table')
        # Some details about the UI
        if self.kicad_version >= KICAD_VERSION_5_99:
            # KiCad 5.99.0
            self.ee_window_title = r'\[.*\] — Eeschema$'  # "PROJECT [HIERARCHY_PATH] - Eeschema"
        else:
            # KiCad 5.1.6
            self.ee_window_title = r'Eeschema.*\.sch'  # "Eeschema - file.sch"
        # Collected errors and unconnecteds (warnings)
        self.errs = []
        self.wrns = []
        # Error filters
        self.err_filters = []
    def load_kicad_environment(self, logger):
        """Populate self.env with KiCad's environment-variable redefinitions.

        Reads the JSON layout (KiCad 5.99+) or INI layout (KiCad 5); INI keys
        are upper-cased to match KiCad's usage.
        """
        self.env = {}
        if self.conf_kicad_json:
            env = self.get_config_vars_json(self.conf_kicad)
            if env:
                self.env = env
        else:
            env = self.get_config_vars_ini(self.conf_kicad)
            if env:
                for k, v in env.items():
                    self.env[k.upper()] = v
        logger.debug('KiCad environment: '+str(self.env))
    @staticmethod
    def get_config_vars_json(file):
        """Return the environment/vars dict from a JSON kicad_common, or None."""
        with open(file, "rt") as f:
            data = json.load(f)
        if 'environment' in data and 'vars' in data['environment']:
            return data['environment']['vars']
        return None
    @staticmethod
    def get_config_vars_ini(file):
        """Return the [EnvironmentVariables] section of an INI kicad_common, or None.

        The file has no section header at the top, so a dummy [Various]
        section is prepended to keep configparser happy.
        """
        config = configparser.ConfigParser()
        with open(file, "rt") as f:
            data = f.read()
        config.read_string('[Various]\n'+data)
        if 'EnvironmentVariables' in config:
            return config['EnvironmentVariables']
        return None
# Module metadata, kept at module level for introspection/packaging.
__author__ = 'Salvador E. Tropea'
__copyright__ = 'Copyright 2018-2021, INTI/Productize SPRL'
__credits__ = ['Salvador E. Tropea', 'Seppe Stas', 'Jesse Vincent', 'Scott Bezek']
__license__ = 'Apache 2.0'
__email__ = 'stropea@inti.gob.ar'
__status__ = 'beta'
__url__ = 'https://github.com/INTI-CMNB/KiAuto/'
__version__ = '1.5.8'
| 40.153846
| 121
| 0.619076
|
import os
import re
import json
import configparser
from contextlib import contextmanager
from sys import exit, path
REC_W = 1366
REC_H = 960
NO_SCHEMATIC = 1
WRONG_ARGUMENTS = 2 EESCHEMA_CFG_PRESENT = 11
KICAD_CFG_PRESENT = 3
NO_PCB = 4
PCBNEW_CFG_PRESENT = 5
WRONG_LAYER_NAME = 6
WRONG_PCB_NAME = 7
WRONG_SCH_NAME = 8
PCBNEW_ERROR = 9
EESCHEMA_ERROR = 10
NO_PCBNEW_MODULE = 11
USER_HOTKEYS_PRESENT = 12
CORRUPTED_PCB = 13
WAIT_START = 60
NIGHTLY = 'nightly'
TIME_OUT_MULT = 1.0
KICAD_VERSION_5_99 = 5099000
KICAD_SHARE = '/usr/share/kicad/'
KICAD_NIGHTLY_SHARE = '/usr/share/kicad-nightly/'
@contextmanager
def hide_stderr():
newstderr = os.dup(2)
devnull = os.open('/dev/null', os.O_WRONLY)
os.dup2(devnull, 2)
os.close(devnull)
yield
os.dup2(newstderr, 2)
class Config(object):
def __init__(self, logger, input_file=None, args=None):
self.export_format = 'pdf'
if input_file:
self.input_file = input_file
self.input_no_ext = os.path.splitext(input_file)[0]
if os.path.isfile(self.input_no_ext+'.pro'):
self.start_pro_stat = os.stat(self.input_no_ext+'.pro')
else:
self.start_pro_stat = None
if os.path.isfile(self.input_no_ext+'.kicad_pro'):
self.start_kicad_pro_stat = os.stat(self.input_no_ext+'.kicad_pro')
else:
self.start_kicad_pro_stat = None
if os.path.isfile(self.input_no_ext+'.kicad_prl'):
self.start_kicad_prl_stat = os.stat(self.input_no_ext+'.kicad_prl')
else:
self.start_kicad_prl_stat = None
if args:
self.use_wm = args.use_wm self.start_x11vnc = args.start_x11vnc
self.rec_width = args.rec_width
self.rec_height = args.rec_height
self.record = args.record
self.video_dir = args.output_dir
self.wait_for_key = args.wait_key
self.time_out_scale = args.time_out_scale
if hasattr(args, 'file_format'):
self.export_format = args.file_format.lower()
else:
self.use_wm = False
self.start_x11vnc = False
self.rec_width = REC_W
self.rec_height = REC_H
self.record = False
self.video_dir = None
self.wait_for_key = False
self.time_out_scale = 1.0
self.colordepth = 24
self.video_name = None
self.video_dir = self.output_dir = ''
self.eeschema = 'eeschema'
self.pcbnew = 'pcbnew'
self.kicad_conf_dir = 'kicad'
ng_ver = os.environ.get('KIAUS_USE_NIGHTLY')
if ng_ver:
self.eeschema += '-'+NIGHTLY
self.pcbnew += '-'+NIGHTLY
self.kicad_conf_dir += os.path.join(NIGHTLY, ng_ver)
path.insert(0, '/usr/lib/kicad-nightly/lib/python3/dist-packages')
try:
import pcbnew
except ImportError:
logger.error("Failed to import pcbnew Python module."
" Is KiCad installed?"
" Do you need to add it to PYTHONPATH?")
exit(NO_PCBNEW_MODULE)
kicad_version = pcbnew.GetBuildVersion()
m = re.match(r'(\d+)\.(\d+)\.(\d+)', kicad_version)
self.kicad_version_major = int(m.group(1))
self.kicad_version_minor = int(m.group(2))
self.kicad_version_patch = int(m.group(3))
self.kicad_version = self.kicad_version_major*1000000+self.kicad_version_minor*1000+self.kicad_version_patch
logger.debug('Detected KiCad v{}.{}.{} ({} {})'.format(self.kicad_version_major, self.kicad_version_minor,
self.kicad_version_patch, kicad_version, self.kicad_version))
if self.kicad_version >= KICAD_VERSION_5_99:
self.kicad_conf_path = pcbnew.GetSettingsManager().GetUserSettingsPath()
if ng_ver:
self.kicad_conf_path = self.kicad_conf_path.replace('/kicad/', '/kicadnightly/')
else:
with hide_stderr():
self.kicad_conf_path = pcbnew.GetKicadConfigPath()
logger.debug('Config path {}'.format(self.kicad_conf_path))
self.conf_kicad = os.path.join(self.kicad_conf_path, 'kicad_common')
self.conf_kicad_bkp = None
if self.kicad_version >= KICAD_VERSION_5_99:
self.conf_kicad += '.json'
self.conf_kicad_json = True
else:
self.conf_kicad_json = False
if os.path.isfile(self.conf_kicad):
self.load_kicad_environment(logger)
if 'KICAD_CONFIG_HOME' in self.env and self.kicad_version < KICAD_VERSION_5_99:
# https://forum.kicad.info/t/kicad-config-home-inconsistencies-and-detail/26875
self.kicad_conf_path = self.env['KICAD_CONFIG_HOME']
logger.debug('Redirecting KiCad config path to: '+self.kicad_conf_path)
else:
logger.warning('Missing KiCad main config file '+self.conf_kicad)
# - eeschema config
self.conf_eeschema = os.path.join(self.kicad_conf_path, 'eeschema')
self.conf_eeschema_bkp = None
# - pcbnew config
self.conf_pcbnew = os.path.join(self.kicad_conf_path, 'pcbnew')
self.conf_pcbnew_bkp = None
# Config files that migrated to JSON
# Note that they remain in the old format until saved
if self.kicad_version >= KICAD_VERSION_5_99:
self.conf_eeschema += '.json'
self.conf_pcbnew += '.json'
self.conf_eeschema_json = True
self.conf_pcbnew_json = True
self.pro_ext = 'kicad_pro'
self.prl_ext = 'kicad_prl'
else:
self.conf_eeschema_json = False
self.conf_pcbnew_json = False
self.pro_ext = 'pro'
self.prl_ext = None
# - hotkeys
self.conf_hotkeys = os.path.join(self.kicad_conf_path, 'user.hotkeys')
self.conf_hotkeys_bkp = None
# - sym-lib-table
self.user_sym_lib_table = os.path.join(self.kicad_conf_path, 'sym-lib-table')
self.user_fp_lib_table = os.path.join(self.kicad_conf_path, 'fp-lib-table')
self.sys_sym_lib_table = [KICAD_SHARE+'template/sym-lib-table']
self.sys_fp_lib_table = [KICAD_SHARE+'template/fp-lib-table']
if ng_ver:
# 20200912: sym-lib-table is missing
self.sys_sym_lib_table.insert(0, KICAD_NIGHTLY_SHARE+'template/sym-lib-table')
self.sys_fp_lib_table.insert(0, KICAD_NIGHTLY_SHARE+'template/fp-lib-table')
# Some details about the UI
if self.kicad_version >= KICAD_VERSION_5_99:
# KiCad 5.99.0
self.ee_window_title = r'\[.*\] — Eeschema$' # "PROJECT [HIERARCHY_PATH] - Eeschema"
else:
# KiCad 5.1.6
self.ee_window_title = r'Eeschema.*\.sch' # "Eeschema - file.sch"
# Collected errors and unconnecteds (warnings)
self.errs = []
self.wrns = []
# Error filters
self.err_filters = []
def load_kicad_environment(self, logger):
self.env = {}
if self.conf_kicad_json:
env = self.get_config_vars_json(self.conf_kicad)
if env:
self.env = env
else:
env = self.get_config_vars_ini(self.conf_kicad)
if env:
for k, v in env.items():
self.env[k.upper()] = v
logger.debug('KiCad environment: '+str(self.env))
@staticmethod
def get_config_vars_json(file):
with open(file, "rt") as f:
data = json.load(f)
if 'environment' in data and 'vars' in data['environment']:
return data['environment']['vars']
return None
@staticmethod
def get_config_vars_ini(file):
config = configparser.ConfigParser()
with open(file, "rt") as f:
data = f.read()
config.read_string('[Various]\n'+data)
if 'EnvironmentVariables' in config:
return config['EnvironmentVariables']
return None
__author__ = 'Salvador E. Tropea'
__copyright__ = 'Copyright 2018-2021, INTI/Productize SPRL'
__credits__ = ['Salvador E. Tropea', 'Seppe Stas', 'Jesse Vincent', 'Scott Bezek']
__license__ = 'Apache 2.0'
__email__ = 'stropea@inti.gob.ar'
__status__ = 'beta'
__url__ = 'https://github.com/INTI-CMNB/KiAuto/'
__version__ = '1.5.8'
| true
| true
|
f70924607cd6bc830019782182dd67bf0f16ab46
| 246
|
py
|
Python
|
discord_bot/config.sample.py
|
treasure-hacks/treasure-hacks-ctf
|
9d07d0182bb096ed7161ba9d35299d60ade9cf5a
|
[
"MIT"
] | null | null | null |
discord_bot/config.sample.py
|
treasure-hacks/treasure-hacks-ctf
|
9d07d0182bb096ed7161ba9d35299d60ade9cf5a
|
[
"MIT"
] | null | null | null |
discord_bot/config.sample.py
|
treasure-hacks/treasure-hacks-ctf
|
9d07d0182bb096ed7161ba9d35299d60ade9cf5a
|
[
"MIT"
] | null | null | null |
# Sample configuration — presumably copied to config.py and filled in before
# running the bot (TODO confirm the expected destination filename).
# Replace with DB URI; proto://user:pass@host/database
DB_URI = "replace"
# Replace with bot token
TOKEN = "replace"
# Replace with IDs of admin command users
ADMIN_IDS = []
# Replace with voice channel for audio clue
TARGET_VOICE_CHANNEL = 0
| 20.5
| 54
| 0.743902
|
DB_URI = "replace"
TOKEN = "replace"
ADMIN_IDS = []
TARGET_VOICE_CHANNEL = 0
| true
| true
|
f70924d202c078270870d9d0bb24cb8b377f14c1
| 2,413
|
py
|
Python
|
nicos_jcns/galaxi/devices/automation.py
|
ebadkamil/nicos
|
0355a970d627aae170c93292f08f95759c97f3b5
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
nicos_jcns/galaxi/devices/automation.py
|
ebadkamil/nicos
|
0355a970d627aae170c93292f08f95759c97f3b5
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 1
|
2021-08-18T10:55:42.000Z
|
2021-08-18T10:55:42.000Z
|
nicos_jcns/galaxi/devices/automation.py
|
ISISComputingGroup/nicos
|
94cb4d172815919481f8c6ee686f21ebb76f2068
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# *****************************************************************************
# NICOS, the Networked Instrument Control System of the MLZ
# Copyright (c) 2009-2021 by the NICOS contributors (see AUTHORS)
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
# Alexander Steffens <a.steffens@fz-juelich.de>
#
# *****************************************************************************
"""GALAXI Automatic vacuum control and detector positioning"""
from nicos.core.device import Readable
from nicos.core.params import Attach, Param, listof
from nicos.devices.tango import NamedDigitalOutput
class DetectorDistance(Readable):
    """Compute the sample-detector distance from the detector tube states.

    The distance is the configured minimum ``offset`` plus the length of
    every consecutive tube (starting from the sample side) that reads 'up'.
    """
    attached_devices = {
        'detectubes': Attach('Pilatus detector tubes', Readable, multiple=4)
    }
    parameters = {
        'offset': Param('Minimum distance between Pilatus and sample',
                        type=int, settable=True),
        'tubelength': Param('List of tube length',
                            type=listof(int), settable=False,
                            default=[450, 450, 900, 900]),
    }
    # Purely computed from the attached tube devices; no hardware I/O here.
    hardware_access = False
    def doInit(self, mode):
        self.log.debug('Detector distance init')
        self.read()
    def doRead(self, maxage=0):
        # Tubes can only be raised in sequence, so stop summing at the first
        # tube that is not 'up'.
        total = self.offset
        for tube, length in zip(self._attached_detectubes, self.tubelength):
            if tube.read(maxage) != 'up':
                break
            total += length
        return total
class VacuumOperation(NamedDigitalOutput):
    """Provide different vacuum operation states."""
    def doStop(self):
        # Abort the currently running vacuum sequence by resetting the
        # underlying Tango device.
        self._dev.Reset()
| 36.014925
| 79
| 0.63075
|
from nicos.core.device import Readable
from nicos.core.params import Attach, Param, listof
from nicos.devices.tango import NamedDigitalOutput
class DetectorDistance(Readable):
attached_devices = {
'detectubes': Attach('Pilatus detector tubes', Readable, multiple=4)
}
parameters = {
'offset': Param('Minimum distance between Pilatus and sample',
type=int, settable=True),
'tubelength': Param('List of tube length',
type=listof(int), settable=False,
default=[450, 450, 900, 900]),
}
hardware_access = False
def doInit(self, mode):
self.log.debug('Detector distance init')
self.read()
def doRead(self, maxage=0):
distance = 0
for tube, l in zip(self._attached_detectubes, self.tubelength):
if tube.read(maxage) != 'up':
break
distance += l
return self.offset + distance
class VacuumOperation(NamedDigitalOutput):
def doStop(self):
self._dev.Reset()
| true
| true
|
f709254330ef76af7db62a35d05bbcb48fbff96b
| 6,705
|
py
|
Python
|
py4syn/epics/LaudaClass.py
|
lnls-sol/py4syn-old
|
6653faa788b273c8a592ae7548f9027fd95cc62a
|
[
"0BSD"
] | 12
|
2015-07-12T17:15:06.000Z
|
2018-04-28T06:51:15.000Z
|
py4syn/epics/LaudaClass.py
|
lnls-sol/py4syn-old
|
6653faa788b273c8a592ae7548f9027fd95cc62a
|
[
"0BSD"
] | 29
|
2016-06-28T12:24:08.000Z
|
2018-10-22T15:59:43.000Z
|
py4syn/epics/LaudaClass.py
|
lnls-sol/py4syn-old
|
6653faa788b273c8a592ae7548f9027fd95cc62a
|
[
"0BSD"
] | 10
|
2015-09-02T17:30:33.000Z
|
2018-01-18T18:52:32.000Z
|
"""Lauda temperature controller class
Python class for Lauda temperature controllers
:platform: Unix
:synopsis: Python class for Lauda temperature controllers
.. moduleauthor:: Henrique Dante de Almeida <henrique.almeida@lnls.br>
"""
from threading import Event
from epics import Device, ca
from py4syn.epics.IScannable import IScannable
from py4syn.epics.StandardDevice import StandardDevice
from py4syn.utils.timer import Timer
class Lauda(StandardDevice, IScannable):
    """
    Class to control Lauda temperature controllers via EPICS.
    Examples
    --------
    >>> from py4syn.epics.LaudaClass import Lauda
    >>>
    >>> def showTemperature(pv):
    ...     lauda = Lauda(pv, 'lauda')
    ...     print('Temperature is: %d' % lauda.getValue())
    ...
    >>> def setTemperature(lauda, temperature):
    ...     lauda.setValue(temperature)
    ...     lauda.run()
    """
    # Tolerance in °C between measured and requested temperature; wait()
    # considers the setpoint reached once |measured - requested| <= EPSILON.
    EPSILON = 0.1
    def __init__(self, pvName, mnemonic):
        """
        **Constructor**
        See :class:`py4syn.epics.StandardDevice`
        Parameters
        ----------
        pvName : `string`
            Power supply base naming of the PV (Process Variable)
        mnemonic : `string`
            Temperature controller mnemonic
        """
        super().__init__(mnemonic)
        self.pvName = pvName
        self.lauda = Device(pvName + ':', ['BLEVEL', 'BOVERTEMP', 'BPOWER', 'BSP',
                            'BSTATS', 'BTEMP', 'BTN', 'BTHERMOSTATS', 'WSP', 'WSTART',
                            'ETEMP', 'WPUMP', 'WSTOP', 'WTN'])
        # Event set by the BTEMP monitor callback whenever a new temperature
        # reading arrives; used by wait() to sleep between updates.
        self.newTemperature = Event()
        self.lauda.add_callback('BTEMP', self.onTemperatureChange)
        # Skip initial callback (the first monitor fires immediately on
        # connect; wait up to 1 s for it so later waits see real updates).
        self.newTemperature.wait(1)
    def __str__(self):
        return '%s (%s)' % (self.getMnemonic(), self.pvName)
    def getValue(self):
        """
        Returns the current measured temperature.
        Returns
        -------
        `int`
        """
        return self.lauda.get('BTEMP')
    def getRealPosition(self):
        """
        Returns the same as :meth:`getValue`.
        See: :meth:`getValue`
        Returns
        -------
        `int`
        """
        return self.getValue()
    def onTemperatureChange(self, **kwargs):
        """
        Helper callback that indicates when the measured temperature changed.
        """
        self.newTemperature.set()
    def setVelocity(self, r):
        """
        Dummy method setVelocity() (required by the IScannable interface;
        this controller does not support ramp-speed configuration here).
        Parameters
        ----------
        r : `float`
            Ramp speed in °C/min
        """
        pass
    def setValue(self, v):
        """
        Changes the temperature to a new value and starts the program.
        Parameters
        ----------
        v : `int`
            The target temperature in °C
        """
        self.lauda.put('WSP', v)
        self.run()
        # Remembered for wait(); note wait() must not be called before the
        # first setValue(), otherwise requestedValue does not exist yet.
        self.requestedValue = v
    def wait(self):
        """
        Blocks until the requested temperature is achieved.
        Requires a prior call to :meth:`setValue`.
        """
        # Flush channel access so the setpoint write is actually on the wire
        # before we start waiting for readings.
        ca.flush_io()
        self.newTemperature.clear()
        while abs(self.getValue()-self.requestedValue) > self.EPSILON:
            # Give up after 60 seconds without an update
            if not self.newTemperature.wait(60):
                break
            self.newTemperature.clear()
    def getLowLimitValue(self):
        """
        Returns the controller low limit temperature (°C, fixed).
        Returns
        -------
        `int`
        """
        return -20
    def getHighLimitValue(self):
        """
        Returns the controller high limit temperature (°C, fixed).
        Returns
        -------
        `int`
        """
        return 200
    def run(self):
        """
        Starts or resumes executing the current temperature program.
        """
        self.lauda.put('WSTART', 1)
    def stop(self):
        """
        Stops executing the current temperature program and puts the device in idle state.
        In the idle state, the device will not try to set a target temperature.
        """
        self.lauda.put('WSTOP', 1)
    def setPumpSpeed(self, speed):
        """
        Changes the pump speed.
        Parameters
        ----------
        speed : `int`
            The requested pump speed, ranging from 1 to 8.
        Raises
        ------
        ValueError
            If the speed is outside the 1..8 range.
        """
        if speed < 1 or speed > 8:
            raise ValueError('Invalid speed')
        self.lauda.put('WPUMP', speed)
    def getInternalTemp(self):
        """
        Same as :meth:`getValue`.
        See :meth:`getValue`
        Returns
        -------
        `int`
        """
        return self.getValue()
    def getExternalTemp(self):
        """
        Returns the device's external temperature.
        Returns
        -------
        `int`
        """
        return self.lauda.get('ETEMP')
    def getLevel(self):
        """
        Returns the device's bath level.
        Returns
        -------
        `int`
        """
        return self.lauda.get('BLEVEL')
    def getSetPoint(self):
        """
        Returns the current target temperature.
        Returns
        -------
        `int`
        """
        return self.lauda.get('BSP')
    def getPower(self):
        """
        Returns the current device power.
        Returns
        ----------
        `int`
        """
        return self.lauda.get('BPOWER')
    def getOverTemp(self):
        """
        Returns the maximum temperature software defined limit.
        Returns
        ----------
        `int`
        """
        return self.lauda.get('BOVERTEMP')
    def getTN(self):
        """
        Returns the device's BTN value (presumably the controller's Tn
        control parameter -- confirm against the IOC documentation).
        Returns
        ----------
        `int`
        """
        return self.lauda.get('BTN')
    def getStatus(self):
        """
        Returns the device status word.
        Returns
        ----------
        `int`
        """
        return self.lauda.get('BSTATS')
    def getThermoStatus(self):
        """
        Returns the device thermostat error word.
        Returns
        ----------
        `int`
        """
        return self.lauda.get('BTHERMOSTATS')
    def changeSetPoint(self, val):
        """
        Same as :meth:`setValue`.
        See :meth:`setValue`
        Parameters
        ----------
        val : `int`
            The requested temperature.
        """
        self.setValue(val)
    def changePump(self, val):
        """
        Same as :meth:`setPumpSpeed`.
        See :meth:`setPumpSpeed`
        Parameters
        ----------
        val : `int`
            The requested pump speed.
        """
        self.setPumpSpeed(val)
    def changeTN(self, val):
        """
        Writes *val* to the WTN PV (counterpart of :meth:`getTN`).
        """
        self.lauda.put('WTN', val)
    def start(self):
        """
        Same as :meth:`run`.
        See :meth:`run`
        """
        self.run()
| 21.983607
| 90
| 0.509918
|
from threading import Event
from epics import Device, ca
from py4syn.epics.IScannable import IScannable
from py4syn.epics.StandardDevice import StandardDevice
from py4syn.utils.timer import Timer
class Lauda(StandardDevice, IScannable):
EPSILON = 0.1
def __init__(self, pvName, mnemonic):
super().__init__(mnemonic)
self.pvName = pvName
self.lauda = Device(pvName + ':', ['BLEVEL', 'BOVERTEMP', 'BPOWER', 'BSP',
'BSTATS', 'BTEMP', 'BTN', 'BTHERMOSTATS', 'WSP', 'WSTART',
'ETEMP', 'WPUMP', 'WSTOP', 'WTN'])
self.newTemperature = Event()
self.lauda.add_callback('BTEMP', self.onTemperatureChange)
self.newTemperature.wait(1)
def __str__(self):
return '%s (%s)' % (self.getMnemonic(), self.pvName)
def getValue(self):
return self.lauda.get('BTEMP')
def getRealPosition(self):
return self.getValue()
def onTemperatureChange(self, **kwargs):
self.newTemperature.set()
def setVelocity(self, r):
pass
def setValue(self, v):
self.lauda.put('WSP', v)
self.run()
self.requestedValue = v
def wait(self):
ca.flush_io()
self.newTemperature.clear()
while abs(self.getValue()-self.requestedValue) > self.EPSILON:
if not self.newTemperature.wait(60):
break
self.newTemperature.clear()
def getLowLimitValue(self):
return -20
def getHighLimitValue(self):
return 200
def run(self):
self.lauda.put('WSTART', 1)
def stop(self):
self.lauda.put('WSTOP', 1)
def setPumpSpeed(self, speed):
if speed < 1 or speed > 8:
raise ValueError('Invalid speed')
self.lauda.put('WPUMP', speed)
def getInternalTemp(self):
return self.getValue()
def getExternalTemp(self):
return self.lauda.get('ETEMP')
def getLevel(self):
return self.lauda.get('BLEVEL')
def getSetPoint(self):
return self.lauda.get('BSP')
def getPower(self):
return self.lauda.get('BPOWER')
def getOverTemp(self):
return self.lauda.get('BOVERTEMP')
def getTN(self):
return self.lauda.get('BTN')
def getStatus(self):
return self.lauda.get('BSTATS')
def getThermoStatus(self):
return self.lauda.get('BTHERMOSTATS')
def changeSetPoint(self, val):
self.setValue(val)
def changePump(self, val):
self.setPumpSpeed(val)
def changeTN(self, val):
self.lauda.put('WTN', val)
def start(self):
self.run()
| true
| true
|
f70925733799e0b5c6d890ecbb866e5f5a32c735
| 1,110
|
py
|
Python
|
demos/sparse_op/wfuncs/H0/donut.py
|
tbcole/majoranaJJ
|
dcf31f7786fa0a4874a940b7d8dcdd55f3921a46
|
[
"MIT"
] | null | null | null |
demos/sparse_op/wfuncs/H0/donut.py
|
tbcole/majoranaJJ
|
dcf31f7786fa0a4874a940b7d8dcdd55f3921a46
|
[
"MIT"
] | 2
|
2020-03-24T23:46:17.000Z
|
2020-04-19T20:29:08.000Z
|
demos/sparse_op/wfuncs/H0/donut.py
|
tbcole/majoranaJJ
|
dcf31f7786fa0a4874a940b7d8dcdd55f3921a46
|
[
"MIT"
] | 3
|
2020-04-30T08:48:12.000Z
|
2022-01-26T12:15:15.000Z
|
import numpy as np
import matplotlib.pyplot as plt
import scipy.sparse.linalg as spLA
import majoranaJJ.operators.sparse.qmsops as spop #sparse operators
import majoranaJJ.lattice.nbrs as nb #neighbor arrays
import majoranaJJ.lattice.shapes as shps #lattice shapes
import majoranaJJ.modules.plots as plots #plotting functions
# Annulus ("donut") geometry: outer radius R, inner radius r (lattice units)
R = 50
r = 15
ax = 10 #[A]
ay = 10 #[A]
coor = shps.donut(R, r)  # site coordinates of the donut lattice
NN = nb.NN_Arr(coor)  # nearest-neighbor index array for the lattice
print("lattice size", coor.shape[0])
# Physical parameters (all zero: free-particle Hamiltonian)
alpha = 0 #Spin-Orbit Coupling constant: [eV*A]
gammaz = 0 #Zeeman field energy contribution: [T]
delta = 0 #Superconducting Gap: [eV]
V0 = 0.0 #Amplitude of potential : [eV]
mu = 0 #Chemical Potential: [eV]
H = spop.H0(coor, ax, ay, NN)  # sparse free-particle Hamiltonian
print("H shape: ", H.shape)
num = 75 # This is the number of eigenvalues and eigenvectors you want
sigma = 0 # This is the eigenvalue we search around
which = 'LM'
# Shift-invert sparse diagonalization: eigenpairs closest to sigma
eigs, vecs = spLA.eigsh(H, k = num, sigma = sigma, which = which)
plots.state_cmap(coor, eigs, vecs, n = 0, title = 'SPARSE Free Particle Ground State')
n = 39
plots.state_cmap(coor, eigs, vecs, n = n, title = 'SPARSE: Excited State # {}'.format(n))
| 30.833333
| 89
| 0.713514
|
import numpy as np
import matplotlib.pyplot as plt
import scipy.sparse.linalg as spLA
import majoranaJJ.operators.sparse.qmsops as spop import majoranaJJ.lattice.nbrs as nb import majoranaJJ.lattice.shapes as shps import majoranaJJ.modules.plots as plots
R = 50
r = 15
ax = 10 ay = 10
coor = shps.donut(R, r)
NN = nb.NN_Arr(coor)
print("lattice size", coor.shape[0])
alpha = 0 gammaz = 0 delta = 0 V0 = 0.0 mu = 0
H = spop.H0(coor, ax, ay, NN)
print("H shape: ", H.shape)
num = 75 sigma = 0 which = 'LM'
eigs, vecs = spLA.eigsh(H, k = num, sigma = sigma, which = which)
plots.state_cmap(coor, eigs, vecs, n = 0, title = 'SPARSE Free Particle Ground State')
n = 39
plots.state_cmap(coor, eigs, vecs, n = n, title = 'SPARSE: Excited State # {}'.format(n))
| true
| true
|
f70925fd466531f7885b22db0a2494715d6a730b
| 1,181
|
py
|
Python
|
backend/app/api/auth.py
|
dasdachs/flask-blog
|
d484026c1057e991a89df54d3fec20b43a507d1b
|
[
"MIT"
] | null | null | null |
backend/app/api/auth.py
|
dasdachs/flask-blog
|
d484026c1057e991a89df54d3fec20b43a507d1b
|
[
"MIT"
] | null | null | null |
backend/app/api/auth.py
|
dasdachs/flask-blog
|
d484026c1057e991a89df54d3fec20b43a507d1b
|
[
"MIT"
] | 1
|
2020-04-08T17:48:34.000Z
|
2020-04-08T17:48:34.000Z
|
#!/usr/bin/env python3.4
from flask import Blueprint, flash, redirect, render_template, request, url_for
# BUGFIX: the ``flask.ext.*`` import shim was removed in Flask 1.0; import the
# extension package directly (same public names, backward compatible).
from flask_login import login_user, logout_user, login_required
from ..models import User
from ..forms import LoginForm
# Blueprint grouping the authentication views (login/logout).
auth = Blueprint('auth', __name__)
@auth.route('/login', methods=['GET', 'POST'])
def login():
    """
    The login view, built around :class:`LoginForm` and Flask-Login.

    On a valid POST the user is looked up by username and the password is
    verified; failures flash a message and redirect back here, successes log
    the user in and redirect to ``next`` or the admin dashboard.
    """
    form = LoginForm()
    # GET request or invalid submission: just render the form.
    if not form.validate_on_submit():
        return render_template('auth/login.html', form=form)
    user = User.query.filter_by(username=form.username.data).first()
    if user is None or not user.verify_password(form.password.data):
        flash('Invalid email or password')
        return redirect(url_for('auth.login'))
    login_user(user, form.remember_me.data)
    return redirect(request.args.get('next') or url_for('admin.dashboard'))
@auth.route('/logout')
@login_required
def logout():
    """Log the current user out and redirect to the blog's main page."""
    logout_user()
    flash('Logged out and good to go.')
    return redirect(url_for('blog.main'))
| 31.078947
| 79
| 0.690093
|
from flask import Blueprint, flash, redirect, render_template, request, url_for
from flask.ext.login import login_user, logout_user, login_required
from ..models import User
from ..forms import LoginForm
auth = Blueprint('auth', __name__)
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.username.data).first()
if not user or not user.verify_password(form.password.data):
flash('Invalid email or password')
return redirect(url_for('auth.login'))
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('admin.dashboard'))
return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('Logged out and good to go.')
return redirect(url_for('blog.main'))
| true
| true
|
f7092607e2f50e603d2cab1086ade69261c874d1
| 11,241
|
py
|
Python
|
outputFiles/statistics/archives/ourIA/closest.py/0.9/9/player1.py
|
dimtion/jml
|
dba4db760280cc5ed8c384e36e41d6c7a310fb4f
|
[
"MIT"
] | 1
|
2015-10-07T19:18:55.000Z
|
2015-10-07T19:18:55.000Z
|
outputFiles/statistics/archives/ourIA/closest.py/0.9/9/player1.py
|
dimtion/jml
|
dba4db760280cc5ed8c384e36e41d6c7a310fb4f
|
[
"MIT"
] | 1
|
2015-10-07T19:28:25.000Z
|
2015-10-08T19:01:47.000Z
|
outputFiles/statistics/archives/ourIA/closest.py/0.9/9/player1.py
|
dimtion/jml
|
dba4db760280cc5ed8c384e36e41d6c7a310fb4f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
####################################################################################################################################################################################################################################
######################################################################################################## PRE-DEFINED IMPORTS #######################################################################################################
####################################################################################################################################################################################################################################
# Imports that are necessary for the program architecture to work properly
# Do not edit this code
import ast
import sys
import os
####################################################################################################################################################################################################################################
####################################################################################################### PRE-DEFINED CONSTANTS ######################################################################################################
####################################################################################################################################################################################################################################
# Possible characters to send to the maze application
# Any other will be ignored
# Do not edit this code
# Single-character move commands understood by the maze application.
# Any other character sent through the pipe is ignored by the maze.
UP = 'U'
DOWN = 'D'
LEFT = 'L'
RIGHT = 'R'
# Name of the team, displayed in the maze application.
TEAM_NAME = "closest"
# Pre-computed sequence of moves for this maze; determineNextMove() pops
# and returns them one per turn.
allMoves = [RIGHT, RIGHT, RIGHT, UP, RIGHT, RIGHT, RIGHT, UP, UP, UP, RIGHT, UP, UP, UP, RIGHT, UP]
####################################################################################################################################################################################################################################
####################################################################################################### PRE-DEFINED FUNCTIONS ######################################################################################################
####################################################################################################################################################################################################################################
# Writes a message to the shell
# Use for debugging your program
# Channels stdout and stdin are captured to enable communication with the maze
# Do not edit this code
def debug (text) :
    """Write *text* (plus a newline) to stderr and flush immediately.

    stdout/stdin are reserved for the maze pipe protocol, so all diagnostic
    output must go through stderr.
    """
    print(text, file=sys.stderr, flush=True)
####################################################################################################################################################################################################################################
# Reads one line of information sent by the maze application
# This function is blocking, and will wait for a line to terminate
# The received information is automatically converted to the correct type
# Do not edit this code
def readFromPipe () :
    """Read one line from stdin and return it parsed as a Python literal.

    Blocks until a complete line arrives from the maze application.
    """
    # Reads from the stdin channel and returns the structure associated to the string
    try :
        text = sys.stdin.readline()
        return ast.literal_eval(text.strip())
    except :
        # NOTE(review): the bare except is deliberate -- any read/parse
        # failure terminates the player process immediately so the maze is
        # not left waiting on a broken pipe.
        os._exit(-1)
####################################################################################################################################################################################################################################
# Sends the text to the maze application
# Do not edit this code
def writeToPipe (text) :
    """Send *text* to the maze application over stdout, flushing immediately."""
    stream = sys.stdout
    stream.write(text)
    stream.flush()
####################################################################################################################################################################################################################################
# Reads the initial maze information
# The function processes the text and returns the associated variables
# The dimensions of the maze are positive integers
# Maze map is a dictionary associating to a location its adjacent locations and the associated weights
# The preparation time gives the time during which 'initializationCode' can make computations before the game starts
# The turn time gives the time during which 'determineNextMove' can make computations before returning a decision
# Player locations are tuples (line, column)
# Coins are given as a list of locations where they appear
# A boolean indicates if the game is over
# Do not edit this code
def processInitialInformation () :
    """Read the initial maze message and return its fields as a tuple.

    Order: maze width/height, maze map, preparation time, turn time, player
    location, opponent location, coin locations, game-over flag.
    """
    data = readFromPipe()
    fields = ('mazeWidth', 'mazeHeight', 'mazeMap', 'preparationTime',
              'turnTime', 'playerLocation', 'opponentLocation', 'coins',
              'gameIsOver')
    return tuple(data[field] for field in fields)
####################################################################################################################################################################################################################################
# Reads the information after each player moved
# The maze map and allowed times are no longer provided since they do not change
# Do not edit this code
def processNextInformation () :
    """Read one turn update: player/opponent locations, coins, game-over flag.

    The maze map and allowed times are not re-sent since they never change.
    """
    data = readFromPipe()
    fields = ('playerLocation', 'opponentLocation', 'coins', 'gameIsOver')
    return tuple(data[field] for field in fields)
####################################################################################################################################################################################################################################
########################################################################################################## YOUR FUNCTIONS ##########################################################################################################
####################################################################################################################################################################################################################################
# This is where you should write your code to do things during the initialization delay
# This function should not return anything, but should be used for a short preprocessing
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def initializationCode (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
    """Preprocessing hook run once during the preparation delay.

    This strategy replays a fixed, pre-computed move list, so there is
    nothing to prepare.
    """
    pass
####################################################################################################################################################################################################################################
# This is where you should write your code to determine the next direction
# This function should return one of the directions defined in the CONSTANTS section
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def determineNextMove (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
    """Return the next scripted move, consuming it from the global list.

    The maze-state parameters are ignored: this player simply replays
    ``allMoves`` one entry per turn.
    """
    global allMoves
    # Detach the head of the list; rebinds allMoves to the remaining moves.
    nextMove, *allMoves = allMoves
    return nextMove
####################################################################################################################################################################################################################################
############################################################################################################# MAIN LOOP ############################################################################################################
####################################################################################################################################################################################################################################
# This is the entry point when executing this file
# We first send the name of the team to the maze
# The first message we receive from the maze includes its dimensions and map, the times allowed to the various steps, and the players and coins locations
# Then, at every loop iteration, we get the maze status and determine a move
# Do not edit this code
if __name__ == "__main__" :
    # Handshake: the maze expects the team name first
    writeToPipe(TEAM_NAME + "\n")
    # We process the initial information and have a delay to compute things using it
    (mazeWidth, mazeHeight, mazeMap, preparationTime, turnTime, playerLocation, opponentLocation, coins, gameIsOver) = processInitialInformation()
    initializationCode(mazeWidth, mazeHeight, mazeMap, preparationTime, playerLocation, opponentLocation, coins)
    # Main loop: read the updated game state, decide a move, send it
    while not gameIsOver :
        (playerLocation, opponentLocation, coins, gameIsOver) = processNextInformation()
        if gameIsOver :
            break
        nextMove = determineNextMove(mazeWidth, mazeHeight, mazeMap, turnTime, playerLocation, opponentLocation, coins)
        writeToPipe(nextMove)
####################################################################################################################################################################################################################################
####################################################################################################################################################################################################################################
| 64.976879
| 228
| 0.357708
|
import ast
import sys
import os
UP = 'U'
DOWN = 'D'
LEFT = 'L'
RIGHT = 'R'
TEAM_NAME = "closest"
allMoves = [RIGHT, RIGHT, RIGHT, UP, RIGHT, RIGHT, RIGHT, UP, UP, UP, RIGHT, UP, UP, UP, RIGHT, UP]
def debug (text) :
sys.stderr.write(str(text) + "\n")
sys.stderr.flush()
def readFromPipe () :
try :
text = sys.stdin.readline()
return ast.literal_eval(text.strip())
except :
os._exit(-1)
def writeToPipe (text) :
sys.stdout.write(text)
sys.stdout.flush()
def processInitialInformation () :
data = readFromPipe()
return (data['mazeWidth'], data['mazeHeight'], data['mazeMap'], data['preparationTime'], data['turnTime'], data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
def processNextInformation () :
data = readFromPipe()
return (data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
def initializationCode (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
pass
def determineNextMove (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
global allMoves
nextMove = allMoves[0]
allMoves = allMoves[1:]
return nextMove
if __name__ == "__main__" :
writeToPipe(TEAM_NAME + "\n")
(mazeWidth, mazeHeight, mazeMap, preparationTime, turnTime, playerLocation, opponentLocation, coins, gameIsOver) = processInitialInformation()
initializationCode(mazeWidth, mazeHeight, mazeMap, preparationTime, playerLocation, opponentLocation, coins)
while not gameIsOver :
(playerLocation, opponentLocation, coins, gameIsOver) = processNextInformation()
if gameIsOver :
break
nextMove = determineNextMove(mazeWidth, mazeHeight, mazeMap, turnTime, playerLocation, opponentLocation, coins)
writeToPipe(nextMove)
| true
| true
|
f7092662056ebef64208f6ab439e77a3dae4b1e8
| 1,412
|
py
|
Python
|
setup.py
|
masudurHimel/TestLibrary
|
911f2762ed11b8fd79a32c1a9ecc30331d111998
|
[
"MIT"
] | null | null | null |
setup.py
|
masudurHimel/TestLibrary
|
911f2762ed11b8fd79a32c1a9ecc30331d111998
|
[
"MIT"
] | null | null | null |
setup.py
|
masudurHimel/TestLibrary
|
911f2762ed11b8fd79a32c1a9ecc30331d111998
|
[
"MIT"
] | null | null | null |
# NOTE: distutils was deprecated by PEP 632 and removed in Python 3.12.
# Prefer setuptools (a drop-in replacement for this usage) and fall back
# only on old interpreters where setuptools is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

setup(
    name='TestLibrary_MR',  # How you named your package folder (MyLib)
    packages=['TestLibrary_MR'],  # Chose the same as "name"
    version='0.2',  # Start with a small number and increase it with every change you make
    license='MIT',  # Chose a license from here: https://help.github.com/articles/licensing-a-repository
    description='Just a test module',  # Give a short description about your library
    author='Md. Masudur Rahman',  # Type in your name
    author_email='masudurhimel@gmail.com',  # Type in your E-Mail
    url='https://github.com/masudurHimel/TestLibrary_MR',  # Provide either the link to your github or to your website
    download_url='https://github.com/masudurHimel/TestLibrary_MR/archive/refs/tags/v_02.tar.gz',
    keywords=['test'],  # Keywords that define your package best
    install_requires=[],  # no runtime dependencies
    classifiers=[
        'Development Status :: 3 - Alpha',
        # Chose either "3 - Alpha", "4 - Beta" or "5 - Production/Stable" as the current state of your package
        'Intended Audience :: Developers',  # Define that your audience are developers
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',  # Again, pick a license
        'Programming Language :: Python :: 3',  # Specify which pyhton versions that you want to support
    ],
)
| 58.833333
| 123
| 0.689802
|
from distutils.core import setup
setup(
name='TestLibrary_MR', packages=['TestLibrary_MR'], version='0.2', license='MIT', description='Just a test module', author='Md. Masudur Rahman', author_email='masudurhimel@gmail.com', url='https://github.com/masudurHimel/TestLibrary_MR', download_url='https://github.com/masudurHimel/TestLibrary_MR/archive/refs/tags/v_02.tar.gz', keywords=['test'], install_requires=[],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers', 'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', ],
)
| true
| true
|
f70926f0ac93c387914718f3fd574704afb0fbea
| 5,832
|
py
|
Python
|
cogs/encoding.py
|
vierofernando/username601
|
b5309b91b9da49a2a5cee1596084d450b987c17a
|
[
"MIT"
] | 48
|
2020-05-21T16:29:46.000Z
|
2021-12-30T00:09:45.000Z
|
cogs/encoding.py
|
vierofernando/username601
|
b5309b91b9da49a2a5cee1596084d450b987c17a
|
[
"MIT"
] | 5
|
2020-08-28T02:06:45.000Z
|
2021-11-08T11:02:36.000Z
|
cogs/encoding.py
|
vierofernando/username601
|
b5309b91b9da49a2a5cee1596084d450b987c17a
|
[
"MIT"
] | 24
|
2020-06-08T14:47:09.000Z
|
2021-09-28T18:46:13.000Z
|
import discord
from discord.ext import commands
from decorators import *
from io import BytesIO
from urllib.parse import quote
from base64 import b64encode
from json import loads
class encoding(commands.Cog):
    """Commands that transform text: ciphers, codes, and ascii art."""

    def __init__(self):
        # Per-character translation tables (morse, braille, cursive, fancy,
        # upside-down), keyed by lowercase character.
        with open("./assets/json/encode.json", "r") as f:
            self.ciphers = loads(f.read())

    def _translate(self, text, key):
        """Map each character of *text* through cipher table *key*;
        characters without an entry pass through unchanged."""
        return "".join(
            self.ciphers.get(char, {key: char})[key] for char in text.lower()
        )

    async def _upload_paste(self, ctx, payload, noun):
        """Upload *payload* to paste.mod.gg and reply with the link.

        Returns True on success, False when the paste service refused
        (callers then fall back to an inline reply/attachment).
        """
        response = await ctx.bot.http._HTTPClient__session.post(
            "https://paste.mod.gg/documents", data=payload)
        if response.status >= 400:
            return False
        json = await response.json()
        await ctx.success_embed(
            description=f"[**Click here to see the asciified {noun}.**](https://paste.mod.gg/{json['key']})")
        return True

    @command(["jumble"])
    @cooldown(2)
    @require_args()
    async def shuffle(self, ctx, *args):
        """Randomly shuffle the characters of the given text."""
        return await ctx.reply(ctx.bot.util.shuffle(" ".join(args)))

    @command(["morse-code"])
    @cooldown(5)
    @require_args()
    async def morse(self, ctx, *args):
        """Encode the text as space-separated morse code."""
        return await ctx.reply(" ".join(
            self.ciphers.get(char, {"morse": char})["morse"]
            for char in " ".join(args).lower()
        ))

    @command(["blind"])
    @cooldown(5)
    @require_args()
    async def braille(self, ctx, *args):
        """Transliterate the text into braille characters."""
        return await ctx.reply(self._translate(" ".join(args), "braille"))

    @command(["curve", "curve-text"])
    @cooldown(5)
    @require_args()
    async def cursive(self, ctx, *args):
        """Render the text with cursive unicode letters."""
        return await ctx.reply(self._translate(" ".join(args), "cursive"))

    @command(["fancy-text"])
    @cooldown(5)
    @require_args()
    async def fancy(self, ctx, *args):
        """Render the text with 'fancy' unicode letters."""
        return await ctx.reply(self._translate(" ".join(args), "fancy"))

    @command(["upside-down", "upsidedown", "flip-text", "textflip"])
    @cooldown(5)
    @require_args()
    async def fliptext(self, ctx, *args):
        """Render the text upside down."""
        return await ctx.reply(self._translate(" ".join(args), "upside-down"))

    @command()
    @cooldown(4)
    @require_args()
    @permissions(bot=['attach_files'])
    async def ascii(self, ctx, *args):
        """Asciify text (artii API) or an image; the `hastebin` flag uploads
        the result to a paste service instead of replying inline."""
        await ctx.trigger_typing()
        parser = ctx.bot.Parser(args)
        parser.parse(('hastebin',))
        if (not parser) or (not parser.has("image")):
            # Text mode: asciify the remaining words via the artii API.
            if not parser.other:
                return await ctx.bot.cmds.invalid_args(ctx)
            ascii = await ctx.bot.util.request(
                "http://artii.herokuapp.com/make",
                text=' '.join(parser.other)
            )
            # BUG FIX: the original cleanup here (`del ascii, image, ...`)
            # referenced `image`, which is never defined in this branch.
            if parser.has("hastebin") and await self._upload_paste(ctx, ascii, "text"):
                return
            return await ctx.reply(f'```{ascii[:2000]}```')
        # Image mode: asciify an image argument.
        parser.shift("image")
        image = await ctx.bot.Parser.parse_image(ctx, parser.other)
        string = await ctx.bot.Image.asciify(image)
        # BUG FIX: this branch previously tested an undefined local
        # `hastebin` (NameError at runtime); the flag lives on the parser.
        if parser.has("hastebin") and await self._upload_paste(ctx, string, "image"):
            return
        await ctx.bot.http.send_files(
            ctx.channel.id, content="",
            files=[discord.File(BytesIO(bytes(string, 'utf-8')), "asciified.txt")])

    @command()
    @cooldown(2)
    @permissions(bot=['attach_files'])
    @require_args()
    async def barcode(self, ctx, *args):
        """Render the text as a barcode image."""
        await ctx.trigger_typing()
        return await ctx.send_image(
            'http://www.barcode-generator.org/zint/api.php?bc_number=20&bc_data='
            + quote(' '.join(args))[:75])

    @command(['qrcode', 'qr-code'])
    @cooldown(2)
    @permissions(bot=['attach_files'])
    @require_args()
    async def qr(self, ctx, *args):
        """Render the text as a QR code image."""
        await ctx.trigger_typing()
        return await ctx.send_image(
            'https://api.qrserver.com/v1/create-qr-code/?size=150x150&data='
            + quote(' '.join(args))[:75])

    @command()
    @cooldown(2)
    @require_args()
    async def binary(self, ctx, *args):
        """Show the text as 8-bit binary code points."""
        return await ctx.reply(
            '```' + ''.join(map(lambda x: f"{ord(x):08b}", ' '.join(args)))[:2000] + '```')

    @command()
    @cooldown(2)
    @require_args(2)
    async def caesar(self, ctx, *args):
        """Caesar-shift the text by the numeric offset found in the args."""
        offset = ctx.bot.Parser.get_numbers(args)
        if not offset:
            return await ctx.bot.cmds.invalid_args(ctx)
        return await ctx.reply(ctx.bot.util.caesar(
            str(' '.join(args).replace(str(offset[0]), '')), offset[0]))

    @command()
    @cooldown(2)
    @require_args()
    async def atbash(self, ctx, *args):
        """Apply the atbash substitution cipher to the text."""
        return await ctx.reply(ctx.bot.util.atbash(' '.join(args)))

    @command()
    @cooldown(2)
    @require_args()
    async def reverse(self, ctx, *args):
        """Reverse the text."""
        return await ctx.reply(' '.join(args)[::-1])

    @command(['b64'])
    @cooldown(2)
    @require_args()
    async def base64(self, ctx, *args):
        """Base64-encode the text (ascii input only)."""
        return await ctx.reply(b64encode(' '.join(args).encode('ascii')).decode('ascii'))
def setup(client):
    """Entry point used by discord.py to register this cog on the bot."""
    cog = encoding()
    client.add_cog(cog)
| 35.560976
| 139
| 0.564986
|
import discord
from discord.ext import commands
from decorators import *
from io import BytesIO
from urllib.parse import quote
from base64 import b64encode
from json import loads
class encoding(commands.Cog):
def __init__(self):
self.ciphers = loads(open("./assets/json/encode.json", "r").read())
pass
@command(["jumble"])
@cooldown(2)
@require_args()
async def shuffle(self, ctx, *args):
return await ctx.reply(ctx.bot.util.shuffle(" ".join(args)))
@command(["morse-code"])
@cooldown(5)
@require_args()
async def morse(self, ctx, *args):
total = ""
for char in " ".join(args).lower():
total += " " + self.ciphers.get(char, { "morse": char })["morse"]
return await ctx.reply(total[1:])
@command(["blind"])
@cooldown(5)
@require_args()
async def braille(self, ctx, *args):
total = ""
for char in " ".join(args).lower():
total += self.ciphers.get(char, { "braille": char })["braille"]
return await ctx.reply(total)
@command(["curve", "curve-text"])
@cooldown(5)
@require_args()
async def cursive(self, ctx, *args):
total = ""
for char in " ".join(args).lower():
total += self.ciphers.get(char, { "cursive": char })["cursive"]
return await ctx.reply(total)
@command(["fancy-text"])
@cooldown(5)
@require_args()
async def fancy(self, ctx, *args):
total = ""
for char in " ".join(args).lower():
total += self.ciphers.get(char, { "fancy": char })["fancy"]
return await ctx.reply(total)
@command(["upside-down", "upsidedown", "flip-text", "textflip"])
@cooldown(5)
@require_args()
async def fliptext(self, ctx, *args):
total = ""
for char in " ".join(args).lower():
total += self.ciphers.get(char, { "upside-down": char })["upside-down"]
return await ctx.reply(total)
@command()
@cooldown(4)
@require_args()
@permissions(bot=['attach_files'])
async def ascii(self, ctx, *args):
await ctx.trigger_typing()
parser = ctx.bot.Parser(args)
parser.parse(('hastebin',))
if (not parser) or (not parser.has("image")):
if not parser.other:
return await ctx.bot.cmds.invalid_args(ctx)
ascii = await ctx.bot.util.request(
"http://artii.herokuapp.com/make",
text=' '.join(parser.other)
)
if parser.has("hastebin"):
try:
response = await ctx.bot.http._HTTPClient__session.post("https://paste.mod.gg/documents", data=ascii)
assert response.status < 400
json = await response.json()
await ctx.success_embed(description=f"[**Click here to see the asciified text.**](https://paste.mod.gg/{json['key']})")
del ascii, image, parser, json
return
except AssertionError:
pass
await ctx.reply(f'```{ascii[:2000]}```')
del ascii, parser
return
parser.shift("image")
image = await ctx.bot.Parser.parse_image(ctx, parser.other)
string = await ctx.bot.Image.asciify(image)
if hastebin:
try:
response = await ctx.bot.http._HTTPClient__session.post("https://paste.mod.gg/documents", data=string)
assert response.status < 400
json = await response.json()
await ctx.success_embed(description=f"[**Click here to see the asciified image.**](https://paste.mod.gg/{json['key']})")
del string, image, parser, hastebin, json
return
except AssertionError:
pass
await ctx.bot.http.send_files(ctx.channel.id, content="", files=[discord.File(BytesIO(bytes(string, 'utf-8')), "asciified.txt")])
del string, image, parser, hastebin
@command()
@cooldown(2)
@permissions(bot=['attach_files'])
@require_args()
async def barcode(self, ctx, *args):
await ctx.trigger_typing()
return await ctx.send_image('http://www.barcode-generator.org/zint/api.php?bc_number=20&bc_data=' + quote(' '.join(args))[:75])
@command(['qrcode', 'qr-code'])
@cooldown(2)
@permissions(bot=['attach_files'])
@require_args()
async def qr(self, ctx, *args):
await ctx.trigger_typing()
return await ctx.send_image('https://api.qrserver.com/v1/create-qr-code/?size=150x150&data=' + quote(' '.join(args))[:75])
@command()
@cooldown(2)
@require_args()
async def binary(self, ctx, *args):
return await ctx.reply('```'+''.join(map(lambda x: f"{ord(x):08b}", ' '.join(args)))[:2000]+'```')
@command()
@cooldown(2)
@require_args(2)
async def caesar(self, ctx, *args):
offset = ctx.bot.Parser.get_numbers(args)
if not offset:
return await ctx.bot.cmds.invalid_args(ctx)
return await ctx.reply(ctx.bot.util.caesar(str(' '.join(args).replace(str(offset[0]), '')), offset[0]))
@command()
@cooldown(2)
@require_args()
async def atbash(self, ctx, *args):
return await ctx.reply(ctx.bot.util.atbash(' '.join(args)))
@command()
@cooldown(2)
@require_args()
async def reverse(self, ctx, *args):
return await ctx.reply(' '.join(args)[::-1])
@command(['b64'])
@cooldown(2)
@require_args()
async def base64(self, ctx, *args):
return await ctx.reply(b64encode(' '.join(args).encode('ascii')).decode('ascii'))
def setup(client):
client.add_cog(encoding())
| true
| true
|
f709278271159bcb09d59ef159f718217946794c
| 1,237
|
py
|
Python
|
py/scrapeJson.py
|
mpaulweeks/edh-obscurity
|
58b6d34775111f5c111424ee51b186943ecd478d
|
[
"MIT"
] | null | null | null |
py/scrapeJson.py
|
mpaulweeks/edh-obscurity
|
58b6d34775111f5c111424ee51b186943ecd478d
|
[
"MIT"
] | null | null | null |
py/scrapeJson.py
|
mpaulweeks/edh-obscurity
|
58b6d34775111f5c111424ee51b186943ecd478d
|
[
"MIT"
] | null | null | null |
import json
import requests
EDHREC_BASE_URL = 'https://edhrec-json.s3.amazonaws.com/commanders/%s.json'
COMMANDER_PAGE_SLUGS = frozenset([
'w',
'u',
'b',
'r',
'g',
'colorless',
'wu',
'ub',
'br',
'rg',
'gw',
'wb',
'ur',
'bg',
'rw',
'gu',
'wub',
'ubr',
'brg',
'rgw',
'gwu',
'wbg',
'urw',
'bgu',
'rwb',
'gur',
'wubr',
'ubrg',
'brgw',
'rgwu',
'gwub',
'wubrg',
])
def scrape_commanders_json(color_slug):
    """Fetch the EDHREC commander page for *color_slug*.

    Returns a list of [card_name, deck_count] pairs.

    BUG FIX: on a non-200 response the original returned None, which made
    scrape_edhrec_json crash on counts.extend(None); return an empty list
    instead (still falsy, so truthiness checks behave the same).
    """
    url = EDHREC_BASE_URL % color_slug
    req = requests.get(url)
    print(req.status_code, url)
    if req.status_code != 200:
        return []
    json_obj = req.json()['container']['json_dict']
    cards = json_obj['cardlists'][0]['cardviews']
    counts = []
    for card in cards:
        # The label looks like "1234 decks"; the leading integer is the
        # number of decks running the card.
        card_name = card['name']
        card_count = int(card['label'].split(' ')[0])
        counts.append([card_name, card_count])
    return counts
def scrape_edhrec_json():
    """Scrape every commander colour page and return the combined counts.

    Prints each [card_name, deck_count] pair as a progress trace.
    """
    counts = []
    for slug in COMMANDER_PAGE_SLUGS:
        page_counts = scrape_commanders_json(slug)
        # Guard against failed fetches: the scraper returns a falsy value
        # (None/[]) on non-200 responses, which must not be extend()-ed.
        if page_counts:
            counts.extend(page_counts)
    for card in counts:
        print(card)
    return counts
if __name__ == "__main__":
    # Smoke test: scrape a single colour page ('b' = mono-black) and dump it.
    print(scrape_commanders_json('b'))
| 17.180556
| 75
| 0.547292
|
import json
import requests
EDHREC_BASE_URL = 'https://edhrec-json.s3.amazonaws.com/commanders/%s.json'
COMMANDER_PAGE_SLUGS = frozenset([
'w',
'u',
'b',
'r',
'g',
'colorless',
'wu',
'ub',
'br',
'rg',
'gw',
'wb',
'ur',
'bg',
'rw',
'gu',
'wub',
'ubr',
'brg',
'rgw',
'gwu',
'wbg',
'urw',
'bgu',
'rwb',
'gur',
'wubr',
'ubrg',
'brgw',
'rgwu',
'gwub',
'wubrg',
])
def scrape_commanders_json(color_slug):
url = EDHREC_BASE_URL % color_slug
req = requests.get(url)
print(req.status_code, url)
if(req.status_code != 200):
return
json_obj = req.json()['container']['json_dict']
cards = json_obj['cardlists'][0]['cardviews']
counts = []
for card in cards:
card_name = card['name']
card_count = int(card['label'].split(' ')[0])
counts.append([card_name, card_count])
return counts
def scrape_edhrec_json():
counts = []
for slug in COMMANDER_PAGE_SLUGS:
counts.extend(scrape_commanders_json(slug))
for card in counts:
print(card)
return counts
if __name__ == "__main__":
print(scrape_commanders_json('b'))
| true
| true
|
f70928592e8883c6be6fc9952f6de13f07725f2b
| 1,063
|
py
|
Python
|
Iterative Methods/gauss_jacobi.py
|
Hariharan-SV/Scientific-Computing
|
fccb065fe176f5fac6a463ec29f7e618dabd8099
|
[
"MIT"
] | null | null | null |
Iterative Methods/gauss_jacobi.py
|
Hariharan-SV/Scientific-Computing
|
fccb065fe176f5fac6a463ec29f7e618dabd8099
|
[
"MIT"
] | null | null | null |
Iterative Methods/gauss_jacobi.py
|
Hariharan-SV/Scientific-Computing
|
fccb065fe176f5fac6a463ec29f7e618dabd8099
|
[
"MIT"
] | null | null | null |
import get_coefficients_as_list
import check_diagonal_dominant
# function that computes in gauss jacobi method
def gauss_jacobi(no_of_unknowns):
    """Run 6 Gauss-Jacobi iterations on an interactively-entered system.

    Reads the augmented coefficient matrix (n rows of n+1 values) via
    get_coefficients_as_list and refuses to iterate unless the matrix is
    diagonally dominant — a sufficient convergence condition for Jacobi.
    Prints the solution vector after every iteration; returns None.
    """
    coefficient_list = get_coefficients_as_list.get_coefficients_as_list(no_of_unknowns)
    if check_diagonal_dominant.is_diagonally_dominant(coefficient_list):
        print("Computing...")
    else:
        print("Matrix failed to be diagonally dominant\nExiting...")
        return
    factors = [0] * no_of_unknowns
    for i in range(0, 6):
        # BUG FIX: the original rebound `factors` to the work vector inside
        # the component loop, so later components were computed from
        # already-updated values (Gauss-Seidel behaviour).  A Jacobi sweep
        # must compute every component from the PREVIOUS iterate only.
        new_factors = [0] * no_of_unknowns
        for j in range(0, no_of_unknowns):
            # Sum of off-diagonal terms a[j][k] * x[k] (k != j).
            diff = 0
            for k in range(0, no_of_unknowns):
                if k != j:
                    diff = diff + coefficient_list[j][k] * factors[k]
            # x_j = (b_j - sum) / a_jj ; column n holds the RHS vector b.
            new_factors[j] = (coefficient_list[j][no_of_unknowns] - diff) / coefficient_list[j][j]
        factors = new_factors
        print("At iteration ", i + 1, " factors are ", factors)
| 42.52
| 86
| 0.71778
|
import get_coefficients_as_list
import check_diagonal_dominant
def gauss_jacobi(no_of_unknowns):
coefficient_list = get_coefficients_as_list.get_coefficients_as_list(no_of_unknowns)
if check_diagonal_dominant.is_diagonally_dominant(coefficient_list):
print("Computing...")
else:
print("Matrix failed to be diagonally dominant\nExiting...")
return
factors = [0]*(no_of_unknowns)
sample_factors = [0]*(no_of_unknowns)
for i in range(0,6):
for j in range(0,no_of_unknowns):
diff = 0
for k in range(0,j):
diff = diff + coefficient_list[j][k]*factors[k]
for k in range(j+1,no_of_unknowns):
diff = diff + coefficient_list[j][k]*factors[k]
diff = (coefficient_list[j][no_of_unknowns]-diff)/coefficient_list[j][j]
sample_factors = sample_factors[0:j]+[diff]+sample_factors[j+1:]
factors = sample_factors
print("At iteration ",i+1," factors are ",factors)
| true
| true
|
f7092877b036548f0ac6c9dcc5a5085434c79104
| 14,364
|
py
|
Python
|
cinder/volume/flows/manager/manage_existing_snapshot.py
|
ISCAS-VDI/cinder-base
|
9529102548beef074264aaef31fa8267db99df61
|
[
"Apache-2.0"
] | null | null | null |
cinder/volume/flows/manager/manage_existing_snapshot.py
|
ISCAS-VDI/cinder-base
|
9529102548beef074264aaef31fa8267db99df61
|
[
"Apache-2.0"
] | null | null | null |
cinder/volume/flows/manager/manage_existing_snapshot.py
|
ISCAS-VDI/cinder-base
|
9529102548beef074264aaef31fa8267db99df61
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
import taskflow.engines
from taskflow.patterns import linear_flow
from taskflow.types import failure as ft
from cinder import exception
from cinder import flow_utils
from cinder.i18n import _, _LE, _LI
from cinder import objects
from cinder import quota
from cinder.volume.flows import common as flow_common
from cinder.volume import utils as volume_utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
QUOTAS = quota.QUOTAS
ACTION = 'snapshot:manage_existing'
class ExtractSnapshotRefTask(flow_utils.CinderTask):
    """Look up and provide the snapshot object for the given snapshot id."""

    default_provides = 'snapshot_ref'

    def __init__(self, db):
        super(ExtractSnapshotRefTask, self).__init__(addons=[ACTION])
        self.db = db

    def execute(self, context, snapshot_id):
        # Fetch the snapshot row; if it was deleted before this task runs,
        # the lookup raises and the whole flow reverts.
        snapshot_ref = objects.Snapshot.get_by_id(context, snapshot_id)
        LOG.debug("ExtractSnapshotRefTask return"
                  " snapshot_ref: %s", snapshot_ref)
        return snapshot_ref

    def revert(self, context, snapshot_id, result, **kwargs):
        # Nothing to undo when execute() itself produced the failure.
        if isinstance(result, ft.Failure):
            return
        flow_common.error_out_snapshot(context, self.db, snapshot_id)
        LOG.error(_LE("Snapshot %s: create failed"), snapshot_id)
class NotifySnapshotActionTask(flow_utils.CinderTask):
    """Emit a usage notification for the given snapshot when called.

    Reversion strategy: N/A
    """

    def __init__(self, db, event_suffix, host):
        super(NotifySnapshotActionTask, self).__init__(
            addons=[ACTION, event_suffix])
        self.db = db
        self.event_suffix = event_suffix
        self.host = host

    def execute(self, context, snapshot_ref):
        snapshot_id = snapshot_ref['id']
        try:
            volume_utils.notify_about_snapshot_usage(
                context, snapshot_ref, self.event_suffix, host=self.host)
        except exception.CinderException:
            # A failed notification is advisory only — swallow it rather
            # than fail the surrounding workflow.
            LOG.exception(_LE("Failed notifying about the snapshot "
                              "action %(event)s for snapshot %(snp_id)s."),
                          {'event': self.event_suffix,
                           'snp_id': snapshot_id})
class PrepareForQuotaReservationTask(flow_utils.CinderTask):
    """Ask the volume driver how large the existing snapshot is."""

    default_provides = set(['size', 'snapshot_properties'])

    def __init__(self, db, driver):
        super(PrepareForQuotaReservationTask, self).__init__(addons=[ACTION])
        self.db = db
        self.driver = driver

    def execute(self, context, snapshot_ref, manage_existing_ref):
        snapshot_id = snapshot_ref['id']
        # An uninitialized driver cannot size the snapshot; error the
        # snapshot out and abort the flow.
        if not self.driver.initialized:
            driver_name = (self.driver.configuration.
                           safe_get('volume_backend_name'))
            LOG.error(_LE("Unable to manage existing snapshot. "
                          "Volume driver %s not initialized."), driver_name)
            flow_common.error_out_snapshot(context, self.db, snapshot_id,
                                           reason=_("Volume driver %s "
                                                    "not initialized.") %
                                           driver_name)
            raise exception.DriverNotInitialized()
        size = self.driver.manage_existing_snapshot_get_size(
            snapshot=snapshot_ref,
            existing_ref=manage_existing_ref)
        return {'size': size, 'snapshot_properties': snapshot_ref}
class QuotaReserveTask(flow_utils.CinderTask):
    """Reserves a single snapshot with the given size.

    Reversion strategy: rollback the quota reservation.

    Warning Warning: if the process that is running this reserve and commit
    process fails (or is killed before the quota is rolled back or committed
    it does appear like the quota will never be rolled back). This makes
    software upgrades hard (inflight operations will need to be stopped or
    allowed to complete before the upgrade can occur). *In the future* when
    taskflow has persistence built-in this should be easier to correct via
    an automated or manual process.
    """

    default_provides = set(['reservations'])

    def __init__(self):
        super(QuotaReserveTask, self).__init__(addons=[ACTION])

    def execute(self, context, size, optional_args):
        try:
            # Gigabyte accounting is skipped when the deployment exempts
            # snapshots from GB quota; the snapshot count is always reserved.
            if CONF.no_snapshot_gb_quota:
                reserve_opts = {'snapshots': 1}
            else:
                reserve_opts = {'snapshots': 1, 'gigabytes': size}
            reservations = QUOTAS.reserve(context, **reserve_opts)
            return {
                'reservations': reservations,
            }
        except exception.OverQuota as e:
            overs = e.kwargs['overs']
            quotas = e.kwargs['quotas']
            usages = e.kwargs['usages']
            # Delegates over-quota reporting to the shared helper, which
            # raises the user-facing quota error for us.
            volume_utils.process_reserve_over_quota(context, overs, usages,
                                                    quotas, size)

    def revert(self, context, result, optional_args, **kwargs):
        # We never produced a result and therefore can't destroy anything.
        if isinstance(result, ft.Failure):
            return
        if optional_args['is_quota_committed']:
            # The reservations have already been committed and can not be
            # rolled back at this point (QuotaCommitTask's revert handles
            # undoing a committed reservation instead).
            return
        # We actually produced an output that we can revert so lets attempt
        # to use said output to rollback the reservation.
        reservations = result['reservations']
        try:
            QUOTAS.rollback(context, reservations)
        except exception.CinderException:
            # We are already reverting, therefore we should silence this
            # exception since a second exception being active will be bad.
            LOG.exception(_LE("Failed rolling back quota for"
                              " %s reservations."), reservations)
class QuotaCommitTask(flow_utils.CinderTask):
    """Commits the reservation.

    Reversion strategy: N/A (the rollback will be handled by the task that did
    the initial reservation (see: QuotaReserveTask).

    Warning Warning: if the process that is running this reserve and commit
    process fails (or is killed before the quota is rolled back or committed
    it does appear like the quota will never be rolled back). This makes
    software upgrades hard (inflight operations will need to be stopped or
    allowed to complete before the upgrade can occur). *In the future* when
    taskflow has persistence built-in this should be easier to correct via
    an automated or manual process.
    """

    def __init__(self):
        super(QuotaCommitTask, self).__init__(addons=[ACTION])

    def execute(self, context, reservations, snapshot_properties,
                optional_args):
        QUOTAS.commit(context, reservations)
        # updating is_quota_committed attribute of optional_args dictionary
        # — this tells QuotaReserveTask.revert() that a plain rollback is no
        # longer possible.
        optional_args['is_quota_committed'] = True
        return {'snapshot_properties': snapshot_properties}

    def revert(self, context, result, **kwargs):
        # We never produced a result and therefore can't destroy anything.
        if isinstance(result, ft.Failure):
            return
        # Undo the commit by reserving and committing the negative deltas
        # (one fewer snapshot, minus its gigabytes).
        snapshot = result['snapshot_properties']
        try:
            reserve_opts = {'snapshots': -1,
                            'gigabytes': -snapshot['volume_size']}
            reservations = QUOTAS.reserve(context,
                                          project_id=context.project_id,
                                          **reserve_opts)
            if reservations:
                QUOTAS.commit(context, reservations,
                              project_id=context.project_id)
        except Exception:
            # Best-effort: a failed quota correction is logged, not raised,
            # because we are already in a revert path.
            LOG.exception(_LE("Failed to update quota while deleting "
                              "snapshots: %s"), snapshot['id'])
class ManageExistingTask(flow_utils.CinderTask):
    """Brings an existing snapshot under Cinder management."""

    default_provides = set(['snapshot', 'new_status'])

    def __init__(self, db, driver):
        super(ManageExistingTask, self).__init__(addons=[ACTION])
        self.db = db
        self.driver = driver

    def execute(self, context, snapshot_ref, manage_existing_ref, size):
        # Let the driver claim the backend snapshot; fold whatever model
        # changes it reports together with the size measured earlier.
        model_update = self.driver.manage_existing_snapshot(
            snapshot=snapshot_ref,
            existing_ref=manage_existing_ref) or {}
        model_update.update({'size': size})
        try:
            snapshot_object = objects.Snapshot.get_by_id(context,
                                                         snapshot_ref['id'])
            snapshot_object.update(model_update)
            snapshot_object.save()
        except exception.CinderException:
            LOG.exception(_LE("Failed updating model of snapshot "
                              "%(snapshot_id)s with creation provided model "
                              "%(model)s."),
                          {'snapshot_id': snapshot_ref['id'],
                           'model': model_update})
            raise
        return {'snapshot': snapshot_ref, 'new_status': 'available'}
class CreateSnapshotOnFinishTask(NotifySnapshotActionTask):
    """Perform final snapshot actions.

    When a snapshot is created successfully it is expected that MQ
    notifications and database updates will occur to 'signal' to others that
    the snapshot is now ready for usage. This task does those notifications and
    updates in a reliable manner (not re-raising exceptions if said actions can
    not be triggered).

    Reversion strategy: N/A
    """

    def __init__(self, db, event_suffix, host):
        super(CreateSnapshotOnFinishTask, self).__init__(db, event_suffix,
                                                         host)

    def execute(self, context, snapshot, new_status):
        # Flip the snapshot to its final status, then reuse the parent
        # task's execute() to emit the "end" usage notification.
        LOG.debug("Begin to call CreateSnapshotOnFinishTask execute.")
        snapshot_id = snapshot['id']
        LOG.debug("New status: %s", new_status)
        update = {
            'status': new_status
        }
        try:
            # TODO(harlowja): is it acceptable to only log if this fails??
            # or are there other side-effects that this will cause if the
            # status isn't updated correctly (aka it will likely be stuck in
            # 'building' if this fails)??
            snapshot_object = objects.Snapshot.get_by_id(context,
                                                         snapshot_id)
            snapshot_object.update(update)
            snapshot_object.save()
            # Now use the parent to notify.
            super(CreateSnapshotOnFinishTask, self).execute(context, snapshot)
        except exception.CinderException:
            LOG.exception(_LE("Failed updating snapshot %(snapshot_id)s with "
                              "%(update)s."), {'snapshot_id': snapshot_id,
                                               'update': update})
        # Even if the update fails, the snapshot is ready.
        LOG.info(_LI("Snapshot %s created successfully."), snapshot_id)
def get_flow(context, db, driver, host, snapshot_id, ref):
    """Constructs and returns the manager entry point flow.

    Builds the linear taskflow that brings an existing backend snapshot
    under Cinder management: look up the snapshot record, notify start,
    size the snapshot, reserve and commit quota, let the driver manage it,
    then mark it available and notify end.
    """
    # BUG FIX: the original format string read "snapshot_id=(snapshot_id)s"
    # (missing '%'), so the snapshot id was never interpolated into the
    # debug line.
    LOG.debug("Input parameters: context=%(context)s, db=%(db)s,"
              "driver=%(driver)s, host=%(host)s, "
              "snapshot_id=%(snapshot_id)s, ref=%(ref)s.",
              {'context': context,
               'db': db,
               'driver': driver,
               'host': host,
               'snapshot_id': snapshot_id,
               'ref': ref}
              )
    flow_name = ACTION.replace(":", "_") + "_manager"
    snapshot_flow = linear_flow.Flow(flow_name)
    # This injects the initial starting flow values into the workflow so that
    # the dependency order of the tasks provides/requires can be correctly
    # determined.
    create_what = {
        'context': context,
        'snapshot_id': snapshot_id,
        'manage_existing_ref': ref,
        'optional_args': {'is_quota_committed': False}
    }
    notify_start_msg = "manage_existing_snapshot.start"
    notify_end_msg = "manage_existing_snapshot.end"
    snapshot_flow.add(ExtractSnapshotRefTask(db),
                      NotifySnapshotActionTask(db, notify_start_msg,
                                               host=host),
                      PrepareForQuotaReservationTask(db, driver),
                      QuotaReserveTask(),
                      ManageExistingTask(db, driver),
                      QuotaCommitTask(),
                      CreateSnapshotOnFinishTask(db, notify_end_msg,
                                                 host=host))
    LOG.debug("Begin to return taskflow.engines."
              "load(snapshot_flow,store=create_what).")
    # Now load (but do not run) the flow using the provided initial data.
    return taskflow.engines.load(snapshot_flow, store=create_what)
| 41.755814
| 79
| 0.61675
|
from oslo_config import cfg
from oslo_log import log as logging
import taskflow.engines
from taskflow.patterns import linear_flow
from taskflow.types import failure as ft
from cinder import exception
from cinder import flow_utils
from cinder.i18n import _, _LE, _LI
from cinder import objects
from cinder import quota
from cinder.volume.flows import common as flow_common
from cinder.volume import utils as volume_utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
QUOTAS = quota.QUOTAS
ACTION = 'snapshot:manage_existing'
class ExtractSnapshotRefTask(flow_utils.CinderTask):
default_provides = 'snapshot_ref'
def __init__(self, db):
super(ExtractSnapshotRefTask, self).__init__(addons=[ACTION])
self.db = db
def execute(self, context, snapshot_id):
snapshot_ref = objects.Snapshot.get_by_id(context, snapshot_id)
LOG.debug("ExtractSnapshotRefTask return"
" snapshot_ref: %s", snapshot_ref)
return snapshot_ref
def revert(self, context, snapshot_id, result, **kwargs):
if isinstance(result, ft.Failure):
return
flow_common.error_out_snapshot(context, self.db, snapshot_id)
LOG.error(_LE("Snapshot %s: create failed"), snapshot_id)
class NotifySnapshotActionTask(flow_utils.CinderTask):
def __init__(self, db, event_suffix, host):
super(NotifySnapshotActionTask, self).__init__(addons=[ACTION,
event_suffix])
self.db = db
self.event_suffix = event_suffix
self.host = host
def execute(self, context, snapshot_ref):
snapshot_id = snapshot_ref['id']
try:
volume_utils.notify_about_snapshot_usage(context, snapshot_ref,
self.event_suffix,
host=self.host)
except exception.CinderException:
# not always information that must be sent for snapshots to operate
LOG.exception(_LE("Failed notifying about the snapshot "
"action %(event)s for snapshot %(snp_id)s."),
{'event': self.event_suffix,
'snp_id': snapshot_id})
class PrepareForQuotaReservationTask(flow_utils.CinderTask):
default_provides = set(['size', 'snapshot_properties'])
def __init__(self, db, driver):
super(PrepareForQuotaReservationTask, self).__init__(addons=[ACTION])
self.db = db
self.driver = driver
def execute(self, context, snapshot_ref, manage_existing_ref):
snapshot_id = snapshot_ref['id']
if not self.driver.initialized:
driver_name = (self.driver.configuration.
safe_get('volume_backend_name'))
LOG.error(_LE("Unable to manage existing snapshot. "
"Volume driver %s not initialized."), driver_name)
flow_common.error_out_snapshot(context, self.db, snapshot_id,
reason=_("Volume driver %s "
"not initialized.") %
driver_name)
raise exception.DriverNotInitialized()
size = self.driver.manage_existing_snapshot_get_size(
snapshot=snapshot_ref,
existing_ref=manage_existing_ref)
return {'size': size,
'snapshot_properties': snapshot_ref}
class QuotaReserveTask(flow_utils.CinderTask):
default_provides = set(['reservations'])
def __init__(self):
super(QuotaReserveTask, self).__init__(addons=[ACTION])
def execute(self, context, size, optional_args):
try:
if CONF.no_snapshot_gb_quota:
reserve_opts = {'snapshots': 1}
else:
reserve_opts = {'snapshots': 1, 'gigabytes': size}
reservations = QUOTAS.reserve(context, **reserve_opts)
return {
'reservations': reservations,
}
except exception.OverQuota as e:
overs = e.kwargs['overs']
quotas = e.kwargs['quotas']
usages = e.kwargs['usages']
volume_utils.process_reserve_over_quota(context, overs, usages,
quotas, size)
def revert(self, context, result, optional_args, **kwargs):
# We never produced a result and therefore can't destroy anything.
if isinstance(result, ft.Failure):
return
if optional_args['is_quota_committed']:
return
reservations = result['reservations']
try:
QUOTAS.rollback(context, reservations)
except exception.CinderException:
LOG.exception(_LE("Failed rolling back quota for"
" %s reservations."), reservations)
class QuotaCommitTask(flow_utils.CinderTask):
def __init__(self):
super(QuotaCommitTask, self).__init__(addons=[ACTION])
def execute(self, context, reservations, snapshot_properties,
optional_args):
QUOTAS.commit(context, reservations)
optional_args['is_quota_committed'] = True
return {'snapshot_properties': snapshot_properties}
def revert(self, context, result, **kwargs):
if isinstance(result, ft.Failure):
return
snapshot = result['snapshot_properties']
try:
reserve_opts = {'snapshots': -1,
'gigabytes': -snapshot['volume_size']}
reservations = QUOTAS.reserve(context,
project_id=context.project_id,
**reserve_opts)
if reservations:
QUOTAS.commit(context, reservations,
project_id=context.project_id)
except Exception:
LOG.exception(_LE("Failed to update quota while deleting "
"snapshots: %s"), snapshot['id'])
class ManageExistingTask(flow_utils.CinderTask):
default_provides = set(['snapshot', 'new_status'])
def __init__(self, db, driver):
super(ManageExistingTask, self).__init__(addons=[ACTION])
self.db = db
self.driver = driver
def execute(self, context, snapshot_ref, manage_existing_ref, size):
model_update = self.driver.manage_existing_snapshot(
snapshot=snapshot_ref,
existing_ref=manage_existing_ref)
if not model_update:
model_update = {}
model_update.update({'size': size})
try:
snapshot_object = objects.Snapshot.get_by_id(context,
snapshot_ref['id'])
snapshot_object.update(model_update)
snapshot_object.save()
except exception.CinderException:
LOG.exception(_LE("Failed updating model of snapshot "
"%(snapshot_id)s with creation provided model "
"%(model)s."),
{'snapshot_id': snapshot_ref['id'],
'model': model_update})
raise
return {'snapshot': snapshot_ref,
'new_status': 'available'}
class CreateSnapshotOnFinishTask(NotifySnapshotActionTask):
def __init__(self, db, event_suffix, host):
super(CreateSnapshotOnFinishTask, self).__init__(db, event_suffix,
host)
def execute(self, context, snapshot, new_status):
LOG.debug("Begin to call CreateSnapshotOnFinishTask execute.")
snapshot_id = snapshot['id']
LOG.debug("New status: %s", new_status)
update = {
'status': new_status
}
try:
# TODO(harlowja): is it acceptable to only log if this fails??
# or are there other side-effects that this will cause if the
# status isn't updated correctly (aka it will likely be stuck in
snapshot_object = objects.Snapshot.get_by_id(context,
snapshot_id)
snapshot_object.update(update)
snapshot_object.save()
super(CreateSnapshotOnFinishTask, self).execute(context, snapshot)
except exception.CinderException:
LOG.exception(_LE("Failed updating snapshot %(snapshot_id)s with "
"%(update)s."), {'snapshot_id': snapshot_id,
'update': update})
LOG.info(_LI("Snapshot %s created successfully."), snapshot_id)
def get_flow(context, db, driver, host, snapshot_id, ref):
    """Construct and load the taskflow engine for managing a snapshot.

    The linear flow: extract the snapshot record, emit the start
    notification, prepare and reserve quota, let the driver adopt the
    backend snapshot, commit quota, then mark the snapshot available and
    emit the end notification.

    :param context: request context threaded through every task
    :param db: database API handle
    :param driver: volume driver that performs the actual manage operation
    :param host: host emitting the notifications
    :param snapshot_id: id of the snapshot record being managed
    :param ref: driver-specific reference to the existing backend snapshot
    :returns: a loaded taskflow engine ready to run
    """
    # BUG FIX: the snapshot_id placeholder was written as "(snapshot_id)s"
    # (missing '%'), so the id was never interpolated into the debug line.
    LOG.debug("Input parameters: context=%(context)s, db=%(db)s,"
              "driver=%(driver)s, host=%(host)s, "
              "snapshot_id=%(snapshot_id)s, ref=%(ref)s.",
              {'context': context,
               'db': db,
               'driver': driver,
               'host': host,
               'snapshot_id': snapshot_id,
               'ref': ref}
              )
    flow_name = ACTION.replace(":", "_") + "_manager"
    snapshot_flow = linear_flow.Flow(flow_name)
    # Values injected into the flow's storage before it runs.
    create_what = {
        'context': context,
        'snapshot_id': snapshot_id,
        'manage_existing_ref': ref,
        'optional_args': {'is_quota_committed': False}
    }
    notify_start_msg = "manage_existing_snapshot.start"
    notify_end_msg = "manage_existing_snapshot.end"
    snapshot_flow.add(ExtractSnapshotRefTask(db),
                      NotifySnapshotActionTask(db, notify_start_msg,
                                               host=host),
                      PrepareForQuotaReservationTask(db, driver),
                      QuotaReserveTask(),
                      ManageExistingTask(db, driver),
                      QuotaCommitTask(),
                      CreateSnapshotOnFinishTask(db, notify_end_msg,
                                                 host=host))
    LOG.debug("Begin to return taskflow.engines."
              "load(snapshot_flow,store=create_what).")
    return taskflow.engines.load(snapshot_flow, store=create_what)
| true
| true
|
f7092a6a9bede6870067db1af0ef8ca88e82b286
| 2,116
|
py
|
Python
|
setup.py
|
ZLLentz/pcdscalc
|
6279d3eb8bd62da0e5ac9d9f3b451519e5f13aea
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
setup.py
|
ZLLentz/pcdscalc
|
6279d3eb8bd62da0e5ac9d9f3b451519e5f13aea
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
setup.py
|
ZLLentz/pcdscalc
|
6279d3eb8bd62da0e5ac9d9f3b451519e5f13aea
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
# Standard setuptools + versioneer packaging script for pcdscalc.
import sys
from os import path
from setuptools import find_packages, setup
import versioneer

# Abort early, with upgrade instructions, on interpreters older than 3.6.
min_version = (3, 6)
if sys.version_info < min_version:
    error = """
pcdscalc does not support Python {0}.{1}.
Python {2}.{3} and above is required. Check your Python version like so:
python3 --version
This may be due to an out-of-date pip. Make sure you have pip >= 9.0.1.
Upgrade pip like so:
pip install --upgrade pip
""".format(*sys.version_info[:2], *min_version)
    sys.exit(error)

here = path.abspath(path.dirname(__file__))

# The long description is taken verbatim from the repository README.
with open(path.join(here, 'README.rst'), encoding='utf-8') as readme_file:
    readme = readme_file.read()

with open(path.join(here, 'requirements.txt')) as requirements_file:
    # Parse requirements.txt, ignoring any commented-out lines.
    requirements = [line for line in requirements_file.read().splitlines()
                    if not line.startswith('#')]

# setuptools cannot install VCS URLs via install_requires, so surface any
# git+ entries for the user to install by hand.
git_requirements = [r for r in requirements if r.startswith('git+')]
if git_requirements:
    print('User must install the following packages manually:')
    print()
    print("\n".join(f'* {r}' for r in git_requirements))
    print()

setup(
    name='pcdscalc',
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    license='BSD',
    author='SLAC National Accelerator Laboratory',
    packages=find_packages(exclude=['docs', 'tests']),
    description='PCDS Calculation Routines',
    long_description=readme,
    url='https://github.com/pcdshub/pcdscalc',  # noqa
    entry_points={
        'console_scripts': [
            # 'pcdscalc=pcdscalc.__main__:main', # noqa
        ],
    },
    include_package_data=True,
    package_data={
        'pcdscalc': [
            # When adding files here, remember to update MANIFEST.in as well,
            # or else they will not be included in the distribution on PyPI!
            # 'path/to/data_file',
        ]
    },
    install_requires=requirements,
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Natural Language :: English',
        'Programming Language :: Python :: 3',
    ],
)
| 28.594595
| 77
| 0.654537
|
import sys
from os import path
from setuptools import find_packages, setup
import versioneer
min_version = (3, 6)
if sys.version_info < min_version:
error = """
pcdscalc does not support Python {0}.{1}.
Python {2}.{3} and above is required. Check your Python version like so:
python3 --version
This may be due to an out-of-date pip. Make sure you have pip >= 9.0.1.
Upgrade pip like so:
pip install --upgrade pip
""".format(*sys.version_info[:2], *min_version)
sys.exit(error)
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as readme_file:
readme = readme_file.read()
with open(path.join(here, 'requirements.txt')) as requirements_file:
requirements = [line for line in requirements_file.read().splitlines()
if not line.startswith('#')]
git_requirements = [r for r in requirements if r.startswith('git+')]
if git_requirements:
print('User must install the following packages manually:')
print()
print("\n".join(f'* {r}' for r in git_requirements))
print()
setup(
name='pcdscalc',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
license='BSD',
author='SLAC National Accelerator Laboratory',
packages=find_packages(exclude=['docs', 'tests']),
description='PCDS Calculation Routines',
long_description=readme,
url='https://github.com/pcdshub/pcdscalc', entry_points={
'console_scripts': [
],
},
include_package_data=True,
package_data={
'pcdscalc': [
]
},
install_requires=requirements,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Natural Language :: English',
'Programming Language :: Python :: 3',
],
)
| true
| true
|
f7092b83133107e338da2d33a559c9985e7b4a42
| 2,967
|
py
|
Python
|
anchore_engine/db/db_accounts.py
|
ognjen-it/anchore-engine
|
02eb4b01b544c2ec8755326731d31ec2b1f265f6
|
[
"Apache-2.0"
] | 1
|
2019-06-27T08:47:48.000Z
|
2019-06-27T08:47:48.000Z
|
anchore_engine/db/db_accounts.py
|
ognjen-it/anchore-engine
|
02eb4b01b544c2ec8755326731d31ec2b1f265f6
|
[
"Apache-2.0"
] | 4
|
2020-11-07T00:16:02.000Z
|
2020-11-08T20:52:06.000Z
|
anchore_engine/db/db_accounts.py
|
mcburne/anchore-engine
|
de3c5bea6c0628fd611b027fc1d9e58b7e8d15a3
|
[
"Apache-2.0"
] | 1
|
2019-11-23T03:39:28.000Z
|
2019-11-23T03:39:28.000Z
|
"""
Interface to the accounts table. Data format is dicts, not objects.
"""
from anchore_engine.db import Account, AccountTypes, AccountStates
from anchore_engine.db.entities.common import anchore_now
class AccountNotFoundError(Exception):
    """Raised when a lookup references an account name that does not exist."""

    def __init__(self, account_name):
        msg = 'User account not found. Name={}'.format(account_name)
        super(AccountNotFoundError, self).__init__(msg)
        # Keep the offending name available to handlers.
        self.account_name = account_name
class AccountAlreadyExistsError(Exception):
    """Raised when creating an account whose name is already taken."""

    def __init__(self, account_name):
        msg = 'User account already exists. name={}'.format(account_name)
        super(AccountAlreadyExistsError, self).__init__(msg)
        # Keep the conflicting name available to handlers.
        self.account_name = account_name
class InvalidStateError(Exception):
    """Raised on a disallowed account state transition."""

    def __init__(self, current_state, desired_state):
        msg = ('Invalid account state change requested. '
               'Cannot go from state {} to state {}'.format(
                   current_state.value, desired_state.value))
        super(InvalidStateError, self).__init__(msg)
        # Preserve both ends of the rejected transition for handlers.
        self.current_state = current_state
        self.desired_state = desired_state
def add(account_name, state=AccountStates.enabled, account_type=AccountTypes.user, email=None, session=None):
    """Create and persist a new account row, returning it as a dict.

    :raises AccountAlreadyExistsError: if the name is already taken
    """
    existing = session.query(Account).filter_by(name=account_name).one_or_none()
    if existing:
        raise AccountAlreadyExistsError(account_name)
    record = Account()
    record.name = account_name
    record.state = state
    record.type = account_type
    record.email = email
    record.created_at = anchore_now()
    record.last_updated = anchore_now()
    session.add(record)
    return record.to_dict()
def update_state(name, new_state, session=None):
    """Update the state of an account, enforcing allowed transitions.

    Allowed: active -> disabled, disabled -> active, disabled -> deleting.
    Deleting is terminal and reachable only from disabled.

    :param name: account name
    :param new_state: target AccountStates value
    :param session: database session
    :return: updated account as a dict
    :raises AccountNotFoundError: if no such account exists
    :raises InvalidStateError: on a disallowed transition
    """
    accnt = session.query(Account).filter_by(name=name).one_or_none()
    if accnt is None:
        raise AccountNotFoundError(name)
    # Deleting is terminal; an enabled account must be disabled first.
    already_deleting = accnt.state == AccountStates.deleting
    enabled_to_deleting = (accnt.state == AccountStates.enabled
                           and new_state == AccountStates.deleting)
    if already_deleting or enabled_to_deleting:
        raise InvalidStateError(accnt.state, new_state)
    accnt.state = new_state
    return accnt.to_dict()
def get_all(with_state=None, session=None):
    """Return all accounts as dicts, optionally filtered by state."""
    query = session.query(Account)
    if with_state is not None:
        query = query.filter(Account.state == with_state)
    return [record.to_dict() for record in query]
def get(name, session=None):
    """Return the named account as a dict, or None if it does not exist."""
    record = session.query(Account).filter_by(name=name).one_or_none()
    return record.to_dict() if record else None
def delete(name, session=None):
    """Delete the named account; return True if a row was removed."""
    record = session.query(Account).filter_by(name=name).one_or_none()
    if not record:
        return False
    session.delete(record)
    return True
| 31.56383
| 175
| 0.713515
|
from anchore_engine.db import Account, AccountTypes, AccountStates
from anchore_engine.db.entities.common import anchore_now
class AccountNotFoundError(Exception):
def __init__(self, account_name):
super(AccountNotFoundError, self).__init__('User account not found. Name={}'.format(account_name))
self.account_name = account_name
class AccountAlreadyExistsError(Exception):
def __init__(self, account_name):
super(AccountAlreadyExistsError, self).__init__('User account already exists. name={}'.format(account_name))
self.account_name = account_name
class InvalidStateError(Exception):
def __init__(self, current_state, desired_state):
super(InvalidStateError, self).__init__('Invalid account state change requested. Cannot go from state {} to state {}'.format(current_state.value, desired_state.value))
self.current_state = current_state
self.desired_state = desired_state
def add(account_name, state=AccountStates.enabled, account_type=AccountTypes.user, email=None, session=None):
found_account = session.query(Account).filter_by(name=account_name).one_or_none()
if found_account:
raise AccountAlreadyExistsError(account_name)
accnt = Account()
accnt.name = account_name
accnt.state = state
accnt.type = account_type
accnt.email = email
accnt.created_at = anchore_now()
accnt.last_updated = anchore_now()
session.add(accnt)
return accnt.to_dict()
def update_state(name, new_state, session=None):
accnt = session.query(Account).filter_by(name=name).one_or_none()
if not accnt:
raise AccountNotFoundError(name)
if accnt.state == AccountStates.deleting or (accnt.state == AccountStates.enabled and new_state == AccountStates.deleting):
raise InvalidStateError(accnt.state, new_state)
accnt.state = new_state
return accnt.to_dict()
def get_all(with_state=None, session=None):
if with_state is not None:
return [x.to_dict() for x in session.query(Account).filter(Account.state == with_state)]
else:
return [x.to_dict() for x in session.query(Account)]
def get(name, session=None):
accnt = session.query(Account).filter_by(name=name).one_or_none()
if accnt:
return accnt.to_dict()
else:
return None
def delete(name, session=None):
accnt = session.query(Account).filter_by(name=name).one_or_none()
if accnt:
session.delete(accnt)
return True
else:
return False
| true
| true
|
f7092c10da42e20fd36d0a193c9d2a7e83185c7d
| 22,395
|
py
|
Python
|
lib/utils/SegDataGenerator.py
|
Grusinator/BirdClassification
|
c78ca3dbf70c2509c79ca4641102a2d725084d2a
|
[
"MIT"
] | 1
|
2018-04-16T19:01:48.000Z
|
2018-04-16T19:01:48.000Z
|
lib/utils/SegDataGenerator.py
|
Grusinator/BirdClassification
|
c78ca3dbf70c2509c79ca4641102a2d725084d2a
|
[
"MIT"
] | null | null | null |
lib/utils/SegDataGenerator.py
|
Grusinator/BirdClassification
|
c78ca3dbf70c2509c79ca4641102a2d725084d2a
|
[
"MIT"
] | null | null | null |
from keras.preprocessing.image import *
from keras.applications.imagenet_utils import preprocess_input
from keras import backend as K
from PIL import Image
import numpy as np
import os
#import cv2
def center_crop(x, center_crop_size, data_format, **kwargs):
    """Return the centered spatial crop of a single image array.

    ``center_crop_size`` is (rows, cols); ``data_format`` selects whether
    the channel axis is first or last. Odd crop sizes place the extra pixel
    on the bottom/right, matching integer-division centering.
    """
    if data_format == 'channels_first':
        mid_r, mid_c = x.shape[1] // 2, x.shape[2] // 2
    elif data_format == 'channels_last':
        mid_r, mid_c = x.shape[0] // 2, x.shape[1] // 2
    half_r = center_crop_size[0] // 2
    half_c = center_crop_size[1] // 2
    r0, r1 = mid_r - half_r, mid_r + (center_crop_size[0] - half_r)
    c0, c1 = mid_c - half_c, mid_c + (center_crop_size[1] - half_c)
    if data_format == 'channels_first':
        return x[:, r0:r1, c0:c1]
    elif data_format == 'channels_last':
        return x[r0:r1, c0:c1, :]
def pair_center_crop(x, y, center_crop_size, data_format, **kwargs):
    """Center-crop an image and its label map to the same spatial window.

    Returns ``(x_crop, y_crop)``; both arrays are sliced with identical
    row/column bounds so they stay pixel-aligned.
    """
    if data_format == 'channels_first':
        mid_r, mid_c = x.shape[1] // 2, x.shape[2] // 2
    elif data_format == 'channels_last':
        mid_r, mid_c = x.shape[0] // 2, x.shape[1] // 2
    half_r = center_crop_size[0] // 2
    half_c = center_crop_size[1] // 2
    r0, r1 = mid_r - half_r, mid_r + (center_crop_size[0] - half_r)
    c0, c1 = mid_c - half_c, mid_c + (center_crop_size[1] - half_c)
    if data_format == 'channels_first':
        return x[:, r0:r1, c0:c1], y[:, r0:r1, c0:c1]
    elif data_format == 'channels_last':
        return x[r0:r1, c0:c1, :], y[r0:r1, c0:c1, :]
def random_crop(x, random_crop_size, data_format, sync_seed=None, **kwargs):
    """Crop a randomly-offset window of ``random_crop_size`` from an image.

    Offsets are drawn from half the available slack per axis; passing the
    same ``sync_seed`` to paired calls reproduces the same window.
    """
    np.random.seed(sync_seed)
    if data_format == 'channels_first':
        rows, cols = x.shape[1], x.shape[2]
    elif data_format == 'channels_last':
        rows, cols = x.shape[0], x.shape[1]
    slack_r = (rows - random_crop_size[0]) // 2
    slack_c = (cols - random_crop_size[1]) // 2
    off_r = np.random.randint(slack_r) if slack_r != 0 else 0
    off_c = np.random.randint(slack_c) if slack_c != 0 else 0
    r0, r1 = off_r, off_r + random_crop_size[0]
    c0, c1 = off_c, off_c + random_crop_size[1]
    if data_format == 'channels_first':
        return x[:, r0:r1, c0:c1]
    elif data_format == 'channels_last':
        return x[r0:r1, c0:c1, :]
def pair_random_crop(x, y, random_crop_size, data_format, sync_seed=None, **kwargs):
    """Crop the same random window from an image and its label map.

    :param x: image array
    :param y: label array with the same spatial dimensions as ``x``
    :param random_crop_size: (rows, cols) of the crop window
    :param data_format: 'channels_first' or 'channels_last'
    :param sync_seed: optional seed so repeated calls sample the same offsets
    :return: tuple ``(x_crop, y_crop)`` sharing an identical spatial window
    """
    np.random.seed(sync_seed)
    if data_format == 'channels_first':
        h, w = x.shape[1], x.shape[2]
    elif data_format == 'channels_last':
        h, w = x.shape[0], x.shape[1]
    rangeh = (h - random_crop_size[0]) // 2
    rangew = (w - random_crop_size[1]) // 2
    offseth = 0 if rangeh == 0 else np.random.randint(rangeh)
    offsetw = 0 if rangew == 0 else np.random.randint(rangew)
    h_start, h_end = offseth, offseth + random_crop_size[0]
    w_start, w_end = offsetw, offsetw + random_crop_size[1]
    if data_format == 'channels_first':
        # BUG FIX: the label crop previously sliced its width axis with the
        # height bounds (y[:, h_start:h_end, h_start:h_end]), so image and
        # label went out of sync whenever the two axes differed.
        return x[:, h_start:h_end, w_start:w_end], y[:, h_start:h_end, w_start:w_end]
    elif data_format == 'channels_last':
        return x[h_start:h_end, w_start:w_end, :], y[h_start:h_end, w_start:w_end, :]
class SegDirectoryIterator(Iterator):
    '''Iterator yielding (image, label) batches for semantic segmentation.

    Users need to ensure that all files exist.
    Label images should be png images where pixel values represents class number.
    find images -name *.jpg > images.txt
    find labels -name *.png > labels.txt
    for a file name 2011_002920.jpg, each row should contain 2011_002920
    file_path: location of train.txt, or val.txt in PASCAL VOC2012 format,
    listing image file path components without extension
    data_dir: location of image files referred to by file in file_path
    label_dir: location of label files
    data_suffix: image file extension, such as `.jpg` or `.png`
    label_suffix: label file suffix, such as `.png`, or `.npy`
    loss_shape: shape to use when applying loss function to the label data
    '''

    def __init__(self, file_path, seg_data_generator,
                 data_dir, data_suffix,
                 label_dir, label_suffix, classes, ignore_label=255,
                 crop_mode='none', label_cval=255, pad_size=None,
                 target_size=None, color_mode='rgb',
                 data_format='default', class_mode='sparse',
                 batch_size=1, shuffle=True, seed=None,
                 save_to_dir=None, save_prefix='', save_format='jpeg',
                 loss_shape=None):
        if data_format == 'default':
            data_format = K.image_data_format()
        self.file_path = file_path
        self.data_dir = data_dir
        self.data_suffix = data_suffix
        self.label_suffix = label_suffix
        self.label_dir = label_dir
        self.classes = classes
        self.seg_data_generator = seg_data_generator
        self.target_size = tuple(target_size)
        self.ignore_label = ignore_label
        self.crop_mode = crop_mode
        self.label_cval = label_cval
        self.pad_size = pad_size
        if color_mode not in {'rgb', 'grayscale'}:
            raise ValueError('Invalid color mode:', color_mode,
                             '; expected "rgb" or "grayscale".')
        self.color_mode = color_mode
        self.data_format = data_format
        self.nb_label_ch = 1
        self.loss_shape = loss_shape
        # Labels stored as .npy arrays are loaded directly; anything else is
        # opened as an image file.
        if (self.label_suffix == '.npy') or (self.label_suffix == 'npy'):
            self.label_file_format = 'npy'
        else:
            self.label_file_format = 'img'
        if target_size:
            if self.color_mode == 'rgb':
                if self.data_format == 'channels_last':
                    self.image_shape = self.target_size + (3,)
                else:
                    self.image_shape = (3,) + self.target_size
            else:
                if self.data_format == 'channels_last':
                    self.image_shape = self.target_size + (1,)
                else:
                    self.image_shape = (1,) + self.target_size
            if self.data_format == 'channels_last':
                self.label_shape = self.target_size + (self.nb_label_ch,)
            else:
                self.label_shape = (self.nb_label_ch,) + self.target_size
        elif batch_size != 1:
            raise ValueError(
                'Batch size must be 1 when target image size is undetermined')
        else:
            self.image_shape = None
            self.label_shape = None
        if class_mode not in {'sparse', None}:
            raise ValueError('Invalid class_mode:', class_mode,
                             '; expected one of '
                             '"sparse", or None.')
        self.class_mode = class_mode
        if save_to_dir:
            # Palette is captured lazily from the first label image so saved
            # debug labels render with the original color map.
            self.palette = None
        self.save_to_dir = save_to_dir
        self.save_prefix = save_prefix
        self.save_format = save_format
        white_list_formats = {'png', 'jpg', 'jpeg', 'bmp', 'npy'}

        # build lists for data files and label files
        self.data_files = []
        self.label_files = []
        fp = open(file_path)
        lines = fp.readlines()
        fp.close()
        self.nb_sample = len(lines)
        for line in lines:
            line = line.strip('\n')
            self.data_files.append(line + data_suffix)
            self.label_files.append(line + label_suffix)
        super(SegDirectoryIterator, self).__init__(
            self.nb_sample, batch_size, shuffle, seed)

    def next(self):
        """Return the next batch: (batch_x, batch_y), or batch_x alone."""
        with self.lock:
            index_array, current_index, current_batch_size = next(
                self.index_generator)
        # The transformation of images is not under thread lock so it can be
        # done in parallel
        if self.target_size:
            # TODO(ahundt) make dtype properly configurable
            batch_x = np.zeros((current_batch_size,) + self.image_shape)
            # BUG FIX: string comparisons used the identity operator `is`,
            # which is not guaranteed for strings; use `==` / `!=` instead
            # (also below for 'npy').
            if self.loss_shape is None and self.label_file_format == 'img':
                batch_y = np.zeros((current_batch_size,) + self.label_shape,
                                   dtype=int)
            elif self.loss_shape is None:
                batch_y = np.zeros((current_batch_size,) + self.label_shape)
            else:
                batch_y = np.zeros((current_batch_size,) + self.loss_shape,
                                   dtype=np.uint8)
        grayscale = self.color_mode == 'grayscale'
        # build batch of image data and labels
        for i, j in enumerate(index_array):
            data_file = self.data_files[j]
            label_file = self.label_files[j]
            img_file_format = 'img'
            img = load_img(os.path.join(self.data_dir, data_file),
                           grayscale=grayscale, target_size=None)
            label_filepath = os.path.join(self.label_dir, label_file)
            if self.label_file_format == 'npy':
                y = np.load(label_filepath)
            else:
                label = Image.open(label_filepath)
                if self.save_to_dir and self.palette is None:
                    self.palette = label.palette
            # do padding
            if self.target_size:
                if self.crop_mode != 'none':
                    x = img_to_array(img, data_format=self.data_format)
                    if self.label_file_format != 'npy':
                        y = img_to_array(
                            label, data_format=self.data_format).astype(int)
                    img_w, img_h = img.size
                    if self.pad_size:
                        pad_w = max(self.pad_size[1] - img_w, 0)
                        pad_h = max(self.pad_size[0] - img_h, 0)
                    else:
                        pad_w = max(self.target_size[1] - img_w, 0)
                        pad_h = max(self.target_size[0] - img_h, 0)
                    # BUG FIX: pad widths must be integers; `/` yields floats
                    # on Python 3 and np.lib.pad rejects them, so use `//`.
                    if self.data_format == 'channels_first':
                        x = np.lib.pad(x, ((0, 0), (pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2)), 'constant', constant_values=0.)
                        y = np.lib.pad(y, ((0, 0), (pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2)),
                                       'constant', constant_values=self.label_cval)
                    elif self.data_format == 'channels_last':
                        x = np.lib.pad(x, ((pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2), (0, 0)), 'constant', constant_values=0.)
                        y = np.lib.pad(y, ((pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2), (0, 0)), 'constant', constant_values=self.label_cval)
                else:
                    x = img_to_array(img.resize((self.target_size[1], self.target_size[0]),
                                                Image.BILINEAR),
                                     data_format=self.data_format)
                    if self.label_file_format != 'npy':
                        y = img_to_array(label.resize((self.target_size[1], self.target_size[
                            0]), Image.NEAREST), data_format=self.data_format).astype(int)
                    else:
                        print('ERROR: resize not implemented for label npy file')
            if self.target_size is None:
                batch_x = np.zeros((current_batch_size,) + x.shape)
                if self.loss_shape is not None:
                    batch_y = np.zeros((current_batch_size,) + self.loss_shape)
                else:
                    batch_y = np.zeros((current_batch_size,) + y.shape)
            x, y = self.seg_data_generator.random_transform(x, y)
            x = self.seg_data_generator.standardize(x)
            # Remap ignored pixels to the extra class index `self.classes`.
            if self.ignore_label:
                y[np.where(y == self.ignore_label)] = self.classes
            if self.loss_shape is not None:
                y = np.reshape(y, self.loss_shape)
            batch_x[i] = x
            batch_y[i] = y
        # optionally save augmented images to disk for debugging purposes
        if self.save_to_dir:
            for i in range(current_batch_size):
                img = array_to_img(batch_x[i], self.data_format, scale=True)
                label = batch_y[i][:, :, 0].astype('uint8')
                label[np.where(label == self.classes)] = self.ignore_label
                label = Image.fromarray(label, mode='P')
                label.palette = self.palette
                fname = '{prefix}_{index}_{hash}'.format(prefix=self.save_prefix,
                                                         index=current_index + i,
                                                         hash=np.random.randint(1e4))
                img.save(os.path.join(self.save_to_dir, 'img_' +
                                      fname + '.{format}'.format(format=self.save_format)))
                label.save(os.path.join(self.save_to_dir,
                                        'label_' + fname + '.png'))
        # return
        batch_x = preprocess_input(batch_x)
        if self.class_mode == 'sparse':
            return batch_x, batch_y
        else:
            return batch_x
class SegDataGenerator(object):
    """Generate augmented (image, label) pairs for segmentation training.

    Mirrors keras' ImageDataGenerator but applies every geometric transform
    identically to the image and its label map so the two stay aligned.
    """

    def __init__(self,
                 featurewise_center=False,
                 samplewise_center=False,
                 featurewise_std_normalization=False,
                 samplewise_std_normalization=False,
                 channelwise_center=False,
                 rotation_range=0.,
                 width_shift_range=0.,
                 height_shift_range=0.,
                 shear_range=0.,
                 zoom_range=0.,
                 zoom_maintain_shape=True,
                 channel_shift_range=0.,
                 fill_mode='constant',
                 cval=0.,
                 label_cval=255,
                 crop_mode='none',
                 crop_size=(0, 0),
                 pad_size=None,
                 horizontal_flip=False,
                 vertical_flip=False,
                 rescale=None,
                 data_format='default'):
        if data_format == 'default':
            data_format = K.image_data_format()
        # Capture every constructor argument as an attribute in one shot.
        self.__dict__.update(locals())
        self.mean = None
        self.ch_mean = None
        self.std = None
        self.principal_components = None
        self.rescale = rescale
        if data_format not in {'channels_last', 'channels_first'}:
            raise Exception('data_format should be channels_last (channel after row and '
                            'column) or channels_first (channel before row and column). '
                            'Received arg: ', data_format)
        if crop_mode not in {'none', 'random', 'center'}:
            raise Exception('crop_mode should be "none" or "random" or "center" '
                            'Received arg: ', crop_mode)
        self.data_format = data_format
        # Axis indices are 1-based here because a batch axis is assumed at 0;
        # per-image code subtracts 1 from each.
        if data_format == 'channels_first':
            self.channel_index = 1
            self.row_index = 2
            self.col_index = 3
        if data_format == 'channels_last':
            self.channel_index = 3
            self.row_index = 1
            self.col_index = 2
        # Normalize zoom_range to a [lower, upper] pair.
        if np.isscalar(zoom_range):
            self.zoom_range = [1 - zoom_range, 1 + zoom_range]
        elif len(zoom_range) == 2:
            self.zoom_range = [zoom_range[0], zoom_range[1]]
        else:
            raise Exception('zoom_range should be a float or '
                            'a tuple or list of two floats. '
                            'Received arg: ', zoom_range)

    def flow_from_directory(self, file_path, data_dir, data_suffix,
                            label_dir, label_suffix, classes,
                            ignore_label=255,
                            target_size=None, color_mode='rgb',
                            class_mode='sparse',
                            batch_size=32, shuffle=True, seed=None,
                            save_to_dir=None, save_prefix='', save_format='jpeg',
                            loss_shape=None):
        """Return a SegDirectoryIterator that yields batches from disk."""
        # When cropping, batches always come out at the crop size regardless
        # of the requested target size.
        if self.crop_mode == 'random' or self.crop_mode == 'center':
            target_size = self.crop_size
        return SegDirectoryIterator(
            file_path, self,
            data_dir=data_dir, data_suffix=data_suffix,
            label_dir=label_dir, label_suffix=label_suffix,
            classes=classes, ignore_label=ignore_label,
            crop_mode=self.crop_mode, label_cval=self.label_cval,
            pad_size=self.pad_size,
            target_size=target_size, color_mode=color_mode,
            data_format=self.data_format, class_mode=class_mode,
            batch_size=batch_size, shuffle=shuffle, seed=seed,
            save_to_dir=save_to_dir, save_prefix=save_prefix,
            save_format=save_format,
            loss_shape=loss_shape)

    def standardize(self, x):
        """Apply the configured rescaling/normalization to one image."""
        if self.rescale:
            x *= self.rescale
        # x is a single image, so it doesn't have image number at index 0
        img_channel_index = self.channel_index - 1
        if self.samplewise_center:
            x -= np.mean(x, axis=img_channel_index, keepdims=True)
        if self.samplewise_std_normalization:
            x /= (np.std(x, axis=img_channel_index, keepdims=True) + 1e-7)
        if self.featurewise_center:
            x -= self.mean
        if self.featurewise_std_normalization:
            x /= (self.std + 1e-7)
        if self.channelwise_center:
            x -= self.ch_mean
        return x

    def random_transform(self, x, y):
        """Apply one random geometric transform to image x and label y.

        Rotation/shift/shear/zoom are composed into a single homography so
        both arrays are warped exactly once; the label is filled with
        label_cval so new border pixels can be ignored downstream.
        """
        # x is a single image, so it doesn't have image number at index 0
        img_row_index = self.row_index - 1
        img_col_index = self.col_index - 1
        img_channel_index = self.channel_index - 1
        if self.crop_mode == 'none':
            crop_size = (x.shape[img_row_index], x.shape[img_col_index])
        else:
            crop_size = self.crop_size
        assert x.shape[img_row_index] == y.shape[img_row_index] and x.shape[img_col_index] == y.shape[
            img_col_index], 'DATA ERROR: Different shape of data and label!\ndata shape: %s, label shape: %s' % (str(x.shape), str(y.shape))
        # use composition of homographies to generate final transform that
        # needs to be applied
        if self.rotation_range:
            theta = np.pi / 180 * \
                np.random.uniform(-self.rotation_range, self.rotation_range)
        else:
            theta = 0
        rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
                                    [np.sin(theta), np.cos(theta), 0],
                                    [0, 0, 1]])
        if self.height_shift_range:
            # * x.shape[img_row_index]
            tx = np.random.uniform(-self.height_shift_range,
                                   self.height_shift_range) * crop_size[0]
        else:
            tx = 0
        if self.width_shift_range:
            # * x.shape[img_col_index]
            ty = np.random.uniform(-self.width_shift_range,
                                   self.width_shift_range) * crop_size[1]
        else:
            ty = 0
        translation_matrix = np.array([[1, 0, tx],
                                       [0, 1, ty],
                                       [0, 0, 1]])
        if self.shear_range:
            shear = np.random.uniform(-self.shear_range, self.shear_range)
        else:
            shear = 0
        shear_matrix = np.array([[1, -np.sin(shear), 0],
                                 [0, np.cos(shear), 0],
                                 [0, 0, 1]])
        if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
            zx, zy = 1, 1
        else:
            zx, zy = np.random.uniform(
                self.zoom_range[0], self.zoom_range[1], 2)
        if self.zoom_maintain_shape:
            # Keep aspect ratio: zoom both axes by the same factor.
            zy = zx
        zoom_matrix = np.array([[zx, 0, 0],
                                [0, zy, 0],
                                [0, 0, 1]])
        transform_matrix = np.dot(
            np.dot(np.dot(rotation_matrix, translation_matrix), shear_matrix), zoom_matrix)
        h, w = x.shape[img_row_index], x.shape[img_col_index]
        transform_matrix = transform_matrix_offset_center(
            transform_matrix, h, w)
        x = apply_transform(x, transform_matrix, img_channel_index,
                            fill_mode=self.fill_mode, cval=self.cval)
        # Labels always use constant fill so warped-in border pixels can be
        # marked as "ignore" via label_cval.
        y = apply_transform(y, transform_matrix, img_channel_index,
                            fill_mode='constant', cval=self.label_cval)
        if self.channel_shift_range != 0:
            x = random_channel_shift(
                x, self.channel_shift_range, img_channel_index)
        if self.horizontal_flip:
            if np.random.random() < 0.5:
                x = flip_axis(x, img_col_index)
                y = flip_axis(y, img_col_index)
        if self.vertical_flip:
            if np.random.random() < 0.5:
                x = flip_axis(x, img_row_index)
                y = flip_axis(y, img_row_index)
        if self.crop_mode == 'center':
            x, y = pair_center_crop(x, y, self.crop_size, self.data_format)
        elif self.crop_mode == 'random':
            x, y = pair_random_crop(x, y, self.crop_size, self.data_format)
        # TODO:
        # channel-wise normalization
        # barrel/fisheye
        return x, y

    def fit(self, X,
            augment=False,
            rounds=1,
            seed=None):
        '''Required for featurewise_center and featurewise_std_normalization

        # Arguments
            X: Numpy array, the data to fit on.
            augment: whether to fit on randomly augmented samples
            rounds: if `augment`,
                how many augmentation passes to do over the data
            seed: random seed.
        '''
        X = np.copy(X)
        if augment:
            aX = np.zeros(tuple([rounds * X.shape[0]] + list(X.shape)[1:]))
            for r in range(rounds):
                for i in range(X.shape[0]):
                    # NOTE(review): random_transform takes (x, y); this
                    # single-argument call would raise TypeError — confirm
                    # whether the augment=True path is ever exercised.
                    aX[i + r * X.shape[0]] = self.random_transform(X[i])
            X = aX
        if self.featurewise_center:
            self.mean = np.mean(X, axis=0)
            X -= self.mean
        if self.featurewise_std_normalization:
            self.std = np.std(X, axis=0)
            X /= (self.std + 1e-7)

    def set_ch_mean(self, ch_mean):
        # Channel means consumed by channelwise_center in standardize().
        self.ch_mean = ch_mean
| 43.067308
| 160
| 0.552936
|
from keras.preprocessing.image import *
from keras.applications.imagenet_utils import preprocess_input
from keras import backend as K
from PIL import Image
import numpy as np
import os
def center_crop(x, center_crop_size, data_format, **kwargs):
if data_format == 'channels_first':
centerh, centerw = x.shape[1] // 2, x.shape[2] // 2
elif data_format == 'channels_last':
centerh, centerw = x.shape[0] // 2, x.shape[1] // 2
lh, lw = center_crop_size[0] // 2, center_crop_size[1] // 2
rh, rw = center_crop_size[0] - lh, center_crop_size[1] - lw
h_start, h_end = centerh - lh, centerh + rh
w_start, w_end = centerw - lw, centerw + rw
if data_format == 'channels_first':
return x[:, h_start:h_end, w_start:w_end]
elif data_format == 'channels_last':
return x[h_start:h_end, w_start:w_end, :]
def pair_center_crop(x, y, center_crop_size, data_format, **kwargs):
if data_format == 'channels_first':
centerh, centerw = x.shape[1] // 2, x.shape[2] // 2
elif data_format == 'channels_last':
centerh, centerw = x.shape[0] // 2, x.shape[1] // 2
lh, lw = center_crop_size[0] // 2, center_crop_size[1] // 2
rh, rw = center_crop_size[0] - lh, center_crop_size[1] - lw
h_start, h_end = centerh - lh, centerh + rh
w_start, w_end = centerw - lw, centerw + rw
if data_format == 'channels_first':
return x[:, h_start:h_end, w_start:w_end], \
y[:, h_start:h_end, w_start:w_end]
elif data_format == 'channels_last':
return x[h_start:h_end, w_start:w_end, :], \
y[h_start:h_end, w_start:w_end, :]
def random_crop(x, random_crop_size, data_format, sync_seed=None, **kwargs):
np.random.seed(sync_seed)
if data_format == 'channels_first':
h, w = x.shape[1], x.shape[2]
elif data_format == 'channels_last':
h, w = x.shape[0], x.shape[1]
rangeh = (h - random_crop_size[0]) // 2
rangew = (w - random_crop_size[1]) // 2
offseth = 0 if rangeh == 0 else np.random.randint(rangeh)
offsetw = 0 if rangew == 0 else np.random.randint(rangew)
h_start, h_end = offseth, offseth + random_crop_size[0]
w_start, w_end = offsetw, offsetw + random_crop_size[1]
if data_format == 'channels_first':
return x[:, h_start:h_end, w_start:w_end]
elif data_format == 'channels_last':
return x[h_start:h_end, w_start:w_end, :]
def pair_random_crop(x, y, random_crop_size, data_format, sync_seed=None, **kwargs):
np.random.seed(sync_seed)
if data_format == 'channels_first':
h, w = x.shape[1], x.shape[2]
elif data_format == 'channels_last':
h, w = x.shape[0], x.shape[1]
rangeh = (h - random_crop_size[0]) // 2
rangew = (w - random_crop_size[1]) // 2
offseth = 0 if rangeh == 0 else np.random.randint(rangeh)
offsetw = 0 if rangew == 0 else np.random.randint(rangew)
h_start, h_end = offseth, offseth + random_crop_size[0]
w_start, w_end = offsetw, offsetw + random_crop_size[1]
if data_format == 'channels_first':
return x[:, h_start:h_end, w_start:w_end], y[:, h_start:h_end, h_start:h_end]
elif data_format == 'channels_last':
return x[h_start:h_end, w_start:w_end, :], y[h_start:h_end, w_start:w_end, :]
class SegDirectoryIterator(Iterator):
def __init__(self, file_path, seg_data_generator,
data_dir, data_suffix,
label_dir, label_suffix, classes, ignore_label=255,
crop_mode='none', label_cval=255, pad_size=None,
target_size=None, color_mode='rgb',
data_format='default', class_mode='sparse',
batch_size=1, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='jpeg',
loss_shape=None):
if data_format == 'default':
data_format = K.image_data_format()
self.file_path = file_path
self.data_dir = data_dir
self.data_suffix = data_suffix
self.label_suffix = label_suffix
self.label_dir = label_dir
self.classes = classes
self.seg_data_generator = seg_data_generator
self.target_size = tuple(target_size)
self.ignore_label = ignore_label
self.crop_mode = crop_mode
self.label_cval = label_cval
self.pad_size = pad_size
if color_mode not in {'rgb', 'grayscale'}:
raise ValueError('Invalid color mode:', color_mode,
'; expected "rgb" or "grayscale".')
self.color_mode = color_mode
self.data_format = data_format
self.nb_label_ch = 1
self.loss_shape = loss_shape
if (self.label_suffix == '.npy') or (self.label_suffix == 'npy'):
self.label_file_format = 'npy'
else:
self.label_file_format = 'img'
if target_size:
if self.color_mode == 'rgb':
if self.data_format == 'channels_last':
self.image_shape = self.target_size + (3,)
else:
self.image_shape = (3,) + self.target_size
else:
if self.data_format == 'channels_last':
self.image_shape = self.target_size + (1,)
else:
self.image_shape = (1,) + self.target_size
if self.data_format == 'channels_last':
self.label_shape = self.target_size + (self.nb_label_ch,)
else:
self.label_shape = (self.nb_label_ch,) + self.target_size
elif batch_size != 1:
raise ValueError(
'Batch size must be 1 when target image size is undetermined')
else:
self.image_shape = None
self.label_shape = None
if class_mode not in {'sparse', None}:
raise ValueError('Invalid class_mode:', class_mode,
'; expected one of '
'"sparse", or None.')
self.class_mode = class_mode
if save_to_dir:
self.palette = None
self.save_to_dir = save_to_dir
self.save_prefix = save_prefix
self.save_format = save_format
white_list_formats = {'png', 'jpg', 'jpeg', 'bmp', 'npy'}
self.data_files = []
self.label_files = []
fp = open(file_path)
lines = fp.readlines()
fp.close()
self.nb_sample = len(lines)
for line in lines:
line = line.strip('\n')
self.data_files.append(line + data_suffix)
self.label_files.append(line + label_suffix)
super(SegDirectoryIterator, self).__init__(
self.nb_sample, batch_size, shuffle, seed)
def next(self):
with self.lock:
index_array, current_index, current_batch_size = next(
self.index_generator)
if self.target_size:
batch_x = np.zeros((current_batch_size,) + self.image_shape)
if self.loss_shape is None and self.label_file_format is 'img':
batch_y = np.zeros((current_batch_size,) + self.label_shape,
dtype=int)
elif self.loss_shape is None:
batch_y = np.zeros((current_batch_size,) + self.label_shape)
else:
batch_y = np.zeros((current_batch_size,) + self.loss_shape,
dtype=np.uint8)
grayscale = self.color_mode == 'grayscale'
for i, j in enumerate(index_array):
data_file = self.data_files[j]
label_file = self.label_files[j]
img_file_format = 'img'
img = load_img(os.path.join(self.data_dir, data_file),
grayscale=grayscale, target_size=None)
label_filepath = os.path.join(self.label_dir, label_file)
if self.label_file_format == 'npy':
y = np.load(label_filepath)
else:
label = Image.open(label_filepath)
if self.save_to_dir and self.palette is None:
self.palette = label.palette
if self.target_size:
if self.crop_mode != 'none':
x = img_to_array(img, data_format=self.data_format)
if self.label_file_format is not 'npy':
y = img_to_array(
label, data_format=self.data_format).astype(int)
img_w, img_h = img.size
if self.pad_size:
pad_w = max(self.pad_size[1] - img_w, 0)
pad_h = max(self.pad_size[0] - img_h, 0)
else:
pad_w = max(self.target_size[1] - img_w, 0)
pad_h = max(self.target_size[0] - img_h, 0)
if self.data_format == 'channels_first':
x = np.lib.pad(x, ((0, 0), (pad_h / 2, pad_h - pad_h / 2), (pad_w / 2, pad_w - pad_w / 2)), 'constant', constant_values=0.)
y = np.lib.pad(y, ((0, 0), (pad_h / 2, pad_h - pad_h / 2), (pad_w / 2, pad_w - pad_w / 2)),
'constant', constant_values=self.label_cval)
elif self.data_format == 'channels_last':
x = np.lib.pad(x, ((pad_h / 2, pad_h - pad_h / 2), (pad_w / 2, pad_w - pad_w / 2), (0, 0)), 'constant', constant_values=0.)
y = np.lib.pad(y, ((pad_h / 2, pad_h - pad_h / 2), (pad_w / 2, pad_w - pad_w / 2), (0, 0)), 'constant', constant_values=self.label_cval)
else:
x = img_to_array(img.resize((self.target_size[1], self.target_size[0]),
Image.BILINEAR),
data_format=self.data_format)
if self.label_file_format is not 'npy':
y = img_to_array(label.resize((self.target_size[1], self.target_size[
0]), Image.NEAREST), data_format=self.data_format).astype(int)
else:
print('ERROR: resize not implemented for label npy file')
if self.target_size is None:
batch_x = np.zeros((current_batch_size,) + x.shape)
if self.loss_shape is not None:
batch_y = np.zeros((current_batch_size,) + self.loss_shape)
else:
batch_y = np.zeros((current_batch_size,) + y.shape)
x, y = self.seg_data_generator.random_transform(x, y)
x = self.seg_data_generator.standardize(x)
if self.ignore_label:
y[np.where(y == self.ignore_label)] = self.classes
if self.loss_shape is not None:
y = np.reshape(y, self.loss_shape)
batch_x[i] = x
batch_y[i] = y
if self.save_to_dir:
for i in range(current_batch_size):
img = array_to_img(batch_x[i], self.data_format, scale=True)
label = batch_y[i][:, :, 0].astype('uint8')
label[np.where(label == self.classes)] = self.ignore_label
label = Image.fromarray(label, mode='P')
label.palette = self.palette
fname = '{prefix}_{index}_{hash}'.format(prefix=self.save_prefix,
index=current_index + i,
hash=np.random.randint(1e4))
img.save(os.path.join(self.save_to_dir, 'img_' +
fname + '.{format}'.format(format=self.save_format)))
label.save(os.path.join(self.save_to_dir,
'label_' + fname + '.png'))
batch_x = preprocess_input(batch_x)
if self.class_mode == 'sparse':
return batch_x, batch_y
else:
return batch_x
class SegDataGenerator(object):
    """Augmentation generator for semantic-segmentation pairs.

    Similar in spirit to Keras' ``ImageDataGenerator``, but every geometric
    transform (rotation, shift, shear, zoom, flip, crop) is applied to the
    image *and* its label mask with the same parameters so they stay
    aligned.  Pixels created by a transform are filled with ``cval`` in the
    image and ``label_cval`` in the mask.
    """

    def __init__(self,
                 featurewise_center=False,
                 samplewise_center=False,
                 featurewise_std_normalization=False,
                 samplewise_std_normalization=False,
                 channelwise_center=False,
                 rotation_range=0.,
                 width_shift_range=0.,
                 height_shift_range=0.,
                 shear_range=0.,
                 zoom_range=0.,
                 zoom_maintain_shape=True,
                 channel_shift_range=0.,
                 fill_mode='constant',
                 cval=0.,
                 label_cval=255,
                 crop_mode='none',
                 crop_size=(0, 0),
                 pad_size=None,
                 horizontal_flip=False,
                 vertical_flip=False,
                 rescale=None,
                 data_format='default'):
        """Store the augmentation configuration.

        ``data_format='default'`` resolves to the Keras backend setting.
        ``crop_mode`` must be one of ``'none'``, ``'random'``, ``'center'``.
        ``zoom_range`` may be a scalar (symmetric range around 1) or a
        two-element sequence ``[lower, upper]``.
        """
        if data_format == 'default':
            data_format = K.image_data_format()
        # NOTE: this also stores `self` and the raw constructor locals on
        # the instance; data_format is normalized again below.
        self.__dict__.update(locals())
        # Statistics filled in by fit() / set_ch_mean(); None until then.
        self.mean = None
        self.ch_mean = None
        self.std = None
        self.principal_components = None
        self.rescale = rescale
        if data_format not in {'channels_last', 'channels_first'}:
            raise Exception('data_format should be channels_last (channel after row and '
                            'column) or channels_first (channel before row and column). '
                            'Received arg: ', data_format)
        if crop_mode not in {'none', 'random', 'center'}:
            raise Exception('crop_mode should be "none" or "random" or "center" '
                            'Received arg: ', crop_mode)
        self.data_format = data_format
        # 1-based axis indices into a 4D batch (sample, ...), matching the
        # chosen data format.
        if data_format == 'channels_first':
            self.channel_index = 1
            self.row_index = 2
            self.col_index = 3
        if data_format == 'channels_last':
            self.channel_index = 3
            self.row_index = 1
            self.col_index = 2
        if np.isscalar(zoom_range):
            self.zoom_range = [1 - zoom_range, 1 + zoom_range]
        elif len(zoom_range) == 2:
            self.zoom_range = [zoom_range[0], zoom_range[1]]
        else:
            raise Exception('zoom_range should be a float or '
                            'a tuple or list of two floats. '
                            'Received arg: ', zoom_range)

    def flow_from_directory(self, file_path, data_dir, data_suffix,
                            label_dir, label_suffix, classes,
                            ignore_label=255,
                            target_size=None, color_mode='rgb',
                            class_mode='sparse',
                            batch_size=32, shuffle=True, seed=None,
                            save_to_dir=None, save_prefix='', save_format='jpeg',
                            loss_shape=None):
        """Build a SegDirectoryIterator over the file list in ``file_path``.

        When a crop mode is active the crop size overrides ``target_size``,
        since cropping (not resizing) determines the output shape.
        """
        if self.crop_mode == 'random' or self.crop_mode == 'center':
            target_size = self.crop_size
        return SegDirectoryIterator(
            file_path, self,
            data_dir=data_dir, data_suffix=data_suffix,
            label_dir=label_dir, label_suffix=label_suffix,
            classes=classes, ignore_label=ignore_label,
            crop_mode=self.crop_mode, label_cval=self.label_cval,
            pad_size=self.pad_size,
            target_size=target_size, color_mode=color_mode,
            data_format=self.data_format, class_mode=class_mode,
            batch_size=batch_size, shuffle=shuffle, seed=seed,
            save_to_dir=save_to_dir, save_prefix=save_prefix,
            save_format=save_format,
            loss_shape=loss_shape)

    def standardize(self, x):
        """Normalize a single image according to the configured options.

        Applies, in order: rescale, sample-wise centering/normalization,
        feature-wise centering/normalization (requires fit()), and
        channel-wise mean subtraction (requires set_ch_mean()).
        Operates in place on ``x`` and also returns it.
        """
        if self.rescale:
            x *= self.rescale
        # x is a single image, so channel axis is one less than in a batch.
        img_channel_index = self.channel_index - 1
        if self.samplewise_center:
            x -= np.mean(x, axis=img_channel_index, keepdims=True)
        if self.samplewise_std_normalization:
            x /= (np.std(x, axis=img_channel_index, keepdims=True) + 1e-7)
        if self.featurewise_center:
            x -= self.mean
        if self.featurewise_std_normalization:
            x /= (self.std + 1e-7)
        if self.channelwise_center:
            x -= self.ch_mean
        return x

    def random_transform(self, x, y):
        """Apply one random geometric transform to an (image, label) pair.

        The same affine matrix (rotation @ translation @ shear @ zoom,
        centered on the image) is applied to both arrays; the label is
        filled with ``label_cval`` and warped with 'constant' mode so
        out-of-image pixels become the ignore value.  Flips and the final
        crop (center/random per ``crop_mode``) are shared as well.
        Returns the transformed ``(x, y)``.
        """
        # x is a single image, so it doesn't have image number at index 0
        img_row_index = self.row_index - 1
        img_col_index = self.col_index - 1
        img_channel_index = self.channel_index - 1
        if self.crop_mode == 'none':
            crop_size = (x.shape[img_row_index], x.shape[img_col_index])
        else:
            crop_size = self.crop_size
        assert x.shape[img_row_index] == y.shape[img_row_index] and x.shape[img_col_index] == y.shape[
            img_col_index], 'DATA ERROR: Different shape of data and label!\ndata shape: %s, label shape: %s' % (str(x.shape), str(y.shape))
        if self.rotation_range:
            theta = np.pi / 180 * \
                np.random.uniform(-self.rotation_range, self.rotation_range)
        else:
            theta = 0
        rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
                                    [np.sin(theta), np.cos(theta), 0],
                                    [0, 0, 1]])
        # Shifts are expressed as fractions of the (crop) size.
        if self.height_shift_range:
            tx = np.random.uniform(-self.height_shift_range,
                                   self.height_shift_range) * crop_size[0]
        else:
            tx = 0
        if self.width_shift_range:
            ty = np.random.uniform(-self.width_shift_range,
                                   self.width_shift_range) * crop_size[1]
        else:
            ty = 0
        translation_matrix = np.array([[1, 0, tx],
                                       [0, 1, ty],
                                       [0, 0, 1]])
        if self.shear_range:
            shear = np.random.uniform(-self.shear_range, self.shear_range)
        else:
            shear = 0
        shear_matrix = np.array([[1, -np.sin(shear), 0],
                                 [0, np.cos(shear), 0],
                                 [0, 0, 1]])
        if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
            zx, zy = 1, 1
        else:
            zx, zy = np.random.uniform(
                self.zoom_range[0], self.zoom_range[1], 2)
        if self.zoom_maintain_shape:
            # Equal zoom on both axes preserves the aspect ratio.
            zy = zx
        zoom_matrix = np.array([[zx, 0, 0],
                                [0, zy, 0],
                                [0, 0, 1]])
        transform_matrix = np.dot(
            np.dot(np.dot(rotation_matrix, translation_matrix), shear_matrix), zoom_matrix)
        h, w = x.shape[img_row_index], x.shape[img_col_index]
        transform_matrix = transform_matrix_offset_center(
            transform_matrix, h, w)
        x = apply_transform(x, transform_matrix, img_channel_index,
                            fill_mode=self.fill_mode, cval=self.cval)
        # Labels always use 'constant' fill with label_cval so undefined
        # pixels can be ignored by the loss.
        y = apply_transform(y, transform_matrix, img_channel_index,
                            fill_mode='constant', cval=self.label_cval)
        if self.channel_shift_range != 0:
            # Channel shift applies to the image only; label ids must not move.
            x = random_channel_shift(
                x, self.channel_shift_range, img_channel_index)
        if self.horizontal_flip:
            if np.random.random() < 0.5:
                x = flip_axis(x, img_col_index)
                y = flip_axis(y, img_col_index)
        if self.vertical_flip:
            if np.random.random() < 0.5:
                x = flip_axis(x, img_row_index)
                y = flip_axis(y, img_row_index)
        if self.crop_mode == 'center':
            x, y = pair_center_crop(x, y, self.crop_size, self.data_format)
        elif self.crop_mode == 'random':
            x, y = pair_random_crop(x, y, self.crop_size, self.data_format)
        return x, y

    def fit(self, X,
            augment=False,
            rounds=1,
            seed=None):
        """Compute feature-wise mean/std statistics from sample data ``X``.

        Required before using featurewise_center /
        featurewise_std_normalization in standardize().
        """
        X = np.copy(X)
        if augment:
            # NOTE(review): random_transform requires an (x, y) pair but is
            # called here with a single array — this augment=True branch
            # looks broken (TypeError); confirm intended usage.
            aX = np.zeros(tuple([rounds * X.shape[0]] + list(X.shape)[1:]))
            for r in range(rounds):
                for i in range(X.shape[0]):
                    aX[i + r * X.shape[0]] = self.random_transform(X[i])
            X = aX
        if self.featurewise_center:
            self.mean = np.mean(X, axis=0)
            X -= self.mean
        if self.featurewise_std_normalization:
            self.std = np.std(X, axis=0)
            X /= (self.std + 1e-7)

    def set_ch_mean(self, ch_mean):
        # Per-channel mean used by channelwise_center in standardize().
        self.ch_mean = ch_mean
| true
| true
|
f7092e1255d38618f9c2c9eca5f281dfe7bcef56
| 2,904
|
py
|
Python
|
test/swig/Less.py
|
AyishaR/deepC
|
1dc9707ef5ca9000fc13c3da7f1129685a83b494
|
[
"Apache-2.0"
] | 223
|
2020-04-15T20:34:33.000Z
|
2022-03-28T05:41:49.000Z
|
test/swig/Less.py
|
AyishaR/deepC
|
1dc9707ef5ca9000fc13c3da7f1129685a83b494
|
[
"Apache-2.0"
] | 42
|
2019-07-29T15:57:12.000Z
|
2020-04-08T15:12:48.000Z
|
test/swig/Less.py
|
AyishaR/deepC
|
1dc9707ef5ca9000fc13c3da7f1129685a83b494
|
[
"Apache-2.0"
] | 58
|
2019-07-22T11:46:19.000Z
|
2020-04-09T22:56:41.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-argument
#
# This file is part of DNN compiler maintained at
# https://github.com/ai-techsystems/dnnCompiler
import common
import deepC.dnnc as dc
import numpy as np
import unittest
class LessTest(unittest.TestCase):
    """Check dc.less against np.less for 1- to 4-dimensional inputs."""

    def setUp(self):
        # Two random float32 vectors of 24 elements, mirrored into deepC arrays.
        self.len = 24
        self.np_a = np.random.randn(self.len).astype(np.float32)
        self.np_b = np.random.randn(self.len).astype(np.float32)
        self.dc_a = dc.array(list(self.np_a))
        self.dc_b = dc.array(list(self.np_b))

    def test_Less1D(self):
        npr = np.less(self.np_a, self.np_b)
        dcr = dc.less(self.dc_a, self.dc_b)
        # builtin `bool` instead of `np.bool`: the np.bool alias was
        # deprecated in numpy 1.20 and removed in 1.24.
        np.testing.assert_allclose(npr, np.array(dcr.data()).astype(bool),
                                   rtol=1e-3, atol=1e-3)

    def test_Less2D(self):
        np_a = np.reshape(self.np_a, (6, 4))
        np_b = np.reshape(self.np_b, (6, 4))
        dc_a = dc.reshape(self.dc_a, (6, 4))
        dc_b = dc.reshape(self.dc_b, (6, 4))
        npr = np.less(np_a, np_b)
        dcr = dc.less(dc_a, dc_b)
        np.testing.assert_allclose(npr.flatten(), np.array(dcr.data()).astype(bool),
                                   rtol=1e-3, atol=1e-3)

    def test_Less3D(self):
        np_a = np.reshape(self.np_a, (2, 4, 3))
        np_b = np.reshape(self.np_b, (2, 4, 3))
        dc_a = dc.reshape(self.dc_a, (2, 4, 3))
        dc_b = dc.reshape(self.dc_b, (2, 4, 3))
        npr = np.less(np_a, np_b)
        dcr = dc.less(dc_a, dc_b)
        np.testing.assert_allclose(npr.flatten(), np.array(dcr.data()).astype(bool),
                                   rtol=1e-3, atol=1e-3)

    # NOTE: the name "Equal" is a copy-paste leftover — this case exercises
    # less() on 4D inputs. Name kept to avoid changing test selection.
    def test_Equal4D(self):
        np_a = np.reshape(self.np_a, (2, 2, 2, 3))
        np_b = np.reshape(self.np_b, (2, 2, 2, 3))
        dc_a = dc.reshape(self.dc_a, (2, 2, 2, 3))
        dc_b = dc.reshape(self.dc_b, (2, 2, 2, 3))
        npr = np.less(np_a, np_b)
        dcr = dc.less(dc_a, dc_b)
        np.testing.assert_allclose(npr.flatten(), np.array(dcr.data()).astype(bool),
                                   rtol=1e-3, atol=1e-3)

    def tearDown(self):
        return "test finished"
# Run the suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| 34.987952
| 87
| 0.632576
|
import common
import deepC.dnnc as dc
import numpy as np
import unittest
class LessTest(unittest.TestCase):
def setUp(self):
self.len = 24
self.np_a = np.random.randn(self.len).astype(np.float32)
self.np_b = np.random.randn(self.len).astype(np.float32)
self.dc_a = dc.array(list(self.np_a));
self.dc_b = dc.array(list(self.np_b));
def test_Less1D (self):
npr = np.less(self.np_a, self.np_b)
dcr = dc.less(self.dc_a, self.dc_b)
np.testing.assert_allclose(npr, np.array(dcr.data()).astype(np.bool),
rtol=1e-3, atol=1e-3)
def test_Less2D (self):
np_a = np.reshape(self.np_a, (6,4))
np_b = np.reshape(self.np_b, (6,4))
dc_a = dc.reshape(self.dc_a, (6,4));
dc_b = dc.reshape(self.dc_b, (6,4));
npr = np.less(np_a, np_b);
dcr = dc.less(dc_a, dc_b);
np.testing.assert_allclose(npr.flatten(), np.array(dcr.data()).astype(np.bool),
rtol=1e-3, atol=1e-3)
def test_Less3D (self):
np_a = np.reshape(self.np_a, (2,4,3))
np_b = np.reshape(self.np_b, (2,4,3))
dc_a = dc.reshape(self.dc_a, (2,4,3));
dc_b = dc.reshape(self.dc_b, (2,4,3));
npr = np.less(np_a, np_b);
dcr = dc.less(dc_a, dc_b);
np.testing.assert_allclose(npr.flatten(), np.array(dcr.data()).astype(np.bool),
rtol=1e-3, atol=1e-3)
def test_Equal4D (self):
np_a = np.reshape(self.np_a, (2,2,2,3))
np_b = np.reshape(self.np_b, (2,2,2,3))
dc_a = dc.reshape(self.dc_a, (2,2,2,3))
dc_b = dc.reshape(self.dc_b, (2,2,2,3))
npr = np.less(np_a, np_b)
dcr = dc.less(dc_a, dc_b)
np.testing.assert_allclose(npr.flatten(), np.array(dcr.data()).astype(np.bool),
rtol=1e-3, atol=1e-3)
def tearDown(self):
return "test finished"
if __name__ == '__main__':
unittest.main()
| true
| true
|
f7092eb8bb29cd5ee13ba9193e8f20b7d4714b28
| 442
|
py
|
Python
|
messier_objects/migrations/0003_auto_20200723_1441.py
|
DanielPDWalker/Astrophoto
|
9a7ee59deb291617baa3ab8724b8ce5970e6ea9f
|
[
"MIT"
] | null | null | null |
messier_objects/migrations/0003_auto_20200723_1441.py
|
DanielPDWalker/Astrophoto
|
9a7ee59deb291617baa3ab8724b8ce5970e6ea9f
|
[
"MIT"
] | 12
|
2020-07-26T06:20:22.000Z
|
2022-03-12T00:43:09.000Z
|
messier_objects/migrations/0003_auto_20200723_1441.py
|
DanielPDWalker/Astrophoto-API
|
9a7ee59deb291617baa3ab8724b8ce5970e6ea9f
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.9 on 2020-07-23 13:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: set a placeholder default on MessierObject.photo.

    Changes the photo ImageField so that objects without an uploaded image
    fall back to 'notcaptured.JPG'.
    """

    dependencies = [
        ('messier_objects', '0002_auto_20200723_1438'),
    ]

    operations = [
        migrations.AlterField(
            model_name='messierobject',
            name='photo',
            field=models.ImageField(default='notcaptured.JPG', upload_to='messier_objects'),
        ),
    ]
| 23.263158
| 92
| 0.631222
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('messier_objects', '0002_auto_20200723_1438'),
]
operations = [
migrations.AlterField(
model_name='messierobject',
name='photo',
field=models.ImageField(default='notcaptured.JPG', upload_to='messier_objects'),
),
]
| true
| true
|
f7092f97f43d8f03ca9070fa7e458673b7957cf3
| 9,168
|
py
|
Python
|
src/config/api-server/vnc_cfg_api_server/tests/test_vnc_load_data.py
|
pawelzny/contrail-controller
|
4950d3144cb8c422264ddb2a926cf4fe9e40b14d
|
[
"Apache-2.0"
] | null | null | null |
src/config/api-server/vnc_cfg_api_server/tests/test_vnc_load_data.py
|
pawelzny/contrail-controller
|
4950d3144cb8c422264ddb2a926cf4fe9e40b14d
|
[
"Apache-2.0"
] | null | null | null |
src/config/api-server/vnc_cfg_api_server/tests/test_vnc_load_data.py
|
pawelzny/contrail-controller
|
4950d3144cb8c422264ddb2a926cf4fe9e40b14d
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2018 Juniper Networks, Inc. All rights reserved.
#
import sys
import os
import logging
import json
import test_case
from vnc_api.exceptions import NoIdError, RefsExistError
from vnc_api.gen.resource_client import *
from vnc_api.gen.resource_xsd import *
from vnc_api.utils import obj_type_to_vnc_class
import shutil
sys.path.append("../common/tests")
from time import sleep
# Module-level logger; per-class stream handlers are attached in
# setUpClass and removed in tearDownClass of the test classes below.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
def retry_exc_handler(tries_remaining, exception, delay):
    """Default retry hook: report the caught exception on stderr.

    Uses sys.stderr.write so the module works under both Python 2 and 3;
    the previous `print >> sys.stderr` statement is a SyntaxError on
    Python 3.
    """
    sys.stderr.write(
        "Caught '%s', %d tries remaining, sleeping for %s seconds\n"
        % (exception, tries_remaining, delay))
def retries(max_tries, delay=5, backoff=1, exceptions=(Exception,), hook=None):
    """Decorator that retries the wrapped callable on failure.

    :param max_tries: total number of attempts.
    :param delay: seconds to sleep between attempts.
    :param backoff: multiplier applied to the delay after each failure.
    :param exceptions: tuple of exception types that trigger a retry;
        anything else propagates immediately.
    :param hook: optional callable ``hook(tries_remaining, exc, delay)``
        invoked before each sleep.
    The last failure is re-raised once the attempts are exhausted.
    """
    def dec(func):
        def f2(*args, **kwargs):
            mydelay = delay
            # Count down the remaining tries.  reversed(range(...)) works on
            # both Python 2 and 3; the previous code mutated range() in
            # place, which only works on Python 2 where range returns a list.
            for tries_remaining in reversed(range(max_tries)):
                try:
                    return func(*args, **kwargs)
                except exceptions as e:
                    if tries_remaining > 0:
                        if hook is not None:
                            hook(tries_remaining, e, mydelay)
                        sleep(mydelay)
                        mydelay = mydelay * backoff
                    else:
                        raise
        return f2
    return dec
#Testing if all the objects in the json file are created. If not, create them.
#Testing if all the objects in the json file are created. If not, create them.
class TestInitData1(test_case.ApiServerTestCase):
    """Verify the predefined fabric-ansible payloads exist on the API server.

    Reads predef_payloads.json from the fabric-ansible playbooks tree and
    tries to read every listed object; a missing object is created on the
    fly via create_object().
    """

    @classmethod
    def setUpClass(cls, *args, **kwargs):
        # Mirror test logs to stderr while this class runs.
        cls.console_handler = logging.StreamHandler()
        cls.console_handler.setLevel(logging.DEBUG)
        logger.addHandler(cls.console_handler)
        super(TestInitData1, cls).setUpClass(
            extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
                                 "../fabric-ansible/ansible-playbooks")])
    # end setUpClass

    @classmethod
    def tearDownClass(cls, *args, **kwargs):
        logger.removeHandler(cls.console_handler)
        super(TestInitData1, cls).tearDownClass(*args, **kwargs)
    # end tearDownClass

    def create_object(self, object, res_type, fq_name):
        """Create one missing object of type ``res_type`` from its dict.

        For job templates the input/output schemas are loaded from the
        matching '<name>.schema.json' file.  An already-existing object
        (RefsExistError) is treated as success.
        """
        # Get the class name from object type
        vnc_cls = obj_type_to_vnc_class(res_type, __name__)
        instance_obj = vnc_cls.from_dict(**object)
        try:
            if(res_type == "job-template"):
                # e.g. 'foo_template' -> 'foo_schema.json'
                schema_name = fq_name.replace('template', 'schema.json')
                with open(os.path.join("../fabric-ansible/ansible-playbooks" +
                                       '/schema/', schema_name),'r+') as schema_file:
                    schema_json = json.load(schema_file)
                    # NOTE(review): these updates mutate the source dict but
                    # instance_obj was already built from it above — the
                    # schemas presumably never reach the created object;
                    # confirm.
                    object["job_template_input_schema"] = schema_json.get(
                        "input_schema")
                    object["job_template_output_schema"] = schema_json.get(
                        "output_schema")
                self._vnc_lib.job_template_create(instance_obj)
            else:
                self._vnc_lib._object_create(res_type, instance_obj)
        except RefsExistError:
            pass

    def test_load_init_data_2(self):
        """Read every predefined object; create the first one found missing.

        NOTE(review): the try wraps the whole loop, so a NoIdError aborts
        iteration and only the object that failed is created per run —
        looks intentional only for retried runs; confirm.
        """
        object = {}
        res_type = ""
        fq_name = ""
        try:
            with open("../fabric-ansible/ansible-playbooks/conf"
                      "/predef_payloads.json") as data_file:
                input_json = json.load(data_file)
            for item in input_json.get('data'):
                res_type = item.get("object_type")
                for object in item.get("objects"):
                    fq_name = object.get("name")
                    self._vnc_lib._object_read(res_type=res_type, fq_name=fq_name)
        except NoIdError:
            self.create_object(object, res_type, fq_name)
        except Exception as e:
            print ("Test failed due to unexpected error: %s" % str(e))
# Test when object_type having invalid name
# Test when object_type having invalid name
class TestInitDataError2(test_case.ApiServerTestCase):
    """Payloads with an unknown object_type must not create anything.

    setUpClass writes a predef_payloads.json whose object_type ('abc') is
    not a valid resource type; the loader should skip it, leaving zero job
    templates.
    """

    @classmethod
    def setUpClass(cls, *args, **kwargs):
        cls.console_handler = logging.StreamHandler()
        cls.console_handler.setLevel(logging.DEBUG)
        logger.addHandler(cls.console_handler)
        # Payload with a deliberately invalid object type.
        json_data = {
            "data": [
                {
                    "object_type": "abc",
                    "objects": [{"fq_name": ["test"]}]
                }
            ]
        }
        if not os.path.exists("conf"):
            os.makedirs("conf")
        with open("conf/predef_payloads.json", "w") as f:
            json.dump(json_data, f)
        super(TestInitDataError2, cls).setUpClass(
            extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
                                 ".")])
    # end setUpClass

    @classmethod
    def tearDownClass(cls, *args, **kwargs):
        logger.removeHandler(cls.console_handler)
        if os.path.exists("conf"):
            shutil.rmtree("conf")
        super(TestInitDataError2, cls).tearDownClass(*args, **kwargs)
    # end tearDownClass

    @retries(5, hook=retry_exc_handler)
    def test_load_init_data_02(self):
        try:
            # presumably the default ipam is only readable once init-data
            # loading has finished — confirm against server init order.
            ipam_fq_name = ['default-domain', 'default-project',
                            'service-chain-flat-ipam']
            ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
            if (ipam_obj):
                jb_list = self._vnc_lib.job_templates_list()
                # assertEqual: assertEquals is a deprecated alias, removed
                # in Python 3.12.
                self.assertEqual(len(jb_list.get('job-templates')), 0)
        except Exception as e:
            print( "Test failed due to unexpected error: %s" % str(e))
# Testing when json is invalid
# Testing when json is invalid
class TestInitDataError3(test_case.ApiServerTestCase):
    """Loader must survive a predef_payloads.json that is not valid JSON."""

    @classmethod
    def setUpClass(cls, *args, **kwargs):
        cls.console_handler = logging.StreamHandler()
        cls.console_handler.setLevel(logging.DEBUG)
        logger.addHandler(cls.console_handler)
        # Deliberately broken payload file: plain text, not JSON.
        json_data = "abc"
        if not os.path.exists("conf"):
            os.makedirs("conf")
        with open("conf/predef_payloads.json", "w") as f:
            f.write(json_data)
        super(TestInitDataError3, cls).setUpClass(
            extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
                                 ".")])
    # end setUpClass

    @classmethod
    def tearDownClass(cls, *args, **kwargs):
        logger.removeHandler(cls.console_handler)
        if os.path.exists("conf"):
            shutil.rmtree("conf")
        super(TestInitDataError3, cls).tearDownClass(*args, **kwargs)
    # end tearDownClass

    @retries(5, hook=retry_exc_handler)
    def test_load_init_data_04(self):
        try:
            # presumably the default ipam is only readable once init-data
            # loading has finished — confirm against server init order.
            ipam_fq_name = ['default-domain', 'default-project',
                            'service-chain-flat-ipam']
            ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
            if (ipam_obj):
                jb_list = self._vnc_lib.job_templates_list()
                # assertEqual: assertEquals is a deprecated alias, removed
                # in Python 3.12.
                self.assertEqual(len(jb_list.get('job-templates')), 0)
        except Exception as e:
            print("Test failed due to unexpected error: %s" % str(e))
# Testing when tag type is unknown
# Testing when tag type is unknown
class TestInitDataError4(test_case.ApiServerTestCase):
    """A tag payload with an unknown tag type must not create any tags."""

    @classmethod
    def setUpClass(cls, *args, **kwargs):
        cls.console_handler = logging.StreamHandler()
        cls.console_handler.setLevel(logging.DEBUG)
        logger.addHandler(cls.console_handler)
        # Tag payload whose tag_type_name ('abc') is not a known tag type.
        json_data = {
            "data": [
                {
                    "object_type": "tag",
                    "objects": [
                        {
                            "fq_name": [
                                "abc=management_ip"
                            ],
                            "name": "abc=management_ip",
                            "tag_type_name": "abc",
                            "tag_value": "management_ip"
                        }
                    ]
                }
            ]
        }
        if not os.path.exists("conf"):
            os.makedirs("conf")
        with open("conf/predef_payloads.json", "w") as f:
            json.dump(json_data, f)
        super(TestInitDataError4, cls).setUpClass(
            extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
                                 ".")])
    # end setUpClass

    @classmethod
    def tearDownClass(cls, *args, **kwargs):
        logger.removeHandler(cls.console_handler)
        if os.path.exists("conf"):
            shutil.rmtree("conf")
        super(TestInitDataError4, cls).tearDownClass(*args, **kwargs)
    # end tearDownClass

    @retries(5, hook=retry_exc_handler)
    def test_load_init_data_05(self):
        try:
            # presumably the default ipam is only readable once init-data
            # loading has finished — confirm against server init order.
            ipam_fq_name = ['default-domain', 'default-project',
                            'service-chain-flat-ipam']
            ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
            if (ipam_obj):
                tags = self._vnc_lib.tags_list()
                # assertEqual: assertEquals is a deprecated alias, removed
                # in Python 3.12.
                self.assertEqual(len(tags.get('tags')), 0)
        except Exception as e:
            print("Test failed due to unexpected error: %s" % str(e))
| 36.672
| 121
| 0.571444
|
import sys
import os
import logging
import json
import test_case
from vnc_api.exceptions import NoIdError, RefsExistError
from vnc_api.gen.resource_client import *
from vnc_api.gen.resource_xsd import *
from vnc_api.utils import obj_type_to_vnc_class
import shutil
sys.path.append("../common/tests")
from time import sleep
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
def retry_exc_handler(tries_remaining, exception, delay):
print >> sys.stderr, "Caught '%s', %d tries remaining, sleeping for %s seconds" % (exception, tries_remaining, delay)
def retries(max_tries, delay=5, backoff=1, exceptions=(Exception,),hook=None):
def dec(func):
def f2(*args, **kwargs):
mydelay = delay
tries = range(max_tries)
tries.reverse()
for tries_remaining in tries:
try:
return func(*args, **kwargs)
except exceptions as e:
if tries_remaining > 0:
if hook is not None:
hook(tries_remaining, e, mydelay)
sleep(mydelay)
mydelay = mydelay * backoff
else:
raise
return f2
return dec
class TestInitData1(test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
super(TestInitData1, cls).setUpClass(
extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
"../fabric-ansible/ansible-playbooks")])
@classmethod
def tearDownClass(cls, *args, **kwargs):
logger.removeHandler(cls.console_handler)
super(TestInitData1, cls).tearDownClass(*args, **kwargs)
def create_object(self, object, res_type, fq_name):
vnc_cls = obj_type_to_vnc_class(res_type, __name__)
instance_obj = vnc_cls.from_dict(**object)
try:
if(res_type == "job-template"):
schema_name = fq_name.replace('template', 'schema.json')
with open(os.path.join("../fabric-ansible/ansible-playbooks" +
'/schema/', schema_name),'r+') as schema_file:
schema_json = json.load(schema_file)
object["job_template_input_schema"] = schema_json.get(
"input_schema")
object["job_template_output_schema"] = schema_json.get(
"output_schema")
self._vnc_lib.job_template_create(instance_obj)
else:
self._vnc_lib._object_create(res_type, instance_obj)
except RefsExistError:
pass
def test_load_init_data_2(self):
object = {}
res_type = ""
fq_name = ""
try:
with open("../fabric-ansible/ansible-playbooks/conf"
"/predef_payloads.json") as data_file:
input_json = json.load(data_file)
for item in input_json.get('data'):
res_type = item.get("object_type")
for object in item.get("objects"):
fq_name = object.get("name")
self._vnc_lib._object_read(res_type=res_type, fq_name=fq_name)
except NoIdError:
self.create_object(object, res_type, fq_name)
except Exception as e:
print ("Test failed due to unexpected error: %s" % str(e))
class TestInitDataError2(test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
json_data = {
"data": [
{
"object_type": "abc",
"objects": [{"fq_name": ["test"]}]
}
]
}
if not os.path.exists("conf"):
os.makedirs("conf")
with open("conf/predef_payloads.json", "w") as f:
json.dump(json_data, f)
super(TestInitDataError2, cls).setUpClass(
extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
".")])
@classmethod
def tearDownClass(cls, *args, **kwargs):
logger.removeHandler(cls.console_handler)
if os.path.exists("conf"):
shutil.rmtree("conf")
super(TestInitDataError2, cls).tearDownClass(*args, **kwargs)
@retries(5, hook=retry_exc_handler)
def test_load_init_data_02(self):
try:
ipam_fq_name = ['default-domain', 'default-project',
'service-chain-flat-ipam']
ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
if (ipam_obj):
jb_list = self._vnc_lib.job_templates_list()
self.assertEquals(len(jb_list.get('job-templates')), 0)
except Exception as e:
print( "Test failed due to unexpected error: %s" % str(e))
class TestInitDataError3(test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
json_data = "abc"
if not os.path.exists("conf"):
os.makedirs("conf")
with open("conf/predef_payloads.json", "w") as f:
f.write(json_data)
super(TestInitDataError3, cls).setUpClass(
extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
".")])
@classmethod
def tearDownClass(cls, *args, **kwargs):
logger.removeHandler(cls.console_handler)
if os.path.exists("conf"):
shutil.rmtree("conf")
super(TestInitDataError3, cls).tearDownClass(*args, **kwargs)
@retries(5, hook=retry_exc_handler)
def test_load_init_data_04(self):
try:
ipam_fq_name = ['default-domain', 'default-project',
'service-chain-flat-ipam']
ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
if (ipam_obj):
jb_list = self._vnc_lib.job_templates_list()
self.assertEquals(len(jb_list.get('job-templates')), 0)
except Exception as e:
print("Test failed due to unexpected error: %s" % str(e))
class TestInitDataError4(test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
json_data = {
"data": [
{
"object_type": "tag",
"objects": [
{
"fq_name": [
"abc=management_ip"
],
"name": "abc=management_ip",
"tag_type_name": "abc",
"tag_value": "management_ip"
}
]
}
]
}
if not os.path.exists("conf"):
os.makedirs("conf")
with open("conf/predef_payloads.json", "w") as f:
json.dump(json_data, f)
super(TestInitDataError4, cls).setUpClass(
extra_config_knobs=[('DEFAULTS', 'fabric_ansible_dir',
".")])
@classmethod
def tearDownClass(cls, *args, **kwargs):
logger.removeHandler(cls.console_handler)
if os.path.exists("conf"):
shutil.rmtree("conf")
super(TestInitDataError4, cls).tearDownClass(*args, **kwargs)
@retries(5, hook=retry_exc_handler)
def test_load_init_data_05(self):
try:
ipam_fq_name = ['default-domain', 'default-project',
'service-chain-flat-ipam']
ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
if (ipam_obj):
tags = self._vnc_lib.tags_list()
self.assertEquals(len(tags.get('tags')), 0)
except Exception as e:
print("Test failed due to unexpected error: %s" % str(e))
| true
| true
|
f7092fce6743940e729d3c18cfe5b7cc2120c659
| 14,174
|
py
|
Python
|
openapi_client/models/net_cdf_timeseries_rain.py
|
nens/threedi-api-client
|
43b0eb1bd47310b1783f87f6ad8bfbfe0fb4d90a
|
[
"BSD-3-Clause"
] | null | null | null |
openapi_client/models/net_cdf_timeseries_rain.py
|
nens/threedi-api-client
|
43b0eb1bd47310b1783f87f6ad8bfbfe0fb4d90a
|
[
"BSD-3-Clause"
] | 16
|
2021-05-31T09:52:04.000Z
|
2022-03-14T16:07:19.000Z
|
openapi_client/models/net_cdf_timeseries_rain.py
|
nens/threedi-api-client
|
43b0eb1bd47310b1783f87f6ad8bfbfe0fb4d90a
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
"""
3Di API
3Di simulation API (latest version: 3.0) Framework release: 1.0.16 3Di core release: 2.0.11 deployed on: 07:33AM (UTC) on September 04, 2020 # noqa: E501
The version of the OpenAPI document: 3.0
Contact: info@nelen-schuurmans.nl
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from openapi_client.configuration import Configuration
class NetCDFTimeseriesRain(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'url': 'str',
'multiplier': 'float',
'simulation': 'str',
'offset': 'int',
'duration': 'int',
'timestamps': 'list[int]',
'interval': 'int',
'values_reference': 'str',
'fill_value': 'str',
'units': 'str',
'file': 'FileReadOnly',
'uid': 'str'
}
attribute_map = {
'url': 'url',
'multiplier': 'multiplier',
'simulation': 'simulation',
'offset': 'offset',
'duration': 'duration',
'timestamps': 'timestamps',
'interval': 'interval',
'values_reference': 'values_reference',
'fill_value': 'fill_value',
'units': 'units',
'file': 'file',
'uid': 'uid'
}
def __init__(self, url=None, multiplier=None, simulation=None, offset=None, duration=None, timestamps=None, interval=None, values_reference=None, fill_value=None, units=None, file=None, uid=None, local_vars_configuration=None): # noqa: E501
"""NetCDFTimeseriesRain - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._url = None
self._multiplier = None
self._simulation = None
self._offset = None
self._duration = None
self._timestamps = None
self._interval = None
self._values_reference = None
self._fill_value = None
self._units = None
self._file = None
self._uid = None
self.discriminator = None
if url is not None:
self.url = url
if multiplier is not None:
self.multiplier = multiplier
if simulation is not None:
self.simulation = simulation
self.offset = offset
self.duration = duration
self.timestamps = timestamps
self.interval = interval
self.values_reference = values_reference
if fill_value is not None:
self.fill_value = fill_value
self.units = units
if file is not None:
self.file = file
if uid is not None:
self.uid = uid
@property
def url(self):
"""Gets the url of this NetCDFTimeseriesRain. # noqa: E501
:return: The url of this NetCDFTimeseriesRain. # noqa: E501
:rtype: str
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this NetCDFTimeseriesRain.
:param url: The url of this NetCDFTimeseriesRain. # noqa: E501
:type: str
"""
self._url = url
@property
def multiplier(self):
"""Gets the multiplier of this NetCDFTimeseriesRain. # noqa: E501
:return: The multiplier of this NetCDFTimeseriesRain. # noqa: E501
:rtype: float
"""
return self._multiplier
@multiplier.setter
def multiplier(self, multiplier):
"""Sets the multiplier of this NetCDFTimeseriesRain.
:param multiplier: The multiplier of this NetCDFTimeseriesRain. # noqa: E501
:type: float
"""
self._multiplier = multiplier
@property
def simulation(self):
"""Gets the simulation of this NetCDFTimeseriesRain. # noqa: E501
:return: The simulation of this NetCDFTimeseriesRain. # noqa: E501
:rtype: str
"""
return self._simulation
@simulation.setter
def simulation(self, simulation):
"""Sets the simulation of this NetCDFTimeseriesRain.
:param simulation: The simulation of this NetCDFTimeseriesRain. # noqa: E501
:type: str
"""
self._simulation = simulation
@property
def offset(self):
"""Gets the offset of this NetCDFTimeseriesRain. # noqa: E501
offset of event in simulation in seconds # noqa: E501
:return: The offset of this NetCDFTimeseriesRain. # noqa: E501
:rtype: int
"""
return self._offset
@offset.setter
def offset(self, offset):
"""Sets the offset of this NetCDFTimeseriesRain.
offset of event in simulation in seconds # noqa: E501
:param offset: The offset of this NetCDFTimeseriesRain. # noqa: E501
:type: int
"""
if (self.local_vars_configuration.client_side_validation and
offset is not None and offset > 2147483647): # noqa: E501
raise ValueError("Invalid value for `offset`, must be a value less than or equal to `2147483647`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
offset is not None and offset < -2147483648): # noqa: E501
raise ValueError("Invalid value for `offset`, must be a value greater than or equal to `-2147483648`") # noqa: E501
self._offset = offset
@property
def duration(self):
"""Gets the duration of this NetCDFTimeseriesRain. # noqa: E501
Duration of event in seconds # noqa: E501
:return: The duration of this NetCDFTimeseriesRain. # noqa: E501
:rtype: int
"""
return self._duration
@duration.setter
def duration(self, duration):
"""Sets the duration of this NetCDFTimeseriesRain.
Duration of event in seconds # noqa: E501
:param duration: The duration of this NetCDFTimeseriesRain. # noqa: E501
:type: int
"""
if (self.local_vars_configuration.client_side_validation and
duration is not None and duration > 2147483647): # noqa: E501
raise ValueError("Invalid value for `duration`, must be a value less than or equal to `2147483647`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
duration is not None and duration < -2147483648): # noqa: E501
raise ValueError("Invalid value for `duration`, must be a value greater than or equal to `-2147483648`") # noqa: E501
self._duration = duration
@property
def timestamps(self):
"""Gets the timestamps of this NetCDFTimeseriesRain. # noqa: E501
in simulation in seconds # noqa: E501
:return: The timestamps of this NetCDFTimeseriesRain. # noqa: E501
:rtype: list[int]
"""
return self._timestamps
@timestamps.setter
def timestamps(self, timestamps):
"""Sets the timestamps of this NetCDFTimeseriesRain.
in simulation in seconds # noqa: E501
:param timestamps: The timestamps of this NetCDFTimeseriesRain. # noqa: E501
:type: list[int]
"""
self._timestamps = timestamps
@property
def interval(self):
"""Gets the interval of this NetCDFTimeseriesRain. # noqa: E501
interval in seconds # noqa: E501
:return: The interval of this NetCDFTimeseriesRain. # noqa: E501
:rtype: int
"""
return self._interval
@interval.setter
def interval(self, interval):
"""Sets the interval of this NetCDFTimeseriesRain.
interval in seconds # noqa: E501
:param interval: The interval of this NetCDFTimeseriesRain. # noqa: E501
:type: int
"""
if (self.local_vars_configuration.client_side_validation and
interval is not None and interval > 2147483647): # noqa: E501
raise ValueError("Invalid value for `interval`, must be a value less than or equal to `2147483647`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
interval is not None and interval < 0): # noqa: E501
raise ValueError("Invalid value for `interval`, must be a value greater than or equal to `0`") # noqa: E501
self._interval = interval
@property
def values_reference(self):
"""Gets the values_reference of this NetCDFTimeseriesRain. # noqa: E501
:return: The values_reference of this NetCDFTimeseriesRain. # noqa: E501
:rtype: str
"""
return self._values_reference
@values_reference.setter
def values_reference(self, values_reference):
"""Sets the values_reference of this NetCDFTimeseriesRain.
:param values_reference: The values_reference of this NetCDFTimeseriesRain. # noqa: E501
:type: str
"""
if (self.local_vars_configuration.client_side_validation and
values_reference is not None and len(values_reference) > 255):
raise ValueError("Invalid value for `values_reference`, length must be less than or equal to `255`") # noqa: E501
self._values_reference = values_reference
@property
def fill_value(self):
"""Gets the fill_value of this NetCDFTimeseriesRain. # noqa: E501
:return: The fill_value of this NetCDFTimeseriesRain. # noqa: E501
:rtype: str
"""
return self._fill_value
@fill_value.setter
def fill_value(self, fill_value):
"""Sets the fill_value of this NetCDFTimeseriesRain.
:param fill_value: The fill_value of this NetCDFTimeseriesRain. # noqa: E501
:type: str
"""
if (self.local_vars_configuration.client_side_validation and
fill_value is not None and len(fill_value) > 128):
raise ValueError("Invalid value for `fill_value`, length must be less than or equal to `128`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
fill_value is not None and len(fill_value) < 1):
raise ValueError("Invalid value for `fill_value`, length must be greater than or equal to `1`") # noqa: E501
self._fill_value = fill_value
@property
def units(self):
"""Gets the units of this NetCDFTimeseriesRain. # noqa: E501
:return: The units of this NetCDFTimeseriesRain. # noqa: E501
:rtype: str
"""
return self._units
@units.setter
def units(self, units):
"""Sets the units of this NetCDFTimeseriesRain.
:param units: The units of this NetCDFTimeseriesRain. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and units is None: # noqa: E501
raise ValueError("Invalid value for `units`, must not be `None`") # noqa: E501
allowed_values = ["mm", "mm/h"] # noqa: E501
if self.local_vars_configuration.client_side_validation and units not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `units` ({0}), must be one of {1}" # noqa: E501
.format(units, allowed_values)
)
self._units = units
@property
def file(self):
"""Gets the file of this NetCDFTimeseriesRain. # noqa: E501
:return: The file of this NetCDFTimeseriesRain. # noqa: E501
:rtype: FileReadOnly
"""
return self._file
@file.setter
def file(self, file):
"""Sets the file of this NetCDFTimeseriesRain.
:param file: The file of this NetCDFTimeseriesRain. # noqa: E501
:type: FileReadOnly
"""
self._file = file
@property
def uid(self):
"""Gets the uid of this NetCDFTimeseriesRain. # noqa: E501
:return: The uid of this NetCDFTimeseriesRain. # noqa: E501
:rtype: str
"""
return self._uid
@uid.setter
def uid(self, uid):
"""Sets the uid of this NetCDFTimeseriesRain.
:param uid: The uid of this NetCDFTimeseriesRain. # noqa: E501
:type: str
"""
self._uid = uid
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NetCDFTimeseriesRain):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, NetCDFTimeseriesRain):
return True
return self.to_dict() != other.to_dict()
| 31.851685
| 245
| 0.607239
|
import pprint
import re
import six
from openapi_client.configuration import Configuration
class NetCDFTimeseriesRain(object):
openapi_types = {
'url': 'str',
'multiplier': 'float',
'simulation': 'str',
'offset': 'int',
'duration': 'int',
'timestamps': 'list[int]',
'interval': 'int',
'values_reference': 'str',
'fill_value': 'str',
'units': 'str',
'file': 'FileReadOnly',
'uid': 'str'
}
attribute_map = {
'url': 'url',
'multiplier': 'multiplier',
'simulation': 'simulation',
'offset': 'offset',
'duration': 'duration',
'timestamps': 'timestamps',
'interval': 'interval',
'values_reference': 'values_reference',
'fill_value': 'fill_value',
'units': 'units',
'file': 'file',
'uid': 'uid'
}
def __init__(self, url=None, multiplier=None, simulation=None, offset=None, duration=None, timestamps=None, interval=None, values_reference=None, fill_value=None, units=None, file=None, uid=None, local_vars_configuration=None): if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._url = None
self._multiplier = None
self._simulation = None
self._offset = None
self._duration = None
self._timestamps = None
self._interval = None
self._values_reference = None
self._fill_value = None
self._units = None
self._file = None
self._uid = None
self.discriminator = None
if url is not None:
self.url = url
if multiplier is not None:
self.multiplier = multiplier
if simulation is not None:
self.simulation = simulation
self.offset = offset
self.duration = duration
self.timestamps = timestamps
self.interval = interval
self.values_reference = values_reference
if fill_value is not None:
self.fill_value = fill_value
self.units = units
if file is not None:
self.file = file
if uid is not None:
self.uid = uid
@property
def url(self):
return self._url
@url.setter
def url(self, url):
self._url = url
@property
def multiplier(self):
return self._multiplier
@multiplier.setter
def multiplier(self, multiplier):
self._multiplier = multiplier
@property
def simulation(self):
return self._simulation
@simulation.setter
def simulation(self, simulation):
self._simulation = simulation
@property
def offset(self):
return self._offset
@offset.setter
def offset(self, offset):
if (self.local_vars_configuration.client_side_validation and
offset is not None and offset > 2147483647): raise ValueError("Invalid value for `offset`, must be a value less than or equal to `2147483647`") if (self.local_vars_configuration.client_side_validation and
offset is not None and offset < -2147483648): raise ValueError("Invalid value for `offset`, must be a value greater than or equal to `-2147483648`")
self._offset = offset
@property
def duration(self):
return self._duration
@duration.setter
def duration(self, duration):
if (self.local_vars_configuration.client_side_validation and
duration is not None and duration > 2147483647): raise ValueError("Invalid value for `duration`, must be a value less than or equal to `2147483647`") if (self.local_vars_configuration.client_side_validation and
duration is not None and duration < -2147483648): raise ValueError("Invalid value for `duration`, must be a value greater than or equal to `-2147483648`")
self._duration = duration
@property
def timestamps(self):
return self._timestamps
@timestamps.setter
def timestamps(self, timestamps):
self._timestamps = timestamps
@property
def interval(self):
return self._interval
@interval.setter
def interval(self, interval):
if (self.local_vars_configuration.client_side_validation and
interval is not None and interval > 2147483647): raise ValueError("Invalid value for `interval`, must be a value less than or equal to `2147483647`") if (self.local_vars_configuration.client_side_validation and
interval is not None and interval < 0): raise ValueError("Invalid value for `interval`, must be a value greater than or equal to `0`")
self._interval = interval
@property
def values_reference(self):
return self._values_reference
@values_reference.setter
def values_reference(self, values_reference):
if (self.local_vars_configuration.client_side_validation and
values_reference is not None and len(values_reference) > 255):
raise ValueError("Invalid value for `values_reference`, length must be less than or equal to `255`")
self._values_reference = values_reference
@property
def fill_value(self):
return self._fill_value
@fill_value.setter
def fill_value(self, fill_value):
if (self.local_vars_configuration.client_side_validation and
fill_value is not None and len(fill_value) > 128):
raise ValueError("Invalid value for `fill_value`, length must be less than or equal to `128`") if (self.local_vars_configuration.client_side_validation and
fill_value is not None and len(fill_value) < 1):
raise ValueError("Invalid value for `fill_value`, length must be greater than or equal to `1`")
self._fill_value = fill_value
@property
def units(self):
return self._units
@units.setter
def units(self, units):
if self.local_vars_configuration.client_side_validation and units is None: raise ValueError("Invalid value for `units`, must not be `None`") allowed_values = ["mm", "mm/h"] if self.local_vars_configuration.client_side_validation and units not in allowed_values: raise ValueError(
"Invalid value for `units` ({0}), must be one of {1}" .format(units, allowed_values)
)
self._units = units
@property
def file(self):
return self._file
@file.setter
def file(self, file):
self._file = file
@property
def uid(self):
return self._uid
@uid.setter
def uid(self, uid):
self._uid = uid
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, NetCDFTimeseriesRain):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
if not isinstance(other, NetCDFTimeseriesRain):
return True
return self.to_dict() != other.to_dict()
| true
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.