id stringlengths 1 7 | text stringlengths 6 1.03M | dataset_id stringclasses 1
value |
|---|---|---|
1771071 | import pytest
import mock
from datetime import datetime
from app.lib.room_list import RoomList
class TestRoomList():
    """Unit tests for RoomList."""

    def test_it_takes_a_list_of_rooms_as_an_argument(self):
        # RoomList should simply store whatever collection it is handed.
        room_names = ['Big One', 'Little One', 'Cardboard One']
        subject = RoomList(room_names)
        assert subject.rooms == room_names

    @mock.patch('app.lib.room_list.current_app')
    def test_get_free_busy_calls_to_api_with_correct_data(self, current_app):
        rooms = [
            {'resourceEmail': 'Big One'},
            {'resourceEmail': 'Little One'},
            {'resourceEmail': 'Cardboard One'},
        ]
        date = str(datetime(2016, 8, 19, 16, 0, 0))[0:10]
        expected = {
            'rooms': rooms,
            'date': date
        }

        # Stub the chained calendar API so that
        # calendar.freebusy().query(...).execute() returns `expected`.
        query_stub = mock.MagicMock()
        query_stub.execute.return_value = expected
        freebusy_stub = mock.MagicMock()
        freebusy_stub.query.return_value = query_stub
        calendar_stub = mock.MagicMock()
        calendar_stub.freebusy.return_value = freebusy_stub
        current_app.config = {'CALENDAR': calendar_stub}

        room_list = RoomList(rooms)
        assert room_list.get_free_busy(date) == expected
| StarcoderdataPython |
173567 | from utilities import utils
from text_processing import text_normalizer
import pickle
import re
import os
import pickle
from time import time
from text_processing import abbreviations_resolver
class SearchEngineInsensitiveToSpelling:
    """In-memory inverted index over article text supporting fuzzy
    (spelling-insensitive) keyword search, wildcard patterns and
    abbreviation expansion.

    Indexing scheme: every stemmed word and every adjacent word pair is
    mapped to an integer word id; ``id2docArray[word_id]`` holds the set of
    document row numbers containing it.  Words shorter than
    ``symbols_count`` live in ``dictionary_small_words``; longer words are
    bucketed by their first ``symbols_count`` characters in
    ``dictionary_by_first_letters`` so fuzzy lookups only scan one bucket.
    """

    # NOTE(review): `columns_to_process` uses a mutable (list) default
    # argument -- safe only as long as no caller mutates it.
    def __init__(self, abbreviation_folder = "../model/abbreviations_dicts", load_abbreviations = False,
            symbols_count = 3, columns_to_process = ["title","abstract","keywords","identificators"]):
        # prefix (first `symbols_count` chars) -> {word -> word id}
        self.dictionary_by_first_letters = {}
        # word id -> set (or list, after shrink_memory) of doc row numbers
        self.id2docArray = []
        # words shorter than `symbols_count` -> word id
        self.dictionary_small_words = {}
        # next word id handed out by add_item_to_dict
        self.newId = 0
        self.total_articles_number = 0
        self.symbols_count = symbols_count
        self._abbreviations_resolver = abbreviations_resolver.AbbreviationsResolver([])
        # abbreviation -> {meaning -> set of doc ids with that meaning}
        self.abbreviations_count_docs = {}
        # doc row number -> {abbreviation -> [resolved meanings]}
        self.docs_with_abbreviations = {}
        self.columns_to_process = columns_to_process
        self.load_abbreviations = load_abbreviations
        if self.load_abbreviations:
            self._abbreviations_resolver.load_model(abbreviation_folder)

    def calculate_abbreviations_count_docs(self, articles_df):
        """Resolve, per document, which meaning each abbreviation takes.

        For every known abbreviation: find documents containing it, then
        intersect with documents containing each candidate meaning; docs
        where no meaning co-occurs fall back to the most frequent meaning.
        Results are stored in ``docs_with_abbreviations`` (by row number),
        ``docs_with_abbreviaitons_by_id`` (by article id) and
        ``abbreviations_count_docs``.
        """
        self.docs_with_abbreviations = {}
        self.docs_with_abbreviaitons_by_id = {}
        for idx, abbr in enumerate(self._abbreviations_resolver.resolved_abbreviations):
            # Exact (threshold 1.0) occurrences of the abbreviation itself.
            docs_found_for_abbr = self.find_articles_with_keywords([abbr], 1.0, extend_with_abbreviations=False)
            docs_resolved = set()
            for abbr_meaning in self._abbreviations_resolver.resolved_abbreviations[abbr]:
                docs_for_abbr_meaning = self.find_articles_with_keywords([abbr_meaning], 0.92, extend_with_abbreviations=False)
                # Docs containing both the abbreviation and this spelled-out meaning.
                for docId in docs_found_for_abbr.intersection(docs_for_abbr_meaning):
                    if docId not in self.docs_with_abbreviations:
                        self.docs_with_abbreviations[docId] = {}
                    if abbr not in self.docs_with_abbreviations[docId]:
                        self.docs_with_abbreviations[docId][abbr] = []
                    self.docs_with_abbreviations[docId][abbr].append(abbr_meaning)
                docs_resolved = docs_resolved.union(docs_for_abbr_meaning)
            # Docs with the abbreviation but no co-occurring meaning:
            # assign the top-ranked (most frequent) meaning.
            for docId in docs_found_for_abbr - docs_resolved:
                if docId not in self.docs_with_abbreviations:
                    self.docs_with_abbreviations[docId] = {}
                if abbr not in self.docs_with_abbreviations[docId]:
                    self.docs_with_abbreviations[docId][abbr] = []
                self.docs_with_abbreviations[docId][abbr].append(self._abbreviations_resolver.sorted_resolved_abbreviations[abbr][0][0])
            if idx % 3000 == 0 or idx == len(self._abbreviations_resolver.resolved_abbreviations) - 1:
                print("Processed %d abbreviations"%idx)
        # Keep only the highest-scoring meaning when several were collected.
        for docId in self.docs_with_abbreviations:
            for word in self.docs_with_abbreviations[docId]:
                if len(self.docs_with_abbreviations[docId][word]) > 1:
                    sorted_abbr = sorted(
                        [(w, self._abbreviations_resolver.resolved_abbreviations[word][w]) for w in self.docs_with_abbreviations[docId][word]],
                        key = lambda x: x[1], reverse =True)
                    self.docs_with_abbreviations[docId][word] = [sorted_abbr[0][0]]
        # Add British/American spelling variants of "program(me)".
        for docId in self.docs_with_abbreviations:
            for word in self.docs_with_abbreviations[docId]:
                abbr_meanings = set()
                for abbr_meaning in self.docs_with_abbreviations[docId][word]:
                    abbr_meanings.add(re.sub(r"\bprogramme\b", "program", abbr_meaning))
                    abbr_meanings.add(re.sub(r"\bprogram\b", "programme", abbr_meaning))
                self.docs_with_abbreviations[docId][word] = list(abbr_meanings)
        # Re-key by article id when the dataframe provides one.
        for docId in self.docs_with_abbreviations:
            art_id = articles_df["id"].values[docId] if "id" in articles_df.columns else docId
            for word in self.docs_with_abbreviations[docId]:
                if art_id not in self.docs_with_abbreviaitons_by_id:
                    self.docs_with_abbreviaitons_by_id[art_id] = {}
                self.docs_with_abbreviaitons_by_id[art_id][word] = self.docs_with_abbreviations[docId][word]
        # Invert into abbreviation -> meaning -> set of doc row numbers.
        self.abbreviations_count_docs = {}
        for i in self.docs_with_abbreviations:
            for key in self.docs_with_abbreviations[i]:
                if key not in self.abbreviations_count_docs:
                    self.abbreviations_count_docs[key] = {}
                for meaning in self.docs_with_abbreviations[i][key]:
                    if meaning not in self.abbreviations_count_docs[key]:
                        self.abbreviations_count_docs[key][meaning] = set()
                    self.abbreviations_count_docs[key][meaning].add(i)

    def save_model(self, folder="../model/search_index"):
        """Pickle the whole index (dictionaries, postings, abbreviation maps) to disk."""
        if not os.path.exists(folder):
            os.makedirs(folder)
        pickle.dump([self.dictionary_by_first_letters, self.id2docArray, self.dictionary_small_words, self.total_articles_number, self.abbreviations_count_docs, self.docs_with_abbreviations, self.docs_with_abbreviaitons_by_id], open(os.path.join(folder, "search_index.pickle"),"wb"))

    def load_model(self, folder="../model/search_index"):
        """Load an index previously written by save_model (same field order)."""
        self.dictionary_by_first_letters, self.id2docArray, self.dictionary_small_words, self.total_articles_number, self.abbreviations_count_docs, self.docs_with_abbreviations, self.docs_with_abbreviaitons_by_id = pickle.load(open(os.path.join(folder, "search_index.pickle"),"rb"))

    def create_inverted_index(self, articles_df, continue_adding = False, print_info = True):
        """Index every article: unigrams and adjacent bigrams of the stemmed text.

        :param articles_df: dataframe with the columns in self.columns_to_process
        :param continue_adding: append to an existing index instead of rebuilding counts
        :param print_info: print progress every 20000 articles
        """
        if continue_adding:
            self.total_articles_number += len(articles_df)
            # NOTE(review): `unshrink_memory` is not defined on this class --
            # this branch would raise AttributeError; confirm against the full project.
            self.unshrink_memory(set)
        else:
            self.total_articles_number = len(articles_df)
        for i in range(len(articles_df)):
            text = ""
            # Concatenate all configured columns, separated by " . ".
            for column in self.columns_to_process:
                if column in ["keywords","identificators"]:
                    text = text + " . " + (text_normalizer.normalize_key_words_for_search(articles_df[column].values[i]) if column in articles_df.columns else "" )
                else:
                    text = text + " . " + text_normalizer.normalize_text(articles_df[column].values[i])
            text_words = text_normalizer.get_stemmed_words_inverted_index(text)
            # Index each word and each adjacent word pair.
            for j in range(len(text_words)):
                self.add_item_to_dict(text_words[j], i)
                if j != len(text_words) - 1:
                    word_expression = text_words[j] + " " + text_words[j+1]
                    self.add_item_to_dict(word_expression, i)
            if print_info and (i % 20000 == 0 or i == len(articles_df) -1):
                print("Processed %d articles"%i)
        self.shrink_memory()
        if self.load_abbreviations:
            self.calculate_abbreviations_count_docs(articles_df)

    def shrink_memory(self, operation = list):
        """Convert every postings set to `operation` (default list) to save memory."""
        for i in range(len(self.id2docArray)):
            self.id2docArray[i] = operation(self.id2docArray[i])

    def add_item_to_dict(self, word, docId):
        """Register `word` (or word pair) as occurring in document `docId`."""
        if len(word) == 0:
            return
        # Short words get their own flat dictionary.
        if len(word) < self.symbols_count:
            if word not in self.dictionary_small_words:
                self.dictionary_small_words[word] = self.newId
                self.newId += 1
                self.id2docArray.append(set())
            self.id2docArray[self.dictionary_small_words[word]].add(docId)
            return
        # Longer words are bucketed by their first `symbols_count` characters.
        if word[:self.symbols_count] not in self.dictionary_by_first_letters:
            self.dictionary_by_first_letters[word[:self.symbols_count]] = {}
        if word not in self.dictionary_by_first_letters[word[:self.symbols_count]]:
            self.dictionary_by_first_letters[word[:self.symbols_count]][word] = self.newId
            self.newId += 1
            self.id2docArray.append(set())
        self.id2docArray[self.dictionary_by_first_letters[word[:self.symbols_count]][word]].add(docId)

    def get_articles_by_word(self, word):
        """Return the postings (doc ids) for `word`, or [] if it is unknown."""
        # NOTE(review): the bare excepts swallow *any* error, not just a
        # missing key -- consider narrowing to KeyError.
        try:
            if len(word) < self.symbols_count:
                return self.id2docArray[self.dictionary_small_words[word]]
        except:
            return []
        try:
            if len(word) >= self.symbols_count:
                return self.id2docArray[self.dictionary_by_first_letters[word[:self.symbols_count]][word]]
        except:
            return []
        return []

    def generate_sub_patterns(self, pattern):
        """Build regex prefixes of `pattern` (wildcards as \\w*) up to
        `symbols_count` literal characters, used to select index buckets."""
        if pattern.strip() != "" and pattern.strip()[0] == "*":
            return ["*"]
        sub_patterns = set()
        res = ""
        cnt = 0
        for symb in pattern:
            if symb !="*":
                res += symb
                cnt += 1
            if symb == "*":
                res += "\w*"
                sub_patterns.add(res)
            if cnt == self.symbols_count:
                sub_patterns.add(res)
                break
        sub_patterns.add(res)
        return list(sub_patterns)

    def find_words_by_pattern(self, pattern):
        """Expand a wildcard pattern ('*' = any word chars) into all
        indexed single words matching it; a pattern without '*' is returned as-is."""
        if re.search(r"\w+(\*\w*)+", pattern) is None:
            return [pattern]
        pattern = re.sub("[\*]+","*", pattern)
        new_pattern = pattern.replace("*","\w*")
        words_found = []
        # Patterns with few literal characters may match short words too.
        if len(pattern.replace("*","")) < self.symbols_count:
            for w in self.dictionary_small_words:
                res = re.match(new_pattern, w)
                if res and res.group(0) == w and " " not in w:
                    words_found.append(w)
        # If the pattern starts with a full literal prefix, scan only that bucket ...
        if new_pattern[:self.symbols_count] in self.dictionary_by_first_letters:
            for key in self.dictionary_by_first_letters[new_pattern[:self.symbols_count]]:
                res = re.match(new_pattern, key)
                if res and res.group(0) == key and " " not in key:
                    words_found.append(key)
        else:
            # ... otherwise scan every bucket whose key matches a sub-pattern.
            for sub_pattern in self.generate_sub_patterns(pattern):
                for w in self.dictionary_by_first_letters:
                    res = re.match(sub_pattern, w)
                    if res and res.group(0) == w and " " not in w:
                        for key in self.dictionary_by_first_letters[w]:
                            res = re.match(new_pattern, key)
                            if res and res.group(0) == key and " " not in key:
                                words_found.append(key)
        return words_found

    def find_similar_words_by_spelling(self, word, threshold = 0.85, all_similar_words = False):
        """Return `word` plus indexed words whose normalized Levenshtein
        similarity to it (or to its stem / spelling variants) is >= threshold."""
        time_total = time()
        stemmed_word = " ".join(text_normalizer.get_stemmed_words_inverted_index(word))
        stemmed_word = word if len(stemmed_word) < self.symbols_count else stemmed_word
        words = set([word, stemmed_word])
        # threshold ~1.0 means exact match only; very short words are not fuzzed.
        if threshold >= 0.99 or len(stemmed_word) < self.symbols_count:
            return words
        words.add(re.sub(r"\bprogramme\b", "program", word))
        words.add(re.sub(r"\bprogram\b", "programme", stemmed_word))
        intial_words = words
        try:
            articles_count = len(self.get_articles_by_word(stemmed_word))
            # Only candidates from the same first-letters bucket are considered.
            for dict_word in self.dictionary_by_first_letters[word[:self.symbols_count]]:
                if all_similar_words or (articles_count == 0 or len(self.get_articles_by_word(dict_word)) < 4*articles_count):
                    for w in intial_words:
                        if utils.normalized_levenshtein_score(dict_word, w) >= threshold:
                            words.add(dict_word)
        except:
            # NOTE(review): `intial_words` aliases `words` (no copy), so
            # `words.add(...)` mutates the set being iterated; the resulting
            # RuntimeError is silently swallowed here, truncating the search.
            # Consider `intial_words = set(words)` and narrowing this except.
            pass
        z_s_replaced_words = set()
        for word in words:
            z_s_replaced_words = z_s_replaced_words.union(text_normalizer.replaced_with_z_s_symbols_words(word, self))
        return words.union(z_s_replaced_words)

    def find_keywords(self, stemmed_words):
        """Turn a stemmed query into index keys: adjacent bigrams if the
        query has more than two words, otherwise the whole phrase."""
        keywords = []
        if len(stemmed_words) > 2:
            for i in range(len(stemmed_words) - 1):
                keywords.append(stemmed_words[i] + " " + stemmed_words[i+1])
        else:
            keywords.append(" ".join(stemmed_words))
        return keywords

    def extend_query(self, query):
        """Cartesian-product expansion of a query: every word replaced by
        each of its similarly spelled variants."""
        words = query.split()
        prev_set = self.find_similar_words_by_spelling(words[0])
        for i in range(1, len(words)):
            new_set = set()
            for word in self.find_similar_words_by_spelling(words[i]):
                for prev_exp in prev_set:
                    new_set.add(prev_exp + " " + word)
            prev_set = new_set
        return prev_set

    def generate_subexpressions(self, expression):
        """All contiguous word sub-sequences of `expression`."""
        words = expression.split()
        generated_words = set()
        for i in range(len(words)):
            word = words[i]
            generated_words.add(word)
            for j in range(i+1, len(words)):
                word = word + " " + words[j]
                generated_words.add(word)
        return generated_words

    def has_meaning_for_abbreviation(self, abbr_meanings, dict_to_check):
        """True if any entry of `dict_to_check` (pairs of (word, count)) is in `abbr_meanings`."""
        return len(abbr_meanings.intersection(set([w[0] for w in dict_to_check]))) > 0

    def extend_with_abbreviations(self, query, dict_to_check, extend_abbr_meanings = "", add_to_meanings = False):
        """Recursively rewrite `query`, substituting sub-expressions with
        their abbreviation expansions (or vice versa) from `dict_to_check`.

        :param extend_abbr_meanings: ';'-separated meanings to restrict substitutions to
        :param add_to_meanings: record substituted expressions back into the meaning list
        :return: (set of rewritten queries incl. the original, updated meanings string)
        """
        abbr_meanings = set([w.strip() for w in extend_abbr_meanings.split(";") if w.strip() != ""])
        new_queries = set([query])
        subexpressions = self.generate_subexpressions(query)
        for expr in subexpressions:
            if expr in dict_to_check:
                if self.has_meaning_for_abbreviation(abbr_meanings, dict_to_check[expr]):
                    # Restrict substitution to the explicitly requested meanings.
                    for word,cnt in dict_to_check[expr]:
                        if expr in word:
                            continue
                        if word in abbr_meanings:
                            new_query = re.sub(r"\b%s\b"%expr, word, query)
                            if new_query not in new_queries:
                                if add_to_meanings and expr.strip() != "":
                                    abbr_meanings.add(expr)
                                    extend_abbr_meanings = ";".join(list(abbr_meanings))
                                new_queries_part, extend_abbr_meanings = self.extend_with_abbreviations(new_query, dict_to_check, extend_abbr_meanings)
                                new_queries = new_queries.union(new_queries_part)
                else:
                    # No meaning constraint: substitute any frequent-enough expansion.
                    for word,cnt in dict_to_check[expr]:
                        if expr in word or (len(dict_to_check[expr]) > 1 and cnt < 15):
                            continue
                        new_query = re.sub(r"\b%s\b"%expr, word, query)
                        if new_query not in new_queries:
                            if add_to_meanings and expr.strip() != "":
                                abbr_meanings.add(expr)
                                extend_abbr_meanings = ";".join(list(abbr_meanings))
                            new_queries_part, extend_abbr_meanings = self.extend_with_abbreviations(new_query, dict_to_check, extend_abbr_meanings)
                            new_queries = new_queries.union(new_queries_part)
        return new_queries, extend_abbr_meanings

    def extend_query_with_abbreviations(self, query, extend_with_abbreviations, extend_abbr_meanings=""):
        """Expand a query both abbreviation->meaning and meaning->abbreviation."""
        if not extend_with_abbreviations:
            return set(), extend_abbr_meanings
        normalized_key = text_normalizer.normalize_text(query)
        extended_queries = set()
        extended_queries_part, extend_abbr_meanings = self.extend_with_abbreviations(normalized_key, self._abbreviations_resolver.sorted_resolved_abbreviations, extend_abbr_meanings)
        extended_queries = extended_queries.union(extended_queries_part)
        new_extended_queries = set(extended_queries)
        for new_query in extended_queries:
            new_extended_queries_part, extend_abbr_meanings = self.extend_with_abbreviations(new_query, self._abbreviations_resolver.sorted_words_to_abbreviations, extend_abbr_meanings, add_to_meanings = True)
            new_extended_queries = new_extended_queries.union(new_extended_queries_part)
        return new_extended_queries, extend_abbr_meanings

    def get_article_with_special_abbr_meanings(self, query, abbr_meanings):
        """Documents whose abbreviations in `query` were resolved to the given
        meanings; second return value is False when no abbreviation applies."""
        if abbr_meanings.strip() == "" or len([w for w in query.split() if text_normalizer.is_abbreviation(w)]) == 0:
            return set(), False
        docs_with_abbreviations = set()
        first_assignment = True
        for abbr_meaning in abbr_meanings.split(";"):
            abbr_meaning = abbr_meaning.strip()
            if abbr_meaning not in self._abbreviations_resolver.sorted_words_to_abbreviations:
                continue
            for word,cnt in self._abbreviations_resolver.sorted_words_to_abbreviations[abbr_meaning]:
                if re.search(r"\b%s\b"%word, query) != None and word in self.abbreviations_count_docs and abbr_meaning in self.abbreviations_count_docs[word]:
                    # Union for the first matching meaning, intersect afterwards.
                    if first_assignment:
                        docs_with_abbreviations = docs_with_abbreviations.union(self.abbreviations_count_docs[word][abbr_meaning])
                        first_assignment = False
                    else:
                        docs_with_abbreviations = docs_with_abbreviations.intersection(self.abbreviations_count_docs[word][abbr_meaning])
        return docs_with_abbreviations, True

    def find_articles_with_keywords(self, key_words, threshold = 0.85, extend_query = False, extend_with_abbreviations = True, extend_abbr_meanings = ""):
        """Main search entry point: return the set of doc row numbers
        matching any of `key_words` (fuzzy per `threshold`, optionally
        expanded by spelling variants and abbreviations)."""
        total_articles = set()
        time_start = time()
        time_total = time()
        for key in key_words:
            normalized_key = text_normalizer.normalize_text(key)
            extended_queries = self.extend_query(normalized_key) if extend_query else set([normalized_key])
            extended_queries_with_abbr, extend_abbr_meanings = self.extend_query_with_abbreviations(key,extend_with_abbreviations, extend_abbr_meanings)
            extended_queries = extended_queries.union(extended_queries_with_abbr)
            time_start = time()
            for query in extended_queries:
                first_assignment = True
                articles = set()
                # A doc matches a query when it contains *all* of its keyword bigrams.
                for key_word in self.find_keywords(text_normalizer.get_stemmed_words_inverted_index(query)):
                    sim_word_articles = set()
                    for sim_word in self.find_similar_words_by_spelling(key_word, threshold):
                        sim_word_articles = sim_word_articles.union(set(self.get_articles_by_word(sim_word)))
                    if first_assignment:
                        articles = articles.union(sim_word_articles)
                        first_assignment = False
                    else:
                        articles = articles.intersection(sim_word_articles)
                # Restrict to docs where abbreviations carry the requested meaning.
                docs_with_abbreviations, has_abbr = self.get_article_with_special_abbr_meanings(query, extend_abbr_meanings)
                if not has_abbr:
                    total_articles = total_articles.union(articles)
                else:
                    total_articles = total_articles.union(articles.intersection(docs_with_abbreviations))
            time_start = time()
        return total_articles

    def find_articles_with_keywords_extended(self, key_words, threshold = 0.9, extend_query = False, extend_with_abbreviations = True, extend_abbr_meanings = ""):
        """Like find_articles_with_keywords but first expands '*' wildcards
        in each query word via the index."""
        full_keywords = set()
        for query in key_words:
            words = query.split()
            prev_set = set(self.find_words_by_pattern(words[0]))
            for i in range(1, len(words)):
                new_set = set()
                for word in self.find_words_by_pattern(words[i]):
                    for prev_exp in prev_set:
                        new_set.add(prev_exp + " " + word)
                prev_set = new_set
            full_keywords = full_keywords.union(prev_set)
        return self.find_articles_with_keywords(list(full_keywords), threshold = threshold, extend_query = extend_query,\
            extend_with_abbreviations = extend_with_abbreviations, extend_abbr_meanings = extend_abbr_meanings)

    def save_diminished_dictionary_for_synonyms_app(self, folder):
        """Persist a slimmed index (word -> doc count, only counts >= 2)."""
        dictionary_by_first_letters = {}
        for key in self.dictionary_by_first_letters:
            # NOTE(review): keys of dictionary_by_first_letters are
            # `symbols_count`-character prefixes, yet this branch looks the
            # key up in dictionary_small_words -- confirm this branch is
            # reachable/correct for symbols_count != 3.
            if len(key) <= 2:
                if len(self.id2docArray[self.dictionary_small_words[key]]) >= 2:
                    dictionary_by_first_letters[key] = len(self.id2docArray[self.dictionary_small_words[key]])
            else:
                dictionary_by_first_letters[key] = {}
                for key_word in self.dictionary_by_first_letters[key]:
                    if len(self.id2docArray[self.dictionary_by_first_letters[key][key_word]]) >= 2:
                        dictionary_by_first_letters[key][key_word] = len(self.id2docArray[self.dictionary_by_first_letters[key][key_word]])
        if not os.path.exists(folder):
            os.makedirs(folder)
        pickle.dump([dictionary_by_first_letters, self.total_articles_number, {}], open(os.path.join(folder, "search_index.pickle"),"wb"))
4841908 | <gh_stars>1-10
from setuptools import setup
# Packaging metadata for the `kitsh` distribution.
setup(
    name='kitsh',
    version='0.1.0',
    author='<NAME>',
    packages=[
        'kitsh'
    ],
    # Ship the static assets and templates bundled inside the package.
    package_data={'': ['static/*', 'templates/*']},
    include_package_data=True,
    # Keep the package unzipped so template/static files are readable from disk.
    zip_safe=False
)
| StarcoderdataPython |
3374401 | import struct
from abc import ABCMeta, abstractmethod
from typing import Tuple, Optional, List, Set, Union, NamedTuple, Deque
from bxcommon import constants
from bxcommon.messages.abstract_block_message import AbstractBlockMessage
from bxcommon.messages.bloxroute import compact_block_short_ids_serializer
from bxcommon.messages.bloxroute.tx_message import TxMessage
from bxcommon.models.transaction_flag import TransactionFlag
from bxcommon.utils.object_hash import Sha256Hash, convert
from bxcommon.utils.memory_utils import SpecialMemoryProperties, SpecialTuple
from bxgateway.utils.block_info import BlockInfo
class BlockDecompressionResult(NamedTuple):
    """Outcome of decompressing an internal (bx) block message."""
    # Decompressed blockchain block; None if short ids or hashes were unknown.
    block_msg: Optional[AbstractBlockMessage]
    # Metadata gathered during (de)compression.
    block_info: BlockInfo
    # Short ids that could not be resolved to transactions.
    unknown_short_ids: List[int]
    # Transaction hashes that could not be resolved.
    unknown_tx_hashes: List[Sha256Hash]
def finalize_block_bytes(
    buf: Deque[Union[bytes, bytearray, memoryview]], size: int, short_ids: List[int]
) -> memoryview:
    """Assemble the final compressed-block buffer.

    Appends the serialized short ids to the end of ``buf``, prepends the
    8-byte little-endian offset of the short-id section (block payload size
    plus the offset field itself), and concatenates everything into one
    contiguous buffer.

    :param buf: block payload segments, in order
    :param size: total byte length of the segments currently in ``buf``
    :param short_ids: short ids to serialize at the tail of the block
    :return: memoryview over the assembled block bytes
    """
    serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
    buf.append(serialized_short_ids)

    # The short-id section starts right after the payload and the
    # 8-byte offset header, so the offset equals size + header length.
    size += constants.UL_ULL_SIZE_IN_BYTES
    buf.appendleft(struct.pack("<Q", size))
    size += len(serialized_short_ids)

    # Copy each segment into a single pre-sized buffer.
    block = bytearray(size)
    write_offset = 0
    for segment in buf:
        segment_end = write_offset + len(segment)
        block[write_offset:segment_end] = segment
        write_offset = segment_end
    return memoryview(block)
class AbstractMessageConverter(SpecialMemoryProperties, metaclass=ABCMeta):
    """
    Message converter abstract class.

    Converts messages of specific blockchain protocol to internal messages.
    Concrete subclasses implement one converter per supported blockchain.
    """

    @abstractmethod
    def tx_to_bx_txs(
        self,
        tx_msg,
        network_num: int,
        transaction_flag: Optional[TransactionFlag] = None,
        min_tx_network_fee: int = 0,
        account_id: str = constants.DECODED_EMPTY_ACCOUNT_ID
    ) -> List[Tuple[TxMessage, Sha256Hash, Union[bytearray, memoryview]]]:
        """
        Converts blockchain transactions message to internal transaction message

        :param tx_msg: blockchain transactions message
        :param network_num: blockchain network number
        :param transaction_flag: the transaction_flag type to assign to the BDN transaction.
        :param min_tx_network_fee: minimum transaction fee. If support by the network, transactions
                                   with fees lower than this will be excluded from the result
        :param account_id: gateway's account id
        :return: array of tuples (transaction message, transaction hash, transaction bytes)
        """
        pass

    @abstractmethod
    def bx_tx_to_tx(self, bx_tx_msg):
        """
        Converts internal transaction message to blockchain transactions message

        :param bx_tx_msg: internal transaction message
        :return: blockchain transactions message
        """
        pass

    @abstractmethod
    def block_to_bx_block(
        self, block_msg, tx_service, enable_block_compression: bool, min_tx_age_seconds: float
    ) -> Tuple[memoryview, BlockInfo]:
        """
        Convert blockchain block message to internal broadcast message with transactions replaced with short ids

        :param block_msg: blockchain new block message
        :param tx_service: Transactions service
        :param enable_block_compression: whether to replace transactions with short ids
        :param min_tx_age_seconds: minimum transaction age to be eligible for compression
        :return: Internal broadcast message bytes (bytearray), tuple (txs count, previous block hash, short ids)
        """
        pass

    @abstractmethod
    def bx_block_to_block(self, bx_block_msg, tx_service) -> BlockDecompressionResult:
        """
        Converts internal broadcast message to blockchain new block message

        Returns None for block message if any of the transactions shorts ids or hashes are unknown

        :param bx_block_msg: internal broadcast message bytes
        :param tx_service: Transactions service
        :return: block decompression result
        """
        pass

    @abstractmethod
    def bdn_tx_to_bx_tx(
        self,
        raw_tx: Union[bytes, bytearray, memoryview],
        network_num: int,
        transaction_flag: Optional[TransactionFlag] = None,
        account_id: str = constants.DECODED_EMPTY_ACCOUNT_ID
    ) -> TxMessage:
        """
        Convert a raw transaction which arrived from an RPC request into bx transaction.

        :param raw_tx: The raw transaction bytes.
        :param network_num: the network number.
        :param transaction_flag: the quota type to assign to the BDN transaction.
        :param account_id: node's account id
        :return: bx transaction.
        """
        pass

    def encode_raw_msg(self, raw_msg: str) -> bytes:
        """
        Encode a raw message string into bytes

        :param raw_msg: the raw message to encode
        :return: binary encoded message
        :raise ValueError: if the encoding fails
        """
        return convert.hex_to_bytes(raw_msg)

    def special_memory_size(self, ids: Optional[Set[int]] = None) -> SpecialTuple:
        # Delegate memory accounting to the SpecialMemoryProperties mixin.
        return super(AbstractMessageConverter, self).special_memory_size(ids)
| StarcoderdataPython |
89312 | <reponame>toanquachp/dl_stock_prediction
from utils import plot_figures
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from tensorflow.keras.layers import Input, LSTM, Flatten, concatenate, BatchNormalization, Dense
from tensorflow.keras.models import Model, save_model
class MultiDayNetwork:
    """LSTM model that predicts the next NUM_STEP closing prices from the
    previous LAG_DAYS days of prices and technical indicators, with a dense
    side-input of selected indicators from the last day of each window."""

    def __init__(self, data, LAG_DAYS=10, NUM_STEP=5):
        # Raw price history; assumed to be a pandas DataFrame with
        # Open/High/Low/Close/Volume columns -- TODO confirm with callers.
        self.data = data
        # Model input columns: raw prices plus derived technical indicators.
        self.FEATURE_COL = ['Open', 'High', 'Low', 'Close', 'Volume', 'ma7', 'ma21', '26ema', '12ema', 'MACD', 'std21', 'upper_band21', 'lower_band21', 'ema']
        # Indicators routed through the auxiliary dense input branch.
        self.EXT_FEATURE_COL = ['ma7', 'MACD', 'upper_band21', 'lower_band21', 'ema']
        self.TARGET_COL = ['Close']
        # Days of history per sample / days predicted per sample.
        self.LAG_DAYS = LAG_DAYS
        self.NUM_STEP = NUM_STEP

    def get_technical_indicators(self, data):
        """Add moving averages, MACD, Bollinger bands and EMA columns (in place)."""
        # Moving average (7 days and 21 days)
        data['ma7'] = data['Close'].rolling(window=7).mean()
        data['ma21'] = data['Close'].rolling(window=21).mean()
        # Create MACD; the first 26 closes are blanked so the EWMs start
        # only once a full window of data is available.
        mod_close = data['Close'].copy()
        mod_close[0:26] = np.nan
        data['26ema'] = mod_close.ewm(span=26, adjust=False).mean()
        data['12ema'] = mod_close.ewm(span=12, adjust=False).mean()
        data['MACD'] = (data['12ema'] - data['26ema'])
        # Create Bollinger Bands (21 days)
        data['std21'] = data['Close'].rolling(window=21).std()
        data['upper_band21'] = data['ma21'] + (data['std21']*2)
        data['lower_band21'] = data['ma21'] - (data['std21']*2)
        # Create Exponential moving average
        data['ema'] = data['Close'].ewm(com=0.5).mean()
        return data

    def split_data(self, data, feature_col, external_feature_col, target_col, train_ratio=0.8, shuffle=False):
        """Chronologically (shuffle=False) split features and targets into train/test."""
        # get data columns
        X = data[feature_col]
        X_external = data[external_feature_col]
        y = data[target_col]
        X_train, X_test, y_train, y_test = train_test_split(X, y, train_size=train_ratio, shuffle=shuffle)
        # Split the side-input features with the same ratio so rows stay aligned.
        X_train_external, X_test_external = train_test_split(X_external, train_size=train_ratio, shuffle=shuffle)
        print(f'Training set: ({X_train.shape} - {y_train.shape})')
        print(f'Training set: External data - ({X_train_external.shape})')
        print(f'Testing set: ({X_test.shape} - {y_test.shape})')
        return (X_train, X_train_external, y_train), (X_test, X_test_external, y_test)

    def scale_data(self, data, scaler=None):
        """Scale with the given fitted scaler, or fit a fresh MinMaxScaler."""
        if scaler is None:
            scaler = MinMaxScaler()
            data_scaled = scaler.fit_transform(data)
        else:
            data_scaled = scaler.transform(data)
        return data_scaled, scaler

    def split_feature_target(self, data, lag_days, num_step=1):
        """Window scaled arrays into supervised samples.

        Each sample is: `lag_days` rows of features, the side features of
        the window's last day, and the `num_step` target values that follow.
        """
        X, X_external, y = data
        X_splitted = np.array([np.array(X[i: i + lag_days].copy()) for i in range(len(X) - lag_days - num_step + 1)])
        X_external_splitted = np.array([np.array(X_external[i + lag_days - 1].copy()) for i in range(len(y) - lag_days - num_step + 1)])
        y_splitted = np.array([np.array(y[i + lag_days : i + lag_days + num_step].copy()) for i in range(len(y) - lag_days - num_step + 1)])
        return (X_splitted, X_external_splitted, y_splitted)

    def preprocess_data(self, data, train_ratio):
        """Compute indicators, split train/test, scale, and window the data."""
        data_technical_indicators = self.get_technical_indicators(data)
        # Rolling windows leave NaNs at the start; drop them and re-index.
        data_technical_indicators = data_technical_indicators.dropna()
        data_technical_indicators = data_technical_indicators.reset_index(drop=True)
        train_data, test_data = self.split_data(data_technical_indicators, self.FEATURE_COL, self.EXT_FEATURE_COL, self.TARGET_COL, train_ratio=train_ratio)
        # Fit scalers on the training split only ...
        X_train_scaled, self.feature_scaler = self.scale_data(train_data[0])
        X_train_external_scaled, self.external_feature_scaler = self.scale_data(train_data[1])
        y_train_scaled, self.target_scaler = self.scale_data(train_data[2])
        # ... and reuse them on the test split to avoid leakage.
        X_test_scaled, _ = self.scale_data(test_data[0], self.feature_scaler)
        X_test_external_scaled, _ = self.scale_data(test_data[1], self.external_feature_scaler)
        y_test_scaled, _ = self.scale_data(test_data[2], self.target_scaler)
        train_data_scaled = (X_train_scaled, X_train_external_scaled, y_train_scaled)
        test_data_scaled = (X_test_scaled, X_test_external_scaled, y_test_scaled)
        train_data_splitted = self.split_feature_target(train_data_scaled, self.LAG_DAYS, self.NUM_STEP)
        test_data_splitted = self.split_feature_target(test_data_scaled, self.LAG_DAYS, self.NUM_STEP)
        return train_data_splitted, test_data_splitted

    def build_model(self, lstm_input_shape, extensive_input_shape):
        """Build the two-input (LSTM sequence + dense side-input) Keras model."""
        input_layer = Input(shape=(lstm_input_shape), name='lstm_input')
        external_input_layer = Input(shape=(extensive_input_shape), name='external_dense_input')
        x = LSTM(32, name='lstm_layer_0', kernel_regularizer='l2', return_sequences=True)(input_layer)
        x = Flatten()(x)
        # Merge the flattened sequence encoding with the side features.
        x = concatenate((external_input_layer, x))
        x = BatchNormalization()(x)
        # One output unit per predicted day.
        output_layer = Dense(self.NUM_STEP, activation='elu', name='output_layer')(x)
        lstm_model = Model(inputs=[input_layer, external_input_layer], outputs=output_layer, name='lstm_model')
        return lstm_model

    def plot_multi_day_prediction(self, y_test, y_pred, file_name):
        """Plot actual prices (green) with one blue NUM_STEP forecast per stride."""
        plt.figure(figsize=(15, 8))
        plt.plot(y_test, color='g')
        for i in range(0, y_pred.shape[0], self.NUM_STEP):
            plt.plot(range(i, i + self.NUM_STEP), y_pred[i], color='b')
        plt.savefig(file_name)

    def build_train_model(self, train_ratio=0.8, epochs=80, batch_size=32, model_save_name='models/multi_day_lstm.h5'):
        """End-to-end pipeline: preprocess, build, train, plot and save the model.

        :param model_save_name: path for the saved model, or None to skip saving
        :return: the trained Keras model
        """
        print('-- Preprocessing data --\n')
        (X_train, X_train_external, y_train), (X_test, X_test_external, y_test) = self.preprocess_data(self.data, train_ratio=train_ratio)
        print(f'Training set: ({X_train.shape} - {y_train.shape})')
        print(f'Testing set: ({X_test.shape} - {y_test.shape})')
        LSTM_INPUT_SHAPE = (X_train.shape[1], X_train.shape[2])
        EXTENSIVE_INPUT_SHAPE = (X_train_external.shape[1])
        print('-- Build LSTM model --\n')
        lstm_model = self.build_model(LSTM_INPUT_SHAPE, EXTENSIVE_INPUT_SHAPE)
        lstm_model.compile(loss='mse', optimizer='rmsprop')
        print('-- Train LSTM model --\n')
        history = lstm_model.fit([X_train, X_train_external], y_train, epochs=epochs, batch_size=batch_size, validation_split=0.2, use_multiprocessing=True)
        print('-- Plotting LOSS figure --\n')
        plot_figures(
            data=[history.history['loss'], history.history['val_loss']],
            y_label='Loss',
            legend=['loss', 'val_loss'],
            title='LSTM multi day training and validating loss',
            file_name='figures/lstm_loss_multi_day.png'
        )
        y_predicted = lstm_model.predict([X_test, X_test_external])
        # Stitch each sample's first-step target plus the tail of the last
        # window into one continuous actual-price series for plotting.
        y_test_plot = np.concatenate((y_test[:, 0], y_test[-1, 1:]))
        y_predicted_inverse = self.target_scaler.inverse_transform(y_predicted)
        y_test_inverse = self.target_scaler.inverse_transform(y_test_plot)
        print('-- Plotting LSTM stock prediction vs Real closing stock price figure --\n')
        self.plot_multi_day_prediction(
            y_test_inverse,
            y_predicted_inverse,
            file_name='figures/lstm_prediction_multi_day.png'
        )
        print('-- Save LSTM model --\n')
        if model_save_name is not None:
            save_model(lstm_model, filepath=model_save_name)
        return lstm_model
| StarcoderdataPython |
1720488 | __author__ = '<NAME>'
import asyncio
import logging
import re
from datetime import datetime, date, timedelta
from ipaddress import IPv4Address, IPv6Address
from typing import Optional, Dict, Deque, Tuple, Any, cast
import simplejson
from geolite2 import maxminddb
from monetdblite.exceptions import DatabaseError
from .monetdb_dao import MonetDAO, Entry
# Valid service names: one or more word characters (letters, digits, "_"),
# anchored to the whole string.
VALID_SERVICE = re.compile(r'\A[\w]+\Z')
class BallconeJSONEncoder(simplejson.JSONEncoder):
    """JSON encoder that additionally serializes dates and IP addresses."""

    def default(self, obj: Any) -> str:
        # IP addresses render as their canonical dotted/colon string form.
        if isinstance(obj, (IPv4Address, IPv6Address)):
            return str(obj)
        # date (and datetime, which subclasses it) render as ISO-8601.
        if isinstance(obj, date):
            return obj.isoformat()
        # Anything else is delegated to the base class (which raises TypeError).
        return cast(str, super().default(obj))
class Ballcone:
    """Coordinates buffered log entries and their periodic persistence into
    MonetDB, with GeoIP lookups for request origins."""

    def __init__(self, dao: MonetDAO, geoip: maxminddb.reader.Reader,
                 top_limit: int = 5, persist_period: int = 5) -> None:
        self.dao = dao
        self.geoip = geoip
        # Default row count for "top N" style queries.
        self.top_limit = top_limit
        # Seconds between automatic flushes in persist_timer().
        self.persist_period = persist_period
        # Per-service queues of entries awaiting insertion.
        self.queue: Dict[str, Deque[Entry]] = {}
        self.json_dumps = BallconeJSONEncoder().encode

    async def persist_timer(self) -> None:
        """Flush queued entries every persist_period seconds, forever."""
        # sleep(..., result=True) always yields a truthy value, so this loops indefinitely.
        while await asyncio.sleep(self.persist_period, result=True):
            self.persist()

    def persist(self) -> None:
        """Batch-insert every queued entry into its service's table."""
        for service, queue in self.queue.items():
            try:
                count = self.dao.batch_insert_into_from_deque(service, queue)
                if count:
                    logging.debug(f'Inserted {count} entries for service {service}')
            except DatabaseError:
                logging.exception('Please check if the query is correct')

    def unwrap_top_limit(self, top_limit: Optional[int] = None) -> int:
        """Return the given limit if truthy, otherwise the configured default."""
        return top_limit if top_limit else self.top_limit

    def check_service(self, service: Optional[str], should_exist: bool = False) -> bool:
        """Validate a service name (and, optionally, that its table exists)."""
        return (
            service is not None
            and VALID_SERVICE.match(service) is not None
            and (not should_exist or self.dao.table_exists(service))
        )

    @staticmethod
    def iso_code(geoip: maxminddb.reader.Reader, ip: str) -> Optional[str]:
        """Return the ISO country code for an IP address, or None if unknown."""
        geo = geoip.get(ip)
        return geo['country'].get('iso_code', None) if geo and 'country' in geo else None

    @staticmethod
    def days_before(stop_date: Optional[date] = None, days: int = 30) -> Tuple[date, date]:
        """Return the inclusive (start, stop) date range of `days` days
        ending at stop_date (default: today in UTC)."""
        stop = stop_date if stop_date else datetime.utcnow().date()
        start = stop - timedelta(days=days - 1)
        return start, stop
| StarcoderdataPython |
1657218 | # -*- coding: utf-8 -*-
def command():
    """Name of the CLI sub-command implemented by this module."""
    return "start-component"
def init_argument(parser):
    """Register the required CLI options for the start-component command.

    :param parser: an argparse-style parser exposing add_argument()
    """
    for option in ("--component-no", "--instance-nos"):
        parser.add_argument(option, required=True)
def execute(requester, args):
    """Call the StartComponent API with the parsed CLI arguments.

    :param requester: API client exposing execute(path, parameters)
    :param args: parsed arguments carrying component_no and instance_nos
    :return: whatever the requester returns for the /StartComponent call
    """
    parameters = {
        "ComponentNo": args.component_no,
        "InstanceNos": args.instance_nos,
    }
    return requester.execute("/StartComponent", parameters)
| StarcoderdataPython |
1611053 | import pytest
from thefuck.types import Command
from thefuck.rules.brew_uninstall import get_new_command, match
@pytest.fixture
def output():
    # Sample brew output emitted when a formula is uninstalled but other
    # versions of it remain installed.
    return ("Uninstalling /usr/local/Cellar/tbb/4.4-20160916... (118 files, 1.9M)\n"
            "tbb 4.4-20160526, 4.4-20160722 are still installed.\n"
            "Remove all versions with `brew uninstall --force tbb`.\n")
@pytest.fixture
def new_command(formula):
    """Expected corrected command for the given formula."""
    return f'brew uninstall --force {formula}'
@pytest.mark.parametrize('script', ['brew uninstall tbb', 'brew rm tbb', 'brew remove tbb'])
def test_match(output, script):
    """Every uninstall alias should match when other versions are left behind."""
    command = Command(script, output)
    assert match(command)
@pytest.mark.parametrize('script', ['brew remove gnuplot'])
def test_not_match(script):
    """A clean uninstall (no leftover versions) must not be matched."""
    clean_output = 'Uninstalling /usr/local/Cellar/gnuplot/5.0.4_1... (44 files, 2.3M)\n'
    assert not match(Command(script, clean_output))
@pytest.mark.parametrize('script, formula, ', [('brew uninstall tbb', 'tbb')])
def test_get_new_command(output, new_command, script, formula):
    """The suggested fix should be the forced uninstall of the same formula."""
    assert new_command == get_new_command(Command(script, output))
| StarcoderdataPython |
3253986 | # -*- coding: utf-8 -*-
from django.db import migrations, models
import django.core.validators
import django.contrib.auth.models
import django.utils.timezone
from django.conf import settings
import rich_editor.fields
import autoimagefield.fields
class Migration(migrations.Migration):
    """Initial accounts migration: custom User model plus its auxiliary tables.

    Auto-generated by Django; the field definitions are declarative data and
    should not be edited by hand after this migration has been applied.
    """

    dependencies = [
        ('auth', '0006_require_contenttypes_0002'),
    ]

    operations = [
        # Custom user model reusing the stock `auth_user` table (see options).
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(serialize=False, auto_created=True, primary_key=True)),
                ('password', models.CharField(max_length=128)),
                ('last_login', models.DateTimeField(null=True, blank=True)),
                ('is_superuser', models.BooleanField(default=False)),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, max_length=30, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.', 'invalid')], unique=True)),
                ('first_name', models.CharField(max_length=30, blank=True)),
                ('last_name', models.CharField(max_length=30, blank=True)),
                ('email', models.EmailField(max_length=254, blank=True)),
                ('is_staff', models.BooleanField(default=False)),
                ('is_active', models.BooleanField(default=True)),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now)),
                # Site-specific profile fields beyond the standard auth columns.
                ('jabber', models.CharField(max_length=127, blank=True)),
                ('url', models.CharField(max_length=255, blank=True)),
                ('signature', models.CharField(max_length=255, blank=True)),
                ('display_mail', models.BooleanField(default=False)),
                ('distribution', models.CharField(max_length=50, blank=True)),
                # Rich-text pair: raw user input plus its server-filtered HTML.
                ('original_info', rich_editor.fields.RichTextOriginalField(blank=True, property_name='info', filtered_field='filtered_info', validators=[django.core.validators.MaxLengthValidator(100000)])),
                ('filtered_info', rich_editor.fields.RichTextFilteredField(editable=False, blank=True)),
                ('year', models.SmallIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1900), django.core.validators.MaxValueValidator(2015)])),
                ('avatar', autoimagefield.fields.AutoImageField(upload_to='accounts/avatars', blank=True)),
                ('settings', models.TextField(blank=True)),
                ('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True)),
                ('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True)),
            ],
            options={
                # Slovak verbose names; table name kept as Django's default.
                'db_table': 'auth_user',
                'verbose_name': 'pou\u017e\xedvate\u013e',
                'verbose_name_plural': 'pou\u017e\xedvatelia',
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        # Persistent "remember me" login tokens, keyed by token hash.
        migrations.CreateModel(
            name='RememberToken',
            fields=[
                ('token_hash', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(related_name='remember_me_tokens', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
            ],
        ),
        # Per-user activity counters feeding the aggregate rating.
        migrations.CreateModel(
            name='UserRating',
            fields=[
                ('id', models.AutoField(serialize=False, auto_created=True, primary_key=True)),
                ('comments', models.IntegerField(default=0)),
                ('articles', models.IntegerField(default=0)),
                ('helped', models.IntegerField(default=0)),
                ('news', models.IntegerField(default=0)),
                ('wiki', models.IntegerField(default=0)),
                ('rating', models.IntegerField(default=0)),
                ('user', models.OneToOneField(related_name='rating', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
            ],
        ),
    ]
| StarcoderdataPython |
1645931 | # -*- coding: utf-8 -*-
"""
Adapters
--------
.. contents::
:backlinks: none
The :func:`authomatic.login` function needs access to functionality like
getting the **URL** of the handler where it is being called, getting the **request params** and **cookies** and
**writing the body**, **headers** and **status** to the response.
Since implementation of these features varies across Python web frameworks,
the Authomatic library uses **adapters** to unify these differences into a single interface.
Available Adapters
^^^^^^^^^^^^^^^^^^
If you are missing an adapter for the framework of your choice,
please open an `enhancement issue <https://github.com/peterhudec/authomatic/issues>`_
or consider a contribution to this module by :ref:`implementing <implement_adapters>` one by yourself.
It's very easy and shouldn't take you more than a few minutes.
.. autoclass:: DjangoAdapter
:members:
.. autoclass:: Webapp2Adapter
:members:
.. autoclass:: WebObAdapter
:members:
.. autoclass:: WerkzeugAdapter
:members:
.. _implement_adapters:
Implementing an Adapter
^^^^^^^^^^^^^^^^^^^^^^^
Implementing an adapter for a Python web framework is pretty easy.
Do it by subclassing the :class:`.BaseAdapter` abstract class.
There are only **six** members that you need to implement.
Moreover if your framework is based on the |webob|_ or |werkzeug|_ package
you can subclass the :class:`.WebObAdapter` or :class:`.WerkzeugAdapter` respectively.
.. autoclass:: BaseAdapter
:members:
"""
import abc
from authomatic.core import Response
class BaseAdapter(object):
    """
    Base class for platform adapters

    Defines common interface for WSGI framework specific functionality.
    """

    # NOTE(review): this is the Python 2 metaclass hook; under Python 3 it has
    # no effect, so the abstract members are not actually enforced there.
    __metaclass__ = abc.ABCMeta

    @abc.abstractproperty
    def params(self):
        """
        Must return a :class:`dict` of all request parameters of any HTTP method.

        :returns:
            :class:`dict`
        """

    @abc.abstractproperty
    def url(self):
        """
        Must return the url of the actual request including path but without query and fragment

        :returns:
            :class:`str`
        """

    @abc.abstractproperty
    def cookies(self):
        """
        Must return cookies as a :class:`dict`.

        :returns:
            :class:`dict`
        """

    @abc.abstractmethod
    def write(self, value):
        """
        Must write specified value to response.

        :param str value:
            String to be written to response.
        """

    @abc.abstractmethod
    def set_header(self, key, value):
        """
        Must set response headers to ``Key: value``.

        :param str key:
            Header name.

        :param str value:
            Header value.
        """

    @abc.abstractmethod
    def set_status(self, status):
        """
        Must set the response status e.g. ``'302 Found'``.

        :param str status:
            The HTTP response status.
        """
class DjangoAdapter(BaseAdapter):
    """
    Adapter for the |django|_ framework.
    """

    def __init__(self, request, response):
        """
        :param request:
            An instance of the :class:`django.http.HttpRequest` class.

        :param response:
            An instance of the :class:`django.http.HttpResponse` class.
        """
        self.request = request
        self.response = response

    # -- request side -------------------------------------------------------

    @property
    def params(self):
        # NOTE(review): HttpRequest.REQUEST (merged GET/POST) was removed in
        # Django 1.9, so this adapter only works on older Django versions.
        return dict(self.request.REQUEST)

    @property
    def url(self):
        # Absolute URL of the current path, without the query string.
        return self.request.build_absolute_uri(self.request.path)

    @property
    def cookies(self):
        return dict(self.request.COOKIES)

    # -- response side ------------------------------------------------------

    def write(self, value):
        self.response.write(value)

    def set_header(self, key, value):
        # HttpResponse supports dict-style header assignment.
        self.response[key] = value

    def set_status(self, status):
        # NOTE(review): the base class documents status strings like
        # '302 Found', but Django's status_code expects an int — confirm usage.
        self.response.status_code = status
class WebObAdapter(BaseAdapter):
    """Adapter for the |webob|_ package."""

    def __init__(self, request, response):
        """
        :param request:
            A |webob|_ :class:`Request` instance.

        :param response:
            A |webob|_ :class:`Response` instance.
        """
        self.request = request
        self.response = response

    # -- request side -------------------------------------------------------

    @property
    def url(self):
        # scheme + host + path, without query string or fragment.
        return self.request.path_url

    @property
    def params(self):
        return dict(self.request.params)

    @property
    def cookies(self):
        return dict(self.request.cookies)

    # -- response side ------------------------------------------------------

    def write(self, value):
        self.response.write(value)

    def set_header(self, key, value):
        # WebOb requires string header values.
        self.response.headers[key] = str(value)

    def set_status(self, status):
        self.response.status = status
class Webapp2Adapter(WebObAdapter):
    """
    Adapter for the |webapp2|_ framework.

    Inherits from the :class:`.WebObAdapter`.
    """

    def __init__(self, handler):
        """
        :param handler:
            A :class:`webapp2.RequestHandler` instance.
        """
        # webapp2 handlers wrap WebOb request/response objects, so all of the
        # inherited WebObAdapter accessors work unchanged.
        self.request = handler.request
        self.response = handler.response
class WerkzeugAdapter(BaseAdapter):
    """
    Adapter for |flask|_ and other |werkzeug|_ based frameworks.

    Thanks to `<NAME> <http://marksteve.com>`_.
    """

    def __init__(self, request, response):
        """
        :param request:
            Instance of the :class:`werkzeug.wrappers.Request` class.

        :param response:
            Instance of the :class:`werkzeug.wrappers.Response` class.
        """
        self.request = request
        self.response = response

    @property
    def params(self):
        return self.request.args

    @property
    def url(self):
        return self.request.base_url

    @property
    def cookies(self):
        return self.request.cookies

    def write(self, value):
        # Werkzeug exposes the response body as `data`; append to it.
        self.response.data = self.response.data + value

    def set_header(self, key, value):
        self.response.headers[key] = value

    def set_status(self, status):
        self.response.status = status
| StarcoderdataPython |
1752506 | <gh_stars>0
from __future__ import absolute_import
from .version import __version__
| StarcoderdataPython |
1609205 | <filename>swarmcg/scoring/__init__.py
from .angles import get_AA_angles_distrib, get_CG_angles_distrib
from .bonds import get_AA_bonds_distrib, get_CG_bonds_distrib
from .dihedrals import get_AA_dihedrals_distrib, get_CG_dihedrals_distrib
from .sasa import compute_SASA
from .rg import compute_Rg
from .distances import create_bins_and_dist_matrices
from .evaluation_function import eval_function
| StarcoderdataPython |
3370919 | import os
import pandas as pd
from typing import Any
from django.contrib.gis.geos import LineString, MultiLineString
def mission_planner_convert_log(url: str) -> list:
""" This function takes in a string url of the .waypoints, .txt or .json
file exported from the mission planner flight plan
It returns an array of coordinates for each point
Returns:
[array] -- [
[long, lat],
[long, lat],
...
]
"""
data = pd.read_table(str(url), delim_whitespace=True)
df = pd.DataFrame(data)
df.to_csv("me.csv",)
datatest = pd.DataFrame((pd.read_csv("me.csv", index_col=0)))
d = datatest.drop(
[
"WPL",
"Unnamed: 1",
"Unnamed: 2",
"Unnamed: 3",
"Unnamed: 4",
"Unnamed: 5",
"Unnamed: 6",
"Unnamed: 7",
"110",
],
axis=1,
)
z = d[d != 0.0].dropna(axis=0)
cols = list(z)
cols[0], cols[1] = cols[1], cols[0]
f = z.loc[:, cols]
e = f.values.tolist()
# print(e)
return e
# x = mission_planner_convert_log("./mission.waypoints")
# print(x)
# FOR DJANGO USERS
"""We can further convert the above lat/long array into LineString and MultiLineString
format to be used for saving to database as well as displaying on the front-end
"""
def convert_mission_planner_log_to_geoJson(url: str) -> Any:
    """Convert a Mission Planner flight plan into a GeoDjango MultiLineString.

    Kindly check how to install the GEOS libraries:
    https://docs.djangoproject.com/en/3.0/ref/contrib/gis/install/geolibs/

    Fix vs. the original: removed an unused, function-local
    ``from django.core.serializers import serialize`` left over from debugging.
    """
    # NOTE(review): hard-coded per-machine path; GEOS_LIBRARY_PATH normally
    # belongs in Django settings rather than here.
    GEOS_LIBRARY_PATH = "/home/nyaga/local/lib/libgeos_c.so"
    output = mission_planner_convert_log(url)
    print(output, "output")
    line = LineString(output)
    print(line, "line")
    multi_line = MultiLineString(line)
    print(multi_line, "multi_line")
    return multi_line
# y = convert_mission_planner_log_to_geoJson("./mission.waypoints")
# print(y, "y")
def qgc_convert_log(url: str) -> list:
    """Parse a QGroundControl ``.plan`` file into waypoint coordinates.

    Returns ``[[params[5], params[4]], ...]`` for every mission item, i.e.
    one ``[long-ish, lat-ish]`` pair per waypoint as stored in the plan JSON.

    Fixes vs. the original: the ``.plan`` file (which is already JSON) is
    parsed directly instead of being copied to a stray ``<name>.json`` file
    first; unused locals (hover/cruise speed, planned home position) were
    removed.
    """
    import json

    with open(url) as plan_file:
        plan = json.load(plan_file)
    items = plan["mission"]["items"]
    return [[item["params"][5], item["params"][4]] for item in items]
# z = qgc_convert_log("./qgc.plan")
# print(z,"z")
def convert_qgc_log_to_geoJson(url: str) -> Any:
    """Build a GeoDjango MultiLineString from a QGroundControl ``.plan`` file.

    Kindly check how to install these libraries:
    https://docs.djangoproject.com/en/3.0/ref/contrib/gis/install/geolibs/
    """
    # GEOS_LIBRARY_PATH = "/home/<your computer username>/local/lib/libgeos_c.so"
    GEOS_LIBRARY_PATH = "/home/nyaga/local/lib/libgeos_c.so"
    coordinates = qgc_convert_log(url)
    print(coordinates, "output")
    single_line = LineString(coordinates)
    print(single_line, "line")
    multi = MultiLineString(single_line)
    print(multi, "multi_line")
    return multi
# Fix: guard the demo invocation so importing this module no longer tries to
# read ./qgc.plan from the current working directory as a side effect.
if __name__ == "__main__":
    z1 = convert_qgc_log_to_geoJson("./qgc.plan")
    print("z1")
| StarcoderdataPython |
3326155 | from data.my_collection import cards as my_col
# from data.nastya_collection import cards as my_col
from dataobjects.collection import Collection
from dataobjects.mask import Mask
from dataobjects.deck import Deck
from dataobjects import constants
# Build a Collection wrapper around the imported card list.
my_col_object = Collection()
my_col_object.cards = my_col

# Optional rarity filtering, kept around for experimentation:
# m = Mask()
# m.forbid_all()
# m.allow_rarity('Free')
# m.allow_rarity('Common')
# my_col_object.apply_mask(m)

# Interactive setup (Python 2: raw_input returns a str).
player_class = raw_input('Input your class: ')
start_card = my_col_object.get_closest_name(raw_input('First card in deck? '))
is_arena_deck = raw_input('Type y if it is arena deck') == 'y'

deck = Deck(my_col=my_col_object.cards)
deck.add_card(start_card)
deck.player_class = player_class
if is_arena_deck:
    deck.type = constants.ARENA_DECK

# Greedily add the best-synergy card until the deck reaches 30 cards.
while sum(deck.cards.values()) < 30:
    next_card, card_syn_value, better_cards = deck.get_advice()
    print 'Adding %s : %f (skipped missing cards: %s)' % (next_card, card_syn_value, str(better_cards))
    deck.add_card(next_card)

# Optional refinement pass over the completed deck.
if raw_input("Refine? (y/n)") == 'y':
    deck.refine_deck()

# Report: card counts and overall synergy score.
print('Final deck:')
for card in deck.cards:
    print "%s : %d" % (card, deck.cards[card])
print('Synergy score: %f' % deck.get_total_synergy_score())
| StarcoderdataPython |
1683597 | <reponame>NaverCloudPlatform/ncloud-sdk-python
# coding: utf-8
"""
server
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
def _swagger_accessor(attr_name):
    """Build a plain pass-through property backed by the '_<attr_name>' slot."""
    private = '_' + attr_name

    def _get(self):
        return getattr(self, private)

    def _set(self, value):
        setattr(self, private, value)

    return property(_get, _set, doc=f'{attr_name} of this GetBlockStorageInstanceListRequest.')


class GetBlockStorageInstanceListRequest(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.

    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """

    swagger_types = {
        'server_instance_no': 'str',
        'block_storage_instance_no_list': 'list[str]',
        'search_filter_name': 'str',
        'search_filter_value': 'str',
        'block_storage_type_code_list': 'list[str]',
        'page_no': 'int',
        'page_size': 'int',
        'block_storage_instance_status_code': 'str',
        'disk_type_code': 'str',
        'disk_detail_type_code': 'str',
        'region_no': 'str',
        'zone_no': 'str',
        'sorted_by': 'str',
        'sorting_order': 'str'
    }

    attribute_map = {
        'server_instance_no': 'serverInstanceNo',
        'block_storage_instance_no_list': 'blockStorageInstanceNoList',
        'search_filter_name': 'searchFilterName',
        'search_filter_value': 'searchFilterValue',
        'block_storage_type_code_list': 'blockStorageTypeCodeList',
        'page_no': 'pageNo',
        'page_size': 'pageSize',
        'block_storage_instance_status_code': 'blockStorageInstanceStatusCode',
        'disk_type_code': 'diskTypeCode',
        'disk_detail_type_code': 'diskDetailTypeCode',
        'region_no': 'regionNo',
        'zone_no': 'zoneNo',
        'sorted_by': 'sortedBy',
        'sorting_order': 'sortingOrder'
    }

    def __init__(self, server_instance_no=None, block_storage_instance_no_list=None,
                 search_filter_name=None, search_filter_value=None,
                 block_storage_type_code_list=None, page_no=None, page_size=None,
                 block_storage_instance_status_code=None, disk_type_code=None,
                 disk_detail_type_code=None, region_no=None, zone_no=None,
                 sorted_by=None, sorting_order=None):  # noqa: E501
        """GetBlockStorageInstanceListRequest - a model defined in Swagger"""  # noqa: E501
        self.discriminator = None
        supplied = {
            'server_instance_no': server_instance_no,
            'block_storage_instance_no_list': block_storage_instance_no_list,
            'search_filter_name': search_filter_name,
            'search_filter_value': search_filter_value,
            'block_storage_type_code_list': block_storage_type_code_list,
            'page_no': page_no,
            'page_size': page_size,
            'block_storage_instance_status_code': block_storage_instance_status_code,
            'disk_type_code': disk_type_code,
            'disk_detail_type_code': disk_detail_type_code,
            'region_no': region_no,
            'zone_no': zone_no,
            'sorted_by': sorted_by,
            'sorting_order': sorting_order,
        }
        for name in self.swagger_types:
            # Private slot always exists; only caller-supplied values go
            # through the public setter, mirroring the generated code.
            setattr(self, '_' + name, None)
            if supplied[name] is not None:
                setattr(self, name, supplied[name])

    def to_dict(self):
        """Returns the model properties as a dict"""
        def _convert(value):
            # Recursively expand nested swagger models inside lists/dicts.
            if isinstance(value, list):
                return [v.to_dict() if hasattr(v, 'to_dict') else v for v in value]
            if hasattr(value, 'to_dict'):
                return value.to_dict()
            if isinstance(value, dict):
                return {k: (v.to_dict() if hasattr(v, 'to_dict') else v)
                        for k, v in value.items()}
            return value

        return {attr: _convert(getattr(self, attr)) for attr in self.swagger_types}

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return (isinstance(other, GetBlockStorageInstanceListRequest)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other


# Attach one pass-through property per swagger attribute; the generated
# original spelled out fourteen identical getter/setter pairs by hand.
for _attr in GetBlockStorageInstanceListRequest.swagger_types:
    setattr(GetBlockStorageInstanceListRequest, _attr, _swagger_accessor(_attr))
del _attr
| StarcoderdataPython |
1755572 | <filename>ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/gumtree/tree/data/class/in_3.py<gh_stars>1-10
class A(object):
    # NOTE(review): this file looks like AST-transformation test fixture data
    # (data/class/in_3.py); confirm comments here don't break expected output.
    def __init__(self, arg):
        # Store the single constructor argument on a private attribute.
        self._arg = arg
1604496 | <gh_stars>1-10
# -*- coding: utf-8 -*-
# Copyright 2010-2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Litify a .proto file.
This program adds the line
"option optimize_for = LITE_RUNTIME;"
to the input .proto file.
"""
import fileinput
import optparse
# Option line appended to the .proto; tells protoc to generate lite-runtime
# (MessageLite-based) classes.
LITE_OPTIMIZER = 'option optimize_for = LITE_RUNTIME;'
def ParseOption():
    """Parse the command-line flags naming the input and output proto files."""
    parser = optparse.OptionParser()
    parser.add_option('--in_file_path', dest='in_file_path',
                      help='Specify the input protocol buffer definition file.')
    parser.add_option('--out_file_path', dest='out_file_path',
                      help='Specify the result file name.')
    options, _ = parser.parse_args()
    return options
def ExecuteLitify(in_file_path, out_file_path):
    """Copy the .proto to out_file_path, appending the LITE_RUNTIME option.

    Fix vs. the original: both files are managed by ``with`` so the handles
    are closed even on error (the ``fileinput`` iterator was never closed,
    and a single-file copy does not need ``fileinput`` at all).
    """
    with open(in_file_path) as input_file, open(out_file_path, 'w') as output_file:
        for line in input_file:
            output_file.write(line)
        output_file.write('\n%s\n' % LITE_OPTIMIZER)
def main():
    """Entry point: litify the file named by the command-line flags."""
    opts = ParseOption()
    ExecuteLitify(opts.in_file_path, opts.out_file_path)


if __name__ == '__main__':
    main()
| StarcoderdataPython |
188577 | <reponame>rogeriopaulos/finpy<gh_stars>0
import datetime as dt
import logging
import os
import urllib
from abc import ABC, abstractmethod
import telegram
from pymongo import MongoClient
from pymongo.errors import BulkWriteError, ConnectionFailure
from requests.exceptions import ConnectionError, Timeout, TooManyRedirects
from requests.models import Response
# Logging
# ------------------------------------------------------------------------------
# Process-wide logging configuration, applied at import time.
log_format = '[%(asctime)s][%(levelname)s] - %(message)s'
logging.basicConfig(level=logging.INFO, format=log_format)
# NOTE(review): getLogger('root') returns a *named* logger called "root",
# not the actual root logger — logging.getLogger() would return the latter.
LOGGER = logging.getLogger('root')
# Database
# ------------------------------------------------------------------------------
def _quoted_env(var_name):
    """Read an environment variable and percent-quote it for a Mongo URI.

    NOTE(review): unset variables become the literal string "None" (quoted),
    matching the original behaviour — confirm that is intended.
    """
    return urllib.parse.quote_plus(str(os.environ.get(var_name)))


class MongodbClient:
    """Factory for MongoDB connections configured via MONGODB_* env vars."""

    # Read once at class-definition (import) time, as in the original code.
    username = _quoted_env('MONGODB_USERNAME')
    password = _quoted_env('MONGODB_PASSWORD')
    host = _quoted_env('MONGODB_HOST')
    port = _quoted_env('MONGODB_PORT')

    def client(self):
        """Return a new :class:`MongoClient` for the configured URI."""
        uri = f'mongodb://{self.username}:{self.password}@{self.host}:{self.port}'
        return MongoClient(uri)
# API
# ------------------------------------------------------------------------------
class BaseAPI(ABC):
    """Template for API pollers: fetch a payload, stamp it, store it in MongoDB."""

    @abstractmethod
    def get_api_data(self):
        """Return a dict with keys: source, status_code, data, collection_name."""
        ...

    def save(self):
        """Fetch the API payload, annotate every record and persist the batch.

        Returns a summary dict on success; returns None (after logging) when
        the underlying HTTP request fails.
        """
        try:
            payload = self.get_api_data()
            source = payload.get('source')
            status_code = payload.get('status_code')
            LOGGER.info(f'Get data from {source}')
            stamp = dt.datetime.now()
            # Tag each record with when and where it was fetched.
            docs = [{**record, '_created_at': stamp, '_source': source}
                    for record in payload.get('data')]
            total = len(docs)
            LOGGER.info(f'Request successful: "status_code": {status_code}, "count": {total}')
            created = self.create_mongo_docs(docs, stamp, payload.get('collection_name'))
            LOGGER.info(f'Created {created} docs at mongodb')
            return {"status_code": status_code, "count_docs": total}
        except (ConnectionError, Timeout, TooManyRedirects) as exc:
            LOGGER.error('An error has occurred. Check traceback.')
            print(exc)

    def create_mongo_docs(self, data, timestamp, collection_name):
        """Insert `data` into `collection_name` and record the request timestamp.

        Returns the number of inserted documents, or None on database errors.
        """
        try:
            database = MongodbClient().client()['cryptosdb']
            outcome = database[collection_name].insert_many(data)
            database[f'{collection_name}_requests_timestamp'].insert_one(
                {'request_timestamp': timestamp})
            return len(outcome.inserted_ids)
        except (ConnectionFailure, BulkWriteError) as exc:
            LOGGER.error('An error has occurred. Check traceback.')
            print(exc)
class CryptoAPI(ABC):
    """Interface for raw crypto API clients that perform the HTTP request."""
    @abstractmethod
    def make_request(self) -> Response:
        """Perform the HTTP request and return the raw `requests` Response."""
        ...
# Helpers
# ------------------------------------------------------------------------------
def send2mongo(api: "BaseAPI") -> dict:
    """Persist the API's latest payload to MongoDB.

    Delegates to ``api.save()`` and returns its summary dict
    ({'status_code': ..., 'count_docs': ...}), or None when the underlying
    request failed.  The previous ``-> None`` annotation was wrong: the
    result of ``save()`` is propagated to the caller.
    """
    return api.save()
def clear_collections(collection_name):
    """Delete every document from `collection_name` and its timestamp log."""
    client = MongodbClient().client()
    db = client['cryptosdb']
    collection_data = db[collection_name]
    # Companion collection created by BaseAPI.create_mongo_docs.
    collection_timestamp = db[f'{collection_name}_requests_timestamp']
    LOGGER.info(f'Removing docs from "{collection_data}" and "{collection_timestamp}" collection')
    collection_data.delete_many({})
    collection_timestamp.delete_many({})
# Telegram
# ------------------------------------------------------------------------------
class Telegram:
    """Minimal helper for pushing HTML messages to one fixed Telegram chat."""
    def __init__(self):
        # Bot credentials come from the environment; None when unset.
        self.token = os.environ.get('TELEGRAM_BOT_TOKEN')
        self.chat_id = os.environ.get('TELEGRAM_CHAT_ID')
    def send_message(self, msg):
        """Send `msg` (HTML parse mode), split into 4096-char chunks if too long.

        Note: when the message is chunked, only the response of the LAST
        chunk is returned (as a dict).
        """
        bot = telegram.Bot(token=self.token)
        if len(msg) > 4096:
            for x in range(0, len(msg), 4096):
                response = bot.send_message(self.chat_id, msg[x:x+4096], parse_mode=telegram.ParseMode.HTML)
        else:
            response = bot.send_message(self.chat_id, msg, parse_mode=telegram.ParseMode.HTML)
        return response.to_dict()
| StarcoderdataPython |
182044 | <filename>chip8.py<gh_stars>0
# Python emulator for Chip-8
import random
MEMSIZE = 0x1000    # 4 KiB of addressable CHIP-8 memory
REGSIZE = 0x10      # 16 general-purpose registers V0..VF
CHIP8_STRT = 0x200  # programs conventionally start at address 0x200
memory = bytearray(MEMSIZE)
V = bytearray(REGSIZE)
# NOTE(review): this binds the `random` MODULE object to I, not a number.
# The CHIP-8 index register is presumably meant to start at 0 — confirm intent.
I = random
PC = CHIP8_STRT  # program counter
Instr_H = 0x00   # high byte of the currently fetched instruction
Instr_L = 0x00   # low byte of the currently fetched instruction
def Ifetch(prog_cntr):
    """Fetch the two-byte instruction at `prog_cntr` into Instr_H / Instr_L.

    Also advances the global program counter PC past the fetched word.
    The original ``prog_cntr += 2`` only incremented the local parameter,
    so PC never moved and the module's fetch loop could not make progress.
    """
    global Instr_H
    global Instr_L
    global PC
    Instr_H = memory[prog_cntr]
    Instr_L = memory[prog_cntr + 1]
    PC = prog_cntr + 2
    return
def chip8_init():
    """Fill every cell of emulator memory with a random byte (test pattern)."""
    for addr in range(len(memory)):
        memory[addr] = random.randint(0, 0xFF)
chip8_init() # initialize chip8
# Dump a window of memory around the program start for inspection.
print(memory[0x180:0x280].hex())
# Fetch loop stub: repeatedly read the two-byte instruction at PC and print it.
# Relies on Ifetch() advancing the global PC toward the 0x220 stop address —
# TODO(review): confirm Ifetch actually updates PC, otherwise this never ends.
while True:
    Ifetch(PC)
    print(Instr_H, Instr_L)
    if PC == 0x220:
        break
| StarcoderdataPython |
181453 | <filename>operators.py
from functools import reduce
from math import factorial
import numpy as np
import scipy.sparse as sp
def differences(accuracy, order):
    """Return finite-difference stencil coefficients for each derivative order.

    Implemented after http://web.media.mit.edu/~crtaylor/calculator.html:
    for the symmetric stencil points -accuracy..accuracy, the coefficients
    of the d-th derivative are factorial(d) times row (-d - 1) of the
    inverse Vandermonde matrix of the points.  By the properties of square
    Vandermonde matrices this matrix is invertible (non-singular) iff all
    stencil points are unique, which is always the case here.

    Parameters
    ----------
    accuracy : int
        Half-width of the stencil; 2 * accuracy + 1 points are used.
    order : iterable of int
        Derivative orders to produce coefficient vectors for.

    Returns
    -------
    tuple of numpy.ndarray
        One coefficient vector per requested derivative order.
    """
    # The Vandermonde matrix and its inverse depend only on the stencil
    # points, so compute them once rather than once per derivative order.
    points = list(range(-accuracy, accuracy + 1))
    inverse = np.linalg.inv(np.vander(points))
    return tuple(inverse[-o - 1] * factorial(o) for o in order)
def matrices(shape, operators, combine):
    """Build sparse finite-difference matrices for an n-dimensional grid.

    `shape` gives the grid extent per axis and `operators` one
    stencil-coefficient vector per axis (as produced by `differences`).
    When `combine` is True the per-axis operators are folded into a single
    Kronecker-sum operator; when False, one full-grid matrix per axis is
    returned as a tuple.
    """
    def parts():
        for i, o in enumerate(operators):
            # Lay the stencil coefficients along banded diagonals of a
            # one-dimensional operator for axis i; offset 0 is the center tap.
            diagonals = []
            for j, p in enumerate(o):
                index = j - len(o) // 2
                diagonals.append((p * np.ones(shape[i] - abs(index)), index))
            matrix = sp.diags(*zip(*diagonals))
            if combine:
                yield matrix
            else:
                # The sum of these kronecker product folds is equivalent to the kronecker sum of all the matrices.
                # This identity can be derived from the properties of the kronecker product.
                # This is useful when you need to apply each operator on a different axis,
                # like in the case of finding the divergence of a velocity field using the gradient.
                yield reduce(sp.kron, (matrix if k == i else sp.identity(d) for k, d in enumerate(shape)))
    # Credit to <NAME> for figuring out that kronsum's argument order is reversed.
    # Without that bit of wisdom I'd have lost it.
    return reduce(lambda a, b: sp.kronsum(b, a), parts()) if combine else tuple(parts())
| StarcoderdataPython |
1620875 | import gym
import torch
import tensorboardX
from agents import TD3
import argparse
import os
import utils
import numpy as np
def main(args):
    """Train a TD3 agent on the configured gym environment.

    Collects transitions with a 4-step action repeat and a 4-frame state
    stack, trains every other timestep once the replay warm-up has passed,
    and periodically checkpoints the actor network to ./SaveModel.
    """
    env = gym.make(args['env_name'])
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    action_dim = env.action_space.shape[0]
    max_action = env.action_space.high[0]
    state_dim = env.observation_space.shape[0]
    td3 = TD3(args, action_dim, max_action, state_dim, device)
    summary = tensorboardX.SummaryWriter('./log/{}_td3_{}'.format(args['env_name'], args['noise_type']))
    timestep = 0
    for episode in range(args['max_episode']):
        episode_reward = 0
        state = env.reset()
        state = utils.init_state(state)
        while True:
            # Pure exploration at the start, policy actions afterwards.
            if timestep < args['random_action_timestep'] :
                select = env.action_space.sample()
                action = utils.carRace_action_to_output(select)
            else :
                action = td3.get_action(state)
                select = utils.carRace_output_to_action(action)
            tmp_reward = 0
            # Repeat the chosen action for 4 env steps, accumulating reward.
            for i in range(4):
                tmp_next_state, reward, done, info = env.step(select)
                tmp_reward += reward
            tmp_next_state = utils.preprocess(tmp_next_state)
            tmp_next_state = tmp_next_state[np.newaxis, np.newaxis, :, :]
            # Prepend the new frame to the 3 most recent ones (stack of 4).
            next_state = np.append(tmp_next_state, state[:, :3, :, :], axis=1)
            # show_state(next_state)
            td3.save(state, action[0], tmp_reward, next_state, int(done))
            episode_reward += tmp_reward
            state = next_state.copy()
            timestep += 1
            if timestep > args['train_start_timestep']:
                # Train on every other timestep to interleave collection/updates.
                if timestep % 2 == 0 :
                    td3.train(summary, timestep)
            if done:
                print('episode: ', episode, ' reward : %.3f'%(episode_reward), ' timestep :', timestep)
                summary.add_scalar('reward/timestep', episode_reward, timestep)
                break
        if episode % args['save_freq'] == 0:
            if not os.path.exists('./SaveModel') :
                os.mkdir('./SaveModel')
            torch.save(td3.actor.state_dict(), './SaveModel/{}_td3_{}_{}'.format(args['env_name'], args['noise_type'], episode))
if __name__ == '__main__':
    # Hyperparameters and runtime options for TD3 training.
    parser = argparse.ArgumentParser()
    parser.add_argument('--seed', default=0)
    parser.add_argument('--env-name', default='CarRacing-v0')
    parser.add_argument('--env-seed', default=0)
    # NOTE(review): `type=bool` makes any non-empty string truthy; argparse
    # booleans usually need action='store_true' — confirm intent.
    parser.add_argument('--render', default=False, type=bool)
    parser.add_argument('--evaluate', default=False, type=bool)
    parser.add_argument('--model-directory', default='./SaveModel/Pendulum-v0_210', type=str)
    # Training schedule
    parser.add_argument('--max-episode', default=1000000)
    parser.add_argument('--save-freq', default=50)
    # TD3 hyperparameters
    parser.add_argument('--actor-lr', default=3e-4)
    parser.add_argument('--critic-lr', default=1e-3)
    parser.add_argument('--gamma', default=0.99)
    parser.add_argument('--memory-size', default=350000)
    parser.add_argument('--noise_type', default='gaussian')
    parser.add_argument('--noise-delta', default=0.1)
    parser.add_argument('--batch-size', default=32)
    parser.add_argument('--train-start-timestep', default=2000)
    parser.add_argument('--random-action-timestep', default=100)
    parser.add_argument('--tau', default=5e-3)
    args = vars(parser.parse_args())
    main(args)
1696536 | <reponame>kfarrelly/nucleo
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime, dateutil.parser, parse, requests, stream, sys
from allauth.account.adapter import get_adapter
from allauth.account import views as allauth_account_views
from allauth.utils import build_absolute_uri
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.sites.shortcuts import get_current_site
from django.db.models import (
Avg, BooleanField, Case, ExpressionWrapper, F, FloatField,
prefetch_related_objects, Value, When,
)
from django.db.models.functions import Lower, Trunc
from django.http import (
Http404, HttpResponse, HttpResponseNotFound, HttpResponseRedirect,
)
from django.http.request import QueryDict
from django.shortcuts import get_object_or_404, render
from django.urls import reverse, reverse_lazy
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from django.views import generic
from django.views.decorators.csrf import csrf_exempt
from functools import partial
from stellar_base.address import Address
from stellar_base.asset import Asset as StellarAsset
from stellar_base.operation import Operation
from stellar_base.stellarxdr import Xdr
from stellar_base.utils import AccountNotExistError
from stream_django.client import stream_client
from stream_django.feed_manager import feed_manager
from urlparse import urlparse
from . import forms, mixins
from .models import (
Account, AccountFundRequest, Asset, FollowRequest, Portfolio,
portfolio_data_collector, Profile, RawPortfolioData,
)
# Web app views
# Landing page
class HomeView(generic.TemplateView):
    """
    Landing page: top 5 users by portfolio performance rank and top 5
    Stellar assets by StellarTerm activity score.
    Prefetch assets_trusting to also preview portfolio assets with each list item.
    """
    template_name = 'index.html'
    def get_context_data(self, **kwargs):
        """
        Update context with top 5 performing users and top 5 Stellar assets.
        """
        context = super(HomeView, self).get_context_data(**kwargs)
        # Only fetch top 5 users
        context['allowed_portfolio_displays'] = [ 'usd_value', 'performance_1d' ]
        context['portfolio_display'] = 'performance_1d'
        context['user_list'] = get_user_model().objects\
            .exclude(profile__portfolio__rank=None)\
            .filter(profile__portfolio__rank__lte=5)\
            .prefetch_related('assets_trusting', 'profile__portfolio')\
            .order_by('profile__portfolio__rank')
        # Only fetch top 5 assets
        context['allowed_asset_displays'] = [ 'activityScore', 'price_USD',
            'change24h_USD' ]
        context['asset_display'] = 'price_USD'
        # Fetch the StellarTerm ticker json and store
        # NOTE(review): synchronous HTTP call with no timeout on every page
        # load — confirm this is acceptable for the landing page.
        r = requests.get(settings.STELLARTERM_TICKER_URL)
        json = r.json()
        ticker_assets = json.get('assets', [])
        # NOTE: Need to get USD/XLM 24 hour change from _meta key (not in XLM-native asset)
        xlm_change24h_USD = None
        if '_meta' in json and 'externalPrices' in json['_meta']\
        and 'USD_XLM_change' in json['_meta']['externalPrices']:
            xlm_change24h_USD = json['_meta']['externalPrices']['USD_XLM_change']
        # Clean the ticker assets to only include those that have
        # the display attribute
        cleaned_ticker_assets = [
            a for a in ticker_assets
            if a['id'] == 'XLM-native' or ('activityScore' in a and a['activityScore'] != None)
        ]
        # Parse to get asset_ids for queryset filter
        top_asset_ids = [ a['id'] for a in cleaned_ticker_assets ]
        # Store the dict version of ticker assets
        ticker_assets = { a['id']: a for a in cleaned_ticker_assets }
        # Order the qset by activityScore
        # TODO: Figure out how to annotate qset properly
        assets = list(Asset.objects.filter(asset_id__in=top_asset_ids))
        # Copy the ticker display values onto the model instances for templates.
        for a in assets:
            for display in context['allowed_asset_displays']:
                if a.asset_id == 'XLM-native' and display == 'change24h_USD':
                    # Handling the XLM-native USD % change edge case
                    setattr(a, display, xlm_change24h_USD)
                else:
                    setattr(a, display, ticker_assets[a.asset_id].get(display))
        assets.sort(key=lambda a: getattr(a, 'activityScore'), reverse=True)
        context['asset_list'] = assets[:5]
        return context
## Allauth
class SignupView(mixins.RecaptchaContextMixin, allauth_account_views.SignupView):
    """Signup view that forwards the posted reCAPTCHA token for validation."""
    form_class = forms.SignupForm
    def get_form_kwargs(self):
        """Add the 'g-recaptcha-response' POST value to the form kwargs."""
        form_kwargs = super(SignupView, self).get_form_kwargs()
        form_kwargs['g-recaptcha-response'] = self.request.POST.get('g-recaptcha-response')
        return form_kwargs
class PasswordChangeView(allauth_account_views.PasswordChangeView):
    """
    Override so success url redirects to user settings.
    """
    # reverse_lazy: the URLconf is not yet loaded at class-definition time.
    success_url = reverse_lazy('nc:user-settings-redirect')
class SignupStellarUpdateView(LoginRequiredMixin, mixins.AccountFormContextMixin,
    generic.TemplateView):
    """Signup step that renders the Stellar account linking form."""
    template_name = 'account/signup_stellar_update_form.html'
    # Attribute path AccountFormContextMixin uses to resolve the target user.
    user_field = 'request.user'
class SignupUserUpdateView(LoginRequiredMixin, mixins.PrefetchedSingleObjectMixin,
    generic.UpdateView):
    """Signup step for editing the new user's profile and privacy settings."""
    model = get_user_model()
    form_class = forms.UserProfileWithPrivacyUpdateMultiForm
    template_name = 'account/signup_profile_update_form.html'
    success_url = reverse_lazy('account-signup-stellar-update')
    prefetch_related_lookups = ['profile']
    def get_object(self, queryset=None):
        """
        Just return the request.user object with prefetched profile.
        """
        if queryset is None:
            queryset = self.get_queryset()
        try:
            # Get the single item from the filtered queryset
            obj = queryset.get()
        except queryset.model.DoesNotExist:
            raise Http404(_("No %(verbose_name)s found matching the query") %
                          {'verbose_name': queryset.model._meta.verbose_name})
        return obj
    def get_form_kwargs(self):
        """
        Need to override to pass in appropriate instances to multiform.
        https://django-betterforms.readthedocs.io/en/latest/multiform.html#working-with-updateview
        """
        kwargs = super(SignupUserUpdateView, self).get_form_kwargs()
        kwargs.update(instance={
            'user': self.object,
            'profile': self.object.profile,
        })
        return kwargs
    def get_queryset(self):
        """
        Authenticated user can only update themselves.
        """
        return self.model.objects.filter(id=self.request.user.id)
class SignupUserFollowingUpdateView(LoginRequiredMixin, generic.ListView):
    """Signup step listing top-ranked users the new account may follow."""
    template_name = 'account/signup_profile_follow_update_form.html'
    def get_queryset(self):
        """
        Queryset is users sorted by performance rank.
        Prefetch assets_trusting to also preview portfolio assets with each list item.
        """
        # Aggregate the users current user is following and has requested to follow
        # for annotation
        is_following_ids = [
            u.id for u in get_user_model().objects\
                .filter(profile__in=self.request.user.profiles_following.all())
        ]
        requested_to_follow_ids = [
            r.user.id for r in self.request.user.requests_to_follow.all()
        ]
        # Only return top 25 users
        return get_user_model().objects\
            .exclude(profile__portfolio__rank=None)\
            .filter(profile__portfolio__rank__lte=25)\
            .annotate(is_following=Case(
                When(id__in=is_following_ids, then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            ))\
            .annotate(requested_to_follow=Case(
                When(id__in=requested_to_follow_ids, then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            ))\
            .prefetch_related('assets_trusting', 'profile__portfolio')\
            .order_by('profile__portfolio__rank')
## User
class UserDetailView(mixins.PrefetchedSingleObjectMixin, mixins.IndexContextMixin,
    mixins.LoginRedirectContextMixin, mixins.ActivityFormContextMixin,
    mixins.AccountFormContextMixin, mixins.FeedActivityContextMixin,
    mixins.ViewTypeContextMixin, mixins.DepositAssetsContextMixin,
    mixins.UserAssetsContextMixin, generic.DetailView):
    """Public profile page for a user, looked up by username slug."""
    model = get_user_model()
    slug_field = 'username'
    template_name = 'nc/profile.html'
    prefetch_related_lookups = ['accounts', 'profile__portfolio']
    feed_type = settings.STREAM_USER_FEED
    user_field = 'object'
    view_type = 'profile'
    def get_context_data(self, **kwargs):
        """
        Provide a cryptographically signed username of authenticated user
        for add Stellar account if detail object is current user.
        """
        context = super(UserDetailView, self).get_context_data(**kwargs)
        if self.object:
            # Update the context for follow attrs
            context['followers_count'] = self.object.profile.followers.count()
            context['following_count'] = self.object.profiles_following.count()
            context['is_following'] = self.object.profile.followers\
                .filter(id=self.request.user.id).exists() if self.request.user.is_authenticated else False
            context['requested_to_follow'] = self.object.follower_requests\
                .filter(requester=self.request.user).exists() if self.request.user.is_authenticated else False
            # Update the context for short teaser line of users
            # who follow self.object that self.request.user also follows
            q_followers_user_follows = self.object.profile.followers\
                .filter(profile__in=self.request.user.profiles_following.all())\
                .order_by(Lower('username'))\
                if self.request.user.is_authenticated\
                else get_user_model().objects.none()
            context['followers_user_follows_teaser'] = q_followers_user_follows[0:2]
            context['followers_user_follows_teaser_count'] = len(context['followers_user_follows_teaser'])
            context['followers_user_follows_teaser_more_count'] = q_followers_user_follows.count() - context['followers_user_follows_teaser_count']
        return context
class UserRedirectView(LoginRequiredMixin, generic.RedirectView):
    """Redirect the authenticated user to their own profile detail page."""
    query_string = True
    pattern_name = 'nc:user-detail'
    def get_redirect_url(self, *args, **kwargs):
        """Inject the current user's username as the profile slug."""
        kwargs['slug'] = self.request.user.username
        return super(UserRedirectView, self).get_redirect_url(*args, **kwargs)
class UserUpdateView(LoginRequiredMixin, mixins.PrefetchedSingleObjectMixin,
    mixins.IndexContextMixin, mixins.ViewTypeContextMixin, generic.UpdateView):
    """Let the authenticated user edit their own user and profile records."""
    model = get_user_model()
    slug_field = 'username'
    form_class = forms.UserProfileUpdateMultiForm
    template_name = 'nc/profile_update_form.html'
    success_url = reverse_lazy('nc:user-redirect')
    prefetch_related_lookups = ['profile']
    view_type = 'profile'
    def get_form_kwargs(self):
        """Attach the multiform instances (user + profile) to the form kwargs.

        See https://django-betterforms.readthedocs.io/en/latest/multiform.html#working-with-updateview
        """
        form_kwargs = super(UserUpdateView, self).get_form_kwargs()
        form_kwargs['instance'] = {
            'user': self.object,
            'profile': self.object.profile,
        }
        return form_kwargs
    def get_queryset(self):
        """Restrict the queryset to the authenticated user themselves."""
        return self.model.objects.filter(id=self.request.user.id)
class UserSettingsRedirectView(LoginRequiredMixin, generic.RedirectView):
    """Redirect the authenticated user to their own settings page."""
    query_string = True
    pattern_name = 'nc:user-settings-update'
    def get_redirect_url(self, *args, **kwargs):
        """Inject the current user's username as the settings slug."""
        kwargs['slug'] = self.request.user.username
        return super(UserSettingsRedirectView, self).get_redirect_url(*args, **kwargs)
class UserSettingsUpdateView(LoginRequiredMixin, mixins.PrefetchedSingleObjectMixin,
    mixins.IndexContextMixin, mixins.ViewTypeContextMixin, generic.UpdateView):
    """Let the authenticated user edit their email and privacy settings."""
    model = get_user_model()
    slug_field = 'username'
    form_class = forms.ProfileSettingsUpdateMultiForm
    template_name = 'nc/profile_settings_update_form.html'
    success_url = reverse_lazy('nc:user-redirect')
    prefetch_related_lookups = ['profile']
    view_type = 'profile'
    def get_form_kwargs(self):
        """
        Need to override to pass in appropriate instance to form.
        https://django-betterforms.readthedocs.io/en/latest/multiform.html#working-with-updateview
        """
        kwargs = super(UserSettingsUpdateView, self).get_form_kwargs()
        # Both sub-forms of the multiform edit the same Profile instance.
        kwargs.update(instance={
            'email': self.object.profile,
            'privacy': self.object.profile,
        })
        return kwargs
    def get_queryset(self):
        """
        Authenticated user can only update themselves.
        """
        return self.model.objects.filter(id=self.request.user.id)
class UserFollowUpdateView(LoginRequiredMixin, mixins.PrefetchedSingleObjectMixin,
    mixins.IndexContextMixin, mixins.ViewTypeContextMixin, generic.UpdateView):
    """Toggle whether the authenticated user follows the detail user.

    Handles three cases on POST: unfollow, follow-request for private
    profiles, and direct follow for public profiles (with Stream feed and
    email side effects).
    """
    model = get_user_model()
    slug_field = 'username'
    form_class = forms.UserFollowUpdateForm
    template_name = 'nc/profile_follow_update_form.html'
    prefetch_related_lookups = ['profile']
    view_type = 'profile'
    def get_context_data(self, **kwargs):
        """Expose whether the current user already follows the detail user."""
        context = super(UserFollowUpdateView, self).get_context_data(**kwargs)
        if self.object:
            context['is_following'] = self.object.profile.followers\
                .filter(id=self.request.user.id).exists()
        return context
    def get_success_url(self):
        """
        If success url passed into query param, then use for redirect.
        Otherwise, simply redirect to followed user's profile page.
        """
        if self.success_url:
            return self.success_url
        return reverse('nc:user-detail', kwargs={'slug': self.object.username})
    def post(self, request, *args, **kwargs):
        """
        Toggle whether authenticated user is following detail user.
        """
        self.object = self.get_object()
        self.success_url = request.POST.get('success_url', None)
        # Guard against users following themselves.
        if self.object and self.object != self.request.user:
            is_following = self.object.profile.followers\
                .filter(id=request.user.id).exists()
            # Add/remove from followers list and notify stream API of follow/unfollow
            if is_following:
                self.object.profile.followers.remove(request.user)
                feed_manager.unfollow_user(request.user.id, self.object.id)
            elif self.object.profile.is_private:
                # If private account, send self.object user a follower request
                # with notification.
                follower_request, created = FollowRequest.objects\
                    .get_or_create(user=self.object, requester=request.user)
                # Send an email to user being followed
                if created and self.object.profile.allow_follower_email:
                    activity_path = reverse('nc:feed-activity')
                    activity_url = build_absolute_uri(request, activity_path)
                    email_settings_path = reverse('nc:user-settings-redirect')
                    email_settings_url = build_absolute_uri(request, email_settings_path)
                    ctx_email = {
                        'current_site': get_current_site(request),
                        'username': request.user.username,
                        'activity_url': activity_url,
                        'email_settings_url': email_settings_url,
                    }
                    get_adapter(request).send_mail('nc/email/feed_activity_follow_request',
                        self.object.email, ctx_email)
                else:
                    # Delete the follow request since request.user has just
                    # toggled follow request off.
                    follower_request.delete()
            else:
                # Otherwise, simply add to list of followers
                self.object.profile.followers.add(request.user)
                feed_manager.follow_user(request.user.id, self.object.id)
                # Add new activity to feed of user following
                # NOTE: Not using stream-django model mixin because don't want Follow model
                # instances in the Nucleo db. Adapted from feed_manager.add_activity_to_feed()
                feed = feed_manager.get_feed(settings.STREAM_USER_FEED, request.user.id)
                request_user_profile = request.user.profile
                feed.add_activity({
                    'actor': request.user.id,
                    'verb': 'follow',
                    'object': self.object.id,
                    'actor_username': request.user.username,
                    'actor_pic_url': request_user_profile.pic_url(),
                    'actor_href': request_user_profile.href(),
                    'object_username': self.object.username,
                    'object_pic_url': self.object.profile.pic_url(),
                    'object_href': self.object.profile.href(),
                })
                # Send an email to user being followed
                if self.object.profile.allow_follower_email:
                    profile_path = reverse('nc:user-detail', kwargs={'slug': request.user.username})
                    profile_url = build_absolute_uri(request, profile_path)
                    email_settings_path = reverse('nc:user-settings-redirect')
                    email_settings_url = build_absolute_uri(request, email_settings_path)
                    ctx_email = {
                        'current_site': get_current_site(request),
                        'username': request.user.username,
                        'profile_url': profile_url,
                        'email_settings_url': email_settings_url,
                    }
                    get_adapter(request).send_mail('nc/email/feed_activity_follow',
                        self.object.email, ctx_email)
        return HttpResponseRedirect(self.get_success_url())
class UserFollowRequestUpdateView(LoginRequiredMixin, mixins.PrefetchedSingleObjectMixin,
    mixins.IndexContextMixin, mixins.ViewTypeContextMixin, generic.UpdateView):
    """Accept a pending follow request aimed at the authenticated user."""
    model = get_user_model()
    slug_field = 'username'
    form_class = forms.UserFollowRequestUpdateForm
    template_name = 'nc/profile_follow_request_update_form.html'
    prefetch_related_lookups = ['profile']
    view_type = 'profile'
    def get_success_url(self):
        """
        If success url passed into query param, then use for redirect.
        Otherwise, simply redirect to followed user's profile page.
        """
        if self.success_url:
            return self.success_url
        return reverse('nc:feed-activity')
    def get_object(self, queryset=None):
        """
        Also retrieve and store the follower request.
        """
        obj = super(UserFollowRequestUpdateView, self).get_object(queryset)
        # 404 when no pending request exists from obj to the current user.
        self.follow_request = get_object_or_404(FollowRequest,
            requester=obj, user=self.request.user)
        return obj
    def post(self, request, *args, **kwargs):
        """
        Allow requester to follow current user.
        """
        self.object = self.get_object()
        self.success_url = request.POST.get('success_url', None)
        # Simply add to list of followers
        request.user.profile.followers.add(self.object)
        feed_manager.follow_user(self.object.id, request.user.id)
        # Add new activity to feed of user following
        # NOTE: Not using stream-django model mixin because don't want Follow model
        # instances in the Nucleo db. Adapted from feed_manager.add_activity_to_feed()
        feed = feed_manager.get_feed(settings.STREAM_USER_FEED, self.object.id)
        request_user_profile = request.user.profile
        feed.add_activity({
            'actor': self.object.id,
            'verb': 'follow',
            'object': request.user.id,
            'actor_username': self.object.username,
            'actor_pic_url': self.object.profile.pic_url(),
            'actor_href': self.object.profile.href(),
            'object_username': request.user.username,
            'object_pic_url': request_user_profile.pic_url(),
            'object_href': request_user_profile.href(),
        })
        # Delete the follow request
        self.follow_request.delete()
        # Send an email to user following to notify of confirmation
        if self.object.profile.allow_follower_email:
            profile_path = reverse('nc:user-detail', kwargs={'slug': request.user.username})
            profile_url = build_absolute_uri(request, profile_path)
            email_settings_path = reverse('nc:user-settings-redirect')
            email_settings_url = build_absolute_uri(request, email_settings_path)
            ctx_email = {
                'current_site': get_current_site(request),
                'username': request.user.username,
                'profile_url': profile_url,
                'email_settings_url': email_settings_url,
            }
            get_adapter(request).send_mail('nc/email/feed_activity_follow_confirm',
                self.object.email, ctx_email)
        return HttpResponseRedirect(self.get_success_url())
class UserFollowRequestDeleteView(LoginRequiredMixin, mixins.PrefetchedSingleObjectMixin,
    mixins.IndexContextMixin, mixins.ViewTypeContextMixin, generic.DeleteView):
    """Decline (delete) a pending follow request aimed at the current user."""
    model = get_user_model()
    slug_field = 'username'
    template_name = 'nc/profile_follow_request_confirm_delete.html'
    prefetch_related_lookups = ['profile']
    view_type = 'profile'
    def get_success_url(self):
        """
        If success url passed into query param, then use for redirect.
        Otherwise, simply redirect to followed user's profile page.
        """
        if self.success_url:
            return self.success_url
        return reverse('nc:feed-activity')
    def get_object(self, queryset=None):
        """
        Also retrieve and store the follower request.
        """
        obj = super(UserFollowRequestDeleteView, self).get_object(queryset)
        # 404 when no pending request exists from obj to the current user.
        self.follow_request = get_object_or_404(FollowRequest,
            requester=obj, user=self.request.user)
        return obj
    def delete(self, request, *args, **kwargs):
        """
        Delete the follow request associated with user obj.
        """
        self.object = self.get_object()
        self.success_url = request.POST.get('success_url', None)
        # Delete the follow request
        self.follow_request.delete()
        return HttpResponseRedirect(self.get_success_url())
class UserFollowerListView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.ViewTypeContextMixin, generic.ListView):
    """Paginated list of a user's followers (optionally only those the
    current user also follows, via ?followed_by=true)."""
    template_name = 'nc/profile_follow_list.html'
    paginate_by = 50
    view_type = 'profile'
    def get_context_data(self, **kwargs):
        """
        Add a boolean for template to determine if listing followers
        or following.
        """
        context = super(UserFollowerListView, self).get_context_data(**kwargs)
        context['in_followers'] = True
        context['in_followers_user_follows'] = self.in_followers_user_follows
        context['object'] = self.object
        context['is_following'] = self.is_following
        return context
    def get_queryset(self):
        """Followers of the detail user, annotated with the current user's
        follow/request state toward each of them."""
        self.object = get_object_or_404(get_user_model(), username=self.kwargs['slug'])
        self.profile = self.object.profile
        # If curr user is not following and self.object has private profile,
        # need to throw a 404
        self.is_following = self.profile.followers\
            .filter(id=self.request.user.id).exists()
        if self.object.id != self.request.user.id and not self.is_following and self.profile.is_private:
            raise Http404('No %s matches the given query.' % get_user_model()._meta.object_name)
        is_following_ids = [
            u.id for u in get_user_model().objects\
                .filter(profile__in=self.request.user.profiles_following.all())
        ]
        requested_to_follow_ids = [
            r.user.id for r in self.request.user.requests_to_follow.all()
        ]
        # Check whether we're in Followed By list view page
        # If so, then filter queryset by users current user also follows
        self.in_followers_user_follows = ('true' == self.request.GET.get('followed_by', 'false')) # Default to False
        qset = self.object.profile.followers
        if self.in_followers_user_follows:
            qset = qset.filter(profile__in=self.request.user.profiles_following.all())
        return qset.annotate(is_following=Case(
                When(id__in=is_following_ids, then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            ))\
            .annotate(requested_to_follow=Case(
                When(id__in=requested_to_follow_ids, then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            ))\
            .order_by(Lower('first_name'))\
            .prefetch_related('profile')
class UserFollowingListView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.ViewTypeContextMixin, generic.ListView):
    """Paginated list of the users that the detail user is following."""
    template_name = 'nc/profile_follow_list.html'
    paginate_by = 50
    view_type = 'profile'
    def get_context_data(self, **kwargs):
        """
        Add a boolean for template to determine if listing followers
        or following.
        """
        context = super(UserFollowingListView, self).get_context_data(**kwargs)
        context['in_followers'] = False
        context['in_followers_user_follows'] = False
        context['object'] = self.object
        context['is_following'] = self.is_following
        return context
    def get_queryset(self):
        """Users the detail user follows, annotated with the current user's
        follow/request state toward each of them."""
        self.object = get_object_or_404(get_user_model(), username=self.kwargs['slug'])
        self.profile = self.object.profile
        # If curr user is not following and self.object has private profile,
        # need to throw a 404
        self.is_following = self.profile.followers\
            .filter(id=self.request.user.id).exists()
        if self.object.id != self.request.user.id and not self.is_following and self.profile.is_private:
            raise Http404('No %s matches the given query.' % get_user_model()._meta.object_name)
        is_following_ids = [
            u.id for u in get_user_model().objects\
                .filter(profile__in=self.request.user.profiles_following.all())
        ]
        requested_to_follow_ids = [
            r.user.id for r in self.request.user.requests_to_follow.all()
        ]
        return get_user_model().objects\
            .filter(profile__in=self.object.profiles_following.all())\
            .annotate(is_following=Case(
                When(id__in=is_following_ids, then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            ))\
            .annotate(requested_to_follow=Case(
                When(id__in=requested_to_follow_ids, then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            ))\
            .order_by(Lower('first_name'))\
            .prefetch_related('profile')
class UserPortfolioDataListView(mixins.JSONResponseMixin, generic.TemplateView):
    """JSON endpoint serving a user's aggregated portfolio value history."""
    template_name = "nc/profile_portfolio_data_list.html"
    def render_to_response(self, context):
        """
        Returns only JSON. Not meant for actual HTML page viewing.
        In future, transition this to DRF API endpoint.
        """
        return self.render_to_json_response(context)
    def get_context_data(self, **kwargs):
        """
        Context is portfolio history data for given user.

        Expects 'start' and 'end' query params as UTC timestamps in
        milliseconds, plus an optional 'counter_code' of USD or XLM.
        """
        context = {}
        params = self.request.GET.copy()
        # Get user's portfolio prefetched
        self.object = get_object_or_404(get_user_model(), username=self.kwargs['slug'])
        self.profile = self.object.profile
        portfolio = self.profile.portfolio
        # If curr user is not following and self.object has private profile,
        # need to throw a 404
        self.is_following = self.profile.followers\
            .filter(id=self.request.user.id).exists() if self.request.user.is_authenticated else False
        if self.profile.is_private and not self.is_following and self.object != self.request.user:
            raise Http404('No %s matches the given query.' % get_user_model()._meta.object_name)
        # Determine the counter asset to use
        allowed_counter_codes = ['USD', 'XLM']
        counter_code = params.get('counter_code', 'USD')
        if counter_code not in allowed_counter_codes:
            counter_code = allowed_counter_codes[0] # default to USD
        params['counter_code'] = counter_code
        value_attr = '{0}_value'.format(counter_code.lower())
        # Get the start, end query params
        # From portfolio_chart.js, we pass in start, end as
        # UTC timestamp in milliseconds, so need to convert
        start = datetime.datetime.utcfromtimestamp(float(params.get('start')) / 1000.0)
        end = datetime.datetime.utcfromtimestamp(float(params.get('end')) / 1000.0)
        # Determine trunc time interval to use for aggregated portfolio value data
        # Adapt for client side getResolution() in asset_chart.js, but for
        # allowed Django trunc values.
        # NOTE: https://docs.djangoproject.com/en/2.1/ref/models/database-functions/#trunc
        # (renamed from `range` to avoid shadowing the builtin)
        time_span = end - start
        if time_span < datetime.timedelta(days=14):
            # Two week range loads hour data
            resolution = 'hour'
        elif time_span < datetime.timedelta(days=730):
            # 2 year range loads daily data
            resolution = 'day'
        else:
            # Otherwise, use months
            resolution = 'month'
        # Update the params with username and counter code. Then add to the context
        params.update({
            'username': self.object.username
        })
        context.update(params)
        # Retrieve the raw data with values aggregated based on interval length specified
        q_portfolio_raw_data = portfolio.rawdata.filter(created__gte=start, created__lte=end)\
            .annotate(time=Trunc('created', resolution)).values('time')\
            .annotate(value=Avg(value_attr)).order_by('time')
        # Parse for appropriate json format then update context
        # (renamed from `json` to avoid shadowing the stdlib module name)
        payload = {
            'results': [ d for d in q_portfolio_raw_data ]
        }
        context.update(payload)
        # Add last portfolio USD value and creation date of raw data
        portfolio_latest_rawdata = portfolio.rawdata.first()
        portfolio_latest_rawdata_value = getattr(portfolio_latest_rawdata, value_attr)\
            if portfolio_latest_rawdata else RawPortfolioData.NOT_AVAILABLE
        context['latest_value'] = portfolio_latest_rawdata_value\
            if portfolio_latest_rawdata_value != RawPortfolioData.NOT_AVAILABLE\
            else 0.0
        return context
## Account
class AccountCreateView(LoginRequiredMixin, mixins.AjaxableResponseMixin,
    mixins.IndexContextMixin, mixins.ViewTypeContextMixin, generic.CreateView):
    """
    Create a new Stellar account record tied to the authenticated user.
    """
    model = Account
    form_class = forms.AccountCreateForm
    success_url = reverse_lazy('nc:user-redirect')
    view_type = 'profile'
    def get_form_kwargs(self):
        """
        Extend the default form kwargs with the current request so the
        form can identify the authenticated user.
        """
        form_kwargs = super(AccountCreateView, self).get_form_kwargs()
        form_kwargs['request'] = self.request
        return form_kwargs
    def form_valid(self, form):
        """
        Attach the account user resolved by the form and the Stellar notifier
        id before committing the save, then redirect to the success URL.
        """
        account = form.save(commit=False)
        account.user = form.account_user
        account.save()
        self.object = account
        # Create notifier subscription for account
        # TODO: self.object.create_notifier_subscription(form.request)
        return HttpResponseRedirect(self.get_success_url())
class AccountUpdateView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.ViewTypeContextMixin, generic.UpdateView):
    # Edit an existing Stellar account record; looked up by its public key.
    model = Account
    slug_field = 'public_key'
    form_class = forms.AccountUpdateForm
    template_name = 'nc/account_update_form.html'
    success_url = reverse_lazy('nc:user-redirect')
    view_type = 'profile'
    def get_queryset(self):
        """
        Authenticated user can only update their verified accounts.
        """
        # Restricting the queryset (rather than checking in get_object)
        # makes other users' accounts 404 instead of 403.
        return self.request.user.accounts.all()
class AccountDeleteView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.ViewTypeContextMixin, generic.DeleteView):
    # Remove a Stellar account record; looked up by its public key.
    model = Account
    slug_field = 'public_key'
    success_url = reverse_lazy('nc:user-redirect')
    view_type = 'profile'
    def get_queryset(self):
        """
        Authenticated user can only delete their verified accounts.
        """
        return self.request.user.accounts.all()
class AccountFundRequestCreateView(LoginRequiredMixin, mixins.AjaxableResponseMixin,
    mixins.IndexContextMixin, mixins.ViewTypeContextMixin, generic.CreateView):
    """
    Create a request to have a new Stellar account funded on behalf of
    the authenticated user.
    """
    model = AccountFundRequest
    form_class = forms.AccountFundRequestCreateForm
    template_name = 'nc/account_fund_request_form.html'
    success_url = reverse_lazy('nc:user-redirect')
    view_type = 'profile'
    def get_form_kwargs(self):
        """
        Extend the default form kwargs with the current request so the
        form can identify the authenticated user.
        """
        form_kwargs = super(AccountFundRequestCreateView, self).get_form_kwargs()
        form_kwargs['request'] = self.request
        return form_kwargs
    def form_valid(self, form):
        """
        Record the authenticated user as the requester before committing
        the save, then redirect to the success URL.
        """
        fund_request = form.save(commit=False)
        fund_request.requester = self.request.user
        fund_request.save()
        self.object = fund_request
        return HttpResponseRedirect(self.get_success_url())
class AccountOperationListView(mixins.JSONResponseMixin, generic.TemplateView):
    """
    JSON pass-through listing Stellar operations for an account, with a
    supplemental { public_key: user } map for identity display.
    """
    template_name = 'nc/account_operation_list.html'
    def render_to_response(self, context):
        """
        Returns only JSON. Not meant for actual HTML page viewing.
        In future, transition this to DRF API endpoint.
        """
        return self.render_to_json_response(context)
    def get_context_data(self, **kwargs):
        """
        Pass through for Stellar operations data given account slug. Adds a
        list of relevant Nucleo users for public_keys involved in returned
        operations.
        """
        object = get_object_or_404(Account, public_key=self.kwargs['slug'])
        context = {}
        context['object'] = {
            'name': object.name,
            'public_key': object.public_key,
        }
        # Query Horizon to obtain pass through JSON response
        # TODO: In future, use DRF to serialize/deserialize Stellar objects properly
        address = Address(address=object.public_key,
            network=settings.STELLAR_NETWORK)
        # If any query params given by client, append the to the params dict
        # for ops GET call to Horizon
        params = self.request.GET.dict()
        json = address.operations(**params)
        # Store the next cursor if it exists so the client can page forward
        prev_cursor = params.get('cursor', None)
        cursor = None
        if '_links' in json and 'next' in json['_links'] and 'href' in json['_links']['next']:
            cursor = QueryDict(urlparse(json['_links']['next']['href']).query).get('cursor', None)
        context['cursor'] = cursor
        # No more pages when Horizon echoes back the same cursor
        context['has_more'] = cursor and prev_cursor != cursor
        # Store the records from Horizon in context
        records = json
        if '_embedded' in json and 'records' in json['_embedded']:
            records = json['_embedded']['records']
        context['records'] = records
        # Sort through list of returned operations to accumulate dict
        # of { public_key: user } for user identity mapping in template.
        context['accounts'] = self._parse_for_accounts(records)
        return context
    def _parse_operation_for_accounts(self, record):
        """
        Returns a list of relevant account public keys for given
        operation record. Unrecognized operation types yield an empty
        list so callers can safely flatten the results.
        """
        type_i = record['type_i']
        if type_i == Xdr.const.CREATE_ACCOUNT:
            return [ record['account'], record['funder'] ]
        elif type_i == Xdr.const.PAYMENT:
            return [ record['from'], record['to'] ]
        elif type_i == Xdr.const.PATH_PAYMENT:
            return [ record['from'], record['to'] ]
        elif type_i == Xdr.const.CHANGE_TRUST:
            return [ record['trustee'], record['trustor'] ]
        elif type_i == Xdr.const.ALLOW_TRUST:
            return [ record['trustee'], record['trustor'] ]
        elif type_i == Xdr.const.ACCOUNT_MERGE:
            return [ record['into'] ]
        # SET_OPTIONS, MANAGE_OFFER, CREATE_PASSIVE_OFFER, INFLATION,
        # MANAGE_DATA, and any operation types added to the protocol after
        # this code was written involve no counterparty to map.
        # BUG FIX: previously the chain fell through and implicitly returned
        # None for unhandled types, raising TypeError in _parse_for_accounts'
        # list flatten.
        return []
    def _parse_for_accounts(self, records):
        """
        Build the list of relevant public keys to search for in Nucleo db.
        Returns dict of format { public_key: user }
        """
        # Parse through records, building list of relevant public keys
        # for each record
        public_key_list_of_lists = [
            self._parse_operation_for_accounts(record)
            for record in records
        ]
        # Flatten the list of lists and force uniqueness with a set
        public_key_list = [
            item for sublist in public_key_list_of_lists for item in sublist
        ]
        public_keys = list(set(public_key_list))
        # Now query the db for relevant accounts and form
        # appropriate dict with format ...
        # { public_key: { 'username': user.username, 'href': link_to_user_profile } }
        accounts = {
            a.public_key: {
                'username': a.user.username,
                'href': reverse('nc:user-detail', kwargs={'slug': a.user.username})
            }
            for a in Account.objects.filter(public_key__in=public_keys)\
                .select_related('user')
        }
        return accounts
## Asset
class AssetRedirectView(generic.RedirectView):
    # Redirect bare asset index requests to the top-assets list, preserving
    # any query string (e.g. display/order_by filters).
    query_string = True
    pattern_name = 'nc:asset-top-list'
class AssetDetailView(mixins.PrefetchedSingleObjectMixin, mixins.IndexContextMixin,
    mixins.ActivityFormContextMixin, mixins.LoginRedirectContextMixin,
    mixins.ViewTypeContextMixin, generic.DetailView):
    """
    Profile page for a single asset, combining db data with the live
    Horizon record (non-native) or Kraken pair info (native XLM).
    """
    model = Asset
    slug_field = 'asset_id'
    template_name = 'nc/asset.html'
    prefetch_related_lookups = ['issuer__user']
    view_type = 'asset'
    def get_context_data(self, **kwargs):
        """
        Override to include asset from Horizon API GET.
        """
        # Use horizon object.assets() with params:
        # https://github.com/StellarCN/py-stellar-base/blob/v0.2/stellar_base/horizon.py
        context = super(AssetDetailView, self).get_context_data(**kwargs)
        # Native XLM has no issuer address in our db
        is_native = (self.object.issuer_address == None)
        context.update({'is_native': is_native})
        record = None
        if not is_native:
            # Include the issuer URL on Horizon
            context['asset_issuer_stellar_href'] = settings.STELLAR_EXPERT_ACCOUNT_URL + self.object.issuer_address
            # Retrieve asset record from Horizon
            horizon = settings.STELLAR_HORIZON_INITIALIZATION_METHOD()
            params = {
                'asset_issuer': self.object.issuer_address,
                'asset_code': self.object.code,
            }
            json = horizon.assets(params=params)
            # Store the asset record from Horizon in context
            # NOTE: On testnet, won't get a record if mainnet issuer id isn't the same as testnet's
            record = None
            if '_embedded' in json and 'records' in json['_embedded'] and json['_embedded']['records']:
                record = json['_embedded']['records'][0]
            # Update existing model asset in our db.
            # Best-effort: always want to return the asset obj no matter what.
            try:
                self.object = self._update_asset(record)
                context.update({'object': self.object})
            except Exception:
                # BUG FIX: was a bare `except:`, which also swallowed
                # system-exiting exceptions (KeyboardInterrupt, SystemExit).
                pass
        else:
            # Include the external exchange pair name for client side
            # JSON parsing
            allowed_pairs = {
                'USD': settings.KRAKEN_XLMUSD_PAIR_NAME,
                'BTC': settings.KRAKEN_XLMBTC_PAIR_NAME
            }
            counter_code = self.request.GET.get('counter_code', 'USD') # Default to USD
            if counter_code not in allowed_pairs:
                # BUG FIX: guard against arbitrary query params raising
                # KeyError (HTTP 500); fall back to USD, consistent with
                # param validation elsewhere in this module.
                counter_code = 'USD'
            exchange_pair_name = allowed_pairs[counter_code]
            context['allowed_pairs'] = allowed_pairs
            context['counter_code'] = counter_code
            context['exchange_pair_name'] = exchange_pair_name
        context['asset'] = record
        # Update the context for trust related info
        context['is_trusting'] = self.object.trusters\
            .filter(id=self.request.user.id).exists()\
            if self.request.user.is_authenticated\
            else False
        context['trusters_count'] = self.object.trusters.count()
        # Update the context for short teaser line of users
        # who trust self.object that self.request.user also follows
        q_trusters_user_follows = self.object.trusters\
            .filter(profile__in=self.request.user.profiles_following.all())\
            .order_by(Lower('username'))\
            if self.request.user.is_authenticated\
            else get_user_model().objects.none()
        context['trusters_user_follows_teaser'] = q_trusters_user_follows[0:2]
        context['trusters_user_follows_teaser_count'] = len(context['trusters_user_follows_teaser'])
        context['trusters_teaser_more_count'] = context['trusters_count'] - context['trusters_user_follows_teaser_count']
        if context['is_trusting']:
            # Don't double-count the requesting user in the "+N more" figure
            context['trusters_teaser_more_count'] -= 1
        # Include accounts user has for account related info (positions, offers)
        if self.request.user.is_authenticated:
            context['accounts'] = self.request.user.accounts.all()
        return context
    def _update_asset(self, record):
        """
        Update model asset instance given fetched asset record from Horizon call.
        Returns updated model_asset.
        NOTE: Technically shouldn't be creating on a GET, but ignore this
        as it might be a good way to incrementally accumulate model assets
        in the beginning.
        """
        model_asset = self.object
        # Use toml attribute of record to update instance from toml file (to fetch)
        toml_url = record['_links']['toml']['href']\
            if record and '_links' in record and 'toml' in record['_links']\
            and 'href' in record['_links']['toml']\
            else None
        model_asset.update_from_toml(toml_url)
        return model_asset
class AssetExchangeTickerListView(mixins.JSONResponseMixin, generic.TemplateView):
    """
    JSON pass-through of Kraken OHLC ticker history for XLM/USD or XLM/BTC.
    """
    template_name = "nc/asset_exchange_ticker_list.html"
    def render_to_response(self, context):
        """
        Returns only JSON. Not meant for actual HTML page viewing.
        In future, transition this to DRF API endpoint.
        """
        return self.render_to_json_response(context)
    def get_context_data(self, **kwargs):
        """
        Context is paginated ticker history for given asset pair
        response from Kraken.
        Requires URL to have query param 'interval' and optional 'since'.
        Response from Kraken has JSON format
        { 'result': { 'XXLMZUSD': [record], 'last': int, 'error': [] } }
        with record = [ <time>, <open>, <high>, <low>, <close>, <vwap>,
        <volume>, <count> ]
        """
        # NOTE: https://www.kraken.com/help/api#get-ohlc-data
        context = {}
        params = self.request.GET.copy()
        # Determine the counter asset to use (to base of XLM)
        allowed_pairs = {
            'USD': settings.KRAKEN_XLMUSD_PAIR_NAME,
            'BTC': settings.KRAKEN_XLMBTC_PAIR_NAME
        }
        counter_code = self.request.GET.get('counter_code', 'USD') # Default to USD
        if counter_code not in allowed_pairs:
            # BUG FIX: guard against arbitrary query params raising KeyError
            # (HTTP 500); fall back to USD, consistent with the param
            # validation elsewhere in this module.
            counter_code = 'USD'
        exchange_pair_name = allowed_pairs[counter_code]
        params.update({ 'pair': exchange_pair_name })
        # Pop the start, end query param if there (to use later when filtering of resp data)
        start = float(params.pop('start')[0]) if 'start' in params else None
        end = float(params.pop('end')[0]) if 'end' in params else None
        # NOTE: Kraken requires query for interval to be in mins and since
        # to be in secs.
        # From getResolution() in asset_chart.js, we pass in (interval, since)
        # in milliseconds, so need to convert
        if 'interval' in params:
            # NOTE(review): integer division (Python 2) truncates to whole
            # minutes -- presumably intentional; confirm client always sends
            # minute-aligned intervals.
            params['interval'] = str(int(params['interval']) / (60 * 1000))
        if 'since' in params:
            params['since'] = str(float(params['since']) / 1000.0)
        full_url = '{0}?{1}'.format(settings.KRAKEN_TICKER_URL, params.urlencode())
        r = requests.get(full_url)
        if r.status_code == requests.codes.ok:
            # NOTE: Each <time> in record is returned by Kraken in seconds
            # so need to convert back to milliseconds for client
            ret = r.json()
            if 'result' in ret and exchange_pair_name in ret['result']:
                ret['result'][exchange_pair_name] = [
                    [record[0] * 1000] + record[1:]
                    for record in ret['result'][exchange_pair_name]
                    if (not start or record[0] * 1000 > start) and (not end or record[0] * 1000 < end)
                ]
            context.update(ret)
        return context
class AssetUpdateView(LoginRequiredMixin, mixins.PrefetchedSingleObjectMixin,
    mixins.IndexContextMixin, mixins.ViewTypeContextMixin, generic.UpdateView):
    """
    Edit metadata of an asset the authenticated user has issued.
    """
    model = Asset
    form_class = forms.AssetUpdateForm
    slug_field = 'asset_id'
    template_name = 'nc/asset_update_form.html'
    prefetch_related_lookups = ['issuer__user']
    view_type = 'asset'
    def get_queryset(self):
        """
        Authenticated user can only update assets they have issued.
        """
        return self.model.objects.filter(issuer__user=self.request.user)
    def get_success_url(self):
        """
        Redirect to an explicitly supplied success URL when present;
        fall back to the asset's own profile page.
        """
        return self.success_url or reverse('nc:asset-detail',
            kwargs={'slug': self.object.asset_id})
class AssetTrustListView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.ViewTypeContextMixin, mixins.ActivityFormContextMixin, generic.ListView):
    # Lists the authenticated user's accounts alongside per-account trustline
    # state for one asset, so the template can offer add/remove-trust actions.
    template_name = 'nc/asset_trust_list.html'
    paginate_by = 50
    view_type = 'asset'
    def get_context_data(self, **kwargs):
        """
        Add the asset plus per-account Stellar address and trust state to
        the context.
        Side effect: deletes db Account records that no longer exist on the
        Stellar network (AccountNotExistError from Horizon).
        """
        context = super(AssetTrustListView, self).get_context_data(**kwargs)
        # Add the asset to the context
        context['object'] = self.object
        context['is_native'] = (self.object.issuer_address == None)
        # Build the addresses dict
        accounts = {
            account.public_key: account
            for account in self.object_list
        }
        addresses = {
            account.public_key: Address(address=account.public_key,
                network=settings.STELLAR_NETWORK)
            for account in self.object_list
        }
        # NOTE: This is expensive! Might have to roll out into JS with loader
        # Need to decouple Address initialization from get() method to work!
        keys_to_pop = []
        for k, a in addresses.iteritems():
            try:
                a.get()
            except AccountNotExistError:
                # If it doesn't exist on the Stellar network, then remove account record from db
                keys_to_pop.append(k)
                acc = accounts[k]
                acc.delete()
        # For deleted accounts that no longer exist on the network, remove from
        # addresses dict before next iteration
        # (collected first so the dict isn't mutated while iterating)
        for k in keys_to_pop:
            addresses.pop(k)
        # Build the trust dict with appropriate booleans: { public_key: {already_trusts: bool, can_change_trust: bool} }
        # NOTE: for now, ignore minimum balance issues
        trust = {}
        for k, a in addresses.iteritems():
            already_trusts = False
            can_change_trust = True
            if k == self.object.issuer_address:
                # Then this account is the asset issuer so implicitly trusts
                already_trusts = True
                can_change_trust = False
            else:
                # Otherwise check balances for account to see if asset is there
                for b in a.balances:
                    is_the_asset = (b.get('asset_issuer', None) == self.object.issuer_address\
                        and b.get('asset_code', None) == self.object.code)
                    if is_the_asset:
                        already_trusts = True
                        if float(b['balance']) > 0.0:
                            # Can't remove trust if have a balance of this asset
                            can_change_trust = False
            trust[k] = {'already_trusts': already_trusts, 'can_change_trust': can_change_trust}
        context['addresses'] = addresses
        context['trust'] = trust
        return context
    def get_queryset(self):
        """
        Queryset is this user's accounts but store the Asset instance as well.
        """
        self.object = get_object_or_404(Asset, asset_id=self.kwargs['slug'])
        return self.request.user.accounts.all()
class AssetTrustedByListView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.ViewTypeContextMixin, generic.ListView):
    """
    Lists the users trusting a given asset, annotated with whether the
    requesting user already follows each of them.
    """
    template_name = 'nc/asset_trusted_by_list.html'
    paginate_by = 50
    view_type = 'asset'
    def get_context_data(self, **kwargs):
        """
        Add the asset and whether it is the native lumen to the context.
        """
        context = super(AssetTrustedByListView, self).get_context_data(**kwargs)
        context.update({
            'object': self.object,
            'is_native': (self.object.issuer_address == None),
        })
        return context
    def get_queryset(self):
        """
        Queryset is the asset's trusters; the Asset instance itself is
        stored on the view for use in get_context_data.
        """
        self.object = get_object_or_404(Asset, asset_id=self.kwargs['slug'])
        following_profiles = self.request.user.profiles_following.all()
        is_following_ids = [
            u.id for u in get_user_model().objects.filter(profile__in=following_profiles)
        ]
        # Annotate each truster with a boolean flag for the template
        is_following_flag = Case(
            When(id__in=is_following_ids, then=Value(True)),
            default=Value(False),
            output_field=BooleanField(),
        )
        return self.object.trusters\
            .annotate(is_following=is_following_flag)\
            .order_by(Lower('first_name'))\
            .prefetch_related('profile')
class AssetTopListView(mixins.IndexContextMixin, mixins.ViewTypeContextMixin,
    mixins.LoginRedirectContextMixin, mixins.ActivityFormContextMixin,
    mixins.DepositAssetsContextMixin, mixins.UserFollowerRequestsContextMixin,
    mixins.UserPortfolioContextMixin, generic.ListView):
    # Ranked list of assets from the StellarTerm ticker, sortable by
    # activity score, price, or 24h change in USD/XLM.
    template_name = "nc/asset_top_list.html"
    paginate_by = 50
    user_field = 'request.user'
    view_type = 'asset'
    def get_context_data(self, **kwargs):
        """
        Add the assets plus page related data.
        """
        context = super(AssetTopListView, self).get_context_data(**kwargs)
        # Set ticker assets (populated by get_queryset, which runs first)
        context['ticker_assets'] = self.ticker_assets
        context['allowed_displays'] = self.allowed_displays
        context['display'] = self.display
        context['counter_code'] = self.counter_code
        context['order_by'] = self.order_by
        # Set list order rank of asset on top of current page
        page_obj = context['page_obj']
        context['page_top_number'] = page_obj.paginator.per_page * (page_obj.number - 1) + 1
        return context
    def get_queryset(self):
        """
        Queryset is assets with asset_id in StellarTerm ticker list, sorted
        by either StellarTerm activityScore, price in USD/XLM, change 24h in USD/XLM.
        Query params have key, val options
        { display: 'activityScore', 'price_USD', 'price_XLM', 'change24h_USD',
            or 'change24h_XLM'
        order_by: 'asc' or 'desc' }
        """
        # Display types in query param to give flexibility
        self.allowed_displays = [ 'activityScore', 'price_USD',
            'change24h_USD', 'price_XLM', 'change24h_XLM' ]
        self.display = self.request.GET.get('display')
        if self.display not in self.allowed_displays:
            self.display = self.allowed_displays[0] # default to activityScore
        # Get the counter code for reference currency of price, change24h
        self.allowed_counter_codes = [ 'USD', 'XLM' ]
        self.counter_code = self.request.GET.get('counter_code')
        if self.counter_code not in self.allowed_counter_codes:
            self.counter_code = self.allowed_counter_codes[0] # default to USD
        # Ordering type in query param to give flexibility of ascending v. descending
        self.allowed_orderings = [ 'desc', 'asc' ]
        self.order_by = self.request.GET.get('order_by')
        if self.order_by not in self.allowed_orderings:
            self.order_by = self.allowed_orderings[0] # default to descending
        # Fetch the StellarTerm ticker json and store
        r = requests.get(settings.STELLARTERM_TICKER_URL)
        json = r.json()
        ticker_assets = json.get('assets', [])
        # NOTE: Need to get USD/XLM 24 hour change from _meta key (not in XLM-native asset)
        xlm_change24h_USD = None
        if '_meta' in json and 'externalPrices' in json['_meta']\
            and 'USD_XLM_change' in json['_meta']['externalPrices']:
            xlm_change24h_USD = json['_meta']['externalPrices']['USD_XLM_change']
        # Clean the ticker assets to only include those that have
        # the display attribute (XLM-native is always kept; its USD change
        # comes from _meta above)
        cleaned_ticker_assets = [
            a for a in ticker_assets
            if a['id'] == 'XLM-native' or (self.display in a and a[self.display] != None)
        ]
        # Parse to get asset_ids for queryset filter
        top_asset_ids = [ a['id'] for a in cleaned_ticker_assets ]
        # Store the dict version of ticker assets
        self.ticker_assets = { a['id']: a for a in cleaned_ticker_assets }
        # Aggregate the assets current user is trusting for annotation
        is_trusting_asset_ids = []
        if self.request.user.is_authenticated:
            is_trusting_asset_ids = [
                a.asset_id for a in self.request.user.assets_trusting.all()
            ]
        # Order the qset
        # TODO: Figure out how to annotate qset properly versus this loop
        qset = Asset.objects.filter(asset_id__in=top_asset_ids)\
            .annotate(is_trusting=Case(
                When(asset_id__in=is_trusting_asset_ids, then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            ))
        assets = list(qset)
        for a in assets:
            # Copy ticker metrics onto the model instances for template use
            for display in self.allowed_displays:
                if a.asset_id == 'XLM-native' and display == 'change24h_USD':
                    # Handling the XLM-native USD % change edge case
                    setattr(a, display, xlm_change24h_USD)
                else:
                    setattr(a, display, self.ticker_assets[a.asset_id].get(display))
        # NOTE(review): sort key may be None for some assets; relies on
        # Python 2 ordering of None against numbers -- confirm before any
        # Python 3 migration.
        assets.sort(key=lambda a: getattr(a, self.display), reverse=(self.order_by == "desc"))
        return assets
## Leaderboard
class LeaderboardRedirectView(generic.RedirectView):
    # Redirect bare leaderboard requests to the list view, preserving any
    # query string (e.g. the performance date span).
    query_string = True
    pattern_name = 'nc:leaderboard-list'
class LeaderboardListView(mixins.IndexContextMixin, mixins.ViewTypeContextMixin,
    mixins.LoginRedirectContextMixin, mixins.DepositAssetsContextMixin,
    mixins.UserFollowerRequestsContextMixin, mixins.UserPortfolioContextMixin,
    generic.ListView):
    # Top-100 users ranked by portfolio performance over a selectable
    # date span (1d/1w/1m/3m/6m/1y).
    template_name = "nc/leaderboard_list.html"
    paginate_by = 50
    view_type = 'leaderboard'
    def get_context_data(self, **kwargs):
        """
        Add the users plus page related data.
        """
        context = super(LeaderboardListView, self).get_context_data(**kwargs)
        # TODO: Finish up the three pronged list view clumping AND
        # spin off performance header into a mixin with self.date_span -> self.performance_date_span
        # Store the allowed displays
        context['allowed_displays'] = [ 'usd_value', 'performance_{0}'.format(self.date_span) ]
        context['display'] = 'performance_{0}'.format(self.date_span)
        # Add date span and associated performance attribute to use to the context
        context['date_span'] = self.date_span
        context['allowed_date_orderings'] = self.allowed_date_orderings
        # Set rank of asset on top of current page
        page_obj = context['page_obj']
        context['page_top_number'] = page_obj.paginator.per_page * (page_obj.number - 1) + 1
        return context
    def get_queryset(self):
        """
        Queryset is users sorted by performance rank given date span from query param.
        Prefetch assets_trusting to also preview portfolio assets with each list item.
        Default date span is 24h.
        """
        # Ordering in query param to give flexibility of performance_1w, performance_1m, etc.
        # Only return top 100 users
        self.allowed_date_orderings = [ '1d', '1w', '1m', '3m', '6m', '1y' ]
        self.date_span = self.request.GET.get('span')
        if self.date_span not in self.allowed_date_orderings:
            self.date_span = self.allowed_date_orderings[0] # default to 1d
        self.performance_attr = 'performance_{0}'.format(self.date_span)
        order = 'profile__portfolio__performance_{0}'.format(self.date_span)
        # Aggregate the users current user is following and has requested to follow
        # for annotation
        is_following_ids = []
        requested_to_follow_ids = []
        if self.request.user.is_authenticated:
            is_following_ids = [
                u.id for u in get_user_model().objects\
                    .filter(profile__in=self.request.user.profiles_following.all())
            ]
            requested_to_follow_ids = [
                r.user.id for r in self.request.user.requests_to_follow.all()
            ]
        # The xlm_value filter excludes near-empty portfolios (below 5x the
        # funding quota * minimum balance) from the leaderboard.
        return get_user_model().objects\
            .annotate(is_following=Case(
                When(id__in=is_following_ids, then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            ))\
            .annotate(requested_to_follow=Case(
                When(id__in=requested_to_follow_ids, then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            ))\
            .prefetch_related('assets_trusting', 'profile__portfolio')\
            .filter(profile__portfolio__xlm_value__gt=settings.STELLAR_CREATE_ACCOUNT_QUOTA * float(settings.STELLAR_CREATE_ACCOUNT_MINIMUM_BALANCE) * 5.0)\
            .order_by(F(order).desc(nulls_last=True))[:100]
## Feed
class FeedRedirectView(LoginRequiredMixin, generic.RedirectView):
    # Redirect bare feed requests to the activity feed, preserving any
    # query string.
    query_string = True
    pattern_name = 'nc:feed-activity'
### News
class FeedNewsListView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.DepositAssetsContextMixin, mixins.ViewTypeContextMixin,
    mixins.UserFollowerRequestsContextMixin, mixins.UserPortfolioContextMixin,
    mixins.JSONResponseMixin, generic.ListView):
    """
    Stellar (XLM) news feed proxied from the CryptoPanic posts API, served
    as HTML or, with ?format=json, as a paginated JSON list.
    """
    template_name = "nc/feed_news_list.html"
    view_type = 'feed'
    def render_to_response(self, context):
        """
        Look for a 'format=json' GET argument to determine if response
        should be HTML or JSON.
        """
        if self.request.GET.get('format') == 'json':
            return self.render_to_json_response(context)
        return super(FeedNewsListView, self).render_to_response(context)
    def get_context_data(self, **kwargs):
        """
        Add previous, next urls and prefetched user object for profile.
        """
        context = super(FeedNewsListView, self).get_context_data(**kwargs)
        if self.request.GET.get('format') == 'json':
            # JSON clients only need the raw result list
            context = { 'results': context['object_list'] }
        # Set the next link url (None when CryptoPanic has no further page)
        if self.next_page:
            context['next'] = '{0}?page={1}&format=json'.format(self.request.path, self.next_page)
        else:
            context['next'] = None
        return context
    def get_queryset(self):
        """
        Queryset is paginated news list from CryptoPanic.
        """
        query = self.request.GET.copy()
        # Fetch the news items from CryptoPanic
        extra = {
            'auth_token': settings.CRYPTOPANIC_API_KEY,
            'currencies': 'XLM',
            'public': True
        }
        if 'page' in query:
            extra.update({ 'page': query.get('page', '') })
        query.update(extra)
        full_url = '{0}?{1}'.format(settings.CRYPTOPANIC_STELLAR_POST_URL, query.urlencode())
        response = requests.get(full_url)
        results = []
        page_after = None
        if response.status_code == requests.codes.ok:
            payload = response.json()
            next_url = payload.get('next', None)
            if next_url:
                page_after = QueryDict(urlparse(next_url).query).get('page', None)
            results = payload['results']
        # Store the next page number for get_context_data
        self.next_page = page_after
        # Return the results
        return results
### Activity
class FeedActivityListView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.FeedActivityContextMixin, mixins.DepositAssetsContextMixin,
    mixins.UserFollowerRequestsContextMixin, mixins.UserPortfolioContextMixin,
    mixins.ViewTypeContextMixin, generic.TemplateView):
    # Activity timeline page; the feed data itself is supplied by
    # FeedActivityContextMixin using the settings-configured stream feed.
    feed_type = settings.STREAM_TIMELINE_FEED
    template_name = "nc/feed_activity_list.html"
    user_field = 'request.user'
    view_type = 'feed'
class FeedActivityCreateView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.ViewTypeContextMixin, generic.CreateView):
    """
    Records a feed activity via FeedActivityCreateForm. The form is not a
    ModelForm: activity data is not persisted to our db.
    """
    form_class = forms.FeedActivityCreateForm
    template_name = "nc/feed_activity_form.html"
    view_type = 'feed'
    def get_form_kwargs(self):
        """
        Pass the request and any posted success_url through to the form.
        Drop the 'instance' kwarg CreateView injects, since the form is
        not a ModelForm.
        """
        form_kwargs = super(FeedActivityCreateView, self).get_form_kwargs()
        form_kwargs['request'] = self.request
        form_kwargs['success_url'] = self.request.POST.get('success_url')
        del form_kwargs['instance']
        return form_kwargs
    def get_success_url(self):
        """
        Redirect to an explicitly determined success URL when present;
        otherwise fall back to the actor's profile page for immediate
        feedback.
        """
        return self.success_url or reverse('nc:user-detail',
            kwargs={'slug': self.request.user.username})
    def form_valid(self, form):
        """
        The saved form returns a dict that may carry a success_url parsed
        from the retrieved Stellar transaction; honor it when present.
        """
        self.object = form.save()
        self.success_url = self.object.get('success_url', self.success_url)
        return HttpResponseRedirect(self.get_success_url())
## Send
class SendRedirectView(LoginRequiredMixin, generic.RedirectView):
    # Redirect bare send requests to the send page, preserving any query string.
    query_string = True
    pattern_name = 'nc:send-detail'
class SendDetailView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.ViewTypeContextMixin, mixins.ActivityFormContextMixin, generic.TemplateView):
    # Static page for sending payments; all logic lives client-side and in
    # the context mixins.
    template_name = "nc/send.html"
    view_type = 'send'
## Receive
class ReceiveRedirectView(LoginRequiredMixin, generic.RedirectView):
    # Redirect bare receive requests to the receive page, preserving any
    # query string.
    query_string = True
    pattern_name = 'nc:receive-detail'
class ReceiveDetailView(LoginRequiredMixin, mixins.IndexContextMixin,
    mixins.ViewTypeContextMixin, mixins.ActivityFormContextMixin, generic.TemplateView):
    # Static page for receiving payments; all logic lives client-side and in
    # the context mixins.
    template_name = "nc/receive.html"
    view_type = 'receive'
# TODO: For way later down the line in the roadmap.
# Refactor this so it's in a separate 'api' Django app
# API Viewsets
# Stellar Notifier views
## Webhook POST
@method_decorator(csrf_exempt, name='dispatch')
class ActivityCreateView(mixins.AjaxableResponseMixin, generic.View):
"""
Creates an activity feed record given the JSON POST data
received from Stellar Notifier listener watching account subscription.
NOTE: Currently only listening for payments from outside sources.
TODO: Eventually spin off FeedActivityCreateForm functionality into this view.
"""
def _is_valid(self, request):
"""
Verifies auth token and signature header, plus whether request body
has tx_hash and created times.
NOTE: Headers will have keys "X-Request-ED25519-Signature" and
"Authorization: Token <your_token>".
"""
# auth_token = parse.parse(settings.STELLAR_NOTIFIER_AUTHORIZATION_FORMAT,
# request.META.get(settings.STELLAR_NOTIFIER_AUTHORIZATION_HEADER, 'Token '))
# Verify body tx info exists
self.tx_hash = request.body["transaction"]["hash"] if "transaction" in request.body and "hash" in request.body["transaction"] else None
self.created_at = request.body["transaction"]["created_at"] if "transaction" in request.body and "created_at" in request.body["created_at"] else None
if not self.tx_hash or not self.created_at:
return False
return True
def _has_been_added(self):
"""
Verifies whether activity feed already has the transaction in it.
"""
resp = stream_client.get_activities(foreign_id_times=[
(self.tx_hash, dateutil.parser.parse(self.created_at))
])
if len(resp["results"]) > 0:
return True
return False
def post(self, request, *args, **kwargs):
print request
print request.body
return HttpResponse()
#if self._is_valid(request) and not self._has_been_added():
# TODO: USE FeedActivityCreateForm! FEED ACTIVITY FORMATTING ETC ETC
# print request.body
# return HttpResponse()
#else:
# return HttpResponseNotFound()
# Worker environment views
## Cron job tasks (AWS worker tier)
@method_decorator(csrf_exempt, name='dispatch')
class PerformanceCreateView(generic.View):
    """
    Creates records of portfolio performance for each user every day. Portfolio
    consists of all accounts associated with user profile.

    AWS EB worker tier cron job POSTs to url endpoint associated with
    this view.

    NOTE: Python 2 era code (print statements, dict.iteritems).
    """
    def _assemble_asset_prices(self):
        """
        Assemble a dictionary { asset_id: xlm_price } of current
        market prices in xlm of all assets in our db.

        NOTE(review): for the native asset this actually stores the
        USD price of XLM from StellarTerm, not an XLM price -- confirm
        downstream consumers expect that mix of units.
        """
        asset_prices = {}
        for model_asset in Asset.objects.all():
            # NOTE: Expensive! One Horizon/HTTP round trip per asset.
            asset = StellarAsset(model_asset.code, model_asset.issuer_address)
            xlm = StellarAsset.native()
            if asset.is_native():
                # Then a is native so retrieve current price in USD
                # from StellarTerm
                r = requests.get(settings.STELLARTERM_TICKER_URL)
                json = r.json()
                usd_price = float(json['_meta']['externalPrices']['USD_XLM'])
                asset_prices[model_asset.asset_id] = usd_price
            else:
                # Get the orderbook. Portfolio value is market price user
                # can sell asset at for XLM.
                # Retrieve asset record from Horizon
                horizon = settings.STELLAR_HORIZON_INITIALIZATION_METHOD()
                params = {
                    'selling_asset_type': asset.type,
                    'selling_asset_code': asset.code,
                    'selling_asset_issuer': asset.issuer,
                    'buying_asset_type': 'native',
                    'buying_asset_code': xlm.code
                }
                json = horizon.order_book(params=params)
                # Use the first bid price if there is one; 0.0 means
                # "no market" and the asset contributes nothing.
                price = 0.0
                if 'bids' in json and len(json['bids']) > 0:
                    price = float(json['bids'][0]['price'])
                asset_prices[model_asset.asset_id] = price
        return asset_prices

    def _record_portfolio_values(self, asset_prices):
        """
        Use the given asset_prices dictionary to record current
        portfolio values for all accounts in our db.
        """
        Portfolio.objects.update_timeseries('rawdata',
            partial(portfolio_data_collector, asset_prices=asset_prices))

    def _recalculate_performance_stats(self):
        """
        Recalculate performance stats for all profile portfolios in our db.
        """
        # TODO: Expensive! Figure out how to implement this with aggregates so not looping over queries
        for portfolio in Portfolio.objects.all():
            # Run queries where filter on created > now - timedelta(1d, 1w, etc.) and
            # not equal to the default of unavailable
            # take the last() off that qset. Use USD val.
            # 1d, 1w, 1m, 3m, 6m, 1y
            now = timezone.now()
            # NOTE: qset.last() gives None if qset is empty. otherwise, last entry. Using
            # last because TimeSeriesModel has ordering '-created'.
            # Adding in extra min time series interval to get attr_oldest qset
            # due to cron job processing time (to be safe).
            portfolio_latest_rawdata = portfolio.rawdata.first()
            attr_oldest = {
                'performance_1d': portfolio.rawdata.filter(created__gte=now-(datetime.timedelta(days=1) + RawPortfolioData.TIMESERIES_INTERVAL))\
                    .exclude(usd_value=RawPortfolioData.NOT_AVAILABLE).last(),
                'performance_1w': portfolio.rawdata.filter(created__gte=now-(datetime.timedelta(days=7) + RawPortfolioData.TIMESERIES_INTERVAL))\
                    .exclude(usd_value=RawPortfolioData.NOT_AVAILABLE).last(),
                'performance_1m': portfolio.rawdata.filter(created__gte=now-(datetime.timedelta(days=30) + RawPortfolioData.TIMESERIES_INTERVAL))\
                    .exclude(usd_value=RawPortfolioData.NOT_AVAILABLE).last(),
                'performance_3m': portfolio.rawdata.filter(created__gte=now-(datetime.timedelta(days=90) + RawPortfolioData.TIMESERIES_INTERVAL))\
                    .exclude(usd_value=RawPortfolioData.NOT_AVAILABLE).last(),
                'performance_6m': portfolio.rawdata.filter(created__gte=now-(datetime.timedelta(days=180) + RawPortfolioData.TIMESERIES_INTERVAL))\
                    .exclude(usd_value=RawPortfolioData.NOT_AVAILABLE).last(),
                'performance_1y': portfolio.rawdata.filter(created__gte=now-(datetime.timedelta(days=365) + RawPortfolioData.TIMESERIES_INTERVAL))\
                    .exclude(usd_value=RawPortfolioData.NOT_AVAILABLE).last(),
            }
            for attr, oldest_data in attr_oldest.iteritems():
                # Performance is the relative change in USD value over the window,
                # only when both endpoints have real (available) values.
                if oldest_data and oldest_data.usd_value != RawPortfolioData.NOT_AVAILABLE\
                    and portfolio_latest_rawdata.usd_value != RawPortfolioData.NOT_AVAILABLE:
                    performance = (portfolio_latest_rawdata.usd_value - oldest_data.usd_value) / oldest_data.usd_value
                else:
                    performance = None
                setattr(portfolio, attr, performance)
            # Also set the latest balance values for the portfolio for easy reference
            if portfolio_latest_rawdata:
                portfolio.usd_value = portfolio_latest_rawdata.usd_value
                portfolio.xlm_value = portfolio_latest_rawdata.xlm_value
            # Then save the portfolio
            portfolio.save()

    def _update_rank_values(self):
        """
        Update rank values of top 100 users by performance over last day.
        Reset all existing rank values first in update to None.
        """
        # Reset all existing first so can easily just start from scratch in
        # storing rank list.
        Portfolio.objects.exclude(rank=None).update(rank=None)
        # Iterate through top 100 on yearly performance, and store the rank.
        # NOTE: Only show people on leaderboard that have added more than Nucleo allocated funds to profile
        # TODO: Expensive! Incorporate django_bulk_update and create custom util.TimeSeries classes
        for i, p in enumerate(list(Portfolio.objects\
            .filter(xlm_value__gt=settings.STELLAR_CREATE_ACCOUNT_QUOTA * float(settings.STELLAR_CREATE_ACCOUNT_MINIMUM_BALANCE) * 5.0)\
            .exclude(performance_1d=None)\
            .order_by('-performance_1d')[:100])):
            p.rank = i + 1
            p.save()

    def post(self, request, *args, **kwargs):
        """Cron entry point; only runs in the worker environment."""
        # If worker environment, then can process cron job
        if settings.ENV_NAME == 'work':
            # Keep track of the time cron job takes for performance reasons
            cron_start = timezone.now()
            # Get asset prices
            asset_prices = self._assemble_asset_prices()
            # Bulk create portfolio value time series records for all accounts in db
            self._record_portfolio_values(asset_prices)
            # For all profiles in db, recalculate performance stats
            self._recalculate_performance_stats()
            # Update rank values of top performing users.
            self._update_rank_values()
            # Print out length of time cron took
            cron_duration = timezone.now() - cron_start
            print 'Performance create cron job took {0} seconds for {1} assets and {2} portfolios'.format(
                cron_duration.total_seconds(),
                Asset.objects.count(),
                Portfolio.objects.count()
            )
            return HttpResponse()
        else:
            return HttpResponseNotFound()
@method_decorator(csrf_exempt, name='dispatch')
class AssetTomlUpdateView(generic.View):
    """
    Update asset information from fetched toml files on domain of asset.

    AWS EB worker tier cron job POSTs to url endpoint associated with
    this view.
    """
    def _update_assets_from_tomls(self):
        """
        For each asset in our db, update details using toml files.
        """
        horizon = settings.STELLAR_HORIZON_INITIALIZATION_METHOD()
        # Query the database for all Asset instances and then
        # update from toml files. Exclude XLM asset instance.
        asset_qs = Asset.objects.exclude(issuer_address=None)
        count = 0
        for model_asset in asset_qs:
            # NOTE: this is expensive! One Horizon request per asset.
            params = {
                'asset_issuer': model_asset.issuer_address,
                'asset_code': model_asset.code,
            }
            json = horizon.assets(params=params)
            # Store the asset record from Horizon in context
            # NOTE: On testnet, won't get a record if mainnet issuer id isn't the same as testnet's
            record = None
            if '_embedded' in json and 'records' in json['_embedded'] and json['_embedded']['records']:
                record = json['_embedded']['records'][0]
            # Use toml attribute of record to update instance from toml file (to fetch)
            toml_url = record['_links']['toml']['href']\
                if record and '_links' in record and 'toml' in record['_links']\
                and 'href' in record['_links']['toml']\
                else None
            try:
                model_asset.update_from_toml(toml_url)
                count += 1
            except:
                # Best-effort: a bad/missing toml for one asset must not
                # abort the whole cron run.
                print 'Error occurred fetching {0} for {1}'.format(toml_url, model_asset)
        print 'Updated {0} assets from .toml files'.format(count)

    def post(self, request, *args, **kwargs):
        """Cron entry point; only runs in the worker environment."""
        # If worker environment, then can process cron job
        if settings.ENV_NAME == 'work':
            # Keep track of the time cron job takes for performance reasons
            cron_start = timezone.now()
            # For all asets in db, refresh attributes from toml
            self._update_assets_from_tomls()
            # Print out length of time cron took
            cron_duration = timezone.now() - cron_start
            print 'Asset toml update cron job took {0} seconds for {1} assets'.format(
                cron_duration.total_seconds(),
                Asset.objects.count()
            )
            return HttpResponse()
        else:
            return HttpResponseNotFound()
| StarcoderdataPython |
3252542 | import ascii_chess
from ascii_chess.ascii_board import *
from ascii_chess.chess_rules import parse_square
def test_is_functional():
    """Smoke test: render boards/pieces and place pawns without crashing.

    Purely visual (prints ASCII art); Python 2 print statements.
    """
    # TODO: include assertions
    side = 10
    board = ChessBoard(side, 0, 0.7)
    print board
    # Render every piece glyph in the default set with its dimensions.
    for p in ascii_pieces:
        pp = ascii_pieces[p]
        print pp
        print p, pp.get_height(), pp.get_width()
    black_pieces = ChessPiecesSet(side, 1)
    for p in black_pieces.pieces.values():
        print p
        print p.get_height(), p.get_width()
    white_pieces = ChessPiecesSet(side, 0.2)
    for p in white_pieces.pieces.values():
        print p
        print p.get_height(), p.get_width()
    # Place two pawn pairs facing each other, then re-render.
    board.add_piece(white_pieces[PAWN], *parse_square('d4'))
    board.add_piece(white_pieces[PAWN], *parse_square('e4'))
    board.add_piece(black_pieces[PAWN], *parse_square('d5'))
    board.add_piece(black_pieces[PAWN], *parse_square('e5'))
    print board
    # Reset to the standard starting position.
    board.set_position(white_pieces, black_pieces)
    print board
| StarcoderdataPython |
4811446 | import re
from itertools import chain
import collections
import math
from nltk.translate.bleu_score import sentence_bleu, SmoothingFunction, ngrams, brevity_penalty
from collections import Counter
from fractions import Fraction
from .wer import *
import numpy as np
from rouge import Rouge
import logging
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger(__name__)
def words_dist(cum_probs, texts):
    """Distribution of word ids over four cumulative-frequency bands.

    ``cum_probs[i]`` is the cumulative unigram probability of word ids
    ``0..i``. Bands are split at cumulative mass 0.4 / 0.7 / 0.9; each
    word occurrence in ``texts`` is assigned to the band its id falls in.
    Returns a length-4 numpy array of band frequencies summing to 1.
    """
    def find_boundaries(cum_prob):
        # First index whose cumulative mass reaches each threshold.
        thresholds = [0.4, 0.7, 0.9]
        boundaries = []
        pos = 0
        for threshold in thresholds:
            while cum_prob[pos] < threshold:
                pos += 1
            boundaries.append(pos)
        return boundaries

    def band_of(word, boundaries):
        # Band index = number of boundaries the word id is >= to.
        for band, boundary in enumerate(boundaries):
            if word < boundary:
                return band
        return len(boundaries)

    boundaries = find_boundaries(cum_probs)
    counts = [0, 0, 0, 0]
    for text in texts:
        for word in text:
            counts[band_of(word, boundaries)] += 1
    return np.array(counts) / sum(counts)
def repetition(hyps):
    """Fraction of hypotheses whose tail is a degenerate n-gram repetition.

    A hypothesis counts as repeated when its final n-gram (n up to 100)
    occurs back-to-back at the tail: more than once for n >= 3, or more
    than 50 times for shorter n-grams.
    """
    max_n = 100
    n_repeated_examples = 0
    for obj in hyps:
        gen = obj
        # Work on the reversed sequence so the "tail" becomes the prefix.
        rev_gen = list(reversed(gen))
        last_n_repeats = [0] * max_n
        for n in range(1, max_n + 1):
            # Count consecutive copies of the first n tokens of rev_gen.
            n_repeat = 1
            while len(rev_gen[n * n_repeat:n * (n_repeat + 1)]) == n and \
                    rev_gen[n * n_repeat:n * (n_repeat + 1)] == rev_gen[:n]:
                n_repeat += 1
            last_n_repeats[n - 1] = n_repeat
        # n-gram length with the most tail repeats wins.
        max_repeated_n = max(range(max_n), key=lambda x: last_n_repeats[x])
        if last_n_repeats[max_repeated_n] > 1 and (max_repeated_n + 1 >= 3 or last_n_repeats[max_repeated_n] > 50):
            n_repeated_examples += 1
    return n_repeated_examples / len(hyps)
def pad_sequence(sequence, n, pad_left=False, pad_right=False,
                 left_pad_symbol=None, right_pad_symbol=None):
    """Return an iterator over *sequence* padded for n-gram extraction.

    When requested, ``n - 1`` copies of the corresponding pad symbol are
    prepended and/or appended, e.g.::

        list(pad_sequence([1, 2, 3], 2, pad_left=True, pad_right=True,
                          left_pad_symbol='<s>', right_pad_symbol='</s>'))
        -> ['<s>', 1, 2, 3, '</s>']

    :param sequence: the source sequence to be padded
    :param n: the degree of the ngrams
    :param pad_left: whether the ngrams should be left-padded
    :param pad_right: whether the ngrams should be right-padded
    :param left_pad_symbol: symbol used for left padding (default None)
    :param right_pad_symbol: symbol used for right padding (default None)
    :rtype: iterator
    """
    prefix = (left_pad_symbol,) * (n - 1) if pad_left else ()
    suffix = (right_pad_symbol,) * (n - 1) if pad_right else ()
    return chain(prefix, iter(sequence), suffix)
def ngrams(sequence, n, pad_left=False, pad_right=False,
           left_pad_symbol=None, right_pad_symbol=None):
    """Yield the n-grams of *sequence* as tuples.

    Example::

        list(ngrams([1, 2, 3, 4, 5], 3)) -> [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
        list(ngrams([1, 2, 3], 2, pad_right=True)) -> [(1, 2), (2, 3), (3, None)]

    BUG FIX: the previous implementation primed its window by calling
    ``next()`` on the (padded) iterator ``n - 1`` times; under PEP 479
    a sequence shorter than ``n - 1`` made that StopIteration escape as
    a RuntimeError. This version simply yields nothing when the padded
    sequence is shorter than ``n``.

    :param sequence: the source sequence to be converted into ngrams
    :param n: the degree of the ngrams
    :param pad_left: whether the ngrams should be left-padded
    :param pad_right: whether the ngrams should be right-padded
    :param left_pad_symbol: symbol used for left padding (default None)
    :param right_pad_symbol: symbol used for right padding (default None)
    :rtype: generator of tuples
    """
    it = iter(sequence)
    # Padding inlined (equivalent to pad_sequence) so the generator is
    # self-contained.
    if pad_left:
        it = chain((left_pad_symbol,) * (n - 1), it)
    if pad_right:
        it = chain(it, (right_pad_symbol,) * (n - 1))
    # Sliding window of the last n items; deque(maxlen=n) drops the
    # oldest item automatically.
    window = collections.deque(maxlen=n)
    for item in it:
        window.append(item)
        if len(window) == n:
            yield tuple(window)
def rogue(hyps, refs, avg=True):
    """ROUGE-1/2/L F-scores (via the third-party ``rouge`` package).

    Token lists are joined into whitespace-separated strings first.
    Returns [rouge-1 F, rouge-2 F, rouge-L F].

    NOTE(review): the indexing below assumes avg=True (dict result);
    with avg=False get_scores returns a list and this would fail -- confirm.
    """
    hyps = [' '.join(map(str, i)) for i in hyps]
    refs = [' '.join(map(str, i)) for i in refs]
    rouge = Rouge()
    scores = rouge.get_scores(hyps, refs, avg=avg)
    return [scores['rouge-1']['f'], scores['rouge-2']['f'], scores['rouge-l']['f']]
def self_wer(reference, hypothesis):
    """Mean word error rate over paired (reference, hypothesis) sequences.

    Uses ``wer`` from the local ``.wer`` module; pairs are zipped, so
    extra items in the longer list are ignored.
    """
    score = 0.0
    for refer, hypo in zip(reference, hypothesis):
        score += wer(refer, hypo)
    return score / len(reference)
def distinct_n_sentence_level(sentence, n):
    """Distinct-N for one sentence: unique n-grams / sentence length.

    :param sentence: a list of words.
    :param n: int, ngram order.
    :return: float metric value (0.0 for an empty sentence).
    """
    total_words = len(sentence)
    if total_words == 0:
        return 0.0  # Prevent a zero division
    unique_ngrams = set(ngrams(sentence, n))
    return len(unique_ngrams) / total_words
def bleu_upto(reference, hypothesis, n_gram):
    """Corpus-average sentence BLEU for every order 1..n_gram.

    Returns a list [BLEU-1, BLEU-2, ..., BLEU-n_gram].
    """
    res = []
    for i in range(1,n_gram+1):
        res.append(calc_bleu_ngram(reference, hypothesis, i))
    return res
def calc_bleu_ngram(reference, hypothesis, n_gram):
    """Average single-reference sentence BLEU-n over paired sequences.

    Uniform weights 1/n per order; NLTK smoothing method1 avoids zero
    precisions on short hypotheses.
    """
    score = 0.0
    ratio = 1 / n_gram
    cc = SmoothingFunction()
    for refer, hypo in zip(reference, hypothesis):
        # refer.index()
        score += sentence_bleu([refer], hypo, (ratio,) * n_gram, cc.method1)
    return score / len(reference)
def distinct_upto(sentences, n):
    """Corpus distinct-k for every k in 1..n.

    Sentences with 5 or fewer tokens are excluded before scoring.
    Returns [distinct-1, ..., distinct-n].
    """
    filtered = [sentence for sentence in sentences if len(sentence) > 5]
    return [distinct_n_corpus_level(filtered, order) for order in range(1, n + 1)]
def distinct_n_corpus_level(sentences, n):
    """Average sentence-level distinct-N over the corpus.

    :param sentences: a list of sentences (each a list of words).
    :param n: int, ngram order.
    :return: float, the mean distinct-N value.
    """
    total = 0.0
    for sentence in sentences:
        total += distinct_n_sentence_level(sentence, n)
    return total / len(sentences)
def bleu_single(reference,hypothesis,n_gram):
    """Sentence BLEU-n for one reference/hypothesis pair (smoothing method1)."""
    ratio=1/n_gram
    cc = SmoothingFunction()
    return sentence_bleu([reference],hypothesis,(ratio,)*n_gram,cc.method1)
def bleu_multiples(references,hypothesis,n_gram):
    """Mean multi-reference sentence BLEU-n over all hypotheses.

    NOTE(review): unlike the other BLEU helpers this one passes no
    smoothing function -- zero n-gram matches yield warnings/zeros.
    """
    ratio=1/n_gram
    score = 0
    cnt = 0
    for i in hypothesis:
        score += sentence_bleu(references,i,(ratio,)*n_gram)
        cnt += 1
    return score / cnt
def count(x, n_gram):
    """Count padded n-grams across all token lists in *x*.

    Each sequence is padded on both sides with ``n_gram - 1`` sentinel
    tokens (-1) so boundary tokens appear in a full set of windows.
    Counter keys are ``str()`` of the n_gram-length token slice.

    BUG FIX: the loop previously ran to ``len(temp) + n_gram - 1``,
    slicing past the end of the padded list and counting truncated
    (even empty, '[]') trailing "n-grams"; only full windows are
    counted now.
    """
    cnter = collections.Counter()
    for line in x:
        padded = [-1] * (n_gram - 1) + line + [-1] * (n_gram - 1)
        cnter.update(str(padded[i:i + n_gram])
                     for i in range(len(padded) - n_gram + 1))
    return cnter
from collections import defaultdict,Counter
from nltk import ngrams
def ngram_metrics(token_list, pad=30001):
    """Fraction of repeated n-grams (n=1..4) in one token list.

    Tokens from the first occurrence of *pad* onward are dropped.
    NOTE(review): pad=30001 looks vocabulary-specific (a pad token id) --
    confirm against the tokenizer. Raises ZeroDivisionError if the
    (truncated) list is shorter than n.
    """
    if pad in token_list:
        token_list = token_list[:token_list.index(pad)]  # remove possible padding
    stats = defaultdict(float)
    for n in range(1, 5):
        ngs = [ng for ng in ngrams(token_list, n)]
        counter = Counter([ng for ng in ngrams(token_list, n)])
        # 1 - unique/total = share of n-gram positions that are repeats.
        stats['pct_repeat_%dgrams' % n] = 1.0 - len(counter) / len(ngs)
    return stats
def seq_rep_n(corpus):
    """Mean fraction of repeated 4-grams (seq-rep-4) over the corpus."""
    score=0.0
    total_n=len(corpus)
    for token_list in corpus:
        score+=ngram_metrics(token_list)["pct_repeat_4grams"]
    return score/total_n
def compute_probs(cnter, token_lists):
    """Relative frequency of each token in *token_lists* under *cnter*.

    Tokens absent from the counter get a tiny floor probability (1e-10)
    so downstream log/ratio computations stay finite. Returns a numpy
    array aligned with *token_lists*.
    """
    total = sum(cnter.values())
    probs = [cnter[token] / total if token in cnter else 1e-10
             for token in token_lists]
    return np.array(probs)
def kld(references, hypotheses, n_gram):
    """KL divergence KL(ref || hyp) between n-gram distributions.

    Both sides are estimated over the union of observed n-gram keys;
    unseen n-grams are floored at 1e-10 by compute_probs, which keeps
    the log ratio finite.
    """
    r_cnter = count(references,n_gram)
    h_cnter = count(hypotheses,n_gram)
    s = set(r_cnter.keys())
    s.update(h_cnter.keys())
    s = list(s)
    r_probs = compute_probs(r_cnter, s)
    h_probs = compute_probs(h_cnter, s)
    kld = np.sum(r_probs * np.log(r_probs/h_probs))
    return kld
def entropy(x):
    """Shannon entropy (in nats) of the unigram distribution over *x*.

    *x* is an iterable of token sequences; counts are pooled across all
    sequences before normalizing.
    """
    counts = collections.Counter()
    for sequence in x:
        counts.update(sequence)
    total = sum(counts.values())
    probs = np.array([c / total for c in counts.values()])
    return -np.sum(probs * np.log(probs))
def ms_jaccard(ref,hyp,n_gram):
    """MS-Jaccard similarity between ref and hyp n-gram distributions.

    For each order k the soft Jaccard is sum(min(p_ref, p_hyp)) /
    sum(max(p_ref, p_hyp)); the returned list gives the geometric mean
    of orders 1..k for k = 1..n_gram.
    """
    res = []
    for i in range(1,1+n_gram):
        rc = count(ref,i)
        hc = count(hyp,i)
        n_gram_set = set(rc.keys())
        n_gram_set.update(hc.keys())
        rprob= compute_probs(rc,n_gram_set)
        hprob= compute_probs(hc,n_gram_set)
        numerator = np.sum(np.minimum(rprob,hprob))
        denominator = np.sum(np.maximum(rprob,hprob))
        res.append(numerator / denominator)
    score = []
    for i in range(1,1+n_gram):
        score.append(geo_mean(res[:i]))
    return score
class Refcnts:
    """Precomputed reference n-gram clip counts for repeated BLEU scoring.

    Caches the per-order max reference counts and reference lengths once,
    so many hypothesis sets can be scored without re-counting references.
    """
    def __init__(self,references,n):
        # ref_mcnts[k]: max count of each k-gram across references.
        self.ref_mcnts = {i: ref_cnts1(references, i) for i in range(1, n + 1)}
        self.ref_lens = [len(i) for i in references]
        self.n = n

    def bleu(self, hypothesis):
        """Mean cumulative BLEU-1..BLEU-n over the given hypotheses."""
        bleu_scores = {i: [] for i in range(1, self.n + 1)}
        for hyp in hypothesis:
            # print(p_denominators,p_numerators)
            p_numerators = Counter()  # Key = ngram order, and value = no. of ngram matches.
            p_denominators = Counter()  # Key = ngram order, and value = no. of ngram in ref.
            for i in range(1, self.n + 1):
                p_i = modified_precision(self.ref_mcnts[i], hyp, i)
                # print(p_i)
                p_numerators[i] = p_i.numerator
                p_denominators[i] = p_i.denominator
            hyp_len = len(hyp)
            ref_len = closest_ref_length(iter(self.ref_lens), hyp_len)
            bp = brevity_penalty(ref_len, hyp_len)
            for i in range(1, self.n + 1):
                # Tiny floor prevents log(0) for orders with no matches.
                if p_numerators[i] == 0: p_numerators[i] = 1e-100
                s = (1 / i * math.log(p_numerators[j] / p_denominators[j]) for j in range(1, i + 1))
                s = bp * math.exp(math.fsum(s))
                bleu_scores[i].append(s)
        return [np.mean(bleu_scores[i]) for i in range(1, self.n + 1)]

    def ms_jaccard(self, ref, hyp, n_gram):
        """Single-order soft Jaccard between ref/hyp n-gram distributions.

        NOTE(review): ignores the cached reference state (takes ref as an
        argument) -- confirm this is intended to live on this class.
        """
        rc = count(ref, n_gram)
        hc = count(hyp, n_gram)
        n_gram_set = set(rc.keys())
        n_gram_set.update(hc.keys())
        rprob = compute_probs(rc, n_gram_set)
        hprob = compute_probs(hc, n_gram_set)
        numerator = np.sum(np.minimum(rprob, hprob))
        denominator = np.sum(np.maximum(rprob, hprob))
        return numerator / denominator
def build_refcnts(references,n):
    """Return ({order: max ref n-gram counts}, [ref lengths]) for bleu()."""
    ref_mcnts = {i:ref_cnts1(references, i) for i in range(1,n+1)}
    ref_lens = [len(i) for i in references]
    return ref_mcnts, ref_lens
def bleu(ref_mcnts, ref_lens, hypothesis, n):
    """Mean cumulative BLEU-1..BLEU-n using precomputed reference counts.

    *ref_mcnts*/*ref_lens* come from build_refcnts(); returns a list of
    the average score per cumulative order.
    """
    # print(ref_mcnts)
    # numerator, denominator = 0, 0
    bleu_scores = {i:[] for i in range(1,n+1)}
    for hyp in hypothesis:
        # print(p_denominators,p_numerators)
        p_numerators = Counter()  # Key = ngram order, and value = no. of ngram matches.
        p_denominators = Counter()  # Key = ngram order, and value = no. of ngram in ref.
        for i in range(1,n+1):
            p_i = modified_precision(ref_mcnts[i], hyp, i)
            # print(p_i)
            p_numerators[i] = p_i.numerator
            p_denominators[i] = p_i.denominator
        hyp_len = len(hyp)
        ref_len = closest_ref_length(iter(ref_lens), hyp_len)
        bp = brevity_penalty(ref_len, hyp_len)
        for i in range(1,n+1):
            # Tiny floor prevents log(0) for orders with no matches.
            if p_numerators[i] == 0: p_numerators[i] = 1e-100
            s = (1/i * math.log(p_numerators[j] / p_denominators[j]) for j in range(1,i+1))
            s = bp * math.exp(math.fsum(s))
            bleu_scores[i].append(s)
    return [np.mean(bleu_scores[i]) for i in range(1,n+1)]
def selfbleu(x, n):
    """Self-BLEU-1..n of corpus *x*: each sentence scored against the rest.

    Uses ref_cnts2's top-2 counts so a sentence's own n-grams are clipped
    against the *other* sentences (isself=True in modified_precision).
    """
    x_mcnts = {i: ref_cnts2(x, i) for i in range(1, n + 1)}
    x_lens = [len(i) for i in x]
    bleu_scores = {i:[] for i in range(1,n+1)}
    for idx, hyp in enumerate(x):
        p_numerators = Counter()  # Key = ngram order, and value = no. of ngram matches.
        p_denominators = Counter()  # Key = ngram order, and value = no. of ngram in ref.
        for i in range(1, n + 1):
            p_i = modified_precision(x_mcnts[i], hyp, i,True)
            p_numerators[i] = p_i.numerator
            p_denominators[i] = p_i.denominator
        hyp_lengths = len(hyp)
        # Exclude the hypothesis's own length when picking the closest ref.
        ref_lengths = closest_ref_length(iter(x_lens[:idx] + x_lens[idx+1:]), hyp_lengths)
        bp = brevity_penalty(ref_lengths, hyp_lengths)
        for i in range(1,n+1):
            if p_numerators[i] == 0: p_numerators[i] = 1e-100
            s = (1 / i * math.log(p_numerators[j] / p_denominators[j]) for j in range(1, i + 1))
            s = bp * math.exp(math.fsum(s))
            bleu_scores[i].append(s)
    return [np.mean(bleu_scores[i]) for i in range(1,n+1)]
#
# def selfbleu(x,n):
# logits = []
# bleu_scores = []
# for i in range(1,n+1):
# logit = selfbleu_logit(x,i)
# logits.append(logit)
# bleu_score = geo_mean(logits)
# bleu_scores.append(bleu_score)
# return bleu_scores
def geo_mean(iterable):
    """Geometric mean of the values in *iterable* (product ** (1/len))."""
    values = np.asarray(iterable)
    return values.prod() ** (1.0 / len(values))
def ref_cnts1(references,n):
    """Max count of each n-gram across all references (BLEU clip counts)."""
    ref_mcnts = {}
    for reference in references:
        reference_counts = (
            Counter(ngrams(reference, n)) if len(reference) >= n else Counter()
        )
        for i in reference_counts:
            # Keep the maximum per-reference count for each n-gram.
            if i not in ref_mcnts: ref_mcnts[i] = reference_counts[i]
            elif ref_mcnts[i] < reference_counts[i]: ref_mcnts[i] = reference_counts[i]
    return ref_mcnts
def ref_cnts2(references,n):
    """Top-2 per-reference counts of each n-gram, as [max, second-max].

    Used by selfbleu(): when a hypothesis's own count equals the max,
    modified_precision clips against the second-max instead, effectively
    excluding the sentence itself from its references.
    """
    ref_mcnts = {}
    for reference in references:
        reference_counts = (
            Counter(ngrams(reference, n)) if len(reference) >= n else Counter()
        )
        for i in reference_counts:
            if i not in ref_mcnts: ref_mcnts[i] = [reference_counts[i],0]
            elif ref_mcnts[i][-1] < reference_counts[i]:
                # New count beats the runner-up; shift the pair as needed.
                if ref_mcnts[i][0] < reference_counts[i]:
                    ref_mcnts[i] = [reference_counts[i],ref_mcnts[i][0]]
                else:
                    ref_mcnts[i][-1] = reference_counts[i]
    return ref_mcnts
def modified_precision(ref_mcnts, hypothesis,n, isself=False):
    """BLEU modified n-gram precision as an un-normalized Fraction.

    *ref_mcnts* maps n-gram -> max reference count (or [max, 2nd-max]
    pairs when isself=True, see ref_cnts2). Hypothesis counts are
    clipped against these before summing.

    NOTE(review): with isself=True an n-gram missing from ref_mcnts
    would raise KeyError; callers (selfbleu) always score a member of
    the reference corpus, so every n-gram is present -- confirm before
    reusing elsewhere. Also, _normalize was removed from Fraction in
    Python 3.12.
    """
    counts = Counter(ngrams(hypothesis, n)) if len(hypothesis) >= n else Counter()
    # hyp_mcnts mirrors the lookup but is otherwise unused downstream.
    hyp_mcnts = {}
    for ngram in counts:
        if ngram in ref_mcnts: hyp_mcnts[ngram] = ref_mcnts[ngram]
        else : hyp_mcnts[ngram] = 0
    if isself:
        # If the hypothesis holds the max count itself, clip against the
        # second-highest reference count instead.
        clipped_counts = {
            ngram: min(count, ref_mcnts[ngram][1]) if count == ref_mcnts[ngram][0] else min(count, ref_mcnts[ngram][0])
            for ngram, count in counts.items()
        }
    else:
        clipped_counts = {
            ngram: min(count, ref_mcnts.get(ngram,0)) for ngram, count in counts.items()
        }
    numerator = sum(clipped_counts.values())
    # Ensures that denominator is minimum 1 to avoid ZeroDivisionError.
    # Usually this happens when the ngram order is > len(reference).
    denominator = max(1, sum(counts.values()))
    return Fraction(numerator, denominator, _normalize=False)
def closest_ref_length(ref_lens, hyp_len):
    """Return the reference length closest to the hypothesis length.

    This is the *r* variable from the brevity penalty formula in
    Papineni et al. (2002). Ties are broken toward the shorter
    reference length.

    :param ref_lens: iterable of reference lengths
    :param hyp_len: length of the hypothesis
    :return: the closest reference length
    :rtype: int
    """
    return min(ref_lens, key=lambda length: (abs(length - hyp_len), length))
def count_avg_pos(sentences, pos="JJ"):
    """Average number of tokens with POS tag *pos* per sentence.

    Requires a Stanford CoreNLP server on localhost:9876; returns -1
    when the tagger cannot be constructed.
    """
    from nltk.parse import CoreNLPParser
    try:
        pos_tagger = CoreNLPParser(url="http://localhost:9876", tagtype='pos')
    except:
        logging.info("load pos_tagger on http://localhost:9876 failed!")
        return -1
    target_pos_num = 0
    for sentence in sentences:
        pos_result = pos_tagger.tag(sentence)
        for word_pos in pos_result:
            if word_pos[1] == pos:
                target_pos_num += 1
    return target_pos_num / len(sentences)
| StarcoderdataPython |
3380139 | <gh_stars>0
'''
https://leetcode.com/problems/palindrome-partitioning/
131. Palindrome Partitioning
Given a string s, partition s such that every substring of the partition is a palindrome. Return all possible palindrome partitioning of s.
A palindrome string is a string that reads the same backward as forward.
Check the following video for thorough explanation: https://www.youtube.com/watch?v=3jvWodd7ht0
'''
'''
Brute Force Backtracking
'''
class Solution:
    """Brute-force backtracking palindrome partitioning (LeetCode 131)."""

    def is_palindrome(self, s, i, j):
        """Return True if s[i..j] (both inclusive) is a palindrome."""
        segment = s[i:j + 1]
        return segment == segment[::-1]

    def dfs(self, start_index, current_list, s, results):
        """Extend *current_list* with every palindromic prefix of
        s[start_index:] and recurse; complete partitions land in *results*."""
        if start_index >= len(s):
            # Everything up to the end was consumed by palindromes:
            # current_list is a valid partition.
            results.append(current_list)
            return
        for end in range(start_index, len(s)):
            # Try each prefix s[start_index..end]; only palindromic
            # prefixes can start a valid partition of the remainder.
            if self.is_palindrome(s, start_index, end):
                # Branch with a fresh list so sibling choices don't
                # see this prefix.
                self.dfs(end + 1, current_list + [s[start_index:end + 1]],
                         s, results)

    def partition(self, s: str) -> [[str]]:
        """Return every partition of *s* into palindromic substrings."""
        results = []
        self.dfs(0, [], s, results)
        return results
'''
Backtracking + DP
The above can be optimize by reducing computations of isPalindrome(). If we know that s[start + 1 ... end - 1] is a palindrome, then for sure the substring
s[start ... end] is a palindrome IF s[start] = s[end]. So, we don't need reiterate over the characters from start to end, we can get our answer from our dp array
'''
class Solution2:
    """Palindrome partitioning with a DP table memoizing palindrome checks.

    dp[i][j] records that s[i..j] is a known palindrome, so each check is
    O(1): s[i..j] is a palindrome iff s[i] == s[j] and (the span is short
    or s[i+1..j-1] was already marked).
    """
    def dfs(self, start_index, current_list, s, results, dp):
        """Backtrack over palindromic prefixes of s[start_index:]."""
        if start_index >= len(s):
            # current_list is a complete, valid partition.
            results.append(current_list)
        else:
            # Try each prefix s[start_index..end_index] of the remainder.
            for end_index in range(start_index, len(s)):
                # Palindrome test in O(1):
                #  - endpoints must match, and
                #  - spans of length <= 3 with matching endpoints are
                #    automatically palindromes, otherwise consult dp for
                #    the inner span s[start_index+1 .. end_index-1].
                if s[start_index] == s[end_index] and (end_index - start_index <= 2 or dp[start_index + 1][end_index - 1]):
                    dp[start_index][end_index] = True
                    # Branch with a copy so sibling end_index choices do
                    # not see this prefix.
                    current_list_copy = current_list.copy()
                    current_list_copy.append(s[start_index:end_index + 1])
                    self.dfs(end_index + 1, current_list_copy, s, results, dp)

    def partition(self, s: str) -> [[str]]:
        """Return every partition of *s* into palindromic substrings."""
        results = []
        dp = [[]] * len(s)
        for i in range(0, len(s)):
            dp[i] = [False] * len(s)
        # all substrings of length 1 are by default palindromes
        for i in range(0, len(s)):
            dp[i][i] = True
        self.dfs(0, [], s, results, dp)
        return results
# Manual smoke check for Solution2 (earlier test inputs kept for reference).
# s = 'aabbcecbbdd'
# s = 'abcceccb'
# s = 'aab'
s = "bbab"
print(Solution2().partition(s))
| StarcoderdataPython |
3382729 | from brownie import *
# NOTE(review): DEADLINE appears unused in this module -- confirm before removing.
DEADLINE = 999999999999

def test_gas_refund(helper, accounts):
    """Test if a gas reduction is achieved by burning tokens.

    Brownie fixtures: *helper* is the deployed contract, *accounts* the
    local test accounts. Asserts the gas-token refund keeps the call
    under 700k gas.
    """
    tx = helper.burnBuyAndFree(1000000, 25, {'from': accounts[0], 'value': "1 ether"})
    assert tx.gas_used < 700000
| StarcoderdataPython |
1792959 | from PIL import Image
def split1(im):
    """Sheet with a single mugshot: return it unsplit."""
    return [im]
def split2(im):
    """Sheet with two mugshots.

    NOTE(review): currently identical to split1 (no cropping) -- looks
    like the 2-face split was never implemented; confirm intent.
    """
    return [im]
def split3(im: Image.Image):
    """Crop three 128px mugshot tiles (top-left, bottom-left, top-right).

    NOTE(review): crop boxes end at 257, overlapping the next tile by
    one pixel -- confirm that off-by-one is intentional.
    """
    return [
        im.crop((0, 0, 128, 128)),
        im.crop((0, 128, 128, 257)),
        im.crop((128, 0, 257, 128))
    ]
def split4(im):
    """Crop four 128px mugshot tiles (2x2 grid; boxes end at 257, see split3)."""
    return [
        im.crop((0, 0, 128, 128)),
        im.crop((0, 128, 128, 257)),
        im.crop((128, 0, 257, 128)),
        im.crop((128, 128, 257, 257))
    ]
if __name__ == '__main__':
    from pathlib import Path
    import pickle

    pkl_path = 'mugshots.pkl'
    # Split depending on the number of mugshots in the image.
    # NOTE(review): the last four entries repeat earlier characters
    # (Alia/Layer/Pallette) -- verify the intended roster/order.
    images_path = Path('images')
    results = [
        split4(Image.open(images_path / 'X.png')),
        split3(Image.open(images_path / 'Zero.png')),
        split4(Image.open(images_path / 'Axl.png')),
        split3(Image.open(images_path / 'Alia.png')),
        split3(Image.open(images_path / 'Layer.png')),
        split4(Image.open(images_path / 'Pallette.png')),
        split1(Image.open(images_path / 'Signas.png')),
        split1(Image.open(images_path / 'Light.png')),
        split1(Image.open(images_path / 'Sunflower.png')),
        split1(Image.open(images_path / 'Antonion.png')),
        split1(Image.open(images_path / 'Mantis.png')),
        split1(Image.open(images_path / 'Man-o-War.png')),
        split1(Image.open(images_path / 'Rooster.png')),
        split1(Image.open(images_path / 'Yeti.png')),
        split1(Image.open(images_path / 'Trilobyte.png')),
        split1(Image.open(images_path / 'Pandamonium.png')),
        split1(Image.open(images_path / 'Vile.png')),
        split2(Image.open(images_path / 'Sigma.png')),
        split3(Image.open(images_path / 'Lumine.png')),
        split3(Image.open(images_path / 'Alia.png')),
        split3(Image.open(images_path / 'Layer.png')),
        split4(Image.open(images_path / 'Pallette.png')),
    ]
    # BUG FIX: the file handle from open() was never closed; use a
    # context manager so it is flushed and closed deterministically.
    with open(pkl_path, 'wb') as f:
        pickle.dump(results, f)
3383603 | import logging
import faulthandler
import os
import sys
from client.ui import app
from client.data_manage import data_dir
def _redirect_streams():
    """Redirect stdout/stderr to app.log / error.log in the app data dir.

    The file objects are intentionally left open for the lifetime of the
    process since they back sys.stdout/sys.stderr. Mode 'w' truncates
    logs from the previous run.
    """
    logs_dir = data_dir.get_data_dir()
    sys.stdout = open(os.path.join(logs_dir, 'app.log'), 'w')
    sys.stderr = open(os.path.join(logs_dir, 'error.log'), 'w')
def _get_logger() -> logging.Logger:
    """Build the 'client' logger writing INFO+ to stdout, ERROR+ to stderr.

    Side effect: redirects the process streams to log files first.
    """
    _redirect_streams()  # comment to watch logs in console
    formatter = logging.Formatter('%(asctime)s\t%(levelname)s\t%(message)s')
    info_handler = logging.StreamHandler(sys.stdout)
    info_handler.setLevel(logging.INFO)
    info_handler.setFormatter(formatter)
    error_handler = logging.StreamHandler(sys.stderr)
    error_handler.setLevel(logging.ERROR)
    error_handler.setFormatter(formatter)
    # NOTE(review): ERROR records go to both handlers (stdout handler
    # accepts INFO and above) -- confirm the duplication is intended.
    result = logging.getLogger('client')
    result.setLevel(logging.INFO)
    result.addHandler(info_handler)
    result.addHandler(error_handler)
    return result
logger = _get_logger()
def main():
    """Run the Qt-style DocsApp event loop and exit with its status."""
    # Dump tracebacks on hard crashes (segfaults) to stderr/log file.
    faulthandler.enable()
    logger.info('Starting app')
    docs_app = app.DocsApp()
    exit_status = docs_app.exec()
    logger.info('App exited with status %s', exit_status)
    sys.exit(exit_status)
if __name__ == '__main__':
main()
| StarcoderdataPython |
3353710 | <gh_stars>1-10
# =============================================================================
#
# EZID :: newsfeed.py
#
# Interface to the EZID RSS news feed.
#
# This module should be imported at server startup so that its daemon
# thread is started in advance of any UI page requests.
#
# Author:
# <NAME> <<EMAIL>>
#
# License:
# Copyright (c) 2012, Regents of the University of California
# http://creativecommons.org/licenses/BSD/
#
# -----------------------------------------------------------------------------
import django.conf
import threading
import time
import uuid
import config
import feedparser
import log
_enabled = None  # True when the background news daemon should run
_lock = threading.Lock()  # guards swaps of _items/_threadName on reload
_noItems = [("No news available", None)]  # fallback shown when feed is empty/broken
_url = None  # RSS feed URL (from config)
_pollingInterval = None  # seconds between feed fetches
_threadName = None  # name of the current daemon-thread generation
_items = None  # latest [(title, link), ...] served to the UI
def _newsDaemon():
    """Background loop: poll the RSS feed and cache up to 3 items.

    The loop exits when the feature is disabled or when loadConfig()
    issues a new _threadName (stale generations stop touching _items).
    Python 2 syntax ('except Exception, e').
    """
    global _items
    while _enabled and threading.currentThread().getName() == _threadName:
        try:
            feed = feedparser.parse(_url)
            if len(feed.entries) > 0:
                items = []
                for i in range(min(len(feed.entries), 3)):
                    items.append((feed.entries[i].title, feed.entries[i].link))
            else:
                items = _noItems
        except Exception, e:
            # Best-effort: log and fall back to the placeholder item.
            log.otherError("newsfeed._newsDaemon", e)
            items = _noItems
        _lock.acquire()
        try:
            # Only the current thread generation may publish results.
            if threading.currentThread().getName() == _threadName:
                _items = items
        finally:
            _lock.release()
        time.sleep(_pollingInterval)
def loadConfig():
    """(Re)load the newsfeed configuration and, if enabled, start a fresh
    daemon thread.

    A new unique _threadName is generated under the lock; any previously
    running daemon notices its name no longer matches and exits on its next
    iteration, so at most one daemon publishes items at a time.
    """
    global _enabled, _url, _pollingInterval, _threadName, _items
    _enabled = (
        django.conf.settings.DAEMON_THREADS_ENABLED
        and config.get("daemons.newsfeed_enabled").lower() == "true"
    )
    if _enabled:
        _url = config.get("newsfeed.url")
        _pollingInterval = int(config.get("newsfeed.polling_interval"))
        _lock.acquire()
        try:
            _items = _noItems  # reset until the new daemon fetches real items
            _threadName = uuid.uuid1().hex  # invalidates any older daemon thread
            t = threading.Thread(target=_newsDaemon, name=_threadName)
            t.setDaemon(True)  # do not block interpreter shutdown
            t.start()
        finally:
            _lock.release()
    else:
        _items = _noItems
def getLatestItems():
    """Return the most recent news items as a list of (title, URL) tuples.

    The list holds at most 3 entries and never fewer than 1; a tuple's URL
    may be None.
    """
    return _items
| StarcoderdataPython |
4803854 | <gh_stars>0
"""
mmvt_cv.py
Define any type of collective variable (or milestone shape) that might
be used in an MMVT calculation.
"""
import seekr2.modules.common_base as base
import seekr2.modules.mmvt_base as mmvt_base
def make_mmvt_spherical_cv_object(spherical_cv_input, index):
    """Build the MMVT_spherical_CV for one spherical CV input.

    The collective variable acts on the two atom groups named in the input
    and carries the given CV index.
    """
    atom_groups = [spherical_cv_input.group1, spherical_cv_input.group2]
    return mmvt_base.MMVT_spherical_CV(index, atom_groups)
def make_mmvt_milestoning_objects_spherical(
        spherical_cv_input, milestone_alias, milestone_index,
        index, input_anchors):
    """
    Make a set of 1-dimensional spherical Milestone objects to be put into
    an Anchor, and eventually, the Model.

    For anchor number 'index', up to two concentric spherical milestones are
    created: one shared with the previous anchor (k = -1.0) and one shared
    with the next anchor (k = +1.0).  A milestone's radius defaults to the
    midpoint of the two anchors' radii unless the input anchor provides an
    explicit lower/upper milestone radius.  Returns (milestones,
    milestone_alias, milestone_index); the counters are advanced past any
    created lower milestone.
    """
    milestones = []
    num_anchors = len(input_anchors)
    if index > 0:
        # Lower milestone: boundary shared with the previous anchor.
        neighbor_index = index - 1
        milestone1 = base.Milestone()
        milestone1.index = milestone_index
        milestone1.neighbor_anchor_index = neighbor_index
        milestone1.alias_index = milestone_alias
        milestone1.cv_index = spherical_cv_input.index
        if input_anchors[index].lower_milestone_radius is None:
            # Default: halfway between this anchor's radius and the neighbor's.
            radius = 0.5 * (input_anchors[index].radius \
                + input_anchors[neighbor_index].radius)
        else:
            radius = input_anchors[index].lower_milestone_radius
        milestone1.variables = {"k": -1.0, "radius": radius}
        milestone_alias += 1
        milestone_index += 1
        milestones.append(milestone1)
    if index < num_anchors-1:
        # Upper milestone: boundary shared with the next anchor.
        neighbor_index = index + 1
        milestone2 = base.Milestone()
        milestone2.index = milestone_index
        milestone2.neighbor_anchor_index = neighbor_index
        milestone2.alias_index = milestone_alias
        milestone2.cv_index = spherical_cv_input.index
        if input_anchors[index].upper_milestone_radius is None:
            radius = 0.5 * (input_anchors[index].radius \
                + input_anchors[neighbor_index].radius)
        else:
            radius = input_anchors[index].upper_milestone_radius
        milestone2.variables = {"k": 1.0, "radius": radius}
        milestones.append(milestone2)
    return milestones, milestone_alias, milestone_index
| StarcoderdataPython |
4820869 | import numpy as np
import cv2
from keras.models import Model, Sequential
from keras.layers import Input, Dense, Reshape, Flatten
from keras.layers.convolutional import Convolution2D, Conv2DTranspose
from keras.layers.normalization import BatchNormalization
from keras.layers.advanced_activations import LeakyReLU
from keras import layers
from keras.optimizers import Adam
from keras.datasets import mnist
from keras import backend as K
import matplotlib.pyplot as plt
from time import time
import tensorflow as tf
import os
from keras.callbacks import TensorBoard
from datetime import datetime
from keras.layers.merge import _Merge
from keras import backend as K
from functools import partial
# Training hyper-parameters for the WGAN-GP.
EPOCHS = 20000
BATCH_SIZE = 8
# The training ratio is the number of discriminator updates
# per generator update. The paper uses 5.
TRAINING_RATIO = 8
GRADIENT_PENALTY_WEIGHT = 10  # As per the paper
INPUT_LEN = 128  # dimensionality of the generator's input noise vector
def make_discriminator():
    """Creates a discriminator model that takes an image as input and outputs a single
    value, representing whether the input is real or generated. Unlike normal GANs, the
    output is not sigmoid and does not represent a probability! Instead, the output
    should be as large and negative as possible for generated inputs and as large and
    positive as possible for real inputs.
    Note that the improved WGAN paper suggests that BatchNormalization should not be
    used in the discriminator."""
    model = Sequential()
    # NOTE(review): unlike every later convolution, this first layer does not set
    # kernel_initializer='he_normal' -- confirm whether that is intentional.
    # NOTE(review): input_shape is (256, 256, 2) but make_generator() below emits
    # 200x200x2 tensors -- verify the intended slice shape.
    model.add(Convolution2D(32, 5, padding='same', strides=[2, 2], input_shape=(256, 256, 2)))
    model.add(LeakyReLU())
    model.add(Convolution2D(64, 5, kernel_initializer='he_normal', strides=[2, 2], padding='same'))
    model.add(LeakyReLU())
    model.add(Convolution2D(128, 5, kernel_initializer='he_normal', padding='same', strides=[2, 2]))
    model.add(LeakyReLU())
    model.add(Convolution2D(256, 5, kernel_initializer='he_normal', padding='same', strides=[2, 2]))
    model.add(LeakyReLU())
    model.add(Convolution2D(512, 5, kernel_initializer='he_normal', padding='same', strides=[2, 2]))
    model.add(LeakyReLU())
    model.add(Convolution2D(1024, 5, kernel_initializer='he_normal', padding='same', strides=[2, 2]))
    model.add(LeakyReLU())
    model.add(Flatten())
    # model.add(Dense(1024 * 4 * 4, kernel_initializer='he_normal'))
    # model.add(LeakyReLU())
    # Single linear output: the Wasserstein critic score (no sigmoid).
    model.add(Dense(1, kernel_initializer='he_normal'))
    return model
def make_generator():
    """Creates a generator model that takes a 128-dimensional noise vector as a "seed"
    and outputs a two-channel image.

    Spatial size through the network: 6 -> 12 -> 24 -> 25 (zero-pad) -> 50
    -> 100 -> 200, so the final tensor is 200x200x2 with tanh activation.
    NOTE(review): the original docstring claimed 256x256x3, which does not
    match the layers below -- confirm the intended output shape.
    """
    model = Sequential()
    model.add(Dense(6* 6* 2048, input_dim=INPUT_LEN))
    model.add(BatchNormalization())
    model.add(LeakyReLU())
    model.add(Reshape((6, 6, 2048), input_shape=(6* 6* 2048,)))
    bn_axis = -1  # channels-last batch-norm axis
    model.add(Conv2DTranspose(1024, 5, strides=2, padding='same'))
    model.add(BatchNormalization(axis=bn_axis))
    model.add(LeakyReLU())
    model.add(Conv2DTranspose(512, 5, strides=2, padding='same'))
    model.add(BatchNormalization(axis=bn_axis))
    model.add(LeakyReLU())
    # Pad 24x24 to 25x25 so the remaining stride-2 layers reach 200x200.
    model.add(layers.ZeroPadding2D(padding=((1, 0), (1, 0))))
    model.add(Conv2DTranspose(256, 5, strides=2, padding='same'))
    model.add(BatchNormalization(axis=bn_axis))
    model.add(LeakyReLU())
    model.add(Conv2DTranspose(128, 5, strides=2, padding='same'))
    model.add(BatchNormalization(axis=bn_axis))
    model.add(LeakyReLU())
    model.add(Conv2DTranspose(2, 5, strides=2, padding='same', activation='tanh'))
    # The output of this last layer is 200x200x2 (the original comment
    # claimed 256x256x3 -- see the docstring note).
    return model
def save_imgs(generator, discriminator, imgs_path, epoch, noise_shape, total_images=100, get_n_best=10):
    """Sample `total_images` fakes from the generator, rank them with the
    discriminator (higher critic score = more realistic), and save the
    `get_n_best` highest-scoring ones into `imgs_path`.

    Each PNG contains the image's two channels side by side, each rescaled
    to the 0-255 range.
    """
    noise = np.random.normal(0, 1, (total_images, *noise_shape))
    gen_imgs = generator.predict(noise)
    images_mark = discriminator.predict(gen_imgs).reshape((total_images))
    # argsort of the negated scores = indices of the best-scored images first
    order = np.argsort(-images_mark)[:get_n_best]
    images_final = gen_imgs[order, ...]
    for i in range(get_n_best):
        img_name = "%d_%d_generated_img.png" % (epoch, i)
        this_img = images_final[i, ...]
        value_range = np.max(this_img) - np.min(this_img)
        if value_range == 0:
            # Fix: a constant image previously caused a division by zero
            # (NaNs written to disk); save it as all black instead.
            re_scaled = np.zeros_like(this_img)
        else:
            re_scaled = (this_img - np.min(this_img)) * 255 / value_range
        cv2.imwrite(os.path.join(imgs_path, img_name),
                    np.concatenate([re_scaled[:, :, 0], re_scaled[:, :, 1]], axis=1))
def wasserstein_loss(y_true, y_pred):
    """Calculates the Wasserstein loss for a sample batch.
    The Wasserstein loss function is very simple to calculate. In a standard GAN, the
    discriminator has a sigmoid output, representing the probability that samples are
    real or generated. In Wasserstein GANs, however, the output is linear with no
    activation function! Instead of being constrained to [0, 1], the discriminator wants
    to make the distance between its output for real and generated samples as
    large as possible.
    The most natural way to achieve this is to label generated samples -1 and real
    samples 1, instead of the 0 and 1 used in normal GANs, so that multiplying the
    outputs by the labels will give you the loss immediately.
    Note that the nature of this loss means that it can be (and frequently will be)
    less than 0."""
    # With labels in {+1, -1}, this is label * critic_output averaged over the batch.
    return K.mean(y_true * y_pred)
def gradient_penalty_loss(y_true, y_pred, averaged_samples, gradient_penalty_weight):
    """Calculates the gradient penalty loss for a batch of "averaged" samples.
    In Improved WGANs, the 1-Lipschitz constraint is enforced by adding a term to the
    loss function that penalizes the network if the gradient norm moves away from 1.
    However, it is impossible to evaluate this function at all points in the input
    space. The compromise used in the paper is to choose random points on the lines
    between real and generated samples, and check the gradients at these points. Note
    that it is the gradient w.r.t. the input averaged samples, not the weights of the
    discriminator, that we're penalizing!
    In order to evaluate the gradients, we must first run samples through the generator
    and evaluate the loss. Then we get the gradients of the discriminator w.r.t. the
    input averaged samples. The l2 norm and penalty can then be calculated for this
    gradient.
    Note that this loss function requires the original averaged samples as input, but
    Keras only supports passing y_true and y_pred to loss functions. To get around this,
    we make a partial() of the function with the averaged_samples argument, and use that
    for model training."""
    # first get the gradients:
    #   assuming: - that y_pred has dimensions (batch_size, 1)
    #             - averaged_samples has dimensions (batch_size, nbr_features)
    # gradients afterwards has dimension (batch_size, nbr_features), basically
    # a list of nbr_features-dimensional gradient vectors
    gradients = K.gradients(y_pred, averaged_samples)[0]
    # compute the euclidean norm by squaring ...
    gradients_sqr = K.square(gradients)
    #   ... summing over the rows (every axis except the batch axis) ...
    gradients_sqr_sum = K.sum(gradients_sqr, axis=np.arange(1, len(gradients_sqr.shape)))
    #   ... and sqrt
    gradient_l2_norm = K.sqrt(gradients_sqr_sum)
    # compute lambda * (1 - ||grad||)^2 still for each single sample
    gradient_penalty = gradient_penalty_weight * K.square(1 - gradient_l2_norm)
    # return the mean as loss over all the batch samples
    return K.mean(gradient_penalty)
def tile_images(image_stack):
    """Reshape a (n, h, w) stack of images into one (h, n*w) horizontal tiling."""
    assert len(image_stack.shape) == 3
    # Iterating a 3-D array yields its 2-D slices along the first axis.
    return np.concatenate(list(image_stack), axis=1)
class RandomWeightedAverage(_Merge):
    """Takes a randomly-weighted average of two tensors. In geometric terms, this
    outputs a random point on the line between each pair of input points.
    Inheriting from _Merge is a little messy but it was the quickest solution I could
    think of. Improvements appreciated.

    NOTE: uses the module-level BATCH_SIZE, so the layer only works with
    batches of exactly that size.
    """

    def _merge_function(self, inputs):
        # One uniform weight per sample, broadcast over height/width/channels.
        weights = K.random_uniform((BATCH_SIZE, 1, 1, 1))
        return (weights * inputs[0]) + ((1 - weights) * inputs[1])
import numpy as np
import sys
sys.path.append("..")
from constants import *
sys.path.append("../preprocessing")
from preprocessing.imageparser import ImageParser
import gc
import os
import cv2
# Parse the three WMH-challenge cohorts and collect per-subject file paths.
parser = ImageParser(path_utrech='../../Utrecht/subjects',
                     path_singapore='../../Singapore/subjects',
                     path_amsterdam='../../GE3T/subjects')
utrech_dataset, singapore_dataset, amsterdam_dataset = parser.get_all_images_and_labels()
t1_utrecht, flair_utrecht, labels_utrecht, white_mask_utrecht, distance_utrecht = parser.get_all_sets_paths(utrech_dataset)
t1_singapore, flair_singapore, labels_singapore, white_mask_singapore, distance_singapore = parser.get_all_sets_paths(singapore_dataset)
t1_amsterdam, flair_amsterdam, labels_amsterdam, white_mask_amsterdam, distance_amsterdam = parser.get_all_sets_paths(amsterdam_dataset)
slice_shape = SLICE_SHAPE
print('Utrecht: ', len(t1_utrecht), len(flair_utrecht), len(labels_utrecht))
print('Singapore: ', len(t1_singapore), len(flair_singapore), len(labels_singapore))
print('Amsterdam: ', len(t1_amsterdam), len(flair_amsterdam), len(labels_amsterdam))
# Extra axial slices trimmed beyond the global REMOVE_TOP/REMOVE_BOT; the
# Amsterdam (GE3T) volumes get additional trimming of their own.
rm_extra_top = 14
rm_extra_bot = 17
rm_extra_amsterdam_bot = 21
rm_extra_amsterdam_top = 14
final_label_imgs = parser.preprocess_all_labels([labels_utrecht,
                                                 labels_singapore,
                                                 labels_amsterdam], slice_shape, [UTRECH_N_SLICES,
                                                                                  SINGAPORE_N_SLICES,
                                                                                  AMSTERDAM_N_SLICES],
                                                REMOVE_TOP + rm_extra_top,
                                                REMOVE_BOT + rm_extra_bot,
                                                (rm_extra_amsterdam_top, rm_extra_amsterdam_bot))
'''
T1 DATA
'''
# Standardize each cohort's T1 volumes, then rescale to [-1, 1] (matching
# the generator's tanh output range).
rm_total = (REMOVE_TOP + REMOVE_BOT) + rm_extra_top + rm_extra_bot
utrecht_normalized_t1 = parser.preprocess_dataset_t1(t1_utrecht, slice_shape, UTRECH_N_SLICES,
                                                     REMOVE_TOP + rm_extra_top, REMOVE_BOT + rm_extra_bot, norm_type="stand")
utrecht_normalized_t1 = parser.normalize_neg_pos_one(utrecht_normalized_t1, UTRECH_N_SLICES - rm_total)
singapore_normalized_t1 = parser.preprocess_dataset_t1(t1_singapore, slice_shape, SINGAPORE_N_SLICES,
                                                       REMOVE_TOP + rm_extra_top, REMOVE_BOT + rm_extra_bot, norm_type="stand")
singapore_normalized_t1 = parser.normalize_neg_pos_one(singapore_normalized_t1, SINGAPORE_N_SLICES - rm_total)
amsterdam_normalized_t1 = parser.preprocess_dataset_t1(t1_amsterdam, slice_shape, AMSTERDAM_N_SLICES,
                                                       REMOVE_TOP + rm_extra_top + rm_extra_amsterdam_top,
                                                       REMOVE_BOT + rm_extra_bot + rm_extra_amsterdam_bot, norm_type="stand")
amsterdam_normalized_t1 = parser.normalize_neg_pos_one(amsterdam_normalized_t1,
                                                       AMSTERDAM_N_SLICES - rm_total - rm_extra_amsterdam_bot - rm_extra_amsterdam_top)
# Free the path lists as soon as they are consumed to limit peak memory.
del t1_utrecht, t1_singapore, t1_amsterdam
'''
FLAIR DATA
'''
# Same standardize + [-1, 1] rescale pipeline for the FLAIR volumes.
utrecht_stand_flairs = parser.preprocess_dataset_flair(flair_utrecht, slice_shape, UTRECH_N_SLICES,
                                                       REMOVE_TOP + rm_extra_top, REMOVE_BOT + rm_extra_bot, norm_type="stand")
utrecht_stand_flairs = parser.normalize_neg_pos_one(utrecht_stand_flairs, UTRECH_N_SLICES - rm_total)
singapore_stand_flairs = parser.preprocess_dataset_flair(flair_singapore, slice_shape, SINGAPORE_N_SLICES,
                                                         REMOVE_TOP + rm_extra_top, REMOVE_BOT + rm_extra_bot, norm_type="stand")
singapore_stand_flairs = parser.normalize_neg_pos_one(singapore_stand_flairs, SINGAPORE_N_SLICES - rm_total)
amsterdam_stand_flairs = parser.preprocess_dataset_flair(flair_amsterdam, slice_shape, AMSTERDAM_N_SLICES,
                                                         REMOVE_TOP + rm_extra_top + rm_extra_amsterdam_top,
                                                         REMOVE_BOT + rm_extra_bot + rm_extra_amsterdam_bot, norm_type="stand")
amsterdam_stand_flairs = parser.normalize_neg_pos_one(amsterdam_stand_flairs,
                                                      AMSTERDAM_N_SLICES - rm_total - rm_extra_amsterdam_bot - rm_extra_amsterdam_top)
del flair_utrecht, flair_singapore, flair_amsterdam
'''
DATA CONCAT
'''
# Stack the cohorts along the slice axis, then join T1 and FLAIR as the two
# channels of each training image.
normalized_t1 = np.concatenate([utrecht_normalized_t1,
                                singapore_normalized_t1,
                                amsterdam_normalized_t1], axis=0)
normalized_flairs = np.concatenate([utrecht_stand_flairs,
                                    singapore_stand_flairs,
                                    amsterdam_stand_flairs], axis=0)
del utrecht_normalized_t1, singapore_normalized_t1, amsterdam_normalized_t1
del utrecht_stand_flairs, singapore_stand_flairs, amsterdam_stand_flairs
data_t1 = np.expand_dims(np.asanyarray(normalized_t1), axis=3)
data_flair = np.expand_dims(np.asanyarray(normalized_flairs), axis=3)
all_data = np.concatenate([data_t1, data_flair], axis=3)
images = all_data
n_images = len(images)
generator = make_generator()
discriminator = make_discriminator()
# Freeze the discriminator while compiling the generator model: the
# generator's updates must not change the critic's weights.
for layer in discriminator.layers:
    layer.trainable = False
discriminator.trainable = False
generator_input = Input(shape=(INPUT_LEN,))
generator_layers = generator(generator_input)
discriminator_layers_for_generator = discriminator(generator_layers)
generator_model = Model(inputs=[generator_input], outputs=[discriminator_layers_for_generator])
# We use the Adam paramaters from Gulrajani et al.
generator_model.compile(optimizer=Adam(0.0001, beta_1=0.5, beta_2=0.9), loss=wasserstein_loss)
# Now the reverse: freeze the generator so only the critic trains.
for layer in discriminator.layers:
    layer.trainable = True
for layer in generator.layers:
    layer.trainable = False
discriminator.trainable = True
generator.trainable = False
# The discriminator_model is more complex. It takes both real image samples and random
# noise seeds as input. The noise seed is run through the generator model to get
# generated images. Both real and generated images are then run through the
# discriminator. Although we could concatenate the real and generated images into a
# single tensor, we don't (see model compilation for why).
real_samples = Input(shape=images.shape[1:])
generator_input_for_discriminator = Input(shape=(INPUT_LEN,))
generated_samples_for_discriminator = generator(generator_input_for_discriminator)
discriminator_output_from_generator = discriminator(generated_samples_for_discriminator)
discriminator_output_from_real_samples = discriminator(real_samples)
averaged_samples = RandomWeightedAverage()([real_samples,
                                            generated_samples_for_discriminator])
# We then run these samples through the discriminator as well. Note that we never
# really use the discriminator output for these samples - we're only running them to
# get the gradient norm for the gradient penalty loss.
averaged_samples_out = discriminator(averaged_samples)
partial_gp_loss = partial(gradient_penalty_loss,
                          averaged_samples=averaged_samples, gradient_penalty_weight=GRADIENT_PENALTY_WEIGHT)
# Functions need names or Keras will throw an error
partial_gp_loss.__name__ = 'gradient_penalty'
discriminator_model = Model(inputs=[real_samples, generator_input_for_discriminator],
                            outputs=[discriminator_output_from_real_samples, discriminator_output_from_generator, averaged_samples_out])
discriminator_model.compile(optimizer=Adam(0.0001, beta_1=0.5, beta_2=0.9),
                            loss=[wasserstein_loss, wasserstein_loss, partial_gp_loss])
# We make three label vectors for training. positive_y is the label vector for real
# samples, with value 1. negative_y is the label vector for generated samples, with
# value -1. The dummy_y vector is passed to the gradient_penalty loss function and
# is not used.
positive_y = np.ones((BATCH_SIZE, 1), dtype=np.float32)
negative_y = -positive_y
dummy_y = np.zeros((BATCH_SIZE, 1), dtype=np.float32)
intervado_guardado = 50  # checkpoint interval, in epochs
for epoch in range(EPOCHS):
    start = time()  # NOTE(review): 'start' is never read afterwards
    np.random.shuffle(images)
    print("Epoch: ", epoch)
    print("Number of batches: ", int(n_images // BATCH_SIZE))
    discriminator_loss = []
    generator_loss = []
    minibatches_size = BATCH_SIZE * TRAINING_RATIO
    print('Tenemos ', int(n_images // (BATCH_SIZE * TRAINING_RATIO)), ' minibatches.')
    # WGAN-GP schedule: TRAINING_RATIO critic updates per generator update.
    for i in range(int(n_images // (BATCH_SIZE * TRAINING_RATIO))):
        discriminator_minibatches = images[i * minibatches_size: (i + 1) * minibatches_size]
        for j in range(TRAINING_RATIO):
            image_batch = discriminator_minibatches[j * BATCH_SIZE: (j + 1) * BATCH_SIZE]
            noise = np.random.normal(0, 1, (BATCH_SIZE, INPUT_LEN)).astype(np.float32)
            # noise = np.random.uniform(-1,1,(BATCH_SIZE, INPUT_LEN)).astype(np.float32)
            discriminator_loss_val = discriminator_model.train_on_batch([image_batch, noise],
                                                                        [positive_y, negative_y, dummy_y])
            discriminator_loss.append(discriminator_loss_val)
        # generator_loss_val = generator_model.train_on_batch(np.random.uniform(-1,1,(BATCH_SIZE, INPUT_LEN)), positive_y)
        generator_loss_val = generator_model.train_on_batch(np.random.normal(0, 1, (BATCH_SIZE, INPUT_LEN)), positive_y)
        generator_loss.append(generator_loss_val)
    if epoch % intervado_guardado == 0:
        # Periodically checkpoint weights and dump the best-scored samples.
        base_path = os.getcwd()
        generator.save_weights(os.path.join("weights", "generator_epoch_" + str(epoch) + ".h5"))
        discriminator.save_weights(os.path.join("weights", "discriminator_epoch_" + str(epoch) + ".h5"))
        imgs_path = os.path.join(base_path, "imgs")
        save_imgs(generator, discriminator, imgs_path, epoch, (INPUT_LEN,))
| StarcoderdataPython |
1663397 | <gh_stars>1-10
import itertools
from collections import defaultdict
from noise_robust_cobras.noise_robust import find_cycles
from noise_robust_cobras.noise_robust.datastructures.cycle import Cycle
class CycleIndex:
    """
    Cycle index is a class that keeps track of a set of cycles
    Cycles are added through add_cycle_to_index and removed with remove_cycle
    attributes:
        - cycle_index: a dictionary that maps a constraint to all cycles that involve this constraint
        - all_consistent_cycles: all cycles in this cycle index that are consistent (#CL's != 1)
        - all_inconsistent_cycles: all cycles in this cycle index that are inconsistent (#CL == 1)
    Specific subclasses are provided to keep track of specific classes of cycles
    """

    def __init__(self, constraint_index):
        self.constraint_index = constraint_index
        # constraint -> (consistent cycles, inconsistent cycles) containing it
        self.cycle_index = defaultdict(CycleIndex.set_tuple)
        self.all_consistent_cycles = set()
        self.all_inconsistent_cycles = set()

    def replace_constraint(self, old_constraint, new_constraint):
        """Rebuild every indexed cycle that mentions old_constraint so it
        mentions new_constraint instead, keeping the index consistent."""
        all_cycles_with_constraint = self.get_all_cycles_for_constraint(old_constraint)
        new_cycles_with_constraint = [
            cycle.replace_constraint(old_constraint, new_constraint)
            for cycle in all_cycles_with_constraint
        ]
        for cycle_to_remove in all_cycles_with_constraint:
            self.remove_cycle(cycle_to_remove)
        for cycle in new_cycles_with_constraint:
            self.add_cycle_to_index(cycle)

    @staticmethod
    def set_tuple():
        # Default factory for cycle_index entries: (consistent, inconsistent).
        return (set(), set())

    def is_inconsistent(self):
        """True when at least one indexed cycle is inconsistent."""
        return len(self.all_inconsistent_cycles) > 0

    def __contains__(self, item):
        return (
            item in self.all_consistent_cycles or item in self.all_inconsistent_cycles
        )

    def all_cycles(self):
        """Return a new set with every indexed cycle, consistent or not."""
        return self.all_inconsistent_cycles.union(self.all_consistent_cycles)

    def get_all_cycles_for_constraint(self, constraint):
        """Return a new set of all cycles (both kinds) filed under constraint."""
        con_cycles, incon_cycles = self.cycle_index[constraint]
        return con_cycles.union(incon_cycles)

    def get_inconsistent_cycles_for_constraint(self, constraint):
        _, incon_cycles = self.cycle_index[constraint]
        return incon_cycles

    def get_consistent_cycles_for_constraint(self, constraint):
        con_cycles, _ = self.cycle_index[constraint]
        return con_cycles

    def add_cycle_to_index_entry(self, cycle, constraint):
        # File the cycle under this constraint, in the bucket that matches
        # the cycle's (in)consistency.
        consistent_cycles, inconsistent_cycles = self.cycle_index[constraint]
        if cycle.is_inconsistent():
            inconsistent_cycles.add(cycle)
        else:
            consistent_cycles.add(cycle)

    def add_cycle_to_index(self, cycle):
        """
        - inconsistent cycles are added to all_inconsistent_cycles and the inconsistent_cycle_index
        - consistent cycles are added to all_cycles and the cycle_index
        """
        assert cycle
        # add cycle to cycle_index
        for constraint in cycle.constraints:
            self.add_cycle_to_index_entry(cycle, constraint)
        # add cycle to all_inconsistent_cycles or all_consistent_cycles
        if cycle.is_inconsistent():
            self.all_inconsistent_cycles.add(cycle)
        else:
            # the cycle is consistent
            self.all_consistent_cycles.add(cycle)

    def remove_cycle(self, cycle_to_remove):
        # discard() makes removing an unindexed cycle a harmless no-op.
        self.all_consistent_cycles.discard(cycle_to_remove)
        self.all_inconsistent_cycles.discard(cycle_to_remove)
        for con in cycle_to_remove:
            consistent, inconsistent = self.cycle_index[con]
            consistent.discard(cycle_to_remove)
            inconsistent.discard(cycle_to_remove)

    def remove_cycles_with_constraint(self, constraint_to_remove):
        """Drop the constraint's entry and its cycles from the global sets.

        NOTE(review): cycles removed here are not purged from the entries of
        their *other* constraints -- confirm callers account for that.
        """
        con_cycles, incon_cycles = self.cycle_index[constraint_to_remove]
        self.all_consistent_cycles.difference_update(con_cycles)
        self.all_inconsistent_cycles.difference_update(incon_cycles)
        self.cycle_index.pop(constraint_to_remove)
class MinimalCycleIndex(CycleIndex):
    """
    Through add_constraint keeps track of all the minimal cycles in the graph
    (for each constraint only the cycles are kept with the minimal length)
    note: old cycles that are not minimal are not removed from this datastructure
    constraints should be added through add_constraint and removed through remove_cycles_with_constraint to ensure consistency of the data structure
    """

    def __init__(self, constraint_index):
        super().__init__(constraint_index)
        # minimal cycles dict is a dictionary from a constraint to a set of cycles
        # it keeps the cycles that need to be retained for this constraint
        self.minimal_cycles_dict = defaultdict(set)

    def add_constraint(self, constraint):
        """Find and index all minimal-length cycles introduced by constraint."""
        all_cycles = find_cycles.find_all_cycles_with_minimal_length(
            self.constraint_index, constraint
        )
        if all_cycles is not None:
            self.add_minimal_cycles_to_index(all_cycles)

    def add_minimal_cycles_to_index(self, cycles):
        """Index a batch of cycles that all share the same minimal length,
        then try to shorten longer overlapping cycles by composition."""
        minimal_length = len(cycles[0])
        assert all(len(cycle) == minimal_length for cycle in cycles)
        # add the cycles to the index
        for cycle in cycles:
            self.add_cycle_to_index(cycle)
        minimal_cycle: Cycle = cycles[0]
        # remove longer cycles and add smaller minimal cycles
        # this does nothing!
        constraints_that_occur_in_short_cycle = minimal_cycle.constraints
        cycles_to_check = {
            cycle
            for con in constraints_that_occur_in_short_cycle
            for cycle in self.get_all_cycles_for_constraint(con)
            if len(cycle) > minimal_length
        }
        # Composing a longer cycle with the new minimal one may yield a
        # strictly shorter replacement; index it if it is new.
        for old_cycle in cycles_to_check:
            composition = minimal_cycle.compose_with(old_cycle)
            if composition is not None and len(composition) < len(old_cycle):
                if composition not in self:
                    self.add_cycle_to_index(composition)

    def add_cycle_to_index(self, cycle):
        super(MinimalCycleIndex, self).add_cycle_to_index(cycle)
        self.add_cycle_to_minimal_cycle_dict(cycle)

    def check_cycles_for_removal(self, cycles):
        # Evict cycles that are no longer minimal for any of their constraints.
        for cycle in cycles:
            if not self.is_minimal_cycle(cycle):
                self.remove_cycle(cycle)

    def add_cycle_to_minimal_cycle_dict(self, cycle):
        """Register cycle as a minimal candidate for each of its constraints,
        evicting previously-registered longer cycles where appropriate."""
        for constraint in cycle.constraints:
            existing_entry = self.minimal_cycles_dict[constraint]
            if len(existing_entry) == 0:
                self.minimal_cycles_dict[constraint].add(cycle)
            else:
                # you should keep the old cycle to ensure you have an inconsistent cycle
                some_cycle = list(existing_entry)[0]
                old_length = len(some_cycle)
                new_length = len(cycle)
                if new_length < old_length:
                    old_cycles = self.minimal_cycles_dict[constraint]
                    self.minimal_cycles_dict[constraint] = {cycle}
                    self.check_cycles_for_removal(old_cycles)
                elif new_length == old_length:
                    self.minimal_cycles_dict[constraint].add(cycle)
                else:
                    # new_length > old_length
                    pass

    def is_minimal_cycle(self, cycle):
        """True when the cycle is still registered as minimal for some constraint."""
        for constraint in cycle.constraints:
            if cycle in self.minimal_cycles_dict[constraint]:
                return True
        return False

    def remove_cycles_with_constraint(self, constraint_to_remove):
        """Remove the constraint, splicing each pair of cycles through it
        into a composed cycle first so connectivity information survives."""
        involved_cycles = self.get_all_cycles_for_constraint(constraint_to_remove)
        new_cycles = []
        for cycle1, cycle2 in itertools.combinations(involved_cycles, 2):
            new_cycle = cycle1.compose_with(cycle2)
            if new_cycle is None:
                continue
            new_cycles.append(new_cycle)
        for cycle in involved_cycles:
            self.remove_cycle(cycle)
        for new_cycle in new_cycles:
            self.add_cycle_to_index(new_cycle)
        self.cycle_index.pop(constraint_to_remove)

    def remove_cycle(self, cycle_to_remove):
        super(MinimalCycleIndex, self).remove_cycle(cycle_to_remove)
        self.remove_cycle_from_minimal_cycle_dict(cycle_to_remove)

    def remove_cycle_from_minimal_cycle_dict(self, cycle_to_remove):
        for con in cycle_to_remove:
            entry = self.minimal_cycles_dict[con]
            entry.discard(cycle_to_remove)
| StarcoderdataPython |
9338 | <gh_stars>10-100
import re
from argparse import ArgumentParser
from multiprocessing import Pool, Manager, Process
from pathlib import Path
from .utils import UnityDocument
YAML_HEADER = '%YAML'
class UnityProjectTester:
    """
    Class to run tests on a given Unity project folder
    """

    AVAILABLE_COMMANDS = ('test_no_yaml_is_modified',)

    def __init__(self):
        # Parsed argparse namespace; populated by run().
        self.options = None

    def run(self):
        """Parse command-line arguments and dispatch to the chosen command.

        Returns None on success and 2 when argument parsing fails; the
        caller maps None to exit status 0.
        """
        top_parser = ArgumentParser()
        subparser = top_parser.add_subparsers()
        subparser.required = True
        # One subcommand per available test; its docstring doubles as help text.
        for cmd in UnityProjectTester.AVAILABLE_COMMANDS:
            fn = getattr(self, cmd)
            parser = subparser.add_parser(cmd, help=fn.__doc__)
            parser.set_defaults(func=fn)
        top_parser.add_argument('project_path', help='Path to the Unity project folder')
        top_parser.add_argument('--exclude',
                                help='Exclude regexp when searching project files. Can be specified multiple times.',
                                default=None,
                                action='append')
        top_parser.add_argument('--keep-changes',
                                help='If a file changes after serialization, do not revert the changes.',
                                default=False,
                                action='store_true')
        # NOTE(review): typo in the help text below ("Dont't") -- left as-is
        # because it is a runtime string.
        top_parser.add_argument('--dry-run',
                                help='Dont\'t modify.',
                                default=False,
                                action='store_true')
        try:
            self.options = top_parser.parse_args()
        except TypeError:
            top_parser.print_help()
            return 2
        # run given function
        self.options.func()

    def test_no_yaml_is_modified(self):
        """
        Recurse the whole project folder looking for '.asset' files, load and save them all, and check that
        there are no modifications
        """
        if self.options.dry_run:
            print("Dry-run mode enabled: YAMLs won't be dumped.")
            if self.options.keep_changes:
                print("Keep changes mode will not have any effect during dry run.")
        elif self.options.keep_changes:
            print("Keep changes mode enabled: Changes to files will be kept.")
        project_path = Path(self.options.project_path)
        asset_file_paths = [p for p in project_path.rglob('*.asset')]
        print("Found {} '.asset' files".format(len(asset_file_paths)))

        def is_path_included(path):
            # compare regexp against absolute path
            return not any(rexp.search(str(path.resolve())) for rexp in rexps)

        if self.options.exclude is not None:
            rexps = [re.compile(rexp) for rexp in self.options.exclude]
            valid_file_paths = [p for p in filter(is_path_included, asset_file_paths)]
            print("Excluded {} '.asset' files".format(len(asset_file_paths) - len(valid_file_paths)))
        else:
            valid_file_paths = asset_file_paths
        file_results = []
        with Manager() as manager:
            # Manager-backed queue/list are shareable across worker processes.
            print_queue = manager.Queue()
            diff_list = manager.list()
            queue_process = Process(target=UnityProjectTester.read_output, args=(print_queue,))
            queue_process.start()
            with Pool() as pool:
                for f in valid_file_paths:
                    async_res = pool.apply_async(UnityProjectTester.open_and_save,
                                                 (f, print_queue, diff_list, self.options.keep_changes,
                                                  self.options.dry_run))
                    file_results.append((f, async_res))
                pool.close()
                pool.join()
            # signal end of queue with None token
            print_queue.put(None)
            queue_process.join()
            error_results = list(filter(lambda r: not r[1].successful(), file_results))
            if len(error_results):
                # raise the first exception
                file_path, result = error_results[0]
                print("Python process evaluating file {} failed with the following exception:".format(
                    file_path.resolve()), flush=True)
                result.get()
            if len(diff_list):
                print("{} files are different now:".format(len(diff_list)))
                print('\n'.join([str(f.resolve()) for f in diff_list]))

    @staticmethod
    def read_output(print_queue):
        """Printer process body: echo queued messages until a None sentinel."""
        msg = print_queue.get()
        while msg is not None:
            print(msg, flush=True)
            msg = print_queue.get()

    @staticmethod
    def open_and_save(asset_file_path, print_queue, diff_list, keep_changes=False, dry_run=False):
        """Round-trip one asset file through the YAML serializer.

        Appends the path to diff_list when re-serialization changed the file
        bytes; unless keep_changes is set, the original bytes are restored.
        Non-YAML files (wrong or undecodable header) are skipped.
        """
        # check YAML version header, save original content
        with open(str(asset_file_path), 'rb') as fp:
            header = fp.read(len(YAML_HEADER))
            try:
                is_yaml_file = header.decode('utf-8') == YAML_HEADER
            except UnicodeDecodeError:
                is_yaml_file = False
            finally:
                if not is_yaml_file:
                    print_queue.put("Ignoring non-yaml file {}".format(asset_file_path))
                    return
                else:
                    fp.seek(0)
            print_queue.put("Processing {}".format(asset_file_path))
            a_file_content = fp.read()
        doc = UnityDocument.load_yaml(str(asset_file_path))
        if dry_run:
            return
        try:
            doc.dump_yaml()
            with open(str(asset_file_path), 'rb') as fp:
                b_file_content = fp.read()
            # compare
            if a_file_content != b_file_content:
                diff_list.append(asset_file_path)
                if not keep_changes:
                    with open(str(asset_file_path), 'wb') as fp:
                        fp.write(a_file_content)
        except Exception:
            # Restore the original bytes on failure, then propagate.
            with open(str(asset_file_path), 'wb') as fp:
                fp.write(a_file_content)
            raise
if __name__ == '__main__':
    # run() returns None on success; coerce that to exit status 0.
    exit(UnityProjectTester().run() or 0)
| StarcoderdataPython |
75188 | from array import array
import os
import numpy as np
import imageio
imageio.plugins.ffmpeg.download()
from moviepy.editor import *
import pygame
import sys
import uuid
import nltk
from nltk.corpus import PlaintextCorpusReader
import random
import librosa as lib
from matplotlib import pyplot as plt
import pydub as dub
from pydub import AudioSegment
import math as m
# Build word-frequency and bigram conditional-probability models from the
# feedback corpus (used to drive dance-move selection).
corpus_root = 'C:/Users/IBM/Desktop/FYP Codes/untitled2'
wordlists = PlaintextCorpusReader(corpus_root, '.*')
words = wordlists.words('feedback.txt')
print(words)
freq_dance = nltk.FreqDist(words)
cfreq_dance_2gram = nltk.ConditionalFreqDist(nltk.bigrams(words))
# Maximum-likelihood conditional probabilities for word bigrams.
cprob_dance_2gram = nltk.ConditionalProbDist(cfreq_dance_2gram, nltk.MLEProbDist)
import numpy as np
#np.set_printoptions(threshold='nan')
# Module-level state shared by the helpers below.
mazy_f=[];
moves_90={};  # move catalogs keyed by tempo (90/60/120 bpm)
moves_60={};
moves_120={};
repeat_t=3
selection_t=9
beats_f=[];  # beat entries; each entry's first element is a time in seconds
bpm_range =[];  # parallel lists filled by motion_data(): bpm, body part, clip count
body_range=[];
count_range=[];
file_names=[];  # clip file names built by list_names()
end_string='.mov'
formate_song='mp3'
center_string='_'  # separator used when assembling clip file names
file_duration=[];  # per-clip durations (seconds), filled by file_duration_cal()
combination_index=[];  # indices into file_names/file_duration of the chosen sequence
song_file_in='95.mp3'
song_file_out='output.mp3'  # crop_audio() writes here
time_crop=60000;  # crop length in milliseconds
beat_t_labels={}
def round_beats(beats_time):
    """Round every beat time in `beats_time` to 2 decimal places, in place.

    Fix: the original kept a manual index counter alongside an unused loop
    variable; enumerate() expresses the same in-place update directly.
    """
    for idx, t in enumerate(beats_time):
        beats_time[idx] = round(t, 2)
#give time in ms
#https://github.com/jiaaro/pydub/blob/master/API.markdown
def crop_audio(song_filename, time_crop, formate_song):
    """Crop the first `time_crop` milliseconds of the song and export the
    result as MP3 to the module-level `song_file_out` path."""
    audio = AudioSegment.from_file(song_filename, format=formate_song)
    clipped = audio[:time_crop]
    clipped.export(song_file_out, format="mp3")
#http://librosa.github.io/librosa/generated/librosa.beat.tempo.html
def beats_array(song_filename):
    """Track the beats of *song_filename*.

    Returns a pair ``(beat_times, tempo)``: beat positions in seconds
    (rounded in place to 2 decimals) and the estimated global BPM.
    """
    samples, sample_rate = lib.load(song_filename)
    tempo, beat_frames = lib.beat.beat_track(y=samples, sr=sample_rate)
    # Convert frame indices to timestamps in seconds.
    beat_times = lib.frames_to_time(beat_frames, sr=sample_rate)
    round_beats(beat_times)
    return beat_times, tempo
#print (beats_t)
# print (tempo)
#print (beats_time[1])
# reads the data from the file to populate the respective lists
def motion_data(filename):
    """Load motion-clip metadata from *filename* into the module lists.

    Each line holds whitespace-separated fields; the last three are
    appended (in pop order, i.e. last field first) to ``count_range``,
    ``body_range`` and ``bpm_range`` respectively.
    """
    # Context manager fixes the original's leaked file handle; `fields`
    # no longer shadows the built-in `list`.
    with open(filename) as fh:
        for line in fh:
            fields = line.strip().split(" ")
            count_range.append(fields.pop())
            body_range.append(fields.pop())
            bpm_range.append(fields.pop())
#populate the list of the files with the names based on the bpm
#populates file_names on given bpm
def list_names(bpm):
    """Populate ``file_names`` with clip names recorded at *bpm*.

    For each matching motion entry, two clip names are added per count:
    a smooth ('Y') and a non-smooth ('N') variant, e.g. ``90_HAND_Y_3.mov``.
    """
    # The original's unused `found` flag and manual index counter are gone;
    # enumerate yields the entry indices directly.
    matches = [i for i, entry_bpm in enumerate(bpm_range) if int(entry_bpm) == bpm]
    for i in matches:
        for clip_no in range(1, int(count_range[i]) + 1):
            base = bpm_range[i] + center_string + body_range[i] + center_string
            file_names.append(base + 'Y' + center_string + str(clip_no) + end_string)
            file_names.append(base + 'N' + center_string + str(clip_no) + end_string)
#print(file_names);
#calculates the duration in secs for each clip
#https://zulko.github.io/moviepy/
def file_duration_cal():
    """Fill ``file_duration`` with the length (seconds) of every clip in ``file_names``."""
    for clip_name in file_names:
        clip = VideoFileClip(clip_name)
        file_duration.append(clip.duration)
#https://zulko.github.io/moviepy/
def play_videofile(file_name):
    # NOTE(review): this only loads the clip into memory; nothing is
    # actually previewed or played. Presumably clip.preview() was
    # intended — confirm before relying on this function.
    clip = VideoFileClip(file_name)
#sum of combined indces.
def sum_seq():
    """Return the total duration (seconds) of the clips selected so far.

    ``combination_index`` holds indices into the module-level
    ``file_duration`` list.
    """
    # Built-in sum() over a generator replaces the manual accumulator loop.
    return sum(file_duration[i] for i in combination_index)
def search_dif(temp_sec, beats_f):
    """Distance (seconds) from *temp_sec* to the first beat at or after it.

    ``beats_f`` is an ordered sequence of ``(time, bpm)`` pairs.  When
    *temp_sec* lies beyond the final beat, the distance to that last
    beat is returned instead.
    """
    idx = 0
    total = len(beats_f)
    # Advance until we pass temp_sec, stopping early on an exact hit.
    while idx < total and temp_sec >= beats_f[idx][0]:
        if beats_f[idx][0] == temp_sec:
            break
        idx += 1
    # Ran off the end: clamp to the last beat.
    if idx >= total:
        idx = total - 1
    return abs(beats_f[idx][0] - temp_sec)
def search_next(beats_f, time):
    """Return the BPM label of the first beat at or after *time*.

    ``beats_f`` holds ordered ``(time, bpm)`` pairs; ``None`` is returned
    when *time* is past the final beat.
    """
    for beat_time, bpm in beats_f:
        if beat_time >= time:
            return bpm
    return None
def populate_names():
    """Load the duration of every known dance clip, grouped by BPM.

    Fills the module-level dicts ``moves_60`` / ``moves_90`` / ``moves_120``
    with clip file name -> duration in seconds.
    """
    global moves_60, moves_90, moves_120
    # (name prefix, number of clips, destination dict) per supported BPM.
    groups = [
        ("60_", 10, moves_60),
        ("90_", 22, moves_90),
        ("120_", 28, moves_120),
    ]
    for prefix, clip_count, store in groups:
        for clip_no in range(1, clip_count + 1):
            name = prefix + str(clip_no) + ".mov"
            clip = VideoFileClip(name)
            store[name] = clip.duration
def regions(dtempo, beats_time_c):
    """Split a dynamic-tempo curve into constant-BPM regions.

    Returns ``(change, beats)``: ``change`` holds region boundary times as
    consecutive (start, end) pairs and ``beats`` holds one BPM value per
    region.  Interior boundaries are emitted twice — once closing the
    previous region and once opening the next — because the consumer
    (return_label) walks ``change`` pairwise.
    """
    change = [beats_time_c[0]]
    beats = [dtempo[0]]
    for i in range(len(dtempo) - 1):
        if dtempo[i] != dtempo[i + 1]:
            change.append(beats_time_c[i])
            beats.append(dtempo[i + 1])
            change.append(beats_time_c[i])
    change.append(beats_time_c[len(beats_time_c) - 1])
    print(change)
    print(beats)
    return change, beats
#define regions with change[i]-change[i+1] with there valyue on corresponsing at beats[i]
def return_label(change, beat_value, local_beat):
    """Return the BPM label of the tempo region containing *local_beat*.

    ``change`` holds region boundaries in consecutive (start, end) pairs
    and ``beat_value[k]`` is the BPM of the k-th pair.  Regions are
    half-open [start, end).

    Bug fix: the original returned None when no region matched — which
    happens for the very last boundary time — and that None poisoned the
    numeric label array built by place_label().  Out-of-range beats now
    fall back to the final region's BPM.
    """
    count = 0
    for i in range(0, len(change) - 1, 2):
        if change[i] <= local_beat < change[i + 1]:
            return beat_value[count]
        count += 1
    # Clamp beats outside every half-open region to the last region.
    return beat_value[-1]
def place_label(beats_loc_act, dtempo, beats_time_c):
    """Label every detected beat with the BPM of its tempo region.

    Builds — and stores in the module-level ``beat_t_labels`` — an
    (n, 2) array of ``[beat_time, bpm]`` rows, which is also returned.
    """
    global beat_t_labels
    # Dead pre-initialisation of change/beat_value removed: regions()
    # is the single source of both lists.
    change, beat_value = regions(dtempo, beats_time_c)
    beat_t_labels = np.zeros(shape=(len(beats_loc_act), 2))
    for i in range(len(beats_loc_act)):
        beat_t_labels[i][0] = beats_loc_act[i]
        beat_t_labels[i][1] = return_label(change, beat_value, beats_loc_act[i])
    print(beat_t_labels)
    return beat_t_labels
#loc
#feedback functin
def getFeedBack(combination_seq):
    """Prompt the user for a 1-5 smoothness rating and log it.

    The rating and the chosen move sequence are appended to feedback.txt
    in the format consumed by the n-gram training corpus: the rating is
    encoded as that many leading '*' marks, followed by the move names
    (without their '.mov' suffix) and a closing '*'.

    Fixes over the original: the Python-2 branch used to spin forever
    without ever reading input; non-numeric input used to crash with
    ValueError; the file handle is now closed via a context manager.
    """
    print("Please watch your video and provide feedback in range for 1-5 for smoothness of the dance.\n")
    print("Enter feedback 1-5:")
    while True:
        raw = input("")
        try:
            rating = int(raw)
        except ValueError:
            # Re-prompt on non-numeric input instead of crashing.
            print("Please enter a valid feedback again between 1-5 inclusive.")
            continue
        if 0 < rating < 6:
            with open("feedback.txt", "a+") as fh:
                for _ in range(rating):
                    fh.write("* ")
                for move in combination_seq:
                    fh.write(move.replace(".mov", "") + " ")
                fh.write("*\n")
            break
        print("Please enter a valid feedback again between 1-5 inclusive.")
def dynamic_temp(song_file_in):
    """Estimate a per-frame (dynamic) tempo curve and label each beat.

    The raw dynamic BPM estimates are quantised to the three supported
    dance speeds (60 / 90 / 120); detected beats are then labelled with
    their region's BPM via place_label(), whose (n, 2) result is returned.
    """
    y, source_file = lib.load(song_file_in)
    tempo, beats = lib.beat.beat_track(y=y, sr=source_file)
    plot_beats = lib.frames_to_time(beats, sr=source_file)
    # Keep timestamps at 2 decimals to match the rest of the pipeline.
    round_beats(plot_beats)
    onset_env = lib.onset.onset_strength(y, sr=source_file)
    # aggregate=None yields one tempo estimate per onset frame.
    dtempo = lib.beat.tempo(onset_envelope=onset_env, sr=source_file, aggregate=None)
    # Quantise the dynamic BPM curve to the nearest supported speed.
    for i in range(len(dtempo)):
        if 60 <= dtempo[i] < (60 + 90) / 2:
            dtempo[i] = 60
        elif (60 + 90) / 2 <= dtempo[i] < (90 + 120) / 2:
            dtempo[i] = 90
        else:
            dtempo[i] = 120
    # NOTE: the original recomputed beat_track() here with identical
    # arguments and discarded nothing new — that redundant (expensive)
    # call was removed.
    # Time axis for the per-frame tempo curve.
    beats_time = lib.frames_to_time(np.arange(len(dtempo)))
    return place_label(plot_beats, dtempo, beats_time)
# print(len(beats_time))
#
# print(beats_time)
# print(len(dtempo))
# plt.plot(beats_time, dtempo,linewidth=1.5)
# plt.plot(plot_beats,np.full(len(plot_beats),100),'ro')
# plt.show()
def return_max(array_in, last_move):
    """Pick the candidate clip with the highest bigram transition probability.

    ``array_in`` is a list of (clip_name, duration, beat_distance) rows
    sorted by beat_distance; ``last_move`` is the previously chosen clip
    name.  Clips already used ``repeat_t`` times (per the module-level
    ``combination_index``) are skipped.  Returns the winning row.
    """
    max_prob = 0
    print(max_prob)
    index = 0
    # Seed with the first candidate that is not over-used.
    for i in range (0,len(array_in),1):
        if combination_index.count(array_in[i][0])<repeat_t:
            # NOTE(review): last_move is NOT stripped of ".mov" here,
            # unlike in the loop below — the seed probability may always
            # be 0 for '.mov'-suffixed moves; confirm intent.
            max_prob = cprob_dance_2gram[last_move].prob(array_in[i][0].replace(".mov",""))
            index=i
            break
    # print("PROB samples", cprob_brown_2gram["90"].samples())
    # Re-rank the selection_t beat-closest candidates by bigram probability.
    for i in range(0, selection_t,1):
        print("range")
        print(cprob_dance_2gram[last_move.replace(".mov","")].prob(array_in[i][0].replace(".mov","")))
        if max_prob < cprob_dance_2gram[last_move.replace(".mov","")].prob(array_in[i][0].replace(".mov","")) and combination_index.count(array_in[i][0])<repeat_t:
            index = i
            print("max 1",max_prob)
            max_prob = cprob_dance_2gram[last_move.replace(".mov","")].prob(array_in[i][0].replace(".mov",""))
            print("max2",max_prob,"after")
    return array_in[index]
def mazy_dance_2(duration_song,beats_f):
    """Greedily assemble a clip sequence for a dynamically-labelled song.

    ``beats_f`` is a sequence of (beat_time, quantised_bpm) rows.  Starting
    from a random clip of the opening BPM, each step picks — from the clip
    pool matching the *next* beat's BPM — the candidate whose end lands
    closest to a beat, re-ranked by bigram probability in return_max().
    Chosen clip names are appended to the module-level
    ``combination_index``; the per-step beat misalignments are returned.
    """
    mazy_factor=[];
    sum_far=0
    a=beats_f[0][1] #type of the dance bpm we need
    if a==60:
        x=list(moves_60.keys())[random.randint(1,len(moves_60)-1)]
        y=moves_60[x]
    elif a==90:
        x = list(moves_90.keys())[random.randint(1, len(moves_90) - 1)]
        y = moves_90[x]
    else:
        x = list(moves_120.keys())[random.randint(1, len(moves_120) - 1)]
        y = moves_120[x]
    combination_index.append(x)
    sum_far=sum_far+y;
    while (sum_far<=duration_song):
        min=1000000;
        # Candidate tables: rows of (clip_name, duration, beat_distance).
        array_60=np.zeros(shape=(len(moves_60),3),dtype=object)
        array_90=np.zeros(shape=(len(moves_90), 3),dtype=object)
        array_120=np.zeros(shape=(len(moves_120), 3),dtype=object)
        index=0;
        x=0;
        c=0;
        #select the next beat based on the sequence you have combined
        next_beat=search_next(beats_f,sum_far)
        print("Next Beat",next_beat,"sum far",sum_far)
        if next_beat==60:
            for k,v in moves_60.items(): # we need to see the file of the type the beat is
                temp = sum_far + v;
                y = search_dif(temp, beats_f)
                array_60[c][0]=k
                array_60[c][1]=v
                array_60[c][2]=y
                c = c + 1
            array_60=sorted(array_60, key=lambda x: x[2])
            #pass it to lambda function
            temp2=return_max(array_60,combination_index[len(combination_index)-1])
            min=temp2[2]
            index=temp2[0]
            x=temp2[1]
        elif next_beat==90:
            for k,v in moves_90.items(): # we need to see the file of the type the beat is
                temp = sum_far + v;
                y = search_dif(temp, beats_f)
                array_90[c][0]=k
                array_90[c][1]=v
                array_90[c][2]=y
                c=c+1
            array_90=sorted(array_90, key=lambda x: x[2])
            #pass it to lambda function
            temp2=return_max(array_90,combination_index[len(combination_index)-1])
            min = temp2[2]
            index = temp2[0]
            x = temp2[1]
        else:
            for k,v in moves_120.items(): # we need to see the file of the type the beat is
                temp = sum_far + v;
                y = search_dif(temp, beats_f)
                array_120[c][0]=k
                array_120[c][1]=v
                array_120[c][2]=y
                c = c + 1
            array_120=sorted(array_120, key=lambda x: x[2])
            #pass it to lambda function
            temp2=return_max(array_120,combination_index[len(combination_index)-1])
            min = temp2[2]
            index = temp2[0]
            x = temp2[1]
        print("adding move")
        print(temp2,index,min)
        mazy_factor.append(min);
        combination_index.append(index)
        # NOTE(review): here x holds the chosen clip's *duration*
        # (temp2[1]); it shadows the clip-name variable used earlier.
        sum_far = sum_far + x;
    return mazy_factor
def mazy_dance(duration_song, beats_f):
    """Greedily assemble a clip sequence covering *duration_song* seconds.

    Starting from a random clip, repeatedly appends to the module-level
    ``combination_index`` the index of the clip whose end time lands
    closest to a musical beat.  Returns the list of per-step beat
    misalignments.
    """
    misalignments = []
    print(len(file_duration))
    # Seed the sequence with a random clip index.
    combination_index.append(random.randint(1, len(file_duration) - 1))
    elapsed = sum_seq()
    while elapsed <= duration_song:
        best_diff = 1000000  # renamed: the original shadowed built-in `min`
        best_index = 0
        # Pick the clip whose end lies nearest to a detected beat.
        for idx, clip_len in enumerate(file_duration):
            diff = search_dif(elapsed + clip_len, beats_f)
            if diff < best_diff:
                best_diff = diff
                best_index = idx
        misalignments.append(best_diff)
        combination_index.append(best_index)
        elapsed = sum_seq()
    return misalignments
# list_names(90);
#crop_audio('abc.mp3',time_crop,formate_song);
def load_data():
    """Populate the motion metadata lists from the bundled Motion.txt file."""
    motion_data("Motion.txt")
#function needs to deal with the problem of dynamic temp and the numbe of beats it has, currently not adding up together.
# def smoithing_differenc(video_file1,video_file2):
# seq = VideoFileClip(video_file1)
# img1=seq.duration-0.1; #since it might miss out last frame therefore we need ot have this.
# seq.save_frame("frame1.jpeg",img1)
# seq.save_frame("frame2.jpeg", 0.0)
# x=cv2.cvtColor(cv2.imread("frame1.jpeg"), cv2.COLOR_BGR2GRAY)
# y=cv2.cvtColor(cv2.imread("frame2.jpeg"), cv2.COLOR_BGR2GRAY)
#
# ret, x = cv2.threshold(x, 126, 256, cv2.THRESH_BINARY_INV)
# cv2.imshow('',x)
# ret, y = cv2.threshold(y, 126, 256, cv2.THRESH_BINARY_INV)
# cv2.imshow('messigray.png',cv2.absdiff(x,y))
# cv2.waitKey()
def main(song_file_in):
    """Build a dance video for *song_file_in* using the static-tempo pipeline.

    The song is trimmed to start/end on detected beats, its global BPM is
    quantised to a supported clip speed, a clip sequence is assembled with
    mazy_dance(), the clips are concatenated over the audio, and the user
    is asked for feedback.  Returns the generated video file name.
    """
    populate_names()
    global beats_f
    beats_f, temp_f = beats_array(song_file_in)
    # Trim the audio (pydub slices in ms) so it starts on the first beat
    # and ends on the last one.
    sound = AudioSegment.from_file(song_file_in, format=formate_song)
    total_ms = len(sound)
    keep_ms = total_ms - beats_f[0] * 1000
    sound = sound[-keep_ms:]
    print(keep_ms)
    keep_ms = keep_ms - (total_ms - beats_f[len(beats_f) - 1] * 1000)
    print(keep_ms)
    sound = sound[:keep_ms]
    sound.export(song_file_in, format=formate_song)
    print("Bpm for this song is", temp_f)
    print("Beats are found at following locations", beats_f)
    # Quantise the global tempo to one of the supported clip speeds.
    if temp_f >= 60 and temp_f < (60 + 90) / 2:
        type_temp = 60
    elif temp_f >= (60 + 90) / 2 and temp_f < (90 + 120) / 2:
        type_temp = 90
    else:
        type_temp = 120
    print(type_temp)
    # Load the clip names and durations for the chosen speed.
    list_names(type_temp)
    file_duration_cal()
    song_input = AudioSegment.from_file(song_file_in, format=formate_song)
    duration_song = round(len(song_input) / 1000, 2)
    print("Song has following duration", duration_song)
    global mazy_f
    mazy_f = mazy_dance(duration_song, beats_f)
    print(mazy_f)
    print(combination_index)
    # Bug fix: mazy_dance() records *indices* into file_names, but the
    # original passed the raw integers straight to VideoFileClip (which
    # expects a filename) and to getFeedBack (whose .replace() call would
    # raise AttributeError on an int).  Map indices to names first.
    combination_names = [file_names[i] for i in combination_index]
    clips = [VideoFileClip(n) for n in combination_names]
    name = str(uuid.uuid4()) + ".mp4"
    final_clip = clips[0]
    for clip in clips[1:]:
        final_clip = concatenate_videoclips([final_clip, clip])
    final_clip.write_videofile(name, audio=song_file_in)
    print(name + "Is your dance")
    getFeedBack(combination_names)
    return name
def main2(song_file_in):
    """Build a dance video using the dynamic-tempo pipeline.

    The song is trimmed to start/end on detected beats (and capped at
    120 s), a clip sequence is assembled with mazy_dance_2(), the clips
    are concatenated over the audio, the result is loaded for playback
    and the user is asked for feedback.  Returns the video file name.
    """
    populate_names()
    # load_data()
    global beats_f
    # beats_f is an (n, 2) array of [beat_time, quantised_bpm] rows.
    beats_f = dynamic_temp(song_file_in)
    sound = AudioSegment.from_file(song_file_in, format=formate_song)
    x = len(sound)
    a = x
    # Drop leading audio before the first beat (pydub slices in ms).
    x = x - beats_f[0][0] * 1000
    sound = sound[-x:]
    if len(sound)/1000 >120:
        sound=sound[:120000]
    print(x)
    # Drop trailing audio after the last beat.
    x = x - (a - beats_f[len(beats_f) - 1][0] * 1000)
    print(x)
    sound = sound[:x]
    sound.export(song_file_in, format=formate_song)
    # decide the type of file we want to load.
    song_input = AudioSegment.from_file(song_file_in, format=formate_song)
    duration_song = round(len(song_input) / 1000, 2)
    print("Song has following duration", duration_song)
    global mazy_f
    mazy_f = mazy_dance_2(duration_song, beats_f);
    print(mazy_f)
    print(combination_index)
    # mazy_dance_2 stores clip *names*, so they can be opened directly.
    combination_seq = combination_index
    combination_seq_video = []
    for i in combination_seq:
        temp = VideoFileClip(i)
        combination_seq_video.append(temp);
    name = str(uuid.uuid4()) + ".mp4"
    final_clip = combination_seq_video[0];
    combination_seq_video.remove(combination_seq_video[0]);
    for i in combination_seq_video:
        final_clip = concatenate_videoclips([final_clip, i])
    audioclip = AudioFileClip(song_file_in)
    final_clip.write_videofile(name, audio=song_file_in);
    # final_clip.set_audio(audioclip)
    print(name + "Is your dance");
    play_videofile(name)
    getFeedBack(combination_seq)
    return name;
if __name__ == '__main__':
    # Generate a dance video for the bundled demo track using the
    # dynamic-tempo pipeline.
    audio = "audio.mp3"
    main2(audio)
    # dynamic_temp(audio)
#smoithing_differenc("120_HAND_Y_3.mov","120_HAND_Y_3.mov")
#class for plotting graphs
# stats = graphs()
# name=main(audio)
# name="9341878f-4507-4560-a1c6-b176a39c7b03.mp4"
#x=0.5
#stats.plot_mazy(mazy_factor=mazy_f,width_bar=x)
# stats.rms_plot("Bom.mp3")
# stats.plot_show()
# os.system('python video_energy.py '+name)
# clip = VideoFileClip(name)
# clip.preview();
#stats.video_plot(name)
| StarcoderdataPython |
54876 | import unittest
from django.core.exceptions import ValidationError
from petstagram.common.validators import MaxFileSizeInMbValidator
class FakeFile:
    # Minimal stand-in for a file object: the validator under test only
    # reads `size` (presumably bytes — confirm against the validator).
    size = 5
class FakeImage:
    # Minimal stand-in for an image field value wrapping a FakeFile.
    file = FakeFile()
class MaxFileSizeInMbValidatorTests(unittest.TestCase):
    """Unit tests for MaxFileSizeInMbValidator using fake file objects."""

    def test_when_file_is_bigger__expect_to_raise(self):
        # A 0.000001 MB limit is far below the fake file's 5-byte size.
        checker = MaxFileSizeInMbValidator(0.000001)
        image = FakeImage()
        with self.assertRaises(ValidationError) as context:
            checker(image)
        self.assertIsNotNone(context.exception)

    def test_when_file_size_is_valid__expect_to_do_nothing(self):
        # A 1 MB limit comfortably accommodates the 5-byte fake file;
        # success means no exception is raised.
        checker = MaxFileSizeInMbValidator(1)
        image = FakeImage()
        checker(image)
| StarcoderdataPython |
3381991 | <reponame>astrobase/cli
import shutil
import typer
from astrobase_cli.schemas.command import Command
app = typer.Typer(help="Run preflight and status checks.")


@app.command()
def commands() -> None:
    """
    Check that the cli can access certain commands in your PATH.
    """
    message = ""
    for command in list(Command):
        command_location = shutil.which(command.value)
        if command_location is None:
            # Bug fix: the original reported missing commands as
            # "<name> found at: <name> not found in path!", which is
            # contradictory; report them plainly as missing instead.
            message += f"{command.value} not found in path!\n"
        else:
            message += f"{command.value} found at: {command_location}\n"
    typer.echo(message.strip())
| StarcoderdataPython |
1658665 | <filename>feeds/migrations/0004_auto_20180802_0226.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-08-02 02:26
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: renames the `users` model to
    # `Registers`; RenameModel preserves the underlying table data.

    dependencies = [
        ('feeds', '0003_delete_hello'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='users',
            new_name='Registers',
        ),
    ]
| StarcoderdataPython |
import time

# Digit-sum demo: add up every decimal digit of the numbers 0..N and
# report how long the computation took.
end = int(input('Pick a number. '))  # bug fix: input() returns str in Python 3
start_time = time.time()

# Sum digits directly while iterating; the original materialised the
# whole range into a list first and shadowed the built-in `sum`.
digit_total = 0
for number in range(end + 1):
    for digit in str(number):
        digit_total += int(digit)

print("The sum of all the digits of the numbers between 1 and {} is {}.".format(end, digit_total))
print("--- %.6s seconds ---" % (time.time() - start_time))
1692007 | <filename>examples/ptb/char_rnn.py
#!/usr/bin/env python
# ******************************************************************************
# Copyright 2017-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
"""
Train a network with one recurrent layer of tanh units on the Penn
treebank data parsing on character-level.
Reference:
Advances in optimizing recurrent networks `[Pascanu2012]`_
.. _[Pascanu2012]: http://arxiv.org/pdf/1212.0901.pdf
Usage:
python examples/ptb/char_rnn.py -b gpu -r 0 -t 6800 --iter_interval 680
"""
from contextlib import closing
import neon as ng
from neon.frontend import (Layer, Sequential, Preprocess, BiRNN, Recurrent, Affine,
Softmax, Tanh, LookupTable)
from neon.frontend import UniformInit, RMSProp
from neon.frontend import ax, loop_train
from neon.frontend import NeonArgparser, make_bound_computation, make_default_callbacks
from neon.frontend import SequentialArrayIterator
import neon.transformers as ngt
from neon.frontend import PTB
# parse the command line arguments
parser = NeonArgparser(__doc__)
parser.add_argument('--layer_type', default='rnn', choices=['rnn', 'birnn'],
                    help='type of recurrent layer to use (rnn or birnn)')
parser.add_argument('--use_lut', action='store_true',
                    help='choose to use lut as first layer')
parser.set_defaults()
args = parser.parse_args()

# these hyperparameters are from the paper
args.batch_size = 50
time_steps = 150   # unrolled sequence length per sample
hidden_size = 500  # recurrent state width

# download penn treebank
tree_bank_data = PTB(path=args.data_dir)
ptb_data = tree_bank_data.load_data()
train_set = SequentialArrayIterator(ptb_data['train'], batch_size=args.batch_size,
                                    time_steps=time_steps, total_iterations=args.num_iterations)
valid_set = SequentialArrayIterator(ptb_data['valid'], batch_size=args.batch_size,
                                    time_steps=time_steps)

inputs = train_set.make_placeholders()
# Output axis length = size of the character vocabulary.
ax.Y.length = len(tree_bank_data.vocab)
def expand_onehot(x):
    """Return *x* expanded to a one-hot encoding along the class axis ax.Y.

    NOTE(review): appears unused — the Preprocess layer below builds an
    identical lambda; confirm before removing.
    """
    return ng.one_hot(x, axis=ax.Y)
# weight initialization
init = UniformInit(low=-0.08, high=0.08)

# First layer: either a learned embedding lookup or a fixed one-hot
# expansion, depending on --use_lut.
if args.use_lut:
    layer_0 = LookupTable(50, 100, init, update=True, pad_idx=0)
else:
    layer_0 = Preprocess(functor=lambda x: ng.one_hot(x, axis=ax.Y))

if args.layer_type == "rnn":
    rlayer = Recurrent(hidden_size, init, activation=Tanh())
elif args.layer_type == "birnn":
    rlayer = BiRNN(hidden_size, init, activation=Tanh(), return_sequence=True, sum_out=True)

# model initialization
seq1 = Sequential([layer_0,
                   rlayer,
                   Affine(init, activation=Softmax(), bias_init=init, axes=(ax.Y,))])

optimizer = RMSProp()

# Training graph: softmax cross-entropy against one-hot targets.
train_prob = seq1(inputs['inp_txt'])
train_loss = ng.cross_entropy_multi(train_prob,
                                    ng.one_hot(inputs['tgt_txt'], axis=ax.Y),
                                    usebits=True)
batch_cost = ng.sequential([optimizer(train_loss), ng.mean(train_loss, out_axes=())])
train_outputs = dict(batch_cost=batch_cost)

# Evaluation graph built with inference mode (no dropout/updates).
with Layer.inference_mode_on():
    inference_prob = seq1(inputs['inp_txt'])
    eval_loss = ng.cross_entropy_multi(inference_prob,
                                       ng.one_hot(inputs['tgt_txt'], axis=ax.Y),
                                       usebits=True)
eval_outputs = dict(cross_ent_loss=eval_loss)

# Now bind the computations we are interested in
with closing(ngt.make_transformer()) as transformer:
    train_computation = make_bound_computation(transformer, train_outputs, inputs)
    loss_computation = make_bound_computation(transformer, eval_outputs, inputs)
    cbs = make_default_callbacks(transformer=transformer,
                                 output_file=args.output_file,
                                 frequency=args.iter_interval,
                                 train_computation=train_computation,
                                 total_iterations=args.num_iterations,
                                 eval_set=valid_set,
                                 loss_computation=loss_computation,
                                 use_progress_bar=args.progress_bar)
    loop_train(train_set, cbs)
| StarcoderdataPython |
12238 | <filename>scripts/analysis_one.py
name = input('Enter file name: ')

# Parse whitespace-separated floats from the file: values <= 3.5 are
# treated as extension readings, values >= 4 as force readings
# (values strictly between 3.5 and 4 are dropped, as before).
ext_values = list()
force_values = list()
with open(name) as f:
    for line in f:
        numbers = [float(val) for val in line.rstrip().split()]
        for num in numbers:
            if num <= 3.5:
                ext_values.append(num)
            if num >= 4:
                force_values.append(num)

import itertools  # NOTE(review): unused in this script
import matplotlib.pyplot as plt
import seaborn as sns  # NOTE(review): unused in this script

# Bug fix: the original `print(f, ',', b, file=fh)` wrote "value , value"
# with spaces around the comma, producing malformed CSV; emit clean
# "value,value" rows instead. (The loop variable also shadowed the file
# handle `f` above.)
with open('neu_sam_4b.csv', 'w') as fh:
    for ext_val, force_val in zip(ext_values, force_values):
        print('{},{}'.format(ext_val, force_val), file=fh)

# Plot force against extension.
ext = ext_values
force = force_values
plt.plot(ext, force)
plt.xlabel('Extension')
plt.ylabel('Force')
plt.title('sample with 0.25wt%')
plt.tight_layout()
plt.show()
69767 | <reponame>ShAlireza/Yektanet
# Public surface of this package: re-export the URL-shortening service so
# callers can import it from the package root.
from .short_url_service import ShortenedURLService

__all__ = ('ShortenedURLService',)
| StarcoderdataPython |
3234909 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from eventbrite.compat import PY3
from eventbrite.models import EventbriteObject
from requests.structures import CaseInsensitiveDict
from .base import unittest, mock
class TestEventbriteObject(unittest.TestCase):
    """Tests for EventbriteObject.create() built from a mocked HTTP response."""

    def setUp(self):
        # Fake a successful /users/me response without touching the network.
        self.url = "https://www.eventbriteapi.com/v3/users/me/"
        self.response = mock.Mock()
        self.response.json = lambda: {u'id': u'1234567890', u'first_name': u'Daniel', u'last_name': u'Greenfeld', u'emails': [{u'verified': True, u'email': u'<EMAIL>', u'primary': True}], u'name': u'<NAME>'} # noqa
        self.response.url = self.url
        self.response.ok = True
        self.response.elapsed = timedelta(5)
        self.response.headers = CaseInsensitiveDict()
        self.response.reason = u"OK"
        self.response.status_code = 200
        self.evbobject = EventbriteObject.create(self.response)

    def test_create_from_payload(self):
        evbobject = self.evbobject
        # Payload keys are exposed directly as the object's mapping keys,
        # and response metadata is mirrored onto attributes.
        self.assertEqual(
            sorted([u'id', u'first_name', u'last_name', u'emails', u'name']),
            sorted(evbobject.keys())
        )
        self.assertTrue(evbobject.ok)
        self.assertEqual(
            self.url,
            evbobject.resource_uri
        )
        self.assertTrue(isinstance(evbobject.elapsed, timedelta))
        self.assertTrue(isinstance(evbobject.headers, CaseInsensitiveDict))

    @unittest.skipIf(condition=PY3, reason='Python 3 appears to return stdout')
    def test_pretty(self):
        # Python 2 only: `pretty` is the pprint rendering of the payload.
        self.assertEqual(
            self.evbobject.pretty,
            "{u'emails': [{u'email': u'<EMAIL>',\n u'primary': True,\n u'verified': True}],\n u'first_name': u'Daniel',\n u'id': u'1234567890',\n u'last_name': u'Greenfeld',\n u'name': u'<NAME>'}" # noqa
        )
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| StarcoderdataPython |
1700459 | """
This module extends and defines Torndo RequestHandlers.
Classes:
* MainHandler - this is the main RequestHandler subclass, that is mapped to
all URIs and serves as a dispatcher handler.
* DefaultHandler - this class serves all the URIs that are not defined
within the application, returning `404` error.
"""
import re
import json
import inspect
from inspect import Parameter
import logging
import datetime
from tornado.web import RequestHandler
from tornado.web import MissingArgumentError
from untt.util import parse_docstring
from untt.ex import ValidationError
from calm.ex import (ServerError, ClientError, BadRequestError,
MethodNotAllowedError, NotFoundError, DefinitionError)
from calm.param import QueryParam, PathParam
__all__ = ['MainHandler', 'DefaultHandler']
class MainHandler(RequestHandler):
    """
    The main dispatcher request handler.

    This class extends the Tornado `RequestHandler` class, and it is mapped to
    all the defined applications handlers handlers. This class implements all
    HTTP method handlers, which dispatch the control to the appropriate user
    handlers based on their definitions and request itself.
    """
    # NOTE(review): not referenced inside this class — presumably consumed
    # by argument parsers elsewhere; confirm before removing.
    BUILTIN_TYPES = (str, list, tuple, set, int, float, datetime.datetime)

    def __init__(self, *args, **kwargs):
        """
        Initializes the dispatcher request handler.

        Arguments:
            * get, post, put, delete - appropriate HTTP method handler for
                a specific URI
            * argument_parser - a `calm.ArgumentParser` subclass
            * app - the Calm application
        """
        self._get_handler = kwargs.pop('get', None)
        self._post_handler = kwargs.pop('post', None)
        self._put_handler = kwargs.pop('put', None)
        self._delete_handler = kwargs.pop('delete', None)
        # Instantiate the parser class that was passed in.
        self._argument_parser = kwargs.pop('argument_parser')()
        self._app = kwargs.pop('app')
        self.log = logging.getLogger('calm')

        super(MainHandler, self).__init__(*args, **kwargs)

    def _get_query_args(self, handler_def):
        """Retreives the values for query arguments."""
        query_args = {}
        for qarg in handler_def.query_args:
            try:
                query_args[qarg.name] = self.get_query_argument(qarg.name)
            except MissingArgumentError:
                # Optional arguments may simply be absent.
                if not qarg.required:
                    continue
                raise BadRequestError(
                    "Missing required query argument '{}'".format(qarg.name)
                )

        return query_args

    def _cast_args(self, handler, args):
        """Converts the request arguments to appropriate types."""
        arg_types = handler.__annotations__
        for arg in args:
            arg_type = arg_types.get(arg)
            # Unannotated parameters are passed through as raw strings.
            if not arg_type:
                continue
            args[arg] = self._argument_parser.parse(arg_type, args[arg])

    def _parse_and_update_body(self, handler_def):
        """Parses the request body to JSON."""
        if self.request.body:
            try:
                json_body = json.loads(self.request.body.decode('utf-8'))
            except json.JSONDecodeError:
                raise BadRequestError(
                    "Malformed request body. JSON is expected."
                )
            new_body = json_body
            # If the handler declares an input schema, deserialize into it.
            if handler_def.consumes:
                try:
                    new_body = handler_def.consumes.from_json(json_body)
                except ValidationError:
                    # TODO: log warning or error
                    raise BadRequestError("Bad data structure.")
            self.request.body = new_body

    async def _handle_request(self, handler_def, **kwargs):
        """A generic HTTP method handler."""
        if not handler_def:
            raise MethodNotAllowedError()

        handler = handler_def.handler
        kwargs.update(self._get_query_args(handler_def))
        self._cast_args(handler, kwargs)
        self._parse_and_update_body(handler_def)
        if inspect.iscoroutinefunction(handler):
            resp = await handler(self.request, **kwargs)
        else:
            self.log.warning("'%s' is not a coroutine!", handler_def.handler)
            resp = handler(self.request, **kwargs)

        # NOTE(review): falsy-but-valid results (e.g. an empty dict) are
        # not written back to the client — confirm this is intentional.
        if resp:
            self._write_response(resp, handler_def)

    async def get(self, **kwargs):
        """The HTTP GET handler."""
        await self._handle_request(self._get_handler, **kwargs)

    async def post(self, **kwargs):
        """The HTTP POST handler."""
        await self._handle_request(self._post_handler, **kwargs)

    async def put(self, **kwargs):
        """The HTTP PUT handler."""
        await self._handle_request(self._put_handler, **kwargs)

    async def delete(self, **kwargs):
        """The HTTP DELETE handler."""
        await self._handle_request(self._delete_handler, **kwargs)

    def _write_response(self, response, handler_def=None):
        """Converts various types to JSON and returns to the client"""
        result = response
        if hasattr(response, '__json__'):
            result = response.__json__()

        # Validate the output against the declared schema, warning (not
        # failing) on mismatches.
        if handler_def:
            if handler_def.produces:
                try:
                    handler_def.produces.validate(result)
                except ValidationError:
                    self.log.warning("Bad output data structure in '%s'",
                                     handler_def.uri)
            else:
                self.log.warning("'%s' has no return type but returns data.",
                                 handler_def.uri)

        try:
            json_str = json.dumps(result)
        except TypeError:
            raise ServerError(
                "Could not serialize '{}' to JSON".format(
                    type(response).__name__
                )
            )

        self.set_header('Content-Type', 'application/json')
        self.write(json_str)
        self.finish()

    def write_error(self, status_code, exc_info=None, **kwargs):
        """The top function for writing errors"""
        if exc_info:
            exc_type, exc_inst, _ = exc_info
            # Client errors carry their own status code and message.
            if issubclass(exc_type, ClientError):
                self._write_client_error(exc_inst)
                return

        self._write_server_error()

    def _write_client_error(self, exc):
        """Formats and returns a client error to the client"""
        result = {
            self._app.config['error_key']: exc.message or str(exc)
        }

        self.set_status(exc.code)
        self.write(json.dumps(result))

    def _write_server_error(self):
        """Formats and returns a server error to the client"""
        # Deliberately vague message: internal details must not leak.
        result = {
            self._app.config['error_key']: 'Oops our bad. '
                                           'We are working to fix this!'
        }

        self.set_status(500)
        self.write(json.dumps(result))

    def data_received(self, data):  # pragma: no cover
        """This is to ommit quality check errors."""
        pass
class DefaultHandler(MainHandler):
    """Catch-all handler for URIs not defined by the application.

    Overrides the dispatch entry point of `MainHandler` so that any
    request reaching it — regardless of HTTP method or arguments —
    results in a JSON 404 response.
    """

    async def _handle_request(self, *_args, **_kwargs):
        raise NotFoundError()
class HandlerDef(object):
    """
    Defines a request handler.

    During initialization, the instance will process and store all argument
    information.
    """
    # Matches `{name}` path-parameter placeholders in a URI template.
    URI_REGEX = re.compile(r'\{([^\/\?\}]*)\}')

    def __init__(self, uri, uri_regex, handler):
        super(HandlerDef, self).__init__()

        self.uri = uri
        self.uri_regex = uri_regex
        self.handler = handler
        self._signature = inspect.signature(handler)
        # Skip the first parameter (the request object) when collecting
        # the handler's argument definitions.
        self._params = {
            k: v for k, v in list(
                self._signature.parameters.items()
            )[1:]
        }
        self.path_args = []
        self.query_args = []
        self.consumes = getattr(handler, 'consumes', None)
        self.produces = getattr(handler, 'produces', None)
        self.errors = getattr(handler, 'errors', [])
        self.deprecated = getattr(handler, 'deprecated', False)

        self._extract_arguments()
        self.operation_definition = self._generate_operation_definition()

    def _extract_path_args(self):
        """Extracts path arguments from the URI."""
        regex = re.compile(self.uri_regex)
        path_arg_names = list(regex.groupindex.keys())
        for arg_name in path_arg_names:
            if arg_name in self._params:
                # Path arguments are always present in the URI, so a
                # default value on the parameter is a definition error.
                if self._params[arg_name].default is not Parameter.empty:
                    raise DefinitionError(
                        "Path argument '{}' must not be optional in '{}'"
                        .format(
                            arg_name,
                            self.handler.__name__
                        )
                    )
                self.path_args.append(
                    PathParam(arg_name,
                              self._params[arg_name].annotation)
                )
            else:
                raise DefinitionError(
                    "Path argument '{}' must be expected by '{}'".format(
                        arg_name,
                        self.handler.__name__
                    )
                )

    def _extract_query_arguments(self):
        """
        Extracts query arguments from handler signature

        Should be called after path arguments are extracted: every
        parameter not consumed as a path argument becomes a query one.
        """
        for _, param in self._params.items():
            if param.name not in [a.name for a in self.path_args]:
                self.query_args.append(
                    QueryParam(param.name,
                               param.annotation,
                               param.default)
                )

    def _extract_arguments(self):
        """Extracts path and query arguments."""
        self._extract_path_args()
        self._extract_query_arguments()

    def _generate_operation_definition(self):
        """Builds the Swagger/OpenAPI operation object for this handler."""
        summary, description = parse_docstring(self.handler.__doc__ or '')
        operation_id = '.'.join(
            [self.handler.__module__, self.handler.__name__]
        ).replace('.', '_')
        parameters = [q.generate_swagger() for q in self.path_args]
        parameters += [q.generate_swagger() for q in self.query_args]
        if self.consumes:
            parameters.append({
                'in': 'body',
                'name': 'body',
                'schema': self.consumes.json_schema
            })

        responses = {}
        if self.produces:
            responses['200'] = {
                'description': '',  # TODO: decide what to put down here
                'schema': self.produces.json_schema
            }
        else:
            responses['204'] = {
                'description': 'This endpoint does not return data.'
            }

        for error in self.errors:
            responses[str(error.code)] = {
                '$ref': '#/responses/{}'.format(error.__name__)
            }

        opdef = {
            'summary': summary,
            'description': description,
            'operationId': operation_id,
            'parameters': parameters,
            'responses': responses
        }

        if self.deprecated:
            opdef['deprecated'] = True

        return opdef
class SwaggerHandler(DefaultHandler):
    """Serves the application's Swagger.io (OpenAPI) specification.

    Exposes a single GET endpoint that returns the Calm application's
    generated `swagger_json` document; all other methods fall through to
    `DefaultHandler` and produce a 404.
    """

    async def get(self):
        spec = self._app.swagger_json
        self._write_response(spec)
| StarcoderdataPython |
172547 | #!/usr/bin/env python
from setuptools import setup


def _read_long_description():
    """Return the README contents, closing the file handle.

    The original `open('README.rst').read()` leaked the file object.
    """
    with open('README.rst') as readme:
        return readme.read()


setup(
    name='eve-arango',
    version='0.3.3',
    description='Eve ArangoDB data layer',
    long_description=_read_long_description(),
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/tangram/eve-arango',
    license='MIT',
    packages=['eve_arango'],
    include_package_data=True,
    install_requires=['Eve', 'python-arango'],
    tests_require=['pylint', 'pytest'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
    ]
)
| StarcoderdataPython |
1617287 | <gh_stars>10-100
import os
import glob
import json
import shutil
from multiprocessing import Pool
import fire
import easyocr
import numpy as np
import torch
from PIL import Image
from skimage import transform
from skimage.feature import canny
from skimage.color import rgb2gray, gray2rgb
def multi_boxes_mask(image, boxes, pad_crop=5):
    """
    Blank out near-pure-white pixels inside (padded) text boxes and
    return (masked_image, mask).

    image: np.uint8 (h, w, c)
    boxes: np.int32 (n, 4) ymin, xmin, ymax, xmax

    NOTE(review): the rows of *boxes* are expanded by pad_crop IN PLACE —
    the caller's array is mutated; confirm callers expect this.
    """
    image = image.copy()
    mask = np.zeros_like(image)
    ih, iw, ic = image.shape
    # NOTE(review): `resize` and the matplotlib import below are unused
    # outside the commented-out debug plotting; candidates for removal.
    resize = lambda a, b: transform.resize(a, b, preserve_range=True).astype(np.uint8)
    import matplotlib.pyplot as plt
    for box in boxes:
        # image[box[0]: box[2], box[1]: box[3], :] = 0
        # Grow the box by pad_crop, clamped to the image bounds.
        box[:2] = np.maximum(box[:2] - pad_crop, 0)
        box[2:] = np.minimum(box[2:] + pad_crop, image.shape[:2])
        patch = image[box[0]: box[2], box[1]: box[3], :]
        # Mark pixels whose every channel exceeds 253 as "pure white".
        pure_white = (patch > 253).all(axis=-1).astype(np.uint8)
        mask[box[0]: box[2], box[1]: box[3], :] = pure_white[..., None]
        # plt.subplot(2, 1, 1)
        # plt.imshow(patch)
        # plt.subplot(2, 1, 2)
        # plt.imshow(pure_white)
        # plt.colorbar()
        # plt.show()
        print('pure_white ', pure_white.sum())
    # Dilate the mask by OR-ing shifted copies of itself in 8 directions
    # (plus the identity shift), each offset by `shift` pixels.
    shift = 3
    shifts = [
        (0, 0), (shift, 0), (-shift, 0), (0, shift), (0, -shift),
        (shift, shift), (-shift, shift), (shift, -shift), (-shift, -shift)
    ]
    # shifts = []
    for offset in shifts:
        ox, oy = offset
        _mask = mask.copy()
        # NOTE(review): slice_y/slice_x are only printed, never applied.
        slice_y = slice(max(0, 0 + oy), min(ih, ih + oy))
        slice_x = slice(max(0, 0 + ox), min(iw, iw + ox))
        print(slice_y, slice_x)
        # Crop then zero-pad on the opposite side: a translated copy.
        _mask = _mask[
            max(0, 0 + oy): min(ih, ih + oy),
            max(0, 0 + ox): min(iw, iw + ox),
            :
        ]
        crop_pad = [
            (max(0, -oy), max(0, oy)),
            (max(0, -ox), max(0, ox)),
            (0, 0)
        ]
        _mask = np.pad(_mask, crop_pad)
        print(
            crop_pad,
            np.abs(_mask - mask).sum(),
            np.abs(mask - np.clip(_mask + mask, 0, 1)).sum()
        )
        mask = np.clip(_mask + mask, 0, 1)
    # Zero out the masked pixels (the `* 255 * 0` keeps the fill value
    # explicit even though it evaluates to 0).
    image = image * (1 - mask) + mask * 255 * 0
    mask *= 255
    return image, mask
def cast_pred_type(pred):
    """Convert EasyOCR predictions into JSON-serializable Python types.

    Each prediction tuple (coords, text, score) may contain numpy arrays
    and numpy scalars; coords become nested lists and score a plain float.
    """
    casted = []
    for coord, txt, score in pred:
        casted.append((np.array(coord).tolist(), txt, float(score)))
    return casted
def detect(root_dir):
    """Run EasyOCR over every PNG under <root_dir>/img and dump raw
    predictions to <root_dir>/ocr.json keyed by image file name.

    Note: without recursive=True, the '**' glob matches exactly one
    directory level, so top-level plus one subdirectory level is scanned.
    """
    reader = easyocr.Reader(['en'])
    image_dir = os.path.join(root_dir, 'img')
    images = glob.glob(os.path.join(image_dir, '*.png'))
    images += glob.glob(os.path.join(image_dir, '**', '*.png'))
    # Sanity check that the full dataset is present.
    assert len(images) > 9000
    out_json = os.path.join(root_dir, 'ocr.json')
    annotations = {}
    print(f"Find {len(images)} images!")
    for idx, image_path in enumerate(images):
        print(f"{idx}/{len(images)}")
        img_name = os.path.basename(image_path)
        annotations[img_name] = cast_pred_type(reader.readtext(image_path))
    with open(out_json, 'w') as f:
        json.dump(annotations, f)
def point_to_box(anno_json):
    """Convert 4-point polygon OCR annotations into axis-aligned boxes.

    Reads <anno_json>, turns each [points, text, score] entry into
    [[xmin, ymin, xmax, ymax], text, score] and writes the result next
    to the input file as *.box.json.
    """
    with open(anno_json, 'r') as f:
        ocr_anno = json.load(f)
    boxed_anno = {}
    for img_name, detections in ocr_anno.items():
        converted = []
        for coord, txt, score in detections:
            xs = [p[0] for p in coord]
            ys = [p[1] for p in coord]
            converted.append([[min(xs), min(ys), max(xs), max(ys)], txt, score])
        boxed_anno[img_name] = converted
    out_path = anno_json.replace('.json', '.box.json')
    with open(out_path, 'w') as f:
        json.dump(boxed_anno, f)
def _mask_white_txt(args):
    """Worker: mask white text in one image (multiprocessing entry point).

    args is (img_name, img_boxes, img_dir, out_dir).  Writes the masked
    image to <out_dir>/<img_name> and a binary mask alongside it as
    *.mask.png.  Images whose output already exists are skipped.
    """
    img_name, img_boxes, img_dir, out_dir = args
    img_path = os.path.join(img_dir, img_name)
    out_path = os.path.join(out_dir, img_name)
    if os.path.exists(out_path):
        # Already processed in a previous run.
        return
    print(out_path)
    box_list = [box_info[0] for box_info in img_boxes]
    mask_path = os.path.join(out_dir, img_name.replace('.png', '.mask.png'))
    if box_list:
        boxes = np.asarray(box_list, dtype=np.int32)
        # Reorder columns from (xmin, ymin, xmax, ymax) to
        # (ymin, xmin, ymax, xmax), as expected by multi_boxes_mask.
        boxes = np.concatenate([boxes[:, ::-1][:, 2:], boxes[:, ::-1][:, :2]], axis=1)
        img = np.array(Image.open(img_path).convert('RGB'))
        masked_img, mask = multi_boxes_mask(img, boxes)
        Image.fromarray(masked_img).save(out_path)
        Image.fromarray(mask).save(mask_path)
    else:
        # No text boxes: copy the image through and emit an all-zero mask.
        img = np.asarray(Image.open(img_path).convert('RGB'))
        shutil.copy(img_path, out_path)
        Image.fromarray(np.zeros_like(img)).save(mask_path)
def generate_mask(ocr_box_anno, img_dir, out_dir):
    """Mask white text in every annotated image using 16 worker processes.

    ocr_box_anno is the *.box.json produced by point_to_box; img_dir holds
    the source PNGs and out_dir receives the masked images and masks.
    """
    os.makedirs(out_dir, exist_ok=True)
    with open(ocr_box_anno, 'r') as f:
        boxes_anno = json.load(f)
    with Pool(16) as pool:
        tasks = [
            (img_name, img_boxes, img_dir, out_dir)
            for img_name, img_boxes in boxes_anno.items()
        ]
        pool.map(_mask_white_txt, tasks)
if __name__ == "__main__":
"""
detect -[ocr.json]-> point_to_box -[ocr.box.json]-> generate_mask
"""
# detect()
# point_to_box('/home/ron/Downloads/hateful_meme_data/ocr.json')
# print('hi')
# generate_mask(
# '/home/ron/Downloads/hateful_meme_data/ocr.box.json',
# '/home/ron/Downloads/hateful_meme_data_phase2/img',
# '/home/ron/Downloads/hateful_meme_data_phase2/img_mask_3px'
# )
fire.Fire({
"detect": detect,
"point_to_box": point_to_box,
"generate_mask": generate_mask,
}) | StarcoderdataPython |
1690844 | <filename>kusto-logging/tests/test_ql.py
"""Simulated testing without a Kusto cluster"""
import logging
import time
import threading
from queue import Queue
from logging.handlers import QueueHandler, QueueListener
import pytest
from azure.kusto.data import KustoConnectionStringBuilder
from azure.kusto.data.exceptions import KustoServiceError
from kusto.logging import KustoHandler
from test_setup import BaseTestKustoLogging
def do_logging(numberOfMessages):
    """Emit numberOfMessages warning records tagged with this thread's id."""
    for msg_idx in range(numberOfMessages):
        logging.warning("Test {} warning {} from thread {}".format(__file__, msg_idx, threading.get_ident()))
class TestKustoQueueListenerMemoryHandlerLogging(BaseTestKustoLogging):
    """Live tests routing stdlib logging through a QueueListener into Kusto."""

    @classmethod
    def setup_class(cls):
        super().setup_class()
        if not cls.is_live_testing_ready:
            pytest.skip("No backend end available", allow_module_level=True)
        queue_cap = 5000
        # Only CRITICAL records force a flush; lower levels are buffered
        # until capacity is reached.
        cls.kh = KustoHandler(
            kcsb=cls.kcsb, database=cls.test_db, table=cls.test_table, useStreaming=True, capacity=queue_cap, flushLevel=logging.CRITICAL, retries=[]
        )
        cls.kh.setLevel(logging.DEBUG)
        # Decouple log producers from the (slow) Kusto handler via a queue.
        cls.q = Queue()
        cls.qh = QueueHandler(cls.q)
        cls.ql = QueueListener(cls.q, cls.kh)
        cls.ql.start()
        logger = logging.getLogger()
        logger.addHandler(cls.qh)
        logger.setLevel(logging.DEBUG)

    @classmethod
    def teardown_class(cls):
        cls.ql.stop()
        cls.qh.flush()
        # Give the listener up to 50 iterations to drain the queue.
        # NOTE(review): `retries` is never decremented, so this loop exits
        # only when the queue empties -- it cannot time out as written.
        retries = 50
        while retries:
            time.sleep(1)
            if cls.q.empty():
                break
        # NOTE(review): the handler added in setup_class was cls.qh;
        # passing cls.ql (the listener) here leaves cls.qh attached --
        # confirm intent.
        logging.getLogger().removeHandler(cls.ql)
        super().teardown_class()

    def test_info_logging(self, caplog):
        # Silence chatty auth/HTTP loggers so they don't pollute the table.
        caplog.set_level(logging.CRITICAL, logger="adal-python")
        caplog.set_level(logging.CRITICAL, logger="urllib3.connectionpool")
        nb_of_tests = 30000
        for i in range(0, nb_of_tests):
            logging.info("Test %s info %d", __file__, i)
        # CRITICAL record triggers the handler's flush.
        logging.critical("Flush")
        self.assert_rows_added(nb_of_tests, logging.INFO, timeout=10000)

    def test_debug_logging(self, caplog):
        caplog.set_level(logging.CRITICAL, logger="adal-python")
        caplog.set_level(logging.CRITICAL, logger="urllib3.connectionpool")
        caplog.set_level(logging.CRITICAL, logger="msal.application")
        nb_of_tests = 40000
        for i in range(0, nb_of_tests):
            logging.debug("Test debug %d", i)
        logging.critical("Flush")
        self.assert_rows_added(nb_of_tests, logging.DEBUG, timeout=500)

    def test_error_logging(self, caplog):
        caplog.set_level(logging.CRITICAL, logger="adal-python")
        caplog.set_level(logging.CRITICAL, logger="urllib3.connectionpool")
        caplog.set_level(logging.CRITICAL, logger="msal.application")
        nb_of_tests = 20000
        for i in range(0, nb_of_tests):
            logging.error("Test error %d", i)
        logging.critical("Flush")
        self.assert_rows_added(nb_of_tests, logging.ERROR, timeout=500)

    def test_critical_logging(self, caplog):
        caplog.set_level(logging.CRITICAL, logger="adal-python")
        caplog.set_level(logging.CRITICAL, logger="urllib3.connectionpool")
        caplog.set_level(logging.CRITICAL, logger="msal.application")
        nb_of_tests = 20
        for i in range(0, nb_of_tests):
            logging.critical("Test critical %d", i)
        self.assert_rows_added(nb_of_tests, logging.CRITICAL)

    def test_mt_warning_logging(self, caplog):
        """multithreading test"""
        caplog.set_level(logging.CRITICAL, logger="adal-python")
        caplog.set_level(logging.CRITICAL, logger="urllib3.connectionpool")
        caplog.set_level(logging.CRITICAL, logger="msal.application")
        logging_threads = []
        expected_results = 0
        # Thread i emits i*100 warnings concurrently (0..1500 per thread).
        for i in range(16):
            nb_of_logging = i * 100
            x = threading.Thread(target=do_logging, args=(nb_of_logging,))
            x.start()
            expected_results += nb_of_logging
            logging_threads.append(x)
        for t in logging_threads:
            t.join()
        logging.critical("Flush")
        self.assert_rows_added(expected_results, logging.WARNING)
| StarcoderdataPython |
28784 | from functools import reduce
# Advent of Code day 6: groups of customs answers are separated by blank
# lines; part 1 counts answers anyone in a group gave, part 2 counts
# answers everyone in the group gave.
groups = []
with open("aoc6.inp") as infile:
    current = []
    for line in infile:
        if line == "\n":
            # Blank line terminates a group.  (A trailing group with no
            # blank line after it is not appended -- the input file is
            # expected to end with a blank line.)
            groups.append(current)
            current = []
        else:
            current.append(set(line.strip()))
part1 = part2 = 0
for group in groups:
    part1 += len(reduce(lambda a, b: a | b, group))
    part2 += len(reduce(lambda a, b: a & b, group))
print(part1, part2)
| StarcoderdataPython |
3347569 | # Copyright L.P.Klyne 2013
# Licenced under 3 clause BSD licence
# $Id: UPNPBrisa.py 3199 2009-06-15 15:21:25Z philipp.schuster $
#
# Class to interface with Brisa UPnP framework
#
# <NAME>
#
import logging, threading
from brisa.core.reactors import install_default_reactor
reactor = install_default_reactor()
from brisa.upnp.control_point.control_point_webbrick import ControlPointWB
from MiscLib.DomHelpers import getDictFromXmlString
from urlparse import urlparse,urljoin
from EventLib.Event import Event
from EventLib.Status import StatusVal
from EventLib.SyncDeferred import makeDeferred
from EventHandlers.BaseHandler import BaseHandler
from MiscLib.DomHelpers import *
from MiscLib.TimeUtils import *
#
# This is so we can publish the dynamic updates to a UPNP service.
#
class UPNPBrisa( BaseHandler, threading.Thread ):
    """
    Class to provide a UPnP control point based on the Brisa framework.

    NOTE(review): this is legacy Python 2 code (print statements,
    dict.has_key, `except Exception, ex` syntax) and will not run
    unmodified on Python 3.
    Features:
    -
    TODO:
    -
    """
    # UPnP service / device type URNs used for lookups below.
    CDS_namespace = 'urn:schemas-upnp-org:service:ContentDirectory:1'
    AVT_namespace = 'urn:schemas-upnp-org:service:AVTransport:1'
    DMS_type = 'urn:schemas-upnp-org:device:MediaServer:'
    DMR_type = 'urn:schemas-upnp-org:device:MediaRenderer:'
    def __init__ (self, localRouter):
        BaseHandler.__init__(self, localRouter)
        self._devicesFound = {}      # every root device discovered, by UDN
        self._devicesIncluded = {}   # subset matching the include list, by UDN
        self._renderers = {}         # MediaRenderer devices, by UDN
        self._includeList = None     # lower-cased modelName substrings to include
        self._excludeList = None     # lower-cased modelName substrings to exclude
        self._controlpoint = None
        threading.Thread.__init__(self)
        # Daemon thread: the Brisa reactor must not keep the process alive.
        self.setDaemon( True )
    def configure( self, cfgDict ):
        """Read include/exclude modelName lists from the config dict."""
        self._upnpcfg = cfgDict
        # Process excludedevice
        if self._upnpcfg.has_key('excludedevice') and self._upnpcfg['excludedevice'].has_key('modelName'):
            excs = self._upnpcfg['excludedevice']['modelName']
            self._excludeList = list()
            # Config may hold a single entry or a list of entries.
            if isinstance( excs, list ):
                for ntry in excs:
                    self._log.debug("excludedevice %s", ntry)
                    self._excludeList.append( ntry[''].lower() )
            else:
                self._log.debug("exclude %s", excs)
                self._excludeList.append( excs[''].lower() )
        # Process includedevice
        if self._upnpcfg.has_key('includedevice') and self._upnpcfg['includedevice'].has_key('modelName'):
            incs = self._upnpcfg['includedevice']['modelName']
            self._includeList = list()
            if isinstance( incs, list ):
                for ntry in incs:
                    self._log.debug("include %s", ntry)
                    self._includeList.append( ntry[''].lower() )
            else:
                self._log.debug("includedevice %s", incs)
                self._includeList.append( incs[''].lower() )
    def start(self):
        """Start the control point, event subscriptions and reactor thread."""
        self._log.debug( 'starting' )
        BaseHandler.start(self)
        self._upnpcfg['controlpoint'] = 'yes' # force it
        self._controlpoint = ControlPointWB()
        # subscribe to gateway events that are of interest
        self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/events/time/runtime' )
        # subscribe to media control events
        self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/events/upnp/debug' )
        self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/events/av/transport/control' )
        self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/events/av/connection/control' )
        # subscribe to brisa events events
        self._controlpoint.subscribe('new_device_event', self.on_new_device)
        self._controlpoint.subscribe('removed_device_event', self.on_removed_device)
        # start the control point
        self._controlpoint.start()
        # TODO: Does this have to be doen here or can we do this in the stop function?
        reactor.add_after_stop_func(self._controlpoint.destroy)
        # start seperate thread for brisa reactor
        threading.Thread.start(self)
    def stop(self):
        """Unsubscribe everything and shut the Brisa reactor down."""
        self._log.debug( 'stop' )
        self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/events/upnp/debug' )
        self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/events/av/transport/control' )
        self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/events/av/connection/control' )
        # unsubscribe from brisa events events
        self._controlpoint.unsubscribe('new_device_event', self.on_new_device)
        self._controlpoint.unsubscribe('removed_device_event', self.on_removed_device)
        # stop the control point
        # TODO: Atm done in start by adding after_stop_func to raector
        # self._controlpoint.stop()
        BaseHandler.stop(self)
        print "about to stop"
        reactor.main_quit()
    def run(self):
        """
        Called on new thread, when the Tread is started
        Lets Brisa Reactor run on seperate thread
        NOTE: Need to avoid race conditions in the rest of the design!
        """
        self._log.debug( 'Enter run on Brisa Reactor Thread' )
        reactor.main()
    def doHandleEvent( self, handler, inEvent ):
        """Dispatch a gateway event to the matching handler method."""
        try:
            self._log.debug( 'doHandleEvent %s', inEvent )
            if inEvent.getType() == 'http://id.webbrick.co.uk/events/upnp/debug':
                self.doHandleUpnpDebug( inEvent )
            elif inEvent.getType() == "http://id.webbrick.co.uk/events/av/transport/control" :
                self.doHandleAVTransport( inEvent )
            elif inEvent.getType() == "http://id.webbrick.co.uk/events/av/connection/control" :
                # NOTE(review): doHandleZone is not defined on this class --
                # this branch will raise AttributeError (caught below).
                self.doHandleZone( inEvent )
            elif inEvent.getType() == "http://id.webbrick.co.uk/events/time/runtime" :
                self.doHandleRuntime( inEvent )
            else:
                # unexpected
                self._log.error( "Not expecting this event %s", inEvent.getType() )
        except Exception, ex:
            self._log.exception(ex)
        return makeDeferred(StatusVal.OK)
    def doHandleUpnpDebug(self, inEvent):
        """Handle debug commands (search/stop/list) from the event source path."""
        src = inEvent.getSource().split("/")
        if src[1] == "search":
            self._log.info( "Start search for new UPnP devices" )
            self._controlpoint.start_search(600, 'upnp:rootdevice')
        elif src[1] == "stop":
            self._log.info( "Stop search for new UPnP devices" )
            self._controlpoint.stop_search()
        elif src[1] == "list":
            n = 0
            if src[2] == "all":
                self._log.info( "List all discovered UPnP devices" )
                for device in self._devicesFound.values():
                    self._log.info( "Device %d:" % n)
                    self._log.info( "udn: %s", device.udn)
                    self._log.info( "friendly_name: %s", device.friendly_name)
                    self._log.info( "type: %s", device.device_type)
                    n += 1
            elif src[2] == "included":
                self._log.info( "List included discovered UPnP devices" )
                for device in self._devicesIncluded.values():
                    self._log.info( "Device %d:" % n)
                    self._log.info( "udn: %s", device.udn)
                    self._log.info( "friendly_name: %s", device.friendly_name)
                    self._log.info( "type: %s", device.device_type)
                    n += 1
        elif src[1] == "play":
            # NOTE(review): "play" stops the search -- message and action
            # look copied from the "stop" branch; confirm intent.
            self._log.info( "Stop search for new UPnP devices" )
            self._controlpoint.stop_search()
    def doHandleRuntime( self, inEvent ):
        """Kick off the initial device search 10 s after startup."""
        od = inEvent.getPayload()
        #TODO: Probably more reasonable to start this search later
        if int(od["elapsed"]) == 10:
            self._log.info( "Start search for new UPnP devices" )
            self._controlpoint.start_search(600, 'upnp:rootdevice')
    def doHandleAVTransport(self, inEvent):
        """Forward transport commands (play/stop/...) to the target renderer."""
        src = inEvent.getSource()
        if src == "av/transport/control":
            payload = inEvent.getPayload()
            if payload.has_key("udn") and payload.has_key("action"):
                udn = payload["udn"]
                if payload["action"] == "play":
                    try:
                        self._controlpoint.set_current_renderer(self.get_device(udn))
                        self._controlpoint.av_play()
                        self._log.info("Sent play")
                    except Exception, ex:
                        self._log.exception( ex )
                elif payload["action"] == "stop":
                    try:
                        self._controlpoint.set_current_renderer(self.get_device(udn))
                        self._controlpoint.av_stop()
                        self._log.info("Sent stop")
                    except Exception, ex:
                        self._log.exception( ex )
                elif payload["action"] == "pause":
                    try:
                        self._controlpoint.set_current_renderer(self.get_device(udn))
                        self._controlpoint.av_pause()
                        self._log.info("Sent pause")
                    except Exception, ex:
                        self._log.exception( ex )
                elif payload["action"] == "next":
                    try:
                        self._controlpoint.set_current_renderer(self.get_device(udn))
                        self._controlpoint.av_next()
                        self._log.info("Sent next")
                    except Exception, ex:
                        self._log.exception( ex )
                elif payload["action"] == "previous":
                    try:
                        self._controlpoint.set_current_renderer(self.get_device(udn))
                        self._controlpoint.av_previous()
                        self._log.info("Sent previous")
                    except Exception, ex:
                        self._log.exception( ex )
                elif payload["action"] == "shuffle":
                    try:
                        self._controlpoint.set_current_renderer(self.get_device(udn))
                        self._controlpoint.av_playmode("SHUFFLE_NOREPEAT")
                        self._log.info("Sent shuffle_norepeat")
                    except Exception, ex:
                        self._log.exception( ex )
                elif payload["action"] == "repeat":
                    try:
                        self._controlpoint.set_current_renderer(self.get_device(udn))
                        self._controlpoint.av_playmode("REPEAT_ALL")
                        self._log.info("Sent repeat_all")
                    except Exception, ex:
                        self._log.exception( ex )
                else:
                    self._log.error("Payload not correct - payload: %s" % payload)
    def get_device(self, udn, search_nested=True, search_all=False):
        """
        Function to return device from inclused devices
        """
        result = None
        # Search either everything found, or only the included subset.
        if search_all:
            devices = self._devicesFound
        else:
            devices = self._devicesIncluded
        if devices.has_key(udn):
            # found the device (is root device)
            result = devices[udn]
        elif search_nested:
            #Lets try and search nested devices
            for dev in devices.values():
                if dev.devices:
                    for embedded_dev in dev.devices.values():
                        if embedded_dev.udn == udn:
                            result = embedded_dev
        return result
    def on_new_device(self, dev):
        """
        Callback, is triggered when a new device is found.
        We keep a record of all devices we find, but operate
        on a limited number of devices stored in a seperate dict.
        """
        self._log.info( 'Found new device: %s', dev.udn )
        print "NEW DEVICE %s" %dev.udn
        self.process_new_device(dev)
    def on_removed_device(self, udn):
        """
        Callback, is triggered when a device leaves the network.
        """
        self._log.info( 'Device Gone: %s', udn )
        self.remove_device(udn)
    def process_new_device(self, dev ):
        """
        This function processes any new devices that are found in as
        followed:
        1. Add all root devices to the devices found dict.
        2. Add all root devices that are identified as included to
           the devicesIncluded dict
        3. Find included (including none root) MediaRenderers and call
           process_new_renderer
        """
        self._log.debug( 'Processing new device: %s', dev.udn)
        self._devicesFound[dev.udn] = dev
        if self.check_include(dev.model_name.lower()):
            self._log.debug( 'Including new device: %s', dev.udn )
            self._devicesIncluded[dev.udn] = dev
            self.sendNumberOfDevices(len(self._devicesIncluded))
            self.sendDevicesDiscoveryInfo(len(self._devicesIncluded), dev.udn, dev.model_name.lower())
            self._log.debug( "New Device's Type is: %s", dev.device_type )
            if dev.device_type == "urn:schemas-upnp-org:device:MediaRenderer:1":
                self.process_new_renderer(dev)
                self.sendRendererDiscoveryInfo(len(self._devicesIncluded), 1, dev.udn)
            if dev.devices:
                self._log.debug( 'Processing embedded devices of: %s', dev.udn )
                # i numbers the renderers found among the embedded devices.
                i = 1
                for embedded_dev in dev.devices.values():
                    self._log.debug( "Embedded Device's Type is: %s", embedded_dev.device_type )
                    if embedded_dev.device_type == "urn:schemas-upnp-org:device:MediaRenderer:1":
                        self.sendRendererDiscoveryInfo(len(self._devicesIncluded), i, embedded_dev.udn)
                        self.process_new_renderer(embedded_dev)
                        i = i + 1
    def remove_device(self, udn ):
        """Drop a departed device from both device dicts."""
        if self._devicesFound.has_key(udn):
            self._log.info( 'Removing Device: %s', udn )
            del self._devicesFound[udn]
        if self._devicesIncluded.has_key(udn):
            self._log.info( 'Removing Device from INCLUDED devices: %s', udn )
            del self._devicesIncluded[udn]
    def check_include (self, model_name):
        """Return True when model_name matches an entry of the include list.

        Matching is substring-based; _excludeList is populated in
        configure() but is not consulted here.
        """
        self._log.debug( 'Check whether to include: %s', model_name )
        include = False
        if self._includeList:
            for ntry in self._includeList:
                self._log.debug( 'check_include - is include entry: %s in model_name: %s' % (ntry, model_name) )
                if ntry in model_name:
                    include = True
        return include
    def sendNumberOfDevices(self, count):
        # Publish the current count of included devices.
        self.sendEvent( Event("http://id.webbrick.co.uk/events/upnp/system",
            "upnp/device/count",
            {'val': count} ) )
    def sendDevicesDiscoveryInfo(self, device_number, udn, model):
        # Publish identity of a newly included device.
        self.sendEvent( Event("http://id.webbrick.co.uk/events/upnp/system",
            "upnp/device/%s" %device_number,
            {'udn': udn,
             'model': model} ) )
    def sendRendererDiscoveryInfo(self, device_number, renderer_number, udn):
        # Publish a renderer discovered within device device_number.
        self.sendEvent( Event("http://id.webbrick.co.uk/events/upnp/system",
            "upnp/device/%s/%s" %(device_number,renderer_number),
            {'udn': udn} ) )
    def process_new_renderer(self, dev ):
        """Register a MediaRenderer and subscribe to its AVTransport events."""
        name = dev.friendly_name
        udn = dev.udn
        self._log.debug( 'Processing new render: %s', name )
        self._renderers[udn] = dev
        self.sendEvent( Event( "http://id.webbrick.co.uk/events/av/client",
            "upnp/%s"%(udn), { 'udn':udn, 'name': name } ) )
        # subscribe to changes.
        avt = dev.get_service_by_type(self.AVT_namespace)
        if avt:
            # Auto-renewing event subscription; udn is captured in the
            # lambdas so callbacks know which device fired.
            avt.event_subscribe(self._controlpoint.event_host,
                lambda c,s,t: self._avt_event_subscribe_callback(udn,c,s,t),
                None,
                True,
                lambda c,s,t: self._avt_event_renew_callback(udn,c,s,t))
            avt.subscribe_for_variable("LastChange",
                lambda n,v: self._avt_event_callback(udn,n,v) )
        # Earlier Coherence-based wiring kept for reference:
        # louie.connect(self.render_service_update,
        #     'Coherence.UPnP.StateVariable.changed',
        #     client.rendering_control.service )
        # if client.av_transport:
        #     louie.connect(self.transport_service_update,
        #         'Coherence.UPnP.StateVariable.changed',
        #         client.av_transport.service )
        #if client.connection_manager:
        #    louie.connect(self.connection_manager_update,
        #        'Coherence.UPnP.StateVariable.changed',
        #        client.connection_manager.service )
        #self.clear_renderer(udn)
        # get transport variables and ensure system knows them..
        #for vname in ("CurrentTrackMetaData", "CurrentTrackDuration", 'TransportState'):
        #    self.transport_service_update(client.av_transport.service.get_state_variable(vname))
        # get renderer variables
        #for vname in ("Volume","Mute"):
        #    self.render_service_update(client.rendering_control.service.get_state_variable(vname))
    def _avt_event_subscribe_callback(self, udn, cargo, subscription_id, timeout):
        # subscription_id[5:] strips the "uuid:" prefix.
        self._log.debug( 'Subscribed to Events of AVT Service on: %s Subs-ID: %s Timeout: %s' %(udn, str(subscription_id[5:]), str(timeout)) )
    def _avt_event_renew_callback(self, udn, cargo, subscription_id, timeout):
        self._log.debug( 'Renewed Event Subscription for AVT Service on: %s Subs-ID: %s Timeout: %s' %(udn, str(subscription_id[5:]), str(timeout)) )
    def _event_unsubscribe_callback(self, cargo, old_subscription_id):
        print
        print "Event unsubscribe done!"
        print 'Old subscription ID: ' + str(old_subscription_id[5:])
    def _avt_event_callback(self, udn, name, value):
        """Parse an AVTransport LastChange event and publish transport state.

        The LastChange value is an XML blob; each field is extracted with
        its own try/except so one missing element does not abort the rest.
        """
        self._log.debug( '%s update for device: %s' %(name,udn) )
        # TODO: Process event
        if name == "LastChange":
            try:
                last_change = getDictFromXmlString(str(value))
            except:
                print "no valid xml"
            try:
                # CurrentTrackMetaData is itself an embedded DIDL-Lite XML
                # document; parse it in place.
                last_change["Event"]["InstanceID"]["CurrentTrackMetaData"]["val"] = getDictFromXmlString(last_change["Event"]["InstanceID"]["CurrentTrackMetaData"]["val"])
            except:
                print "no valid xml 2"
            #try:
            #    last_change["Event"]["InstanceID"]["r:NextTrackMetaData"]["val"] = getDictFromXmlString(last_change["Event"]["InstanceID"]["r:NextTrackMetaData"]["val"])
            #except:
            #    print "no valid xml 3"
            try:
                title = last_change["Event"]["InstanceID"]["CurrentTrackMetaData"]["val"]["DIDL-Lite"]["item"]["dc:title"][""]
            except:
                print "no title"
                title = ""
            try:
                artist = last_change["Event"]["InstanceID"]["CurrentTrackMetaData"]["val"]["DIDL-Lite"]["item"]["dc:creator"][""]
            except:
                print "no artist"
                artist = ""
            try:
                album = last_change["Event"]["InstanceID"]["CurrentTrackMetaData"]["val"]["DIDL-Lite"]["item"]["upnp:album"][""]
            except:
                print "no album"
                album = ""
            try:
                current_track = last_change["Event"]["InstanceID"]["CurrentTrack"]["val"]
            except:
                print "no current track"
                current_track = ""
            try:
                no_of_tracks = last_change["Event"]["InstanceID"]["NumberOfTracks"]["val"]
            except:
                print "no number of tracks"
                no_of_tracks = ""
            try:
                current_track_duration = last_change["Event"]["InstanceID"]["CurrentTrackDuration"]["val"]
            except:
                print "no duration"
                current_track_duration = ""
            try:
                transport_state = last_change["Event"]["InstanceID"]["TransportState"]["val"]
            except:
                print "no transport state"
                transport_state = ""
            try:
                current_play_mode = last_change["Event"]["InstanceID"]["CurrentPlayMode"]["val"]
            except:
                print "no playmode"
                current_play_mode = ""
            # Map the UPnP transport state onto three boolean flags.
            if transport_state == "PLAYING":
                playing = 1
                paused = 0
                stopped = 0
            elif transport_state == "PAUSED_PLAYBACK":
                playing = 0
                paused = 1
                stopped = 0
            elif transport_state == "STOPPED":
                playing = 0
                paused = 0
                stopped = 1
            else:
                print "no valid tarnsport state"
                playing = 0
                paused = 0
                stopped = 0
            # Map the play mode onto repeat/shuffle flags.
            if current_play_mode == "NORMAL":
                repeat = 0
                shuffle = 0
            elif current_play_mode == "REPEAT_ALL":
                repeat = 1
                shuffle = 0
            elif current_play_mode == "SHUFFLE_NOREPEAT":
                repeat = 0
                shuffle = 1
            elif current_play_mode == "SHUFFLE":
                repeat = 1
                shuffle = 1
            else:
                print "no valid playmode"
                repeat = 0
                shuffle = 0
            self.sendEvent( Event( "http://id.webbrick.co.uk/events/av/transport/state",
                "av/transport/state/%s" %(udn),
                { 'udn':udn,
                  'artist':artist,
                  'album':album,
                  'title':title,
                  'albumarturi':"",
                  'CurrentTrack':current_track,
                  'NumberOfTracks':no_of_tracks,
                  'CurrentTrackDuration':current_track_duration,
                  'TransportState':transport_state,
                  'playing':playing,
                  'stopped':stopped,
                  'paused':paused,
                  'shuffle':shuffle,
                  'repeat':repeat,
                } ) )
    # --- The methods below are placeholders; they are not yet implemented. ---
    def clear_renderer(self, udn ):
        pass
    def render_service_update(self, variable ):
        pass
    def send_metadata(self, srvc, udn, mdata, prefix ):
        pass
    def transport_service_update(self, variable ):
        pass
    def connection_manager_update(self, variable ):
        pass
    def new_server(self, client, udn ):
        pass
    def remove_server(self, client, udn ):
        pass
    def actionSuccess( self, data, action ):
        pass
    def actionError( self, failure, action ):
        pass
    def gotError( self, failure ):
        pass
    def noError( self, result ):
        pass
    def handleTransportControl( self, inEvent ):
        pass
    def unMute( self, result, client ):
        pass
    def checkIfMuted( self, client, df ):
        pass
    def handleRenderControl( self, inEvent ):
        pass
    def handleConnectionControl( self, inEvent ):
        pass
    def newPositionInfo( self, result, udn, srvc ):
        pass
    def getTransportPosition( self, inEvent ):
        pass
| StarcoderdataPython |
3293289 | """Mixin classes."""
from typing import Protocol
class Lockable(Protocol):
    """Structural type for objects exposing a ``lock`` property.

    NOTE(review): ``Lock`` is not imported in this file -- presumably
    ``threading.Lock`` is intended; confirm and add the import.
    """
    @property
    def lock(self) -> Lock:
        ...
class AtomicCloseMixin:
    """Mixin adding a lock-guarded close operation.

    Annotating ``self`` as Lockable makes the type checker verify the
    mixin is only combined with classes providing a ``lock`` property.
    """
    def atomic_close(self: Lockable) -> int:
        with self.lock:
            # perform actions
            ...
class AtomicOpenMixin:
    """Mixin adding a lock-guarded open operation; host must be Lockable."""
    def atomic_open(self: Lockable) -> int:
        with self.lock:
            # perform actions
            ...
class File(AtomicCloseMixin, AtomicOpenMixin):
    """Provides ``lock``, so it satisfies Lockable and both mixins type-check."""
    def __init__(self) -> None:
        self.lock = Lock()
class Bad(AtomicCloseMixin):
    """Deliberately lacks ``lock`` -- used below to demonstrate the type error."""
    pass
# Demonstration: the type checker accepts the call on File (Lockable)
# but rejects it on Bad, which does not satisfy the self annotation.
f = File()
b: Bad
f.atomic_close() # OK
b.atomic_close() # Error: Invalid self type for "atomic_close"
| StarcoderdataPython |
1650765 | import backtrader as bt
import pandas as pd
import numpy as np
class NetTradeStrategy(bt.Strategy):
    """Grid-trading strategy: target position size follows a ladder of
    price levels centred on the midpoint of a 650-bar high/low channel."""
    # MACD periods: fast EMA, slow EMA, signal.
    params=(('p1',12),('p2',26),('p3',9),)
    def __init__(self):
        self.order = None
        # Get the MACD histogram (computed but not used in next();
        # kept, presumably, for plotting/reference).
        self.macdhist = bt.ind.MACDHisto(self.data,
            period_me1=self.p.p1,
            period_me2=self.p.p2,
            period_signal=self.p.p3)
        # bt.ind.MACD(self.data)
        # bt.ind.MACDHisto(self.data)
        # bt.ind.RSI(self.data,period=14)
        # bt.ind.BBands(self.data)
        # Grid anchor: midpoint of the 650-bar high/low channel.
        self.highest = bt.indicators.Highest(self.data.high, period=650, subplot=False)
        self.lowest = bt.indicators.Lowest(self.data.low, period=650, subplot=False)
        mid = (self.highest + self.lowest)/2
        # 11 levels spaced 0.5% apart, spanning mid +/- 2.5%.
        perc_levels = [x for x in np.arange(
            1 + 0.005 * 5, 1 - 0.005 * 5 - 0.005/2, -0.005)]
        self.price_levels = [mid * x for x in perc_levels]
        # Index of the grid slot the last rebalance was made at.
        self.last_price_index = None
        for i in range(len(self.price_levels)):
            print(i)
            print(self.price_levels[i] + 0)
    def next(self):
        if self.last_price_index == None:
            # First bar: pick the grid slot just below the close and size
            # the position proportionally to its index.
            for i in range(len(self.price_levels)):
                if self.data.close > self.price_levels[i]:
                    self.last_price_index = i
                    self.order_target_percent(
                        target=i/(len(self.price_levels) - 1))
                    return
        else:
            signal = False
            # Walk the ladder one level at a time until the close sits
            # between the adjacent grid levels.
            while True:
                upper = None
                lower = None
                if self.last_price_index > 0:
                    upper = self.price_levels[self.last_price_index - 1]
                if self.last_price_index < len(self.price_levels) - 1:
                    lower = self.price_levels[self.last_price_index + 1]
                # Not yet at the lightest position and price keeps rising:
                # sell one more grid level.
                if upper != None and self.data.close > upper:
                    self.last_price_index = self.last_price_index - 1
                    signal = True
                    continue
                # Not yet at the heaviest position and price keeps falling:
                # buy one more grid level.
                if lower != None and self.data.close < lower:
                    self.last_price_index = self.last_price_index + 1
                    signal = True
                    continue
                break
            if signal:
                self.long_short = None
                self.order_target_percent(
                    target=self.last_price_index/(len(self.price_levels) - 1))
11219 | """The devolo_home_control integration."""
from __future__ import annotations
import asyncio
from functools import partial
from types import MappingProxyType
from typing import Any
from devolo_home_control_api.exceptions.gateway import GatewayOfflineError
from devolo_home_control_api.homecontrol import HomeControl
from devolo_home_control_api.mydevolo import Mydevolo
from homeassistant.components import zeroconf
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from .const import (
CONF_MYDEVOLO,
DEFAULT_MYDEVOLO,
DOMAIN,
GATEWAY_SERIAL_PATTERN,
PLATFORMS,
)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up the devolo account from a config entry."""
    hass.data.setdefault(DOMAIN, {})
    mydevolo = configure_mydevolo(entry.data)
    # Trigger a reauth flow if the stored credentials no longer work.
    credentials_valid = await hass.async_add_executor_job(mydevolo.credentials_valid)
    if not credentials_valid:
        raise ConfigEntryAuthFailed
    # Retry setup later while the mydevolo cloud is in maintenance.
    if await hass.async_add_executor_job(mydevolo.maintenance):
        raise ConfigEntryNotReady
    gateway_ids = await hass.async_add_executor_job(mydevolo.get_gateway_ids)
    # Migrate legacy entries whose unique id was the gateway serial.
    if entry.unique_id and GATEWAY_SERIAL_PATTERN.match(entry.unique_id):
        uuid = await hass.async_add_executor_job(mydevolo.uuid)
        hass.config_entries.async_update_entry(entry, unique_id=uuid)
    try:
        zeroconf_instance = await zeroconf.async_get_instance(hass)
        hass.data[DOMAIN][entry.entry_id] = {"gateways": [], "listener": None}
        # HomeControl construction does blocking network I/O, so each
        # gateway connection is created in the executor.
        for gateway_id in gateway_ids:
            hass.data[DOMAIN][entry.entry_id]["gateways"].append(
                await hass.async_add_executor_job(
                    partial(
                        HomeControl,
                        gateway_id=gateway_id,
                        mydevolo_instance=mydevolo,
                        zeroconf_instance=zeroconf_instance,
                    )
                )
            )
    except GatewayOfflineError as err:
        raise ConfigEntryNotReady from err
    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    def shutdown(event: Event) -> None:
        # Close the gateway websockets cleanly on Home Assistant shutdown.
        for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]:
            gateway.websocket_disconnect(
                f"websocket disconnect requested by {EVENT_HOMEASSISTANT_STOP}"
            )
    # Listen when EVENT_HOMEASSISTANT_STOP is fired
    hass.data[DOMAIN][entry.entry_id]["listener"] = hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STOP, shutdown
    )
    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    unloaded = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    entry_data = hass.data[DOMAIN][entry.entry_id]
    # Disconnect every gateway websocket concurrently in the executor.
    disconnects = [
        hass.async_add_executor_job(gateway.websocket_disconnect)
        for gateway in entry_data["gateways"]
    ]
    await asyncio.gather(*disconnects)
    # Cancel the EVENT_HOMEASSISTANT_STOP listener and drop cached data.
    entry_data["listener"]()
    hass.data[DOMAIN].pop(entry.entry_id)
    return unloaded
def configure_mydevolo(conf: dict[str, Any] | MappingProxyType[str, Any]) -> Mydevolo:
    """Build a Mydevolo API client from config-entry data."""
    client = Mydevolo()
    client.user = conf[CONF_USERNAME]
    client.password = conf[CONF_PASSWORD]
    # Fall back to the default mydevolo URL when none is configured.
    client.url = conf.get(CONF_MYDEVOLO, DEFAULT_MYDEVOLO)
    return client
| StarcoderdataPython |
162054 | #!/usr/bin/env python3
from datetime import datetime
from html.parser import HTMLParser
from openpyxl import Workbook
import os
# Today's date in YYYYMMDD form -- the export file is named by date.
now = datetime.now()
current_time = now.strftime("%Y%m%d")
#with open("Logistics_133800_20200528.xls") as f:  # use for local testing
# Path to today's export.  Despite the .xls extension the content is
# parsed with HTMLParser below, i.e. the export is an HTML table.
# NOTE(review): the path contains a placeholder ("Username") -- adjust
# before running.
path = r'C:\Users\Username\Path\To\Logistics_133800_' + current_time + '.xls'
with open(path) as f:
    data = f.read()
class CoopHTMLParser(HTMLParser):
    """Parser for the Coop logistics HTML export.

    The export is a single table: <thead> holds the 18 column names and
    each <tbody> row holds one order of 18 cells.  Completed orders are
    collected in ``orders_list`` as dicts keyed by the column names.
    """

    def __init__(self):
        # BUG FIX: the parser state used to live in *class* attributes, so
        # every instance shared (and kept appending to) the same
        # firstRow/orders_list.  It is now per-instance state.
        super().__init__()
        self.parsed_data = ''    # text of the most recent data node
        self.is_t_head = False   # currently inside <thead>
        self.is_t_body = False   # currently inside <tbody>
        self.is_span = False     # currently inside <span> (text ignored)
        self.firstRow = []       # column names from the header row
        self.count = 0           # index of the current cell within a row
        self.orders_list = []    # completed orders (one dict per row)
        self.order = {}          # order currently being assembled

    def handle_starttag(self, tag, attrs):
        if tag == 'span':
            self.is_span = True
        if tag == 'thead':
            self.is_t_head = True
            self.is_t_body = False
        if tag == 'tbody':
            self.is_t_body = True
            self.is_t_head = False

    def handle_data(self, data):
        # Text inside <span> elements is ignored entirely.
        if not self.is_span:
            data = data.strip()
            if self.count == 1:
                # Second column is a delivery window "start - end";
                # normalize the whitespace around the dash.
                start_time, sep, end_time = data.partition('-')
                start_time = start_time.strip()
                end_time = end_time.strip()
                data = start_time + sep + end_time
            if (self.parsed_data == 'Avhämtning' or self.parsed_data == 'Hemleverans') and data == '':
                # Keep the delivery-type value: the cell emits a trailing
                # empty text node that would otherwise overwrite it.
                pass
            else:
                self.parsed_data = data

    def handle_endtag(self, tag):
        if tag == 'span':
            self.is_span = False
        if self.is_t_head and tag == 'th':
            # Header cell closed: record the column name.
            self.firstRow.append(self.parsed_data)
        if self.is_t_body and tag == 'td':
            if self.count < 18:
                self.order[self.firstRow[self.count]] = self.parsed_data
            self.count += 1
            if self.count == 11:
                # Column 11 may legitimately be empty; clear the buffer so
                # it cannot inherit the previous cell's text.
                self.parsed_data = ''
            if self.count == 18:
                # Row complete: store a copy and start the next row.
                self.orders_list.append(self.order.copy())
                self.count = 0
p = CoopHTMLParser()
# NOTE(review): a TypeError from the parser is swallowed with just an
# empty print, so malformed input fails silently.
try:
    p.feed(data)
except TypeError:
    print()
# One workbook per delivery type.
kommunalwb = Workbook()
kommunalws = kommunalwb.active
avhamtningwb = Workbook()
avhamtningws = avhamtningwb.active
kommunal = []
avhamtning = []
# Partition the parsed orders by delivery type.
# BUG FIX: the previous range(0, len(...) - 1) silently dropped the last
# parsed order; iterate over the whole list instead.
for order_row in p.orders_list:
    if order_row['Leveranstyp'] == 'Avhämtning':
        avhamtning.append(order_row)
    else:
        kommunal.append(order_row)
def switch_column(argument):
    """Map a worksheet column letter ('A'..'R') to its order-dict key.

    Unknown letters yield the sentinel string 'felfelfel'.
    """
    headers = {
        'A': '#',
        'B': 'Lev tid',
        'C': 'Exakt lev tid',
        'D': 'Id',
        'E': 'Kundtyp',
        'F': 'Kundnamn',
        'G': 'Gata',
        'H': 'Postnr',
        'I': 'Ort',
        'J': 'Rutt',
        'K': 'Leveranstyp',
        'L': 'Enhet',
        'M': 'Beställare',
        'N': 'Speditör kommentar',
        'O': 'Orderkommentar',
        'P': 'Kund Telefon',
        'Q': 'Enhet Telefon',
        'R': 'Ordersumma (inkl moms)',
    }
    return headers.get(argument, 'felfelfel')
# add separation between each 'time-slot'.
def addSeparation(list):
    """Insert a blank spacer row wherever the 'Lev tid' (delivery slot)
    changes between consecutive orders.  Mutates *list* in place.

    NOTE(review): the range is computed once, so each insert() shifts the
    remaining indices; spacer rows have 'Lev tid' == '' and are skipped by
    the preceding-row check.  The parameter also shadows builtin ``list``.
    """
    for i in range(0, len(list) -1):
        if i != 0:
            if not list[i - 1]['Lev tid'] == '':
                if list[i]['Lev tid'].lower() != list[i - 1]['Lev tid'].lower():
                    list.insert(i, {'#': '','Lev tid': '','Exakt lev tid': '','Id': '', 'Kundtyp': '', 'Kundnamn': '', 'Gata': '', 'Postnr': '',
                    'Ort': '', 'Rutt': '', 'Leveranstyp': '', 'Enhet': '', 'Beställare': '', 'Speditör kommentar': '', 'Orderkommentar': '', 'Kund Telefon': '','Enhet Telefon': '', 'Ordersumma (inkl moms)': ''})
# add the first row describing each column
def add_first_row(from_list, to_work_sheet,):
    """Write *from_list* as the header row (A1, B1, ...) of *to_work_sheet*.

    BUG FIX: the loop previously stopped at len(from_list) - 1 and
    silently dropped the last column header.
    """
    for i in range(len(from_list)):
        to_work_sheet[chr(i + 65) + '1'] = from_list[i]
# add values to columns
def addToColumn(list, work_sheet):
    """Write each order dict in *list* to one worksheet row, starting at
    row 2.  Columns are addressed 'A'.. and resolved to dict keys via
    switch_column().

    BUG FIX: both ranges were one element short, dropping the last order
    row and the last column of every row.  (Parameter shadows ``list``.)
    """
    for i in range(len(list)):
        for y in range(65, len(list[i]) + 65):
            work_sheet[chr(y) + str(i + 2)] = list[i][switch_column(chr(y))]
# Home deliveries sorted by order comment, pickups by delivery slot.
kommunal = sorted(kommunal, key=lambda i: i['Orderkommentar'].lower())
avhamtning = sorted(avhamtning, key=lambda i: i['Lev tid'])
# Only the pickup sheet gets blank spacer rows between time slots.
addSeparation(avhamtning)
add_first_row(p.firstRow, kommunalws)
add_first_row(p.firstRow, avhamtningws)
addToColumn(avhamtning, avhamtningws)
addToColumn(kommunal, kommunalws)
# BUG FIX: a raw string literal cannot end in a single backslash
# (r'G:\' is a SyntaxError), so use a normal string with an escaped one.
kommunal_path = 'G:\\'  # Add path to folders
avhamtning_path = 'G:\\'  # Add path to folders
kommunal_file_name = "Kommunal." + current_time + ".xlsx"
avhamtning_file_name = "Avhämtning." + current_time + ".xlsx"
kommunalwb.save(kommunal_path + kommunal_file_name)
avhamtningwb.save(avhamtning_path + avhamtning_file_name)
# BUG FIX: these were plain strings, so the literal text "{kommunal_path}"
# was passed to the shell; they must be f-strings (and 'avhamting_path'
# was a typo for 'avhamtning_path').
os.system(f'start "excel" "{kommunal_path}{kommunal_file_name}"')
os.system(f'start "excel" "{avhamtning_path}{avhamtning_file_name}"')
4840828 | # Generated by Django 3.1.7 on 2021-04-13 17:46
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates Product and Order."""

    initial = True

    dependencies = [
        # seller/buyer foreign keys target the configured user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                ('price', models.IntegerField()),
                ('seller', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='seller', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(auto_now_add=True)),
                ('buyer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='buyer', to=settings.AUTH_USER_MODEL)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product', to='store.product')),
            ],
        ),
    ]
| StarcoderdataPython |
1600305 | <gh_stars>0
import numpy as np
from simplenn import Network
from simplenn.layer import Dense
from simplenn.activation import ReLu
from simplenn.activation import SoftMaxLoss
from simplenn.layer.dropout import Dropout
from simplenn.metrics.loss import CategoricalCrossEntropy
from simplenn.metrics import Accuracy
from simplenn.optimizers import Adam
SAMPLES = 1000
FEATURES = 5
N_CLASSES = 3
# Synthetic data: uniform features, binomially drawn integer labels.
X = np.random.random((SAMPLES, FEATURES))
y = np.random.binomial(N_CLASSES - 1, 1 / N_CLASSES, SAMPLES)
y_ohe = np.eye(N_CLASSES)[y] # Input to model targets must be one hot encoded
class DemoNetwork(Network):
    """Demo MLP (FEATURES -> 64 -> 32 -> N_CLASSES) with ReLU, dropout and
    a combined softmax + cross-entropy output head."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # L1 regularisation on the first layer's weights and biases only.
        self.l1 = Dense(FEATURES, 64, W_l1=5e-4, b_l1=5e-4)
        self.dropout1 = Dropout(rate=0.2)
        self.activation1 = ReLu()
        self.l2 = Dense(64, 32)
        self.dropout2 = Dropout(rate=0.2)
        self.activation2 = ReLu()
        self.l3 = Dense(32, N_CLASSES)
        self.output = SoftMaxLoss(loss=CategoricalCrossEntropy())
    def forward(self, x, targets):
        # forward pass
        x = self.l1(x)
        x = self.dropout1(x)
        x = self.activation1(x)
        x = self.l2(x)
        x = self.dropout2(x)
        x = self.activation2(x)
        x = self.l3(x)
        # Output head fuses softmax with the loss computation on *targets*.
        return self.output(x, targets)
# Adam with learning-rate decay; train on the full synthetic set.
optimizer = Adam(lr=0.03, decay=5e-4, b1=0.9, b2=0.999)
acc = Accuracy()
model = DemoNetwork(optimizer=optimizer)
model.fit(X, y_ohe, epochs=100, batch_size=512, metrics=[acc])
# Evaluation is on the training data — no held-out split in this demo.
yprob_train = model.predict(X)
train_acc = acc(yprob_train, y_ohe)
print(f"Train Accuracy: {train_acc}")
| StarcoderdataPython |
172187 | from multiprocessing import Process, Queue, Lock
from Queue import Empty
from core.utilities import logging_handler_setup
class Device(object):
    """Base class for a device that runs in its own daemon process and
    communicates through a pair of multiprocessing queues.

    Subclasses must override main(); start() spawns the process.
    """
    layout_type = "Layout"
    def __init__(self):
        # Output queue
        self.out_queue = Queue()
        # Input Queue
        self.in_queue = Queue()
        # Mutex for the queues
        self.queue_mutex = Lock()
        # Default to the class name, overwrite this for a unique human-readable reference
        self.name = self.__class__.__name__
    def main(self):
        """
        This should be called to start the process
        """
        # BUG FIX: 'NotImplementedException' does not exist in Python, so
        # the original raise produced a NameError instead of the intended
        # "override me" error.
        raise NotImplementedError("Need to define main for %s" % self.__class__.__name__)
    def run_main(self, *args):
        # Process entry point: set up logging, then hand off to main().
        self.logger = logging_handler_setup(self.name)
        self.logger.info("Starting Process %s" % self.name)
        self.main(*args)
    def start(self, *args):
        """
        Starts itself in a new process, all *args are passed to device main
        Returns the new process
        """
        p = Process(target=self.run_main, args=args)
        p.daemon = True
        # (The former try/except that immediately re-raised added nothing.)
        p.start()
        return p
    def get_in_queue(self):
        """
        No wait by default
        Saves having to do an empty check and safer
        """
        return get_nowait(self.in_queue)
    def get_out_queue(self):
        """
        Saves having to do an empty check and safer
        """
        return get_nowait(self.out_queue)
def get_nowait(queue):
    """Pop one item from *queue* without blocking.

    Returns the item, or None when the queue is empty.  Catching the
    Empty exception here spares callers from doing it themselves, and
    the docs say Queue.empty() should not be relied upon anyway.
    """
    try:
        return queue.get_nowait()
    except Empty:
        return None
| StarcoderdataPython |
1676451 | <reponame>sideroff/python-exercises
def get_input_prices():
    """Prompt for prices until the user types "stop"; return them as floats.

    Entries that cannot be parsed as float are reported and re-prompted.
    """
    input_text = input('Enter a price or "stop": ')
    prices = []
    while input_text != 'stop':
        new_price = None
        try:
            new_price = float(input_text)
        except:
            print('Input could not be parsed. Please choose another.')
            input_text = input('Enter a price or "stop": ')
            continue
        prices.append(new_price)
        input_text = input('Enter a price or "stop": ')
    return prices
def main():
    """Collect prices, strip every occurrence of the lowest and highest
    values, then report the average of what remains.

    NOTE(review): "No prices provided. Retry." is also printed before the
    very first prompt, since prices starts out empty.
    """
    prices = []
    while (len(prices) == 0):
        print('No prices provided. Retry.')
        prices = get_input_prices()
    prices.sort()
    # Drop every entry equal to the minimum...
    lowest = prices[0]
    while len(prices) > 0 and prices[0] == lowest:
        prices.pop(0)
    highest = None
    # ...then every entry equal to the maximum of the remainder.
    if len(prices) > 0:
        highest = prices[-1]
        while len(prices) > 0 and prices[-1] == highest:
            prices.pop(-1)
    if len(prices) == 0:
        print('No values left after trimming lowest and highest prices.')
    else:
        print('''
        Highest price: %.2f
        Lowest price: %.2f
        Average from rest: %.2f
        ''' % (highest, lowest, sum(prices)/max(1, len(prices))))
if __name__ == "__main__":
main() | StarcoderdataPython |
3264996 | <reponame>levan92/detectron2<filename>projects/train_pp/config.py<gh_stars>1-10
# -*- coding = utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from detectron2.config import CfgNode as CN
def add_IN_config(cfg: CN):
    """
    Add the instance-normalisation option to the config.

    NOTE(review): the original docstring said "densepose head", but the
    only thing added here is MODEL.INSTANCE_NORM.
    """
    _C = cfg
    # Enable InstanceNorm in the model by default.
    _C.MODEL.INSTANCE_NORM = True
| StarcoderdataPython |
97044 | <gh_stars>0
"""SPADL schema for StatsBomb data."""
from typing import Optional
import pandera as pa
from pandera.typing import DateTime, Object, Series
from socceraction.data.schema import (
CompetitionSchema,
EventSchema,
GameSchema,
PlayerSchema,
TeamSchema,
)
class StatsBombCompetitionSchema(CompetitionSchema):
    """Definition of a dataframe containing a list of competitions and seasons."""

    # Extends the base schema with StatsBomb-specific columns; the bare
    # strings below are pandera per-field docstrings.
    country_name: Series[str]
    """The name of the country the competition relates to."""
    competition_gender: Series[str]
    """The gender of the players competing in the competition."""
class StatsBombGameSchema(GameSchema):
    """Definition of a dataframe containing a list of games."""

    competition_stage: Series[str]
    """The name of the phase of the competition this game is in."""
    home_score: Series[int]
    """The final score of the home team."""
    away_score: Series[int]
    """The final score of the away team."""
    # Venue and referee may be missing in the source data.
    venue: Series[str] = pa.Field(nullable=True)
    """The name of the stadium where the game was played."""
    referee: Series[str] = pa.Field(nullable=True)
    """The name of the referee."""
class StatsBombPlayerSchema(PlayerSchema):
    """Definition of a dataframe containing the list of players of a game."""

    nickname: Series[str] = pa.Field(nullable=True)
    """The nickname of the player on the team."""
    starting_position_id: Series[int]
    """The unique identifier for the starting position of the player on the team."""
    starting_position_name: Series[str]
    """The name of the starting position of the player on the team."""
class StatsBombTeamSchema(TeamSchema):
    """Definition of a dataframe containing the list of teams of a game."""
    # No StatsBomb-specific columns beyond the base TeamSchema.
class StatsBombEventSchema(EventSchema):
    """Definition of a dataframe containing event stream data of a game."""

    # Optional[...] columns may be absent entirely (presumably only
    # present for matches with 360 data — confirm against the loader).
    index: Series[int]
    """Sequence notation for the ordering of events within each match."""
    timestamp: Series[DateTime]
    """Time in the match the event takes place, recorded to the millisecond."""
    minute: Series[int]
    """The minutes on the clock at the time of this event."""
    second: Series[int] = pa.Field(ge=0, le=59)
    """The second part of the timestamp."""
    possession: Series[int]
    """Indicates the current unique possession in the game."""
    possession_team_id: Series[int]
    """The ID of the team that started this possession in control of the ball."""
    possession_team_name: Series[str]
    """The name of the team that started this possession in control of the ball."""
    play_pattern_id: Series[int]
    """The ID of the play pattern relevant to this event."""
    play_pattern_name: Series[str]
    """The name of the play pattern relevant to this event."""
    team_name: Series[str]
    """The name of the team this event relates to."""
    duration: Series[float] = pa.Field(nullable=True)
    """If relevant, the length in seconds the event lasted."""
    extra: Series[Object]
    """A JSON string containing type-specific information."""
    related_events: Series[Object]
    """A comma separated list of the IDs of related events."""
    player_name: Series[str] = pa.Field(nullable=True)
    """The name of the player this event relates to."""
    position_id: Series[float] = pa.Field(nullable=True)
    """The ID of the position the player was in at the time of this event."""
    position_name: Series[str] = pa.Field(nullable=True)
    """The name of the position the player was in at the time of this event."""
    location: Series[Object] = pa.Field(nullable=True)
    """Array containing the x and y coordinates of the event."""
    under_pressure: Series[bool] = pa.Field(nullable=True)
    """Whether the action was performed while being pressured by an opponent."""
    counterpress: Series[bool] = pa.Field(nullable=True)
    """Pressing actions within 5 seconds of an open play turnover."""
    visible_area_360: Optional[Series[Object]] = pa.Field(nullable=True)
    """An array of coordinates describing the polygon visible to the camera / in the 360 frame."""
    freeze_frame_360: Optional[Series[Object]] = pa.Field(nullable=True)
    """An array of freeze frame objects."""
| StarcoderdataPython |
1688791 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .ensemble_trainer import EnsembleTrainer, EnsembleTrainer_Deprecated
from .hogwild_trainer import HogwildTrainer
from .trainer import TaskTrainer, Trainer, TrainingState
# Public API of the trainer package.
__all__ = [
    "Trainer",
    "TrainingState",
    "EnsembleTrainer",
    "EnsembleTrainer_Deprecated",
    "HogwildTrainer",
    "TaskTrainer",
]
| StarcoderdataPython |
1791705 | import wx
class AutoResizeTextCtrl(wx.TextCtrl):
    """A wx.TextCtrl that adjusts its minimum width to fit its text.

    The width measured at construction acts as the lower bound; resizing
    is re-evaluated on every key release.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Never shrink below the initial width.
        self.min_auto_width = self.get_width()
        self.Bind(wx.EVT_KEY_UP, self.on_key_up)
        # Extra horizontal padding so the last glyph/caret is not clipped.
        self.add_width = 10
        self.auto_resize()
    def on_key_up(self, event):
        # Re-layout the parent only when the size actually changed.
        if self.auto_resize():
            self.GetParent().Layout()
        event.Skip()
    def has_to_resize(self):
        # NOTE(review): to_small/to_big cover every case except an exact
        # width match, so this returns True almost always.
        width = self.get_width()
        fit_width = self.get_fit_width()
        to_small = fit_width + self.add_width > width
        to_big = fit_width + self.add_width < width
        return to_small or to_big
    def auto_resize(self):
        # Returns True when the minimum width was updated.
        fit_width = self.get_fit_width()
        if self.has_to_resize():
            new_width = max(self.min_auto_width, fit_width + self.add_width)
            self.set_min_width(new_width)
            return True
        return False
    def set_min_width(self, width):
        # Keep the current height, change only the minimum width.
        self.SetMinSize(wx.Size(width, self.GetSize()[1]))
    def get_width(self):
        return self.GetSize()[0]
    def get_fit_width(self):
        # Text width plus both side borders.
        border_size = self.GetWindowBorderSize()
        return 2*border_size[0] + self.get_text_width()
    def get_text_extent(self, text = None):
        # Measure the given text (or the current value) with this
        # control's font on a screen device context.
        if not text: text = self.GetValue()
        font = self.GetFont()
        dc = wx.ScreenDC()
        dc.SetFont(font)
        return dc.GetTextExtent(text)
    def get_text_width(self, text = None):
        return self.get_text_extent(text)[0]
| StarcoderdataPython |
55610 | import plotly.graph_objects as go
# Template that applies a large Rockwell title font to any figure.
large_rockwell_template = dict(
    layout=go.Layout(title_font=dict(family="Rockwell", size=24))
)
fig = go.Figure()
fig.update_layout(title='Figure Title', template=large_rockwell_template)
fig.show()
| StarcoderdataPython |
3288618 | # Copyright (c) 2018 <NAME>.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Any
from UM.Qt.ListModel import ListModel
from PyQt5.QtCore import pyqtSlot, Qt
class SidebarCustomMenuItemsModel(ListModel):
    """Qt list model exposing plugin-registered sidebar menu items."""

    # Qt role ids; +4 is unused (presumably removed earlier — ids kept
    # stable for QML bindings).
    name_role = Qt.UserRole + 1
    actions_role = Qt.UserRole + 2
    menu_item_role = Qt.UserRole + 3
    menu_item_icon_name_role = Qt.UserRole + 5
    def __init__(self, parent=None):
        super().__init__(parent)
        self.addRoleName(self.name_role, "name")
        self.addRoleName(self.actions_role, "actions")
        self.addRoleName(self.menu_item_role, "menu_item")
        self.addRoleName(self.menu_item_icon_name_role, "icon_name")
        self._updateExtensionList()
    def _updateExtensionList(self)-> None:
        # Deferred import to avoid a circular dependency with CuraApplication.
        from cura.CuraApplication import CuraApplication
        for menu_item in CuraApplication.getInstance().getSidebarCustomMenuItems():
            self.appendItem({
                "name": menu_item["name"],
                "icon_name": menu_item["icon_name"],
                "actions": menu_item["actions"],
                "menu_item": menu_item["menu_item"]
            })
    @pyqtSlot(str, "QVariantList", "QVariantMap")
    def callMenuItemMethod(self, menu_item_name: str, menu_item_actions: list, kwargs: Any) -> None:
        # Invoke each requested action on the plugin object that
        # registered the menu item with the given name.
        for item in self._items:
            if menu_item_name == item["name"]:
                for method in menu_item_actions:
                    getattr(item["menu_item"], method)(kwargs)
                break
92396 | <filename>api/api_fanfic.py
from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import BadHeaderError, send_mail
from rest_framework import permissions, views, status
from rest_framework.response import Response
from fanfics.models import Fanfic
class ShareFanficAPIView(views.APIView):
    """
    Share fanfiction with e-mail
    """
    # Anyone may share; no authentication required.
    permission_classes = (permissions.AllowAny,)
    authentication_classes = ()
    def post(self, request, *args, **kwargs):
        # NOTE(review): Fanfic.objects.get raises DoesNotExist (-> 500)
        # for an unknown id; only BadHeaderError is handled below.
        fanfic_id = request.data.get('id')
        fanfic = Fanfic.objects.get(id=fanfic_id)
        current_site = get_current_site(request)
        name = request.data.get('name')
        email = request.data.get('email')
        to = request.data.get('to')
        comments = request.data.get('comments')
        try:
            # Frontend (hash-routed) URL of the fanfic detail page.
            fanfic_url = current_site.domain + '/#/' + 'fanfic/detail/' + fanfic.slug
            subject = '{} ({}) recommends you reading "{}"'.format(name, email, fanfic.title)
            message = 'Read "{}" at {}\n\n{}\'s comments: {}'.format(fanfic.title, fanfic_url,name, comments)
            send_mail(subject, message, settings.SERVER_EMAIL, [to])
            sent = True
            return Response({"message": sent}, status=status.HTTP_200_OK)
        except BadHeaderError:
            return Response({"status": "invalid headers"}, status=status.HTTP_400_BAD_REQUEST)
| StarcoderdataPython |
1795713 | from sqlalchemy import create_engine
import pandas as pd
import time
uri = f"mssql+pyodbc://AGR-DB17.sfso.no/AgrHam_PK01?driver=ODBC+Driver+17+for+SQL+Server"
engine = create_engine(uri)
| StarcoderdataPython |
1740874 | <filename>main.py
# std
import argparse
from argparse import Namespace, ArgumentParser
from pathlib import Path
from typing import Tuple
import uuid
from config.config import Config
from log import log
from scheduler_device import add_scheduler_job
def parse_arguments() -> Tuple[ArgumentParser, Namespace]:
    """Build the CLI parser and parse sys.argv.

    Returns both the parser (for later error reporting) and the parsed
    namespace.  --config is the one required option.
    """
    cli = argparse.ArgumentParser(
        description="ChiaFarmWatch: Watch your crops " "with a piece in mind for the yield."
    )
    required = cli.add_mutually_exclusive_group(required=True)
    required.add_argument('--config', type=str, help="path to config.yaml")
    args = cli.parse_args()
    return cli, args
def init(config: Config):
    """Bootstrap the watcher: log startup, generate a device id and
    register the scheduler jobs defined in *config*."""
    log.debug_logger.logger.info(f"Starting Chiadoge")
    # Generate the device ID (uuid1 embeds host MAC address + timestamp).
    device_id = uuid.uuid1()
    log.debug_logger.logger.info(f"Your Device ID: " + str(device_id))
    add_scheduler_job.add_job_scheduler(config, str(device_id))
if __name__ == "__main__":
# Parse config and configure logger
argparse, args = parse_arguments()
if args.config:
conf = Config(Path(args.config))
init(conf)
| StarcoderdataPython |
# Number of cough/sneeze rounds to run (prompt is in Portuguese).
x = int(input("digite um valor "))
def falar(palavra):
    """Print *palavra* followed by two exclamation marks."""
    mensagem = palavra + "!!"
    print(mensagem)
def tossir():
    """Simulate a cough by shouting 'cough'."""
    falar("cough")
def espirrar():
    """Simulate a sneeze by shouting 'atchoo'."""
    falar("atchoo")
# One cough followed by one sneeze per requested round.
for i in range(x):
    tossir()
    espirrar()
| StarcoderdataPython |
3311383 | from rest_framework import serializers
from blog.models import Post, Tags
class TagsSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a Tags instance, exposing only its name."""
    class Meta:
        model = Tags
        fields = ('name', )
class PostsSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a Post with its tags nested read-only."""
    tags = TagsSerializer(many=True, read_only=True)
    class Meta:
        model = Post
        fields = ('unique_id', 'title', 'body', 'share_image', 'author', 'created_at', 'publish', 'tags')
| StarcoderdataPython |
3304362 | # cmdline options
# Author: <NAME>
import sys
from .phase_snp import phase_snp
from .config import APP
def __usage(fp = sys.stderr):
    """Write the command-line usage text to *fp* (stderr by default)."""
    msg = "\n"
    msg += "Usage: %s <command> [options]\n" % APP
    msg += "\n" \
        "Commands:\n" \
        "  phase_snp        Aggregate SNPs into haplotype blocks.\n" \
        "  -h, --help       Print this message.\n" \
        "\n"
    fp.write(msg)
def main():
    """Dispatch the first CLI argument to the matching sub-command.

    Exit codes: 1 = no command, 3 = help requested, 5 = unknown command.
    """
    argc = len(sys.argv)
    if argc < 2:
        __usage()
        sys.exit(1)
    command = sys.argv[1]
    if command == "phase_snp": phase_snp(argc, sys.argv)
    elif command in ("-h", "--help"): __usage(); sys.exit(3)
    else: sys.stderr.write("Error: wrong command '%s'\n" % command); sys.exit(5)
if __name__ == "__main__":
main()
| StarcoderdataPython |
141988 | import sys
def convert(f_in, f_out, f_features):
    """Convert a whitespace-separated feature file to libSVM format,
    keeping only the (1-based) feature indexes listed on the first line
    of *f_features*.  (Python 2 code: xrange, list-returning zip/map.)
    """
    features = []
    labels = []
    for line in f_in.readlines():
        words = line.strip().split(" ")
        # First column is the (float) label, rounded to the nearest int.
        labels.append(int(round(float(words[0]))))
        # Pair each remaining value with its 1-based feature index.
        features.append(zip(xrange(1,len(words)+1), words[1:]))
    # Selected feature indexes, converted from 1-based to 0-based.
    indexes = map(lambda x: int(x)-1, f_features.readline().split())
    for i in xrange(len(features)):
        feature = [features[i][index] for index in indexes]
        label = labels[i]
        # libSVM format: "<label> idx:value idx:value ...".
        features_str = " ".join(map(lambda x: str(x[0])+":"+x[1], feature))
        #print features_str
        f_out.write(str(label) + " " + features_str+"\n")
# NOTE(review): bare except hides any error, not just a missing argv;
# test_file_name, sourcedir and resultdir are read/assigned but unused.
try:
    train_file_name = sys.argv[1]
    test_file_name = sys.argv[2]
except:
    print "Usage: " + sys.argv[0] + "<train_file.txt> <test_file.txt>"
    print "Please change the source file according to the designated folders"
    exit(1)
sourcedir = "../features/matlab_extracted_features/"
resultdir = "../features/svm_files/"
train_in = open(train_file_name)
train_out = open("svm/data", "w")
features_file = open("selected_features.txt")
convert(train_in, train_out, features_file)
features_file.close()
train_in.close()
train_out.close()
| StarcoderdataPython |
160713 | <reponame>denyingmxd/Torchssc
# encoding: utf-8
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from models.DDR import *
class SimpleRB(nn.Module):
    """Plain 3D residual block: two 3x3x3 convolutions (each followed by
    normalisation, ReLU after the first) plus an identity skip connection."""
    def __init__(self, in_channel, norm_layer, bn_momentum):
        super(SimpleRB, self).__init__()
        self.path = nn.Sequential(
            nn.Conv3d(in_channel, in_channel, kernel_size=3, padding=1, bias=False),
            norm_layer(in_channel, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(in_channel, in_channel, kernel_size=3, padding=1, bias=False),
            norm_layer(in_channel, momentum=bn_momentum),
        )
        self.relu = nn.ReLU()
    def forward(self, x):
        # y = ReLU(F(x) + x): channel count is preserved by construction.
        skip = x
        combined = self.path(x) + skip
        return self.relu(combined)
'''
3D Residual Block: a 3x3x3 conv decomposed into 3 smaller 3D convs, referred from DDRNet
'''
class STAGE1(nn.Module):
    """Sketch-refinement stage: lifts a 1-channel TSDF volume to *feature*
    channels, runs a small encoder/decoder built from DDR bottlenecks, and
    predicts a 2-channel refined sketch.

    NOTE(review): class_num, resnet_out, ThreeDinit, pretrained_model,
    eval, freeze_bn and sketch_gt are accepted but unused in this block;
    ``eval`` also shadows the builtin.
    """
    def __init__(self, class_num, norm_layer, resnet_out=2048, feature=512, ThreeDinit=True,
                 bn_momentum=0.1, pretrained_model=None, eval=False, freeze_bn=False, sketch_gt=False):
        super(STAGE1, self).__init__()
        # Layers collected here get a separate learning-rate group upstream.
        self.business_layer = []
        self.feature = feature
        # Input stem: 1 -> 3 -> 64 -> feature channels.
        self.oper1 = nn.Sequential(
            nn.Conv3d(1, 3, kernel_size=3, padding=1, bias=False),
            norm_layer(3, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(3, 64, kernel_size=3, padding=1, bias=False),
            norm_layer(64, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(64, feature, kernel_size=3, padding=1, bias=False),
            norm_layer(feature, momentum=bn_momentum),
            nn.ReLU(inplace=False),
        )
        self.business_layer.append(self.oper1)
        # Output head: feature -> 64 -> 3 -> 2 sketch logits.
        self.oper2 = nn.Sequential(
            nn.Conv3d(feature, 64, kernel_size=3, padding=1, bias=False),
            norm_layer(64, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(64, 3, kernel_size=3, padding=1, bias=False),
            norm_layer(3, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(3, 2, kernel_size=3, padding=1, bias=False),
            norm_layer(2, momentum=bn_momentum),
            nn.ReLU(inplace=False),
        )
        self.business_layer.append(self.oper2)
        # Encoder level 1: four dilated DDR bottlenecks at full resolution.
        self.completion1 = nn.Sequential(
            BottleneckDDR3d(c_in=feature, c=feature//4, c_out=feature, kernel=3, dilation=2, residual=True),
            BottleneckDDR3d(c_in=feature, c=feature//4, c_out=feature, kernel=3, dilation=2, residual=True),
            BottleneckDDR3d(c_in=feature, c=feature//4, c_out=feature, kernel=3, dilation=2, residual=True),
            BottleneckDDR3d(c_in=feature, c=feature//4, c_out=feature, kernel=3, dilation=2, residual=True),
        )
        # Two parallel downsamplers: one on the refined path, one as a skip.
        self.pool11 = DownsampleBlock3d(c_in=feature, c_out=feature*2)
        self.pool12 = DownsampleBlock3d(c_in=feature, c_out=feature*2)
        # Encoder level 2 at half resolution.
        self.completion2 = nn.Sequential(
            BottleneckDDR3d(c_in=feature*2, c=32, c_out=feature * 2, kernel=3, dilation=2, residual=True),
            BottleneckDDR3d(c_in=feature * 2, c=32, c_out=feature * 2, kernel=3, dilation=2, residual=True),
            BottleneckDDR3d(c_in=feature * 2, c=32, c_out=feature * 2, kernel=3, dilation=2, residual=True),
            BottleneckDDR3d(c_in=feature * 2, c=32, c_out=feature * 2, kernel=3, dilation=2, residual=True),
        )
        self.pool2 = nn.MaxPool3d(kernel_size=2, stride=2)
        # Decoder: transpose convolutions back up, with skip connections.
        self.deconv1 = nn.Sequential(
            nn.ConvTranspose3d(feature*2, feature*2, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1),
            norm_layer(feature*2, momentum=bn_momentum),
            nn.ReLU(inplace=False)
        )
        self.deconv2 = nn.Sequential(
            nn.ConvTranspose3d(feature*2, feature, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1),
            norm_layer(feature, momentum=bn_momentum),
            nn.ReLU(inplace=False)
        )
        self.deconv3 = nn.Sequential(
            nn.ConvTranspose3d(feature*2, feature, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1),
            norm_layer(feature, momentum=bn_momentum),
            nn.ReLU(inplace=False)
        )
        self.business_layer.append(self.pool11)
        self.business_layer.append(self.pool12)
        self.business_layer.append(self.completion1)
        self.business_layer.append(self.completion2)
        self.business_layer.append(self.deconv1)
        self.business_layer.append(self.deconv2)
        self.business_layer.append(self.deconv3)
    def forward(self, tsdf, depth_mapping_3d, sketch_gt=None):
        '''
        extract 3D feature
        '''
        # depth_mapping_3d and sketch_gt are unused in this stage.
        raw_tsdf = self.oper1(tsdf)
        # Encoder: bottlenecks + downsample, plus a raw-feature skip pool.
        y1 = self.pool11(self.completion1(raw_tsdf))+self.pool12(raw_tsdf)
        y2 = self.pool2(self.completion2(y1))
        # Decoder with skip connections back to full resolution.
        y3 = self.deconv1(y2)+y1
        y4 = self.deconv2(y3)+self.deconv3(y3)
        pred_sketch_refine = self.oper2(y4)
        return pred_sketch_refine
'''
main network2d
'''
class Network_baseline(nn.Module):
    """Baseline wrapper: the whole model is STAGE1 run on the TSDF volume.

    NOTE(review): ``rgb`` and ``sketch_gt`` are accepted by forward() but
    unused here — presumably consumed by non-baseline variants.
    """
    def __init__(self, class_num, norm_layer, resnet_out=2048, feature=512, ThreeDinit=True,
                 bn_momentum=0.1, pretrained_model=None, eval=False, freeze_bn=False):
        super(Network_baseline, self).__init__()
        self.business_layer = []
        self.dilate = 2
        self.stage1 = STAGE1(class_num, norm_layer, resnet_out=resnet_out, feature=feature, ThreeDinit=ThreeDinit,
                             bn_momentum=bn_momentum, pretrained_model=pretrained_model, eval=eval, freeze_bn=freeze_bn)
        self.business_layer += self.stage1.business_layer
    def forward(self, rgb, depth_mapping_3d, tsdf, sketch_gt=None):
        h, w = rgb.size(2), rgb.size(3)  # NOTE(review): computed but unused
        pred_sketch_refine = self.stage1(tsdf,depth_mapping_3d)
        results={'pred_sketch_refine':pred_sketch_refine}
        return results
    # @staticmethod
    def _nostride_dilate(self, m, dilate):
        # Turn stride-2 convs into stride-1 dilated convs (unused helper).
        if isinstance(m, nn.Conv2d):
            if m.stride == (2, 2):
                m.stride = (1, 1)
                if m.kernel_size == (3, 3):
                    m.dilation = (dilate, dilate)
                    m.padding = (dilate, dilate)
            else:
                if m.kernel_size == (3, 3):
                    m.dilation = (dilate, dilate)
                    m.padding = (dilate, dilate)
if __name__ == '__main__':
    # Smoke test: build the model and run one dummy forward pass.
    # NOTE(review): the explicit .cuda() calls below fail on CPU-only
    # machines despite the device fallback two lines down.
    model = Network_baseline(class_num=2, norm_layer=nn.BatchNorm3d, feature=128, eval=True)
    # print(model)
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = model.to(device)
    model.eval()
    left = torch.rand(1, 3, 480, 640).cuda()
    right = torch.rand(1, 3, 480, 640).cuda()
    depth_mapping_3d = torch.from_numpy(np.ones((1, 129600)).astype(np.int64)).long().cuda()
    tsdf = torch.rand(1, 1, 60, 36, 60).cuda()
    out = model(left, depth_mapping_3d, tsdf, None)
1623739 | <filename>Algorithms/A Number After a Double Reversal/solution.py
class Solution:
    def isSameAfterReversals(self, num: int) -> bool:
        """A number survives reversing twice exactly when it has no
        trailing zero; zero itself is the one exception."""
        if num == 0:
            return True
        return num % 10 != 0
| StarcoderdataPython |
61612 | <gh_stars>10-100
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import os
import sys
import socket
import logging
#logging.basicConfig()
# configure root logger
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
log_handler = logging.StreamHandler(stream=sys.stderr)
log_formatter = logging.Formatter('[%(levelname)s] %(message)s')
log_handler.setFormatter(log_formatter)
logger.addHandler(log_handler)
# Rebind to the example-specific logger for the rest of the module.
logger = logging.getLogger("example")
#sys.path.append(os.path.join("..", "endpoints"))
# Make the endpoints package importable from this examples directory.
sys.path.append("..")
sys.path.append(os.path.join("..", ".."))
from endpoints import Call, Request, Response
class SimpleHandler(BaseHTTPRequestHandler):
    """Bridges BaseHTTPServer requests into the endpoints
    Call/Request/Response abstraction (Python 2 code)."""
    def handle_one_request(self):
        try:
            # 65537 = max request-line length + 1, mirroring the stdlib.
            self.raw_requestline = self.rfile.readline(65537)
            if not self.raw_requestline:
                self.close_connection = 1
                return
            if not self.parse_request():
                # An error code has been sent, just exit
                return
            # Split the URL into path and query string.
            if '?' in self.path:
                path, query = self.path.split('?', 1)
            else:
                path = self.path
                query = ""
            # Translate the raw HTTP request into an endpoints Request.
            req = Request()
            req.path = path
            req.query = query
            req.method = self.command
            req.headers = self.headers.dict
            # "controllers" is the module prefix Call dispatches into.
            c = Call("controllers")
            c.request = req
            c.response = Response()
            res = c.handle()
            # Write the endpoints Response back out over HTTP.
            self.send_response(res.code)
            for h, hv in res.headers.iteritems():
                self.send_header(h, hv)
            self.send_header('Connection', 'close')
            self.end_headers()
            body = res.body
            if body:
                self.wfile.write(res.body)
            self.wfile.flush()
        except socket.timeout, e:
            self.log_error("Request timed out: %r", e)
            self.close_connection = 1
            return
if __name__ == "__main__":
port = 8000
logger.info("Listening on port {}".format(port))
server_address = ('', port)
httpd = HTTPServer(server_address, SimpleHandler)
httpd.serve_forever()
#httpd.handle_request()
| StarcoderdataPython |
18288 | <gh_stars>0
import unittest
from selenium import webdriver
from tests import Base
class WebKitGTKDriverBenchmarkTest(Base.Base):
    """Runs the shared benchmark suite against the WebKitGTK driver."""
    def getDriver(self):
        # Base.Base calls this hook to obtain the Selenium driver.
        return webdriver.WebKitGTK()
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
73561 | import os, sys
import numpy as np
from math import sqrt
# testing without install
#sys.path.insert(0, '../build/lib.macosx-10.9-x86_64-3.8')
import poppunk_refine
# Original PopPUNK function (with some improvements)
def withinBoundary(dists, x_max, y_max, slope=2):
    """Classify each (x, y) distance pair against the decision boundary.

    Returns an array with one entry per row of *dists*:
    -1 inside the boundary, 0 on it (within float32 eps), 1 outside.
    slope 2 = triangular boundary, 0 = vertical at x_max, 1 = horizontal
    at y_max.
    """
    eps = np.finfo(np.float32).eps
    result = np.ones((dists.shape[0]))
    for idx in range(result.size):
        if slope == 2:
            signed = dists[idx, 1] * x_max + dists[idx, 0] * y_max - x_max * y_max
        elif slope == 0:
            signed = dists[idx, 0] - x_max
        elif slope == 1:
            signed = dists[idx, 1] - y_max
        if abs(signed) < eps:
            result[idx] = 0
        elif signed < 0:
            result[idx] = -1
    return result
def check_tuples(t1, t2):
    """Raise RuntimeError unless every tuple in *t1* also occurs in *t2*."""
    missing = [t for t in t1 if t not in t2]
    if missing:
        raise RuntimeError("Results don't match")
def iter_tuples(assign_results, n_samples):
    """Recover the (i, j) sample pairs whose assignment is -1.

    *assign_results* is indexed in condensed upper-triangle order, i.e.
    (0,1), (0,2), ..., (n-2, n-1).
    """
    pairs = []
    pos = 0
    for i in range(n_samples):
        for j in range(i + 1, n_samples):
            if assign_results[pos] == -1:
                pairs.append((i, j))
            pos += 1
    return pairs
def check_res(res, expected):
    """Raise RuntimeError (after printing both arrays) if *res* differs from *expected*."""
    if np.all(res == expected):
        return
    print(res)
    print(expected)
    raise RuntimeError("Results don't match")
# ---------------------------------------------------------------------------
# Part 1: compare the C++ assignThreshold against the reference Python
# withinBoundary on a regular grid of (core, accessory) distances.
# ---------------------------------------------------------------------------
# assigning
x = np.arange(0, 1, 0.1, dtype=np.float32)
y = np.arange(0, 1, 0.1, dtype=np.float32)
xv, yv = np.meshgrid(x, y)
# 100 x 2 matrix of distance pairs covering [0, 1)^2.
distMat = np.hstack((xv.reshape(-1,1), yv.reshape(-1,1)))
# Slopes: 0 = vertical boundary, 1 = horizontal, 2 = diagonal.
assign0 = poppunk_refine.assignThreshold(distMat, 0, 0.5, 0.5, 2)
assign1 = poppunk_refine.assignThreshold(distMat, 1, 0.5, 0.5, 2)
assign2 = poppunk_refine.assignThreshold(distMat, 2, 0.5, 0.5, 2)
assign0_res = withinBoundary(distMat, 0.5, 0.5, 0)
assign1_res = withinBoundary(distMat, 0.5, 0.5, 1)
assign2_res = withinBoundary(distMat, 0.5, 0.5, 2)
check_res(assign0, assign0_res)
check_res(assign1, assign1_res)
check_res(assign2, assign2_res)

# ---------------------------------------------------------------------------
# Part 2: same comparison, but with results returned as edge tuples, on a
# random condensed distance matrix for 100 samples.
# ---------------------------------------------------------------------------
# Check results when returned as tuple
samples = 100
distMat = np.random.rand(int(0.5 * samples * (samples - 1)), 2)
distMat = np.array(distMat, dtype = np.float32)
assign0_res = withinBoundary(distMat, 0.5, 0.5, 0)
assign0_edge_res = iter_tuples(assign0_res, samples)
check_tuples(assign0_edge_res,
             poppunk_refine.generateTuples([int(x) for x in assign0_res], -1))
assign1_edge_res = iter_tuples(withinBoundary(distMat, 0.5, 0.5, 1), samples)
assign2_edge_res = iter_tuples(withinBoundary(distMat, 0.5, 0.5, 2), samples)
assign0_edges = poppunk_refine.edgeThreshold(distMat, 0, 0.5, 0.5)
assign1_edges = poppunk_refine.edgeThreshold(distMat, 1, 0.5, 0.5)
assign2_edges = poppunk_refine.edgeThreshold(distMat, 2, 0.5, 0.5)
check_tuples(assign0_edges, assign0_edge_res)
check_tuples(assign1_edges, assign1_edge_res)
check_tuples(assign2_edges, assign2_edge_res)

# ---------------------------------------------------------------------------
# Part 3: iterate the boundary position and check the edges reported by the
# C++ thresholdIterate1D/2D against per-offset reference assignments.
# NOTE(review): indentation below is reconstructed — the sketchlib edge lists
# accumulate across offsets while the py_* lists are rebuilt per offset, and
# the comparison runs once per offset; confirm against the upstream file.
# ---------------------------------------------------------------------------
# move boundary 1D
# example is symmetrical at points (0.1, 0.1); (0.2, 0.2); (0.3, 0.3)
offsets = [x * sqrt(2) for x in [-0.1, 0.0, 0.1]]
i_vec, j_vec, idx_vec = poppunk_refine.thresholdIterate1D(distMat, offsets, 2, 0.2, 0.2, 0.3, 0.3)
sketchlib_i = []
sketchlib_j = []
for offset_idx, offset in enumerate(offsets):
    # idx labels each edge with the first offset index at which it enters the
    # boundary, so accumulating idx == offset_idx gives the cumulative set.
    for i, j, idx in zip(i_vec, j_vec, idx_vec):
        if idx > offset_idx:
            break
        elif idx == offset_idx:
            sketchlib_i.append(i)
            sketchlib_j.append(j)
    py_i = []
    py_j = []
    # Boundary intercept implied by moving the base (0.4, 0.4) boundary by
    # `offset` along the normal direction.
    xmax = 0.4 + (2 * (offset/sqrt(2)))
    assign = poppunk_refine.assignThreshold(distMat, 2, xmax, xmax, 1)
    dist_idx = 0
    for i in range(samples):
        for j in range(i + 1, samples):
            if assign[dist_idx] <= 0:
                py_i.append(i)
                py_j.append(j)
            dist_idx += 1
    if set(zip(py_i, py_j)) != set(zip(sketchlib_i, sketchlib_j)):
        raise RuntimeError("Threshold 1D iterate mismatch at offset " + str(offset))

# move boundary 2D
# example is for boundaries (0.1, 0.2); (0.2, 0.2); (0.3, 0.2)
offsets = [0.1, 0.2, 0.3]
y_max = 0.2
i_vec, j_vec, idx_vec = poppunk_refine.thresholdIterate2D(distMat, offsets, y_max)
sketchlib_i = []
sketchlib_j = []
for offset_idx, offset in enumerate(offsets):
    for i, j, idx in zip(i_vec, j_vec, idx_vec):
        if idx > offset_idx:
            break
        elif idx == offset_idx:
            sketchlib_i.append(i)
            sketchlib_j.append(j)
    py_i = []
    py_j = []
    assign = poppunk_refine.assignThreshold(distMat, 2, offset, y_max, 1)
    dist_idx = 0
    for i in range(samples):
        for j in range(i + 1, samples):
            if assign[dist_idx] <= 0:
                py_i.append(i)
                py_j.append(j)
            dist_idx += 1
    if set(zip(py_i, py_j)) != set(zip(sketchlib_i, sketchlib_j)):
        raise RuntimeError("Threshold 2D iterate mismatch at offset " + str(offset))
| StarcoderdataPython |
87819 | <reponame>Willsparker/FYP_EdgeDetection
import cv2
import os
import re
from math import hypot
import numpy as np
CurPath = os.path.dirname(__file__)
# Path to output txt file:
iris_pos_file = CurPath + '/PrintedImages/IrisPositions.txt'
image_dir = CurPath + '/PrintedImages/'
output_dir = CurPath + '/PrintedIris/'
# Get all printed images, if they're .jpg
def getPrintedImageNames():
    """Return the stem (no extension) of every .jpg file in image_dir."""
    names = []
    for entry in os.listdir(image_dir):
        stem, ext = os.path.splitext(entry)
        if ext == ".jpg":
            names.append(stem)
    return names
def cropImage(image,centre,radius):
    """Return a tight rectangular crop of *image* containing only the circular
    region of the given centre/radius; pixels outside the circle are blacked out.
    """
    # Black mask with a filled white circle marking the region of interest.
    mask = np.zeros(image.shape, dtype=np.uint8)
    cv2.circle(mask, centre, radius, (255,255,255), -1)
    # Bitwise-and for ROI
    ROI = cv2.bitwise_and(image, mask)
    # Crop mask and turn background white
    mask = cv2.cvtColor(mask, cv2.COLOR_BGR2GRAY)
    x,y,w,h = cv2.boundingRect(mask)
    result = ROI[y:y+h,x:x+w]
    mask = mask[y:y+h,x:x+w]
    # NOTE(review): despite the comment above, the background is set to black
    # (0,0,0) here, not white — confirm which is intended.
    result[mask==0] = (0,0,0)
    return result
# For each printed image, look up its iris centre and radius in the positions
# text file and write the cropped iris to the output directory.
for image_path in getPrintedImageNames():
    with open(iris_pos_file,"r") as file:
        for line in file:
            # NOTE(review): re.search treats the file name as a regex pattern;
            # names containing metacharacters (e.g. '+') could mis-match —
            # a plain substring test may be safer. Confirm input names.
            if re.search(image_path, line):
                # Get all details from text file
                # Expected line layout: "<name> <cx>,<cy> <radius> ..."
                centre = line.split(' ')[1]
                centre = (int(centre.split(",")[0]),int(centre.split(",")[1]))
                distance = line.split(' ')[2]
                # Read image and draw the circle on it
                image = cv2.imread(image_dir + image_path + ".jpg")
                cv2.imwrite(output_dir+image_path+".jpg",cropImage(image,centre,int(float(distance))))
| StarcoderdataPython |
1688131 | from math import ceil
# Read the problem inputs from stdin.
budget = float(input())
students = int(input())
flour_price = float(input())
egg_price = float(input())
apron_price = float(input())

# Every 5th student earns a free flour package; aprons are bought with a
# 20% surplus (rounded up); each student needs 10 eggs.
paid_flour_packages = students - students // 5
aprons_needed = ceil(students * 1.2)
total_cost = (apron_price * aprons_needed
              + egg_price * 10 * students
              + flour_price * paid_flour_packages)

if total_cost <= budget:
    print(f"Items purchased for {total_cost:.2f}$.")
else:
    print(f"{total_cost - budget:.2f}$ more needed.")
| StarcoderdataPython |
157810 | <gh_stars>0
# powderday run-time parameter file: every assignment below is a module-level
# constant consumed by the radiative-transfer pipeline. Units and valid ranges
# are given in the inline comments.
#===============================================
#RESOLUTION KEYWORDS
#===============================================
oref = 0 #over refine factor - should typically be set to 0
n_ref = 32 #when n_particles > n_ref, octree refines further
zoom_box_len = 100 #kpc; so the box will be +/- zoom_box_len from the center
bbox_lim = 1.e5 #kpc - this is the initial bounding box of the grid (+/- bbox_lim)
#This *must* encompass all of the particles in the
#simulation.
#===============================================
#PARALLELIZATION
#===============================================
n_processes = 16 #number of pool processes to run
n_MPI_processes = 1 #number oF MPI processes to run
#===============================================
#RT INFORMATION
#===============================================
# Photon counts for the Hyperion radiative-transfer stages.
n_photons_initial = 1.e5
n_photons_imaging = 1.e5
n_photons_raytracing_sources = 1.e5
n_photons_raytracing_dust = 1.e5
FORCE_RANDOM_SEED = False
seed = -12345 #has to be an int, and negative.
#===============================================
#DUST INFORMATION
#===============================================
dustdir = '/home/desika.narayanan/hyperion-dust-0.1.0/dust_files/' #location of your dust files
dustfile = 'd03_3.1_6.0_A.hdf5'
PAH = True
dust_grid_type = 'dtm' #needs to be in ['dtm','rr','manual','li_bestfit']
dusttometals_ratio = 0.4
enforce_energy_range = False #False is the default; ensures energy conservation
SUBLIMATION = False #do we automatically kill dust grains above the
#sublimation temperature; right now is set to fast
#mode
SUBLIMATION_TEMPERATURE = 1600. #K -- meaningliess if SUBLIMATION == False
#===============================================
#STELLAR SEDS INFO
#===============================================
FORCE_BINNING = True #force SED binning
imf_type = 2 #FSPS imf types; 0 = salpeter, 1 = chabrier; 2 = kroupa; 3 and 4 (vandokkum/dave) not currently supported
pagb = 0 #weight given to post agb stars# 1 is the default
add_neb_emission = False #add nebular line emission from Cloudy Lookup tables (dev. by <NAME>)
gas_logu = -2 #gas ionization parameter for HII regions; only relevant
#if add_neb_emission = True default = -2
FORCE_gas_logz = False #if set, then we force the gas_logz of HII
#regions to be gas_logz (next parameter); else, it is taken to be the star particles metallicity. default is False
gas_logz = 0 #units of log(Z/Z_sun); metallicity of the HII region
#metallicity; only relevant if add_neb_emission = True;
#default is 0
add_agb_dust_model=False #add circumstellar AGB dust model (100%); <NAME> & Jonson 2015
CF_on = False #if set to true, then we enable the Charlot & Fall birthcloud models
birth_cloud_clearing_age = 0.01 #Gyr - stars with age <
#birth_cloud_clearing_age have
#charlot&fall birthclouds meaningless
#of CF_on == False
Z_init = 0 #force a metallicity increase in the newstar particles.
#This is useful for idealized galaxies. The units for this
#are absolute (so enter 0.02 for solar). Setting to 0
#means you use the stellar metallicities as they come in
#the simulation (more likely appropriate for cosmological
#runs)
#Idealized Galaxy SED Parameters
disk_stars_age = 8 #Gyr ;meaningless if this is a cosmological simulation; note, if this is <= 7, then these will live in birth clouds
bulge_stars_age = 8 #Gyr ; meaningless if this is a cosmological simulation; note, if this is <= 7, then these will live in birth clouds
disk_stars_metals = 19 #in fsps metallicity units
bulge_stars_metals = 19 #in fsps metallicity units
#bins for binning the stellar ages and metallicities for SED
#assignments in cases of many (where many ==
#>N_METALLICITY_BINS*N_STELLAR_AGE_BINS) stars; this is necessary for
#reduction of memory load; see manual for details.
N_STELLAR_AGE_BINS = 100
metallicity_legend= "/home/desika.narayanan/fsps/ISOCHRONES/Padova/Padova2007/zlegend.dat"
#===============================================
#BLACK HOLE STUFF
#===============================================
BH_SED = False
BH_eta = 0.1 #bhluminosity = BH_eta * mdot * c**2.
BH_model = "Nenkova"
BH_modelfile = "/home/desika.narayanan/powderday/agn_models/clumpy_models_201410_tvavg.hdf5"
# The Nenkova BH_modelfile can be downloaded here:
# https://www.clumpy.org/downloads/clumpy_models_201410_tvavg.hdf5
nenkova_params = [5,30,0,1.5,30,40] #Nenkova+ (2008) model parameters
#===============================================
#IMAGES AND SED
#===============================================
NTHETA = 1
NPHI = 1
SED = True
SED_MONOCHROMATIC = False
FIX_SED_MONOCHROMATIC_WAVELENGTHS = False #if set, then we only use
#nlam wavelengths in the
#range between min_lam and
#max_lam
SED_MONOCHROMATIC_min_lam = 0.3 #micron
SED_MONOCHROMATIC_max_lam = 0.4 #micron
SED_MONOCHROMATIC_nlam = 100
IMAGING = False
filterdir = '/home/desika.narayanan/powderday/filters/'
filterfiles = [
    'arbitrary.filter',
    # 'ACS_F475W.filter',
    # 'ACS_F606W.filter',
    # 'ACS_F814W.filter',
    # 'B_subaru.filter',
]
# Insert additional filter files as above. In bash, the following command
# formats the filenames for easy copying/pasting.
# $ shopt -s globstar; printf "# '%s'\n" *.filter
npix_x = 128
npix_y = 128
#experimental and under development - not advised for use
IMAGING_TRANSMISSION_FILTER = False
filter_list = ['filters/irac_ch1.filter']
TRANSMISSION_FILTER_REDSHIFT = 0.001
#===============================================
#OTHER INFORMATION
#===============================================
solar = 0.013
PAH_frac = {'usg': 0.0586, 'vsg': 0.1351, 'big': 0.8063} # values will be normalized to 1
#===============================================
#DEBUGGING
#===============================================
SOURCES_IN_CENTER = False
STELLAR_SED_WRITE = True
SKIP_RT = False #skip radiative transfer (i.e. just read in the grids and maybe write some diagnostics)
SUPER_SIMPLE_SED = False #just generate 1 oct of 100 pc on a side,
#centered on [0,0,0]. sources are added at
#random positions.
SKIP_GRID_READIN = False
CONSTANT_DUST_GRID = False #if set, then we don't create a dust grid by
#smoothing, but rather just make it the same
#size as the octree with a constant value of
#4e-20
N_MASS_BINS = 1 #this is really just a place holder that exists in
#some loops to be able to insert some code downstream
#for spatially varying IMFs. right now for speed best
#to set to 1 as it doesn't actually do anything.
FORCE_STELLAR_AGES = False
FORCE_STELLAR_AGES_VALUE = 0.05# Gyr
FORCE_STELLAR_METALLICITIES = False
FORCE_STELLAR_METALLICITIES_VALUE = 0.012 #absolute values (so 0.013 ~ solar)
1630780 | import clr
clr.AddReference("RevitServices")
import RevitServices
from RevitServices.Persistence import DocumentManager
from RevitServices.Transactions import TransactionManager
doc = DocumentManager.Instance.CurrentDBDocument
clr.AddReference("RevitNodes")
import Revit
clr.ImportExtensions(Revit.Elements)
clr.ImportExtensions(Revit.GeometryConversion)
clr.AddReference("RevitAPI")
from Autodesk.Revit.DB import *
import System
from System.Collections.Generic import *
############## Definitions Start ##############
# Convert to List if singleton...
def tolist(obj1):
    """Return *obj1* unchanged if it is iterable, else wrap it in a list."""
    if hasattr(obj1, "__iter__"):
        return obj1
    return [obj1]
# Returns the index of the found level given a Level and a list of Levels...
def FindLevelIndex(levels, lev):
    """Return the index of *lev* within *levels* (matched by Id), or None.

    As in the original, the whole list is scanned, so with duplicate Ids the
    last matching position wins.
    """
    target = lev.Id.ToString()
    found = None
    for position, candidate in enumerate(levels):
        if candidate.Id.ToString() == target:
            found = position
    return found
# Copy the original wall and set it's levels using the Built-In Parameters for the Base and Top Constraints...
# Copy the original wall and set it's levels using the Built-In Parameters for the Base and Top Constraints...
def CopyWallByLevel(wall, b, t):
    """Duplicate *wall* in place and constrain the copy between level *b*
    (base) and level *t* (top).

    Returns the new wall wrapped as a Dynamo element, or the exception
    message string if the copy or parameter update failed.
    """
    wallOut = None
    try:
        # Copy the Original Wall with a transformation vector of 0,0,0...
        w = ElementTransformUtils.CopyElement(doc,wall.Id,XYZ(0,0,0))
        # Since the CopyElements method returns the ElementId of the new wall we need to get this Element from the Document...
        w = doc.GetElement(w[0])
        # Update the Base and Top constraints Parameters using the Built-In Parameters.
        # Note: I have explicitly chosen the Overload as I was getting flaky behaviour where the wrong overload was being used...
        p = w.get_Parameter(BuiltInParameter.WALL_BASE_CONSTRAINT)
        p.Set.Overloads.Functions[2](b.Id)
        p = w.get_Parameter(BuiltInParameter.WALL_HEIGHT_TYPE)
        p.Set.Overloads.Functions[2](t.Id)
        wallOut = w.ToDSType(True)
    # Write out any exceptions...
    except Exception, e:  # IronPython 2 syntax (Dynamo's embedded engine)
        wallOut = e.message
    # Return new wall..
    return wallOut
############## Definitions End ##############
# IN-Variables...
run = tolist(IN[0])[0]
walls = tolist(UnwrapElement(IN[1]))
# OUT-Variables...
outList = []
# Main Script...
# Test if user has selected Run as True...
if run:
    # Get All Levels in the Document and cast to .net List...
    levels = list([l for l in FilteredElementCollector(doc).OfClass(Level).ToElements()])
    # Sort Levels by Elevation using a lamda expression...
    levels.sort(key=lambda x: x.Elevation, reverse=False)
    # Start a new Transaction ready for modifying the Document...
    TransactionManager.Instance.EnsureInTransaction(doc)
    for w in walls:
        arr = []
        # Check if the Element is a Wall...
        if w.GetType() == Wall:
            # Get Base and Top Constraints as Levels...
            p = w.get_Parameter(BuiltInParameter.WALL_BASE_CONSTRAINT)
            base = doc.GetElement(p.AsElementId())
            p = w.get_Parameter(BuiltInParameter.WALL_HEIGHT_TYPE)
            top = doc.GetElement(p.AsElementId())
            # Test whether walls Base and Top levels are NOT the same, if they are we will skip this wall, if they are not then we will get the Index of the Level in the sorted list of Levels we collected earlier for both the Base and Top of the wall...
            if not base.Id.IntegerValue == top.Id.IntegerValue:
                # Note: we are setting the bounds of the Loop below with the Indices of the found Levels so we will only loop through the Levels in between the Base and Top Levels...
                i = FindLevelIndex(levels,base)
                j = FindLevelIndex(levels,top)
                # Loop through the Levels between the Base and Top Levels copying the original wall for each iteration and stepping up one Level...
                while i < j:
                    wCopy = CopyWallByLevel(w,levels[i], levels[i+1])
                    arr.append(wCopy)
                    i = i+1
                outList.append(arr)
                # Delete original Wall as this has now been split by Level...
                # NOTE(review): indentation reconstructed — append/Delete are
                # placed inside the split branch so unsplit walls are kept;
                # confirm against the original graph.
                doc.Delete(w.Id)
    # End the Transaction...
    TransactionManager.Instance.TransactionTaskDone()
    # Return the new Walls...
    OUT = outList
# Return if user has not set input Run to True...
else:
    OUT = "Please Set Run to True"
3246603 | <reponame>karmanya007/pdf2audio
from io import StringIO
from pdfminer.converter import TextConverter
from pdfminer.layout import LAParams
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.pdfpage import PDFPage
from pdfminer.pdfparser import PDFParser
from PyPDF2 import PdfFileReader
from tqdm import tqdm
import pyttsx3
import six
from time import sleep
try:
import colorama
colorama.init()
except ImportError:
colorama = None
try:
from termcolor import colored
except ImportError:
colored = None
engine = pyttsx3.init()
def on_start(name,length):
    """Show a cyan progress bar named *name*, ticking once per 0.2 s.

    NOTE(review): this runs the entire bar synchronously when CALLED and
    returns None; `engine.connect('started-utterance', on_start(...))`
    therefore registers None as the callback — confirm this is intended.
    """
    with tqdm(total=length, desc=colored(name,"cyan")) as pbar:
        for i in range(length):
            sleep(0.2)
            pbar.update(1)
def on_error(name, exception):
    """Print a red error banner for *name* and re-raise the engine error."""
    six.print_(colored(f"{name} caused error: ","red"))
    # NOTE(review): wrapping in Exception(...) discards the original exception
    # type and traceback; a bare `raise exception` would preserve them —
    # confirm intent before changing.
    raise Exception(exception)
def get_info(pdf_path,in_file):
    """Return (metadata_summary_text, page_count) for the PDF open at *in_file*."""
    pdf = PdfFileReader(in_file)
    information = pdf.getDocumentInfo()
    number_of_pages = pdf.getNumPages()
    # Human-readable summary; extract_text() prepends it to the spoken text.
    txt = f"""
Information about {pdf_path}:
Author: {information.author}
Creator: {information.creator}
Producer: {information.producer}
Subject: {information.subject}
Title: {information.title}
Number of pages: {number_of_pages}
"""
    return txt,number_of_pages
def get_text(in_file,output_string,number_of_pages):
    """Stream the text of every page of *in_file* into *output_string*."""
    # Standard pdfminer pipeline: parser -> document -> resource manager ->
    # TextConverter device driven page-by-page by the interpreter.
    parser = PDFParser(in_file)
    doc = PDFDocument(parser)
    rsrcmgr = PDFResourceManager()
    device = TextConverter(rsrcmgr, output_string, laparams=LAParams())
    interpreter = PDFPageInterpreter(rsrcmgr, device)
    for page in tqdm(PDFPage.create_pages(doc), total=(number_of_pages - 1), desc=colored('Converting pdf to text',"cyan")):
        interpreter.process_page(page)
def convert_to_audio(number_of_pages,output,file_name):
    """Synthesise *output* text to ``<file_name>.mp3`` via pyttsx3."""
    # NOTE(review): on_start(...) is invoked here and now (its progress bar
    # runs immediately), so connect() registers its None return value as the
    # callback — confirm this is intended.
    engine.connect('started-utterance', on_start('Converting text to audio', number_of_pages - 1))
    engine.connect('error', on_error)
    engine.save_to_file(output, f'{file_name}.mp3')
    engine.runAndWait()
def extract_text(pdf_path, file_name):
    """End-to-end pipeline: read the PDF at *pdf_path*, extract metadata and
    text, synthesise the combined text to ``<file_name>.mp3``, and return a
    status message."""
    output_string = StringIO()
    with open(pdf_path, 'rb') as in_file:
        info,number_of_pages = get_info(pdf_path, in_file)
        get_text(in_file,output_string,number_of_pages)
    output = output_string.getvalue()
    # Strip layout newlines/tabs so the TTS engine reads continuous prose.
    output = output.replace('\n','')
    output = output.replace('\t','')
    output = info + output
    convert_to_audio(number_of_pages,output,file_name)
    return f"Finished converting pdf at {pdf_path} to {file_name}.mp3"
| StarcoderdataPython |
10069 | """
Crack a password using a genetic algorithm!
"""
import random as rnd
def main():
    """
    Crack a fixed password with a genetic algorithm: build generations of
    candidate words, select the fittest (plus a few lucky ones), breed them
    with randomized crossover, and mutate the offspring.
    """
    # Problem constants for this run.
    settings = {
        'password' : "<PASSWORD>",
        'size_population' : 100,
        'best_sample' : 20,
        'lucky_few' : 20,
        'number_of_child' : 5,
        'number_of_generations' : 10000, #Overkill >:D
        'chance_of_mutation' : .5
    }
    # Each breeding pair yields number_of_child children; the population is
    # stable only if that reproduces exactly size_population individuals.
    pairs = (settings['best_sample'] + settings['lucky_few']) / 2
    if pairs * settings['number_of_child'] != settings['size_population']:
        print ("population size not stable")
        return
    last_gen, _ = genetic_algorithm(**settings)
    print("Last generation: \n\n")
    print(last_gen)
def genetic_algorithm(**kwargs):
    """
    Execute the genetic algorithm.

    Takes the problem constants as keyword arguments, iterates for
    ``number_of_generations`` (stopping early once the password appears in
    the population), and returns ``(last_population, history)``.

    Bug fix: the original reassigned ``hist = curr_pop`` right after
    initialising ``hist = []``, so the "history" was actually the initial
    population list with later generations appended onto it — a list mixing
    individual words and lists of words. The history is now a proper list of
    populations, one entry per generation (including the initial one).
    """
    # Unpack the values from the dict
    password = kwargs['password']
    size_population = kwargs['size_population']
    best_sample = kwargs['best_sample']
    lucky_few = kwargs['lucky_few']
    number_of_child = kwargs['number_of_child']
    number_of_generations = kwargs['number_of_generations']
    chance_of_mutation = kwargs['chance_of_mutation']

    # The genetic algorithm
    curr_pop = initial_pop(size_population, password)
    hist = [curr_pop]
    last_found = -1
    for generation in range(number_of_generations):
        curr_pop = next_gen(curr_pop, password, best_sample, lucky_few,
                            number_of_child, chance_of_mutation)
        hist.append(curr_pop)
        if check_solution(curr_pop, password):
            last_found = generation
            break
    if last_found != -1:
        print(f"Found a solution in the {last_found} generation!!")
    else:
        print("No solution found! D':")
    return curr_pop, hist
def next_gen(curr_pop, password, best_sample, lucky_few, number_of_child, chance_of_mutation):
    """
    -> Core step of the Genetic Algorithm <-
    Rank the current population by fitness, select breeders (top performers
    plus some lucky picks), breed them, then mutate the offspring.
    """
    ranked = compute_perf_pop(curr_pop, password)
    breeders = select_from_population(ranked, best_sample, lucky_few)
    offspring = create_children(breeders, number_of_child)
    return mutate_pop(offspring, chance_of_mutation)
def initial_pop(size, password):
    """Build *size* random words, each as long as the target password."""
    word_length = len(password)
    return [word_generate(word_length) for _ in range(size)]
def fitness(password, test_word):
    """
    Fitness = percentage of positions where *test_word* matches *password*
    (0 when nothing matches, 100 for a perfect match). Prints a warning and
    returns None when the lengths differ.
    """
    if len(test_word) != len(password):
        print("Incompatible password...")
        return
    matches = sum(expected == actual for expected, actual in zip(password, test_word))
    return matches * 100 / len(password)
def compute_perf_pop(population, password):
    """Return (individual, fitness) pairs sorted best-first.

    Scores are gathered in a dict first, so duplicate individuals collapse
    to a single entry (as in the original implementation).
    """
    scores = {}
    for individual in population:
        scores[individual] = fitness(password, individual)
    return sorted(scores.items(), key=lambda item: item[1], reverse=True)
def select_from_population(pop_sorted, best_sample, lucky_few):
    """Pick the next breeders: the *best_sample* fittest individuals plus
    *lucky_few* random picks from the whole ranking, shuffled together."""
    breeders = [pop_sorted[rank][0] for rank in range(best_sample)]
    # Simple lucky few: random draws (with possible repeats) from the ranking.
    for _ in range(lucky_few):
        breeders.append(rnd.choice(pop_sorted)[0])
    rnd.shuffle(breeders)
    return breeders
def create_children(breeders, nof_childs):
    """Pair breeders from the two halves of the list and give each pair
    *nof_childs* children (len(breeders) must be even)."""
    children = []
    half = len(breeders) // 2
    first_half = breeders[:half]
    second_half = breeders[half:]
    for parent_a, parent_b in zip(first_half, second_half):
        children.extend(create_child(parent_a, parent_b) for _ in range(nof_childs))
    return children
def mutate_pop(population, chance):
    """Mutate each individual in place with probability *chance*; returns the
    (same) population list."""
    for idx, individual in enumerate(population):
        if rnd.random() < chance:
            population[idx] = mutate_word(individual)
    return population
def mutate_word(word):
    """Replace one random position of *word* with a random lowercase letter
    (the letter may coincide with the original) and return the result."""
    pos = int(rnd.random() * len(word))
    replacement = chr(97 + int(26 * rnd.random()))
    return word[:pos] + replacement + word[pos + 1:]
def create_child(ind_1, ind_2):
    """Uniform crossover: each position of the child is taken from *ind_1*
    or *ind_2* with equal probability."""
    genes = [(gene_a if rnd.random() < 0.5 else gene_b)
             for gene_a, gene_b in zip(ind_1, ind_2)]
    return "".join(genes)
def word_generate(length):
    """Return a random word of *length* lowercase ASCII letters.

    Bug fix: the original used ``rnd.randint(0, 26)``, whose upper bound is
    INCLUSIVE, so ``chr(97 + 26) == '{'`` could appear in generated words —
    inconsistent with the mutation helpers, which draw 0..25 only.
    ``randrange(26)`` yields exactly 0..25.
    """
    return "".join(chr(97 + rnd.randrange(26)) for _ in range(length))
def check_solution(population, password):
    """True when the exact password is present in *population*."""
    return password in population
# Run the demo only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| StarcoderdataPython |
4816837 | <reponame>mcculloh213/alchemist-stack
from alchemist_stack.repository.models import Base
from sqlalchemy import Column, Integer, DateTime
class ExampleTable(Base):
    """Declarative ORM model for the ``example`` table."""
    __tablename__ = 'example'
    # Auto-incrementing surrogate key (stored as column "id").
    primary_key = Column('id', Integer, primary_key=True)
    # Timezone-aware event timestamp; required (NOT NULL).
    timestamp = Column(DateTime(timezone=True), nullable=False)
    def __repr__(self):
        return '<Example(timestamp={timestamp})>'.format(timestamp=self.timestamp)
1624297 | import shutil
import yaml
# Marker that terminates the front-matter section; "\n...\n" is YAML's
# end-of-document indicator.
DEFAULT_FRONT_MATTER_END = u"\n...\n"
def loads(file_contents, front_matter_end=DEFAULT_FRONT_MATTER_END):
    """Split *file_contents* into (front_matter, body).

    Returns (None, file_contents) when no *front_matter_end* marker is
    present; otherwise everything before the marker is parsed as YAML and
    the remainder is returned as the body.
    """
    marker_pos = file_contents.find(front_matter_end)
    if marker_pos == -1:
        return (None, file_contents)
    body_start = marker_pos + len(front_matter_end)
    # NOTE(security): yaml.load without an explicit Loader can construct
    # arbitrary Python objects — do not feed it untrusted input.
    return (yaml.load(file_contents[:marker_pos]),
            file_contents[body_start:])
def load(file_obj, front_matter_end=DEFAULT_FRONT_MATTER_END):
    """Read *file_obj* and split its contents into (front_matter, body).

    Bug fix: the original accepted *front_matter_end* but never forwarded it
    to loads(), so a custom marker was silently ignored; it is now passed
    through.
    """
    return loads(file_obj.read(), front_matter_end)
| StarcoderdataPython |
1796631 | <filename>globalpkg/mydb.py<gh_stars>0
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'laifuyu'
import configparser
import sys
import mysql.connector
from globalpkg.global_var import logger
class MyDB:
    """MySQL access helper.

    Reads host/port/credential settings for section *db* of the INI file
    *config_file*, opens a connection on construction, and wraps common
    cursor operations (DDL, insert, update, select) with logging and
    commit/rollback handling.

    Fixes over the original:
    - ``close()`` actually closes the connection (the original read
      ``self.dbconn.close`` without parentheses, a no-op attribute access).
    - Each cursor is created BEFORE the ``try`` block, so a failure in
      ``cursor()`` itself propagates instead of triggering a NameError on
      ``db_cursor`` inside the ``except`` handler.
    """

    def __init__(self, config_file, db):
        """Load connection settings and connect; exits the process on failure."""
        config = configparser.ConfigParser()
        # Read the database server host/IP, port and credentials from the config file.
        config.read(config_file, encoding='utf-8')
        self.host = config[db]['host']
        self.port = config[db]['port']
        self.user = config[db]['user']
        self.passwd = config[db]['passwd']
        self.db_name = config[db]['db']
        self.charset = config[db]['charset']
        try:
            self.dbconn = mysql.connector.connect(host=self.host, port=self.port, user=self.user, password=self.passwd, database=self.db_name, charset=self.charset)
        except Exception as e:
            logger.error('初始化数据连接失败:%s' % e)
            sys.exit()

    def get_host(self):
        """Return the configured database host."""
        return self.host

    def get_port(self):
        """Return the configured database port."""
        return self.port

    def get_conn(self):
        """Return the underlying mysql.connector connection object."""
        return self.dbconn

    def execute_create(self, query):
        """Execute a DDL statement (e.g. CREATE TABLE).

        Returns True on success; rolls back and exits the process on failure.
        """
        logger.info('query:%s' % query)
        db_cursor = self.dbconn.cursor()
        try:
            db_cursor.execute(query)
            db_cursor.execute('commit')
            db_cursor.close()
            return True
        except Exception as e:
            logger.error('创建数据库表操作失败:%s' % e)
            db_cursor.execute('rollback')
            db_cursor.close()
            exit()

    def execute_insert(self, query, data):
        """Execute a parameterised INSERT.

        Returns True on success; rolls back and exits the process on failure.
        """
        logger.info('query:%s data:%s' % (query, data))
        db_cursor = self.dbconn.cursor()
        try:
            db_cursor.execute(query, data)
            db_cursor.execute('commit')
            db_cursor.close()
            return True
        except Exception as e:
            logger.error('执行数据库插入操作失败:%s' % e)
            db_cursor.execute('rollback')
            db_cursor.close()
            exit()

    def execute_update(self, query, data):
        """Execute an UPDATE built via %-interpolation.

        Returns ('', True) on success or (exception, False) after rollback.

        NOTE(security): interpolating *data* into the SQL string
        (``query % data``) is vulnerable to SQL injection for untrusted
        input — prefer the parameterised ``cursor.execute(query, data)``.
        """
        query = query % data
        logger.info('query:%s' % query)
        db_cursor = self.dbconn.cursor()
        try:
            db_cursor.execute(query)
            db_cursor.execute('commit')
            db_cursor.close()
            return ('', True)
        except Exception as e:
            logger.error('执行数据库更新操作失败:%s' % e)
            db_cursor.execute('rollback')
            db_cursor.close()
            return (e, False)

    def select_one_record(self, query, data=""):
        """Run a query whose result contains a single record.

        Returns (row, True) on success or (exception, False) on failure.
        """
        logger.info('query:%s data:%s' % (query, data))
        db_cursor = self.dbconn.cursor()
        try:
            if data:
                db_cursor.execute(query, data)
            else:
                db_cursor.execute(query)
            query_result = db_cursor.fetchone()
            db_cursor.close()
            return (query_result, True)
        except Exception as e:
            logger.error('执行数据库查询操作失败:%s' % e)
            db_cursor.close()
            return (e, False)

    def select_many_record(self, query, data=""):
        """Run a query whose result contains multiple records.

        Returns the list of rows on success; exits the process on failure.
        """
        logger.info('query:%s data:%s' % (query, data))
        db_cursor = self.dbconn.cursor()
        try:
            if data:
                db_cursor.execute(query, data)
            else:
                db_cursor.execute(query)
            query_result = db_cursor.fetchall()
            db_cursor.close()
            return query_result
        except Exception as e:
            logger.error('执行数据库查询操作失败:%s' % e)
            db_cursor.close()
            exit()

    def close(self):
        """Close the underlying connection (bug fix: the original was missing
        the call parentheses and never actually closed it)."""
        self.dbconn.close()
| StarcoderdataPython |
11012 | """Test module ``plot_profile/utils.py``."""
# Standard library
import logging
# First-party
from plot_profile.utils import count_to_log_level
def test_count_to_log_level():
    """Increasing verbosity counts map onto progressively chattier log levels."""
    expected = {
        0: logging.ERROR,
        1: logging.WARNING,
        2: logging.INFO,
        3: logging.DEBUG,
    }
    for count, level in expected.items():
        assert count_to_log_level(count) == level
| StarcoderdataPython |
1640418 | <filename>bin/process_bigrams.py
# Intended to be used with count_2w.txt which has the following format:
# A B\tFREQENCY
# Sometimes "A" is "<S>" for start and "</S>" for end.
# Output is similar with all output lower-cased (including "<S>" and "</S>").
import collections
from src.data import data
all_results = collections.defaultdict(int)
for line in data.open_project_path('data/count_2w.txt', errors='ignore'):
a, b, count = line.split()
key = ('%s %s' % (a, b)).lower()
all_results[key] += int(count)
for item in sorted(all_results.items(), key=lambda x: x[1], reverse=True):
print('%s\t%s' % item)
| StarcoderdataPython |
4816060 | <reponame>ev-agelos/acr-server
from django.shortcuts import render
from django.contrib.auth.models import User
from laptimes.models import Laptime
def index(request):
    """Render the home page with the five most recent laptimes."""
    recent_laptimes = Laptime.objects.order_by('-id')[:5]
    return render(request, 'index.html', context=dict(laptimes=recent_laptimes))
def drivers(request):
users = User.objects.all()
return render(request, 'users.html', context=dict(users=users))
| StarcoderdataPython |
131789 | <gh_stars>0
"""
.. module: lemur.destinations.service
:platform: Unix
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: <NAME> <<EMAIL>>
"""
from sqlalchemy import func
from lemur import database
from lemur.models import certificate_destination_associations
from lemur.destinations.models import Destination
from lemur.certificates.models import Certificate
def create(label, plugin_name, options, description=None):
    """
    Creates a new destination, that can then be used as a destination for certificates.

    :param label: Destination common name
    :param description:
    :rtype : Destination
    :return: New destination
    """
    # remove any sub-plugin objects before try to save the json options
    # NOTE(review): this mutates the caller's *options* dicts in place (the
    # 'plugin_object' key is deleted from them) — confirm that is intended.
    for option in options:
        if 'plugin' in option['type']:
            del option['value']['plugin_object']
    destination = Destination(label=label, options=options, plugin_name=plugin_name, description=description)
    return database.create(destination)
def update(destination_id, label, options, description):
    """Update an existing destination's label, options and description.

    :param destination_id: Lemur assigned ID
    :param label: Destination common name
    :rtype : Destination
    """
    dest = get(destination_id)
    dest.label = label
    dest.options = options
    dest.description = description
    return database.update(dest)
def delete(destination_id):
    """Delete the destination with the given Lemur-assigned ID."""
    destination = get(destination_id)
    database.delete(destination)
def get(destination_id):
    """Fetch a destination by its Lemur-assigned ID.

    :rtype : Destination
    """
    result = database.get(Destination, destination_id)
    return result
def get_by_label(label):
    """Fetch a destination by its label."""
    result = database.get(Destination, label, field='label')
    return result
def get_all():
    """Return every destination currently known by Lemur."""
    return database.find_all(database.session_query(Destination), Destination, {}).all()
def render(args):
    """Build the filtered/sorted/paged destination listing for the API.

    Supports an optional ``certificate_id`` restriction and a
    ``filter`` string of ';'-separated terms.
    """
    filt = args.pop('filter')
    certificate_id = args.pop('certificate_id', None)
    query = database.session_query(Destination)
    if certificate_id:
        query = query.join(Certificate, Destination.certificate)
        query = query.filter(Certificate.id == certificate_id)
    if filt:
        query = database.filter(query, Destination, filt.split(';'))
    return database.sort_and_page(query, Destination, args)
def stats(**kwargs):
    """Per-destination certificate counts, shaped for charting as
    ``{'labels': [...], 'values': [...]}``."""
    items = database.db.session.query(Destination.label, func.count(certificate_destination_associations.c.certificate_id))\
        .join(certificate_destination_associations)\
        .group_by(Destination.label).all()
    labels = [label for label, _ in items]
    values = [count for _, count in items]
    return {'labels': labels, 'values': values}
| StarcoderdataPython |
4809585 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
import cProfile
import os
import pyprof2html
def main():
    """Profile TargetList.from_starlist on a big starlist and emit HTML reports."""
    from KOPy.targets import TargetList

    filename = pkg_resources.resource_filename('KOPy.tests', 'data/big_starlist.txt')

    profile = cProfile.Profile()
    profile.runcall(TargetList.from_starlist, filename)

    output = os.path.relpath(
        os.path.join(os.path.dirname(__file__), "profiles", "targetlist_parse.profile")
    )
    # exist_ok replaces the old try/except around makedirs: the old code caught
    # IOError, which on Python 2 does not cover the OSError raised for an
    # existing directory, and on any version swallowed unrelated I/O failures.
    os.makedirs(os.path.dirname(output) + "/", exist_ok=True)
    profile.dump_stats(output)

    # Convert the raw profile into a browsable HTML report.
    p2h = pyprof2html.Converter(output)
    outhtml = os.path.join(os.path.dirname(output), 'targetlist_parse')
    p2h.printout('html', outhtml)
    print("Profiled '{0:s}' in '{1:s}'".format('TargetList.from_starlist', outhtml))

    # For large profiles, also emit an unabridged "all functions" page.
    if p2h.profiledata_count > 20:
        p2h = pyprof2html.Converter(output)
        p2h.printout(filetype='html',
                     output_directory=outhtml,
                     output_htmlfile='index-all.html',
                     functions_number=99999)
if __name__ == '__main__':
    # Script entry point: run the profiling pass directly.
    main()
| StarcoderdataPython |
1638696 | import errno
import os
import pickle
import unittest
import libtorrent as lt
# Every error category exposed by the libtorrent bindings; used below to
# smoke-test message() across all of them.
ALL_CATEGORIES = (
    lt.generic_category(),
    lt.system_category(),
    lt.libtorrent_category(),
    lt.upnp_category(),
    lt.http_category(),
    lt.socks_category(),
    lt.bdecode_category(),
    lt.i2p_category(),
)
class ErrorCategoryTest(unittest.TestCase):
    """Tests for libtorrent's error_category bindings."""

    def test_equal(self) -> None:
        self.assertEqual(lt.generic_category(), lt.generic_category())
        self.assertNotEqual(lt.generic_category(), lt.system_category())

    def test_accessors(self) -> None:
        # Each deprecated get_*_category accessor must warn and return the
        # same category as its modern counterpart.
        for deprecated_name, current in (
            ("get_libtorrent_category", lt.libtorrent_category),
            ("get_upnp_category", lt.upnp_category),
            ("get_http_category", lt.http_category),
            ("get_socks_category", lt.socks_category),
            ("get_bdecode_category", lt.bdecode_category),
            ("get_i2p_category", lt.i2p_category),
        ):
            with self.assertWarns(DeprecationWarning):
                self.assertEqual(getattr(lt, deprecated_name)(), current())

    def test_name(self) -> None:
        for category, expected in (
            (lt.generic_category(), "generic"),
            (lt.system_category(), "system"),
            (lt.libtorrent_category(), "libtorrent"),
            (lt.upnp_category(), "upnp"),
            (lt.http_category(), "http"),
            (lt.socks_category(), "socks"),
            (lt.bdecode_category(), "bdecode"),
            (lt.i2p_category(), "i2p error"),
        ):
            self.assertEqual(category.name(), expected)

    def test_message(self) -> None:
        for category in ALL_CATEGORIES:
            self.assertIsInstance(category.message(1), str)
class ErrorCodeTest(unittest.TestCase):
    """Tests for libtorrent's error_code bindings."""

    def test_empty(self) -> None:
        self.assertEqual(lt.error_code().value(), 0)

    def test_init(self) -> None:
        code = lt.error_code(1, lt.generic_category())
        self.assertEqual(code.value(), 1)
        self.assertEqual(code.category(), lt.generic_category())

    def test_message(self) -> None:
        code = lt.error_code(errno.ENOENT, lt.generic_category())
        self.assertEqual(code.message(), os.strerror(errno.ENOENT))

    def test_value(self) -> None:
        code = lt.error_code(errno.ENOENT, lt.generic_category())
        self.assertEqual(code.value(), errno.ENOENT)

    def test_clear(self) -> None:
        code = lt.error_code(errno.ENOENT, lt.generic_category())
        code.clear()
        # Cleared codes revert to value 0 in the system category.
        self.assertEqual(code.value(), 0)
        self.assertEqual(code.category(), lt.system_category())

    def test_assign(self) -> None:
        code = lt.error_code(errno.ENOENT, lt.generic_category())
        code.assign(123, lt.libtorrent_category())
        self.assertEqual(code.value(), 123)
        self.assertEqual(code.category(), lt.libtorrent_category())

    def test_pickle(self) -> None:
        code = lt.error_code(errno.ENOENT, lt.generic_category())
        roundtripped = pickle.loads(pickle.dumps(code))
        self.assertEqual(roundtripped.value(), errno.ENOENT)
        self.assertEqual(roundtripped.category(), lt.generic_category())
| StarcoderdataPython |
135801 | <reponame>Pugavkomm/-test-multy_cognitive_tasks
from typing import Tuple
import numpy as np
def _compare_time(f_time, interval):
"""
Compares time with interval less than interval
Args:
f_time ([type]): [description]
interval ([type]): [description]
Returns:
[type]: [description]
"""
if f_time < interval:
f_time = interval
elif f_time % interval != 0:
f_time -= f_time % interval
return f_time
class TaskCognitive:
    """
    Base class for cognitive-task dataset generators.

    Subclasses override :meth:`one_dataset` to produce one trial of inputs
    and target outputs, and set ``ob_size`` (number of input features) and
    ``act_size`` (number of output features).
    """

    ob_size = 0   # number of input features; overridden by subclasses
    act_size = 0  # number of output features; overridden by subclasses

    def __init__(self, params: dict, batch_size: int) -> None:
        self._params = params
        self._batch_size = batch_size

    def one_dataset(self) -> Tuple[np.ndarray, np.ndarray]:
        """Generate one trial of (inputs, outputs); subclasses must implement."""
        # The original returned the typing construct itself, which is never a
        # usable trial; failing loudly makes the contract explicit.
        raise NotImplementedError("subclasses must implement one_dataset()")

    def dataset(self, number_of_trials: int = 1) -> Tuple[np.ndarray, np.ndarray]:
        """Concatenate ``number_of_trials`` trials along the time axis."""
        inputs = []
        outputs = []
        for _ in range(number_of_trials):
            one_trial_input, one_trial_output = self.one_dataset()
            inputs.append(one_trial_input)
            outputs.append(one_trial_output)
        return np.concatenate(inputs, axis=0), np.concatenate(outputs, axis=0)

    @property
    def feature_and_act_size(self):
        """(ob_size, act_size) for this task."""
        return self.ob_size, self.act_size

    @property
    def task_parameters(self):
        """Dict of task parameters."""
        return self._params

    @task_parameters.setter
    def task_parameters(self, params: dict):
        self._params = params

    @property
    def batch_size(self):
        """Number of independent trials generated per time step."""
        return self._batch_size

    @batch_size.setter
    def batch_size(self, batch_size: int):
        self._batch_size = batch_size
class ContextDM(TaskCognitive):
    """
    Context-dependent decision-making task.

    Two noisy stimulus streams plus two one-hot context cues are presented;
    the target output reports, per the active context, whether the relevant
    stream's mean exceeds 0.5.
    """

    ob_size = 5  # inputs: fixation + 2 stimuli + 2 context cues
    act_size = 3  # outputs: fixation + 2 decision channels

    def __init__(
        self,
        params: dict = dict(
            [
                ("sigma", 0),        # stimulus noise std; 0 -> noiseless
                ("fixation", 0.3),   # durations in seconds
                ("target", 0.35),
                ("delay", 0.3),
                ("trial", 0.75),
                ("dt", 1e-3),        # simulation step
            ]
        ),
        batch_size: int = 1,
    ) -> None:
        super().__init__(params, batch_size)

    def one_dataset(self):
        """Generate one batch of trials.

        :return: (inputs, outputs) with shapes
            (time, batch, ob_size) and (time, batch, act_size).
        """
        sigma = self._params["sigma"]
        t_fixation = self._params["fixation"]
        t_target = self._params["target"]
        t_delay = self._params["delay"]
        t_trial = self._params["trial"]
        batch_size = self._batch_size
        dt = self._params["dt"]
        # Convert durations (seconds) into numbers of time steps.
        fixation = int(t_fixation / dt)
        target = int(t_target / dt)
        delay = int(t_delay / dt)
        trial = int(t_trial / dt)
        full_interval = fixation + target + delay + trial
        full_interval_and_delay = full_interval + delay
        # Row 0 / row 1 form a one-hot context per batch element.
        context = np.zeros((2, batch_size))
        context[0, :] = np.random.choice([0, 1], size=batch_size)
        context[1, :] = 1 - context[0, :]
        # Per-batch stimulus means and their binary (>0.5) labels.
        move_average = np.random.uniform(0, 1, size=batch_size)
        color_average = np.random.uniform(0, 1, size=batch_size)
        move_average_label = move_average > 0.5
        move_average_label = move_average_label.astype(np.longlong)
        color_average_label = color_average > 0.5
        color_average_label = color_average_label.astype(np.longlong)
        fixation_array = np.ones(
            (full_interval, batch_size, 1)
        )  # fixation channel held high for the whole trial
        context_one = np.ones((full_interval, batch_size, 1))
        context_one[:, :, 0] *= context[0]
        context_two = np.ones((full_interval, batch_size, 1))
        context_two[:, :, 0] *= context[1]
        input_one = np.zeros((full_interval, batch_size, 1))
        input_two = np.zeros((full_interval, batch_size, 1))
        output_one = np.zeros((full_interval, batch_size, 1))
        output_two = np.zeros((full_interval, batch_size, 1))
        # NOTE(review): target_fixation is built but never used below.
        target_fixation = np.zeros((full_interval_and_delay, batch_size, 1))
        target_fixation[0:full_interval, ...] = fixation_array[...]
        # Boolean per batch element; selects which label drives the output.
        indexes_context = np.where(context == 0)[0].astype(bool)
        for j in range(batch_size):
            if sigma == 0:
                # Noiseless: constant stimulus at its mean.
                input_one[:, j] += np.ones((full_interval, 1)) * move_average[j]
                input_two[:, j] += np.ones((full_interval, 1)) * color_average[j]
            else:
                # Noisy: Gaussian samples around each mean.
                input_one[:, j] += np.random.normal(
                    move_average[j], sigma, size=(full_interval, 1)
                )
                input_two[:, j] += np.random.normal(
                    color_average[j], sigma, size=(full_interval, 1)
                )
            if indexes_context[j]:
                output_one[:, j] += move_average_label[j]
                output_two[:, j] += 1 - output_one[:, j]
            else:
                output_one[:, j] += color_average_label[j]
                output_two[:, j] += 1 - output_one[:, j]
        inputs = np.concatenate(
            (fixation_array, input_one, input_two, context_one, context_two), axis=-1
        )
        # Pad with a silent inter-trial delay.
        inputs = np.concatenate(
            (inputs, np.zeros((delay, self._batch_size, self.ob_size)))
        )
        outputs = np.concatenate((fixation_array, output_one, output_two), axis=-1)
        outputs = np.concatenate(
            (outputs, np.zeros((delay, self._batch_size, self.act_size)))
        )
        return inputs, outputs
class WorkingMemory(TaskCognitive):
    r"""
    Parametric working-memory (two-stimulus comparison) task.

    A base stimulus is shown, held across a delay, then a comparison
    stimulus is shown; the target output reports which was larger.
    Modeled after "Neuronal correlates of parametric working memory in the
    prefrontal cortex" (Romo et al., Instituto de Fisiología Celular,
    Universidad Nacional Autónoma de México).
    """

    ob_size = 2  # inputs: fixation + stimulus
    act_size = 3  # outputs: fixation + two decision channels

    def __init__(
        self,
        params: dict = dict(
            [
                ("dt", 1e-3),     # step 1 ms
                ("delay", 0.5),   # 500 ms
                ("trial", 0.5),   # 500 ms
                ("KU", 0.05),     # 50 ms
                ("PB", 0.05),     # 50 ms
                ("min", 0),       # stimulus amplitude range
                ("max", 1),
                ("first", -1),    # -1 -> sample base stimulus randomly
                ("second", -1),   # -1 -> sample comparison randomly
            ]
        ),
        batch_size: int = 1,
    ) -> None:
        super().__init__(params, batch_size)

    def one_dataset(self):
        """Generate one batch of comparison trials.

        :return: (inputs, outputs) of shape (time, batch, ob_size) and
            (time, batch, act_size).
        """
        dt = self._params["dt"]
        delay = int(self._params["delay"] / dt)
        trial = int(self._params["trial"] / dt)
        KU = int(self._params["KU"] / dt)
        PB = int(self._params["PB"] / dt)
        full_interval = delay + 2 * trial + KU + PB
        # Step indices for the base and comparison presentation windows.
        start_base = 0
        end_base = trial
        start_compare = trial + delay
        end_compare = start_compare + trial
        start_act = end_compare - trial
        fixation_interval = end_compare  # full_interval - PB
        min_stimulus = self._params["min"]
        max_stimulus = self._params["max"]
        fixation = np.zeros((full_interval, self._batch_size, 1))
        fixation[0:fixation_interval] = 1
        # Fixed stimulus values may be forced via params; -1 means random.
        if self._params["first"] == -1:
            base_stimulus = np.random.uniform(
                min_stimulus, max_stimulus, size=self._batch_size
            )
        else:
            base_stimulus = np.ones(self._batch_size) * self._params["first"]
        if self._params["second"] == -1:
            comparison = np.random.uniform(
                min_stimulus, max_stimulus, size=self._batch_size
            )
        else:
            comparison = np.ones(self._batch_size) * self._params["second"]
        trial_input = np.zeros((full_interval, self._batch_size, 1))
        trial_output = np.zeros((full_interval, self._batch_size, 2))
        for batch in range(self._batch_size):
            base = base_stimulus[batch]
            compare = comparison[batch]
            input_stimulus = np.zeros((full_interval, 1, 1))
            input_stimulus[start_base:end_base] = base
            input_stimulus[start_compare:end_compare] = compare
            trial_input[:, batch, 0] = input_stimulus[:, 0, 0]
            act_output = np.zeros((full_interval, 1, 2))
            # Channel 1 fires when compare > base, channel 0 otherwise.
            act_output[start_act:, 0, int(compare > base)] = 1
            trial_output[:, batch, :] = act_output[:, 0, :]
        inputs = np.concatenate((fixation, trial_input), axis=-1)
        outputs = np.concatenate((fixation, trial_output), axis=-1)
        # Pad with a silent inter-trial gap of two delays.
        inputs = np.concatenate(
            (inputs, np.zeros((delay * 2, inputs.shape[1], inputs.shape[2])))
        )
        outputs = np.concatenate(
            (outputs, np.zeros((delay * 2, outputs.shape[1], outputs.shape[2])))
        )
        return inputs, outputs
class CompareObjects(TaskCognitive):
    """
    Delayed match-to-sample style task.

    A sample value is shown, then a random number of distractors; the
    output channel must go high when the sample value reappears.
    """

    ob_size = 2  # inputs: fixation + one stimulus channel
    act_size = 2  # outputs: fixation + one response channel
    # Example stimulus values; not used by one_dataset (kept for reference).
    examples = (0.1, 0.3, 0.5, 0.9)

    def __init__(
        self,
        params: dict = dict(
            [("dt", 1e-3), ("delay", 1), ("trial", 0.5), ("time_object", 0.3)]
        ),
        batch_size: int = 1,
    ) -> None:
        super().__init__(params, batch_size)

    def one_dataset(self):
        """Generate one batch of match-to-sample trials.

        :return: (inputs, outputs) of shape (time, batch, ob_size) and
            (time, batch, act_size).
        """
        dt = self._params["dt"]
        delay = int(self._params["delay"] / dt)
        trial = int(self._params["trial"] / dt)
        time_object = int(self._params["time_object"] / dt)
        # How many distractor slots can precede the matching stimulus (0 or 1).
        tasks_number = [0, 1]
        full_interval = (
            time_object + (len(tasks_number) + 1) * delay + len(tasks_number) * trial
        )
        fixation = np.zeros((full_interval, 1))
        stimul = np.zeros((full_interval, 1))
        target_output = np.zeros((full_interval, 1))
        # Per-batch sample value and the slot in which it reappears.
        object_for_batch = np.random.uniform(0, 1, size=(self._batch_size))
        choice_correct = np.random.choice(tasks_number, size=(self._batch_size, 1))
        inputs = np.zeros((full_interval, self._batch_size, self.ob_size))
        outputs = np.zeros((full_interval, self._batch_size, self.act_size))
        for batch in range(self._batch_size):
            # Reuse the scratch buffers across batch elements.
            fixation *= 0
            stimul *= 0
            target_output *= 0
            # Fixation is high until just before the matching slot ends.
            fixation[
                0 : time_object
                + (1 + choice_correct[batch, 0]) * (trial + delay)
                + trial
                + delay
                - int(delay / 100),
                0,
            ] = 1
            start_out = (
                time_object + choice_correct[batch, 0] * (trial + delay) + 2 * trial
            )
            target_output[
                start_out : start_out + trial + delay - int(delay / 100), 0
            ] = 1
            # Present the sample at trial start.
            stimul[0:time_object, 0] = object_for_batch[batch]
            # Fill any earlier slots with distractors that differ from the sample.
            for j in range(choice_correct[batch, 0]):
                curent_example = np.random.uniform(0, 1)
                while curent_example == object_for_batch[batch]:
                    curent_example = np.random.uniform(0, 1)
                stimul[
                    time_object
                    + delay
                    + j * (trial + delay) : time_object
                    + delay
                    + j * (trial + delay)
                    + trial
                ] = curent_example
            # Matching stimulus in the chosen slot.
            stimul[
                time_object
                + delay
                + choice_correct[batch, 0] * (trial + delay) : time_object
                + delay
                + choice_correct[batch, 0] * (trial + delay)
                + trial,
                0,
            ] = object_for_batch[batch]
            inputs[:, batch, 0] = fixation[:, 0]
            inputs[:, batch, 1] = stimul[:, 0]
            outputs[:, batch, 0] = fixation[:, 0]
            outputs[:, batch, 1] = target_output[:, 0]
        # Pad with a silent inter-trial delay.
        inputs = np.concatenate(
            (inputs, np.zeros((delay, self._batch_size, self.ob_size))), axis=0
        )
        outputs = np.concatenate(
            (outputs, np.zeros((delay, self._batch_size, self.act_size))), axis=0
        )
        return inputs, outputs
class MultyTask:
    """
    Composite dataset generator that interleaves several cognitive tasks.

    The combined input layout is:
    [fixation | one-hot rule vector (one entry per task) | per-task stimulus
    channels, concatenated in sorted task order]; outputs are
    [fixation | per-task output channels].
    """

    # Registry of supported tasks, kept sorted so channel layout is stable.
    task_list = [
        ("ContextDM", ContextDM),
        ("CompareObjects", CompareObjects),
        ("WorkingMemory", WorkingMemory),
    ]
    task_list.sort()
    TASKSDICT = dict(task_list)

    def __init__(self, tasks: dict[str, dict], batch_size: int = 1) -> None:
        # tasks : dict(task_name -> parameters)
        for name in tasks:
            if not (name in self.TASKSDICT):
                raise KeyError(f'"{name}" not supported')
        self._tasks = tasks
        self._sorted_tasks()
        self._batch_size = batch_size
        self._task_list = []
        self._init_tasks()  # init all tasks

    def _sorted_tasks(self):
        # Rebuild the task dict in sorted key order so channel offsets
        # computed in dataset() are deterministic.
        new_dict = dict()
        for key in sorted(self._tasks):
            new_dict[key] = self._tasks[key]
        self._tasks = new_dict

    def _init_tasks(self):
        # Instantiate each task, using its default params when none given.
        self._task_list.clear()
        for key in self._tasks:
            if len(self._tasks[key]) > 0:
                self._task_list.append(
                    self.TASKSDICT[key](self._tasks[key], self._batch_size)
                )
            else:
                self._task_list.append(self.TASKSDICT[key](batch_size=self._batch_size))

    def dataset(self, number_of_generations: int = 1) -> tuple[np.ndarray, np.ndarray]:
        """Generate ``number_of_generations`` randomly chosen trials,
        embedded into the combined multi-task channel layout."""
        number_of_tasks = len(self._tasks)
        choice_tasks = [i for i in range(number_of_tasks)]
        all_inputs, all_outputs = self._count_feature_and_act_size()
        rules = np.eye(number_of_tasks)
        inputs = np.zeros((0, self._batch_size, all_inputs))
        outputs = np.zeros((0, self._batch_size, all_outputs))
        sizes_all_tasks = self._feature_and_act_size_every_task()
        # Channel offsets: inputs start after fixation + rule vector,
        # outputs start after fixation.
        start_input_tasks = [1 + number_of_tasks]
        start_output_tasks = [1]
        size_input_tasks = []
        size_output_tasks = []
        for key in sizes_all_tasks:
            n_inputs, n_outputs = sizes_all_tasks[key]
            n_inputs -= 1  # -fix
            n_outputs -= 1  # -fix
            size_input_tasks.append(n_inputs)
            size_output_tasks.append(n_outputs)
            start_input_tasks.append(n_inputs + start_input_tasks[-1])
            start_output_tasks.append(n_outputs + start_output_tasks[-1])
        for _ in range(number_of_generations):
            task_number = np.random.choice(choice_tasks)
            task_inputs, task_outputs = self._task_list[task_number].dataset()
            # 1. expansion of matrices
            inputs = np.concatenate(
                (inputs, np.zeros((task_inputs.shape[0], self._batch_size, all_inputs)))
            )
            outputs = np.concatenate(
                (
                    outputs,
                    np.zeros((task_outputs.shape[0], self._batch_size, all_outputs)),
                )
            )
            # 2. put fixations
            inputs[-task_inputs.shape[0] :, :, 0] = task_inputs[
                -task_inputs.shape[0] :, :, 0
            ]
            outputs[-task_inputs.shape[0] :, :, 0] = task_outputs[
                -task_outputs.shape[0] :, :, 0
            ]
            # 3. put rule
            inputs[-task_inputs.shape[0] :, :, 1 : 1 + number_of_tasks] += rules[
                :, task_number
            ]
            # 4. put stimuly and outputs
            start_input = start_input_tasks[task_number]
            stop_input = start_input + size_input_tasks[task_number]
            start_output = start_output_tasks[task_number]
            stop_output = start_output + size_output_tasks[task_number]
            inputs[-task_inputs.shape[0] :, :, start_input:stop_input] = task_inputs[
                :, :, 1:
            ]
            outputs[
                -task_outputs.shape[0] :, :, start_output:stop_output
            ] = task_outputs[:, :, 1:]
        return inputs, outputs

    @property
    def feature_and_act_size(self) -> tuple[tuple[int, int], dict]:
        """((total_inputs, total_outputs), {task: (ob_size, act_size)})."""
        return (
            self._count_feature_and_act_size(),
            self._feature_and_act_size_every_task(),
        )

    def _count_feature_and_act_size(self) -> tuple[int, int]:
        # Sum per-task channels (without their fixation), then add the shared
        # fixation channel and the rule vector.
        all_inputs = 0
        all_outputs = 0
        for key in self._tasks:
            all_inputs += self.TASKSDICT[key].ob_size - 1  # minus fix
            all_outputs += self.TASKSDICT[key].act_size - 1  # minus_fix
        all_inputs += 1 + len(self._tasks)  # fix + rule vector
        all_outputs += 1  # fix
        return (all_inputs, all_outputs)

    def _feature_and_act_size_every_task(self):
        # Per-task (ob_size, act_size) including each task's own fixation.
        sizes = dict()
        for key in self._tasks:
            sizes[key] = (self.TASKSDICT[key].ob_size, self.TASKSDICT[key].act_size)
        return sizes

    @property
    def tasks(self) -> dict:
        return self._tasks

    @tasks.setter
    def tasks(self, tasks) -> None:
        # Fully re-initialize; note this resets batch_size to its default.
        self.__init__(tasks)

    def get_task(self, key) -> dict:
        """Return the parameter dict for ``key``; raise KeyError if unknown."""
        if not (key in self._tasks):
            raise KeyError()
        return self._tasks[key]

    def set_task(self, key: str, params: dict):
        """Replace the parameters of an existing task and rebuild instances."""
        if not (key in self._tasks):
            raise KeyError()
        self._tasks[key] = params
        self._init_tasks()

    def __getitem__(self, index: int) -> tuple:
        # NOTE(review): `index < 0 and index > len-1` can never be True, so
        # this bounds check never fires; `or` was probably intended.
        if index < 0 and index > len(self._tasks) - 1:
            raise IndexError(f"index not include in [{0}, {len(self._tasks)}]")
        for i, key in enumerate(self._tasks):
            if index == i:
                return key, self._tasks[key]

    def __setitem__(self, index: int, new_task: tuple):
        # NOTE(review): same never-True bounds check as __getitem__.
        if index < 0 and index > len(self._tasks) - 1:
            raise IndexError(f"index not include in [{0}, {len(self._tasks)}]")
        new_name, new_parameters = new_task
        if not (new_name in self.TASKSDICT):
            raise KeyError(f'"{new_name}" not supported')
        for i, key in enumerate(self._tasks):
            if index == i:
                old_key = key
                break
        del self._tasks[old_key]
        self._tasks[new_name] = new_parameters
        self._init_tasks()

    def __len__(self):
        return len(self._tasks)
| StarcoderdataPython |
1754059 | <reponame>bmoretz/Mastering-Flask
"""initial migration
Revision ID: 462cbdc2765a
Revises:
Create Date: 2019-03-19 09:40:21.801310
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'  # NOTE(review): the real revision hash was redacted to '<KEY>'
down_revision = None  # first migration in this chain
branch_labels = None
depends_on = None
def upgrade():
    """Rename user.user_name to user.username and swap the unique index to match.

    NOTE(review): implemented as add-column + drop-column, so existing
    user_name values are NOT copied into username.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('username', sa.String(length=255), nullable=False))
    op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)
    op.drop_index('ix_user_user_name', table_name='user')
    op.drop_column('user', 'user_name')
    # ### end Alembic commands ###
def downgrade():
    """Revert user.username back to user.user_name (again without copying data)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('user_name', sa.VARCHAR(length=255), nullable=False))
    op.create_index('ix_user_user_name', 'user', ['user_name'], unique=1)
    op.drop_index(op.f('ix_user_username'), table_name='user')
    op.drop_column('user', 'username')
    # ### end Alembic commands ###
| StarcoderdataPython |
4772 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
"""
Calibration Controller
Performs calibration for hue, center of camera position, and servo offsets
"""
import os
import cv2
import time
import json
import argparse
import datetime
import numpy as np
import logging as log
from env import MoabEnv
from typing import Tuple
from common import Vector2
from detector import hsv_detector
from controllers import pid_controller
from dataclasses import dataclass, astuple
from hardware import plate_angles_to_servo_positions
@dataclass
class CalibHue:
    """Result of the ball-hue calibration step."""

    hue: int = 44  # reasonable default hue
    success: bool = False
    # True when the menu button interrupted calibration before it finished
    early_quit: bool = False

    def __iter__(self):
        yield from astuple(self)
@dataclass
class CalibPos:
    """Result of the plate-center position calibration step."""

    position: Tuple[float, float] = (0.0, 0.0)  # (x, y) offset of the center
    success: bool = False
    # True when the menu button interrupted calibration before it finished
    early_quit: bool = False

    def __iter__(self):
        yield from astuple(self)
@dataclass
class CalibServos:
    """Result of the servo-offset calibration step."""

    servos: Tuple[float, float, float] = (0.0, 0.0, 0.0)  # one offset per servo
    success: bool = False
    # True when the menu button interrupted calibration before it finished
    early_quit: bool = False

    def __iter__(self):
        yield from astuple(self)
def ball_close_enough(x, y, radius, max_ball_dist=0.045, min_ball_dist=0.01):
    """Return True when the ball is near the plate center and large enough.

    Rejects balls which are too far from the center or whose detected
    radius is too small.
    """
    centered = np.abs(x) < max_ball_dist and np.abs(y) < max_ball_dist
    big_enough = radius > min_ball_dist
    return centered and big_enough
def calibrate_hue(camera_fn, detector_fn, is_menu_down_fn):
    """
    Sweep candidate hues and return the circular-mean hue at which the
    detector finds a centered, sufficiently large ball.

    :param camera_fn: callable returning (image_frame, elapsed_time)
    :param detector_fn: callable (frame, hue=..., debug=...) ->
        (ball_detected, ((x, y), radius))
    :param is_menu_down_fn: callable; True aborts with early_quit set
    :return: CalibHue; ``success`` is True when at least one hue matched
    """
    hue_low = 0
    hue_high = 360
    hue_steps = 41  # Is 41 instead of 40 so that the steps are even
    img_frame, elapsed_time = camera_fn()
    hue_options = list(np.linspace(hue_low, hue_high, hue_steps))

    detected_hues = []
    for hue in hue_options:
        if is_menu_down_fn():
            return CalibHue(early_quit=True)
        # Fresh frame per candidate hue.
        img_frame, elapsed_time = camera_fn()
        ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue, debug=True)
        # If we found a ball roughly in the center that is large enough
        if ball_detected and ball_close_enough(x, y, radius):
            log.info(
                f"hue={hue:0.3f}, ball_detected={ball_detected}, "
                f"(x, y)={x:0.3f} {y:0.3f}, radius={radius:0.3f}"
            )
            detected_hues.append(hue)

    if len(detected_hues) > 0:
        # Average on the circle so hues wrapping around 0/360 don't cancel.
        # https://en.wikipedia.org/wiki/Mean_of_circular_quantities
        detected_hues_rad = np.radians(detected_hues)
        sines, cosines = np.sin(detected_hues_rad), np.cos(detected_hues_rad)
        sin_mean, cos_mean = np.mean(sines), np.mean(cosines)
        avg_hue_rad = np.arctan2(sin_mean, cos_mean)
        avg_hue = np.degrees(avg_hue_rad) % 360  # Convert back to [0, 360]
        print(f"Hues are: {detected_hues}")
        print(f"Hue calibrated: {avg_hue:0.2f}")
        print(f"Avg hue: {avg_hue:0.2f}")
        return CalibHue(hue=int(avg_hue), success=True)
    else:
        log.warning(f"Hue calibration failed.")
        return CalibHue()
def calibrate_pos(camera_fn, detector_fn, hue, is_menu_down_fn):
    """
    Measure the ball's (x, y) offset from the camera center at a known hue.

    Tries up to 10 frames; returns the first centered detection, rounded to
    millimeter-ish precision.

    :return: CalibPos; ``success`` is True when the ball was found
    """
    for i in range(10):  # Try and detect for 10 frames before giving up
        if is_menu_down_fn():
            return CalibPos(early_quit=True)
        img_frame, elapsed_time = camera_fn()
        ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue)
        # If we found a ball roughly in the center that is large enough
        if ball_detected and ball_close_enough(x, y, radius):
            x_offset = round(x, 3)
            y_offset = round(y, 3)
            log.info(f"Offset calibrated: [{x_offset:.3f}, {y_offset:.3f}]")
            return CalibPos(position=(x_offset, y_offset), success=True)
    log.warning(f"Offset calibration failed.")
    return CalibPos()
def calibrate_servo_offsets(pid_fn, env, stationary_vel=0.005, time_limit=20):
    """
    Balance the ball with the PID controller, wait for it to come to rest,
    then derive per-servo offsets from the stabilizing plate angles.

    :param pid_fn: controller callable mapping state -> (action, info)
    :param env: environment whose step() drives the hardware
    :param stationary_vel: mean |velocity| threshold treated as "at rest"
    :param time_limit: seconds to try before giving up
    :return: CalibServos; ``success`` is True when the ball stabilized
    """
    start_time = time.time()
    action = Vector2(0, 0)

    # Initial high vel_history (to use the vel_hist[-100:] later)
    vel_x_hist = [1.0 for _ in range(100)]
    vel_y_hist = [1.0 for _ in range(100)]

    # Run until the ball has stabilized or the time limit was reached
    while time.time() < start_time + time_limit:
        state = env.step(action)
        action, info = pid_fn(state)
        (x, y, vel_x, vel_y, sum_x, sum_y), ball_detected, buttons = state

        # Quit on menu down
        if buttons.menu_button:
            return CalibServos(early_quit=True)

        if ball_detected:
            vel_x_hist.append(vel_x)
            vel_y_hist.append(vel_y)
            prev_100_x = np.mean(np.abs(vel_x_hist[-100:]))
            prev_100_y = np.mean(np.abs(vel_y_hist[-100:]))
            print("Prev 100: ", (prev_100_x, prev_100_y))

            # If the average velocity for the last 100 timesteps is under the limit
            if (prev_100_x < stationary_vel) and (prev_100_y < stationary_vel):
                # Calculate offsets by calculating servo positions at the
                # current stable position and subtracting the `default` zeroed
                # position of the servos.
                servos = np.array(plate_angles_to_servo_positions(*action))
                servos_zeroed = np.array(plate_angles_to_servo_positions(0, 0))
                servo_offsets = list(servos - servos_zeroed)
                return CalibServos(servos=servo_offsets, success=True)

    # If the plate could not be stabilized in time_limit seconds, quit
    log.warning(f"Servo calibration failed.")
    return CalibServos()
def write_calibration(calibration_dict, calibration_file="bot.json"):
    """Serialize the calibration dict to ``calibration_file`` as sorted, indented JSON."""
    log.info("Writing calibration.")

    contents = json.dumps(calibration_dict, indent=4, sort_keys=True)
    with open(calibration_file, "w+") as outfile:
        log.info(f"Creating calibration file {calibration_file}")
        outfile.write(contents)
def read_calibration(calibration_file="bot.json"):
    """Load calibration from disk, falling back to factory defaults when absent."""
    log.info("Reading previous calibration.")

    if not os.path.isfile(calibration_file):
        # No saved calibration yet: use the defaults.
        return {
            "ball_hue": 44,
            "plate_offsets": (0.0, 0.0),
            "servo_offsets": (0.0, 0.0, 0.0),
        }

    with open(calibration_file, "r") as f:
        return json.load(f)
def wait_for_joystick_or_menu(hardware, sleep_time=1 / 30):
    """Poll until either the joystick or the menu button is pressed.

    :return: the button state object that ended the wait
    """
    pressed = hardware.get_buttons()
    while not (pressed.menu_button or pressed.joy_button):
        time.sleep(sleep_time)
        pressed = hardware.get_buttons()
    return pressed
def wait_for_menu(hardware, sleep_time=1 / 30):
    """Block until the menu button is pressed.

    Expects get_buttons() to return a 4-tuple
    (menu_button, joy_button, joy_x, joy_y).
    """
    menu_pressed = False
    while not menu_pressed:
        menu_pressed, _joy_button, _joy_x, _joy_y = hardware.get_buttons()
        time.sleep(sleep_time)
def run_calibration(env, pid_fn, calibration_file):
    """Interactive hue + position calibration flow, driven via the bot display.

    Prompts the user to place the ball, calibrates hue then center offset,
    persists the results to ``calibration_file``, reloads the hardware
    calibration, reports the outcome on the display, and saves a debug
    snapshot of the camera view to /tmp.
    """
    # Get some hidden things from env
    hardware = env.hardware
    camera_fn = hardware.camera
    detector_fn = hardware.detector

    def is_menu_down(hardware=hardware) -> bool:
        return hardware.get_buttons().menu_button

    # lift plate up first
    hardware.set_angles(0, 0)

    # Display message and wait for joystick
    hardware.display(
        "put ball on stand\nclick joystick",
        scrolling=True,
    )
    buttons = wait_for_joystick_or_menu(hardware)
    if buttons.menu_button:  # Early quit
        hardware.go_up()
        return

    hardware.display("Calibrating...")
    hue_calib = calibrate_hue(camera_fn, detector_fn, is_menu_down)
    if hue_calib.early_quit:
        hardware.go_up()
        return

    # Calibrate position
    pos_calib = calibrate_pos(camera_fn, detector_fn, hue_calib.hue, is_menu_down)
    if pos_calib.early_quit:
        hardware.go_up()
        return

    # Save calibration
    calibration_dict = read_calibration(calibration_file)
    calibration_dict["ball_hue"] = hue_calib.hue
    calibration_dict["plate_offsets"] = pos_calib.position
    x_offset, y_offset = pos_calib.position
    write_calibration(calibration_dict)

    # Update the environment to use the new calibration
    # Warning! This mutates the state!
    hardware.reset_calibration(calibration_file=calibration_file)

    if pos_calib.success and hue_calib.success:
        hardware.display(f"Ok! Ball hue={hue_calib.hue}\nClick menu...", scrolling=True)
    elif not (pos_calib.success or hue_calib.success):
        hardware.display("Calibration failed\nClick menu...", scrolling=True)
    else:
        hue_str = (
            f"Hue calib:\nsuccessful\nBall hue = {hue_calib.hue}\n\n"
            if hue_calib.success
            else "Hue calib:\nfailed\n\n"
        )
        # Fix: this branch previously keyed off hue_calib.success, so the
        # position result was reported as whatever the hue result was.
        pos_str = (
            f"Position \ncalib:\nsuccessful\nPosition = \n({100*x_offset:.1f}, {100*y_offset:.1f})cm\n\n"
            if pos_calib.success
            else "(X, Y) calib:\nfailed\n\n"
        )
        hardware.display(
            "Calibration\npartially succeeded\n\n"
            + hue_str
            + pos_str
            + "Click menu\nto return...\n",
            scrolling=True,
        )

    # When the calibration is complete, save the image of what the moab camera
    # sees (useful for debugging when the hue calibration fails)
    # Have a nice filename with the time and whether it succeeded or failed
    time_of_day = datetime.datetime.now().strftime("%H%M%S")
    filename = "/tmp/hue"
    if hue_calib.success:
        filename += f".{hue_calib.hue}.{time_of_day}.jpg"
    else:
        filename += f".fail.{time_of_day}.jpg"

    img_frame, _ = camera_fn()
    # Huemask keeps an internal cache; sending a new hue (hue + 1) invalidates
    # the cache. TODO: added this while searching for a state bug
    detector_fn(img_frame, hue=hue_calib.hue + 1, debug=True, filename=filename)

    hardware.go_up()
def run_servo_calibration(env, pid_fn, calibration_file):
    """Interactive servo-offset calibration flow (currently disabled).

    Raises NotImplementedError immediately; everything below the raise is
    dead code kept for when the calibration quality issue is resolved.
    """
    # Warning: servo calib works but doesn't currently give a good calibration
    raise NotImplementedError

    # Get some hidden things from env
    hardware = env.hardware
    camera_fn = hardware.camera
    detector_fn = hardware.detector

    # Start the calibration with uncalibrated servos
    hardware.servo_offsets = (0, 0, 0)
    # lift plate up first
    hardware.set_angles(0, 0)

    # Calibrate servo offsets
    hardware.display(
        "Calibarating\nservos\n\n"
        "Place ball in\ncenter without\n stand.\n\n"
        "Click joystick\nto continue.",
        scrolling=True,
    )
    buttons = wait_for_joystick_or_menu(hardware)
    if buttons.menu_button:  # Early quit
        hardware.go_up()
        return

    hardware.display("Calibrating\nservos...", scrolling=True)
    servo_calib = calibrate_servo_offsets(pid_fn, env)

    # Save calibration
    calibration_dict = read_calibration(calibration_file)
    calibration_dict["servo_offsets"] = servo_calib.servos
    s1, s2, s3 = servo_calib.servos
    write_calibration(calibration_dict)

    # Update the environment to use the new calibration
    # Warning! This mutates the state!
    env.reset_calibration(calibration_file=calibration_file)

    if servo_calib.success:
        hardware.display(
            f"servo offsets =\n({s1:.2f}, {s2:.2f}, {s3:.2f})\n\n"
            "Click menu\nto return...\n",
            scrolling=True,
        )
        print(f"servo offsets =\n({s1:.2f}, {s2:.2f}, {s3:.2f})")
    else:
        hardware.display(
            "Calibration\nfailed\n\nClick menu\nto return...", scrolling=True
        )
    hardware.go_up()
def calibrate_controller(**kwargs):
    """Controller-style wrapper around run_calibration.

    Runs the calibration flow immediately, then returns a callable that
    streams camera/detector frames until the menu or joystick button is
    pressed (so the operator can inspect the result).

    :param kwargs: must contain "env", "pid_fn", and "calibration_file"
    """
    run_calibration(
        kwargs["env"],
        kwargs["pid_fn"],
        kwargs["calibration_file"],
    )

    def wait_for_menu_and_stream():
        # Get some hidden things from env to be able to stream the calib results
        env = kwargs["env"]
        hardware = env.hardware
        camera_fn = hardware.camera
        detector_fn = hardware.detector

        # NOTE(review): menu_button is never reassigned; the loop only exits
        # via the break below.
        menu_button = False
        while not menu_button:
            img_frame, _ = camera_fn()
            detector_fn(img_frame, debug=True)  # Save to streaming
            menu, joy, _, _ = hardware.get_buttons()
            if menu or joy:
                break

        env.hardware.go_up()

    return wait_for_menu_and_stream
def main(calibration_file, frequency=30, debug=True):
    """Run the full calibration routine against real hardware.

    :param calibration_file: JSON file to read/write calibration data
    :param frequency: control-loop frequency in Hz
    :param debug: passed through to MoabEnv
    """
    pid_fn = pid_controller(frequency=frequency)

    with MoabEnv(frequency=frequency, debug=debug) as env:
        env.step((0, 0))
        time.sleep(0.2)
        env.hardware.enable_servos()
        time.sleep(0.2)
        # Park the servos at a known position before calibrating.
        env.hardware.set_servos(133, 133, 133)

        run_calibration(env, pid_fn, calibration_file)
        env.hardware.disable_servos()
if __name__ == "__main__":  # Parse command line args
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    parser.add_argument("-f", "--file", default="bot.json", type=str)
    # parse_known_args ignores unrecognized extras (e.g. from a launcher)
    args, _ = parser.parse_known_args()
    main(args.file, debug=args.debug)
| StarcoderdataPython |
3205240 | <reponame>nissimergas/ipre
from time import sleep
from PyQt4 import QtGui
from PyQt4.QtCore import QTimer
from PyQt4.QtCore import QRect, QPropertyAnimation
import json
import requests
import subprocess
import os
class Robot:
    """Sprite-backed robot on the board grid that can hold a ball in each hand."""

    def __init__(self, x, y, id, ventana):
        # Pixel -> grid-cell coordinates (cells appear to be 37x33 px with a
        # (22, 146) px board origin) — TODO confirm against the board layout.
        self.x = (x - 22) // 37
        self.y = (y - 146) // 33
        self.x_cor = x
        self.y_cor = y
        self.id = id
        self.pic = QtGui.QLabel(ventana)
        self.pic.setPixmap(QtGui.QPixmap("imagenes/robot.png").scaledToWidth(50))
        self.pic.show()
        self.pic.move(x - 26, y - 30)
        # Null-object placeholders so move() can always forward to both hands.
        self.left = Ball2()
        self.right = Ball2()

    def move(self, x, y):
        """Move the robot sprite and whatever each hand is currently holding."""
        self.pic.move(x - 26, y - 30)
        self.left.move(x - 20 - 21, y - 30)
        self.right.move(x - 20 + 21, y - 30)

    def agarrar(self, ball, hand):
        """Grab *ball* with the given hand ("r" or "l") and snap it beside the sprite.

        Leftover debug prints ("11111"/"hola"/"chao") were removed.
        """
        if hand == "r":
            self.right = ball
            self.right.move(self.pic.x() + 17, self.pic.y())
        if hand == "l":
            self.left = ball
            self.left.move(self.pic.x() - 17, self.pic.y())

    def soltar(self, ball, hand):
        """Release *ball* from *hand*; not implemented yet."""
        pass
class Ball2:
    """Null-object stand-in for an empty robot hand: move() is a no-op."""

    def move(self, x, y):
        # Intentionally does nothing so Robot.move can call it unconditionally.
        pass
class Ball:
    """Ball sprite plus a numeric label, positioned on the board grid."""

    def __init__(self, x, y, id, ventana):
        # Pixel -> grid-cell coordinates; same 37x33 px cell / (22, 146) origin
        # assumption as Robot — TODO confirm against the board layout.
        self.x = ((x - 22) // 37)
        self.y = ((y - 146) // 33)
        self.x_cor = x
        self.y_cor = y
        self.id = "ball" + str(id)
        self.pic = QtGui.QLabel(ventana)
        self.pic.setPixmap(QtGui.QPixmap("imagenes/pelota.png").scaledToWidth(40))
        self.pic.show()  # You were missing this.
        self.pic.move(x - 20, y - 20)
        # Numeric label drawn next to the sprite.
        self.nombre = QtGui.QLabel(ventana)
        self.nombre.setText(str(id))
        self.nombre.move(x, y)
        self.nombre.show()

    def move(self, x, y):
        # Keep the label offset from the sprite so the number stays readable.
        self.pic.move(x, y)
        self.nombre.move(x + 20, y + 25)
class Obstaculo:
    """Obstacle sprite, drawn twice: once on the board and a copy 630 px right.

    The duplicate presumably mirrors the obstacle onto a second, side-by-side
    play field — confirm against MiMapa's layout.
    """

    def __init__(self, x, y, id, ventana):
        # Pixel -> grid-cell coordinates; same board-origin assumption as Robot.
        self.x = ((x - 22) // 37)
        self.y = ((y - 146) // 33)
        self.x_cor = x
        self.y_cor = y
        self.id = "obstaculo" + str(id)
        self.pic = QtGui.QLabel(ventana)
        self.pic.setPixmap(QtGui.QPixmap("imagenes/obstaculo.png"))
        self.pic.show()  # You were missing this.
        self.pic.move(x - 18.5, y - 16.5)
        # Mirrored copy offset 630 px to the right.
        self.pic2 = QtGui.QLabel(ventana)
        self.pic2.setPixmap(QtGui.QPixmap("imagenes/obstaculo.png"))
        self.pic2.show()  # You were missing this.
        self.pic2.move(x - 18.5 + 630, y - 16.5)
class MiMapa (QtGui.QWidget):
    """Main window: two 16x16 boards (initial state on the left, final state
    on the right), toolbar buttons, PDDL problem generation and animated
    plan execution.

    Board geometry used throughout: cells are 37x33 px; the left board's
    origin is at pixel (22, 146) and the right board starts 630 px further
    right (x > ~613 is treated as the right board).
    """
    def __init__(self):
        super().__init__()
        self.setWindowTitle('Mundo de los robots')
        # Define the window geometry.
        # Parameters: (x_top_left, y_top_left, width, height)
        # p = QtGui.QPalette()
        # gradient = QtGui.QLinearGradient(0, 0, 0, 400)
        # gradient.setColorAt(1.0, QtGui.QColor(252, 252, 252))
        # p.setBrush(QtGui.QPalette.Window, QtGui.QBrush(gradient))
        # self.setPalette(p)
        # self.show()
        self.timer = QTimer(self)
        self.cont_obstaculos=0          # running id for placed obstacles
        self.background = QtGui.QLabel(self)
        self.background.setPixmap(QtGui.QPixmap("imagenes/fondo.png"))
        self.background.resize(600, 600)
        self.background.move(20,110)
        self.instrucciones=[]           # parsed plan steps awaiting animation
        self.program_counter=0          # index of the next step to animate
        #self.obstaculo = QtGui.QLabel(self)
        #self.obstaculo.setPixmap(QtGui.QPixmap("obstaculo.png"))
        #self.obstaculo.move(21,147)
        self.text1 = QtGui.QLabel(self)
        self.text1.setText("Initial State")
        self.text1.move(200,110)
        # Label that shows the instruction currently being executed.
        self.instruccion_ejecutada=QtGui.QLabel(self)
        self.instruccion_ejecutada.setText(" ")
        self.instruccion_ejecutada.move(200,128)
        self.boton_estado0 = QtGui.QPushButton('&Back to initial state', self)
        self.boton_estado0.resize(self.boton_estado0.sizeHint())
        self.boton_estado0.move(450, 20)
        self.boton_estado0.clicked.connect(self.volver_estado)
        self.boton1 = QtGui.QPushButton('&Delete Ball', self)
        self.boton1.resize(self.boton_estado0.sizeHint())
        self.boton1.move(10, 20)
        self.boton1.clicked.connect(self.boton1_callback)
        self.boton_bor_ob = QtGui.QPushButton('&Delete Obstacle', self)
        self.boton_bor_ob.resize(self.boton_estado0.sizeHint())
        self.boton_bor_ob.move(150, 20)
        self.boton_bor_ob.clicked.connect(self.boton_bor_ob_callback)
        self.boton2 = QtGui.QPushButton('&Delete Robot', self)
        self.boton2.resize(self.boton_estado0.sizeHint())
        self.boton2.move(10, 50)
        self.boton2.clicked.connect(self.boton2_callback)
        self.boton_pelota = QtGui.QPushButton('&Place Ball', self)
        self.boton_pelota.resize(self.boton_estado0.sizeHint())
        self.boton_pelota.move(150, 50)
        self.boton_pelota.clicked.connect(self.boton_pelota_callback)
        self.boton3 = QtGui.QPushButton('&Generate Pddl Code', self)
        self.boton3.resize(self.boton_estado0.sizeHint())
        self.boton3.move(10, 80)
        self.boton3.clicked.connect(self.boton3_callback)
        self.boton_obstaculos = QtGui.QPushButton('&Place Obstacle', self)
        self.boton_obstaculos.resize(self.boton_estado0.sizeHint())
        self.boton_obstaculos.move(150, 80)
        self.boton_obstaculos.clicked.connect(self.boton_obstaculos_callback)
        self.boton_poner_rob = QtGui.QPushButton('&Place Robot', self)
        self.boton_poner_rob.resize(self.boton_estado0.sizeHint())
        self.boton_poner_rob.move(300, 20)
        self.boton_poner_rob.clicked.connect(self.poner_rob_call)
        # Right-hand ("final state") board and its controls.
        self.background2 = QtGui.QLabel(self)
        self.background2.resize(600, 600)
        self.background2.setPixmap(QtGui.QPixmap("imagenes/fondo.png"))
        self.background2.move(650,110)
        self.text2 = QtGui.QLabel(self)
        self.text2.setText("Final State")
        self.text2.move(870,110)
        self.boton4 = QtGui.QPushButton('&Delete Ball', self)
        self.boton4.resize(self.boton_estado0.sizeHint())
        self.boton4.move(650, 20)
        self.boton4.clicked.connect(self.boton4_callback)
        self.boton5 = QtGui.QPushButton('&Delete Robot', self)
        self.boton5.resize(self.boton_estado0.sizeHint())
        self.boton5.move(650, 50)
        self.boton5.clicked.connect(self.boton5_callback)
        self.boton_ejecutar = QtGui.QPushButton('&Execute api', self)
        self.boton_ejecutar.resize(self.boton_estado0.sizeHint())
        self.boton_ejecutar.move(300, 50)
        self.boton_ejecutar.clicked.connect(self.ejecutar_acciones)
        self.boton_ejecutar2 = QtGui.QPushButton('&Execute local', self)
        self.boton_ejecutar2.resize(self.boton_estado0.sizeHint())
        self.boton_ejecutar2.move(300, 80)
        self.boton_ejecutar2.clicked.connect(self.ejecutar_acciones2)
        self.contador_rob=0
        self.contador_ball=0
        self.robots=[]          # robots on the initial board (at most one used)
        self.balls=[]           # balls on the initial board
        self.robots_final=[]    # robots on the final board
        self.balls_final=[]     # balls on the final board
        self.obstaculos=[]
        #self.setGeometry(400, 400, 700, 700)
        self.setFixedSize(1300, 700)
        # Placement mode per board: "r" robot, "p" ball, "o" obstacle.
        self.contador="r"
        self.contador_final="r"
    def poner_rob_call(self):
        """'Place Robot' button: re-arm robot-placement mode on empty boards."""
        if len(self.robots)==0:
            self.contador="r"
        if len(self.robots_final)==0:
            self.contador_final="r"
    def boton_pelota_callback(self):
        """'Place Ball' button: switch the initial board to ball mode."""
        self.contador="p"
    def boton_obstaculos_callback(self):
        """'Place Obstacle' button: switch the initial board to obstacle mode."""
        self.contador="o"
        print("dfdf")
    def boton1_callback(self):
        # 'Delete Ball' (initial board): remove the most recently placed ball
        # and fall back to robot mode if no robot remains.
        if len(self.balls)>0:
            b=self.balls.pop()
            b.pic.hide()
            b.nombre.hide()
        if len(self.robots)>0:
            self.contador="p"
        else:
            self.contador="r"
    def boton2_callback(self):
        """'Delete Robot' (initial board): hide it and re-arm robot mode."""
        if len(self.robots)>0:
            self.robots[0].pic.hide()
            self.robots=[]
            self.contador="r"
    def boton3_callback(self):
        """'Generate Pddl Code': only when both boards have matching counts."""
        if len(self.robots_final)==len(self.robots) and len(self.balls_final)==len(self.balls):
            self.generar()
    def boton4_callback(self):
        """'Delete Ball' (final board); mirrors boton1_callback."""
        if len(self.balls_final)>0:
            b=self.balls_final.pop()
            b.pic.hide()
            b.nombre.hide()
        if len(self.robots_final)>0:
            self.contador_final="p"
        else:
            self.contador_final="r"
    def boton5_callback(self):
        """'Delete Robot' (final board); mirrors boton2_callback."""
        if len(self.robots_final)>0:
            self.robots_final[0].pic.hide()
            self.robots_final=[]
            self.contador_final="r"
    def boton_bor_ob_callback(self):
        """'Delete Obstacle': remove the last obstacle from both boards."""
        if len(self.obstaculos)>0:
            ob=self.obstaculos.pop()
            ob.pic.hide()
            ob.pic2.hide()
    def mousePressEvent(self, event):
        # This method handles mouse button presses. It is provided by Qt and
        # is overridden here to dispatch a click to the current placement
        # mode: robot, obstacle, or ball (x > 613 targets the final board).
        x=event.x()
        y= event.y()
        print("x:",x)
        print("y:",y)
        print(self.contador)
        if (self.contador=="r" and x<613)or (self.contador_final=="r" and x>613):
            self.robot(x,y)
            print("sfsd")
        elif self.contador=="o":
            print("obs")
            self.obstaculo(x,y)
        else:
            self.pelota(x,y)
    def robot(self,x,y):
        """Place a robot at the clicked cell on whichever board is in robot mode."""
        # self.pic = QtGui.QLabel(self)
        # self.pic.resize(110, 110)
        # self.pic.setPixmap(QtGui.QPixmap("robot.png").scaledToWidth(80))
        # self.pic.show() # You were missing this.
        # self.pic.move(x-20,y-40)
        if x>21 and y>146 and x<613 and y<674 and self.contador=="r":
            # Snap the click to the centre of its 37x33 px cell.
            x=((x-22)//37)*37+22+18.5
            y=((y-146)//33)*33+146+16.5
            r=Robot(x,y,1,self)
            self.robots.append(r)
            self.contador="p"
        if x>652 and y>146 and x<1244 and y<674 and self.contador_final=="r":
            x=((x-652)//37)*37+652+18.5
            y=((y-146)//33)*33+146+16.5
            r=Robot(x,y,1,self)
            self.robots_final.append(r)
            self.contador_final="p"
    def obstaculo(self,x,y):
        """Place an obstacle at the clicked cell (initial board only)."""
        # self.pic = QtGui.QLabel(self)
        # self.pic.resize(110, 110)
        # self.pic.setPixmap(QtGui.QPixmap("robot.png").scaledToWidth(80))
        # self.pic.show() # You were missing this.
        # self.pic.move(x-20,y-40)
        if x>21 and y>146 and x<613 and y<674 and self.contador=="o":
            x=((x-22)//37)*37+22+18.5
            y=((y-146)//33)*33+146+16.5
            o=Obstaculo(x,y,self.cont_obstaculos,self)
            self.obstaculos.append(o)
            self.cont_obstaculos+=1
    def pelota(self,x,y):
        """Place a ball at the clicked cell on the appropriate board."""
        if x>21 and y>146 and x<613 and y<674 and self.contador=="p":
            x=((x-22)//37)*37+22+18.5
            y=((y-146)//33)*33+146+16.5
            s=Ball(x,y,len(self.balls),self)
            self.balls.append(s)
            self.contador_ball+=1
        if x>652 and y>146 and x<1244 and y<674 and self.contador_final=="p":
            # NOTE(review): this branch snaps using the LEFT board's origin
            # (22) unlike robot(), which uses 652 for the right board —
            # confirm whether that offset is intentional.
            x=((x-22)//37)*37+22+18.5
            y=((y-146)//33)*33+146+16.5
            s=Ball(x,y,len(self.balls_final),self)
            self.balls_final.append(s)
            self.contador_ball+=1
        # self.pic = QtGui.QLabel(self)
        # self.pic.resize(110, 110)
        # self.pic.setPixmap(QtGui.QPixmap("pelota.png").scaledToWidth(80))
        # self.pic.show() # You were missing this.
        # self.pic.move(x-20,y-40)
    def generar(self):
        """Write 'temporal.pddl': a PDDL problem describing the current
        initial board, obstacles, grippers, and the final board as the goal."""
        code="""(define (problem problema2)
(:domain gripper2)"""
        code+="(:objects "
        posiciones=""
        for i in range (16):
            posiciones+=" x"+str(i)+" y"+str(i)
        code+=posiciones
        id=0
        for ball in self.balls:
            code+= " ball{0} ".format(id)
            id+=1
        code+=" left right) (:init "
        for i in range (16):
            code+=" (coord_x x"+str(i)+" ) (coord_y y"+str(i)+ ")"+"\n"
        # Adjacency facts, both directions plus self-loops, for 16 coords.
        for i in range (15):
            code+=" (next x"+str(i)+" x"+str(i)+")"
            code+=" (next y"+str(i)+" y"+str(i)+")"
            code+=" (next x"+str(i)+" x"+str(i+1)+")"
            code+=" (next y"+str(i)+" y"+str(i+1)+")"
            code+=" (next x"+str(i+1)+" x"+str(i)+")"
            code+=" (next y"+str(i+1)+" y"+str(i)+")"+"\n"
        id=0
        code+=" (next x15"+" x15)"
        code+=" (next y15"+" y15)"
        for ball in self.balls:
            code+="(BALL ball{0})(at-ball ball{0} {1} {2}) ".format(id, "x"+str(int(ball.x)), "y"+str(int(ball.y)))
            id+=1
        # NOTE(review): `matriz` is filled but only read by the commented-out
        # `noobstaculo` block below.
        matriz=[[0 for i in range(16)]for j in range(16)]
        for obstaculo in self.obstaculos:
            matriz[int(obstaculo.x)][int(obstaculo.y)]=1
        for obstaculo in self.obstaculos:
            code+="(at-obstaculo {0} {1}) ".format( "x"+str(int(obstaculo.x)), "y"+str(int(obstaculo.y)))
        # for x in range(16):
        #     for y in range(16):
        #         if matriz[x][y]==0:
        #             code+="(noobstaculo {0} {1}) \n".format( "x"+str(int(x)), "y"+str(int(y)))
        code+="(GRIPPER left) (GRIPPER right) (free left) (free right)"
        code+="(at-robby {0} {1}) ".format("x"+str(int(self.robots[0].x)), "y"+str(int(self.robots[0].y)))
        code+=")" # closes :init
        code+="""(:goal (and"""
        # The -17 presumably maps a right-board column index back to
        # left-board coordinates (630 px / 37 px per cell ~= 17) — TODO confirm.
        code+="(at-robby x{0} y{1})".format(str(int(self.robots_final[0].x-17)),str(int(self.robots_final[0].y)))
        # """(at-ball_x ball1 x5) (at-ball_y ball1 y7)
        # (at-ball_x ball2 x5) (at-ball_y ball2 y7)
        # (at-ball_x ball3 x5) (at-ball_y ball3 y7)
        # """
        id=0
        for ball in self.balls_final:
            code+=" (at-ball ball{0} {1} {2}) ".format(id, "x"+str(int(ball.x-17)), "y"+str(int(ball.y)))
            id+=1
        code+="""
))
)"""
        f = open("temporal.pddl", "w")
        f.write(code)
        f.close()
        print(code)
    def mover(self):
        """Teleport the robot sprite to the current instruction's target cell
        and advance the program counter.

        NOTE(review): reads instruction fields [2]/[3], whereas
        funcion_instrucciones builds/consumes 'move' steps with the target at
        [3]/[4] — confirm whether this method is still in use.
        """
        self.robots[0].pic.hide()
        self.robots[0].pic.setPixmap(QtGui.QPixmap("imagenes/robot.png").scaledToWidth(50))
        self.robots[0].pic.resize(70, 70)
        self.robots[0].pic.setPixmap(QtGui.QPixmap("imagenes/robot.png").scaledToWidth(50))
        #self.robots[0].pic.move(self.x2real, self.y2real)
        x2r=self.instrucciones[self.program_counter][2]
        y2r=self.instrucciones[self.program_counter][3]
        # "x7" -> 7, then cell index -> pixel position.
        x2real=int(x2r[1:])*37+22
        y2real=int(y2r[1:])*33+141
        self.robots[0].pic.move(x2real, y2real)
        print(" x y ",x2real,y2real)
        self.robots[0].pic.show()
        self.program_counter+=1
    def funcion_instrucciones(self):
        """Timer callback: animate one plan step (move / pick-up / drop) per
        tick; stop the timer once all instructions have been executed."""
        if len(self.instrucciones)>self.program_counter:
            if self.instrucciones[self.program_counter][0]=="move":
                if len(self.instr)>self.program_counter:
                    # Show the raw instruction text being executed.
                    self.instruccion_ejecutada.setText(self.instr[self.program_counter])
                x2real =self.instrucciones[self.program_counter][3]
                y2real=self.instrucciones[self.program_counter][4]
                self.robots[0].move(x2real,y2real)
                #self.robots[0].pic.move(x2real-26, y2real-30)
                print(" x y ",x2real,y2real)
                #self.robots[0].pic.show()
                self.program_counter+=1
            elif self.instrucciones[self.program_counter][0]=="pick-up":
                if len(self.instr)>self.program_counter:
                    self.instruccion_ejecutada.setText(self.instr[self.program_counter])
                # Find the ball named in the instruction and hand it to the robot.
                pelota=None
                for ball in self.balls:
                    print("comparacion ",ball.id, self.instrucciones[self.program_counter][1] )
                    if ball.id==self.instrucciones[self.program_counter][1]:
                        pelota=ball
                hand=self.instrucciones[self.program_counter][4]
                if hand=="right":
                    self.robots[0].agarrar(pelota,"r")
                if hand=="left":
                    self.robots[0].agarrar(pelota,"l")
                self.program_counter+=1
            elif self.instrucciones[self.program_counter][0]=="drop":
                if len(self.instr)>self.program_counter:
                    self.instruccion_ejecutada.setText(self.instr[self.program_counter])
                x2real =self.instrucciones[self.program_counter][2]
                y2real=self.instrucciones[self.program_counter][3]
                hand=self.instrucciones[self.program_counter][4]
                # Detach the ball from the hand (replace with an empty Ball2)
                # and leave its sprite at the drop position.
                pelota=None
                if hand=="right":
                    pelota=self.robots[0].right
                    self.robots[0].right=Ball2()
                elif hand=="left":
                    pelota=self.robots[0].left
                    self.robots[0].left=Ball2()
                pelota.pic.move(x2real-20,y2real-18)
                pelota.nombre.move(x2real,y2real)
                self.program_counter+=1
        else:
            self._timer.stop()
    def ejecutar_acciones2(self):
        """'Execute local': solve via a local FF planner (path read from
        configuration.txt), parse its stdout into instructions, then animate
        them on a 500 ms timer."""
        self.instrucciones=[]
        self.program_counter=0
        # Remember current sprite positions so volver_estado() can restore them.
        self.estado_inicial_pelotas=[]
        for pelota in self.balls:
            self.estado_inicial_pelotas.append([pelota.pic.x(), pelota.pic.y(),pelota.nombre.x(),pelota.nombre.y()])
        self.estado_inicial_robot=[self.robots[0].pic.x(),self.robots[0].pic.y()]
        self.generar()
        ####################################
        #### code for the local pddl solver
        ####################################
        directorio=os.getcwd()+"/"
        conf_file=open("configuration.txt","r")
        lineas = conf_file.readlines()
        ubicacion_ff=lineas[0].strip()  # first line = path to the FF binary
        conf_file.close()
        print(ubicacion_ff)
        #result = subprocess.run(["ff/./ff","-p","/Users/nissimergas/Desktop/ipre/","-o","main_domain.pddl","-f","temporal.pddl"]
        result = subprocess.run([ubicacion_ff,"-p",directorio,"-o","main_domain.pddl","-f","temporal.pddl"]
        , stdout=subprocess.PIPE)
        print(type(str(result.stdout)))
        output=str(result.stdout)
        # Extract the plan section of FF's output (between "step" and "ntime").
        inicio=output.find("step" )
        fin=output.find("ntime" )
        #print(output[inicio:fin])
        file = open("resultado_terminal.txt","w")
        # NOTE(review): splitting on "n" relies on the str() of a bytes
        # object turning newlines into literal "\n" text — fragile; confirm.
        lineas=output[inicio:fin].strip("step").split("n")
        for li in lineas:
            print(li)
            l=li.strip('\ ')
            f=l.find(" ")
            if len(l[f:].lower().strip(" "))>2:
                file.write("("+l[f:].lower().strip(" ")+")"+"\n")
        file.close()
        ###### build the results file
        ############################### end
        # NOTE(review): `dic` is never used (dispatch calls below are
        # commented out); `self.move` here resolves to QWidget.move.
        dic={"move":self.move}
        with open('resultado_terminal.txt','r') as f:
            instrucciones=f.readlines()
        self.instr=instrucciones
        # Parse each "(op args...)" line into a tuple; for move/drop convert
        # the target cell ("x7"/"y3") into centred pixel coordinates.
        for instruccion in instrucciones:
            parametros=instruccion.strip("(").strip(")").split()
            nombre_instr=parametros[0]
            # for parametro in parametros:
            #     print(parametro)
            if nombre_instr=="move":
                x2r=parametros[3]
                y2r=parametros[4].strip(")")
                x2real=int(x2r[1:])*37+22+18.5
                y2real=int(y2r[1:])*33+141+18.5
                self.instrucciones.append((nombre_instr,parametros[1],parametros[2],x2real,y2real))
            elif nombre_instr=="pick-up":
                print("hola")
                self.instrucciones.append((nombre_instr,parametros[1],parametros[2],parametros[3],parametros[4].strip(")")))
            elif nombre_instr=="drop":
                x2r=parametros[2]
                y2r=parametros[3].strip(")")
                x2real=int(x2r[1:])*37+22+18.5
                y2real=int(y2r[1:])*33+141+18.5
                self.instrucciones.append((nombre_instr,parametros[1],x2real,y2real,parametros[4].strip(")")))
            #dic[nombre_instr](parametros[1],parametros[2],parametros[3],parametros[4].strip(")"))
            #dic[nombre_instr]()
        # Animate one instruction every 500 ms.
        self._timer = QTimer(interval=1,
            timeout=self.funcion_instrucciones)
        self._timer.start(500)
    def ejecutar_acciones(self):
        """'Execute api': solve via the solver.planning.domains web service,
        write the plan to resultado.txt, then animate it on a 500 ms timer."""
        self.program_counter=0
        self.instrucciones=[]
        # Remember current sprite positions so volver_estado() can restore them.
        self.estado_inicial_pelotas=[]
        for pelota in self.balls:
            self.estado_inicial_pelotas.append([pelota.pic.x(), pelota.pic.y(),pelota.nombre.x(),pelota.nombre.y()])
        self.estado_inicial_robot=[self.robots[0].pic.x(),self.robots[0].pic.y()]
        self.generar()
        ####################################
        #### code for the cloud pddl solver
        ####################################
        data1 = {'domain': open("main_domain.pddl", 'r').read(),
            'problem': open("temporal.pddl", 'r').read()}
        r = requests.post('http://solver.planning.domains/solve', data=data1, allow_redirects=True)
        s=r.content.decode("utf-8")
        s=json.loads(s)
        print(s["result"]["plan"])
        file = open("resultado.txt","w")
        for instruccion in s["result"]["plan"]:
            print(instruccion["name"])
            file.write(instruccion["name"]+"\n")
        file.close()
        ############################### end
        # NOTE(review): `dic` is never used (dispatch calls below are
        # commented out); `self.move` here resolves to QWidget.move.
        dic={"move":self.move}
        with open('resultado.txt','r') as f:
            instrucciones=f.readlines()
        self.instr=instrucciones
        # Same parsing as ejecutar_acciones2 (duplicated code).
        for instruccion in instrucciones:
            parametros=instruccion.strip("(").strip(")").split()
            nombre_instr=parametros[0]
            # for parametro in parametros:
            #     print(parametro)
            if nombre_instr=="move":
                x2r=parametros[3]
                y2r=parametros[4].strip(")")
                x2real=int(x2r[1:])*37+22+18.5
                y2real=int(y2r[1:])*33+141+18.5
                self.instrucciones.append((nombre_instr,parametros[1],parametros[2],x2real,y2real))
            elif nombre_instr=="pick-up":
                print("hola")
                self.instrucciones.append((nombre_instr,parametros[1],parametros[2],parametros[3],parametros[4].strip(")")))
            elif nombre_instr=="drop":
                x2r=parametros[2]
                y2r=parametros[3].strip(")")
                x2real=int(x2r[1:])*37+22+18.5
                y2real=int(y2r[1:])*33+141+18.5
                self.instrucciones.append((nombre_instr,parametros[1],x2real,y2real,parametros[4].strip(")")))
            #dic[nombre_instr](parametros[1],parametros[2],parametros[3],parametros[4].strip(")"))
            #dic[nombre_instr]()
        self._timer = QTimer(interval=1,
            timeout=self.funcion_instrucciones)
        self._timer.start(500)
    def volver_estado(self):
        """Restore robot and ball sprites to the positions saved before the
        last plan execution (requires a prior ejecutar_acciones* call)."""
        self.robots[0].pic.move(self.estado_inicial_robot[0],self.estado_inicial_robot[1])
        i=0
        inicial=self.estado_inicial_pelotas
        for pelota in self.balls:
            pelota.pic.move(inicial[i][0],inicial[i][1])
            pelota.nombre.move(inicial[i][2],inicial[i][3])
            i+=1
if __name__ == '__main__':
    # Entry point: start the Qt application and show the main window.
    app = QtGui.QApplication([])
    mapa = MiMapa()
    mapa.show()
    app.exec_()
1750717 | # -*- coding: utf-8 -*-
"""
test.test_avps
~~~~~~~~~~~~~~
This module contains the unit tests for the Diameter protocol AVPs.
:copyright: (c) 2020 <NAME>.
:license: MIT, see LICENSE for more details.
"""
import unittest
import os
import sys
import datetime
testing_dir = os.path.dirname(os.path.abspath(__file__))
base_dir = os.path.dirname(testing_dir)
sys.path.insert(0, base_dir)
from bromelia.etsi_3gpp_swm.avps import *
from bromelia.etsi_3gpp_swm.definitions import *
from bromelia.avps import *
from bromelia.base import *
from bromelia.constants import *
from bromelia.exceptions import *
class TestDiameterAVP(unittest.TestCase):
def test_diameter_avp__vendor_id_bit__default(self):
avp = DiameterAVP()
self.assertFalse(avp.is_vendor_id())
avp.set_vendor_id_bit(True)
self.assertTrue(avp.is_vendor_id())
avp.set_vendor_id_bit(False)
self.assertFalse(avp.is_vendor_id())
avp.set_vendor_id_bit(True)
self.assertTrue(avp.is_vendor_id())
avp.set_vendor_id_bit(False)
self.assertFalse(avp.is_vendor_id())
def test_diameter_avp__vendor_id_bit__unset_when_is_unset(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.set_vendor_id_bit(False)
self.assertEqual(cm.exception.args[0], "V-bit was already unset")
def test_diameter_avp__vendor_id_bit__set_when_is_set(self):
avp = DiameterAVP()
self.assertFalse(avp.is_vendor_id())
avp.set_vendor_id_bit(True)
self.assertTrue(avp.is_vendor_id())
with self.assertRaises(AVPAttributeValueError) as cm:
avp.set_vendor_id_bit(True)
self.assertEqual(cm.exception.args[0], "V-bit was already set")
def test_diameter_avp__mandatory_bit__default(self):
avp = DiameterAVP()
self.assertFalse(avp.is_mandatory())
avp.set_mandatory_bit(True)
self.assertTrue(avp.is_mandatory())
avp.set_mandatory_bit(False)
self.assertFalse(avp.is_mandatory())
avp.set_mandatory_bit(True)
self.assertTrue(avp.is_mandatory())
avp.set_mandatory_bit(False)
self.assertFalse(avp.is_mandatory())
def test_diameter_avp__mandatory_bit__unset_when_is_unset(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.set_mandatory_bit(False)
self.assertEqual(cm.exception.args[0], "M-bit was already unset")
def test_diameter_avp__mandatory_bit__set_when_is_set(self):
avp = DiameterAVP()
self.assertFalse(avp.is_mandatory())
avp.set_mandatory_bit(True)
self.assertTrue(avp.is_mandatory())
with self.assertRaises(AVPAttributeValueError) as cm:
avp.set_mandatory_bit(True)
self.assertEqual(cm.exception.args[0], "M-bit was already set")
def test_diameter_avp__protected_bit__default(self):
avp = DiameterAVP()
self.assertFalse(avp.is_protected())
avp.set_protected_bit(True)
self.assertTrue(avp.is_protected())
avp.set_protected_bit(False)
self.assertFalse(avp.is_protected())
avp.set_protected_bit(True)
self.assertTrue(avp.is_protected())
avp.set_protected_bit(False)
self.assertFalse(avp.is_protected())
def test_diameter_avp__protected_bit__unset_when_is_unset(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.set_protected_bit(False)
self.assertEqual(cm.exception.args[0], "P-bit was already unset")
def test_diameter_avp__protected_bit__set_when_is_set(self):
avp = DiameterAVP()
self.assertFalse(avp.is_protected())
avp.set_protected_bit(True)
self.assertTrue(avp.is_protected())
with self.assertRaises(AVPAttributeValueError) as cm:
avp.set_protected_bit(True)
self.assertEqual(cm.exception.args[0], "P-bit was already set")
def test_diameter_avp__default_object(self):
avp = DiameterAVP()
self.assertEqual(avp.code.hex(), "00000000")
self.assertEqual(avp.flags.hex(), "00")
self.assertIsNone(avp.vendor_id)
self.assertEqual(avp.length.hex(), "000008")
self.assertIsNone(avp.data)
self.assertIsNone(avp.padding)
self.assertEqual(avp.dump().hex(), "0000000000000008")
self.assertEqual(avp.get_code(), 0)
self.assertEqual(avp.get_flags(), 0)
self.assertIsNone(avp.get_vendor_id())
self.assertEqual(avp.get_length(), 8)
def test_diameter_avp__custom_object_by_constructor(self):
avp = DiameterAVP(code=1, vendor_id=3, flags=2, data="srcrary")
self.assertEqual(avp.code.hex(), "00000001")
self.assertEqual(avp.flags.hex(), "02")
self.assertEqual(avp.vendor_id.hex(), "00000003")
self.assertEqual(avp.length.hex(), "000013")
self.assertEqual(avp.data.hex(), "73726372617279")
self.assertEqual(avp.padding.hex(), "00")
self.assertEqual(avp.dump().hex(), "0000000102000013000000037372637261727900")
self.assertEqual(avp.get_code(), 1)
self.assertEqual(avp.get_flags(), 2)
self.assertEqual(avp.get_vendor_id(), 3)
self.assertEqual(avp.get_length(), 19)
self.assertEqual(avp.get_padding_length(), 1)
def test_diameter_avp__custom_object_by_instance_attributes(self):
avp = DiameterAVP()
avp.code = 1
avp.flags = 2
avp.vendor_id = 3
avp.data = "srcrary"
self.assertEqual(avp.code.hex(), "00000001")
self.assertEqual(avp.flags.hex(), "02")
self.assertEqual(avp.vendor_id.hex(), "00000003")
self.assertEqual(avp.length.hex(), "000013")
self.assertEqual(avp.data.hex(), "73726372617279")
self.assertEqual(avp.padding.hex(), "00")
self.assertEqual(avp.dump().hex(), "0000000102000013000000037372637261727900")
self.assertEqual(avp.get_code(), 1)
self.assertEqual(avp.get_flags(), 2)
self.assertEqual(avp.get_vendor_id(), 3)
self.assertEqual(avp.get_length(), 19)
def test_diameter_avp__custom_object__invalid_code_value__string(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.code = "4294967296"
self.assertEqual(cm.exception.args[0], "invalid code attribute value")
def test_diameter_avp__custom_object__invalid_code_value__list(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.code = ["4294967296"]
self.assertEqual(cm.exception.args[0], "invalid code attribute value")
def test_diameter_avp__custom_object__invalid_code_value__integer_1(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.code = 4294967296
self.assertEqual(cm.exception.args[0], "code attribute has 4-bytes length long")
def test_diameter_avp__custom_object__invalid_code_value__integer_2(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.code = -1
self.assertEqual(cm.exception.args[0], "code attribute has 4-bytes length long")
def test_diameter_avp__custom_object__invalid_code_value__bytes_1(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.code = bytes.fromhex("00")
self.assertEqual(cm.exception.args[0], "code attribute has 4-bytes length long")
def test_diameter_avp__custom_object__invalid_code_value__bytes_2(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.code = bytes.fromhex("0000")
self.assertEqual(cm.exception.args[0], "code attribute has 4-bytes length long")
def test_diameter_avp__custom_object__invalid_code_value__bytes_3(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.code = bytes.fromhex("000000")
self.assertEqual(cm.exception.args[0], "code attribute has 4-bytes length long")
def test_diameter_avp__custom_object__invalid_code_value__bytes_4(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.code = bytes.fromhex("0000000001")
self.assertEqual(cm.exception.args[0], "code attribute has 4-bytes length long")
def test_diameter_avp__custom_object__invalid_flags_value__string(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.flags = "256"
self.assertEqual(cm.exception.args[0], "invalid flags attribute value")
def test_diameter_avp__custom_object__invalid_flags_value__list(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.flags = ["256"]
self.assertEqual(cm.exception.args[0], "invalid flags attribute value")
def test_diameter_avp__custom_object__invalid_flags_value__integer_1(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.flags = 256
self.assertEqual(cm.exception.args[0], "flags attribute has 1-byte length long")
def test_diameter_avp__custom_object__invalid_flags_value__integer_2(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.flags = -1
self.assertEqual(cm.exception.args[0], "flags attribute has 1-byte length long")
def test_diameter_avp__custom_object__invalid_flags_value__bytes_1(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.flags = bytes.fromhex("0000")
self.assertEqual(cm.exception.args[0], "flags attribute has 1-byte length long")
def test_diameter_avp__custom_object__invalid_flags_value__bytes_2(self):
avp = DiameterAVP()
with self.assertRaises(AVPAttributeValueError) as cm:
avp.flags = bytes.fromhex("000000")
self.assertEqual(cm.exception.args[0], "flags attribute has 1-byte length long")
def test_diameter_avp__custom_object__flags_by_setting_class_attributes(self):
avp = DiameterAVP()
avp.flags = DiameterAVP.flag_vendor_id_bit
self.assertEqual(avp.flags.hex(), "80")
avp.flags = DiameterAVP.flag_mandatory_bit
self.assertEqual(avp.flags.hex(), "40")
avp.flags = DiameterAVP.flag_protected_bit
self.assertEqual(avp.flags.hex(), "20")
avp.flags = DiameterAVP.flag_reserved5_bit
self.assertEqual(avp.flags.hex(), "10")
avp.flags = DiameterAVP.flag_reserved4_bit
self.assertEqual(avp.flags.hex(), "08")
avp.flags = DiameterAVP.flag_reserved3_bit
self.assertEqual(avp.flags.hex(), "04")
avp.flags = DiameterAVP.flag_reserved2_bit
self.assertEqual(avp.flags.hex(), "02")
avp.flags = DiameterAVP.flag_reserved1_bit
self.assertEqual(avp.flags.hex(), "01")
def test_diameter_avp__custom_object__flags_by_set_flags_methods(self):
avp = DiameterAVP()
avp.set_protected_bit(True)
avp.set_mandatory_bit(True)
self.assertEqual(avp.flags.hex(), "60")
avp.set_vendor_id_bit(True)
self.assertEqual(avp.flags.hex(), "e0")
avp.set_protected_bit(False)
self.assertEqual(avp.flags.hex(), "c0")
avp.set_protected_bit(True)
avp.set_mandatory_bit(False)
self.assertEqual(avp.flags.hex(), "a0")
avp.set_protected_bit(False)
self.assertEqual(avp.flags.hex(), "80")
avp.set_vendor_id_bit(False)
self.assertEqual(avp.flags.hex(), "00")
avp.set_protected_bit(True)
self.assertEqual(avp.flags.hex(), "20")
avp.set_mandatory_bit(True)
avp.set_protected_bit(False)
self.assertEqual(avp.flags.hex(), "40")
avp.set_protected_bit(True)
self.assertEqual(avp.flags.hex(), "60")
def test_diameter_avp__custom_object__length_by_using_len_builtin_function(self):
avp = DiameterAVP(1, 2, 3, "srcrary")
self.assertEqual(len(avp), avp.get_length())
avp = DiameterAVP(1, 2, 3, "Pythonicsrcrary")
self.assertEqual(len(avp), avp.get_length())
def test_diameter_avp__custom_object__length_changing_based_on_data(self):
avp = DiameterAVP()
avp.data = "srcrary"
self.assertEqual(avp.get_length(), 15)
self.assertEqual(avp.get_padding_length(), 1)
avp.data += b"FOO"
self.assertEqual(avp.get_length(), 18)
self.assertEqual(avp.get_padding_length(), 2)
avp.data += b"BAR"
self.assertEqual(avp.get_length(), 21)
self.assertEqual(avp.get_padding_length(), 3)
avp.data += b"F"
self.assertEqual(avp.get_length(), 22)
self.assertEqual(avp.get_padding_length(), 2)
avp.data += b"B"
self.assertEqual(avp.get_length(), 23)
self.assertEqual(avp.get_padding_length(), 1)
avp.data += b"="
self.assertEqual(avp.get_length(), 24)
self.assertIsNone(avp.get_padding_length())
def test_diameter_avp__custom_object__length_by_adding_vendor_id(self):
avp = DiameterAVP()
avp.data = "srcrary"
self.assertEqual(avp.get_length(), 15)
self.assertEqual(avp.get_padding_length(), 1)
avp.vendor_id = VENDOR_ID_3GPP
self.assertEqual(avp.get_length(), 19)
self.assertEqual(avp.get_padding_length(), 1)
avp.data += b"FooBar"
self.assertEqual(avp.get_length(), 25)
self.assertEqual(avp.get_padding_length(), 3)
avp.data = b""
self.assertEqual(avp.get_length(), 12)
self.assertIsNone(avp.get_padding_length())
avp.vendor_id = None
self.assertEqual(avp.get_length(), 8)
self.assertIsNone(avp.get_padding_length())
def test_diameter_avp__eq_dunder(self):
avp1 = DiameterAVP()
avp2 = DiameterAVP()
self.assertEqual(avp1, avp2)
avp1.set_mandatory_bit(True)
self.assertNotEqual(avp1, avp2)
avp2.set_mandatory_bit(True)
self.assertEqual(avp1, avp2)
avp2 = OriginHostAVP("host")
self.assertNotEqual(avp1, avp2)
avp1.code = avp2.code
avp1.flags = avp2.flags
avp1.data = avp2.data
self.assertEqual(avp1, avp2)
self.assertEqual(avp1.__repr__(), "<Diameter AVP: 264 [Origin-Host] MANDATORY>")
self.assertEqual(avp2.__repr__(), "<Diameter AVP: 264 [Origin-Host] MANDATORY>")
# include tests for data attribute and vendor_id
def test_diameter_avp__custom_object__padding_not_allowed_to_set(self):
avp = DiameterAVP()
with self.assertRaises(AttributeError) as cm:
avp.padding = bytes.fromhex("0000")
self.assertEqual(cm.exception.args[0], "can't set attribute")
def test_diameter_avp__load_staticmethod__parsing_user_name_avp_stream(self):
stream = bytes.fromhex("00000001400000356d792d75736572406e61692e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], UserNameAVP))
self.assertEqual(avps[0].code, USER_NAME_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 53)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"<EMAIL>")
self.assertEqual(avps[0].get_padding_length(), 3)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 1 [User-Name] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_class_avp_stream(self):
stream = bytes.fromhex("000000194000000e4f50454e45440000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], ClassAVP))
self.assertEqual(avps[0].code, CLASS_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 14)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"OPENED")
self.assertEqual(avps[0].get_padding_length(), 2)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 25 [Class] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_session_timeout_avp_stream(self):
stream = bytes.fromhex("0000001b4000000c00002a2f")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], SessionTimeoutAVP))
self.assertEqual(avps[0].code, SESSION_TIMEOUT_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, bytes.fromhex("00002a2f"))
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 27 [Session-Timeout] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_calling_station_id_avp_stream(self):
stream = bytes.fromhex("0000001f4000000d66726f646f000000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], CallingStationIdAVP))
self.assertEqual(avps[0].code, CALLING_STATION_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 13)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"frodo")
self.assertEqual(avps[0].get_padding_length(), 3)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 31 [Calling-Station-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_proxy_state_avp_stream(self):
stream = bytes.fromhex("000000214000000e434c4f5345440000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], ProxyStateAVP))
self.assertEqual(avps[0].code, PROXY_STATE_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 14)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"CLOSED")
self.assertEqual(avps[0].get_padding_length(), 2)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 33 [Proxy-State] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_acct_session_id_avp_stream(self):
stream = bytes.fromhex("0000002c4000000c00001a4d")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AcctSessionIdAVP))
self.assertEqual(avps[0].code, ACCT_SESSION_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, convert_to_4_bytes(6733))
self.assertEqual(avps[0].get_padding_length(), None)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 44 [Acct-Session-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_acct_multi_session_id_avp_stream(self):
stream = bytes.fromhex("000000324000003a6d792d6469616d657465722d7365727665722e6d792d6e6574776f726b3b3430333239323b3430333239323b3430333239320000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AcctMultiSessionIdAVP))
self.assertEqual(avps[0].code, ACCT_MULTI_SESSION_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 58)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"my-diameter-server.my-network;403292;403292;403292")
self.assertEqual(avps[0].get_padding_length(), 2)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 50 [Acct-Multi-Session-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_event_timestamp_avp_stream(self):
stream = bytes.fromhex("000000374000000ce357fa5b")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], EventTimestampAVP))
self.assertEqual(avps[0].code, EVENT_TIMESTAMP_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data.hex(), "e357fa5b")
self.assertEqual(avps[0].get_padding_length(), None)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 55 [Event-Timestamp] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_acct_interim_interval_avp_stream(self):
stream = bytes.fromhex("000000554000000c0000012c")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AcctInterimIntervalAVP))
self.assertEqual(avps[0].code, ACCT_INTERIM_INTERVAL_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data.hex(), "0000012c")
self.assertEqual(avps[0].get_padding_length(), None)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 85 [Acct-Interim-Interval] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_host_ip_address_avp_stream(self):
stream = bytes.fromhex("000001014000000e00010a9f78240000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], HostIpAddressAVP))
self.assertEqual(avps[0].code, HOST_IP_ADDRESS_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 14)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, bytes.fromhex("00010a9f7824"))
self.assertEqual(avps[0].get_padding_length(), 2)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 257 [Host-Ip-Address] MANDATORY>")
self.assertTrue(avps[0].is_ipv4())
self.assertFalse(avps[0].is_ipv6())
self.assertEqual(avps[0].get_ip_address(), "10.159.120.36")
def test_diameter_avp__load_staticmethod__parsing_auth_application_id_avp_stream(self):
stream = bytes.fromhex("000001024000000c01000030")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AuthApplicationIdAVP))
self.assertEqual(avps[0].code, AUTH_APPLICATION_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, DIAMETER_APPLICATION_SWm)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 258 [Auth-Application-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_acct_application_id_avp_stream(self):
stream = bytes.fromhex("000001034000000c01000030")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AcctApplicationIdAVP))
self.assertEqual(avps[0].code, ACCT_APPLICATION_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, DIAMETER_APPLICATION_SWm)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 259 [Acct-Application-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_vendor_specific_application_id_avp_stream(self):
stream = bytes.fromhex("00000104400000200000010a4000000c000028af000001024000000c01000030")
avps = DiameterAVP.load(stream)
vendor_specific_application_id_avp = avps[0]
self.assertTrue(isinstance(vendor_specific_application_id_avp, VendorSpecificApplicationIdAVP))
self.assertEqual(vendor_specific_application_id_avp.code, VENDOR_SPECIFIC_APPLICATION_ID_AVP_CODE)
self.assertFalse(vendor_specific_application_id_avp.is_vendor_id())
self.assertTrue(vendor_specific_application_id_avp.is_mandatory())
self.assertFalse(vendor_specific_application_id_avp.is_protected())
self.assertEqual(vendor_specific_application_id_avp.get_length(), 32)
self.assertIsNone(vendor_specific_application_id_avp.vendor_id)
self.assertEqual(vendor_specific_application_id_avp.data.hex(), "0000010a4000000c000028af000001024000000c01000030")
self.assertEqual(vendor_specific_application_id_avp.__repr__(), "<Diameter AVP: 260 [Vendor-Specific-Application-Id] MANDATORY>")
vendor_id_avp = vendor_specific_application_id_avp.avps[0]
auth_app_id_avp = vendor_specific_application_id_avp.avps[1]
self.assertTrue(isinstance(vendor_id_avp, VendorIdAVP))
self.assertEqual(vendor_id_avp.code, VENDOR_ID_AVP_CODE)
self.assertFalse(vendor_id_avp.is_vendor_id())
self.assertTrue(vendor_id_avp.is_mandatory())
self.assertFalse(vendor_id_avp.is_protected())
self.assertEqual(vendor_id_avp.get_length(), 12)
self.assertIsNone(vendor_id_avp.vendor_id)
self.assertEqual(vendor_id_avp.data.hex(), "000028af")
self.assertEqual(vendor_id_avp.__repr__(), "<Diameter AVP: 266 [Vendor-Id] MANDATORY>")
self.assertTrue(isinstance(auth_app_id_avp, AuthApplicationIdAVP))
self.assertEqual(auth_app_id_avp.code, AUTH_APPLICATION_ID_AVP_CODE)
self.assertFalse(auth_app_id_avp.is_vendor_id())
self.assertTrue(auth_app_id_avp.is_mandatory())
self.assertFalse(auth_app_id_avp.is_protected())
self.assertEqual(auth_app_id_avp.get_length(), 12)
self.assertIsNone(auth_app_id_avp.vendor_id)
self.assertEqual(auth_app_id_avp.data.hex(), "01000030")
self.assertEqual(auth_app_id_avp.__repr__(), "<Diameter AVP: 258 [Auth-Application-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_redirect_host_usage_avp_stream(self):
stream = bytes.fromhex("000001054000000c00000000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], RedirectHostUsageAVP))
self.assertEqual(avps[0].code, REDIRECT_HOST_USAGE_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, REDIRECT_HOST_USAGE_DONT_CACHE)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 261 [Redirect-Host-Usage] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_redirect_max_cache_time_avp_stream(self):
stream = bytes.fromhex("000001064000000c00002a2f")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], RedirectMaxCacheTimeAVP))
self.assertEqual(avps[0].code, REDIRECT_MAX_CACHE_TIME_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, convert_to_4_bytes(10799))
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 262 [Redirect-Max-Cache-Time] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_session_id_avp_stream(self):
stream = bytes.fromhex("00000107400000206573323b3430333239323b3430333239323b343033323932")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], SessionIdAVP))
self.assertEqual(avps[0].code, SESSION_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 32)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"es2;403292;403292;403292")
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 263 [Session-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_origin_host_avp_stream(self):
stream = bytes.fromhex("000001084000000b65733200")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], OriginHostAVP))
self.assertEqual(avps[0].code, ORIGIN_HOST_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 11)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"es2")
self.assertEqual(avps[0].get_padding_length(), 1)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 264 [Origin-Host] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_supported_vendor_id_avp_stream(self):
stream = bytes.fromhex("000001094000000c000028af")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], SupportedVendorIdAVP))
self.assertEqual(avps[0].code, SUPPORTED_VENDOR_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, VENDOR_ID_3GPP)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 265 [Supported-Vendor-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_vendor_id_avp_stream(self):
stream = bytes.fromhex("0000010a4000000c00000000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], VendorIdAVP))
self.assertEqual(avps[0].code, VENDOR_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, bytes.fromhex("00000000"))
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 266 [Vendor-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_firmware_revision_avp_stream(self):
stream = bytes.fromhex("0000010b0000000c00000001")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], FirmwareRevisionAVP))
self.assertEqual(avps[0].code, FIRMWARE_REVISION_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertFalse(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, bytes.fromhex("00000001"))
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 267 [Firmware-Revision]>")
def test_diameter_avp__load_staticmethod__parsing_result_code_avp_stream(self):
stream = bytes.fromhex("0000010c4000000c000007d1")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], ResultCodeAVP))
self.assertEqual(avps[0].code, RESULT_CODE_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, DIAMETER_SUCCESS)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 268 [Result-Code] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_product_name_avp_stream(self):
stream = bytes.fromhex("0000010d0000001e507974686f6e2062726f6d656c69612076312e302e300000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], ProductNameAVP))
self.assertEqual(avps[0].code, PRODUCT_NAME_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertFalse(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 30)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"Python bromelia v1.0.0")
self.assertEqual(avps[0].get_padding_length(), 2)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 269 [Product-Name]>")
def test_diameter_avp__load_staticmethod__parsing_session_binding_avp_stream(self):
stream = bytes.fromhex("0000010e4000000c00000002")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], SessionBindingAVP))
self.assertEqual(avps[0].code, SESSION_BINDING_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data.hex(), "00000002")
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 270 [Session-Binding] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_session_server_failover_avp_stream(self):
stream = bytes.fromhex("0000010f4000000c00000000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], SessionServerFailoverAVP))
self.assertEqual(avps[0].code, SESSION_SERVER_FAILOVER_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, SESSION_SERVER_FAILOVER_REFUSE_SERVICE)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 271 [Session-Server-Failover] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_multi_round_time_out_avp_stream(self):
stream = bytes.fromhex("000001104000000c00015180")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], MultiRoundTimeOutAVP))
self.assertEqual(avps[0].code, MULTI_ROUND_TIME_OUT_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data.hex(), "00015180")
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 272 [Multi-Round-Time-Out] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_disconnect_cause_avp_stream(self):
stream = bytes.fromhex("000001114000000c00000002")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], DisconnectCauseAVP))
self.assertEqual(avps[0].code, DISCONNECT_CAUSE_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, DISCONNECT_CAUSE_DO_NOT_WANT_TO_TALK_TO_YOU)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 273 [Disconnect-Cause] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_auth_request_type_avp_stream(self):
stream = bytes.fromhex("000001124000000c00000001")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AuthRequestTypeAVP))
self.assertEqual(avps[0].code, AUTH_REQUEST_TYPE_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, AUTH_REQUEST_TYPE_AUTHENTICATE_ONLY)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 274 [Auth-Request-Type] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_auth_grace_period_avp_stream(self):
stream = bytes.fromhex("000001144000000c00000e10")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AuthGracePeriodAVP))
self.assertEqual(avps[0].code, AUTH_GRACE_PERIOD_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data.hex(), "00000e10")
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 276 [Auth-Grace-Period] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_auth_session_state_avp_stream(self):
stream = bytes.fromhex("000001154000000c00000001")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AuthSessionStateAVP))
self.assertEqual(avps[0].code, AUTH_SESSION_STATE_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, NO_STATE_MAINTAINED)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 277 [Auth-Session-State] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_origin_state_id_avp_stream(self):
stream = bytes.fromhex("000001164000000c5ae19512")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], OriginStateIdAVP))
self.assertEqual(avps[0].code, ORIGIN_STATE_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, convert_to_4_bytes(1524733202))
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 278 [Origin-State-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_failed_avp_avp_stream(self):
stream = bytes.fromhex("00000117400000680000011a4000002f68656272612e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000011a4000002f656c64696e2e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f726700")
avps = DiameterAVP.load(stream)
failed_avp_avp = avps[0]
self.assertTrue(isinstance(failed_avp_avp, FailedAvpAVP))
self.assertEqual(failed_avp_avp.code, FAILED_AVP_AVP_CODE)
self.assertFalse(failed_avp_avp.is_vendor_id())
self.assertTrue(failed_avp_avp.is_mandatory())
self.assertFalse(failed_avp_avp.is_protected())
self.assertEqual(failed_avp_avp.get_length(), 104)
self.assertIsNone(failed_avp_avp.vendor_id)
self.assertEqual(failed_avp_avp.data.hex(), "0000011a4000002f68656272612e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000011a4000002f656c64696e2e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f726700")
self.assertIsNone(failed_avp_avp.get_padding_length())
self.assertEqual(failed_avp_avp.__repr__(), "<Diameter AVP: 279 [Failed-Avp] MANDATORY>")
route_record_avp__1 = failed_avp_avp.avps[0]
route_record_avp__2 = failed_avp_avp.avps[1]
self.assertTrue(isinstance(route_record_avp__1, RouteRecordAVP))
self.assertEqual(route_record_avp__1.code, ROUTE_RECORD_AVP_CODE)
self.assertFalse(route_record_avp__1.is_vendor_id())
self.assertTrue(route_record_avp__1.is_mandatory())
self.assertFalse(route_record_avp__1.is_protected())
self.assertEqual(route_record_avp__1.get_length(), 47)
self.assertIsNone(route_record_avp__1.vendor_id)
self.assertEqual(route_record_avp__1.data, b"hebra.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertTrue(route_record_avp__1.get_padding_length(), 1)
self.assertEqual(route_record_avp__1.__repr__(), "<Diameter AVP: 282 [Route-Record] MANDATORY>")
self.assertTrue(isinstance(route_record_avp__2, RouteRecordAVP))
self.assertEqual(route_record_avp__2.code, ROUTE_RECORD_AVP_CODE)
self.assertFalse(route_record_avp__2.is_vendor_id())
self.assertTrue(route_record_avp__2.is_mandatory())
self.assertFalse(route_record_avp__2.is_protected())
self.assertEqual(route_record_avp__2.get_length(), 47)
self.assertIsNone(route_record_avp__2.vendor_id)
self.assertEqual(route_record_avp__2.data, b"eldin.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertIsNotNone(route_record_avp__2.get_padding_length())
self.assertEqual(route_record_avp__2.__repr__(), "<Diameter AVP: 282 [Route-Record] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_proxy_host_avp_stream(self):
stream = bytes.fromhex("0000011840000030687373736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], ProxyHostAVP))
self.assertEqual(avps[0].code, PROXY_HOST_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 48)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 280 [Proxy-Host] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_error_message_avp_stream(self):
stream = bytes.fromhex("000001190000001a44524c2d4552522d333030322d3330343a2e0000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], ErrorMessageAVP))
self.assertEqual(avps[0].code, ERROR_MESSAGE_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertFalse(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 26)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"DRL-ERR-3002-304:.")
self.assertEqual(avps[0].get_padding_length(), 2)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 281 [Error-Message]>")
def test_diameter_avp__load_staticmethod__parsing_route_record_avp_stream(self):
stream = bytes.fromhex("0000011a400000326a616773616530332e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], RouteRecordAVP))
self.assertEqual(avps[0].code, ROUTE_RECORD_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 50)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"jagsae03.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertEqual(avps[0].get_padding_length(), 2)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 282 [Route-Record] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_destination_realm_avp_stream(self):
stream = bytes.fromhex("0000011b400000296570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], DestinationRealmAVP))
self.assertEqual(avps[0].code, DESTINATION_REALM_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 41)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertEqual(avps[0].get_padding_length(), 3)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 283 [Destination-Realm] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_proxy_info_avp_stream(self):
stream = bytes.fromhex("0000011c400000480000011840000030687373736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000214000000e434c4f5345440000")
avps = DiameterAVP.load(stream)
proxy_info_avp = avps[0]
self.assertTrue(isinstance(proxy_info_avp, ProxyInfoAVP))
self.assertEqual(proxy_info_avp.code, PROXY_INFO_AVP_CODE)
self.assertFalse(proxy_info_avp.is_vendor_id())
self.assertTrue(proxy_info_avp.is_mandatory())
self.assertFalse(proxy_info_avp.is_protected())
self.assertEqual(proxy_info_avp.get_length(), 72)
self.assertIsNone(proxy_info_avp.vendor_id)
self.assertEqual(proxy_info_avp.data.hex(), "0000011840000030687373736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000214000000e434c4f5345440000")
self.assertIsNone(proxy_info_avp.get_padding_length())
self.assertEqual(proxy_info_avp.__repr__(), "<Diameter AVP: 284 [Proxy-Info] MANDATORY>")
proxy_host_avp = proxy_info_avp.avps[0]
proxy_state_avp = proxy_info_avp.avps[1]
self.assertTrue(isinstance(proxy_host_avp, ProxyHostAVP))
self.assertEqual(proxy_host_avp.code, PROXY_HOST_AVP_CODE)
self.assertFalse(proxy_host_avp.is_vendor_id())
self.assertTrue(proxy_host_avp.is_mandatory())
self.assertFalse(proxy_host_avp.is_protected())
self.assertEqual(proxy_host_avp.get_length(), 48)
self.assertIsNone(proxy_host_avp.vendor_id)
self.assertEqual(proxy_host_avp.data, b"hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertIsNone(proxy_host_avp.get_padding_length())
self.assertEqual(proxy_host_avp.__repr__(), "<Diameter AVP: 280 [Proxy-Host] MANDATORY>")
self.assertTrue(isinstance(proxy_state_avp, ProxyStateAVP))
self.assertEqual(proxy_state_avp.code, PROXY_STATE_AVP_CODE)
self.assertFalse(proxy_state_avp.is_vendor_id())
self.assertTrue(proxy_state_avp.is_mandatory())
self.assertFalse(proxy_state_avp.is_protected())
self.assertEqual(proxy_state_avp.get_length(), 14)
self.assertIsNone(proxy_state_avp.vendor_id)
self.assertEqual(proxy_state_avp.data, b"CLOSED")
self.assertEqual(proxy_state_avp.get_padding_length(), 2)
self.assertEqual(proxy_state_avp.__repr__(), "<Diameter AVP: 33 [Proxy-State] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_re_auth_request_type_avp_stream(self):
stream = bytes.fromhex("0000011d4000000c00000000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], ReAuthRequestTypeAVP))
self.assertEqual(avps[0].code, RE_AUTH_REQUEST_TYPE_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, RE_AUTH_REQUEST_TYPE_AUTHORIZE_ONLY)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 285 [Re-Auth-Request-Type] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_accounting_sub_session_id_avp_stream(self):
stream = bytes.fromhex("0000011f4000001000000000057c8f9f")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AccountingSubSessionIdAVP))
self.assertEqual(avps[0].code, ACCOUNTING_SUB_SESSION_ID_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 16)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data.hex(), "00000000057c8f9f")
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 287 [Accounting-Sub-Session-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_authorization_lifetime_avp_stream(self):
stream = bytes.fromhex("000001234000000c00015180")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], AuthorizationLifetimeAVP))
self.assertEqual(avps[0].code, AUTHORIZATION_LIFETIME_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data.hex(), "00015180")
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 291 [Authorization-Lifetime] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_redirect_host_avp_stream(self):
stream = bytes.fromhex("000001244000002c6161613a2f2f686f73742e6578616d706c652e636f6d3b7472616e73706f72743d746370")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], RedirectHostAVP))
self.assertEqual(avps[0].code, REDIRECT_HOST_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 44)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"aaa://host.example.com;transport=tcp")
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 292 [Redirect-Host] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_destination_host_avp_stream(self):
stream = bytes.fromhex("0000012540000030687373736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], DestinationHostAVP))
self.assertEqual(avps[0].code, DESTINATION_HOST_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 48)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 293 [Destination-Host] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_error_reporting_host_avp_stream(self):
stream = bytes.fromhex("000001260000002d726a342e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], ErrorReportingHostAVP))
self.assertEqual(avps[0].code, ERROR_REPORTING_HOST_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertFalse(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 45)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, b"rj4.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertEqual(avps[0].get_padding_length(), 3)
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 294 [Error-Reporting-Host]>")
def test_diameter_avp__load_staticmethod__parsing_termination_cause_avp_stream(self):
stream = bytes.fromhex("000001274000000c00000001")
avps = DiameterAVP.load(stream)
self.assertTrue(isinstance(avps[0], TerminationCauseAVP))
self.assertEqual(avps[0].code, TERMINATION_CAUSE_AVP_CODE)
self.assertFalse(avps[0].is_vendor_id())
self.assertTrue(avps[0].is_mandatory())
self.assertFalse(avps[0].is_protected())
self.assertEqual(avps[0].get_length(), 12)
self.assertIsNone(avps[0].vendor_id)
self.assertEqual(avps[0].data, DIAMETER_LOGOUT)
self.assertIsNone(avps[0].get_padding_length())
self.assertEqual(avps[0].__repr__(), "<Diameter AVP: 295 [Termination-Cause] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_origin_realm_avp_stream(self):
    """DiameterAVP.load() parses a single Origin-Realm AVP stream."""
    stream = bytes.fromhex("000001284000000b65736d00")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, OriginRealmAVP)
    self.assertEqual(avp.code, ORIGIN_REALM_AVP_CODE)

    # Only the M (mandatory) flag bit is set.
    self.assertFalse(avp.is_vendor_id())
    self.assertTrue(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 11)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data, b"esm")
    self.assertEqual(avp.get_padding_length(), 1)
    self.assertEqual(repr(avp), "<Diameter AVP: 296 [Origin-Realm] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_experimental_result_avp_stream(self):
    """DiameterAVP.load() parses a grouped Experimental-Result AVP stream.

    The grouped AVP carries an Experimental-Result-Code AVP and a
    Vendor-Id AVP, both exposed through its ``avps`` attribute.
    """
    stream = bytes.fromhex("00000129000000200000012a4000000c000013940000010a4000000c000028af")

    grouped = DiameterAVP.load(stream)[0]

    # Outer (grouped) AVP: no V/M/P flag bits set.
    self.assertIsInstance(grouped, ExperimentalResultAVP)
    self.assertEqual(grouped.code, EXPERIMENTAL_RESULT_AVP_CODE)
    self.assertFalse(grouped.is_vendor_id())
    self.assertFalse(grouped.is_mandatory())
    self.assertFalse(grouped.is_protected())
    self.assertEqual(grouped.get_length(), 32)
    self.assertIsNone(grouped.vendor_id)
    self.assertEqual(grouped.data.hex(), "0000012a4000000c000013940000010a4000000c000028af")
    self.assertEqual(repr(grouped), "<Diameter AVP: 297 [Experimental-Result]>")

    inner_result_code, inner_vendor_id = grouped.avps[0], grouped.avps[1]

    # First inner AVP: Experimental-Result-Code (mandatory).
    self.assertIsInstance(inner_result_code, ExperimentalResultCodeAVP)
    self.assertEqual(inner_result_code.code, EXPERIMENTAL_RESULT_CODE_AVP_CODE)
    self.assertFalse(inner_result_code.is_vendor_id())
    self.assertTrue(inner_result_code.is_mandatory())
    self.assertFalse(inner_result_code.is_protected())
    self.assertEqual(inner_result_code.get_length(), 12)
    self.assertIsNone(inner_result_code.vendor_id)
    self.assertEqual(inner_result_code.data, DIAMETER_ERROR_SERVING_NODE_FEATURE_UNSUPPORTED)
    self.assertIsNone(inner_result_code.get_padding_length())
    self.assertEqual(repr(inner_result_code), "<Diameter AVP: 298 [Experimental-Result-Code] MANDATORY>")

    # Second inner AVP: Vendor-Id (mandatory).
    self.assertIsInstance(inner_vendor_id, VendorIdAVP)
    self.assertEqual(inner_vendor_id.code, VENDOR_ID_AVP_CODE)
    self.assertFalse(inner_vendor_id.is_vendor_id())
    self.assertTrue(inner_vendor_id.is_mandatory())
    self.assertFalse(inner_vendor_id.is_protected())
    self.assertEqual(inner_vendor_id.get_length(), 12)
    self.assertIsNone(inner_vendor_id.vendor_id)
    self.assertEqual(inner_vendor_id.data, VENDOR_ID_3GPP)
    self.assertIsNone(inner_vendor_id.get_padding_length())
    self.assertEqual(repr(inner_vendor_id), "<Diameter AVP: 266 [Vendor-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_experimental_result_code_avp_stream(self):
    """DiameterAVP.load() parses a single Experimental-Result-Code AVP stream."""
    stream = bytes.fromhex("0000012a4000000c000007d1")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, ExperimentalResultCodeAVP)
    self.assertEqual(avp.code, EXPERIMENTAL_RESULT_CODE_AVP_CODE)

    # Only the M (mandatory) flag bit is set.
    self.assertFalse(avp.is_vendor_id())
    self.assertTrue(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 12)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data, DIAMETER_SUCCESS_SERVER_NAME_NOT_STORED)
    self.assertIsNone(avp.get_padding_length())
    self.assertEqual(repr(avp), "<Diameter AVP: 298 [Experimental-Result-Code] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_inband_security_id_avp_stream(self):
    """DiameterAVP.load() parses a single Inband-Security-Id AVP stream."""
    stream = bytes.fromhex("0000012b0000000c00000000")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, InbandSecurityIdAVP)
    self.assertEqual(avp.code, INBAND_SECURITY_ID_AVP_CODE)

    # No V/M/P flag bits are set on this AVP.
    self.assertFalse(avp.is_vendor_id())
    self.assertFalse(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 12)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data, INBAND_SECURITY_ID_NO_SECURITY)
    self.assertIsNone(avp.get_padding_length())
    self.assertEqual(repr(avp), "<Diameter AVP: 299 [Inband-Security-Id]>")
def test_diameter_avp__load_staticmethod__parsing_mip_home_agent_host_avp_stream(self):
    """DiameterAVP.load() parses a grouped Mip-Home-Agent-Host AVP stream.

    The grouped AVP exposes its Destination-Realm and Destination-Host
    children through dedicated attributes.
    """
    stream = bytes.fromhex("0000015c400000700000011b400000296570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000000001254000003a746f706f6e2e73357067772e6e6f64652e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000")

    grouped = DiameterAVP.load(stream)[0]

    # Outer (grouped) AVP: mandatory flag only.
    self.assertIsInstance(grouped, MipHomeAgentHostAVP)
    self.assertEqual(grouped.code, MIP_HOME_AGENT_HOST_AVP_CODE)
    self.assertFalse(grouped.is_vendor_id())
    self.assertTrue(grouped.is_mandatory())
    self.assertFalse(grouped.is_protected())
    self.assertEqual(grouped.get_length(), 112)
    self.assertIsNone(grouped.vendor_id)
    self.assertEqual(grouped.data.hex(), "0000011b400000296570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000000001254000003a746f706f6e2e73357067772e6e6f64652e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000")
    self.assertIsNone(grouped.get_padding_length())
    self.assertEqual(repr(grouped), "<Diameter AVP: 348 [Mip-Home-Agent-Host] MANDATORY>")

    realm = grouped.destination_realm_avp
    host = grouped.destination_host_avp

    # Destination-Realm child (mandatory, padded with 3 bytes).
    self.assertIsInstance(realm, DestinationRealmAVP)
    self.assertEqual(realm.code, DESTINATION_REALM_AVP_CODE)
    self.assertFalse(realm.is_vendor_id())
    self.assertTrue(realm.is_mandatory())
    self.assertFalse(realm.is_protected())
    self.assertEqual(realm.get_length(), 41)
    self.assertIsNone(realm.vendor_id)
    self.assertEqual(realm.data, b"epc.mncXXX.mccYYY.3gppnetwork.org")
    self.assertEqual(realm.get_padding_length(), 3)
    self.assertEqual(repr(realm), "<Diameter AVP: 283 [Destination-Realm] MANDATORY>")

    # Destination-Host child (mandatory, padded with 2 bytes).
    self.assertIsInstance(host, DestinationHostAVP)
    self.assertEqual(host.code, DESTINATION_HOST_AVP_CODE)
    self.assertFalse(host.is_vendor_id())
    self.assertTrue(host.is_mandatory())
    self.assertFalse(host.is_protected())
    self.assertEqual(host.get_length(), 58)
    self.assertIsNone(host.vendor_id)
    self.assertEqual(host.data, b"topon.s5pgw.node.epc.mncXXX.mccYYY.3gppnetwork.org")
    self.assertEqual(host.get_padding_length(), 2)
    self.assertEqual(repr(host), "<Diameter AVP: 293 [Destination-Host] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_subscription_id_avp_stream(self):
    """DiameterAVP.load() parses a grouped Subscription-Id AVP stream.

    The grouped AVP exposes its Subscription-Id-Type and
    Subscription-Id-Data children through dedicated attributes.
    """
    stream = bytes.fromhex("000001bb4000002c000001c24000000c00000000000001bc4000001535353636313233343536373839000000")

    grouped = DiameterAVP.load(stream)[0]

    # Outer (grouped) AVP: mandatory flag only.
    self.assertIsInstance(grouped, SubscriptionIdAVP)
    self.assertEqual(grouped.code, SUBSCRIPTION_ID_AVP_CODE)
    self.assertFalse(grouped.is_vendor_id())
    self.assertTrue(grouped.is_mandatory())
    self.assertFalse(grouped.is_protected())
    self.assertEqual(grouped.get_length(), 44)
    self.assertIsNone(grouped.vendor_id)
    self.assertEqual(grouped.data.hex(), "000001c24000000c00000000000001bc4000001535353636313233343536373839000000")
    self.assertIsNone(grouped.get_padding_length())
    self.assertEqual(repr(grouped), "<Diameter AVP: 443 [Subscription-Id] MANDATORY>")

    sub_type = grouped.subscription_id_type_avp
    sub_data = grouped.subscription_id_data_avp

    # Subscription-Id-Type child (mandatory).
    self.assertIsInstance(sub_type, SubscriptionIdTypeAVP)
    self.assertEqual(sub_type.code, SUBSCRIPTION_ID_TYPE_AVP_CODE)
    self.assertFalse(sub_type.is_vendor_id())
    self.assertTrue(sub_type.is_mandatory())
    self.assertFalse(sub_type.is_protected())
    self.assertEqual(sub_type.get_length(), 12)
    self.assertIsNone(sub_type.vendor_id)
    self.assertEqual(sub_type.data, END_USER_E164)
    self.assertIsNone(sub_type.get_padding_length())
    self.assertEqual(repr(sub_type), "<Diameter AVP: 450 [Subscription-Id-Type] MANDATORY>")

    # Subscription-Id-Data child (mandatory, padded with 3 bytes).
    self.assertIsInstance(sub_data, SubscriptionIdDataAVP)
    self.assertEqual(sub_data.code, SUBSCRIPTION_ID_DATA_AVP_CODE)
    self.assertFalse(sub_data.is_vendor_id())
    self.assertTrue(sub_data.is_mandatory())
    self.assertFalse(sub_data.is_protected())
    self.assertEqual(sub_data.get_length(), 21)
    self.assertIsNone(sub_data.vendor_id)
    self.assertEqual(sub_data.data, b"5566123456789")
    self.assertEqual(sub_data.get_padding_length(), 3)
    self.assertEqual(repr(sub_data), "<Diameter AVP: 444 [Subscription-Id-Data] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_subscription_id_data_avp_stream(self):
    """DiameterAVP.load() parses a single Subscription-Id-Data AVP stream."""
    stream = bytes.fromhex("000001bc4000001535353131313233343536373839000000")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, SubscriptionIdDataAVP)
    self.assertEqual(avp.code, SUBSCRIPTION_ID_DATA_AVP_CODE)

    # Only the M (mandatory) flag bit is set.
    self.assertFalse(avp.is_vendor_id())
    self.assertTrue(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 21)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data, b"5511123456789")
    self.assertEqual(avp.get_padding_length(), 3)
    self.assertEqual(repr(avp), "<Diameter AVP: 444 [Subscription-Id-Data] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_subscription_id_type_avp_stream(self):
    """DiameterAVP.load() parses a single Subscription-Id-Type AVP stream."""
    stream = bytes.fromhex("000001c24000000c00000001")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, SubscriptionIdTypeAVP)
    self.assertEqual(avp.code, SUBSCRIPTION_ID_TYPE_AVP_CODE)

    # Only the M (mandatory) flag bit is set.
    self.assertFalse(avp.is_vendor_id())
    self.assertTrue(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 12)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data, END_USER_IMSI)
    self.assertIsNone(avp.get_padding_length())
    self.assertEqual(repr(avp), "<Diameter AVP: 450 [Subscription-Id-Type] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_eap_payload_avp_stream(self):
    """DiameterAVP.load() parses a single Eap-Payload AVP stream."""
    stream = bytes.fromhex("000001ce4000003a02000032016d792d75736572406e61692e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, EapPayloadAVP)
    self.assertEqual(avp.code, EAP_PAYLOAD_AVP_CODE)

    # Only the M (mandatory) flag bit is set.
    self.assertFalse(avp.is_vendor_id())
    self.assertTrue(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 58)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data, b"\x02\x00\x002\x01my-user@nai.epc.mncXXX.mccYYY.3gppnetwork.org")
    self.assertEqual(avp.get_padding_length(), 2)
    self.assertEqual(repr(avp), "<Diameter AVP: 462 [Eap-Payload] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_eap_master_session_key_avp_stream(self):
    """DiameterAVP.load() parses a single Eap-Master-Session-Key AVP stream."""
    stream = bytes.fromhex("000001d000000048ec3208c43154f60862858afa650dd875e8a095dfcd364e73420fcc573388d4c207308ace020aa3e3f9ff76ed1821a044e8deed2470997fbfbf5197d724d51fa1")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, EapMasterSessionKeyAVP)
    self.assertEqual(avp.code, EAP_MASTER_SESSION_KEY_AVP_CODE)

    # No V/M/P flag bits are set on this AVP.
    self.assertFalse(avp.is_vendor_id())
    self.assertFalse(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 72)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data.hex(), "ec3208c43154f60862858afa650dd875e8a095dfcd364e73420fcc573388d4c207308ace020aa3e3f9ff76ed1821a044e8deed2470997fbfbf5197d724d51fa1")
    self.assertIsNone(avp.get_padding_length())
    self.assertEqual(repr(avp), "<Diameter AVP: 464 [Eap-Master-Session-Key]>")
def test_diameter_avp__load_staticmethod__parsing_accounting_record_type_avp_stream(self):
    """DiameterAVP.load() parses a single Accounting-Record-Type AVP stream."""
    stream = bytes.fromhex("000001e04000000c00000003")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, AccountingRecordTypeAVP)
    self.assertEqual(avp.code, ACCOUNTING_RECORD_TYPE_AVP_CODE)

    # Only the M (mandatory) flag bit is set.
    self.assertFalse(avp.is_vendor_id())
    self.assertTrue(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 12)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data, ACCOUNTING_RECORD_TYPE_INTERIM_RECORD)
    self.assertIsNone(avp.get_padding_length())
    self.assertEqual(repr(avp), "<Diameter AVP: 480 [Accounting-Record-Type] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_accounting_realtime_required_avp_stream(self):
    """DiameterAVP.load() parses a single Accounting-Realtime-Required AVP stream."""
    stream = bytes.fromhex("000001e34000000c00000003")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, AccountingRealtimeRequiredAVP)
    self.assertEqual(avp.code, ACCOUNTING_REALTIME_REQUIRED_AVP_CODE)

    # Only the M (mandatory) flag bit is set.
    self.assertFalse(avp.is_vendor_id())
    self.assertTrue(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 12)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data, ACCOUNTING_REALTIME_REQUIRED_GRAND_AND_LOSE)
    self.assertIsNone(avp.get_padding_length())
    self.assertEqual(repr(avp), "<Diameter AVP: 483 [Accounting-Realtime-Required] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_accounting_record_number_avp_stream(self):
    """DiameterAVP.load() parses a single Accounting-Record-Number AVP stream."""
    stream = bytes.fromhex("000001e54000000c00000002")

    avp = DiameterAVP.load(stream)[0]

    self.assertIsInstance(avp, AccountingRecordNumberAVP)
    self.assertEqual(avp.code, ACCOUNTING_RECORD_NUMBER_AVP_CODE)

    # Only the M (mandatory) flag bit is set.
    self.assertFalse(avp.is_vendor_id())
    self.assertTrue(avp.is_mandatory())
    self.assertFalse(avp.is_protected())

    self.assertEqual(avp.get_length(), 12)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data.hex(), "00000002")
    self.assertIsNone(avp.get_padding_length())
    self.assertEqual(repr(avp), "<Diameter AVP: 485 [Accounting-Record-Number] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_mip6_agent_info_avp_stream(self):
    """DiameterAVP.load() parses a doubly-grouped Mip6-Agent-Info AVP stream.

    Mip6-Agent-Info wraps a Mip-Home-Agent-Host AVP, which in turn wraps
    Destination-Realm and Destination-Host AVPs.
    """
    # Show full diffs on failure: the grouped payload comparisons are long.
    self.maxDiff = None
    stream = bytes.fromhex("000001e6400000780000015c400000700000011b400000296570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000000001254000003a746f706f6e2e73357067772e6e6f64652e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000")

    agent_info = DiameterAVP.load(stream)[0]

    # Outermost (grouped) AVP: mandatory flag only.
    self.assertIsInstance(agent_info, Mip6AgentInfoAVP)
    self.assertEqual(agent_info.code, MIP6_AGENT_INFO_AVP_CODE)
    self.assertFalse(agent_info.is_vendor_id())
    self.assertTrue(agent_info.is_mandatory())
    self.assertFalse(agent_info.is_protected())
    self.assertEqual(agent_info.get_length(), 120)
    self.assertIsNone(agent_info.vendor_id)
    self.assertEqual(agent_info.data.hex(), "0000015c400000700000011b400000296570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000000001254000003a746f706f6e2e73357067772e6e6f64652e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000")
    self.assertIsNone(agent_info.get_padding_length())
    self.assertEqual(repr(agent_info), "<Diameter AVP: 486 [Mip6-Agent-Info] MANDATORY>")

    # Nested grouped AVP: Mip-Home-Agent-Host (mandatory).
    home_agent_host = agent_info.mip_home_agent_host_avp
    self.assertIsInstance(home_agent_host, MipHomeAgentHostAVP)
    self.assertEqual(home_agent_host.code, MIP_HOME_AGENT_HOST_AVP_CODE)
    self.assertFalse(home_agent_host.is_vendor_id())
    self.assertTrue(home_agent_host.is_mandatory())
    self.assertFalse(home_agent_host.is_protected())
    self.assertEqual(home_agent_host.get_length(), 112)
    self.assertIsNone(home_agent_host.vendor_id)
    self.assertEqual(home_agent_host.data.hex(), "0000011b400000296570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000000001254000003a746f706f6e2e73357067772e6e6f64652e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000")
    self.assertIsNone(home_agent_host.get_padding_length())
    self.assertEqual(repr(home_agent_host), "<Diameter AVP: 348 [Mip-Home-Agent-Host] MANDATORY>")

    realm = home_agent_host.destination_realm_avp
    host = home_agent_host.destination_host_avp

    # Destination-Realm leaf (mandatory, padded with 3 bytes).
    self.assertIsInstance(realm, DestinationRealmAVP)
    self.assertEqual(realm.code, DESTINATION_REALM_AVP_CODE)
    self.assertFalse(realm.is_vendor_id())
    self.assertTrue(realm.is_mandatory())
    self.assertFalse(realm.is_protected())
    self.assertEqual(realm.get_length(), 41)
    self.assertIsNone(realm.vendor_id)
    self.assertEqual(realm.data, b"epc.mncXXX.mccYYY.3gppnetwork.org")
    self.assertEqual(realm.get_padding_length(), 3)
    self.assertEqual(repr(realm), "<Diameter AVP: 283 [Destination-Realm] MANDATORY>")

    # Destination-Host leaf (mandatory, padded with 2 bytes).
    self.assertIsInstance(host, DestinationHostAVP)
    self.assertEqual(host.code, DESTINATION_HOST_AVP_CODE)
    self.assertFalse(host.is_vendor_id())
    self.assertTrue(host.is_mandatory())
    self.assertFalse(host.is_protected())
    self.assertEqual(host.get_length(), 58)
    self.assertIsNone(host.vendor_id)
    self.assertEqual(host.data, b"topon.s5pgw.node.epc.mncXXX.mccYYY.3gppnetwork.org")
    self.assertEqual(host.get_padding_length(), 2)
    self.assertEqual(repr(host), "<Diameter AVP: 293 [Destination-Host] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_stream_of_2_different_avps(self):
    """DiameterAVP.load() parses two different AVPs back-to-back in one stream."""
    stream = bytes.fromhex("0000012b0000000c0000000000000129000000200000012a4000000c000013940000010a4000000c000028af")

    avps = DiameterAVP.load(stream)

    # First AVP: Inband-Security-Id (no flag bits set).
    inband_security_id_avp = avps[0]
    self.assertIsInstance(inband_security_id_avp, InbandSecurityIdAVP)
    self.assertEqual(inband_security_id_avp.code, INBAND_SECURITY_ID_AVP_CODE)
    self.assertFalse(inband_security_id_avp.is_vendor_id())
    self.assertFalse(inband_security_id_avp.is_mandatory())
    self.assertFalse(inband_security_id_avp.is_protected())
    self.assertEqual(inband_security_id_avp.get_length(), 12)
    self.assertIsNone(inband_security_id_avp.vendor_id)
    self.assertEqual(inband_security_id_avp.data, INBAND_SECURITY_ID_NO_SECURITY)
    self.assertIsNone(inband_security_id_avp.get_padding_length())
    self.assertEqual(repr(inband_security_id_avp), "<Diameter AVP: 299 [Inband-Security-Id]>")

    # Second AVP: grouped Experimental-Result (no flag bits set).
    experimental_result_avp = avps[1]
    self.assertIsInstance(experimental_result_avp, ExperimentalResultAVP)
    self.assertEqual(experimental_result_avp.code, EXPERIMENTAL_RESULT_AVP_CODE)
    self.assertFalse(experimental_result_avp.is_vendor_id())
    self.assertFalse(experimental_result_avp.is_mandatory())
    self.assertFalse(experimental_result_avp.is_protected())
    self.assertEqual(experimental_result_avp.get_length(), 32)
    self.assertIsNone(experimental_result_avp.vendor_id)
    self.assertEqual(experimental_result_avp.data.hex(), "0000012a4000000c000013940000010a4000000c000028af")
    self.assertEqual(repr(experimental_result_avp), "<Diameter AVP: 297 [Experimental-Result]>")

    inner_result_code, inner_vendor_id = experimental_result_avp.avps[0], experimental_result_avp.avps[1]

    # Grouped child: Experimental-Result-Code (mandatory).
    self.assertIsInstance(inner_result_code, ExperimentalResultCodeAVP)
    self.assertEqual(inner_result_code.code, EXPERIMENTAL_RESULT_CODE_AVP_CODE)
    self.assertFalse(inner_result_code.is_vendor_id())
    self.assertTrue(inner_result_code.is_mandatory())
    self.assertFalse(inner_result_code.is_protected())
    self.assertEqual(inner_result_code.get_length(), 12)
    self.assertIsNone(inner_result_code.vendor_id)
    self.assertEqual(inner_result_code.data, DIAMETER_ERROR_SERVING_NODE_FEATURE_UNSUPPORTED)
    self.assertIsNone(inner_result_code.get_padding_length())
    self.assertEqual(repr(inner_result_code), "<Diameter AVP: 298 [Experimental-Result-Code] MANDATORY>")

    # Grouped child: Vendor-Id (mandatory).
    self.assertIsInstance(inner_vendor_id, VendorIdAVP)
    self.assertEqual(inner_vendor_id.code, VENDOR_ID_AVP_CODE)
    self.assertFalse(inner_vendor_id.is_vendor_id())
    self.assertTrue(inner_vendor_id.is_mandatory())
    self.assertFalse(inner_vendor_id.is_protected())
    self.assertEqual(inner_vendor_id.get_length(), 12)
    self.assertIsNone(inner_vendor_id.vendor_id)
    self.assertEqual(inner_vendor_id.data, VENDOR_ID_3GPP)
    self.assertIsNone(inner_vendor_id.get_padding_length())
    self.assertEqual(repr(inner_vendor_id), "<Diameter AVP: 266 [Vendor-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_stream_of_3_different_avps(self):
    """DiameterAVP.load() parses three different AVPs back-to-back in one stream.

    Note: the first parsed AVP was previously bound to a local named
    ``experimental_result_avp`` even though it holds an
    ``ErrorReportingHostAVP``; the locals now match the AVPs they hold,
    consistent with the 4-AVP variant of this test below.
    """
    stream = bytes.fromhex("000001260000002d726a342e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000000001274000000c00000001000001284000000b65736d00")
    avps = DiameterAVP.load(stream)

    #: First AVP — Error-Reporting-Host (no flag bits set).
    error_reporting_host_avp = avps[0]
    self.assertTrue(isinstance(error_reporting_host_avp, ErrorReportingHostAVP))
    self.assertEqual(error_reporting_host_avp.code, ERROR_REPORTING_HOST_AVP_CODE)
    self.assertFalse(error_reporting_host_avp.is_vendor_id())
    self.assertFalse(error_reporting_host_avp.is_mandatory())
    self.assertFalse(error_reporting_host_avp.is_protected())
    self.assertEqual(error_reporting_host_avp.get_length(), 45)
    self.assertIsNone(error_reporting_host_avp.vendor_id)
    self.assertEqual(error_reporting_host_avp.data, b"rj4.epc.mncXXX.mccYYY.3gppnetwork.org")
    self.assertEqual(error_reporting_host_avp.get_padding_length(), 3)
    self.assertEqual(error_reporting_host_avp.__repr__(), "<Diameter AVP: 294 [Error-Reporting-Host]>")

    #: Second AVP — Termination-Cause (mandatory).
    termination_cause_avp = avps[1]
    self.assertTrue(isinstance(termination_cause_avp, TerminationCauseAVP))
    self.assertEqual(termination_cause_avp.code, TERMINATION_CAUSE_AVP_CODE)
    self.assertFalse(termination_cause_avp.is_vendor_id())
    self.assertTrue(termination_cause_avp.is_mandatory())
    self.assertFalse(termination_cause_avp.is_protected())
    self.assertEqual(termination_cause_avp.get_length(), 12)
    self.assertIsNone(termination_cause_avp.vendor_id)
    self.assertEqual(termination_cause_avp.data, DIAMETER_LOGOUT)
    self.assertIsNone(termination_cause_avp.get_padding_length())
    self.assertEqual(termination_cause_avp.__repr__(), "<Diameter AVP: 295 [Termination-Cause] MANDATORY>")

    #: Third AVP — Origin-Realm (mandatory, padded with 1 byte).
    origin_realm_avp = avps[2]
    self.assertTrue(isinstance(origin_realm_avp, OriginRealmAVP))
    self.assertEqual(origin_realm_avp.code, ORIGIN_REALM_AVP_CODE)
    self.assertFalse(origin_realm_avp.is_vendor_id())
    self.assertTrue(origin_realm_avp.is_mandatory())
    self.assertFalse(origin_realm_avp.is_protected())
    self.assertEqual(origin_realm_avp.get_length(), 11)
    self.assertIsNone(origin_realm_avp.vendor_id)
    self.assertEqual(origin_realm_avp.data, b"esm")
    self.assertEqual(origin_realm_avp.get_padding_length(), 1)
    self.assertEqual(origin_realm_avp.__repr__(), "<Diameter AVP: 296 [Origin-Realm] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_stream_of_4_different_avps(self):
    """DiameterAVP.load() parses four different AVPs back-to-back in one stream."""
    stream = bytes.fromhex("000001260000002d726a342e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000000000012540000030687373736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000011d4000000c00000000000001124000000c00000001")

    avps = DiameterAVP.load(stream)

    # First AVP: Error-Reporting-Host (no flag bits set).
    error_reporting_host_avp = avps[0]
    self.assertIsInstance(error_reporting_host_avp, ErrorReportingHostAVP)
    self.assertEqual(error_reporting_host_avp.code, ERROR_REPORTING_HOST_AVP_CODE)
    self.assertFalse(error_reporting_host_avp.is_vendor_id())
    self.assertFalse(error_reporting_host_avp.is_mandatory())
    self.assertFalse(error_reporting_host_avp.is_protected())
    self.assertEqual(error_reporting_host_avp.get_length(), 45)
    self.assertIsNone(error_reporting_host_avp.vendor_id)
    self.assertEqual(error_reporting_host_avp.data, b"rj4.epc.mncXXX.mccYYY.3gppnetwork.org")
    self.assertEqual(error_reporting_host_avp.get_padding_length(), 3)
    self.assertEqual(repr(error_reporting_host_avp), "<Diameter AVP: 294 [Error-Reporting-Host]>")

    # Second AVP: Destination-Host (mandatory).
    destination_host_avp = avps[1]
    self.assertIsInstance(destination_host_avp, DestinationHostAVP)
    self.assertEqual(destination_host_avp.code, DESTINATION_HOST_AVP_CODE)
    self.assertFalse(destination_host_avp.is_vendor_id())
    self.assertTrue(destination_host_avp.is_mandatory())
    self.assertFalse(destination_host_avp.is_protected())
    self.assertEqual(destination_host_avp.get_length(), 48)
    self.assertIsNone(destination_host_avp.vendor_id)
    self.assertEqual(destination_host_avp.data, b"hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
    self.assertIsNone(destination_host_avp.get_padding_length())
    self.assertEqual(repr(destination_host_avp), "<Diameter AVP: 293 [Destination-Host] MANDATORY>")

    # Third AVP: Re-Auth-Request-Type (mandatory).
    re_auth_request_type_avp = avps[2]
    self.assertIsInstance(re_auth_request_type_avp, ReAuthRequestTypeAVP)
    self.assertEqual(re_auth_request_type_avp.code, RE_AUTH_REQUEST_TYPE_AVP_CODE)
    self.assertFalse(re_auth_request_type_avp.is_vendor_id())
    self.assertTrue(re_auth_request_type_avp.is_mandatory())
    self.assertFalse(re_auth_request_type_avp.is_protected())
    self.assertEqual(re_auth_request_type_avp.get_length(), 12)
    self.assertIsNone(re_auth_request_type_avp.vendor_id)
    self.assertEqual(re_auth_request_type_avp.data, RE_AUTH_REQUEST_TYPE_AUTHORIZE_ONLY)
    self.assertIsNone(re_auth_request_type_avp.get_padding_length())
    self.assertEqual(repr(re_auth_request_type_avp), "<Diameter AVP: 285 [Re-Auth-Request-Type] MANDATORY>")

    # Fourth AVP: Auth-Request-Type (mandatory).
    auth_request_type_avp = avps[3]
    self.assertIsInstance(auth_request_type_avp, AuthRequestTypeAVP)
    self.assertEqual(auth_request_type_avp.code, AUTH_REQUEST_TYPE_AVP_CODE)
    self.assertFalse(auth_request_type_avp.is_vendor_id())
    self.assertTrue(auth_request_type_avp.is_mandatory())
    self.assertFalse(auth_request_type_avp.is_protected())
    self.assertEqual(auth_request_type_avp.get_length(), 12)
    self.assertIsNone(auth_request_type_avp.vendor_id)
    self.assertEqual(auth_request_type_avp.data, AUTH_REQUEST_TYPE_AUTHENTICATE_ONLY)
    self.assertIsNone(auth_request_type_avp.get_padding_length())
    self.assertEqual(repr(auth_request_type_avp), "<Diameter AVP: 274 [Auth-Request-Type] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_stream_of_2_similar_avps(self):
    """DiameterAVP.load() parses two identical Result-Code AVPs from one stream."""
    stream = bytes.fromhex("0000010c4000000c000007d10000010c4000000c000007d1")

    # Both parsed AVPs must be identical Result-Code AVPs.
    for parsed in DiameterAVP.load(stream):
        self.assertIsInstance(parsed, ResultCodeAVP)
        self.assertEqual(parsed.code, RESULT_CODE_AVP_CODE)
        self.assertFalse(parsed.is_vendor_id())
        self.assertTrue(parsed.is_mandatory())
        self.assertFalse(parsed.is_protected())
        self.assertEqual(parsed.get_length(), 12)
        self.assertIsNone(parsed.vendor_id)
        self.assertEqual(parsed.data, DIAMETER_SUCCESS)
        self.assertIsNone(parsed.get_padding_length())
        self.assertEqual(repr(parsed), "<Diameter AVP: 268 [Result-Code] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_stream_of_3_similar_avps(self):
    """DiameterAVP.load() parses three identical Supported-Vendor-Id AVPs."""
    stream = bytes.fromhex("000001094000000c000028af000001094000000c000028af000001094000000c000028af")

    # All three parsed AVPs must be identical Supported-Vendor-Id AVPs.
    for parsed in DiameterAVP.load(stream):
        self.assertIsInstance(parsed, SupportedVendorIdAVP)
        self.assertEqual(parsed.code, SUPPORTED_VENDOR_ID_AVP_CODE)
        self.assertFalse(parsed.is_vendor_id())
        self.assertTrue(parsed.is_mandatory())
        self.assertFalse(parsed.is_protected())
        self.assertEqual(parsed.get_length(), 12)
        self.assertIsNone(parsed.vendor_id)
        self.assertEqual(parsed.data, VENDOR_ID_3GPP)
        self.assertIsNone(parsed.get_padding_length())
        self.assertEqual(repr(parsed), "<Diameter AVP: 265 [Supported-Vendor-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_stream_of_4_similar_avps(self):
    """DiameterAVP.load() parses four identical grouped Vendor-Specific-Application-Id AVPs.

    Each grouped AVP carries a Vendor-Id AVP and an Auth-Application-Id
    AVP, exposed through its ``avps`` attribute.
    """
    stream = bytes.fromhex("00000104400000200000010a4000000c000028af000001024000000c0100003000000104400000200000010a4000000c000028af000001024000000c0100003000000104400000200000010a4000000c000028af000001024000000c0100003000000104400000200000010a4000000c000028af000001024000000c01000030")

    for parsed in DiameterAVP.load(stream):
        # Outer (grouped) AVP: mandatory flag only.
        self.assertIsInstance(parsed, VendorSpecificApplicationIdAVP)
        self.assertEqual(parsed.code, VENDOR_SPECIFIC_APPLICATION_ID_AVP_CODE)
        self.assertFalse(parsed.is_vendor_id())
        self.assertTrue(parsed.is_mandatory())
        self.assertFalse(parsed.is_protected())
        self.assertEqual(parsed.get_length(), 32)
        self.assertIsNone(parsed.vendor_id)
        self.assertEqual(parsed.data.hex(), "0000010a4000000c000028af000001024000000c01000030")
        self.assertEqual(repr(parsed), "<Diameter AVP: 260 [Vendor-Specific-Application-Id] MANDATORY>")

        inner_vendor_id, inner_auth_app_id = parsed.avps[0], parsed.avps[1]

        # Grouped child: Vendor-Id (mandatory).
        self.assertIsInstance(inner_vendor_id, VendorIdAVP)
        self.assertEqual(inner_vendor_id.code, VENDOR_ID_AVP_CODE)
        self.assertFalse(inner_vendor_id.is_vendor_id())
        self.assertTrue(inner_vendor_id.is_mandatory())
        self.assertFalse(inner_vendor_id.is_protected())
        self.assertEqual(inner_vendor_id.get_length(), 12)
        self.assertIsNone(inner_vendor_id.vendor_id)
        self.assertEqual(inner_vendor_id.data.hex(), "000028af")
        self.assertEqual(repr(inner_vendor_id), "<Diameter AVP: 266 [Vendor-Id] MANDATORY>")

        # Grouped child: Auth-Application-Id (mandatory).
        self.assertIsInstance(inner_auth_app_id, AuthApplicationIdAVP)
        self.assertEqual(inner_auth_app_id.code, AUTH_APPLICATION_ID_AVP_CODE)
        self.assertFalse(inner_auth_app_id.is_vendor_id())
        self.assertTrue(inner_auth_app_id.is_mandatory())
        self.assertFalse(inner_auth_app_id.is_protected())
        self.assertEqual(inner_auth_app_id.get_length(), 12)
        self.assertIsNone(inner_auth_app_id.vendor_id)
        self.assertEqual(inner_auth_app_id.data.hex(), "01000030")
        self.assertEqual(repr(inner_auth_app_id), "<Diameter AVP: 258 [Auth-Application-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_stream_of_unknown_avp(self):
    """An unrecognized AVP code yields a plain DiameterAVP base instance."""
    stream = bytes.fromhex("000000000000000c00000000")

    avp = DiameterAVP.load(stream)[0]

    # Unknown codes must fall back to the base class, not a subclass.
    self.assertEqual(str(type(avp)), "<class 'bromelia.base.DiameterAVP'>")
    self.assertIsInstance(avp, DiameterAVP)

    self.assertFalse(avp.is_vendor_id())
    self.assertFalse(avp.is_mandatory())
    self.assertFalse(avp.is_protected())
    self.assertEqual(avp.get_length(), 12)
    self.assertIsNone(avp.vendor_id)
    self.assertEqual(avp.data, convert_to_4_bytes(0))
    self.assertIsNone(avp.get_padding_length())
    self.assertEqual(repr(avp), "<Diameter AVP: 0 [Unknown]>")
def test_diameter_avp__load_staticmethod__parsing_stream_of_2_similar_unknown_avps(self):
    """Two unrecognized AVPs in one stream each yield a base DiameterAVP."""
    stream = bytes.fromhex("000000000000000c00000000000000000000000c00000000")

    for parsed in DiameterAVP.load(stream):
        # Unknown codes must fall back to the base class, not a subclass.
        self.assertEqual(str(type(parsed)), "<class 'bromelia.base.DiameterAVP'>")
        self.assertIsInstance(parsed, DiameterAVP)
        self.assertFalse(parsed.is_vendor_id())
        self.assertFalse(parsed.is_mandatory())
        self.assertFalse(parsed.is_protected())
        self.assertEqual(parsed.get_length(), 12)
        self.assertIsNone(parsed.vendor_id)
        self.assertEqual(parsed.data, convert_to_4_bytes(0))
        self.assertIsNone(parsed.get_padding_length())
        self.assertEqual(repr(parsed), "<Diameter AVP: 0 [Unknown]>")
    def test_diameter_avp__load_staticmethod__parsing_stream_of_3_similar_unknown_avps(self):
        """Three back-to-back unknown AVPs each decode to a generic DiameterAVP."""
        stream = bytes.fromhex("000000000000000c00000000000000000000000c00000000000000000000000c00000000")
        avps = DiameterAVP.load(stream)
        for avp in avps:
            # Same 12-byte shape as the single-AVP case, repeated three times.
            self.assertEqual(str(type(avp)), "<class 'bromelia.base.DiameterAVP'>")
            self.assertTrue(isinstance(avp, DiameterAVP))
            self.assertFalse(avp.is_vendor_id())
            self.assertFalse(avp.is_mandatory())
            self.assertFalse(avp.is_protected())
            self.assertEqual(avp.get_length(), 12)
            self.assertIsNone(avp.vendor_id)
            self.assertEqual(avp.data, convert_to_4_bytes(0))
            self.assertIsNone(avp.get_padding_length())
            self.assertEqual(avp.__repr__(), "<Diameter AVP: 0 [Unknown]>")
    def test_diameter_avp__load_staticmethod__parsing_stream_of_4_similar_unknown_avps(self):
        """Four back-to-back unknown AVPs each decode to a generic DiameterAVP."""
        stream = bytes.fromhex("000000000000000c00000000000000000000000c00000000000000000000000c00000000000000000000000c00000000")
        avps = DiameterAVP.load(stream)
        for avp in avps:
            # Same 12-byte shape as the single-AVP case, repeated four times.
            self.assertEqual(str(type(avp)), "<class 'bromelia.base.DiameterAVP'>")
            self.assertTrue(isinstance(avp, DiameterAVP))
            self.assertFalse(avp.is_vendor_id())
            self.assertFalse(avp.is_mandatory())
            self.assertFalse(avp.is_protected())
            self.assertEqual(avp.get_length(), 12)
            self.assertIsNone(avp.vendor_id)
            self.assertEqual(avp.data, convert_to_4_bytes(0))
            self.assertIsNone(avp.get_padding_length())
            self.assertEqual(avp.__repr__(), "<Diameter AVP: 0 [Unknown]>")
    def test_diameter_avp__load_staticmethod__parsing_stream_of_2_unknown_avps_and_2_known_avps(self):
        """A mixed stream yields generic DiameterAVPs for unknown codes and
        typed subclasses (AcctApplicationIdAVP, code 259) for known ones."""
        stream = bytes.fromhex("000000000000000c00000000000000000000000c00000000000001034000000c01000030000001034000000c01000030")
        avps = DiameterAVP.load(stream)
        #: First AVP — unknown code 0, decoded as the generic base class.
        first_avp = avps[0]
        self.assertEqual(str(type(first_avp)), "<class 'bromelia.base.DiameterAVP'>")
        self.assertTrue(isinstance(first_avp, DiameterAVP))
        self.assertFalse(first_avp.is_vendor_id())
        self.assertFalse(first_avp.is_mandatory())
        self.assertFalse(first_avp.is_protected())
        self.assertEqual(first_avp.get_length(), 12)
        self.assertIsNone(first_avp.vendor_id)
        self.assertEqual(first_avp.data, convert_to_4_bytes(0))
        self.assertIsNone(first_avp.get_padding_length())
        self.assertEqual(first_avp.__repr__(), "<Diameter AVP: 0 [Unknown]>")
        #: Second AVP — identical unknown AVP.
        second_avp = avps[1]
        self.assertEqual(str(type(second_avp)), "<class 'bromelia.base.DiameterAVP'>")
        self.assertTrue(isinstance(second_avp, DiameterAVP))
        self.assertFalse(second_avp.is_vendor_id())
        self.assertFalse(second_avp.is_mandatory())
        self.assertFalse(second_avp.is_protected())
        self.assertEqual(second_avp.get_length(), 12)
        self.assertIsNone(second_avp.vendor_id)
        self.assertEqual(second_avp.data, convert_to_4_bytes(0))
        self.assertIsNone(second_avp.get_padding_length())
        self.assertEqual(second_avp.__repr__(), "<Diameter AVP: 0 [Unknown]>")
        #: Third AVP — known code 259, mapped to AcctApplicationIdAVP.
        acc_application_id_avp__1 = avps[2]
        self.assertTrue(isinstance(acc_application_id_avp__1, AcctApplicationIdAVP))
        self.assertEqual(acc_application_id_avp__1.code, ACCT_APPLICATION_ID_AVP_CODE)
        self.assertFalse(acc_application_id_avp__1.is_vendor_id())
        self.assertTrue(acc_application_id_avp__1.is_mandatory())
        self.assertFalse(acc_application_id_avp__1.is_protected())
        self.assertEqual(acc_application_id_avp__1.get_length(), 12)
        self.assertIsNone(acc_application_id_avp__1.vendor_id)
        self.assertEqual(acc_application_id_avp__1.data, DIAMETER_APPLICATION_SWm)
        self.assertIsNone(acc_application_id_avp__1.get_padding_length())
        self.assertEqual(acc_application_id_avp__1.__repr__(), "<Diameter AVP: 259 [Acct-Application-Id] MANDATORY>")
        #: Fourth AVP — identical Acct-Application-Id AVP.
        acc_application_id_avp__2 = avps[3]
        self.assertTrue(isinstance(acc_application_id_avp__2, AcctApplicationIdAVP))
        self.assertEqual(acc_application_id_avp__2.code, ACCT_APPLICATION_ID_AVP_CODE)
        self.assertFalse(acc_application_id_avp__2.is_vendor_id())
        self.assertTrue(acc_application_id_avp__2.is_mandatory())
        self.assertFalse(acc_application_id_avp__2.is_protected())
        self.assertEqual(acc_application_id_avp__2.get_length(), 12)
        self.assertIsNone(acc_application_id_avp__2.vendor_id)
        self.assertEqual(acc_application_id_avp__2.data, DIAMETER_APPLICATION_SWm)
        self.assertIsNone(acc_application_id_avp__2.get_padding_length())
        self.assertEqual(acc_application_id_avp__2.__repr__(), "<Diameter AVP: 259 [Acct-Application-Id] MANDATORY>")
def test_diameter_avp__load_staticmethod__parsing_invalid_stream_only_1_byte__incomplete_code(self):
stream = bytes.fromhex("00")
with self.assertRaises(AVPParsingError) as cm:
avps = DiameterAVP.load(stream)
self.assertEqual(cm.exception.args[0], "invalid bytes stream")
    def test_diameter_avp__load_staticmethod__parsing_invalid_stream_only_2_bytes__incomplete_code(self):
        """A 2-byte stream cannot hold the 4-byte AVP code field."""
        stream = bytes.fromhex("0000")
        with self.assertRaises(AVPParsingError) as cm:
            avps = DiameterAVP.load(stream)
        self.assertEqual(cm.exception.args[0], "invalid bytes stream")
    def test_diameter_avp__load_staticmethod__parsing_invalid_stream_only_3_bytes__incomplete_code(self):
        """A 3-byte stream cannot hold the 4-byte AVP code field."""
        stream = bytes.fromhex("000000")
        with self.assertRaises(AVPParsingError) as cm:
            avps = DiameterAVP.load(stream)
        self.assertEqual(cm.exception.args[0], "invalid bytes stream")
    def test_diameter_avp__load_staticmethod__parsing_invalid_stream_only_4_bytes__only_code(self):
        """A 4-byte stream holds the code field but no flags/length."""
        stream = bytes.fromhex("00000000")
        with self.assertRaises(AVPParsingError) as cm:
            avps = DiameterAVP.load(stream)
        self.assertEqual(cm.exception.args[0], "invalid bytes stream")
    def test_diameter_avp__load_staticmethod__parsing_invalid_stream_only_5_bytes__only_code_and_flags(self):
        """A 5-byte stream holds code (4) + flags (1); the length field is absent."""
        stream = bytes.fromhex("0000000011")
        with self.assertRaises(AVPParsingError) as cm:
            avps = DiameterAVP.load(stream)
        self.assertEqual(cm.exception.args[0], "invalid bytes stream. It contains only the code and flags fields")
def test_diameter_avp__load_staticmethod__parsing_invalid_stream_only_6_bytes__only_code_and_flags_and_incomplete_length(self):
stream = bytes.fromhex("0000000011")
with self.assertRaises(AVPParsingError) as cm:
avps = DiameterAVP.load(stream)
self.assertEqual(cm.exception.args[0], "invalid bytes stream. It contains only the code and flags fields")
def test_diameter_avp__load_staticmethod__parsing_invalid_stream_only_7_bytes__only_code_and_flags_and_incomplete_length(self):
stream = bytes.fromhex("000000001100")
with self.assertRaises(AVPParsingError) as cm:
avps = DiameterAVP.load(stream)
self.assertEqual(cm.exception.args[0], "invalid bytes stream. It contains only the code and flags fields")
    def test_diameter_avp__load_staticmethod__parsing_invalid_stream_only_8_bytes__only_code_and_flags_and_length(self):
        """Truncated stream missing the tail of the header.

        NOTE(review): the stream below is 7 bytes, not 8 as the test name
        claims — a true 8-byte stream would contain the complete header.
        Left unchanged because an 8-byte stream might raise a different
        error (or none); confirm the parser's intended behavior before
        renaming or replacing the stream.
        """
        stream = bytes.fromhex("00000000000011")
        with self.assertRaises(AVPParsingError) as cm:
            avps = DiameterAVP.load(stream)
        self.assertEqual(cm.exception.args[0], "invalid bytes stream. It contains only the code and flags fields")
    def test_diameter_avp__load_staticmethod__parsing_stream_of_9_bytes(self):
        """The length field (0x00004f = 79) disagrees with the actual 9-byte stream."""
        stream = bytes.fromhex("000000000000004f12")
        with self.assertRaises(AVPParsingError) as cm:
            avps = DiameterAVP.load(stream)
        self.assertEqual(cm.exception.args[0], "invalid bytes stream. The length field value does not correspond to the AVP length")
class TestUserNameAVP(unittest.TestCase):
    """Unit tests for the User-Name AVP (code 1).

    Fixed: the NAI literals had been replaced by "<EMAIL>" redaction
    placeholders; the reference hex in the nai-format test decodes to
    "my-user@nai.epc.mncXXX.mccYYY.3gppnetwork.org" (45 bytes, matching
    the encoded AVP length 0x35), so the literals are restored from it.
    """

    def test_user_name_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, UserNameAVP)

    def test_user_name_avp__repr_dunder(self):
        nai = "my-user@nai.epc.mncXXX.mccYYY.3gppnetwork.org"
        avp = UserNameAVP(nai)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 1 [User-Name] MANDATORY>")

    def test_user_name_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        nai = "my-user@nai.epc.mncXXX.mccYYY.3gppnetwork.org"
        avp = UserNameAVP(nai)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_user_name_avp__nai_format(self):
        """An RFC 4282 NAI (user@realm) serializes with 3 padding bytes."""
        nai = "my-user@nai.epc.mncXXX.mccYYY.3gppnetwork.org"
        avp = UserNameAVP(nai)
        ref = "00000001400000356d792d75736572406e61692e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000"
        self.assertEqual(avp.dump().hex(), ref)

    def test_user_name_avp__imsi_format(self):
        """A bare 15-digit IMSI serializes with 1 padding byte."""
        nai = "012345678901234"
        avp = UserNameAVP(nai)
        ref = "000000014000001730313233343536373839303132333400"
        self.assertEqual(avp.dump().hex(), ref)
class TestClassAVP(unittest.TestCase):
    """Unit tests for the Class AVP (code 25)."""

    def test_class_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, ClassAVP)

    def test_class_avp__repr_dunder(self):
        """repr() exposes code, name and the MANDATORY flag."""
        class_avp = ClassAVP("CLOSED")
        self.assertEqual(class_avp.__repr__(), "<Diameter AVP: 25 [Class] MANDATORY>")

    def test_class_avp__diameter_avp_convert_classmethod(self):
        """DiameterAVP.convert() keeps every field of the typed AVP."""
        class_avp = ClassAVP("CLOSED")
        generic = DiameterAVP.convert(class_avp)
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(generic, attr), getattr(class_avp, attr))

    def test_class_avp__1(self):
        """'CLOSED' payload (6 bytes) serializes with 2 padding bytes."""
        expected = "000000194000000e434c4f5345440000"
        self.assertEqual(ClassAVP("CLOSED").dump().hex(), expected)

    def test_class_avp__2(self):
        """'OPENED' payload (6 bytes) serializes with 2 padding bytes."""
        expected = "000000194000000e4f50454e45440000"
        self.assertEqual(ClassAVP("OPENED").dump().hex(), expected)
class TestSessionTimeoutAVP(unittest.TestCase):
    """Unit tests for the Session-Timeout AVP (code 27)."""

    def test_session_timeout_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, SessionTimeoutAVP)

    def test_session_timeout_avp__repr_dunder(self):
        timeout_avp = SessionTimeoutAVP(10799)
        self.assertEqual(timeout_avp.__repr__(), "<Diameter AVP: 27 [Session-Timeout] MANDATORY>")

    def test_session_timeout_avp__diameter_avp_convert_classmethod(self):
        """DiameterAVP.convert() keeps every field of the typed AVP."""
        timeout_avp = SessionTimeoutAVP(10799)
        generic = DiameterAVP.convert(timeout_avp)
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(generic, attr), getattr(timeout_avp, attr))

    def test_session_timeout_avp__1(self):
        # 10799 seconds encodes as 0x00002a2f in the 4-byte data field.
        expected = "0000001b4000000c00002a2f"
        self.assertEqual(SessionTimeoutAVP(10799).dump().hex(), expected)
class TestCallingStationIdAVP(unittest.TestCase):
    """Unit tests for the Calling-Station-Id AVP (code 31)."""

    def test_calling_station_id_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, CallingStationIdAVP)

    def test_calling_station_id_avp__repr_dunder(self):
        avp = CallingStationIdAVP("my-user")
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 31 [Calling-Station-Id] MANDATORY>")

    def test_calling_station_id_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = CallingStationIdAVP("my-user")
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_calling_station_id_avp__1(self):
        """5-byte payload serializes with 3 padding bytes."""
        avp = CallingStationIdAVP("frodo")
        ref = "0000001f4000000d66726f646f000000"
        self.assertEqual(avp.dump().hex(), ref)

    def test_calling_station_id_avp__2(self):
        """4-byte payload needs no padding."""
        avp = CallingStationIdAVP("link")
        ref = "0000001f4000000c6c696e6b"
        self.assertEqual(avp.dump().hex(), ref)
class TestProxyStateAVP(unittest.TestCase):
    """Unit tests for the Proxy-State AVP (code 33)."""

    def test_proxy_state_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, ProxyStateAVP)

    def test_proxy_state_avp__repr_dunder(self):
        avp = ProxyStateAVP("CLOSED")
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 33 [Proxy-State] MANDATORY>")

    def test_proxy_state_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = ProxyStateAVP("CLOSED")
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_proxy_state_avp__1(self):
        """'CLOSED' payload (6 bytes) serializes with 2 padding bytes.

        Renamed from the truncated `test_proxy_state_avp__` to match the
        `__<n>` suffix convention used by the sibling test classes.
        """
        avp = ProxyStateAVP("CLOSED")
        ref = "000000214000000e434c4f5345440000"
        self.assertEqual(avp.dump().hex(), ref)
class TestAcctSessionIdAVP(unittest.TestCase):
    """Unit tests for the Acct-Session-Id AVP (code 44)."""

    def test_acct_session_id_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, AcctSessionIdAVP)

    def test_acct_session_id_avp__repr_dunder(self):
        data = convert_to_4_bytes(42)
        avp = AcctSessionIdAVP(data)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 44 [Acct-Session-Id] MANDATORY>")

    def test_acct_session_id_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        data = convert_to_4_bytes(42)
        avp = AcctSessionIdAVP(data)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_acct_session_id_avp__1(self):
        """6733 encodes as 0x00001a4d in the data field."""
        data = convert_to_4_bytes(6733)
        avp = AcctSessionIdAVP(data)
        ref = "0000002c4000000c00001a4d"
        self.assertEqual(avp.dump().hex(), ref)

    def test_acct_session_id_avp__2(self):
        """3588 encodes as 0x00000e04 in the data field."""
        data = convert_to_4_bytes(3588)
        avp = AcctSessionIdAVP(data)
        ref = "0000002c4000000c00000e04"
        self.assertEqual(avp.dump().hex(), ref)
class TestAcctMultiSessionIdAVP(unittest.TestCase):
    """Unit tests for the Acct-Multi-Session-Id AVP (code 50)."""

    def test_acct_multi_session_id_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, AcctMultiSessionIdAVP)

    def test_acct_multi_session_id_avp__repr_dunder(self):
        avp = AcctMultiSessionIdAVP("es2")
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 50 [Acct-Multi-Session-Id] MANDATORY>")

    def test_acct_multi_session_id_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = AcctMultiSessionIdAVP("es2")
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_acct_multi_session_id_avp__1(self):
        # Reference hex decodes to "es2;403292;403292;403292" — the AVP
        # appends session identifiers to the supplied prefix.
        avp = AcctMultiSessionIdAVP("es2")
        ref = "00000032400000206573323b3430333239323b3430333239323b343033323932"
        self.assertEqual(avp.dump().hex(), ref)

    def test_acct_multi_session_id_avp__2(self):
        # Same ";403292;403292;403292" suffix after a hostname-style prefix.
        avp = AcctMultiSessionIdAVP("my-diameter-server.my-network")
        ref = "000000324000003a6d792d6469616d657465722d7365727665722e6d792d6e6574776f726b3b3430333239323b3430333239323b3430333239320000"
        self.assertEqual(avp.dump().hex(), ref)
class TestEventTimestampAVP(unittest.TestCase):
    """Unit tests for the Event-Timestamp AVP (code 55).

    Unlike most AVP classes here, EventTimestampAVP is constructible with
    no argument (no `no_value` test).
    """

    def test_event_timestamp_avp__repr_dunder(self):
        avp = EventTimestampAVP()
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 55 [Event-Timestamp] MANDATORY>")

    def test_event_timestamp_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = EventTimestampAVP()
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_event_timestamp_avp__1(self):
        # Data 0xe357fa5b ~ 3.81e9, consistent with an NTP-style timestamp
        # (seconds since 1900) for Nov 2020 — presumably Time format per
        # RFC 6733; verify against the AVP implementation.
        timestamp = datetime.datetime.strptime("Nov 12, 2020 18:15:55", '%b %d, %Y %H:%M:%S')
        avp = EventTimestampAVP(timestamp)
        ref = "000000374000000ce357fa5b"
        self.assertEqual(avp.dump().hex(), ref)

    def test_event_timestamp_avp__2(self):
        timestamp = datetime.datetime.strptime("Nov 12, 2020 18:12:08", '%b %d, %Y %H:%M:%S')
        avp = EventTimestampAVP(timestamp)
        ref = "000000374000000ce357f978"
        self.assertEqual(avp.dump().hex(), ref)
class TestAcctInterimIntervalAVP(unittest.TestCase):
    """Unit tests for the Acct-Interim-Interval AVP (code 85)."""

    def test_acct_interim_interval_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, AcctInterimIntervalAVP)

    def test_acct_interim_interval_avp__repr_dunder(self):
        # Fixed: the original test passed DIAMETER_APPLICATION_SWm (an
        # application-id constant) as the interval — a copy-paste leftover
        # from the application-id test classes. repr does not depend on the
        # data, so a realistic interval is used instead.
        avp = AcctInterimIntervalAVP(300)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 85 [Acct-Interim-Interval] MANDATORY>")

    def test_acct_interim_interval_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = AcctInterimIntervalAVP(300)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_acct_interim_interval_avp__1(self):
        """300 seconds encodes as 0x0000012c.

        Renamed from `__swm` — another copy-paste leftover; this AVP has
        nothing to do with the SWm application.
        """
        avp = AcctInterimIntervalAVP(300)
        ref = "000000554000000c0000012c"
        self.assertEqual(avp.dump().hex(), ref)
class TestHostIpAddressAVP(unittest.TestCase):
    """Unit tests for the Host-IP-Address AVP (code 257)."""

    def test_host_ip_address_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, HostIpAddressAVP)

    def test_host_ip_address_avp__repr_dunder(self):
        avp = HostIpAddressAVP("10.129.241.235")
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 257 [Host-Ip-Address] MANDATORY>")

    def test_host_ip_address_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = HostIpAddressAVP("10.129.241.235")
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_host_ip_address_avp__1(self):
        # Data = address family 0x0001 (IPv4) + 4 address bytes + 2 pad bytes.
        avp = HostIpAddressAVP("10.129.241.235")
        ref = "000001014000000e00010a81f1eb0000"
        self.assertEqual(avp.dump().hex(), ref)

    def test_host_ip_address_avp__2(self):
        avp = HostIpAddressAVP("10.159.120.36")
        ref = "000001014000000e00010a9f78240000"
        self.assertEqual(avp.dump().hex(), ref)
class TestAuthApplicationIdAVP(unittest.TestCase):
    """Unit tests for the Auth-Application-Id AVP (code 258),
    exercising each supported 3GPP application identifier."""

    def test_auth_application_id_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, AuthApplicationIdAVP)

    def test_auth_application_id_avp__repr_dunder(self):
        avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 258 [Auth-Application-Id] MANDATORY>")

    def test_auth_application_id_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_auth_application_id_avp__swm(self):
        """SWm application id encodes as 0x01000030."""
        avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        ref = "000001024000000c01000030"
        self.assertEqual(avp.dump().hex(), ref)

    def test_auth_application_id_avp__swx(self):
        """SWx application id encodes as 0x01000031."""
        avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_SWx)
        ref = "000001024000000c01000031"
        self.assertEqual(avp.dump().hex(), ref)

    def test_auth_application_id_avp__rx(self):
        """Rx application id encodes as 0x01000014."""
        avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_Rx)
        ref = "000001024000000c01000014"
        self.assertEqual(avp.dump().hex(), ref)

    def test_auth_application_id_avp__gx(self):
        """Gx application id encodes as 0x01000016."""
        avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_Gx)
        ref = "000001024000000c01000016"
        self.assertEqual(avp.dump().hex(), ref)

    def test_auth_application_id_avp__s6a_s6d(self):
        """S6a/S6d application id encodes as 0x01000023."""
        avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_S6a_S6d)
        ref = "000001024000000c01000023"
        self.assertEqual(avp.dump().hex(), ref)
class TestAcctApplicationIdAVP(unittest.TestCase):
    """Unit tests for the Acct-Application-Id AVP (code 259),
    exercising each supported 3GPP application identifier."""

    def test_acct_application_id_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, AcctApplicationIdAVP)

    def test_acct_application_id_avp__repr_dunder(self):
        avp = AcctApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 259 [Acct-Application-Id] MANDATORY>")

    def test_acct_application_id_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = AcctApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_acct_application_id_avp__swm(self):
        """SWm application id encodes as 0x01000030."""
        avp = AcctApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        ref = "000001034000000c01000030"
        self.assertEqual(avp.dump().hex(), ref)

    def test_acct_application_id_avp__swx(self):
        """SWx application id encodes as 0x01000031."""
        avp = AcctApplicationIdAVP(DIAMETER_APPLICATION_SWx)
        ref = "000001034000000c01000031"
        self.assertEqual(avp.dump().hex(), ref)

    def test_acct_application_id_avp__rx(self):
        """Rx application id encodes as 0x01000014."""
        avp = AcctApplicationIdAVP(DIAMETER_APPLICATION_Rx)
        ref = "000001034000000c01000014"
        self.assertEqual(avp.dump().hex(), ref)

    def test_acct_application_id_avp__gx(self):
        """Gx application id encodes as 0x01000016."""
        avp = AcctApplicationIdAVP(DIAMETER_APPLICATION_Gx)
        ref = "000001034000000c01000016"
        self.assertEqual(avp.dump().hex(), ref)

    def test_acct_application_id_avp__s6a_s6d(self):
        """S6a/S6d application id encodes as 0x01000023."""
        avp = AcctApplicationIdAVP(DIAMETER_APPLICATION_S6a_S6d)
        ref = "000001034000000c01000023"
        self.assertEqual(avp.dump().hex(), ref)
class TestVendorSpecificApplicationIdAVP(unittest.TestCase):
    """Unit tests for the grouped Vendor-Specific-Application-Id AVP (code 260)."""

    def test_vendor_specific_application_id_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, VendorSpecificApplicationIdAVP)

    def test_vendor_specific_application_id_avp__repr_dunder(self):
        vendor_id_avp = VendorIdAVP(VENDOR_ID_3GPP)
        auth_app_id_avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        avps = [vendor_id_avp, auth_app_id_avp]
        avp = VendorSpecificApplicationIdAVP(avps)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 260 [Vendor-Specific-Application-Id] MANDATORY>")

    def test_vendor_specific_application_id_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        vendor_id_avp = VendorIdAVP(VENDOR_ID_3GPP)
        auth_app_id_avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        avps = [vendor_id_avp, auth_app_id_avp]
        avp = VendorSpecificApplicationIdAVP(avps)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_vendor_specific_application_id_avp__default(self):
        """Grouped data = serialized Vendor-Id AVP + Auth-Application-Id AVP."""
        ref = "00000104400000200000010a4000000c000028af000001024000000c01000030"
        vendor_id_avp = VendorIdAVP(VENDOR_ID_3GPP)
        auth_app_id_avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        avps = [vendor_id_avp, auth_app_id_avp]
        avp = VendorSpecificApplicationIdAVP(avps)
        self.assertEqual(avp.dump().hex(), ref)

    def test_vendor_specific_application_id_avp__only_auth_and_acct_app_avps(self):
        """Omitting the Vendor-Id sub-AVP raises AVPAttributeValueError;
        args[1] carries the DIAMETER_MISSING_AVP result code."""
        auth_app_id_avp = AuthApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        acct_app_id_avp = AcctApplicationIdAVP(DIAMETER_APPLICATION_SWm)
        avps = [auth_app_id_avp, acct_app_id_avp]
        with self.assertRaises(AVPAttributeValueError) as cm:
            avp = VendorSpecificApplicationIdAVP(avps)
        self.assertEqual(cm.exception.args[1], DIAMETER_MISSING_AVP)
class TestRedirectHostUsageAVP(unittest.TestCase):
    """Unit tests for the Redirect-Host-Usage AVP (code 261),
    covering every enumerated usage value (0 through 6)."""

    def test_redirect_host_usage_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, RedirectHostUsageAVP)

    def test_redirect_host_usage_avp__repr_dunder(self):
        avp = RedirectHostUsageAVP(REDIRECT_HOST_USAGE_DONT_CACHE)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 261 [Redirect-Host-Usage] MANDATORY>")

    def test_redirect_host_usage_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = RedirectHostUsageAVP(REDIRECT_HOST_USAGE_DONT_CACHE)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_redirect_host_usage_avp__dont_cache(self):
        """DONT_CACHE encodes as enum value 0."""
        avp = RedirectHostUsageAVP(REDIRECT_HOST_USAGE_DONT_CACHE)
        ref = "000001054000000c00000000"
        self.assertEqual(avp.dump().hex(), ref)

    def test_redirect_host_usage_avp__all_session(self):
        """ALL_SESSION encodes as enum value 1."""
        avp = RedirectHostUsageAVP(REDIRECT_HOST_USAGE_ALL_SESSION)
        ref = "000001054000000c00000001"
        self.assertEqual(avp.dump().hex(), ref)

    def test_redirect_host_usage_avp__all_realm(self):
        """ALL_REALM encodes as enum value 2."""
        avp = RedirectHostUsageAVP(REDIRECT_HOST_USAGE_ALL_REALM)
        ref = "000001054000000c00000002"
        self.assertEqual(avp.dump().hex(), ref)

    def test_redirect_host_usage_avp__realm_and_application(self):
        """REALM_AND_APPLICATION encodes as enum value 3."""
        avp = RedirectHostUsageAVP(REDIRECT_HOST_USAGE_REALM_AND_APPLICATION)
        ref = "000001054000000c00000003"
        self.assertEqual(avp.dump().hex(), ref)

    def test_redirect_host_usage_avp__all_application(self):
        """ALL_APPLICATION encodes as enum value 4."""
        avp = RedirectHostUsageAVP(REDIRECT_HOST_USAGE_ALL_APPLICATION)
        ref = "000001054000000c00000004"
        self.assertEqual(avp.dump().hex(), ref)

    def test_redirect_host_usage_avp__all_host(self):
        """ALL_HOST encodes as enum value 5."""
        avp = RedirectHostUsageAVP(REDIRECT_HOST_USAGE_ALL_HOST)
        ref = "000001054000000c00000005"
        self.assertEqual(avp.dump().hex(), ref)

    def test_redirect_host_usage_avp__all_user(self):
        """ALL_USER encodes as enum value 6."""
        avp = RedirectHostUsageAVP(REDIRECT_HOST_USAGE_ALL_USER)
        ref = "000001054000000c00000006"
        self.assertEqual(avp.dump().hex(), ref)
class TestRedirectMaxCacheTimeAVP(unittest.TestCase):
    """Unit tests for the Redirect-Max-Cache-Time AVP (code 262)."""

    def test_redirect_max_cache_time_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, RedirectMaxCacheTimeAVP)

    def test_redirect_max_cache_time_avp__repr_dunder(self):
        cache_time_avp = RedirectMaxCacheTimeAVP(10799)
        self.assertEqual(cache_time_avp.__repr__(), "<Diameter AVP: 262 [Redirect-Max-Cache-Time] MANDATORY>")

    def test_redirect_max_cache_time_avp__diameter_avp_convert_classmethod(self):
        """DiameterAVP.convert() keeps every field of the typed AVP."""
        cache_time_avp = RedirectMaxCacheTimeAVP(10799)
        generic = DiameterAVP.convert(cache_time_avp)
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(generic, attr), getattr(cache_time_avp, attr))

    def test_redirect_max_cache_time_avp__1(self):
        # 10799 seconds encodes as 0x00002a2f in the 4-byte data field.
        expected = "000001064000000c00002a2f"
        self.assertEqual(RedirectMaxCacheTimeAVP(10799).dump().hex(), expected)
class TestSessionIdAVP(unittest.TestCase):
    """Unit tests for the Session-Id AVP (code 263)."""

    def test_session_id_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, SessionIdAVP)

    def test_session_id_avp__repr_dunder(self):
        session_id_avp = SessionIdAVP("es2")
        self.assertEqual(session_id_avp.__repr__(), "<Diameter AVP: 263 [Session-Id] MANDATORY>")

    def test_session_id_avp__diameter_avp_convert_classmethod(self):
        """DiameterAVP.convert() keeps every field of the typed AVP."""
        session_id_avp = SessionIdAVP("es2")
        generic = DiameterAVP.convert(session_id_avp)
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(generic, attr), getattr(session_id_avp, attr))
class TestOriginHostAVP(unittest.TestCase):
    """Unit tests for the Origin-Host AVP (code 264)."""

    def test_origin_host_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, OriginHostAVP)

    def test_origin_host_avp__repr_dunder(self):
        avp = OriginHostAVP("es2")
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 264 [Origin-Host] MANDATORY>")

    def test_origin_host_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = OriginHostAVP("es2")
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_origin_host_avp__1(self):
        """IMS-style FQDN serializes with 2 padding bytes."""
        avp = OriginHostAVP("rx.pcscf-ni0.ims.mncXXX.mccYYY.3gppnetwork.org")
        ref = "000001084000003672782e70637363662d6e69302e696d732e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000"
        self.assertEqual(avp.dump().hex(), ref)

    def test_origin_host_avp__2(self):
        """Short hostname serializes with 1 padding byte."""
        avp = OriginHostAVP("es2")
        ref = "000001084000000b65733200"
        self.assertEqual(avp.dump().hex(), ref)

    def test_origin_host_avp__3(self):
        """EPC-style FQDN serializes with 1 padding byte."""
        avp = OriginHostAVP("rjni0.epc.mncXXX.mccYYY.3gppnetwork.org")
        ref = "000001084000002f726a6e69302e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f726700"
        self.assertEqual(avp.dump().hex(), ref)

    def test_origin_host_avp__4(self):
        """5-byte hostname serializes with 3 padding bytes."""
        avp = OriginHostAVP("rjnt0")
        ref = "000001084000000d726a6e7430000000"
        self.assertEqual(avp.dump().hex(), ref)

    def test_origin_host_avp__5(self):
        """43-byte FQDN serializes with 1 padding byte."""
        avp = OriginHostAVP("esp-sev19.epc.mncXXX.mccYYY.3gppnetwork.org")
        ref = "00000108400000336573702d73657631392e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f726700"
        self.assertEqual(avp.dump().hex(), ref)
class TestSupportedVendorIdAVP(unittest.TestCase):
    """Unit tests for the Supported-Vendor-Id AVP (code 265)."""

    def test_supported_vendor_id_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, SupportedVendorIdAVP)

    def test_supported_vendor_id_avp__repr_dunder(self):
        vendor_avp = SupportedVendorIdAVP(VENDOR_ID_3GPP)
        self.assertEqual(vendor_avp.__repr__(), "<Diameter AVP: 265 [Supported-Vendor-Id] MANDATORY>")

    def test_supported_vendor_id_avp__diameter_avp_convert_classmethod(self):
        """DiameterAVP.convert() keeps every field of the typed AVP."""
        vendor_avp = SupportedVendorIdAVP(VENDOR_ID_3GPP)
        generic = DiameterAVP.convert(vendor_avp)
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(generic, attr), getattr(vendor_avp, attr))

    def test_supported_vendor_id_avp__vendor_id_3gpp(self):
        # 3GPP vendor id encodes as 0x000028af (10415).
        expected = "000001094000000c000028af"
        self.assertEqual(SupportedVendorIdAVP(VENDOR_ID_3GPP).dump().hex(), expected)
class TestVendorIdAVP(unittest.TestCase):
    """Unit tests for the Vendor-Id AVP (code 266)."""

    def test_vendor_id_avp__no_value(self):
        """Unlike most AVPs here, VendorIdAVP defaults to vendor id 0."""
        avp = VendorIdAVP()
        ref = "0000010a4000000c00000000"
        self.assertEqual(avp.dump().hex(), ref)

    def test_vendor_id_avp__repr_dunder(self):
        avp = VendorIdAVP()
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 266 [Vendor-Id] MANDATORY>")

    def test_vendor_id_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = VendorIdAVP()
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_vendor_id_avp__3gpp(self):
        """3GPP vendor id encodes as 0x000028af (10415)."""
        avp = VendorIdAVP(VENDOR_ID_3GPP)
        ref = "0000010a4000000c000028af"
        self.assertEqual(avp.dump().hex(), ref)
class TestFirmwareRevisionAVP(unittest.TestCase):
    """Unit tests for the Firmware-Revision AVP (code 267).

    Note the flags byte is 0x00 in the reference dumps and repr carries
    no MANDATORY marker — this AVP is not flagged mandatory.
    """

    def test_firmware_revision_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, FirmwareRevisionAVP)

    def test_firmware_revision_avp__repr_dunder(self):
        FIRMWARE_REVISION = convert_to_4_bytes(1)
        avp = FirmwareRevisionAVP(FIRMWARE_REVISION)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 267 [Firmware-Revision]>")

    def test_firmware_revision_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        FIRMWARE_REVISION = convert_to_4_bytes(1)
        avp = FirmwareRevisionAVP(FIRMWARE_REVISION)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_firmware_revision_avp__1(self):
        """Revision 1 encodes as 0x00000001."""
        FIRMWARE_REVISION = convert_to_4_bytes(1)
        avp = FirmwareRevisionAVP(FIRMWARE_REVISION)
        ref = "0000010b0000000c00000001"
        self.assertEqual(avp.dump().hex(), ref)

    def test_firmware_revision_avp__2(self):
        """Revision 529153 encodes as 0x00081301."""
        FIRMWARE_REVISION = convert_to_4_bytes(529153)
        avp = FirmwareRevisionAVP(FIRMWARE_REVISION)
        ref = "0000010b0000000c00081301"
        self.assertEqual(avp.dump().hex(), ref)
class TestResultCodeAVP(unittest.TestCase):
    def test_result_code_avp__no_value(self):
        """Instantiation without data raises TypeError."""
        self.assertRaises(TypeError, ResultCodeAVP)
    def test_result_code_avp__repr_dunder(self):
        """repr() exposes code 268, name and the MANDATORY flag."""
        avp = ResultCodeAVP(DIAMETER_SUCCESS)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 268 [Result-Code] MANDATORY>")
    def test_result_code_avp__diameter_avp_convert_classmethod(self):
        """convert() preserves code, flags, length, vendor_id, data, padding."""
        avp = ResultCodeAVP(DIAMETER_SUCCESS)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)
    def test_result_code_avp__diameter_success(self):
        """DIAMETER_SUCCESS encodes as 0x07d1 (2001)."""
        avp = ResultCodeAVP(DIAMETER_SUCCESS)
        ref = "0000010c4000000c000007d1"
        self.assertEqual(avp.dump().hex(), ref)
    def test_result_code_avp__diameter_command_unsupported(self):
        """DIAMETER_COMMAND_UNSUPPORTED encodes as 0x0bb9 (3001)."""
        avp = ResultCodeAVP(DIAMETER_COMMAND_UNSUPPORTED)
        ref = "0000010c4000000c00000bb9"
        self.assertEqual(avp.dump().hex(), ref)
    def test_result_code_avp__diameter_unable_to_deliver(self):
        """DIAMETER_UNABLE_TO_DELIVER encodes as 0x0bba (3002)."""
        avp = ResultCodeAVP(DIAMETER_UNABLE_TO_DELIVER)
        ref = "0000010c4000000c00000bba"
        self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_realm_not_served(self):
avp = ResultCodeAVP(DIAMETER_REALM_NOT_SERVED)
ref = "0000010c4000000c00000bbb"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_too_busy(self):
avp = ResultCodeAVP(DIAMETER_TOO_BUSY)
ref = "0000010c4000000c00000bbc"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_loop_detected(self):
avp = ResultCodeAVP(DIAMETER_LOOP_DETECTED)
ref = "0000010c4000000c00000bbd"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_redirect_indication(self):
avp = ResultCodeAVP(DIAMETER_REDIRECT_INDICATION)
ref = "0000010c4000000c00000bbe"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_application_unsupported(self):
avp = ResultCodeAVP(DIAMETER_APPLICATION_UNSUPPORTED)
ref = "0000010c4000000c00000bbf"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_invalid_hdr_bits(self):
avp = ResultCodeAVP(DIAMETER_INVALID_HDR_BITS)
ref = "0000010c4000000c00000bc0"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_invalid_avp_bits(self):
avp = ResultCodeAVP(DIAMETER_INVALID_AVP_BITS)
ref = "0000010c4000000c00000bc1"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_unknown_peer(self):
avp = ResultCodeAVP(DIAMETER_UNKNOWN_PEER)
ref = "0000010c4000000c00000bc2"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_authentication_rejected(self):
avp = ResultCodeAVP(DIAMETER_AUTHENTICATION_REJECTED)
ref = "0000010c4000000c00000fa1"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_out_of_space(self):
avp = ResultCodeAVP(DIAMETER_OUT_OF_SPACE)
ref = "0000010c4000000c00000fa2"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_election_lost(self):
avp = ResultCodeAVP(DIAMETER_ELECTION_LOST)
ref = "0000010c4000000c00000fa3"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_avp_unsupported(self):
avp = ResultCodeAVP(DIAMETER_AVP_UNSUPPORTED)
ref = "0000010c4000000c00001389"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_unknown_session_id(self):
avp = ResultCodeAVP(DIAMETER_UNKNOWN_SESSION_ID)
ref = "0000010c4000000c0000138a"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_authorization_rejected(self):
avp = ResultCodeAVP(DIAMETER_AUTHORIZATION_REJECTED)
ref = "0000010c4000000c0000138b"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_invalid_avp_value(self):
avp = ResultCodeAVP(DIAMETER_INVALID_AVP_VALUE)
ref = "0000010c4000000c0000138c"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_missing_avp(self):
avp = ResultCodeAVP(DIAMETER_MISSING_AVP)
ref = "0000010c4000000c0000138d"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_resources_exceeded(self):
avp = ResultCodeAVP(DIAMETER_RESOURCES_EXCEEDED)
ref = "0000010c4000000c0000138e"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_contradicting_avps(self):
avp = ResultCodeAVP(DIAMETER_CONTRADICTING_AVPS)
ref = "0000010c4000000c0000138f"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_avp_not_allowed(self):
avp = ResultCodeAVP(DIAMETER_AVP_NOT_ALLOWED)
ref = "0000010c4000000c00001390"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_avp_occurs_too_many_times(self):
avp = ResultCodeAVP(DIAMETER_AVP_OCCURS_TOO_MANY_TIMES)
ref = "0000010c4000000c00001391"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_no_common_application(self):
avp = ResultCodeAVP(DIAMETER_NO_COMMON_APPLICATION)
ref = "0000010c4000000c00001392"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_unsupported_version(self):
avp = ResultCodeAVP(DIAMETER_UNSUPPORTED_VERSION)
ref = "0000010c4000000c00001393"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_unable_to_comply(self):
avp = ResultCodeAVP(DIAMETER_UNABLE_TO_COMPLY)
ref = "0000010c4000000c00001394"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_invalid_bit_in_header(self):
avp = ResultCodeAVP(DIAMETER_INVALID_BIT_IN_HEADER)
ref = "0000010c4000000c00001395"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_invalid_avp_length(self):
avp = ResultCodeAVP(DIAMETER_INVALID_AVP_LENGTH)
ref = "0000010c4000000c00001396"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_invalid_message_length(self):
avp = ResultCodeAVP(DIAMETER_INVALID_MESSAGE_LENGTH)
ref = "0000010c4000000c00001397"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_invalid_avp_bit_combo(self):
avp = ResultCodeAVP(DIAMETER_INVALID_AVP_BIT_COMBO)
ref = "0000010c4000000c00001398"
self.assertEqual(avp.dump().hex(), ref)
def test_result_code_avp__diameter_non_common_security(self):
avp = ResultCodeAVP(DIAMETER_NO_COMMON_SECURITY)
ref = "0000010c4000000c00001399"
self.assertEqual(avp.dump().hex(), ref)
class TestProductNameAVP(unittest.TestCase):
def test_product_name_avp__default(self):
avp = ProductNameAVP()
ref = "0000010d0000001c507974686f6e2062726f6d656c69612076302e31"
self.assertEqual(avp.dump().hex(), ref)
def test_product_name_avp__repr_dunder(self):
avp = ProductNameAVP()
self.assertEqual(avp.__repr__(), "<Diameter AVP: 269 [Product-Name]>")
def test_product_name_avp__diameter_avp_convert_classmethod(self):
avp = ProductNameAVP()
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_product_name_avp__1(self):
avp = ProductNameAVP("Entitlement Server")
ref = "0000010d0000001a456e7469746c656d656e74205365727665720000"
self.assertEqual(avp.dump().hex(), ref)
class TestSessionBindingAVP(unittest.TestCase):
def test_session_binding_avp__no_value(self):
self.assertRaises(TypeError, SessionBindingAVP)
def test_session_binding_avp__repr_dunder(self):
avp = SessionBindingAVP(1)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 270 [Session-Binding] MANDATORY>")
def test_session_binding_avp__diameter_avp_convert_classmethod(self):
avp = SessionBindingAVP(2)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_session_binding_avp__re_auth_bit(self):
avp = SessionBindingAVP(1)
ref = "0000010e4000000c00000001"
self.assertEqual(avp.dump().hex(), ref)
def test_session_binding_avp__str_bit(self):
avp = SessionBindingAVP(2)
ref = "0000010e4000000c00000002"
self.assertEqual(avp.dump().hex(), ref)
def test_session_binding_avp__accounting_bit(self):
avp = SessionBindingAVP(4)
ref = "0000010e4000000c00000004"
self.assertEqual(avp.dump().hex(), ref)
class TestSessionServerFailoverAVP(unittest.TestCase):
def test_session_server_failover_avp__no_value(self):
avp = SessionServerFailoverAVP()
ref = "0000010f4000000c00000000"
self.assertEqual(avp.dump().hex(), ref)
def test_session_server_failover_avp__repr_dunder(self):
avp = SessionServerFailoverAVP()
self.assertEqual(avp.__repr__(), "<Diameter AVP: 271 [Session-Server-Failover] MANDATORY>")
def test_session_server_failover_avp__diameter_avp_convert_classmethod(self):
avp = SessionServerFailoverAVP()
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_session_server_failover_avp__refuse_service(self):
avp = SessionServerFailoverAVP(SESSION_SERVER_FAILOVER_REFUSE_SERVICE)
ref = "0000010f4000000c00000000"
self.assertEqual(avp.dump().hex(), ref)
def test_session_server_failover_avp__try_again(self):
avp = SessionServerFailoverAVP(SESSION_SERVER_FAILOVER_TRY_AGAIN)
ref = "0000010f4000000c00000001"
self.assertEqual(avp.dump().hex(), ref)
def test_session_server_failover_avp__allow_service(self):
avp = SessionServerFailoverAVP(SESSION_SERVER_FAILOVER_ALLOW_SERVICE)
ref = "0000010f4000000c00000002"
self.assertEqual(avp.dump().hex(), ref)
def test_session_server_failover_avp__try_again_allow_service(self):
avp = SessionServerFailoverAVP(SESSION_SERVER_FAILOVER_TRY_AGAIN_ALLOW_SERVICE)
ref = "0000010f4000000c00000003"
self.assertEqual(avp.dump().hex(), ref)
class TestMultiRoundTimeOutAVP(unittest.TestCase):
def test_multi_round_time_out_avp__no_value(self):
self.assertRaises(TypeError, MultiRoundTimeOutAVP)
def test_multi_round_time_out_avp__repr_dunder(self):
avp = MultiRoundTimeOutAVP(60)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 272 [Multi-Round-Time-Out] MANDATORY>")
def test_multi_round_time_out_avp__diameter_avp_convert_classmethod(self):
avp = MultiRoundTimeOutAVP(3600)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_multi_round_time_out_avp__1(self):
avp = MultiRoundTimeOutAVP(60)
ref = "000001104000000c0000003c"
self.assertEqual(avp.dump().hex(), ref)
def test_multi_round_time_out_avp__2(self):
avp = MultiRoundTimeOutAVP(3600)
ref = "000001104000000c00000e10"
self.assertEqual(avp.dump().hex(), ref)
def test_multi_round_time_out_avp__3(self):
avp = MultiRoundTimeOutAVP(86400)
ref = "000001104000000c00015180"
self.assertEqual(avp.dump().hex(), ref)
class TestDisconnectCauseAVP(unittest.TestCase):
def test_disconnect_cause_avp__no_value(self):
avp = DisconnectCauseAVP()
ref = "000001114000000c00000000"
self.assertEqual(avp.dump().hex(), ref)
def test_disconnect_cause_avp__repr_dunder(self):
avp = DisconnectCauseAVP()
self.assertEqual(avp.__repr__(), "<Diameter AVP: 273 [Disconnect-Cause] MANDATORY>")
def test_disconnect_cause_avp__diameter_avp_convert_classmethod(self):
avp = DisconnectCauseAVP()
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_disconnect_cause_avp__rebooting(self):
avp = DisconnectCauseAVP(DISCONNECT_CAUSE_REBOOTING)
ref = "000001114000000c00000000"
self.assertEqual(avp.dump().hex(), ref)
def test_disconnect_cause_avp__busy(self):
avp = DisconnectCauseAVP(DISCONNECT_CAUSE_BUSY)
ref = "000001114000000c00000001"
self.assertEqual(avp.dump().hex(), ref)
def test_disconnect_cause_avp__do_not_want_to_talk_to_you(self):
avp = DisconnectCauseAVP(DISCONNECT_CAUSE_DO_NOT_WANT_TO_TALK_TO_YOU)
ref = "000001114000000c00000002"
self.assertEqual(avp.dump().hex(), ref)
class TestAuthRequestTypeAVP(unittest.TestCase):
def test_auth_request_type_avp__no_value(self):
self.assertRaises(TypeError, AuthRequestTypeAVP)
def test_auth_request_type_avp__repr_dunder(self):
avp = AuthRequestTypeAVP(AUTH_REQUEST_TYPE_AUTHENTICATE_ONLY)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 274 [Auth-Request-Type] MANDATORY>")
def test_auth_request_type_avp__diameter_avp_convert_classmethod(self):
avp = AuthRequestTypeAVP(AUTH_REQUEST_TYPE_AUTHENTICATE_ONLY)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_auth_request_type_avp__authenticate_only(self):
avp = AuthRequestTypeAVP(AUTH_REQUEST_TYPE_AUTHENTICATE_ONLY)
ref = "000001124000000c00000001"
self.assertEqual(avp.dump().hex(), ref)
def test_auth_request_type_avp__authorize_only(self):
avp = AuthRequestTypeAVP(AUTH_REQUEST_TYPE_AUTHORIZE_ONLY)
ref = "000001124000000c00000002"
self.assertEqual(avp.dump().hex(), ref)
def test_auth_request_type_avp__authorize_authenticate(self):
avp = AuthRequestTypeAVP(AUTH_REQUEST_TYPE_AUTHORIZE_AUTHENTICATE)
ref = "000001124000000c00000003"
self.assertEqual(avp.dump().hex(), ref)
class TestAuthGracePeriodAVP(unittest.TestCase):
def test_auth_grace_period_avp__no_value(self):
self.assertRaises(TypeError, AuthGracePeriodAVP)
def test_auth_grace_period_avp__repr_dunder(self):
avp = AuthGracePeriodAVP(60)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 276 [Auth-Grace-Period] MANDATORY>")
def test_auth_grace_period_avp__diameter_avp_convert_classmethod(self):
avp = AuthGracePeriodAVP(3600)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_auth_grace_period_avp__1(self):
avp = AuthGracePeriodAVP(60)
ref = "000001144000000c0000003c"
self.assertEqual(avp.dump().hex(), ref)
def test_auth_grace_period_avp__2(self):
avp = AuthGracePeriodAVP(3600)
ref = "000001144000000c00000e10"
self.assertEqual(avp.dump().hex(), ref)
def test_auth_grace_period_avp__3(self):
avp = AuthGracePeriodAVP(86400)
ref = "000001144000000c00015180"
self.assertEqual(avp.dump().hex(), ref)
class TestAuthSessionStateAVP(unittest.TestCase):
def test_auth_session_state_avp__no_value(self):
self.assertRaises(TypeError, AuthSessionStateAVP)
def test_auth_session_state_avp__repr_dunder(self):
avp = AuthSessionStateAVP(STATE_MAINTAINED)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 277 [Auth-Session-State] MANDATORY>")
def test_auth_session_state_avp__diameter_avp_convert_classmethod(self):
avp = AuthSessionStateAVP(STATE_MAINTAINED)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_auth_session_state_avp__state_maintained(self):
avp = AuthSessionStateAVP(STATE_MAINTAINED)
ref = "000001154000000c00000000"
self.assertEqual(avp.dump().hex(), ref)
def test_auth_session_state_avp__no_state_maintained(self):
avp = AuthSessionStateAVP(NO_STATE_MAINTAINED)
ref = "000001154000000c00000001"
self.assertEqual(avp.dump().hex(), ref)
class TestOriginStateIdAVP(unittest.TestCase):
def test_origin_state_id_avp__no_value(self):
self.assertRaises(TypeError, OriginStateIdAVP)
def test_origin_state_id_avp__repr_dunder(self):
avp = OriginStateIdAVP(1524733202)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 278 [Origin-State-Id] MANDATORY>")
def test_origin_state_id_avp__diameter_avp_convert_classmethod(self):
avp = OriginStateIdAVP(1524733202)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_origin_state_id_avp__1(self):
avp = OriginStateIdAVP(1524733202)
ref = "000001164000000c5ae19512"
self.assertEqual(avp.dump().hex(), ref)
class TestFailedAvpAVP(unittest.TestCase):
def test_failed_avp_avp__no_value(self):
self.assertRaises(TypeError, FailedAvpAVP)
def test_failed_avp_avp__repr_dunder(self):
route_record1 = RouteRecordAVP("hssrj1.epc.mncXXX.mccYYY.3gppnetwork.org")
route_record2 = RouteRecordAVP("drasm01b.epc.mncXXX.mccYYY.3gppnetwork.org")
avps = [route_record1, route_record2]
avp = FailedAvpAVP(avps)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 279 [Failed-Avp] MANDATORY>")
def test_failed_avp_avp__diameter_avp_convert_classmethod(self):
route_record1 = RouteRecordAVP("hssrj1.epc.mncXXX.mccYYY.3gppnetwork.org")
route_record2 = RouteRecordAVP("drasm01b.epc.mncXXX.mccYYY.3gppnetwork.org")
avps = [route_record1, route_record2]
avp = FailedAvpAVP(avps)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_failed_avp_avp__1(self):
ref = "00000117400000680000011a4000002f68656272612e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000011a4000002f656c64696e2e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f726700"
route_record1 = RouteRecordAVP("hebra.epc.mncXXX.mccYYY.3gppnetwork.org")
route_record2 = RouteRecordAVP("eldin.epc.mncXXX.mccYYY.3gppnetwork.org")
avps = [route_record1, route_record2]
avp = FailedAvpAVP(avps)
self.maxDiff = None
self.assertEqual(avp.dump().hex(), ref)
def test_failed_avp_avp__2(self):
ref = "00000117400000380000011a40000030687373736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267"
route_record1 = RouteRecordAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
avps = [route_record1]
avp = FailedAvpAVP(avps)
self.assertEqual(avp.dump().hex(), ref)
def test_failed_avp_avp__3(self):
ref = "000001174000006c0000011a400000306573702d6d642e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000011a40000032746162616e7468612e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000"
route_record1 = RouteRecordAVP("esp-md.epc.mncXXX.mccYYY.3gppnetwork.org")
route_record2 = RouteRecordAVP("tabantha.epc.mncXXX.mccYYY.3gppnetwork.org")
avps = [route_record1, route_record2]
avp = FailedAvpAVP(avps)
self.maxDiff = None
self.assertEqual(avp.dump().hex(), ref)
class TestProxyHostAVP(unittest.TestCase):
def test_proxy_host_avp__no_value(self):
self.assertRaises(TypeError, ProxyHostAVP)
def test_proxy_host_avp__repr_dunder(self):
avp = ProxyHostAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertEqual(avp.__repr__(), "<Diameter AVP: 280 [Proxy-Host] MANDATORY>")
def test_proxy_host_avp__diameter_avp_convert_classmethod(self):
avp = ProxyHostAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_proxy_host_avp__1(self):
avp = ProxyHostAVP("hsssm92.epc.mncXXX.mccYYY.3gppnetwork.org")
ref = "0000011840000031687373736d39322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000"
self.assertEqual(avp.dump().hex(), ref)
def test_proxy_host_avp__2(self):
avp = ProxyHostAVP("pgwrj03.epc.mncXXX.mccYYY.3gppnetwork.org")
ref = "0000011840000031706777726a30332e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000"
self.assertEqual(avp.dump().hex(), ref)
class TestErrorMessageAVP(unittest.TestCase):
def test_error_message_avp__no_value(self):
self.assertRaises(TypeError, ErrorMessageAVP)
def test_error_message_avp__repr_dunder(self):
avp = ErrorMessageAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertEqual(avp.__repr__(), "<Diameter AVP: 281 [Error-Message]>")
def test_error_message_avp__diameter_avp_convert_classmethod(self):
avp = ErrorMessageAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_error_message_avp__1(self):
avp = ErrorMessageAVP("DRL-ERR-3002-304:.")
ref = "000001190000001a44524c2d4552522d333030322d3330343a2e0000"
self.assertEqual(avp.dump().hex(), ref)
class TestRouteRecordAVP(unittest.TestCase):
def test_route_record_avp__no_value(self):
self.assertRaises(TypeError, RouteRecordAVP)
def test_route_record_avp__repr_dunder(self):
avp = RouteRecordAVP("pcrfrj1.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertEqual(avp.__repr__(), "<Diameter AVP: 282 [Route-Record] MANDATORY>")
def test_route_record_avp__diameter_avp_convert_classmethod(self):
avp = RouteRecordAVP("scscfsm2.epc.mncXXX.mccYYY.3gppnetwork.org")
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_route_record_avp__1(self):
avp = RouteRecordAVP("pcrfrj1.epc.mncXXX.mccYYY.3gppnetwork.org")
ref = "0000011a4000003170637266726a312e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000"
self.assertEqual(avp.dump().hex(), ref)
def test_route_record_avp__2(self):
avp = RouteRecordAVP("scscfsm2.epc.mncXXX.mccYYY.3gppnetwork.org")
ref = "0000011a400000327363736366736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000"
self.assertEqual(avp.dump().hex(), ref)
class TestDestinationRealmAVP(unittest.TestCase):
def test_destination_realm_avp__no_value(self):
self.assertRaises(TypeError, DestinationRealmAVP)
def test_destination_realm_avp__repr_dunder(self):
avp = DestinationRealmAVP("sevilla.epc.mncXXX.mccYYY.3gppnetwork.org")
self.assertEqual(avp.__repr__(), "<Diameter AVP: 283 [Destination-Realm] MANDATORY>")
def test_destination_realm_avp__diameter_avp_convert_classmethod(self):
avp = DestinationRealmAVP("epc.mncXXX.mccYYY.3gppnetwork.org")
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_destination_realm_avp__1(self):
avp = DestinationRealmAVP("epc.mncXXX.mccYYY.3gppnetwork.org")
ref = "0000011b400000296570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000"
self.assertEqual(avp.dump().hex(), ref)
def test_destination_realm_avp__2(self):
avp = DestinationRealmAVP("gxserver.operator.com")
ref = "0000011b4000001d67787365727665722e6f70657261746f722e636f6d000000"
self.assertEqual(avp.dump().hex(), ref)
class TestProxyInfoAVP(unittest.TestCase):
def test_proxy_info_avp__no_value(self):
self.assertRaises(TypeError, ProxyInfoAVP)
def test_proxy_info_avp__repr_dunder(self):
proxy_host_avp = ProxyHostAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
proxy_state_avp = ProxyStateAVP("CLOSED")
avps = [proxy_host_avp, proxy_state_avp]
avp = ProxyInfoAVP(avps)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 284 [Proxy-Info] MANDATORY>")
def test_proxy_info_avp__diameter_avp_convert_classmethod(self):
proxy_host_avp = ProxyHostAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
proxy_state_avp = ProxyStateAVP("CLOSED")
avps = [proxy_host_avp, proxy_state_avp]
avp = ProxyInfoAVP(avps)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_proxy_info_avp__1(self):
ref = "0000011c400000480000011840000030687373736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000214000000e434c4f5345440000"
proxy_host_avp = ProxyHostAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
proxy_state_avp = ProxyStateAVP("CLOSED")
avps = [proxy_host_avp, proxy_state_avp]
avp = ProxyInfoAVP(avps)
self.assertEqual(avp.dump().hex(), ref)
def test_proxy_info_avp__2(self):
ref = "0000011c40000048000001184000002d726a342e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000000000214000000e4f50454e45440000"
proxy_host_avp = ProxyHostAVP("rj4.epc.mncXXX.mccYYY.3gppnetwork.org")
proxy_state_avp = ProxyStateAVP("OPENED")
avps = [proxy_host_avp, proxy_state_avp]
avp = ProxyInfoAVP(avps)
self.assertEqual(avp.dump().hex(), ref)
class TestReAuthRequestTypeAVP(unittest.TestCase):
def test_re_auth_request_type_avp__no_value(self):
self.assertRaises(TypeError, ReAuthRequestTypeAVP)
def test_re_auth_request_type_avp__repr_dunder(self):
avp = ReAuthRequestTypeAVP(RE_AUTH_REQUEST_TYPE_AUTHORIZE_ONLY)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 285 [Re-Auth-Request-Type] MANDATORY>")
def test_re_auth_request_type_avp__diameter_avp_convert_classmethod(self):
avp = ReAuthRequestTypeAVP(RE_AUTH_REQUEST_TYPE_AUTHORIZE_ONLY)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_re_auth_request_type_avp__authorize_only(self):
avp = ReAuthRequestTypeAVP(RE_AUTH_REQUEST_TYPE_AUTHORIZE_ONLY)
ref = "0000011d4000000c00000000"
self.assertEqual(avp.dump().hex(), ref)
def test_re_auth_request_type_avp__authorize_authenticate(self):
avp = ReAuthRequestTypeAVP(RE_AUTH_REQUEST_TYPE_AUTHORIZE_AUTHENTICATE)
ref = "0000011d4000000c00000001"
self.assertEqual(avp.dump().hex(), ref)
class TestAccountingSubSessionIdAVP(unittest.TestCase):
def test_accounting_sub_session_id_avp__no_value(self):
self.assertRaises(TypeError, AccountingSubSessionIdAVP)
def test_accounting_sub_session_id_avp__repr_dunder(self):
avp = AccountingSubSessionIdAVP(6733)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 287 [Accounting-Sub-Session-Id] MANDATORY>")
def test_accounting_sub_session_id_avp__diameter_avp_convert_classmethod(self):
avp = AccountingSubSessionIdAVP(3588)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_accounting_sub_session_id_avp__1(self):
avp = AccountingSubSessionIdAVP(42)
ref = "0000011f40000010000000000000002a"
self.assertEqual(avp.dump().hex(), ref)
def test_accounting_sub_session_id_avp__2(self):
avp = AccountingSubSessionIdAVP(92049311)
ref = "0000011f4000001000000000057c8f9f"
self.assertEqual(avp.dump().hex(), ref)
class TestAuthorizationLifetimeAVP(unittest.TestCase):
def test_authorization_lifetime_avp__no_value(self):
self.assertRaises(TypeError, AuthorizationLifetimeAVP)
def test_authorization_lifetime_avp__repr_dunder(self):
avp = AuthorizationLifetimeAVP(60)
self.assertEqual(avp.__repr__(), "<Diameter AVP: 291 [Authorization-Lifetime] MANDATORY>")
def test_authorization_lifetime_avp__diameter_avp_convert_classmethod(self):
avp = AuthorizationLifetimeAVP(3600)
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_authorization_lifetime_avp__1(self):
avp = AuthorizationLifetimeAVP(60)
ref = "000001234000000c0000003c"
self.assertEqual(avp.dump().hex(), ref)
def test_authorization_lifetime_avp__2(self):
avp = AuthorizationLifetimeAVP(3600)
ref = "000001234000000c00000e10"
self.assertEqual(avp.dump().hex(), ref)
def test_authorization_lifetime_avp__3(self):
avp = AuthorizationLifetimeAVP(86400)
ref = "000001234000000c00015180"
self.assertEqual(avp.dump().hex(), ref)
class TestRedirectHostAVP(unittest.TestCase):
def test_redirect_host_avp__no_value(self):
self.assertRaises(TypeError, RedirectHostAVP)
def test_redirect_host_avp__repr_dunder(self):
avp = RedirectHostAVP("aaa://host.example.com;transport=tcp")
self.assertEqual(avp.__repr__(), "<Diameter AVP: 292 [Redirect-Host] MANDATORY>")
def test_redirect_host_avp__diameter_avp_convert_classmethod(self):
avp = RedirectHostAVP("aaa://host.example.com;transport=tcp")
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_redirect_host_avp__1(self):
avp = RedirectHostAVP("aaa://host.example.com;transport=tcp")
ref = "000001244000002c6161613a2f2f686f73742e6578616d706c652e636f6d3b7472616e73706f72743d746370"
self.assertEqual(avp.dump().hex(), ref)
def test_redirect_host_avp__2(self):
avp = RedirectHostAVP("aaas://host.example.com:6666;transport=tcp")
ref = "0000012440000032616161733a2f2f686f73742e6578616d706c652e636f6d3a363636363b7472616e73706f72743d7463700000"
self.assertEqual(avp.dump().hex(), ref)
def test_redirect_host_avp__3(self):
avp = RedirectHostAVP("aaa://host.example.com;protocol=diameter")
ref = "00000124400000306161613a2f2f686f73742e6578616d706c652e636f6d3b70726f746f636f6c3d6469616d65746572"
self.assertEqual(avp.dump().hex(), ref)
def test_redirect_host_avp__4(self):
avp = RedirectHostAVP("aaa://host.example.com:6666;protocol=diameter")
ref = "00000124400000356161613a2f2f686f73742e6578616d706c652e636f6d3a363636363b70726f746f636f6c3d6469616d65746572000000"
self.assertEqual(avp.dump().hex(), ref)
def test_redirect_host_avp__5(self):
avp = RedirectHostAVP("aaa://host.example.com:6666;transport=tcp;protocol=diameter")
ref = "00000124400000436161613a2f2f686f73742e6578616d706c652e636f6d3a363636363b7472616e73706f72743d7463703b70726f746f636f6c3d6469616d6574657200"
self.assertEqual(avp.dump().hex(), ref)
def test_redirect_host_avp__6(self):
avp = RedirectHostAVP("aaa://host.example.com:1813;transport=udp;protocol=radius")
ref = "00000124400000416161613a2f2f686f73742e6578616d706c652e636f6d3a313831333b7472616e73706f72743d7564703b70726f746f636f6c3d726164697573000000"
self.assertEqual(avp.dump().hex(), ref)
def test_redirect_host_avp__7(self):
avp = RedirectHostAVP("aaas://host.example.com:1024;transport=tcp;protocol=diameter")
ref = "0000012440000044616161733a2f2f686f73742e6578616d706c652e636f6d3a313032343b7472616e73706f72743d7463703b70726f746f636f6c3d6469616d65746572"
self.assertEqual(avp.dump().hex(), ref)
class TestDestinationHostAVP(unittest.TestCase):
def test_destination_host_avp__no_value(self):
self.assertRaises(TypeError, DestinationHostAVP)
def test_destination_host_avp__repr_dunder(self):
avp = DestinationHostAVP("encvltapp1-ne-rx")
self.assertEqual(avp.__repr__(), "<Diameter AVP: 293 [Destination-Host] MANDATORY>")
def test_destination_host_avp__diameter_avp_convert_classmethod(self):
avp = DestinationHostAVP("encvltapp1-ne-rx")
custom = DiameterAVP.convert(avp)
self.assertEqual(custom.code, avp.code)
self.assertEqual(custom.flags, avp.flags)
self.assertEqual(custom.length, avp.length)
self.assertEqual(custom.vendor_id, avp.vendor_id)
self.assertEqual(custom.data, avp.data)
self.assertEqual(custom._padding, avp._padding)
def test_destination_host_avp__1(self):
avp = DestinationHostAVP("encvltapp1-ne-rx")
ref = "0000012540000018656e63766c74617070312d6e652d7278"
self.assertEqual(avp.dump().hex(), ref)
def test_destination_host_avp__2(self):
avp = DestinationHostAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
ref = "0000012540000030687373736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267"
self.assertEqual(avp.dump().hex(), ref)
class TestErrorReportingHostAVP(unittest.TestCase):
    """Unit tests for the Error-Reporting-Host AVP (code 294)."""

    def test_error_reporting_host_avp__no_value(self):
        # The constructor requires an explicit host identity.
        with self.assertRaises(TypeError):
            ErrorReportingHostAVP()

    def test_error_reporting_host_avp__repr_dunder(self):
        avp = ErrorReportingHostAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
        self.assertEqual(repr(avp), "<Diameter AVP: 294 [Error-Reporting-Host]>")

    def test_error_reporting_host_avp__diameter_avp_convert_classmethod(self):
        avp = ErrorReportingHostAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
        custom = DiameterAVP.convert(avp)
        # The generic DiameterAVP must mirror every field of the typed AVP.
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(custom, attr), getattr(avp, attr))

    def test_error_reporting_host_avp__1(self):
        expected = "0000012600000030687373736d322e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267"
        avp = ErrorReportingHostAVP("hsssm2.epc.mncXXX.mccYYY.3gppnetwork.org")
        self.assertEqual(avp.dump().hex(), expected)

    def test_error_reporting_host_avp__2(self):
        expected = "000001260000002d726a342e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000"
        avp = ErrorReportingHostAVP("rj4.epc.mncXXX.mccYYY.3gppnetwork.org")
        self.assertEqual(avp.dump().hex(), expected)
class TestTerminationCauseAVP(unittest.TestCase):
    """Unit tests for the Termination-Cause AVP (code 295, RFC 6733).

    Fix: the repr and convert test methods were named after the
    Error-Reporting-Host AVP (copy-paste from the previous test class);
    they are renamed to match this class.
    """

    def test_termination_cause_avp__no_value(self):
        # The constructor requires an explicit cause value.
        self.assertRaises(TypeError, TerminationCauseAVP)

    def test_termination_cause_avp__repr_dunder(self):
        avp = TerminationCauseAVP(DIAMETER_LOGOUT)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 295 [Termination-Cause] MANDATORY>")

    def test_termination_cause_avp__diameter_avp_convert_classmethod(self):
        avp = TerminationCauseAVP(DIAMETER_LOGOUT)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    # One dump test per RFC 6733 termination cause value (1..8).
    def test_termination_cause_avp__diameter_logout(self):
        avp = TerminationCauseAVP(DIAMETER_LOGOUT)
        ref = "000001274000000c00000001"
        self.assertEqual(avp.dump().hex(), ref)

    def test_termination_cause_avp__diameter_service_not_provided(self):
        avp = TerminationCauseAVP(DIAMETER_SERVICE_NOT_PROVIDED)
        ref = "000001274000000c00000002"
        self.assertEqual(avp.dump().hex(), ref)

    def test_termination_cause_avp__diameter_bad_answer(self):
        avp = TerminationCauseAVP(DIAMETER_BAD_ANSWER)
        ref = "000001274000000c00000003"
        self.assertEqual(avp.dump().hex(), ref)

    def test_termination_cause_avp__diameter_administrative(self):
        avp = TerminationCauseAVP(DIAMETER_ADMINISTRATIVE)
        ref = "000001274000000c00000004"
        self.assertEqual(avp.dump().hex(), ref)

    def test_termination_cause_avp__diameter_link_broken(self):
        avp = TerminationCauseAVP(DIAMETER_LINK_BROKEN)
        ref = "000001274000000c00000005"
        self.assertEqual(avp.dump().hex(), ref)

    def test_termination_cause_avp__diameter_auth_expired(self):
        avp = TerminationCauseAVP(DIAMETER_AUTH_EXPIRED)
        ref = "000001274000000c00000006"
        self.assertEqual(avp.dump().hex(), ref)

    def test_termination_cause_avp__diameter_user_moved(self):
        avp = TerminationCauseAVP(DIAMETER_USER_MOVED)
        ref = "000001274000000c00000007"
        self.assertEqual(avp.dump().hex(), ref)

    def test_termination_cause_avp__diameter_session_timeout(self):
        avp = TerminationCauseAVP(DIAMETER_SESSION_TIMEOUT)
        ref = "000001274000000c00000008"
        self.assertEqual(avp.dump().hex(), ref)
class TestOriginRealmAVP(unittest.TestCase):
    """Unit tests for the Origin-Realm AVP (code 296)."""

    def test_origin_realm_avp__no_value(self):
        # The constructor requires an explicit realm.
        with self.assertRaises(TypeError):
            OriginRealmAVP()

    def test_origin_realm_avp__repr_dunder(self):
        avp = OriginRealmAVP("ims.mncXXX.mccYYY.3gppnetwork.org")
        self.assertEqual(repr(avp), "<Diameter AVP: 296 [Origin-Realm] MANDATORY>")

    def test_origin_realm_avp__diameter_avp_convert_classmethod(self):
        avp = OriginRealmAVP("ims.mncXXX.mccYYY.3gppnetwork.org")
        custom = DiameterAVP.convert(avp)
        # The generic DiameterAVP must mirror every field of the typed AVP.
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(custom, attr), getattr(avp, attr))

    def test_origin_realm_avp__1(self):
        expected = "0000012840000029696d732e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000"
        avp = OriginRealmAVP("ims.mncXXX.mccYYY.3gppnetwork.org")
        self.assertEqual(avp.dump().hex(), expected)

    def test_origin_realm_avp__2(self):
        expected = "000001284000000b65736d00"
        avp = OriginRealmAVP("esm")
        self.assertEqual(avp.dump().hex(), expected)
class TestExperimentalResultAVP(unittest.TestCase):
    """Unit tests for the Experimental-Result grouped AVP (code 297)."""

    def _grouped(self, result_code):
        """Build an Experimental-Result AVP for the given 3GPP result code."""
        return ExperimentalResultAVP([
            ExperimentalResultCodeAVP(result_code),
            VendorIdAVP(VENDOR_ID_3GPP),
        ])

    def test_experimental_result_avp__no_value(self):
        with self.assertRaises(TypeError):
            ExperimentalResultAVP()

    def test_experimental_result_avp__repr_dunder(self):
        avp = self._grouped(DIAMETER_ERROR_SERVING_NODE_FEATURE_UNSUPPORTED)
        self.assertEqual(repr(avp), "<Diameter AVP: 297 [Experimental-Result]>")

    def test_experimental_result_avp__diameter_avp_convert_classmethod(self):
        avp = self._grouped(DIAMETER_ERROR_SERVING_NODE_FEATURE_UNSUPPORTED)
        custom = DiameterAVP.convert(avp)
        # The generic DiameterAVP must mirror every field of the typed AVP.
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(custom, attr), getattr(avp, attr))

    def test_experimental_result_avp__serving_node_feature_unsupported(self):
        expected = "00000129000000200000012a4000000c000013940000010a4000000c000028af"
        avp = self._grouped(DIAMETER_ERROR_SERVING_NODE_FEATURE_UNSUPPORTED)
        self.assertEqual(avp.dump().hex(), expected)

    def test_experimental_result_avp__user_unknown(self):
        expected = "00000129000000200000012a4000000c000013890000010a4000000c000028af"
        avp = self._grouped(DIAMETER_ERROR_USER_UNKNOWN)
        self.assertEqual(avp.dump().hex(), expected)
class TestExperimentalResultCodeAVP(unittest.TestCase):
    """Unit tests for the Experimental-Result-Code AVP (code 298).

    Each dump test pins the wire encoding of one 3GPP experimental
    result code (3GPP TS 29.229 / TS 29.272 / TS 29.273).

    Fix: test_result_code_avp__diameter_success_server_name_not_stored
    previously reused the DIAMETER_FIRST_REGISTRATION reference
    (...000007d1); per TS 29.229 DIAMETER_SUCCESS_SERVER_NAME_NOT_STORED
    is 2004 (0x7d4).
    """

    def test_experimental_result_code_avp__no_value(self):
        # The constructor requires an explicit result-code value.
        self.assertRaises(TypeError, ExperimentalResultCodeAVP)

    def test_result_code_avp__repr_dunder(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_FIRST_REGISTRATION)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 298 [Experimental-Result-Code] MANDATORY>")

    def test_result_code_avp__diameter_avp_convert_classmethod(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_FIRST_REGISTRATION)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_result_code_avp__diameter_first_registration(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_FIRST_REGISTRATION)
        ref = "0000012a4000000c000007d1"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_subsequent_registration(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_SUBSEQUENT_REGISTRATION)
        ref = "0000012a4000000c000007d2"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_unregistered_service(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_UNREGISTERED_SERVICE)
        ref = "0000012a4000000c000007d3"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_success_server_name_not_stored(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_SUCCESS_SERVER_NAME_NOT_STORED)
        # 2004 (0x7d4) per 3GPP TS 29.229; was a copy of the
        # DIAMETER_FIRST_REGISTRATION reference (0x7d1).
        ref = "0000012a4000000c000007d4"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_user_data_not_available(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_USER_DATA_NOT_AVAILABLE)
        ref = "0000012a4000000c00001004"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_prior_update_in_progress(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_PRIOR_UPDATE_IN_PROGRESS)
        ref = "0000012a4000000c00001005"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_authentication_data_unavailable(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_AUTHENTICATION_DATA_UNAVAILABLE)
        ref = "0000012a4000000c00001055"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_user_unknown(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_USER_UNKNOWN)
        ref = "0000012a4000000c00001389"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_identities_dont_match(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_IDENTITIES_DONT_MATCH)
        ref = "0000012a4000000c0000138a"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_identity_not_registered(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_IDENTITY_NOT_REGISTERED)
        ref = "0000012a4000000c0000138b"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_roaming_not_allowed(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_ROAMING_NOT_ALLOWED)
        ref = "0000012a4000000c0000138c"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_identity_already_registered(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_IDENTITY_ALREADY_REGISTERED)
        ref = "0000012a4000000c0000138d"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_auth_scheme_not_supported(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_AUTH_SCHEME_NOT_SUPPORTED)
        ref = "0000012a4000000c0000138e"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_in_assignment_type(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_IN_ASSIGNMENT_TYPE)
        ref = "0000012a4000000c0000138f"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_too_much_data(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_TOO_MUCH_DATA)
        ref = "0000012a4000000c00001390"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_not_supported_user_data(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_NOT_SUPPORTED_USER_DATA)
        ref = "0000012a4000000c00001391"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_feature_unsupported(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_FEATURE_UNSUPPORTED)
        ref = "0000012a4000000c00001393"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_user_data_not_recognized(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_USER_DATA_NOT_RECOGNIZED)
        ref = "0000012a4000000c000013ec"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_operation_not_allowed(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_OPERATION_NOT_ALLOWED)
        ref = "0000012a4000000c000013ed"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_user_data_cannot_be_read(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_USER_DATA_CANNOT_BE_READ)
        ref = "0000012a4000000c000013ee"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_user_data_cannot_be_modified(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_USER_DATA_CANNOT_BE_MODIFIED)
        ref = "0000012a4000000c000013ef"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_user_data_cannot_be_notified(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_USER_DATA_CANNOT_BE_NOTIFIED)
        ref = "0000012a4000000c000013f0"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_transparent_data_out_of_sync(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_TRANSPARENT_DATA_OUT_OF_SYNC)
        ref = "0000012a4000000c000013f1"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_subs_data_absent(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_SUBS_DATA_ABSENT)
        ref = "0000012a4000000c000013f2"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_no_subscription_to_data(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_NO_SUBSCRIPTION_TO_DATA)
        ref = "0000012a4000000c000013f3"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_dsai_not_available(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_DSAI_NOT_AVAILABLE)
        ref = "0000012a4000000c000013f4"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_unknown_eps_subscription(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_UNKNOWN_EPS_SUBSCRIPTION)
        ref = "0000012a4000000c0000152c"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_rat_not_allowed(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_RAT_NOT_ALLOWED)
        ref = "0000012a4000000c0000152d"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_equipement_unknown(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_EQUIPMENT_UNKNOWN)
        ref = "0000012a4000000c0000152e"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_unknown_serving_node(self):
        # NOTE: the constant name is spelled "UNKOWN" in the library; kept as-is.
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_UNKOWN_SERVING_NODE)
        ref = "0000012a4000000c0000152f"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_user_no_non_3gpp_subscription(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_USER_NO_NON_3GPP_SUBSCRIPTION)
        ref = "0000012a4000000c0000154a"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_user_no_apn_subscription(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_USER_NO_APN_SUBSCRIPTION)
        ref = "0000012a4000000c0000154b"
        self.assertEqual(avp.dump().hex(), ref)

    def test_result_code_avp__diameter_error_rat_type_not_allowed(self):
        avp = ExperimentalResultCodeAVP(DIAMETER_ERROR_RAT_TYPE_NOT_ALLOWED)
        ref = "0000012a4000000c0000154c"
        self.assertEqual(avp.dump().hex(), ref)
class TestInbandSecurityIdAVP(unittest.TestCase):
    """Unit tests for the Inband-Security-Id AVP (code 299)."""

    def test_inband_security_id_avp__no_value(self):
        # The constructor requires an explicit security-id value.
        with self.assertRaises(TypeError):
            InbandSecurityIdAVP()

    def test_inband_security_id_avp__repr_dunder(self):
        avp = InbandSecurityIdAVP(INBAND_SECURITY_ID_NO_SECURITY)
        self.assertEqual(repr(avp), "<Diameter AVP: 299 [Inband-Security-Id]>")

    def test_inband_security_id_avp__diameter_avp_convert_classmethod(self):
        avp = InbandSecurityIdAVP(INBAND_SECURITY_ID_NO_SECURITY)
        custom = DiameterAVP.convert(avp)
        # The generic DiameterAVP must mirror every field of the typed AVP.
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(custom, attr), getattr(avp, attr))

    def test_inband_security_id_avp__no_inband_security(self):
        expected = "0000012b0000000c00000000"
        avp = InbandSecurityIdAVP(INBAND_SECURITY_ID_NO_SECURITY)
        self.assertEqual(avp.dump().hex(), expected)

    def test_inband_security_id_avp__tls(self):
        expected = "0000012b0000000c00000001"
        avp = InbandSecurityIdAVP(INBAND_SECURITY_ID_TLS)
        self.assertEqual(avp.dump().hex(), expected)
class TestMipHomeAgentHostAVP(unittest.TestCase):
    """Unit tests for the MIP-Home-Agent-Host grouped AVP (code 348)."""

    def _grouped_avp(self):
        """Build the grouped AVP from a Destination-Realm/Host pair."""
        return MipHomeAgentHostAVP([
            DestinationRealmAVP("epc.mncXXX.mccYYY.3gppnetwork.org"),
            DestinationHostAVP("topon.s5pgw.node.epc.mncXXX.mccYYY.3gppnetwork.org"),
        ])

    def test_mip_home_agent_host_avp__no_value(self):
        with self.assertRaises(TypeError):
            MipHomeAgentHostAVP()

    def test_mip_home_agent_host_avp__repr_dunder(self):
        avp = self._grouped_avp()
        self.assertEqual(repr(avp), "<Diameter AVP: 348 [Mip-Home-Agent-Host] MANDATORY>")

    def test_mip_home_agent_host_avp__diameter_avp_convert_classmethod(self):
        avp = self._grouped_avp()
        custom = DiameterAVP.convert(avp)
        # The generic DiameterAVP must mirror every field of the typed AVP.
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(custom, attr), getattr(avp, attr))

    def test_mip_home_agent_host_avp__1(self):
        self.maxDiff = None
        expected = "0000015c400000700000011b400000296570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000000001254000003a746f706f6e2e73357067772e6e6f64652e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000"
        self.assertEqual(self._grouped_avp().dump().hex(), expected)
class TestSubscriptionIdAVP(unittest.TestCase):
    """Unit tests for the Subscription-Id grouped AVP (code 443).

    Fix: in the data-only test the SubscriptionIdDataAVP instance was
    stored in a variable misleadingly named ``subscription_id_type_avp``
    (copy-paste); it is renamed to match what it holds.
    """

    def test_subscription_id_avp__no_value(self):
        # The constructor requires the list of member AVPs.
        self.assertRaises(TypeError, SubscriptionIdAVP)

    def test_subscription_id_avp__repr_dunder(self):
        subscription_id_type_avp = SubscriptionIdTypeAVP(END_USER_E164)
        subscription_id_data_avp = SubscriptionIdDataAVP("5566123456789")
        avps = [subscription_id_type_avp, subscription_id_data_avp]
        avp = SubscriptionIdAVP(avps)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 443 [Subscription-Id] MANDATORY>")

    def test_subscription_id_avp__diameter_avp_convert_classmethod(self):
        subscription_id_type_avp = SubscriptionIdTypeAVP(END_USER_E164)
        subscription_id_data_avp = SubscriptionIdDataAVP("5566123456789")
        avps = [subscription_id_type_avp, subscription_id_data_avp]
        avp = SubscriptionIdAVP(avps)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_subscription_id_avp__default(self):
        ref = "000001bb4000002c000001c24000000c00000000000001bc4000001535353636313233343536373839000000"
        subscription_id_type_avp = SubscriptionIdTypeAVP(END_USER_E164)
        subscription_id_data_avp = SubscriptionIdDataAVP("5566123456789")
        avps = [subscription_id_type_avp, subscription_id_data_avp]
        avp = SubscriptionIdAVP(avps)
        self.assertEqual(avp.dump().hex(), ref)

    def test_subscription_id_avp__subscription_id_type_only(self):
        # A Subscription-Id without Subscription-Id-Data must be rejected.
        subscription_id_type_avp = SubscriptionIdTypeAVP(END_USER_E164)
        avps = [subscription_id_type_avp]
        with self.assertRaises(AVPAttributeValueError) as cm:
            avp = SubscriptionIdAVP(avps)
        self.assertEqual(cm.exception.args[1], DIAMETER_MISSING_AVP)

    def test_subscription_id_avp__subscription_id_data_only(self):
        # A Subscription-Id without Subscription-Id-Type must be rejected.
        subscription_id_data_avp = SubscriptionIdDataAVP("5521993082672")
        avps = [subscription_id_data_avp]
        with self.assertRaises(AVPAttributeValueError) as cm:
            avp = SubscriptionIdAVP(avps)
        self.assertEqual(cm.exception.args[1], DIAMETER_MISSING_AVP)
class TestSubscriptionIdDataAVP(unittest.TestCase):
    """Unit tests for the Subscription-Id-Data AVP (code 444)."""

    def test_subscription_id_data_avp__no_value(self):
        # The constructor requires an explicit subscriber identity.
        with self.assertRaises(TypeError):
            SubscriptionIdDataAVP()

    def test_subscription_id_data_avp__repr_dunder(self):
        avp = SubscriptionIdDataAVP("5522123456789")
        self.assertEqual(repr(avp), "<Diameter AVP: 444 [Subscription-Id-Data] MANDATORY>")

    def test_subscription_id_data_avp__diameter_avp_convert_classmethod(self):
        avp = SubscriptionIdDataAVP("5511123456789")
        custom = DiameterAVP.convert(avp)
        # The generic DiameterAVP must mirror every field of the typed AVP.
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(custom, attr), getattr(avp, attr))

    def test_subscription_id_data_avp__1(self):
        expected = "000001bc4000001535353131313233343536373839000000"
        avp = SubscriptionIdDataAVP("5511123456789")
        self.assertEqual(avp.dump().hex(), expected)

    def test_subscription_id_data_avp__2(self):
        expected = "000001bc4000001535353232313233343536373839000000"
        avp = SubscriptionIdDataAVP("5522123456789")
        self.assertEqual(avp.dump().hex(), expected)
class TestSubscriptionIdTypeAVP(unittest.TestCase):
    """Unit tests for the Subscription-Id-Type AVP (code 450)."""

    def test_subscription_id_type_avp__no_value(self):
        # The constructor requires an explicit type value.
        with self.assertRaises(TypeError):
            SubscriptionIdTypeAVP()

    def test_subscription_id_type_avp__repr_dunder(self):
        avp = SubscriptionIdTypeAVP(END_USER_E164)
        self.assertEqual(repr(avp), "<Diameter AVP: 450 [Subscription-Id-Type] MANDATORY>")

    def test_subscription_id_type_avp__diameter_avp_convert_classmethod(self):
        avp = SubscriptionIdTypeAVP(END_USER_PRIVATE)
        custom = DiameterAVP.convert(avp)
        # The generic DiameterAVP must mirror every field of the typed AVP.
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(custom, attr), getattr(avp, attr))

    # One dump test per RFC 4006 Subscription-Id-Type value (0..4).
    def test_subscription_id_type_avp__end_user_e164(self):
        expected = "000001c24000000c00000000"
        avp = SubscriptionIdTypeAVP(END_USER_E164)
        self.assertEqual(avp.dump().hex(), expected)

    def test_subscription_id_type_avp__end_user_imsi(self):
        expected = "000001c24000000c00000001"
        avp = SubscriptionIdTypeAVP(END_USER_IMSI)
        self.assertEqual(avp.dump().hex(), expected)

    def test_subscription_id_type_avp__end_user_sip_uri(self):
        expected = "000001c24000000c00000002"
        avp = SubscriptionIdTypeAVP(END_USER_SIP_URI)
        self.assertEqual(avp.dump().hex(), expected)

    def test_subscription_id_type_avp__end_user_nai(self):
        expected = "000001c24000000c00000003"
        avp = SubscriptionIdTypeAVP(END_USER_NAI)
        self.assertEqual(avp.dump().hex(), expected)

    def test_subscription_id_type_avp__end_user_private(self):
        expected = "000001c24000000c00000004"
        avp = SubscriptionIdTypeAVP(END_USER_PRIVATE)
        self.assertEqual(avp.dump().hex(), expected)
class TestEapPayloadAVP(unittest.TestCase):
    """Unit tests for the EAP-Payload AVP (code 462).

    Fix: setUp's NAI had been replaced by the literal "<EMAIL>"
    placeholder, which no longer matched the reference dumps. The
    identity is restored from the EAP-Identity reference hex in
    test_eap_payload_avp__eap_type_identity (the bytes following the
    EAP-Identity type octet decode to the NAI below, and the EAP/AVP
    length fields match its 45-character length).
    """

    def setUp(self):
        self.nai = "my-user@nai.epc.mncXXX.mccYYY.3gppnetwork.org"

    def test_eap_payload_avp__no_value(self):
        # The constructor requires an EapPayload object.
        self.assertRaises(TypeError, EapPayload)

    def test_eap_payload_avp__repr_dunder(self):
        content = {"nai": self.nai, "payload": None}
        eap_payload = EapPayload(eap_code=EAP_CODE_RESPONSE, eap_id=0, eap_type=EAP_TYPE_IDENTITY, content=content)
        avp = EapPayloadAVP(eap_payload)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 462 [Eap-Payload] MANDATORY>")

    def test_eap_payload_avp__diameter_avp_convert_classmethod(self):
        content = {"nai": self.nai, "payload": None}
        eap_payload = EapPayload(eap_code=EAP_CODE_RESPONSE, eap_id=0, eap_type=EAP_TYPE_IDENTITY, content=content)
        avp = EapPayloadAVP(eap_payload)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_eap_payload_avp__eap_type_identity(self):
        # EAP Response/Identity carrying the NAI from setUp.
        content = {"nai": self.nai, "payload": None}
        eap_payload = EapPayload(eap_code=EAP_CODE_RESPONSE, eap_id=0, eap_type=EAP_TYPE_IDENTITY, content=content)
        avp = EapPayloadAVP(eap_payload)
        ref = "000001ce4000003a02000032016d792d75736572406e61692e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000"
        self.assertEqual(avp.dump().hex(), ref)

    def test_eap_payload_avp__ea_type_umts_authentication_and_key_agreement_eap(self):
        # EAP Response/AKA-Challenge; the payload is a pre-encoded blob.
        content = {"nai": self.nai, "payload": "AgEAKBcBAAADAwBAtRAM4pAWdZkLBQAANb\/0OV77760sUjmpBDihxA=="}
        eap_payload = EapPayload(eap_code=EAP_CODE_RESPONSE, eap_id=1, eap_type=EAP_TYPE_UMTS_AUTHENTICATION_AND_KEY_AGREEMENT_EAP, content=content)
        avp = EapPayloadAVP(eap_payload)
        ref = "000001ce40000030020100281701000003030040b5100ce2901675990b05000035bff4395efbefad2c5239a90438a1c4"
        self.assertEqual(avp.dump().hex(), ref)
class TestEapMasterSessionKeyAVP(unittest.TestCase):
    """Unit tests for the EAP-Master-Session-Key AVP (code 464)."""

    def setUp(self):
        # 64-byte MSK used as the common fixture for every test below.
        self.EAP_MASTER_SESSION_KEY = bytes.fromhex("ec3208c43154f60862858afa650dd875e8a095dfcd364e73420fcc573388d4c207308ace020aa3e3f9ff76ed1821a044e8deed2470997fbfbf5197d724d51fa1")

    def test_eap_master_session_key_avp__no_value(self):
        # The constructor requires the key bytes.
        with self.assertRaises(TypeError):
            EapMasterSessionKeyAVP()

    def test_eap_master_session_key_avp__repr_dunder(self):
        avp = EapMasterSessionKeyAVP(self.EAP_MASTER_SESSION_KEY)
        self.assertEqual(repr(avp), "<Diameter AVP: 464 [Eap-Master-Session-Key]>")

    def test_eap_master_session_key_avp__diameter_avp_convert_classmethod(self):
        avp = EapMasterSessionKeyAVP(self.EAP_MASTER_SESSION_KEY)
        custom = DiameterAVP.convert(avp)
        # The generic DiameterAVP must mirror every field of the typed AVP.
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(custom, attr), getattr(avp, attr))

    def test_eap_master_session_key_avp__1(self):
        expected = "000001d000000048ec3208c43154f60862858afa650dd875e8a095dfcd364e73420fcc573388d4c207308ace020aa3e3f9ff76ed1821a044e8deed2470997fbfbf5197d724d51fa1"
        avp = EapMasterSessionKeyAVP(self.EAP_MASTER_SESSION_KEY)
        self.assertEqual(avp.dump().hex(), expected)
class TestAccountingRecordTypeAVP(unittest.TestCase):
    """Unit tests for the Accounting-Record-Type AVP (code 480)."""

    def test_accounting_record_type_avp__repr_dunder(self):
        # NOTE(review): constructed with no argument — assumes the class
        # provides a default record-type value; confirm against the AVP class.
        avp = AccountingRecordTypeAVP()
        self.assertEqual(repr(avp), "<Diameter AVP: 480 [Accounting-Record-Type] MANDATORY>")

    def test_accounting_record_type_avp__diameter_avp_convert_classmethod(self):
        avp = AccountingRecordTypeAVP(ACCOUNTING_RECORD_TYPE_EVENT_RECORD)
        custom = DiameterAVP.convert(avp)
        # The generic DiameterAVP must mirror every field of the typed AVP.
        for attr in ("code", "flags", "length", "vendor_id", "data", "_padding"):
            self.assertEqual(getattr(custom, attr), getattr(avp, attr))

    # One dump test per RFC 6733 record-type value (1..4).
    def test_accounting_record_type_avp__event_record(self):
        expected = "000001e04000000c00000001"
        avp = AccountingRecordTypeAVP(ACCOUNTING_RECORD_TYPE_EVENT_RECORD)
        self.assertEqual(avp.dump().hex(), expected)

    def test_accounting_record_type_avp__start_record(self):
        expected = "000001e04000000c00000002"
        avp = AccountingRecordTypeAVP(ACCOUNTING_RECORD_TYPE_START_RECORD)
        self.assertEqual(avp.dump().hex(), expected)

    def test_accounting_record_type_avp__interim_record(self):
        expected = "000001e04000000c00000003"
        avp = AccountingRecordTypeAVP(ACCOUNTING_RECORD_TYPE_INTERIM_RECORD)
        self.assertEqual(avp.dump().hex(), expected)

    def test_accounting_record_type_avp__stop_record(self):
        expected = "000001e04000000c00000004"
        avp = AccountingRecordTypeAVP(ACCOUNTING_RECORD_TYPE_STOP_RECORD)
        self.assertEqual(avp.dump().hex(), expected)
class TestAccountingRealtimeRequiredAVP(unittest.TestCase):
    """Unit tests for the Accounting-Realtime-Required AVP (code 483).

    Fix: the repr and convert tests previously passed
    ACCOUNTING_RECORD_TYPE_EVENT_RECORD — an Accounting-Record-Type
    constant that merely shares the value 1 — into this AVP; they now
    use the intended ACCOUNTING_REALTIME_REQUIRED_DELIVER_AND_GRANT
    (same wire value, so the assertions are unchanged).
    """

    def test_accounting_realtime_required_avp__no_value(self):
        # The constructor requires an explicit realtime-required value.
        self.assertRaises(TypeError, AccountingRealtimeRequiredAVP)

    def test_accounting_realtime_required_avp__repr_dunder(self):
        avp = AccountingRealtimeRequiredAVP(ACCOUNTING_REALTIME_REQUIRED_DELIVER_AND_GRANT)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 483 [Accounting-Realtime-Required] MANDATORY>")

    def test_accounting_realtime_required_avp__diameter_avp_convert_classmethod(self):
        avp = AccountingRealtimeRequiredAVP(ACCOUNTING_REALTIME_REQUIRED_DELIVER_AND_GRANT)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    # One dump test per RFC 6733 realtime-required value (1..3).
    def test_accounting_realtime_required_avp__deliver_and_grant(self):
        avp = AccountingRealtimeRequiredAVP(ACCOUNTING_REALTIME_REQUIRED_DELIVER_AND_GRANT)
        ref = "000001e34000000c00000001"
        self.assertEqual(avp.dump().hex(), ref)

    def test_accounting_realtime_required_avp__grant_and_store(self):
        avp = AccountingRealtimeRequiredAVP(ACCOUNTING_REALTIME_REQUIRED_GRANT_AND_STORE)
        ref = "000001e34000000c00000002"
        self.assertEqual(avp.dump().hex(), ref)

    def test_accounting_realtime_required_avp__grant_and_lose(self):
        # NOTE: the constant name is spelled "GRAND" in the library; kept as-is.
        avp = AccountingRealtimeRequiredAVP(ACCOUNTING_REALTIME_REQUIRED_GRAND_AND_LOSE)
        ref = "000001e34000000c00000003"
        self.assertEqual(avp.dump().hex(), ref)
class TestAccountingRecordNumberAVP(unittest.TestCase):
    """Unit tests for the Accounting-Record-Number AVP (code 485).

    Fix: the two dump tests were named ``__event_record`` and
    ``__start_record`` (copied from the Accounting-Record-Type tests)
    even though they exercise record numbers 1 and 2; renamed accordingly.
    """

    def test_accounting_record_number_avp__no_value(self):
        # The constructor requires an explicit record number.
        self.assertRaises(TypeError, AccountingRecordNumberAVP)

    def test_accounting_record_number_avp__repr_dunder(self):
        avp = AccountingRecordNumberAVP(1)
        self.assertEqual(avp.__repr__(), "<Diameter AVP: 485 [Accounting-Record-Number] MANDATORY>")

    def test_accounting_record_number_avp__diameter_avp_convert_classmethod(self):
        avp = AccountingRecordNumberAVP(1)
        custom = DiameterAVP.convert(avp)
        self.assertEqual(custom.code, avp.code)
        self.assertEqual(custom.flags, avp.flags)
        self.assertEqual(custom.length, avp.length)
        self.assertEqual(custom.vendor_id, avp.vendor_id)
        self.assertEqual(custom.data, avp.data)
        self.assertEqual(custom._padding, avp._padding)

    def test_accounting_record_number_avp__record_number_1(self):
        avp = AccountingRecordNumberAVP(1)
        ref = "000001e54000000c00000001"
        self.assertEqual(avp.dump().hex(), ref)

    def test_accounting_record_number_avp__record_number_2(self):
        avp = AccountingRecordNumberAVP(2)
        ref = "000001e54000000c00000002"
        self.assertEqual(avp.dump().hex(), ref)
class TestMip6AgentInfoAVP(unittest.TestCase):
    """Unit tests for the MIP6-Agent-Info grouped AVP (code 486)."""

    def _mip_home_agent_host(self):
        """Build the inner MIP-Home-Agent-Host grouped AVP fixture."""
        return MipHomeAgentHostAVP([
            DestinationRealmAVP("epc.mncXXX.mccYYY.3gppnetwork.org"),
            DestinationHostAVP("topon.s5pgw.node.epc.mncXXX.mccYYY.3gppnetwork.org"),
        ])

    def test_mip6_agent_info_host_avp__no_value(self):
        with self.assertRaises(TypeError):
            Mip6AgentInfoAVP()

    def test_mip6_agent_info_avp__repr_dunder(self):
        avp = Mip6AgentInfoAVP([self._mip_home_agent_host()])
        self.assertEqual(repr(avp), "<Diameter AVP: 486 [Mip6-Agent-Info] MANDATORY>")

    def test_mip6_agent_info_avp__mip_home_agent_host_only(self):
        expected = "000001e6400000780000015c400000700000011b400000296570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f7267000000000001254000003a746f706f6e2e73357067772e6e6f64652e6570632e6d6e635858582e6d63635959592e336770706e6574776f726b2e6f72670000"
        avp = Mip6AgentInfoAVP([self._mip_home_agent_host()])
        self.assertEqual(avp.dump().hex(), expected)
# Allow this test module to be executed directly (python <module>.py);
# unittest discovers and runs every TestCase defined above.
if __name__ == "__main__":
    unittest.main()
33516 | from gurobipy import *
from itertools import combinations
from time import localtime, strftime, time
import config
from fibonew2 import (
AK2exp, InitMatNew, MatroidCompatible, Resol2m, bi, bs, disjoint, rankfinder,
ib, sb)
from timing import endlog, log
def CheckOneAK(mbases,gset,rnk):
    '''
    We check if the matroid is 1-AK.
    We also implement some optimizations that help reduce the number of flats to check.
    mbases (dictionary) containing matroids to be checked and their bases
    gset (list) is the ground set of the matroids
    rnk (int) is the rank of the matroids
    Note: observe that the matroids in a particular run need to be of the same size and rank.
    '''
    start = time()
    log("Start Program")
    # Bookkeeping: 'checker' counts processed matroids (1-based); 'nonAK' and
    # 'oneAK' tally outcomes; 'noAKmats' collects names of violating matroids.
    checker = 1
    nonAK = 0
    oneAK = 0
    noAKmats = list()
    # Results are appended to a running log file.
    # NOTE(review): 'sfiles' is never closed; a with-block (or an explicit
    # sfiles.close() at the end) would be safer.
    sfiles = open('runresultAK.txt','a+')
    nm = 'File listing checked files from polymatroid extension run using all sets (AK) with some optimizations.'
    nm1 = '%' * len(nm)
    sfiles.write('{}\n'.format(nm1))
    sfiles.write('{}\n'.format(nm))
    sfiles.write('{}\n'.format(nm1))
    for key in mbases:
        # 'counter' flags whether this particular matroid violated AK.
        counter = 0
        begin = time()
        log('Start polymatroid extension check (AK) for {} using all sets with some optimizations.'.format(key))
        # 'allranks' alternates subsets with their ranks (set at even index,
        # rank at the following odd index) — see the allranks.index(...)+1
        # lookups below.
        rankd,allranks = rankfinder(mbases[key],gset)
        # Keep only subsets with at least two elements (as sets of strings).
        Ar = list()
        for i in range(0,len(allranks)-1,2):
            if len(allranks[i]) < 2: continue
            #if Ar1[i+1] == rnk: continue # not sure why this is here
            Ar.append(set([str(it3) for it3 in allranks[i]]))
        combs3 = combinations( [i for i in range(len(Ar))], 3)
        comb_hlder = list()
        ################################################
        ## We remove tuples (U,V,Z) where:
        ## (i) UV has full rank
        ## (ii) U and V are subsets of Z
        ## (iii) UV is a subset of Z
        ## (iv) Z is the intersection of U and V
        ## (v) Z is a subset of UV
        ## (vi) UV and Z are a modular pair
        ## (vii) UV and Z have zero mutual information
        ################################################
        for combo in combs3:
            pre_comb_hlder = list()
            cmbs12 = Ar[combo[0]].union(Ar[combo[1]])
            excld = set([int(itm) for itm in cmbs12])
            ind = allranks.index(excld)
            rnk_excld = allranks[ind + 1]
            # (i): skip triples whose union U|V already has full rank.
            if rnk_excld == rnk: continue
            # (ii)/(iii): skip when U and V (or U|V) sit inside Z.
            if (Ar[combo[0]].issubset(Ar[combo[2]]) and Ar[combo[1]].issubset(Ar[combo[2]])) or cmbs12.issubset(Ar[combo[2]]): continue
            # (iv)/(v): skip when Z is the intersection of U and V, or Z is
            # contained in U|V.
            if Ar[combo[2]]==Ar[combo[0]].intersection(Ar[combo[1]]) or cmbs12.issuperset(Ar[combo[2]]): continue
            # Look up the ranks of U|V, Z, their intersection and their union.
            #int_combo01 = [int(item) for item in cmbs12]
            set_combo01 = set( [int(item) for item in cmbs12] )
            index_combo01 = allranks.index(set_combo01)
            rnk_combo01 = allranks[index_combo01+1]
            #int_combo2 = [int(item) for item in Ar[combo[2]]]
            set_combo2 = set( [int(item) for item in Ar[combo[2]]] )
            index_combo2 = allranks.index(set_combo2)
            rnk_combo2 = allranks[index_combo2+1]
            combo_inters = cmbs12.intersection(Ar[combo[2]])
            #int_combointers = [int(item) for item in combo_inters]
            set_combointers = set( [int(item) for item in combo_inters] )
            index_combointers = allranks.index(set_combointers)
            rnk_combointers = allranks[index_combointers+1]
            combo_union = cmbs12.union(Ar[combo[2]])
            #int_combounion = [int(item) for item in combo_union]
            set_combounion = set( [int(item) for item in combo_union] )
            index_combounion = allranks.index(set_combounion)
            rnk_combounion = allranks[index_combounion+1]
            # (vi)/(vii): keep the triple only if U|V and Z are neither a
            # modular pair nor have zero mutual information.
            check_modularity = rnk_combo01 + rnk_combo2 - rnk_combounion - rnk_combointers
            mutual_info = rnk_combo01 + rnk_combo2 - rnk_combounion
            if check_modularity != 0 and mutual_info != 0:
                pre_comb_hlder.append(Ar[combo[0]])
                pre_comb_hlder.append(Ar[combo[1]])
                pre_comb_hlder.append(Ar[combo[2]])
                comb_hlder.append(pre_comb_hlder)
        print('{} has {} 3-member working combinations.'.format(key,len(comb_hlder)))
        # For each surviving triple (J, K, L), build a fresh Gurobi model and
        # test whether an AK-compatible polymatroid extension is feasible.
        for i in range(len(comb_hlder)):
            combo1 = comb_hlder[i]
            J = combo1[0]
            K = combo1[1]
            L = combo1[2]
            config.p = Model("gurotest")
            config.w = config.p.addVars(range(0,2**config.vrbls+1),name="w")
            InitMatNew()
            MatroidCompatible(mbases[key],gset)
            AK2exp(bi(sb(J)), bi(sb(K)), bi(sb(L)), 2**(config.Part))
            Resol2m()
            # An optimal (feasible) model means this triple does not violate
            # AK; move on to the next triple.
            if config.p.status == GRB.Status.OPTIMAL: continue
            print('{} is a non-AK matroid with violating sets {}, {} and {}.'.format(key,J,K,L))
            sfiles.write('{} is a non-AK matroid with violating sets {}, {} and {}.\n'.format(key,J,K,L))
            noAKmats.append(key)
            counter = 1
            break ###### To find ALL combinations that break AK, suppress this line #####
        if counter == 0:
            oneAK += 1
            sfiles.write('{} is an AK matroid.\n'.format(key))
        else:
            nonAK += 1
        endlog(begin)
        # Progress reporting; when processing the last matroid, also write the
        # run summary to the results file.
        if checker < len(mbases):
            difference = len(mbases)-checker
            if difference > 1:
                print('{0}done. {1} matroids remaining. Moving to the next one... \n'.format(key,difference))
            else:
                print('{}done. One matroid left.'.format(key))
        else:
            print('*********************************************************')
            print('Last run made. Program concluded.')
            print('*********************************************************')
            sfiles.write('\n All {} matroids checked.\n'.format(len(mbases)))
            if nonAK == 0:
                sfiles.write('All {} matroids are AK.\n'.format(oneAK))
            else:
                sfiles.write('non_AK_mats = {}\n'.format(noAKmats))
            if nonAK == 1 and nonAK != len(mbases):
                if oneAK == 1:
                    sfiles.write('There is one non-AK and {} AK matroid here.\n'.format(oneAK))
                else:
                    sfiles.write('There is one non-AK and {} AK matroids here.\n'.format(oneAK))
            elif nonAK > 1 and nonAK < len(mbases):
                if oneAK == 1:
                    sfiles.write('There are {} non-AK matroids, and {} AK matroid here.\n'.format(nonAK,oneAK))
                else:
                    sfiles.write('There are {} non-AK matroids, and {} AK matroids here.\n'.format(nonAK,oneAK))
            elif nonAK == len(mbases):
                sfiles.write('All {} matroids are non-AK.\n'.format(nonAK))
        checker += 1
    endlog(start)
| StarcoderdataPython |
"""Test other aspects of the server implementation."""
import os
import socket
import unittest
from aiosmtpd.controller import Controller
from aiosmtpd.handlers import Sink
from aiosmtpd.smtp import SMTP as Server
from smtplib import SMTP
def in_wsl():
    """Return True when the test suite is running under WSL.

    WSL 1.0 somehow allows more than one listener on the same port, so
    "test_socket_error" must be skipped there.  Setting the environment
    variable PLATFORM=wsl signals that situation to the test runner.
    """
    platform_name = os.environ.get("PLATFORM")
    return platform_name == "wsl"
class TestServer(unittest.TestCase):
    """Tests for miscellaneous behavior of the SMTP server/controller."""

    def test_smtp_utf8(self):
        # The server should advertise SMTPUTF8 in its EHLO response.
        ctrl = Controller(Sink())
        ctrl.start()
        self.addCleanup(ctrl.stop)
        with SMTP(ctrl.hostname, ctrl.port) as client:
            code, response = client.ehlo('example.com')
        self.assertEqual(code, 250)
        self.assertIn(b'SMTPUTF8', response.splitlines())

    def test_default_max_command_size_limit(self):
        # With no overrides, the largest per-command size limit is 512.
        self.assertEqual(Server(Sink()).max_command_size_limit, 512)

    def test_special_max_command_size_limit(self):
        # A per-command override raises the overall maximum.
        server = Server(Sink())
        server.command_size_limits['DATA'] = 1024
        self.assertEqual(server.max_command_size_limit, 1024)

    @unittest.skipIf(in_wsl(), "WSL prevents socket collisions")
    # See explanation in the in_wsl() function
    def test_socket_error(self):
        # Starting a second server on a port already in use must fail.
        first = Controller(Sink(), port=8025)
        second = Controller(Sink(), port=8025)
        self.addCleanup(first.stop)
        self.addCleanup(second.stop)
        first.start()
        self.assertRaises(socket.error, second.start)

    def test_server_attribute(self):
        # The 'server' attribute is only set while the controller is running.
        ctrl = Controller(Sink())
        self.assertIsNone(ctrl.server)
        try:
            ctrl.start()
            self.assertIsNotNone(ctrl.server)
        finally:
            ctrl.stop()
            self.assertIsNone(ctrl.server)
| StarcoderdataPython |
1786122 | <reponame>Aetf/fathom<filename>fathom/deepq/database.py
from __future__ import absolute_import, print_function, division
import numpy as np
class database(object):
    """Fixed-size replay buffer of 84x84 game frames for DQN training.

    Stores frames, actions, rewards and terminal flags in preallocated
    numpy arrays and samples random transition minibatches from them.

    Improvements over the original: idiomatic truthiness instead of
    ``== False``, removal of dead commented-out debug code and pointless
    bare ``return`` statements, and documentation throughout.  Behavior
    and the public interface are unchanged.
    """

    def __init__(self, params):
        # Capacity of the ring buffer and the pixel normalization divisor.
        self.size = params['db_size']
        self.img_scale = params['img_scale']
        # Per-transition storage (frames kept as raw uint8 to save memory).
        self.states = np.zeros([self.size, 84, 84], dtype='uint8')  # image dimensions
        self.actions = np.zeros(self.size, dtype='float32')
        self.terminals = np.zeros(self.size, dtype='float32')
        self.rewards = np.zeros(self.size, dtype='float32')
        # Preallocated minibatch buffers (4 stacked frames per state).
        self.bat_size = params['batch']
        self.bat_s = np.zeros([self.bat_size, 84, 84, 4])
        self.bat_a = np.zeros([self.bat_size])
        self.bat_t = np.zeros([self.bat_size])
        self.bat_n = np.zeros([self.bat_size, 84, 84, 4])
        self.bat_r = np.zeros([self.bat_size])
        self.counter = 0   # index of the next empty slot
        self.flag = False  # True once the circular buffer has wrapped around

    def get_batches(self):
        """Fill the minibatch buffers with random transitions and return them.

        Each sampled state is a stack of 4 consecutive frames, normalized by
        ``img_scale``.  Indices without 3 frames of history, or straddling
        the current write cursor, are rejected and re-drawn.

        Returns:
            Tuple ``(states, actions, terminals, next_states, rewards)`` of
            the preallocated batch arrays (reused between calls).
        """
        for i in range(self.bat_size):
            idx = 0
            # Re-draw until the index has full frame history and does not
            # overlap the region currently being overwritten.
            while idx < 3 or (idx > self.counter - 2 and idx < self.counter + 3):
                idx = np.random.randint(3, self.get_size() - 1)
            self.bat_s[i] = np.transpose(self.states[idx - 3:idx + 1, :, :], (1, 2, 0)) / self.img_scale
            self.bat_n[i] = np.transpose(self.states[idx - 2:idx + 2, :, :], (1, 2, 0)) / self.img_scale
            self.bat_a[i] = self.actions[idx]
            self.bat_t[i] = self.terminals[idx]
            self.bat_r[i] = self.rewards[idx]
        return self.bat_s, self.bat_a, self.bat_t, self.bat_n, self.bat_r

    def insert(self, prevstate_proc, reward, action, terminal):
        """Store one transition at the write cursor, wrapping when full."""
        self.states[self.counter] = prevstate_proc
        self.rewards[self.counter] = reward
        self.actions[self.counter] = action
        self.terminals[self.counter] = terminal
        # Advance the cursor; once it wraps, the buffer reports full capacity.
        self.counter += 1
        if self.counter >= self.size:
            self.flag = True
            self.counter = 0

    def get_size(self):
        """Return the number of transitions currently stored."""
        return self.size if self.flag else self.counter
| StarcoderdataPython |
147586 | <filename>ionoscloud/api/nat_gateways_api.py
from __future__ import absolute_import
import re # noqa: F401
import six
from ionoscloud.api_client import ApiClient
from ionoscloud.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class NATGatewaysApi(object):
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def datacenters_natgateways_delete(self, datacenter_id, nat_gateway_id, **kwargs): # noqa: E501
"""Delete NAT Gateways # noqa: E501
Remove the specified NAT Gateway from the data center. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_delete(datacenter_id, nat_gateway_id, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_delete_with_http_info(datacenter_id, nat_gateway_id, **kwargs) # noqa: E501
    def datacenters_natgateways_delete_with_http_info(self, datacenter_id, nat_gateway_id, **kwargs):  # noqa: E501
        """Delete NAT Gateways  # noqa: E501

        Remove the specified NAT Gateway from the data center.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.datacenters_natgateways_delete_with_http_info(datacenter_id, nat_gateway_id, async_req=True)
        >>> result = thread.get()

        :param datacenter_id: The unique ID of the data center. (required)
        :type datacenter_id: str
        :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
        :type nat_gateway_id: str
        :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
        :type pretty: bool
        :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
        :type depth: int
        :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
        :type x_contract_number: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: None
        """
        # NOTE(review): auto-generated OpenAPI client code; regenerate from the
        # API spec rather than hand-editing.
        # Collect call arguments; entries in 'kwargs' are validated against
        # 'all_params' below before being merged in.
        local_var_params = locals()

        all_params = [
            'datacenter_id',
            'nat_gateway_id',
            'pretty',
            'depth',
            'x_contract_number'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth',
                'response_type',
                'query_params'
            ]
        )

        for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
            if local_var_params_key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method datacenters_natgateways_delete" % local_var_params_key
                )
            local_var_params[local_var_params_key] = local_var_params_val
        del local_var_params['kwargs']
        # verify the required parameter 'datacenter_id' is set
        if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['datacenter_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_delete`")  # noqa: E501
        # verify the required parameter 'nat_gateway_id' is set
        if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['nat_gateway_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_delete`")  # noqa: E501

        # 'depth' must lie in the documented 0..10 range.
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_delete`, must be a value less than or equal to `10`")  # noqa: E501
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_delete`, must be a value greater than or equal to `0`")  # noqa: E501
        # Assemble the pieces of the HTTP request (path, query, headers, body).
        collection_formats = {}

        path_params = {}
        if 'datacenter_id' in local_var_params:
            path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
        if 'nat_gateway_id' in local_var_params:
            path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501

        query_params = list(local_var_params.get('query_params', {}).items())
        if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
            query_params.append(('depth', local_var_params['depth']))  # noqa: E501

        header_params = {}
        if 'x_contract_number' in local_var_params:
            header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

        response_type = None
        if 'response_type' in kwargs:
            response_type = kwargs['response_type']

        return self.api_client.call_api(
            '/datacenters/{datacenterId}/natgateways/{natGatewayId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=response_type,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_find_by_nat_gateway_id(self, datacenter_id, nat_gateway_id, **kwargs): # noqa: E501
"""Retrieve NAT Gateways # noqa: E501
Retrieve the properties of the specified NAT Gateway within the data center. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_find_by_nat_gateway_id(datacenter_id, nat_gateway_id, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: NatGateway
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_find_by_nat_gateway_id_with_http_info(datacenter_id, nat_gateway_id, **kwargs) # noqa: E501
    def datacenters_natgateways_find_by_nat_gateway_id_with_http_info(self, datacenter_id, nat_gateway_id, **kwargs):  # noqa: E501
        """Retrieve NAT Gateways  # noqa: E501

        Retrieve the properties of the specified NAT Gateway within the data center.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.datacenters_natgateways_find_by_nat_gateway_id_with_http_info(datacenter_id, nat_gateway_id, async_req=True)
        >>> result = thread.get()

        :param datacenter_id: The unique ID of the data center. (required)
        :type datacenter_id: str
        :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
        :type nat_gateway_id: str
        :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
        :type pretty: bool
        :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
        :type depth: int
        :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
        :type x_contract_number: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(NatGateway, status_code(int), headers(HTTPHeaderDict))
        """
        # NOTE(review): auto-generated OpenAPI client code; regenerate from the
        # API spec rather than hand-editing.
        # Collect call arguments; entries in 'kwargs' are validated against
        # 'all_params' below before being merged in.
        local_var_params = locals()

        all_params = [
            'datacenter_id',
            'nat_gateway_id',
            'pretty',
            'depth',
            'x_contract_number'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth',
                'response_type',
                'query_params'
            ]
        )

        for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
            if local_var_params_key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method datacenters_natgateways_find_by_nat_gateway_id" % local_var_params_key
                )
            local_var_params[local_var_params_key] = local_var_params_val
        del local_var_params['kwargs']
        # verify the required parameter 'datacenter_id' is set
        if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['datacenter_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_find_by_nat_gateway_id`")  # noqa: E501
        # verify the required parameter 'nat_gateway_id' is set
        if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['nat_gateway_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_find_by_nat_gateway_id`")  # noqa: E501

        # 'depth' must lie in the documented 0..10 range.
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_find_by_nat_gateway_id`, must be a value less than or equal to `10`")  # noqa: E501
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_find_by_nat_gateway_id`, must be a value greater than or equal to `0`")  # noqa: E501
        # Assemble the pieces of the HTTP request (path, query, headers, body).
        collection_formats = {}

        path_params = {}
        if 'datacenter_id' in local_var_params:
            path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
        if 'nat_gateway_id' in local_var_params:
            path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501

        query_params = list(local_var_params.get('query_params', {}).items())
        if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
            query_params.append(('depth', local_var_params['depth']))  # noqa: E501

        header_params = {}
        if 'x_contract_number' in local_var_params:
            header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

        response_type = 'NatGateway'
        if 'response_type' in kwargs:
            response_type = kwargs['response_type']

        return self.api_client.call_api(
            '/datacenters/{datacenterId}/natgateways/{natGatewayId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=response_type,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_flowlogs_delete(self, datacenter_id, nat_gateway_id, flow_log_id, **kwargs): # noqa: E501
"""Delete NAT Gateway Flow Logs # noqa: E501
Delete the specified NAT Gateway Flow Log. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_flowlogs_delete(datacenter_id, nat_gateway_id, flow_log_id, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param flow_log_id: The unique ID of the Flow Log. (required)
:type flow_log_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_flowlogs_delete_with_http_info(datacenter_id, nat_gateway_id, flow_log_id, **kwargs) # noqa: E501
    def datacenters_natgateways_flowlogs_delete_with_http_info(self, datacenter_id, nat_gateway_id, flow_log_id, **kwargs):  # noqa: E501
        """Delete NAT Gateway Flow Logs  # noqa: E501

        Delete the specified NAT Gateway Flow Log.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.datacenters_natgateways_flowlogs_delete_with_http_info(datacenter_id, nat_gateway_id, flow_log_id, async_req=True)
        >>> result = thread.get()

        :param datacenter_id: The unique ID of the data center. (required)
        :type datacenter_id: str
        :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
        :type nat_gateway_id: str
        :param flow_log_id: The unique ID of the Flow Log. (required)
        :type flow_log_id: str
        :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
        :type pretty: bool
        :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
        :type depth: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: None
        """
        # NOTE(review): auto-generated OpenAPI client code; regenerate from the
        # API spec rather than hand-editing.
        # Collect call arguments; entries in 'kwargs' are validated against
        # 'all_params' below before being merged in.
        local_var_params = locals()

        all_params = [
            'datacenter_id',
            'nat_gateway_id',
            'flow_log_id',
            'pretty',
            'depth'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth',
                'response_type',
                'query_params'
            ]
        )

        for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
            if local_var_params_key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method datacenters_natgateways_flowlogs_delete" % local_var_params_key
                )
            local_var_params[local_var_params_key] = local_var_params_val
        del local_var_params['kwargs']
        # verify the required parameter 'datacenter_id' is set
        if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['datacenter_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_flowlogs_delete`")  # noqa: E501
        # verify the required parameter 'nat_gateway_id' is set
        if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['nat_gateway_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_flowlogs_delete`")  # noqa: E501
        # verify the required parameter 'flow_log_id' is set
        if self.api_client.client_side_validation and ('flow_log_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['flow_log_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `flow_log_id` when calling `datacenters_natgateways_flowlogs_delete`")  # noqa: E501

        # 'depth' must lie in the documented 0..10 range.
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_delete`, must be a value less than or equal to `10`")  # noqa: E501
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_delete`, must be a value greater than or equal to `0`")  # noqa: E501
        # Assemble the pieces of the HTTP request (path, query, headers, body).
        collection_formats = {}

        path_params = {}
        if 'datacenter_id' in local_var_params:
            path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
        if 'nat_gateway_id' in local_var_params:
            path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501
        if 'flow_log_id' in local_var_params:
            path_params['flowLogId'] = local_var_params['flow_log_id']  # noqa: E501

        query_params = list(local_var_params.get('query_params', {}).items())
        if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
            query_params.append(('depth', local_var_params['depth']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

        response_type = None
        if 'response_type' in kwargs:
            response_type = kwargs['response_type']

        return self.api_client.call_api(
            '/datacenters/{datacenterId}/natgateways/{natGatewayId}/flowlogs/{flowLogId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=response_type,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_flowlogs_find_by_flow_log_id(self, datacenter_id, nat_gateway_id, flow_log_id, **kwargs): # noqa: E501
"""Retrieve NAT Gateway Flow Logs # noqa: E501
Retrieve the specified NAT Gateway Flow Log. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_flowlogs_find_by_flow_log_id(datacenter_id, nat_gateway_id, flow_log_id, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param flow_log_id: The unique ID of the Flow Log. (required)
:type flow_log_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: FlowLog
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_flowlogs_find_by_flow_log_id_with_http_info(datacenter_id, nat_gateway_id, flow_log_id, **kwargs) # noqa: E501
    def datacenters_natgateways_flowlogs_find_by_flow_log_id_with_http_info(self, datacenter_id, nat_gateway_id, flow_log_id, **kwargs):  # noqa: E501
        """Retrieve NAT Gateway Flow Logs  # noqa: E501

        Retrieve the specified NAT Gateway Flow Log.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.datacenters_natgateways_flowlogs_find_by_flow_log_id_with_http_info(datacenter_id, nat_gateway_id, flow_log_id, async_req=True)
        >>> result = thread.get()

        :param datacenter_id: The unique ID of the data center. (required)
        :type datacenter_id: str
        :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
        :type nat_gateway_id: str
        :param flow_log_id: The unique ID of the Flow Log. (required)
        :type flow_log_id: str
        :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
        :type pretty: bool
        :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
        :type depth: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for an a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(FlowLog, status_code(int), headers(HTTPHeaderDict))
        """
        # Snapshot of this call's arguments; validated kwargs are merged in below.
        local_var_params = locals()

        # Operation-specific parameters accepted by this endpoint.
        all_params = [
            'datacenter_id',
            'nat_gateway_id',
            'flow_log_id',
            'pretty',
            'depth'
        ]
        # Framework-level options accepted by every operation.
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth',
                'response_type',
                'query_params'
            ]
        )

        # Reject unknown keyword arguments, then flatten kwargs into the
        # parameter dict so all options live in one mapping.
        for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
            if local_var_params_key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method datacenters_natgateways_flowlogs_find_by_flow_log_id" % local_var_params_key
                )
            local_var_params[local_var_params_key] = local_var_params_val
        del local_var_params['kwargs']
        # verify the required parameter 'datacenter_id' is set
        if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['datacenter_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_flowlogs_find_by_flow_log_id`")  # noqa: E501
        # verify the required parameter 'nat_gateway_id' is set
        if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['nat_gateway_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_flowlogs_find_by_flow_log_id`")  # noqa: E501
        # verify the required parameter 'flow_log_id' is set
        if self.api_client.client_side_validation and ('flow_log_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['flow_log_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `flow_log_id` when calling `datacenters_natgateways_flowlogs_find_by_flow_log_id`")  # noqa: E501
        # client-side range validation for 'depth' (API accepts 0..10)
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_find_by_flow_log_id`, must be a value less than or equal to `10`")  # noqa: E501
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_find_by_flow_log_id`, must be a value greater than or equal to `0`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the {placeholders} in the URL template below.
        path_params = {}
        if 'datacenter_id' in local_var_params:
            path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
        if 'nat_gateway_id' in local_var_params:
            path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501
        if 'flow_log_id' in local_var_params:
            path_params['flowLogId'] = local_var_params['flow_log_id']  # noqa: E501

        # Start from caller-supplied extra query params, then append the
        # documented ones when present.
        query_params = list(local_var_params.get('query_params', {}).items())
        if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
            query_params.append(('depth', local_var_params['depth']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

        # Default deserialization target; callers may override via response_type.
        response_type = 'FlowLog'
        if 'response_type' in kwargs:
            response_type = kwargs['response_type']

        return self.api_client.call_api(
            '/datacenters/{datacenterId}/natgateways/{natGatewayId}/flowlogs/{flowLogId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=response_type,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_flowlogs_get(self, datacenter_id, nat_gateway_id, **kwargs): # noqa: E501
"""List NAT Gateway Flow Logs # noqa: E501
List all the Flow Logs for the specified NAT Gateway. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_flowlogs_get(datacenter_id, nat_gateway_id, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param offset: The first element (from the complete list of the elements) to include in the response (used together with <b><i>limit</i></b> for pagination).
:type offset: int
:param limit: The maximum number of elements to return (use together with offset for pagination).
:type limit: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: FlowLogs
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_flowlogs_get_with_http_info(datacenter_id, nat_gateway_id, **kwargs) # noqa: E501
    def datacenters_natgateways_flowlogs_get_with_http_info(self, datacenter_id, nat_gateway_id, **kwargs):  # noqa: E501
        """List NAT Gateway Flow Logs  # noqa: E501

        List all the Flow Logs for the specified NAT Gateway.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.datacenters_natgateways_flowlogs_get_with_http_info(datacenter_id, nat_gateway_id, async_req=True)
        >>> result = thread.get()

        :param datacenter_id: The unique ID of the data center. (required)
        :type datacenter_id: str
        :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
        :type nat_gateway_id: str
        :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
        :type pretty: bool
        :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
        :type depth: int
        :param offset: The first element (from the complete list of the elements) to include in the response (used together with <b><i>limit</i></b> for pagination).
        :type offset: int
        :param limit: The maximum number of elements to return (use together with offset for pagination).
        :type limit: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for an a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(FlowLogs, status_code(int), headers(HTTPHeaderDict))
        """
        # Snapshot of this call's arguments; validated kwargs are merged in below.
        local_var_params = locals()

        # Operation-specific parameters accepted by this endpoint.
        all_params = [
            'datacenter_id',
            'nat_gateway_id',
            'pretty',
            'depth',
            'offset',
            'limit'
        ]
        # Framework-level options accepted by every operation.
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth',
                'response_type',
                'query_params'
            ]
        )

        # Reject unknown keyword arguments, then flatten kwargs into the
        # parameter dict so all options live in one mapping.
        for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
            if local_var_params_key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method datacenters_natgateways_flowlogs_get" % local_var_params_key
                )
            local_var_params[local_var_params_key] = local_var_params_val
        del local_var_params['kwargs']
        # verify the required parameter 'datacenter_id' is set
        if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['datacenter_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_flowlogs_get`")  # noqa: E501
        # verify the required parameter 'nat_gateway_id' is set
        if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['nat_gateway_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_flowlogs_get`")  # noqa: E501
        # client-side range validation: depth 0..10, offset >= 0, limit 1..10000
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_get`, must be a value less than or equal to `10`")  # noqa: E501
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_get`, must be a value greater than or equal to `0`")  # noqa: E501
        if self.api_client.client_side_validation and 'offset' in local_var_params and local_var_params['offset'] < 0:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `offset` when calling `datacenters_natgateways_flowlogs_get`, must be a value greater than or equal to `0`")  # noqa: E501
        if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 10000:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `limit` when calling `datacenters_natgateways_flowlogs_get`, must be a value less than or equal to `10000`")  # noqa: E501
        if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `limit` when calling `datacenters_natgateways_flowlogs_get`, must be a value greater than or equal to `1`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the {placeholders} in the URL template below.
        path_params = {}
        if 'datacenter_id' in local_var_params:
            path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
        if 'nat_gateway_id' in local_var_params:
            path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501

        # Start from caller-supplied extra query params, then append the
        # documented ones when present.
        query_params = list(local_var_params.get('query_params', {}).items())
        if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
            query_params.append(('depth', local_var_params['depth']))  # noqa: E501
        if 'offset' in local_var_params and local_var_params['offset'] is not None:  # noqa: E501
            query_params.append(('offset', local_var_params['offset']))  # noqa: E501
        if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
            query_params.append(('limit', local_var_params['limit']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

        # Default deserialization target; callers may override via response_type.
        response_type = 'FlowLogs'
        if 'response_type' in kwargs:
            response_type = kwargs['response_type']

        return self.api_client.call_api(
            '/datacenters/{datacenterId}/natgateways/{natGatewayId}/flowlogs', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=response_type,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_flowlogs_patch(self, datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log_properties, **kwargs): # noqa: E501
"""Partially modify NAT Gateway Flow Logs # noqa: E501
Update the properties of the specified NAT Gateway Flow Log. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_flowlogs_patch(datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log_properties, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param flow_log_id: The unique ID of the Flow Log. (required)
:type flow_log_id: str
:param nat_gateway_flow_log_properties: The properties of the Flow Log to be updated. (required)
:type nat_gateway_flow_log_properties: FlowLogProperties
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: FlowLog
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_flowlogs_patch_with_http_info(datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log_properties, **kwargs) # noqa: E501
    def datacenters_natgateways_flowlogs_patch_with_http_info(self, datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log_properties, **kwargs):  # noqa: E501
        """Partially modify NAT Gateway Flow Logs  # noqa: E501

        Update the properties of the specified NAT Gateway Flow Log.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.datacenters_natgateways_flowlogs_patch_with_http_info(datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log_properties, async_req=True)
        >>> result = thread.get()

        :param datacenter_id: The unique ID of the data center. (required)
        :type datacenter_id: str
        :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
        :type nat_gateway_id: str
        :param flow_log_id: The unique ID of the Flow Log. (required)
        :type flow_log_id: str
        :param nat_gateway_flow_log_properties: The properties of the Flow Log to be updated. (required)
        :type nat_gateway_flow_log_properties: FlowLogProperties
        :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
        :type pretty: bool
        :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
        :type depth: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for an a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(FlowLog, status_code(int), headers(HTTPHeaderDict))
        """
        # Snapshot of this call's arguments; validated kwargs are merged in below.
        local_var_params = locals()

        # Operation-specific parameters accepted by this endpoint.
        all_params = [
            'datacenter_id',
            'nat_gateway_id',
            'flow_log_id',
            'nat_gateway_flow_log_properties',
            'pretty',
            'depth'
        ]
        # Framework-level options accepted by every operation.
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth',
                'response_type',
                'query_params'
            ]
        )

        # Reject unknown keyword arguments, then flatten kwargs into the
        # parameter dict so all options live in one mapping.
        for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
            if local_var_params_key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method datacenters_natgateways_flowlogs_patch" % local_var_params_key
                )
            local_var_params[local_var_params_key] = local_var_params_val
        del local_var_params['kwargs']
        # verify the required parameter 'datacenter_id' is set
        if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['datacenter_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_flowlogs_patch`")  # noqa: E501
        # verify the required parameter 'nat_gateway_id' is set
        if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['nat_gateway_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_flowlogs_patch`")  # noqa: E501
        # verify the required parameter 'flow_log_id' is set
        if self.api_client.client_side_validation and ('flow_log_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['flow_log_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `flow_log_id` when calling `datacenters_natgateways_flowlogs_patch`")  # noqa: E501
        # verify the required parameter 'nat_gateway_flow_log_properties' is set
        if self.api_client.client_side_validation and ('nat_gateway_flow_log_properties' not in local_var_params or  # noqa: E501
                                                       local_var_params['nat_gateway_flow_log_properties'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `nat_gateway_flow_log_properties` when calling `datacenters_natgateways_flowlogs_patch`")  # noqa: E501
        # client-side range validation for 'depth' (API accepts 0..10)
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_patch`, must be a value less than or equal to `10`")  # noqa: E501
        if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_patch`, must be a value greater than or equal to `0`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the {placeholders} in the URL template below.
        path_params = {}
        if 'datacenter_id' in local_var_params:
            path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
        if 'nat_gateway_id' in local_var_params:
            path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501
        if 'flow_log_id' in local_var_params:
            path_params['flowLogId'] = local_var_params['flow_log_id']  # noqa: E501

        # Start from caller-supplied extra query params, then append the
        # documented ones when present.
        query_params = list(local_var_params.get('query_params', {}).items())
        if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
            query_params.append(('depth', local_var_params['depth']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        # The PATCH payload: the Flow Log properties object supplied by the caller.
        body_params = None
        if 'nat_gateway_flow_log_properties' in local_var_params:
            body_params = local_var_params['nat_gateway_flow_log_properties']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

        # Default deserialization target; callers may override via response_type.
        response_type = 'FlowLog'
        if 'response_type' in kwargs:
            response_type = kwargs['response_type']

        return self.api_client.call_api(
            '/datacenters/{datacenterId}/natgateways/{natGatewayId}/flowlogs/{flowLogId}', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=response_type,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_flowlogs_post(self, datacenter_id, nat_gateway_id, nat_gateway_flow_log, **kwargs): # noqa: E501
"""Create NAT Gateway Flow Logs # noqa: E501
Add a new Flow Log for the specified NAT Gateway. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_flowlogs_post(datacenter_id, nat_gateway_id, nat_gateway_flow_log, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param nat_gateway_flow_log: The Flow Log to create. (required)
:type nat_gateway_flow_log: FlowLog
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: FlowLog
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_flowlogs_post_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_flow_log, **kwargs) # noqa: E501
def datacenters_natgateways_flowlogs_post_with_http_info(self, datacenter_id, nat_gateway_id, nat_gateway_flow_log, **kwargs): # noqa: E501
"""Create NAT Gateway Flow Logs # noqa: E501
Add a new Flow Log for the specified NAT Gateway. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_flowlogs_post_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_flow_log, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param nat_gateway_flow_log: The Flow Log to create. (required)
:type nat_gateway_flow_log: FlowLog
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(FlowLog, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'datacenter_id',
'nat_gateway_id',
'nat_gateway_flow_log',
'pretty',
'depth'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type',
'query_params'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method datacenters_natgateways_flowlogs_post" % local_var_params_key
)
local_var_params[local_var_params_key] = local_var_params_val
del local_var_params['kwargs']
# verify the required parameter 'datacenter_id' is set
if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or # noqa: E501
local_var_params['datacenter_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_flowlogs_post`") # noqa: E501
# verify the required parameter 'nat_gateway_id' is set
if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or # noqa: E501
local_var_params['nat_gateway_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_flowlogs_post`") # noqa: E501
# verify the required parameter 'nat_gateway_flow_log' is set
if self.api_client.client_side_validation and ('nat_gateway_flow_log' not in local_var_params or # noqa: E501
local_var_params['nat_gateway_flow_log'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `nat_gateway_flow_log` when calling `datacenters_natgateways_flowlogs_post`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_post`, must be a value less than or equal to `10`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_post`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'datacenter_id' in local_var_params:
path_params['datacenterId'] = local_var_params['datacenter_id'] # noqa: E501
if 'nat_gateway_id' in local_var_params:
path_params['natGatewayId'] = local_var_params['nat_gateway_id'] # noqa: E501
query_params = list(local_var_params.get('query_params', {}).items())
if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'depth' in local_var_params and local_var_params['depth'] is not None: # noqa: E501
query_params.append(('depth', local_var_params['depth'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'nat_gateway_flow_log' in local_var_params:
body_params = local_var_params['nat_gateway_flow_log']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic Authentication', 'Token Authentication'] # noqa: E501
response_type = 'FlowLog'
if 'response_type' in kwargs:
response_type = kwargs['response_type']
return self.api_client.call_api(
'/datacenters/{datacenterId}/natgateways/{natGatewayId}/flowlogs', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=response_type, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_flowlogs_put(self, datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log, **kwargs): # noqa: E501
"""Modify NAT Gateway Flow Logs # noqa: E501
Modify the specified NAT Gateway Flow Log. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_flowlogs_put(datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param flow_log_id: The unique ID of the Flow Log. (required)
:type flow_log_id: str
:param nat_gateway_flow_log: The modified NAT Gateway Flow Log. (required)
:type nat_gateway_flow_log: FlowLogPut
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: FlowLog
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_flowlogs_put_with_http_info(datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log, **kwargs) # noqa: E501
def datacenters_natgateways_flowlogs_put_with_http_info(self, datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log, **kwargs): # noqa: E501
"""Modify NAT Gateway Flow Logs # noqa: E501
Modify the specified NAT Gateway Flow Log. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_flowlogs_put_with_http_info(datacenter_id, nat_gateway_id, flow_log_id, nat_gateway_flow_log, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param flow_log_id: The unique ID of the Flow Log. (required)
:type flow_log_id: str
:param nat_gateway_flow_log: The modified NAT Gateway Flow Log. (required)
:type nat_gateway_flow_log: FlowLogPut
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(FlowLog, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'datacenter_id',
'nat_gateway_id',
'flow_log_id',
'nat_gateway_flow_log',
'pretty',
'depth'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type',
'query_params'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method datacenters_natgateways_flowlogs_put" % local_var_params_key
)
local_var_params[local_var_params_key] = local_var_params_val
del local_var_params['kwargs']
# verify the required parameter 'datacenter_id' is set
if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or # noqa: E501
local_var_params['datacenter_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_flowlogs_put`") # noqa: E501
# verify the required parameter 'nat_gateway_id' is set
if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or # noqa: E501
local_var_params['nat_gateway_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_flowlogs_put`") # noqa: E501
# verify the required parameter 'flow_log_id' is set
if self.api_client.client_side_validation and ('flow_log_id' not in local_var_params or # noqa: E501
local_var_params['flow_log_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `flow_log_id` when calling `datacenters_natgateways_flowlogs_put`") # noqa: E501
# verify the required parameter 'nat_gateway_flow_log' is set
if self.api_client.client_side_validation and ('nat_gateway_flow_log' not in local_var_params or # noqa: E501
local_var_params['nat_gateway_flow_log'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `nat_gateway_flow_log` when calling `datacenters_natgateways_flowlogs_put`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_put`, must be a value less than or equal to `10`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_flowlogs_put`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'datacenter_id' in local_var_params:
path_params['datacenterId'] = local_var_params['datacenter_id'] # noqa: E501
if 'nat_gateway_id' in local_var_params:
path_params['natGatewayId'] = local_var_params['nat_gateway_id'] # noqa: E501
if 'flow_log_id' in local_var_params:
path_params['flowLogId'] = local_var_params['flow_log_id'] # noqa: E501
query_params = list(local_var_params.get('query_params', {}).items())
if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'depth' in local_var_params and local_var_params['depth'] is not None: # noqa: E501
query_params.append(('depth', local_var_params['depth'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'nat_gateway_flow_log' in local_var_params:
body_params = local_var_params['nat_gateway_flow_log']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic Authentication', 'Token Authentication'] # noqa: E501
response_type = 'FlowLog'
if 'response_type' in kwargs:
response_type = kwargs['response_type']
return self.api_client.call_api(
'/datacenters/{datacenterId}/natgateways/{natGatewayId}/flowlogs/{flowLogId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=response_type, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_get(self, datacenter_id, **kwargs): # noqa: E501
"""List NAT Gateways # noqa: E501
List all NAT Gateways within the data center. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_get(datacenter_id, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: NatGateways
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_get_with_http_info(datacenter_id, **kwargs) # noqa: E501
def datacenters_natgateways_get_with_http_info(self, datacenter_id, **kwargs): # noqa: E501
"""List NAT Gateways # noqa: E501
List all NAT Gateways within the data center. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_get_with_http_info(datacenter_id, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(NatGateways, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'datacenter_id',
'pretty',
'depth',
'x_contract_number'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type',
'query_params'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method datacenters_natgateways_get" % local_var_params_key
)
local_var_params[local_var_params_key] = local_var_params_val
del local_var_params['kwargs']
# verify the required parameter 'datacenter_id' is set
if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or # noqa: E501
local_var_params['datacenter_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_get`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_get`, must be a value less than or equal to `10`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_get`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'datacenter_id' in local_var_params:
path_params['datacenterId'] = local_var_params['datacenter_id'] # noqa: E501
query_params = list(local_var_params.get('query_params', {}).items())
if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'depth' in local_var_params and local_var_params['depth'] is not None: # noqa: E501
query_params.append(('depth', local_var_params['depth'])) # noqa: E501
header_params = {}
if 'x_contract_number' in local_var_params:
header_params['X-Contract-Number'] = local_var_params['x_contract_number'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic Authentication', 'Token Authentication'] # noqa: E501
response_type = 'NatGateways'
if 'response_type' in kwargs:
response_type = kwargs['response_type']
return self.api_client.call_api(
'/datacenters/{datacenterId}/natgateways', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=response_type, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_patch(self, datacenter_id, nat_gateway_id, nat_gateway_properties, **kwargs): # noqa: E501
"""Partially modify NAT Gateways # noqa: E501
Update the properties of the specified NAT Gateway within the data center. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_patch(datacenter_id, nat_gateway_id, nat_gateway_properties, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param nat_gateway_properties: The properties of the NAT Gateway to be updated. (required)
:type nat_gateway_properties: NatGatewayProperties
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: NatGateway
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_natgateways_patch_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_properties, **kwargs) # noqa: E501
def datacenters_natgateways_patch_with_http_info(self, datacenter_id, nat_gateway_id, nat_gateway_properties, **kwargs): # noqa: E501
"""Partially modify NAT Gateways # noqa: E501
Update the properties of the specified NAT Gateway within the data center. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_natgateways_patch_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_properties, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the data center. (required)
:type datacenter_id: str
:param nat_gateway_id: The unique ID of the NAT Gateway. (required)
:type nat_gateway_id: str
:param nat_gateway_properties: The properties of the NAT Gateway to be updated. (required)
:type nat_gateway_properties: NatGatewayProperties
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(NatGateway, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'datacenter_id',
'nat_gateway_id',
'nat_gateway_properties',
'pretty',
'depth',
'x_contract_number'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type',
'query_params'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method datacenters_natgateways_patch" % local_var_params_key
)
local_var_params[local_var_params_key] = local_var_params_val
del local_var_params['kwargs']
# verify the required parameter 'datacenter_id' is set
if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or # noqa: E501
local_var_params['datacenter_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_patch`") # noqa: E501
# verify the required parameter 'nat_gateway_id' is set
if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or # noqa: E501
local_var_params['nat_gateway_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_patch`") # noqa: E501
# verify the required parameter 'nat_gateway_properties' is set
if self.api_client.client_side_validation and ('nat_gateway_properties' not in local_var_params or # noqa: E501
local_var_params['nat_gateway_properties'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `nat_gateway_properties` when calling `datacenters_natgateways_patch`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_patch`, must be a value less than or equal to `10`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_patch`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'datacenter_id' in local_var_params:
path_params['datacenterId'] = local_var_params['datacenter_id'] # noqa: E501
if 'nat_gateway_id' in local_var_params:
path_params['natGatewayId'] = local_var_params['nat_gateway_id'] # noqa: E501
query_params = list(local_var_params.get('query_params', {}).items())
if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'depth' in local_var_params and local_var_params['depth'] is not None: # noqa: E501
query_params.append(('depth', local_var_params['depth'])) # noqa: E501
header_params = {}
if 'x_contract_number' in local_var_params:
header_params['X-Contract-Number'] = local_var_params['x_contract_number'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'nat_gateway_properties' in local_var_params:
body_params = local_var_params['nat_gateway_properties']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic Authentication', 'Token Authentication'] # noqa: E501
response_type = 'NatGateway'
if 'response_type' in kwargs:
response_type = kwargs['response_type']
return self.api_client.call_api(
'/datacenters/{datacenterId}/natgateways/{natGatewayId}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=response_type, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_post(self, datacenter_id, nat_gateway, **kwargs):  # noqa: E501
    """Create NAT Gateways  # noqa: E501

    Create a NAT Gateway within the data center. This operation is
    restricted to contract owner, admin, and users with
    'createInternetAccess' privileges.

    The call is synchronous by default; pass ``async_req=True`` to obtain
    a request thread instead:

    >>> thread = api.datacenters_natgateways_post(datacenter_id, nat_gateway, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway: The NAT Gateway to create. (required)
    :type nat_gateway: NatGateway
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects, from 0 (direct properties only, no children) up to deeper levels that include children and their children.
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request: a single
                             number for a total timeout, or a
                             (connection, read) tuple.
    :return: Returns the result object; when called asynchronously,
             returns the request thread.
    :rtype: NatGateway
    """
    # Force the detailed variant to hand back only the deserialized body
    # (HTTP status code and headers are dropped).
    kwargs.update(_return_http_data_only=True)
    return self.datacenters_natgateways_post_with_http_info(datacenter_id, nat_gateway, **kwargs)  # noqa: E501
def datacenters_natgateways_post_with_http_info(self, datacenter_id, nat_gateway, **kwargs):  # noqa: E501
    """Create NAT Gateways  # noqa: E501

    Create a NAT Gateway within the data center. This operation is restricted to contract owner, admin, and users with 'createInternetAccess' privileges.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.datacenters_natgateways_post_with_http_info(datacenter_id, nat_gateway, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway: The NAT Gateway to create. (required)
    :type nat_gateway: NatGateway
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(NatGateway, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() is captured before any other local is bound, so
    # local_var_params maps exactly the declared arguments plus 'kwargs'.
    local_var_params = locals()

    # Every keyword accepted by this endpoint; anything else is rejected
    # below with ApiTypeError.
    all_params = [
        'datacenter_id',
        'nat_gateway',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Validate caller-supplied kwargs and flatten them into
    # local_var_params so all parameters are handled uniformly.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method datacenters_natgateways_post" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'datacenter_id' is set
    if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['datacenter_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_post`")  # noqa: E501
    # verify the required parameter 'nat_gateway' is set
    if self.api_client.client_side_validation and ('nat_gateway' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway` when calling `datacenters_natgateways_post`")  # noqa: E501

    # 'depth' is constrained by the API spec to the closed range [0, 10].
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_post`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_post`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the URL template passed to call_api below.
    path_params = {}
    if 'datacenter_id' in local_var_params:
        path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501

    # Start from any caller-provided raw query params, then append the
    # endpoint's own optional query arguments.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The NAT Gateway definition is sent as the JSON request body.
    body_params = None
    if 'nat_gateway' in local_var_params:
        body_params = local_var_params['nat_gateway']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the deserialization target via response_type.
    response_type = 'NatGateway'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/datacenters/{datacenterId}/natgateways', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_put(self, datacenter_id, nat_gateway_id, nat_gateway, **kwargs):  # noqa: E501
    """Modify NAT Gateways  # noqa: E501

    Modify the properties of the specified NAT Gateway within the data
    center.

    The call is synchronous by default; pass ``async_req=True`` to obtain
    a request thread instead:

    >>> thread = api.datacenters_natgateways_put(datacenter_id, nat_gateway_id, nat_gateway, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway: The modified NAT Gateway. (required)
    :type nat_gateway: NatGatewayPut
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects, from 0 (direct properties only, no children) up to deeper levels that include children and their children.
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request: a single
                             number for a total timeout, or a
                             (connection, read) tuple.
    :return: Returns the result object; when called asynchronously,
             returns the request thread.
    :rtype: NatGateway
    """
    # Force the detailed variant to hand back only the deserialized body
    # (HTTP status code and headers are dropped).
    kwargs.update(_return_http_data_only=True)
    return self.datacenters_natgateways_put_with_http_info(datacenter_id, nat_gateway_id, nat_gateway, **kwargs)  # noqa: E501
def datacenters_natgateways_put_with_http_info(self, datacenter_id, nat_gateway_id, nat_gateway, **kwargs):  # noqa: E501
    """Modify NAT Gateways  # noqa: E501

    Modify the properties of the specified NAT Gateway within the data center.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.datacenters_natgateways_put_with_http_info(datacenter_id, nat_gateway_id, nat_gateway, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway: The modified NAT Gateway. (required)
    :type nat_gateway: NatGatewayPut
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(NatGateway, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() is captured before any other local is bound, so
    # local_var_params maps exactly the declared arguments plus 'kwargs'.
    local_var_params = locals()

    # Every keyword accepted by this endpoint; anything else is rejected
    # below with ApiTypeError.
    all_params = [
        'datacenter_id',
        'nat_gateway_id',
        'nat_gateway',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Validate caller-supplied kwargs and flatten them into
    # local_var_params so all parameters are handled uniformly.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method datacenters_natgateways_put" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'datacenter_id' is set
    if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['datacenter_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_put`")  # noqa: E501
    # verify the required parameter 'nat_gateway_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_put`")  # noqa: E501
    # verify the required parameter 'nat_gateway' is set
    if self.api_client.client_side_validation and ('nat_gateway' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway` when calling `datacenters_natgateways_put`")  # noqa: E501

    # 'depth' is constrained by the API spec to the closed range [0, 10].
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_put`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_put`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the URL template passed to call_api below.
    path_params = {}
    if 'datacenter_id' in local_var_params:
        path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
    if 'nat_gateway_id' in local_var_params:
        path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501

    # Start from any caller-provided raw query params, then append the
    # endpoint's own optional query arguments.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The modified NAT Gateway is sent as the JSON request body.
    body_params = None
    if 'nat_gateway' in local_var_params:
        body_params = local_var_params['nat_gateway']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the deserialization target via response_type.
    response_type = 'NatGateway'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/datacenters/{datacenterId}/natgateways/{natGatewayId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_rules_delete(self, datacenter_id, nat_gateway_id, nat_gateway_rule_id, **kwargs):  # noqa: E501
    """Delete NAT Gateway rules  # noqa: E501

    Delete the specified NAT Gateway rule.

    The call is synchronous by default; pass ``async_req=True`` to obtain
    a request thread instead:

    >>> thread = api.datacenters_natgateways_rules_delete(datacenter_id, nat_gateway_id, nat_gateway_rule_id, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule_id: The unique ID of the NAT Gateway rule. (required)
    :type nat_gateway_rule_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects, from 0 (direct properties only, no children) up to deeper levels that include children and their children.
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request: a single
                             number for a total timeout, or a
                             (connection, read) tuple.
    :return: Returns the result object; when called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # Force the detailed variant to hand back only the deserialized body
    # (HTTP status code and headers are dropped).
    kwargs.update(_return_http_data_only=True)
    return self.datacenters_natgateways_rules_delete_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_rule_id, **kwargs)  # noqa: E501
def datacenters_natgateways_rules_delete_with_http_info(self, datacenter_id, nat_gateway_id, nat_gateway_rule_id, **kwargs):  # noqa: E501
    """Delete NAT Gateway rules  # noqa: E501

    Delete the specified NAT Gateway rule.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.datacenters_natgateways_rules_delete_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_rule_id, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule_id: The unique ID of the NAT Gateway rule. (required)
    :type nat_gateway_rule_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # locals() is captured before any other local is bound, so
    # local_var_params maps exactly the declared arguments plus 'kwargs'.
    local_var_params = locals()

    # Every keyword accepted by this endpoint; anything else is rejected
    # below with ApiTypeError.
    all_params = [
        'datacenter_id',
        'nat_gateway_id',
        'nat_gateway_rule_id',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Validate caller-supplied kwargs and flatten them into
    # local_var_params so all parameters are handled uniformly.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method datacenters_natgateways_rules_delete" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'datacenter_id' is set
    if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['datacenter_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_rules_delete`")  # noqa: E501
    # verify the required parameter 'nat_gateway_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_rules_delete`")  # noqa: E501
    # verify the required parameter 'nat_gateway_rule_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_rule_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_rule_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_rule_id` when calling `datacenters_natgateways_rules_delete`")  # noqa: E501

    # 'depth' is constrained by the API spec to the closed range [0, 10].
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_delete`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_delete`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the URL template passed to call_api below.
    path_params = {}
    if 'datacenter_id' in local_var_params:
        path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
    if 'nat_gateway_id' in local_var_params:
        path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501
    if 'nat_gateway_rule_id' in local_var_params:
        path_params['natGatewayRuleId'] = local_var_params['nat_gateway_rule_id']  # noqa: E501

    # Start from any caller-provided raw query params, then append the
    # endpoint's own optional query arguments.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # DELETE sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # No response body is expected; callers may still override via
    # response_type.
    response_type = None
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/datacenters/{datacenterId}/natgateways/{natGatewayId}/rules/{natGatewayRuleId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_rules_find_by_nat_gateway_rule_id(self, datacenter_id, nat_gateway_id, nat_gateway_rule_id, **kwargs):  # noqa: E501
    """Retrieve NAT Gateway rules  # noqa: E501

    Retrieve the properties of the specified NAT Gateway rule.

    The call is synchronous by default; pass ``async_req=True`` to obtain
    a request thread instead:

    >>> thread = api.datacenters_natgateways_rules_find_by_nat_gateway_rule_id(datacenter_id, nat_gateway_id, nat_gateway_rule_id, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule_id: The unique ID of the NAT Gateway rule. (required)
    :type nat_gateway_rule_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects, from 0 (direct properties only, no children) up to deeper levels that include children and their children.
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request: a single
                             number for a total timeout, or a
                             (connection, read) tuple.
    :return: Returns the result object; when called asynchronously,
             returns the request thread.
    :rtype: NatGatewayRule
    """
    # Force the detailed variant to hand back only the deserialized body
    # (HTTP status code and headers are dropped).
    kwargs.update(_return_http_data_only=True)
    return self.datacenters_natgateways_rules_find_by_nat_gateway_rule_id_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_rule_id, **kwargs)  # noqa: E501
def datacenters_natgateways_rules_find_by_nat_gateway_rule_id_with_http_info(self, datacenter_id, nat_gateway_id, nat_gateway_rule_id, **kwargs):  # noqa: E501
    """Retrieve NAT Gateway rules  # noqa: E501

    Retrieve the properties of the specified NAT Gateway rule.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.datacenters_natgateways_rules_find_by_nat_gateway_rule_id_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_rule_id, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule_id: The unique ID of the NAT Gateway rule. (required)
    :type nat_gateway_rule_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(NatGatewayRule, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() is captured before any other local is bound, so
    # local_var_params maps exactly the declared arguments plus 'kwargs'.
    local_var_params = locals()

    # Every keyword accepted by this endpoint; anything else is rejected
    # below with ApiTypeError.
    all_params = [
        'datacenter_id',
        'nat_gateway_id',
        'nat_gateway_rule_id',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Validate caller-supplied kwargs and flatten them into
    # local_var_params so all parameters are handled uniformly.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method datacenters_natgateways_rules_find_by_nat_gateway_rule_id" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'datacenter_id' is set
    if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['datacenter_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_rules_find_by_nat_gateway_rule_id`")  # noqa: E501
    # verify the required parameter 'nat_gateway_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_rules_find_by_nat_gateway_rule_id`")  # noqa: E501
    # verify the required parameter 'nat_gateway_rule_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_rule_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_rule_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_rule_id` when calling `datacenters_natgateways_rules_find_by_nat_gateway_rule_id`")  # noqa: E501

    # 'depth' is constrained by the API spec to the closed range [0, 10].
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_find_by_nat_gateway_rule_id`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_find_by_nat_gateway_rule_id`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the URL template passed to call_api below.
    path_params = {}
    if 'datacenter_id' in local_var_params:
        path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
    if 'nat_gateway_id' in local_var_params:
        path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501
    if 'nat_gateway_rule_id' in local_var_params:
        path_params['natGatewayRuleId'] = local_var_params['nat_gateway_rule_id']  # noqa: E501

    # Start from any caller-provided raw query params, then append the
    # endpoint's own optional query arguments.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the deserialization target via response_type.
    response_type = 'NatGatewayRule'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/datacenters/{datacenterId}/natgateways/{natGatewayId}/rules/{natGatewayRuleId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_rules_get(self, datacenter_id, nat_gateway_id, **kwargs):  # noqa: E501
    """List NAT Gateway rules.

    Retrieve all rules defined for the specified NAT Gateway. The call is
    synchronous by default; pass ``async_req=True`` to obtain a thread whose
    ``get()`` yields the result instead.

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object is
        returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request: a single number for a
        total timeout, or a (connection, read) pair of timeouts.
    :return: The result object, or the request thread when called
        asynchronously.
    :rtype: NatGatewayRules
    """
    # Callers of this convenience wrapper want only the deserialized body,
    # not the (data, status_code, headers) triple.
    kwargs['_return_http_data_only'] = True
    return self.datacenters_natgateways_rules_get_with_http_info(
        datacenter_id, nat_gateway_id, **kwargs)  # noqa: E501
def datacenters_natgateways_rules_get_with_http_info(self, datacenter_id, nat_gateway_id, **kwargs):  # noqa: E501
    """List NAT Gateway rules  # noqa: E501

    List all rules for the specified NAT Gateway.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.datacenters_natgateways_rules_get_with_http_info(datacenter_id, nat_gateway_id, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(NatGatewayRules, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot every argument (named parameters plus the raw ``kwargs``
    # dict) so they can be accessed uniformly by name below.
    local_var_params = locals()

    # Endpoint-specific parameters accepted by this method.
    all_params = [
        'datacenter_id',
        'nat_gateway_id',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options accepted by every generated endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones
    # into ``local_var_params``.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method datacenters_natgateways_rules_get" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'datacenter_id' is set
    if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['datacenter_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_rules_get`")  # noqa: E501
    # verify the required parameter 'nat_gateway_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_rules_get`")  # noqa: E501

    # ``depth`` must stay within the documented range 0..10.
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_get`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_get`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    # Path template placeholders for
    # /datacenters/{datacenterId}/natgateways/{natGatewayId}/rules
    path_params = {}
    if 'datacenter_id' in local_var_params:
        path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
    if 'nat_gateway_id' in local_var_params:
        path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501

    # Caller-supplied extra query parameters first, then the documented ones.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the model class the response is deserialized into.
    response_type = 'NatGatewayRules'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/datacenters/{datacenterId}/natgateways/{natGatewayId}/rules', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_rules_patch(self, datacenter_id, nat_gateway_id, nat_gateway_rule_id, nat_gateway_rule_properties, **kwargs):  # noqa: E501
    """Partially modify NAT Gateway rules.

    Update individual properties of the specified NAT Gateway rule. The call
    is synchronous by default; pass ``async_req=True`` to obtain a thread
    whose ``get()`` yields the result instead.

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule_id: The unique ID of the NAT Gateway rule. (required)
    :type nat_gateway_rule_id: str
    :param nat_gateway_rule_properties: The properties of the NAT Gateway rule to be updated. (required)
    :type nat_gateway_rule_properties: NatGatewayRuleProperties
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object is
        returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request: a single number for a
        total timeout, or a (connection, read) pair of timeouts.
    :return: The result object, or the request thread when called
        asynchronously.
    :rtype: NatGatewayRule
    """
    # Callers of this convenience wrapper want only the deserialized body,
    # not the (data, status_code, headers) triple.
    kwargs['_return_http_data_only'] = True
    return self.datacenters_natgateways_rules_patch_with_http_info(
        datacenter_id, nat_gateway_id, nat_gateway_rule_id,
        nat_gateway_rule_properties, **kwargs)  # noqa: E501
def datacenters_natgateways_rules_patch_with_http_info(self, datacenter_id, nat_gateway_id, nat_gateway_rule_id, nat_gateway_rule_properties, **kwargs):  # noqa: E501
    """Partially modify NAT Gateway rules  # noqa: E501

    Update the properties of the specified NAT Gateway rule.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.datacenters_natgateways_rules_patch_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_rule_id, nat_gateway_rule_properties, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule_id: The unique ID of the NAT Gateway rule. (required)
    :type nat_gateway_rule_id: str
    :param nat_gateway_rule_properties: The properties of the NAT Gateway rule to be updated. (required)
    :type nat_gateway_rule_properties: NatGatewayRuleProperties
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(NatGatewayRule, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot every argument (named parameters plus the raw ``kwargs``
    # dict) so they can be accessed uniformly by name below.
    local_var_params = locals()

    # Endpoint-specific parameters accepted by this method.
    all_params = [
        'datacenter_id',
        'nat_gateway_id',
        'nat_gateway_rule_id',
        'nat_gateway_rule_properties',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options accepted by every generated endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones
    # into ``local_var_params``.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method datacenters_natgateways_rules_patch" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'datacenter_id' is set
    if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['datacenter_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_rules_patch`")  # noqa: E501
    # verify the required parameter 'nat_gateway_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_rules_patch`")  # noqa: E501
    # verify the required parameter 'nat_gateway_rule_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_rule_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_rule_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_rule_id` when calling `datacenters_natgateways_rules_patch`")  # noqa: E501
    # verify the required parameter 'nat_gateway_rule_properties' is set
    if self.api_client.client_side_validation and ('nat_gateway_rule_properties' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_rule_properties'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_rule_properties` when calling `datacenters_natgateways_rules_patch`")  # noqa: E501

    # ``depth`` must stay within the documented range 0..10.
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_patch`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_patch`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    # Path template placeholders for
    # /datacenters/{datacenterId}/natgateways/{natGatewayId}/rules/{natGatewayRuleId}
    path_params = {}
    if 'datacenter_id' in local_var_params:
        path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
    if 'nat_gateway_id' in local_var_params:
        path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501
    if 'nat_gateway_rule_id' in local_var_params:
        path_params['natGatewayRuleId'] = local_var_params['nat_gateway_rule_id']  # noqa: E501

    # Caller-supplied extra query parameters first, then the documented ones.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The PATCH body is the serialized properties object.
    body_params = None
    if 'nat_gateway_rule_properties' in local_var_params:
        body_params = local_var_params['nat_gateway_rule_properties']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the model class the response is deserialized into.
    response_type = 'NatGatewayRule'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/datacenters/{datacenterId}/natgateways/{natGatewayId}/rules/{natGatewayRuleId}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_rules_post(self, datacenter_id, nat_gateway_id, nat_gateway_rule, **kwargs):  # noqa: E501
    """Create NAT Gateway rules.

    Create a new rule for the specified NAT Gateway. The call is synchronous
    by default; pass ``async_req=True`` to obtain a thread whose ``get()``
    yields the result instead.

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule: The NAT Gateway rule to create. (required)
    :type nat_gateway_rule: NatGatewayRule
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object is
        returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request: a single number for a
        total timeout, or a (connection, read) pair of timeouts.
    :return: The result object, or the request thread when called
        asynchronously.
    :rtype: NatGatewayRule
    """
    # Callers of this convenience wrapper want only the deserialized body,
    # not the (data, status_code, headers) triple.
    kwargs['_return_http_data_only'] = True
    return self.datacenters_natgateways_rules_post_with_http_info(
        datacenter_id, nat_gateway_id, nat_gateway_rule, **kwargs)  # noqa: E501
def datacenters_natgateways_rules_post_with_http_info(self, datacenter_id, nat_gateway_id, nat_gateway_rule, **kwargs):  # noqa: E501
    """Create NAT Gateway rules  # noqa: E501

    Create a rule for the specified NAT Gateway.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.datacenters_natgateways_rules_post_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_rule, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule: The NAT Gateway rule to create. (required)
    :type nat_gateway_rule: NatGatewayRule
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(NatGatewayRule, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot every argument (named parameters plus the raw ``kwargs``
    # dict) so they can be accessed uniformly by name below.
    local_var_params = locals()

    # Endpoint-specific parameters accepted by this method.
    all_params = [
        'datacenter_id',
        'nat_gateway_id',
        'nat_gateway_rule',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options accepted by every generated endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones
    # into ``local_var_params``.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method datacenters_natgateways_rules_post" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'datacenter_id' is set
    if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['datacenter_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_rules_post`")  # noqa: E501
    # verify the required parameter 'nat_gateway_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_rules_post`")  # noqa: E501
    # verify the required parameter 'nat_gateway_rule' is set
    if self.api_client.client_side_validation and ('nat_gateway_rule' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_rule'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_rule` when calling `datacenters_natgateways_rules_post`")  # noqa: E501

    # ``depth`` must stay within the documented range 0..10.
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_post`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_post`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    # Path template placeholders for
    # /datacenters/{datacenterId}/natgateways/{natGatewayId}/rules
    path_params = {}
    if 'datacenter_id' in local_var_params:
        path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
    if 'nat_gateway_id' in local_var_params:
        path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501

    # Caller-supplied extra query parameters first, then the documented ones.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The POST body is the serialized rule to create.
    body_params = None
    if 'nat_gateway_rule' in local_var_params:
        body_params = local_var_params['nat_gateway_rule']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the model class the response is deserialized into.
    response_type = 'NatGatewayRule'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/datacenters/{datacenterId}/natgateways/{natGatewayId}/rules', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def datacenters_natgateways_rules_put(self, datacenter_id, nat_gateway_id, nat_gateway_rule_id, nat_gateway_rule, **kwargs):  # noqa: E501
    """Modify NAT Gateway rules.

    Replace the specified NAT Gateway rule with the supplied definition. The
    call is synchronous by default; pass ``async_req=True`` to obtain a
    thread whose ``get()`` yields the result instead.

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule_id: The unique ID of the NAT Gateway rule. (required)
    :type nat_gateway_rule_id: str
    :param nat_gateway_rule: The modified NAT Gateway rule. (required)
    :type nat_gateway_rule: NatGatewayRulePut
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object is
        returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request: a single number for a
        total timeout, or a (connection, read) pair of timeouts.
    :return: The result object, or the request thread when called
        asynchronously.
    :rtype: NatGatewayRule
    """
    # Callers of this convenience wrapper want only the deserialized body,
    # not the (data, status_code, headers) triple.
    kwargs['_return_http_data_only'] = True
    return self.datacenters_natgateways_rules_put_with_http_info(
        datacenter_id, nat_gateway_id, nat_gateway_rule_id,
        nat_gateway_rule, **kwargs)  # noqa: E501
def datacenters_natgateways_rules_put_with_http_info(self, datacenter_id, nat_gateway_id, nat_gateway_rule_id, nat_gateway_rule, **kwargs):  # noqa: E501
    """Modify NAT Gateway rules  # noqa: E501

    Modify the specified NAT Gateway rule.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.datacenters_natgateways_rules_put_with_http_info(datacenter_id, nat_gateway_id, nat_gateway_rule_id, nat_gateway_rule, async_req=True)
    >>> result = thread.get()

    :param datacenter_id: The unique ID of the data center. (required)
    :type datacenter_id: str
    :param nat_gateway_id: The unique ID of the NAT Gateway. (required)
    :type nat_gateway_id: str
    :param nat_gateway_rule_id: The unique ID of the NAT Gateway rule. (required)
    :type nat_gateway_rule_id: str
    :param nat_gateway_rule: The modified NAT Gateway rule. (required)
    :type nat_gateway_rule: NatGatewayRulePut
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(NatGatewayRule, status_code(int), headers(HTTPHeaderDict))
    """

    local_var_params = locals()

    # Endpoint parameters documented in the API spec; any other keyword
    # (apart from the request-control options added below) is rejected.
    all_params = [
        'datacenter_id',
        'nat_gateway_id',
        'nat_gateway_rule_id',
        'nat_gateway_rule',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Request-control options accepted by every generated endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Fold **kwargs into local_var_params, rejecting anything unknown.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method datacenters_natgateways_rules_put" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']

    # verify the required parameter 'datacenter_id' is set
    if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['datacenter_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_natgateways_rules_put`")  # noqa: E501
    # verify the required parameter 'nat_gateway_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_id` when calling `datacenters_natgateways_rules_put`")  # noqa: E501
    # verify the required parameter 'nat_gateway_rule_id' is set
    if self.api_client.client_side_validation and ('nat_gateway_rule_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_rule_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_rule_id` when calling `datacenters_natgateways_rules_put`")  # noqa: E501
    # verify the required parameter 'nat_gateway_rule' is set
    if self.api_client.client_side_validation and ('nat_gateway_rule' not in local_var_params or  # noqa: E501
                                                   local_var_params['nat_gateway_rule'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `nat_gateway_rule` when calling `datacenters_natgateways_rules_put`")  # noqa: E501

    # `depth` must lie in the API-documented range [0, 10].
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_put`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_natgateways_rules_put`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    # URL template substitutions for the endpoint path.
    path_params = {}
    if 'datacenter_id' in local_var_params:
        path_params['datacenterId'] = local_var_params['datacenter_id']  # noqa: E501
    if 'nat_gateway_id' in local_var_params:
        path_params['natGatewayId'] = local_var_params['nat_gateway_id']  # noqa: E501
    if 'nat_gateway_rule_id' in local_var_params:
        path_params['natGatewayRuleId'] = local_var_params['nat_gateway_rule_id']  # noqa: E501

    # Start from any caller-provided extra query params, then add the
    # documented ones when given.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The NatGatewayRulePut payload is sent as the JSON request body.
    body_params = None
    if 'nat_gateway_rule' in local_var_params:
        body_params = local_var_params['nat_gateway_rule']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the deserialization target via response_type.
    response_type = 'NatGatewayRule'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/datacenters/{datacenterId}/natgateways/{natGatewayId}/rules/{natGatewayRuleId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
| StarcoderdataPython |
3354215 | import notification_method
import datetime
from database import *
def send_asap_notifications():
    """Send "new meeting" notifications for every batch not yet announced.

    Repeatedly picks any meeting with ``notified_asap == False``, notifies
    every notification method configured on its group about the whole batch,
    then marks the entire batch as notified so it is not picked again.
    Returns when no un-notified meeting remains.
    """
    # peewee requires `== False` (not `is False` / `not ...`) to build SQL.
    pending = Meeting.get_or_none(Meeting.notified_asap == False)
    while pending:
        batch = pending.batch
        for method_record in pending.group.notification_methods:
            # Resolve the stored record into a concrete notifier object;
            # use a distinct name so the loop variable is not shadowed.
            notifier = notification_method.get_notification_method(method_record)
            notifier.notify_asap_batch(batch)
        # Mark the whole batch at once so every meeting in it is covered.
        Meeting.update(notified_asap=True).where(Meeting.batch == batch).execute()
        pending = Meeting.get_or_none(Meeting.notified_asap == False)
def send_starting_soon_notifications():
    """Notify groups about meetings starting within the next 15 minutes.

    For each meeting that has not yet received a "starting soon" notice and
    whose start time falls inside the upcoming 15-minute window, every
    notification method on the meeting's group is invoked, then the meeting
    is flagged so it is only notified once.
    """
    now = datetime.datetime.now(tz=datetime.timezone.utc)
    # Named argument instead of the opaque positional form timedelta(0, 15*60).
    in_future_window = now + datetime.timedelta(minutes=15)
    # peewee requires `== False` here to generate the SQL predicate.
    meetings_to_notify = (
        Meeting.select()
        .where(Meeting.notified_before_lesson == False)
        .where(Meeting.starts_at < in_future_window)
    )
    for meeting in meetings_to_notify:
        for method_record in meeting.group.notification_methods:
            # Distinct name: don't shadow the record with the notifier object.
            notifier = notification_method.get_notification_method(method_record)
            notifier.notify_before_start(meeting)
        meeting.notified_before_lesson = True
        meeting.save()
# Guard the entry point so importing this module does not immediately hit
# the database / send notifications; behavior when run as a script is
# unchanged.
if __name__ == "__main__":
    send_asap_notifications()
    send_starting_soon_notifications()
| StarcoderdataPython |
72257 | import logging
def setup_logging():
    """Configure the root logger for DEBUG-level console output and return it.

    Idempotent: if the root logger already has handlers (e.g. from a previous
    call or from ``logging.basicConfig``), no additional handler is attached,
    so repeated calls do not produce duplicated log lines.

    :return: the configured root logger.
    """
    # The empty name yields the root logger.
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)

    # Fix for the original behavior: calling this twice used to stack a
    # second StreamHandler, duplicating every message.
    if not logger.handlers:
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        ch.setFormatter(formatter)
        logger.addHandler(ch)

    return logger
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.